diff --git a/qa_mdt/.gitattributes b/qa_mdt/.gitattributes
new file mode 100644
index 0000000000000000000000000000000000000000..a6344aac8c09253b3b630fb776ae94478aa0275b
--- /dev/null
+++ b/qa_mdt/.gitattributes
@@ -0,0 +1,35 @@
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
diff --git a/qa_mdt/README.md b/qa_mdt/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..decfebd43b1de3303f9e0306b059120e13ccc811
--- /dev/null
+++ b/qa_mdt/README.md
@@ -0,0 +1,37 @@
+---
+library_name: diffusers
+tags:
+- music
+---
+
+# Hugging Face Diffusers Implementation of QA-MDT
+**QA-MDT: Quality-Aware Diffusion for Text-to-Music 🎶**
+
+QA-MDT brings a new approach to text-to-music generation by using quality-aware training to tackle issues like low-fidelity audio and weak labeling in datasets.
+
+With a masked diffusion transformer (MDT), QA-MDT delivers SOTA results on MusicCaps and Song-Describer, enhancing both quality and musicality.
+
+
+## Usage:
+
+```bash
+!git lfs install
+!git clone https://huggingface.co/jadechoghari/qa-mdt
+```
+
+```bash
+pip install -r qa_mdt/requirements.txt
+pip install xformers==0.0.26.post1
+pip install torchlibrosa==0.0.9 librosa==0.9.2
+pip install -q pytorch_lightning==2.1.3 torchlibrosa==0.0.9 librosa==0.9.2 ftfy==6.1.1 braceexpand
+pip install torch==2.3.0+cu121 torchvision==0.18.0+cu121 torchaudio==2.3.0 --index-url https://download.pytorch.org/whl/cu121
+```
+
+```python
+from qa_mdt.pipeline import MOSDiffusionPipeline
+
+pipe = MOSDiffusionPipeline()
+pipe("A modern synthesizer creating futuristic soundscapes.")
+```
+
+# Enjoy the music!! 🎶
\ No newline at end of file
diff --git a/qa_mdt/audioldm_train/.DS_Store b/qa_mdt/audioldm_train/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..7aee32f6664eb748738f565084732d0490387a90
Binary files /dev/null and b/qa_mdt/audioldm_train/.DS_Store differ
diff --git a/qa_mdt/audioldm_train/__init__.py b/qa_mdt/audioldm_train/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d234495016670668fd952f3dde312045d59d73b4
--- /dev/null
+++ b/qa_mdt/audioldm_train/__init__.py
@@ -0,0 +1 @@
+from . import utilities
diff --git a/qa_mdt/audioldm_train/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..83a1376ed1c87467ff668a47d1a04fa480bc504c
Binary files /dev/null and b/qa_mdt/audioldm_train/__pycache__/__init__.cpython-310.pyc differ
diff --git a/qa_mdt/audioldm_train/__pycache__/conditional_models.cpython-310.pyc b/qa_mdt/audioldm_train/__pycache__/conditional_models.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b49a44e1f5ed5bc0a1913d67cd1503d552af772b
Binary files /dev/null and b/qa_mdt/audioldm_train/__pycache__/conditional_models.cpython-310.pyc differ
diff --git a/qa_mdt/audioldm_train/__pycache__/dataset_plugin.cpython-310.pyc b/qa_mdt/audioldm_train/__pycache__/dataset_plugin.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0392cbcbeb0d0cc18c4a50806f0b9aea86f0a645
Binary files /dev/null and b/qa_mdt/audioldm_train/__pycache__/dataset_plugin.cpython-310.pyc differ
diff --git a/qa_mdt/audioldm_train/conditional_models.py b/qa_mdt/audioldm_train/conditional_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..1be342856efc63fe95183e720b8458ee04ca5c77
--- /dev/null
+++ b/qa_mdt/audioldm_train/conditional_models.py
@@ -0,0 +1,1354 @@
+import sys
+
+sys.path.append("src")
+import torch
+import logging
+import torch.nn as nn
+from qa_mdt.audioldm_train.modules.clap.open_clip import create_model
+from qa_mdt.audioldm_train.modules.clap.training.data import get_audio_features
+
+import torchaudio
+from transformers import (
+    RobertaTokenizer,
+    AutoTokenizer,
+    T5EncoderModel,
+    MT5EncoderModel,
+)
+import torch.nn.functional as F
+from qa_mdt.audioldm_train.modules.audiomae.AudioMAE import Vanilla_AudioMAE
+from qa_mdt.audioldm_train.modules.phoneme_encoder.encoder import TextEncoder
+
+from transformers import SpeechT5Processor, AutoTokenizer, GPT2Model, GPT2Tokenizer
+from transformers.models.speecht5.modeling_speecht5 import SpeechT5EncoderWithTextPrenet
+
+from qa_mdt.audioldm_train.modules.audiomae.sequence_gen.model import CLAP2AudioMAE
+from qa_mdt.audioldm_train.modules.audiomae.sequence_gen.sequence_input import (
+    Sequence2AudioMAE,
+)
+import numpy as np
+from qa_mdt.audioldm_train.modules.audiomae.sequence_gen.model import Prenet
+import json
+with open('./qa_mdt/offset_pretrained_checkpoints.json', 'r') as config_file:
+    config_data = json.load(config_file)
+
+"""
+The model forward function can return three types of data:
+1. tensor: used directly as the conditioning signal
+2. dict: with a main key used as the condition; the other keys can be used to pass loss terms, intermediate results, etc.
+3. list: of length 2, in which the first element is the tensor and the second element is the attention mask.
+ +The output shape for the cross attention condition should be: +x,x_mask = [bs, seq_len, emb_dim], [bs, seq_len] + +All the returned data, in which will be used as diffusion input, will need to be in float type +""" + + +class GPT2WordEmbedding(nn.Module): + def __init__(self): + super().__init__() + # self.tokenizer = AutoTokenizer.from_pretrained("gpt2") + self.tokenizer = GPT2Tokenizer.from_pretrained("gpt2") + self.tokenizer.pad_token = self.tokenizer.eos_token + self.model = GPT2Model.from_pretrained("gpt2").wte + self.device = None + + def get_unconditional_condition(self, batchsize): + unconditional_condition = ["random"] * batchsize + return self(unconditional_condition) + + def forward(self, text): + assert isinstance(text, list) + if self.device is None: + self.device = next(self.model.parameters()).device + + tokenization_result = self.tokenizer(text, return_tensors="pt", padding=True) + input_ids, attn_mask = tokenization_result["input_ids"].to( + self.device + ), tokenization_result["attention_mask"].to(self.device) + + input_embed = self.model(input_ids.long()) + + return [input_embed, attn_mask] + + +class ConcateBandWidthCond(nn.Module): + def __init__(self, latent_t_size, latent_f_size): + super().__init__() + self.placeholder = nn.Linear(1, 1) + self.latent_t_size = latent_t_size + self.latent_f_size = latent_f_size + self.device = None + + def get_unconditional_condition(self, batchsize): + return torch.zeros((batchsize, self.latent_t_size, self.latent_f_size)).to( + self.device + ) + + def forward(self, mel_spec_bandwidth_cond_extra_channel): + if self.device is None: + self.device = mel_spec_bandwidth_cond_extra_channel.device + + return mel_spec_bandwidth_cond_extra_channel + + +class BandwidthEncoder(nn.Module): + def __init__(self): + super().__init__() + self.emb = nn.Embedding(1000, 128) + nn.init.normal_(self.emb.weight, 0.0, 128**-0.5) + self.linear_bandwidth = nn.Linear(128, 128) + self.unconditional_condition = torch.zeros((1, 256)) + self.device = None + + def get_unconditional_condition(self, batchsize): + return self.unconditional_condition.expand(batchsize, 256) + + def forward(self, bandwidth): + + if self.device is None: + self.device = next(self.linear_bandwidth.parameters()).device + self.unconditional_condition = self.unconditional_condition.to(self.device) + + # freq_energy_percentile + lower_cutoff, higher_cutoff = bandwidth[..., 0], bandwidth[..., 1] + # lower_cutoff, higher_cutoff = lower_cutoff*0+5, higher_cutoff*0+300 + + lower_cutoff_emb = self.linear_bandwidth(self.emb(lower_cutoff.long())) + higher_cutoff_emb = self.linear_bandwidth(self.emb(higher_cutoff.long())) + cutoff_emb = torch.cat([lower_cutoff_emb, higher_cutoff_emb], dim=-1) + # [bs, 256] + return cutoff_emb + + +class SpeechT5TextEncoder(nn.Module): + def __init__(self): + super().__init__() + self.processor = SpeechT5Processor.from_pretrained("microsoft/speecht5_tts") + self.model = SpeechT5EncoderWithTextPrenet.from_pretrained( + "microsoft/speecht5_tts" + ) + for p in self.model.parameters(): + p.requires_grad = False + self.model.eval() + + # Required + def get_unconditional_condition(self, batchsize): + device = self.model.device + hidden_state = torch.zeros((batchsize, 1, 768)).to(device) + attention_mask = torch.ones((batchsize, 1)).to(device) + return [hidden_state.float(), attention_mask.float()] + + def forward(self, text): + with torch.no_grad(): + device = self.model.device + inputs = self.processor(text=text, return_tensors="pt", padding=True) + input_ids, 
attention_mask = inputs["input_ids"].to(device), inputs[ + "attention_mask" + ].to(device) + emb = self.model(input_ids, attention_mask) + emb = emb.last_hidden_state.detach() + return [emb.float(), attention_mask.float()] + + +class PhonemeEncoder(nn.Module): + def __init__(self, vocabs_size=41, pad_length=250, pad_token_id=None): + super().__init__() + """ + encoder = PhonemeEncoder(40) + data = torch.randint(0, 39, (2, 250)) + output = encoder(data) + import ipdb;ipdb.set_trace() + """ + assert pad_token_id is not None + + self.device = None + self.PAD_LENGTH = int(pad_length) + self.pad_token_id = pad_token_id + self.pad_token_sequence = torch.tensor([self.pad_token_id] * self.PAD_LENGTH) + + self.text_encoder = TextEncoder( + n_vocab=vocabs_size, + out_channels=192, + hidden_channels=192, + filter_channels=768, + n_heads=2, + n_layers=6, + kernel_size=3, + p_dropout=0.1, + ) + + self.learnable_positional_embedding = torch.nn.Parameter( + torch.zeros((1, 192, self.PAD_LENGTH)) + ) # [batchsize, seqlen, padlen] + self.learnable_positional_embedding.requires_grad = True + + # Required + def get_unconditional_condition(self, batchsize): + unconditional_tokens = self.pad_token_sequence.expand( + batchsize, self.PAD_LENGTH + ) + return self(unconditional_tokens) # Need to return float type + + # def get_unconditional_condition(self, batchsize): + + # hidden_state = torch.zeros((batchsize, self.PAD_LENGTH, 192)).to(self.device) + # attention_mask = torch.ones((batchsize, self.PAD_LENGTH)).to(self.device) + # return [hidden_state, attention_mask] # Need to return float type + + def _get_src_mask(self, phoneme): + src_mask = phoneme != self.pad_token_id + return src_mask + + def _get_src_length(self, phoneme): + src_mask = self._get_src_mask(phoneme) + length = torch.sum(src_mask, dim=-1) + return length + + # def make_empty_condition_unconditional(self, src_length, text_emb, attention_mask): + # # src_length: [bs] + # # text_emb: [bs, 192, pad_length] + # # attention_mask: [bs, pad_length] + # mask = src_length[..., None, None] > 1 + # text_emb = text_emb * mask + + # attention_mask[src_length < 1] = attention_mask[src_length < 1] * 0.0 + 1.0 + # return text_emb, attention_mask + + def forward(self, phoneme_idx): + if self.device is None: + self.device = self.learnable_positional_embedding.device + self.pad_token_sequence = self.pad_token_sequence.to(self.device) + + src_length = self._get_src_length(phoneme_idx) + text_emb, m, logs, text_emb_mask = self.text_encoder(phoneme_idx, src_length) + text_emb = text_emb + self.learnable_positional_embedding + + # text_emb, text_emb_mask = self.make_empty_condition_unconditional(src_length, text_emb, text_emb_mask) + + return [ + text_emb.permute(0, 2, 1), + text_emb_mask.squeeze(1), + ] # [2, 250, 192], [2, 250] + + +class FlanT5HiddenState(nn.Module): + """ + llama = FlanT5HiddenState() + data = ["","this is not an empty sentence"] + encoder_hidden_states = llama(data) + import ipdb;ipdb.set_trace() + """ + + def __init__( + self, text_encoder_name=config_data['flan_t5'], freeze_text_encoder=True + ): + super().__init__() + self.freeze_text_encoder = freeze_text_encoder + ## MODIFIED + self.tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-large") + self.model = T5EncoderModel.from_pretrained("google/flan-t5-large") + if freeze_text_encoder: + self.model.eval() + for p in self.model.parameters(): + p.requires_grad = False + else: + print("=> The text encoder is learnable") + + self.empty_hidden_state_cfg = None + self.device = None + + # 
Required + def get_unconditional_condition(self, batchsize): + param = next(self.model.parameters()) + if self.freeze_text_encoder: + assert param.requires_grad == False + + # device = param.device + if self.empty_hidden_state_cfg is None: + self.empty_hidden_state_cfg, _ = self([""]) + + hidden_state = torch.cat([self.empty_hidden_state_cfg] * batchsize).float() + attention_mask = ( + torch.ones((batchsize, hidden_state.size(1))) + .to(hidden_state.device) + .float() + ) + return [hidden_state, attention_mask] # Need to return float type + + def forward(self, batch): + param = next(self.model.parameters()) + if self.freeze_text_encoder: + assert param.requires_grad == False + + if self.device is None: + self.device = param.device + + # print("Manually change text") + # for i in range(len(batch)): + # batch[i] = "dog barking" + try: + return self.encode_text(batch) + except Exception as e: + print(e, batch) + logging.exception("An error occurred: %s", str(e)) + + def encode_text(self, prompt): + device = self.model.device + batch = self.tokenizer( + prompt, + max_length=128, # self.tokenizer.model_max_length + padding=True, + truncation=True, + return_tensors="pt", + ) + input_ids, attention_mask = batch.input_ids.to(device), batch.attention_mask.to( + device + ) + # Get text encoding + if self.freeze_text_encoder: + with torch.no_grad(): + encoder_hidden_states = self.model( + input_ids=input_ids, attention_mask=attention_mask + )[0] + else: + encoder_hidden_states = self.model( + input_ids=input_ids, attention_mask=attention_mask + )[0] + return [ + encoder_hidden_states.detach(), + attention_mask.float(), + ] # Attention mask == 1 means usable token + + +class FlanT5HiddenStatePaddedSameLength(nn.Module): + """ + llama = FlanT5HiddenState() + data = ["","this is not an empty sentence"] + encoder_hidden_states = llama(data) + import ipdb;ipdb.set_trace() + """ + + def __init__( + self, text_encoder_name="google/flan-t5-large", freeze_text_encoder=True + ): + super().__init__() + self.freeze_text_encoder = freeze_text_encoder + self.tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-large") + self.model = T5EncoderModel.from_pretrained("google/flan-t5-large") + if freeze_text_encoder: + self.model.eval() + for p in self.model.parameters(): + p.requires_grad = False + else: + print("=> The text encoder is learnable") + + self.empty_hidden_state_cfg = None + self.device = None + + # Required + def get_unconditional_condition(self, batchsize): + param = next(self.model.parameters()) + if self.freeze_text_encoder: + assert param.requires_grad == False + + # device = param.device + if self.empty_hidden_state_cfg is None: + self.empty_hidden_state_cfg, _ = self([""]) + + hidden_state = torch.cat([self.empty_hidden_state_cfg] * batchsize).float() + attention_mask = ( + torch.ones((batchsize, hidden_state.size(1))) + .to(hidden_state.device) + .float() + ) + return [hidden_state, attention_mask] # Need to return float type + + def forward(self, batch): + param = next(self.model.parameters()) + if self.freeze_text_encoder: + assert param.requires_grad == False + + if self.device is None: + self.device = param.device + + # print("Manually change text") + # for i in range(len(batch)): + # batch[i] = "dog barking" + try: + text_embed = self.encode_text(batch) + return text_embed + except Exception as e: + print(e, batch) + logging.exception("An error occurred: %s", str(e)) + + def encode_text(self, prompt): + device = self.model.device + batch = self.tokenizer( + prompt, + max_length=128, + 
padding="max_length", + truncation=True, + return_tensors="pt", + ) + input_ids, attention_mask = batch.input_ids.to(device), batch.attention_mask.to( + device + ) + + # Get text encoding + if self.freeze_text_encoder: + with torch.no_grad(): + encoder_hidden_states = self.model( + input_ids=input_ids, attention_mask=attention_mask + )[0] + else: + encoder_hidden_states = self.model( + input_ids=input_ids, attention_mask=attention_mask + )[0] + return [ + encoder_hidden_states.detach(), + attention_mask.float(), + ] # Attention mask == 1 means usable token + + +class CLAPGenAudioMAECond(CLAP2AudioMAE): + def __init__( + self, + cond_stage_config, + learnable=True, + pretrained_path=None, + use_gt_mae_output=None, # False: does not use AudioMAE GT, True: Use AudioMAE GT + use_gt_mae_prob=None, + ): # The prob of using AudioMAE GT + super().__init__(base_learning_rate=1e-5, cond_stage_config=cond_stage_config) + assert use_gt_mae_output is not None and use_gt_mae_prob is not None + + if pretrained_path is not None: + print("Reload CLAPGenAudioMAECond from %s" % pretrained_path) + state_dict = torch.load(pretrained_path)["state_dict"] + self.load_state_dict(state_dict) + + self.use_gt_mae_output = use_gt_mae_output + self.use_gt_mae_prob = use_gt_mae_prob + self.learnable = learnable + + if not learnable: + # Only optimize the GPT2 model + for p in self.model.parameters(): + p.requires_grad = False + self.eval() + + # Required + def get_unconditional_condition(self, batchsize): + return_dict = self.cfg_uncond(batchsize) + return return_dict + + def forward(self, batch): + # The conditional module can return both tensor or dictionaries + # The returned tensor will be corresponding to the cond_stage_key + # The returned dict will have keys that correspond to the cond_stage_key + ret_dict = {} + if self.use_gt_mae_output and torch.rand(1).item() < self.use_gt_mae_prob: + cond_dict = self.get_input(batch) + # Used as condition + ret_dict["crossattn_clap_to_audiomae_feature"] = [ + cond_dict["crossattn_audiomae_pooled"][0], + torch.ones_like(cond_dict["crossattn_audiomae_pooled"][1]).float(), + ] # Input sequence and mask + else: + # Used as condition + input_embeds, cond_dict = self.generate(batch) + input_embeds_mask = ( + torch.ones((input_embeds.size(0), input_embeds.size(1))) + .to(input_embeds.device) + .float() + ) + ret_dict["crossattn_clap_to_audiomae_feature"] = [ + input_embeds, + input_embeds_mask, + ] # Input sequence and mask + + # If the following two keys are not in cond_stage_key, then they will not be used as condition + ret_dict["film_clap_cond1"] = cond_dict[ + "film_clap_cond1" + ] # the clap target latent + ret_dict["crossattn_audiomae_pooled"] = cond_dict[ + "crossattn_audiomae_pooled" + ] # audiomae target latent + + if self.learnable and self.training: + loss = self.training_step(batch, cond_dict=cond_dict) + ret_dict["noncond_loss_clap2audiomae"] = loss + + return ret_dict + + +class SequenceGenAudioMAECond(Sequence2AudioMAE): + def __init__( + self, + cond_stage_config, + base_learning_rate, + sequence_gen_length, + sequence_input_key, + sequence_input_embed_dim, + batchsize, + always_output_audiomae_gt=False, + pretrained_path=None, + force_reload_pretrain_avoid_overwrite=False, + learnable=True, + use_warmup=True, + use_gt_mae_output=None, # False: does not use AudioMAE GT, True: Use AudioMAE GT + use_gt_mae_prob=None, + ): # The prob of using AudioMAE GT + if use_warmup: + print( + "Warning: You didn't initialize sequence prediction module with trainer. 
Set warmup to False. You can still use the warmup scheme from the latent diffusion model." + ) + use_warmup = False + + super().__init__( + base_learning_rate=base_learning_rate, + cond_stage_config=cond_stage_config, + sequence_gen_length=sequence_gen_length, + sequence_input_key=sequence_input_key, + use_warmup=use_warmup, + sequence_input_embed_dim=sequence_input_embed_dim, + batchsize=batchsize, + ) + + assert use_gt_mae_output is not None and use_gt_mae_prob is not None + self.always_output_audiomae_gt = always_output_audiomae_gt + self.force_reload_pretrain_avoid_overwrite = ( + force_reload_pretrain_avoid_overwrite + ) + self.pretrained_path = pretrained_path + if self.force_reload_pretrain_avoid_overwrite: + self.is_reload = False + else: + self.is_reload = True + + self.load_pretrain_model() + + self.use_gt_mae_output = use_gt_mae_output + self.use_gt_mae_prob = use_gt_mae_prob + self.learnable = learnable + + if not learnable: + # Only optimize the GPT2 model + for p in self.model.parameters(): + p.requires_grad = False + self.eval() + + def load_pretrain_model(self): + if self.pretrained_path is not None: + print("Reload SequenceGenAudioMAECond from %s" % self.pretrained_path) + state_dict = torch.load(self.pretrained_path)["state_dict"] + self.load_state_dict(state_dict) + + # Required + def get_unconditional_condition(self, batchsize): + return_dict = self.cfg_uncond(batchsize) + return_dict["crossattn_audiomae_generated"] = [ + return_dict["crossattn_audiomae_pooled"][0], + torch.ones_like(return_dict["crossattn_audiomae_pooled"][1]).float(), + ] + return return_dict + + def forward(self, batch): + # The conditional module can return both tensor or dictionaries + # The returned tensor will be corresponding to the cond_stage_key + # The returned dict will have keys that correspond to the cond_stage_key + ret_dict = {} + + if self.force_reload_pretrain_avoid_overwrite and not self.is_reload: + self.load_pretrain_model() + self.is_reload = True + + self.check_module_param_update() + + if self.always_output_audiomae_gt or ( + self.use_gt_mae_output and torch.rand(1).item() < self.use_gt_mae_prob + ): + cond_dict = self.get_input(batch) + ret_dict["crossattn_audiomae_generated"] = [ + cond_dict["crossattn_audiomae_pooled"][0], + torch.ones_like(cond_dict["crossattn_audiomae_pooled"][1]).float(), + ] # Input sequence and mask + # _, output = self.training_step(batch, cond_dict=cond_dict, return_output=True) + # ret_dict["crossattn_audiomae_generated"] = [output, torch.ones_like(cond_dict["crossattn_audiomae_pooled"][1]).float()] # Input sequence and mask + else: + if not self.training: + print("--------------> Generate !!!!!!!!!!!!") + input_embeds, cond_dict = self.generate(batch) + # print("Generate Partial!!!!"); input_embeds, cond_dict = self.generate_partial(batch) + input_embeds_mask = ( + torch.ones((input_embeds.size(0), input_embeds.size(1))) + .to(input_embeds.device) + .float() + ) + ret_dict["crossattn_audiomae_generated"] = [ + input_embeds, + input_embeds_mask, + ] # Input sequence and mask + + # If the following two keys are not in cond_stage_key, then they will not be used as condition + for key in cond_dict.keys(): + ret_dict[key] = cond_dict[key] + + if self.learnable and self.training: + loss = self.training_step(batch, cond_dict=cond_dict) + ret_dict["noncond_loss_clap2audiomae"] = loss + + return ret_dict + + +class SequenceGenAudioMAECond_AudioMAE_PostNet(Sequence2AudioMAE): + def __init__( + self, + cond_stage_config, + base_learning_rate, + 
sequence_gen_length, + sequence_input_key, + sequence_input_embed_dim, + batchsize, + always_output_audiomae_gt=False, + pretrained_path=None, + use_ar_gen_loss=False, + force_reload_pretrain_avoid_overwrite=False, + learnable=True, + use_warmup=True, + use_gt_mae_output=None, # False: does not use AudioMAE GT, True: Use AudioMAE GT + use_gt_mae_prob=None, + ): # The prob of using AudioMAE GT + if use_warmup: + print( + "Warning: You didn't initialize sequence prediction module with trainer. Set warmup to False. You can still use the warmup scheme from the latent diffusion model." + ) + use_warmup = False + + super().__init__( + base_learning_rate=base_learning_rate, + cond_stage_config=cond_stage_config, + sequence_gen_length=sequence_gen_length, + sequence_input_key=sequence_input_key, + use_ar_gen_loss=use_ar_gen_loss, + use_warmup=use_warmup, + sequence_input_embed_dim=sequence_input_embed_dim, + batchsize=batchsize, + ) + + assert use_gt_mae_output is not None and use_gt_mae_prob is not None + self.always_output_audiomae_gt = always_output_audiomae_gt + self.force_reload_pretrain_avoid_overwrite = ( + force_reload_pretrain_avoid_overwrite + ) + self.pretrained_path = pretrained_path + if self.force_reload_pretrain_avoid_overwrite: + self.is_reload = False + else: + self.is_reload = True + + self.load_pretrain_model() + + self.prenet = Prenet(in_dim=768, sizes=[768, 768, 768], dropout_rate=0.5) + + self.use_gt_mae_output = use_gt_mae_output + self.use_gt_mae_prob = use_gt_mae_prob + self.learnable = learnable + + if not learnable: + # Only optimize the GPT2 model + for p in self.model.parameters(): + p.requires_grad = False + self.eval() + + def load_pretrain_model(self): + if self.pretrained_path is not None: + print("Reload SequenceGenAudioMAECond from %s" % self.pretrained_path) + state_dict = torch.load(self.pretrained_path)["state_dict"] + self.load_state_dict(state_dict) + + # Required + def get_unconditional_condition(self, batchsize): + return_dict = self.cfg_uncond(batchsize) + return_dict["crossattn_audiomae_generated"] = [ + return_dict["crossattn_audiomae_pooled"][0], + torch.ones_like(return_dict["crossattn_audiomae_pooled"][1]).float(), + ] + return return_dict + + def forward(self, batch): + # The conditional module can return both tensor or dictionaries + # The returned tensor will be corresponding to the cond_stage_key + # The returned dict will have keys that correspond to the cond_stage_key + ret_dict = {} + + if self.force_reload_pretrain_avoid_overwrite and not self.is_reload: + self.load_pretrain_model() + self.is_reload = True + + self.check_module_param_update() + + if self.always_output_audiomae_gt or ( + self.use_gt_mae_output and torch.rand(1).item() < self.use_gt_mae_prob + ): + cond_dict = self.get_input(batch) + gt_audiomae = self.prenet(cond_dict["crossattn_audiomae_pooled"][0]) + ret_dict["crossattn_audiomae_generated"] = [ + gt_audiomae, + torch.ones_like(cond_dict["crossattn_audiomae_pooled"][1]).float(), + ] # Input sequence and mask + else: + print("--------------> Generate!!!!!!!!!!!!") + input_embeds, cond_dict = self.generate(batch) + # input_embeds, cond_dict = self.generate_partial(batch) + input_embeds = self.prenet(input_embeds) + input_embeds_mask = ( + torch.ones((input_embeds.size(0), input_embeds.size(1))) + .to(input_embeds.device) + .float() + ) + ret_dict["crossattn_audiomae_generated"] = [ + input_embeds, + input_embeds_mask, + ] # Input sequence and mask + + # If the following two keys are not in cond_stage_key, then they will not be 
used as condition + for key in cond_dict.keys(): + ret_dict[key] = cond_dict[key] + + if self.learnable and self.training: + loss = self.training_step(batch, cond_dict=cond_dict) + ret_dict["noncond_loss_clap2audiomae"] = loss + + return ret_dict + + +class AudioMAEConditionCTPoolRandTFSeparated(nn.Module): + """ + audiomae = AudioMAEConditionCTPool2x2() + data = torch.randn((4, 1024, 128)) + output = audiomae(data) + import ipdb;ipdb.set_trace() + exit(0) + """ + + def __init__( + self, + time_pooling_factors=[1, 2, 4, 8], + freq_pooling_factors=[1, 2, 4, 8], + eval_time_pooling=None, + eval_freq_pooling=None, + mask_ratio=0.0, + regularization=False, + no_audiomae_mask=True, + no_audiomae_average=False, + ): + super().__init__() + self.device = None + self.time_pooling_factors = time_pooling_factors + self.freq_pooling_factors = freq_pooling_factors + self.no_audiomae_mask = no_audiomae_mask + self.no_audiomae_average = no_audiomae_average + + self.eval_freq_pooling = eval_freq_pooling + self.eval_time_pooling = eval_time_pooling + self.mask_ratio = mask_ratio + self.use_reg = regularization + + self.audiomae = Vanilla_AudioMAE() + self.audiomae.eval() + for p in self.audiomae.parameters(): + p.requires_grad = False + + # Required + def get_unconditional_condition(self, batchsize): + param = next(self.audiomae.parameters()) + assert param.requires_grad == False + device = param.device + # time_pool, freq_pool = max(self.time_pooling_factors), max(self.freq_pooling_factors) + time_pool, freq_pool = min(self.eval_time_pooling, 64), min( + self.eval_freq_pooling, 8 + ) + # time_pool = self.time_pooling_factors[np.random.choice(list(range(len(self.time_pooling_factors))))] + # freq_pool = self.freq_pooling_factors[np.random.choice(list(range(len(self.freq_pooling_factors))))] + token_num = int(512 / (time_pool * freq_pool)) + return [ + torch.zeros((batchsize, token_num, 768)).to(device).float(), + torch.ones((batchsize, token_num)).to(device).float(), + ] + + def pool(self, representation, time_pool=None, freq_pool=None): + assert representation.size(-1) == 768 + representation = representation[:, 1:, :].transpose(1, 2) + bs, embedding_dim, token_num = representation.size() + representation = representation.reshape(bs, embedding_dim, 64, 8) + + if self.training: + if time_pool is None and freq_pool is None: + time_pool = min( + 64, + self.time_pooling_factors[ + np.random.choice(list(range(len(self.time_pooling_factors)))) + ], + ) + freq_pool = min( + 8, + self.freq_pooling_factors[ + np.random.choice(list(range(len(self.freq_pooling_factors)))) + ], + ) + # freq_pool = min(8, time_pool) # TODO here I make some modification. 
+ else: + time_pool, freq_pool = min(self.eval_time_pooling, 64), min( + self.eval_freq_pooling, 8 + ) + + self.avgpooling = nn.AvgPool2d( + kernel_size=(time_pool, freq_pool), stride=(time_pool, freq_pool) + ) + self.maxpooling = nn.MaxPool2d( + kernel_size=(time_pool, freq_pool), stride=(time_pool, freq_pool) + ) + + pooled = ( + self.avgpooling(representation) + self.maxpooling(representation) + ) / 2 # [bs, embedding_dim, time_token_num, freq_token_num] + pooled = pooled.flatten(2).transpose(1, 2) + return pooled # [bs, token_num, embedding_dim] + + def regularization(self, x): + assert x.size(-1) == 768 + x = F.normalize(x, p=2, dim=-1) + return x + + # Required + def forward(self, batch, time_pool=None, freq_pool=None): + assert batch.size(-2) == 1024 and batch.size(-1) == 128 + + if self.device is None: + self.device = batch.device + + batch = batch.unsqueeze(1) + with torch.no_grad(): + representation = self.audiomae( + batch, + mask_ratio=self.mask_ratio, + no_mask=self.no_audiomae_mask, + no_average=self.no_audiomae_average, + ) + representation = self.pool(representation, time_pool, freq_pool) + if self.use_reg: + representation = self.regularization(representation) + return [ + representation, + torch.ones((representation.size(0), representation.size(1))) + .to(representation.device) + .float(), + ] + + +class AudioMAEConditionCTPoolRand(nn.Module): + """ + audiomae = AudioMAEConditionCTPool2x2() + data = torch.randn((4, 1024, 128)) + output = audiomae(data) + import ipdb;ipdb.set_trace() + exit(0) + """ + + def __init__( + self, + time_pooling_factors=[1, 2, 4, 8], + freq_pooling_factors=[1, 2, 4, 8], + eval_time_pooling=None, + eval_freq_pooling=None, + mask_ratio=0.0, + regularization=False, + no_audiomae_mask=True, + no_audiomae_average=False, + ): + super().__init__() + self.device = None + self.time_pooling_factors = time_pooling_factors + self.freq_pooling_factors = freq_pooling_factors + self.no_audiomae_mask = no_audiomae_mask + self.no_audiomae_average = no_audiomae_average + + self.eval_freq_pooling = eval_freq_pooling + self.eval_time_pooling = eval_time_pooling + self.mask_ratio = mask_ratio + self.use_reg = regularization + + self.audiomae = Vanilla_AudioMAE() + self.audiomae.eval() + for p in self.audiomae.parameters(): + p.requires_grad = False + + # Required + def get_unconditional_condition(self, batchsize): + param = next(self.audiomae.parameters()) + assert param.requires_grad == False + device = param.device + # time_pool, freq_pool = max(self.time_pooling_factors), max(self.freq_pooling_factors) + time_pool, freq_pool = min(self.eval_time_pooling, 64), min( + self.eval_freq_pooling, 8 + ) + # time_pool = self.time_pooling_factors[np.random.choice(list(range(len(self.time_pooling_factors))))] + # freq_pool = self.freq_pooling_factors[np.random.choice(list(range(len(self.freq_pooling_factors))))] + token_num = int(512 / (time_pool * freq_pool)) + return [ + torch.zeros((batchsize, token_num, 768)).to(device).float(), + torch.ones((batchsize, token_num)).to(device).float(), + ] + + def pool(self, representation, time_pool=None, freq_pool=None): + assert representation.size(-1) == 768 + representation = representation[:, 1:, :].transpose(1, 2) + bs, embedding_dim, token_num = representation.size() + representation = representation.reshape(bs, embedding_dim, 64, 8) + + if self.training: + if time_pool is None and freq_pool is None: + time_pool = min( + 64, + self.time_pooling_factors[ + np.random.choice(list(range(len(self.time_pooling_factors)))) + ], + ) + # 
freq_pool = self.freq_pooling_factors[np.random.choice(list(range(len(self.freq_pooling_factors))))] + freq_pool = min(8, time_pool) # TODO here I make some modification. + else: + time_pool, freq_pool = min(self.eval_time_pooling, 64), min( + self.eval_freq_pooling, 8 + ) + + self.avgpooling = nn.AvgPool2d( + kernel_size=(time_pool, freq_pool), stride=(time_pool, freq_pool) + ) + self.maxpooling = nn.MaxPool2d( + kernel_size=(time_pool, freq_pool), stride=(time_pool, freq_pool) + ) + + pooled = ( + self.avgpooling(representation) + self.maxpooling(representation) + ) / 2 # [bs, embedding_dim, time_token_num, freq_token_num] + pooled = pooled.flatten(2).transpose(1, 2) + return pooled # [bs, token_num, embedding_dim] + + def regularization(self, x): + assert x.size(-1) == 768 + x = F.normalize(x, p=2, dim=-1) + return x + + # Required + def forward(self, batch, time_pool=None, freq_pool=None): + assert batch.size(-2) == 1024 and batch.size(-1) == 128 + + if self.device is None: + self.device = batch.device + + batch = batch.unsqueeze(1) + with torch.no_grad(): + representation = self.audiomae( + batch, + mask_ratio=self.mask_ratio, + no_mask=self.no_audiomae_mask, + no_average=self.no_audiomae_average, + ) + representation = self.pool(representation, time_pool, freq_pool) + if self.use_reg: + representation = self.regularization(representation) + return [ + representation, + torch.ones((representation.size(0), representation.size(1))) + .to(representation.device) + .float(), + ] + + +class ConditionalToken(nn.Module): + def __init__(self, embedding_dim): + super(ConditionalToken, self).__init__() + self.embedding_dim = embedding_dim + # Define the conditional tokens as fixed values + self.pooling_factor_tokens = { + 1: torch.Tensor([1.0, 0.0] * (embedding_dim // 2)), + 2: torch.Tensor([0.0, 1.0] * (embedding_dim // 2)), + 4: torch.Tensor([1.0, 1.0] * (embedding_dim // 2)), + 8: torch.Tensor([-1.0, 0.0] * (embedding_dim // 2)), + 16: torch.Tensor([0.0, -1.0] * (embedding_dim // 2)), + 32: torch.Tensor([-1.0, -1.0] * (embedding_dim // 2)), + 64: torch.Tensor([0.0, 0.0] * (embedding_dim // 2)), + } + for p in self.parameters(): + p.requires_grad = False + + def forward(self, condition, batchsize): + """ + Returns the conditional token for the given condition. 
+ """ + if condition not in self.pooling_factor_tokens.keys(): + raise ValueError(f"Unsupported condition: {condition}") + batched_token = self.pooling_factor_tokens[condition][None, None].expand( + batchsize, 1, self.embedding_dim + ) + return batched_token + + +class AudioMAEConditionCTPoolRandV2(nn.Module): + """ + audiomae = AudioMAEConditionCTPool2x2() + data = torch.randn((4, 1024, 128)) + output = audiomae(data) + import ipdb;ipdb.set_trace() + exit(0) + """ + + def __init__( + self, + time_pooling_factors=[1, 2, 4, 8], + freq_pooling_factors=[1, 2, 4, 8], + eval_time_pooling=None, + eval_freq_pooling=None, + mask_ratio=0.0, + regularization=False, + no_audiomae_mask=True, + no_audiomae_average=False, + ): + super().__init__() + self.device = None + self.time_pooling_factors = time_pooling_factors + self.freq_pooling_factors = freq_pooling_factors + self.no_audiomae_mask = no_audiomae_mask + self.no_audiomae_average = no_audiomae_average + + self.eval_freq_pooling = eval_freq_pooling + self.eval_time_pooling = eval_time_pooling + self.mask_ratio = mask_ratio + self.use_reg = regularization + + self.pooling_tokens = ConditionalToken(768) + + self.audiomae = Vanilla_AudioMAE() + self.audiomae.eval() + + for p in self.audiomae.parameters(): + p.requires_grad = False + + # Required + def get_unconditional_condition(self, batchsize): + param = next(self.audiomae.parameters()) + assert param.requires_grad == False + device = param.device + # time_pool, freq_pool = max(self.time_pooling_factors), max(self.freq_pooling_factors) + time_pool, freq_pool = min(self.eval_time_pooling, 64), min( + self.eval_freq_pooling, 8 + ) + # time_pool = self.time_pooling_factors[np.random.choice(list(range(len(self.time_pooling_factors))))] + # freq_pool = self.freq_pooling_factors[np.random.choice(list(range(len(self.freq_pooling_factors))))] + pool_condition_token = self.pooling_tokens(time_pool, batchsize).to(device) + token_num = int(512 / (time_pool * freq_pool)) + + rep = torch.zeros((batchsize, token_num, 768)).to(device).float() + rep = torch.cat([rep, pool_condition_token], dim=1) + + return [rep, torch.ones((batchsize, token_num + 1)).to(device).float()] + + def pool(self, representation, time_pool=None, freq_pool=None): + assert representation.size(-1) == 768 + representation = representation[:, 1:, :].transpose(1, 2) + bs, embedding_dim, token_num = representation.size() + representation = representation.reshape(bs, embedding_dim, 64, 8) + + if self.training: + if time_pool is None and freq_pool is None: + time_pool = min( + 64, + self.time_pooling_factors[ + np.random.choice(list(range(len(self.time_pooling_factors)))) + ], + ) + # freq_pool = self.freq_pooling_factors[np.random.choice(list(range(len(self.freq_pooling_factors))))] + freq_pool = min(8, time_pool) # TODO here I make some modification. 
+ else: + time_pool, freq_pool = min(self.eval_time_pooling, 64), min( + self.eval_freq_pooling, 8 + ) + + self.avgpooling = nn.AvgPool2d( + kernel_size=(time_pool, freq_pool), stride=(time_pool, freq_pool) + ) + self.maxpooling = nn.MaxPool2d( + kernel_size=(time_pool, freq_pool), stride=(time_pool, freq_pool) + ) + pooled = ( + self.avgpooling(representation) + self.maxpooling(representation) + ) / 2 # [bs, embedding_dim, time_token_num, freq_token_num] + pooled = pooled.flatten(2).transpose(1, 2) + return pooled, time_pool, freq_pool # [bs, token_num, embedding_dim] + + def regularization(self, x): + assert x.size(-1) == 768 + x = F.normalize(x, p=2, dim=-1) + return x + + # Required + def forward(self, batch): + assert batch.size(-2) == 1024 and batch.size(-1) == 128 + + if self.device is None: + self.device = batch.device + + batch = batch.unsqueeze(1) + + with torch.no_grad(): + representation = self.audiomae( + batch, + mask_ratio=self.mask_ratio, + no_mask=self.no_audiomae_mask, + no_average=self.no_audiomae_average, + ) + representation, time_pool, freq_pool = self.pool(representation) + if self.use_reg: + representation = self.regularization(representation) + pool_condition_token = self.pooling_tokens( + time_pool, representation.size(0) + ).to(representation.device) + representation = torch.cat([representation, pool_condition_token], dim=1) + + return [ + representation, + torch.ones((representation.size(0), representation.size(1))) + .to(representation.device) + .float(), + ] + + +class BeatDownbeatConditionConcat(nn.Module): + def __init__(self, latent_t_size, latent_f_size): + super().__init__() + self.latent_t_size = latent_t_size + self.latent_f_size = latent_f_size + self.device = None + + # Required + def get_unconditional_condition(self, batchsize): + return torch.zeros((batchsize, self.latent_t_size, self.latent_f_size)).to( + self.device + ) + + # Required + def forward(self, batch): + if self.device is None: + self.device = batch.device + return batch + + +class CLAPAudioEmbeddingClassifierFreev2(nn.Module): + def __init__( + self, + pretrained_path, + sampling_rate=16000, + embed_mode="audio", + amodel="HTSAT-base", + unconditional_prob=0.1, + random_mute=False, + max_random_mute_portion=0.5, + training_mode=True, + ): + super().__init__() + self.device = "cpu" + self.precision = "fp32" + self.amodel = amodel # or 'PANN-14' + self.tmodel = "roberta" # the best text encoder in our training + self.enable_fusion = False # False if you do not want to use the fusion model + self.fusion_type = "aff_2d" + self.pretrained = pretrained_path + self.embed_mode = embed_mode + self.embed_mode_orig = embed_mode + self.sampling_rate = sampling_rate + self.unconditional_prob = unconditional_prob + self.random_mute = random_mute + self.tokenize = RobertaTokenizer.from_pretrained(config_data["roberta-base"]) + self.max_random_mute_portion = max_random_mute_portion + self.training_mode = training_mode + self.model, self.model_cfg = create_model( + self.amodel, + self.tmodel, + self.pretrained, + precision=self.precision, + device=self.device, + enable_fusion=self.enable_fusion, + fusion_type=self.fusion_type, + ) + audio_cfg = self.model_cfg["audio_cfg"] + self.mel_transform = torchaudio.transforms.MelSpectrogram( + sample_rate=audio_cfg["sample_rate"], + n_fft=audio_cfg["window_size"], + win_length=audio_cfg["window_size"], + hop_length=audio_cfg["hop_size"], + center=True, + pad_mode="reflect", + power=2.0, + norm=None, + onesided=True, + n_mels=64, + f_min=audio_cfg["fmin"], + 
f_max=audio_cfg["fmax"], + ) + for p in self.model.parameters(): + p.requires_grad = False + self.unconditional_token = None + self.model.eval() + + def get_unconditional_condition(self, batchsize): + self.unconditional_token = self.model.get_text_embedding( + self.tokenizer(["", ""]) + )[0:1] + return torch.cat([self.unconditional_token.unsqueeze(0)] * batchsize, dim=0) + + def batch_to_list(self, batch): + ret = [] + for i in range(batch.size(0)): + ret.append(batch[i]) + return ret + + def make_decision(self, probability): + if float(torch.rand(1)) < probability: + return True + else: + return False + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def _random_mute(self, waveform): + # waveform: [bs, t-steps] + t_steps = waveform.size(-1) + for i in range(waveform.size(0)): + mute_size = int( + self.random_uniform(0, end=int(t_steps * self.max_random_mute_portion)) + ) + mute_start = int(self.random_uniform(0, t_steps - mute_size)) + waveform[i, mute_start : mute_start + mute_size] = 0 + return waveform + + def cos_similarity(self, waveform, text): + # waveform: [bs, t_steps] + original_embed_mode = self.embed_mode + with torch.no_grad(): + self.embed_mode = "audio" + audio_emb = self(waveform.cuda()) + self.embed_mode = "text" + text_emb = self(text) + similarity = F.cosine_similarity(audio_emb, text_emb, dim=2) + self.embed_mode = original_embed_mode + return similarity.squeeze() + + def build_unconditional_emb(self): + self.unconditional_token = self.model.get_text_embedding( + self.tokenizer(["", ""]) + )[0:1] + + def forward(self, batch): + # If you want this conditioner to be unconditional, set self.unconditional_prob = 1.0 + # If you want this conditioner to be fully conditional, set self.unconditional_prob = 0.0 + if self.model.training == True and not self.training_mode: + print( + "The pretrained CLAP model should always be in eval mode. Reloading model just in case you change the parameters." 
+ ) + self.model, self.model_cfg = create_model( + self.amodel, + self.tmodel, + self.pretrained, + precision=self.precision, + device="cuda", + enable_fusion=self.enable_fusion, + fusion_type=self.fusion_type, + ) + for p in self.model.parameters(): + p.requires_grad = False + self.model.eval() + + if self.unconditional_token is None: + self.build_unconditional_emb() + + # if(self.training_mode): + # assert self.model.training == True + # else: + # assert self.model.training == False + + # the 'fusion' truncate mode can be changed to 'rand_trunc' if run in unfusion mode + if self.embed_mode == "audio": + if not self.training: + print("INFO: clap model calculate the audio embedding as condition") + with torch.no_grad(): + # assert ( + # self.sampling_rate == 16000 + # ), "We only support 16000 sampling rate" + + # if self.random_mute: + # batch = self._random_mute(batch) + # batch: [bs, 1, t-samples] + if self.sampling_rate != 48000: + batch = torchaudio.functional.resample( + batch, orig_freq=self.sampling_rate, new_freq=48000 + ) + + audio_data = batch.squeeze(1) + mel = self.mel_transform(audio_data) + audio_dict = get_audio_features( + audio_data, + mel, + 480000, + data_truncating="fusion", + data_filling="repeatpad", + audio_cfg=self.model_cfg["audio_cfg"], + ) + # [bs, 512] + embed = self.model.get_audio_embedding(audio_dict) + elif self.embed_mode == "text": + with torch.no_grad(): + # the 'fusion' truncate mode can be changed to 'rand_trunc' if run in unfusion mode + text_data = self.tokenizer(batch) + + if isinstance(batch, str) or ( + isinstance(batch, list) and len(batch) == 1 + ): + for key in text_data.keys(): + text_data[key] = text_data[key].unsqueeze(0) + + embed = self.model.get_text_embedding(text_data) + + embed = embed.unsqueeze(1) + for i in range(embed.size(0)): + if self.make_decision(self.unconditional_prob): + embed[i] = self.unconditional_token + # embed = torch.randn((batch.size(0), 1, 512)).type_as(batch) + return embed.detach() + + def tokenizer(self, text): + result = self.tokenize( + text, + padding="max_length", + truncation=True, + max_length=512, + return_tensors="pt", + ) + return {k: v.squeeze(0) for k, v in result.items()} + + +if __name__ == "__main__": + model = CLAPAudioEmbeddingClassifierFreev2( + pretrained_path="/mnt/bn/lqhaoheliu/exps/checkpoints/audioldm/ckpt/CLAP.pt", + embed_mode="text", + amodel="HTSAT-tiny", + ) + # data = torch.randn((6, 1, int(16000*10.24))) + data = ["text", "text"] + res = model(data) + import ipdb + + ipdb.set_trace() diff --git a/qa_mdt/audioldm_train/config/mos_as_token/qa_mdt.yaml b/qa_mdt/audioldm_train/config/mos_as_token/qa_mdt.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e171f8b32bd15d825812c8884612dccaeb67dc27 --- /dev/null +++ b/qa_mdt/audioldm_train/config/mos_as_token/qa_mdt.yaml @@ -0,0 +1,169 @@ +log_directory: "./log/latent_diffusion" +project: "audioldm" +precision: "high" + +# TODO: change this with your project path +base_root: "./qa_mdt" + +# TODO: change this with your pretrained path +# TODO: pretrained path is also needed in "base_root/offset_pretrained_checkpoints.json" +pretrained: + clap_music: "./qa_mdt/checkpoints/clap_music" + flan_t5: "./qa_mdt/checkpoints/flant5" + hifi-gan: "./qa_mdt/checkpoints/hifi-gan/checkpoints" + roberta-base: "./qa_mdt/checkpoints/robertabase" + +# TODO: lmdb dataset that stores pMOS of the training dataset +# while in inference, we don't need it !!! +# while in inference, we don't need it !!! +# while in inference, we don't need it !!! 
+mos_path: "" + +train_path: + train_lmdb_path: [] # path list of training lmdb folders + +val_path: + val_lmdb_path: [] # path list of training lmdb folders + val_key_path: [] # path list of training lmdb key files + +variables: + sampling_rate: &sampling_rate 16000 + mel_bins: &mel_bins 64 + latent_embed_dim: &latent_embed_dim 8 + latent_t_size: &latent_t_size 256 # TODO might need to change + latent_f_size: &latent_f_size 16 # TODO might need to change + in_channels: &unet_in_channels 8 # TODO might need to change + optimize_ddpm_parameter: &optimize_ddpm_parameter true + optimize_gpt: &optimize_gpt true + warmup_steps: &warmup_steps 2000 + +# we rewrite the dataset so it may not be needed +data: + train: ["audiocaps"] + val: "audiocaps" + test: "audiocaps" + class_label_indices: "audioset_eval_subset" + dataloader_add_ons: ["waveform_rs_48k"] + +step: + validation_every_n_epochs: 10000 + save_checkpoint_every_n_steps: 1000 + # limit_val_batches: 2 + max_steps: 8000000 + save_top_k: 1000 + +preprocessing: + audio: + sampling_rate: *sampling_rate + max_wav_value: 32768.0 + duration: 10.24 + stft: + filter_length: 1024 + hop_length: 160 + win_length: 1024 + mel: + n_mel_channels: *mel_bins + mel_fmin: 0 + mel_fmax: 8000 + +augmentation: + mixup: 0.0 + +model: + target: qa_mdt.audioldm_train.modules.latent_diffusion.ddpm.LatentDiffusion + params: + # Autoencoder + first_stage_config: + base_learning_rate: 8.0e-06 + target: qa_mdt.audioldm_train.modules.latent_encoder.autoencoder.AutoencoderKL + params: + # TODO: change it with your VAE checkpoint + reload_from_ckpt: "./qa_mdt/checkpoints/hifi-gan/checkpoints/vae_mel_16k_64bins.ckpt" + sampling_rate: *sampling_rate + batchsize: 1 + monitor: val/rec_loss + image_key: fbank + subband: 1 + embed_dim: *latent_embed_dim + time_shuffle: 1 + lossconfig: + target: qa_mdt.audioldm_train.losses.LPIPSWithDiscriminator + params: + disc_start: 50001 + kl_weight: 1000.0 + disc_weight: 0.5 + disc_in_channels: 1 + ddconfig: + double_z: true + mel_bins: *mel_bins + z_channels: 8 + resolution: 256 + downsample_time: false + in_channels: 1 + out_ch: 1 + ch: 128 + ch_mult: + - 1 + - 2 + - 4 + num_res_blocks: 2 + attn_resolutions: [] + dropout: 0.0 + + # Other parameters + base_learning_rate: 8.0e-5 + warmup_steps: *warmup_steps + optimize_ddpm_parameter: *optimize_ddpm_parameter + sampling_rate: *sampling_rate + batchsize: 16 + linear_start: 0.0015 + linear_end: 0.0195 + num_timesteps_cond: 1 + log_every_t: 200 + timesteps: 1000 + unconditional_prob_cfg: 0.1 + parameterization: eps # [eps, x0, v] + first_stage_key: fbank + latent_t_size: *latent_t_size + latent_f_size: *latent_f_size + channels: *latent_embed_dim + monitor: val/loss_simple_ema + scale_by_std: true + + unet_config: + # TODO: choose your class, Default: MDT_MOS_AS_TOKEN + # (Noted: the 2D-Rope, SwiGLU and the MDT are in two classes, when training with all of them, they should be changed and merged) + target: qa_mdt.audioldm_train.modules.diffusionmodules.PixArt.PixArt_MDT_MOS_AS_TOKEN + params: + input_size : [256, 16] + # patch_size: [16,4] + patch_size : [4, 1] + overlap_size: [0, 0] + in_channels : 8 + hidden_size : 1152 + depth : 28 + num_heads : 16 + mlp_ratio : 4.0 + class_dropout_prob : 0.1 + pred_sigma : True + drop_path : 0. 
+ window_size : 0 + window_block_indexes : None + use_rel_pos : False + cond_dim : 1024 + lewei_scale : 1.0 + overlap: [0, 0] + use_cfg: true + mask_ratio: 0.30 + decode_layer: 8 + + cond_stage_config: + crossattn_flan_t5: + cond_stage_key: text + conditioning_key: crossattn + target: qa_mdt.audioldm_train.conditional_models.FlanT5HiddenState + + evaluation_params: + unconditional_guidance_scale: 3.5 + ddim_sampling_steps: 200 + n_candidates_per_samples: 3 \ No newline at end of file diff --git a/qa_mdt/audioldm_train/dataset_plugin.py b/qa_mdt/audioldm_train/dataset_plugin.py new file mode 100644 index 0000000000000000000000000000000000000000..4cfc6297e2f66759077c1540fc04b19560f3659c --- /dev/null +++ b/qa_mdt/audioldm_train/dataset_plugin.py @@ -0,0 +1,508 @@ +import os +import torch +import numpy as np +import torchaudio +import matplotlib.pyplot as plt + +CACHE = { + "get_vits_phoneme_ids": { + "PAD_LENGTH": 310, + "_pad": "_", + "_punctuation": ';:,.!?¡¿—…"«»“” ', + "_letters": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", + "_letters_ipa": "ɑɐɒæɓʙβɔɕçɗɖðʤəɘɚɛɜɝɞɟʄɡɠɢʛɦɧħɥʜɨɪʝɭɬɫɮʟɱɯɰŋɳɲɴøɵɸθœɶʘɹɺɾɻʀʁɽʂʃʈʧʉʊʋⱱʌɣɤʍχʎʏʑʐʒʔʡʕʢǀǁǂǃˈˌːˑʼʴʰʱʲʷˠˤ˞↓↑→↗↘'̩'ᵻ", + "_special": "♪☎☒☝⚠", + } +} + +CACHE["get_vits_phoneme_ids"]["symbols"] = ( + [CACHE["get_vits_phoneme_ids"]["_pad"]] + + list(CACHE["get_vits_phoneme_ids"]["_punctuation"]) + + list(CACHE["get_vits_phoneme_ids"]["_letters"]) + + list(CACHE["get_vits_phoneme_ids"]["_letters_ipa"]) + + list(CACHE["get_vits_phoneme_ids"]["_special"]) +) +CACHE["get_vits_phoneme_ids"]["_symbol_to_id"] = { + s: i for i, s in enumerate(CACHE["get_vits_phoneme_ids"]["symbols"]) +} + + +def get_vits_phoneme_ids(config, dl_output, metadata): + pad_token_id = 0 + pad_length = CACHE["get_vits_phoneme_ids"]["PAD_LENGTH"] + _symbol_to_id = CACHE["get_vits_phoneme_ids"]["_symbol_to_id"] + + assert ( + "phonemes" in metadata.keys() + ), "You must provide vits phonemes on using addon get_vits_phoneme_ids" + clean_text = metadata["phonemes"] + sequence = [] + + for symbol in clean_text: + symbol_id = _symbol_to_id[symbol] + sequence += [symbol_id] + + inserted_zero_sequence = [0] * (len(sequence) * 2) + inserted_zero_sequence[1::2] = sequence + inserted_zero_sequence = inserted_zero_sequence + [0] + + def _pad_phonemes(phonemes_list): + return phonemes_list + [pad_token_id] * (pad_length - len(phonemes_list)) + + return {"phoneme_idx": torch.LongTensor(_pad_phonemes(inserted_zero_sequence))} + + +def get_vits_phoneme_ids_no_padding(config, dl_output, metadata): + pad_token_id = 0 + pad_length = CACHE["get_vits_phoneme_ids"]["PAD_LENGTH"] + _symbol_to_id = CACHE["get_vits_phoneme_ids"]["_symbol_to_id"] + + assert ( + "phonemes" in metadata.keys() + ), "You must provide vits phonemes on using addon get_vits_phoneme_ids" + clean_text = metadata["phonemes"] + "⚠" + sequence = [] + + for symbol in clean_text: + if symbol not in _symbol_to_id.keys(): + print("%s is not in the vocabulary. 
%s" % (symbol, clean_text)) + symbol = "_" + symbol_id = _symbol_to_id[symbol] + sequence += [symbol_id] + + def _pad_phonemes(phonemes_list): + return phonemes_list + [pad_token_id] * (pad_length - len(phonemes_list)) + + sequence = sequence[:pad_length] + + return {"phoneme_idx": torch.LongTensor(_pad_phonemes(sequence))} + + +def calculate_relative_bandwidth(config, dl_output, metadata): + assert "stft" in dl_output.keys() + + # The last dimension of the stft feature is the frequency dimension + freq_dimensions = dl_output["stft"].size(-1) + + freq_energy_dist = torch.sum(dl_output["stft"], dim=0) + freq_energy_dist = torch.cumsum(freq_energy_dist, dim=0) + total_energy = freq_energy_dist[-1] + + percentile_5th = total_energy * 0.05 + percentile_95th = total_energy * 0.95 + + lower_idx = torch.argmin(torch.abs(percentile_5th - freq_energy_dist)) + higher_idx = torch.argmin(torch.abs(percentile_95th - freq_energy_dist)) + + lower_idx = int((lower_idx / freq_dimensions) * 1000) + higher_idx = int((higher_idx / freq_dimensions) * 1000) + + return {"freq_energy_percentile": torch.LongTensor([lower_idx, higher_idx])} + + +def calculate_mel_spec_relative_bandwidth_as_extra_channel(config, dl_output, metadata): + assert "stft" in dl_output.keys() + linear_mel_spec = torch.exp(torch.clip(dl_output["log_mel_spec"], max=10)) + + # The last dimension of the stft feature is the frequency dimension + freq_dimensions = linear_mel_spec.size(-1) + freq_energy_dist = torch.sum(linear_mel_spec, dim=0) + freq_energy_dist = torch.cumsum(freq_energy_dist, dim=0) + total_energy = freq_energy_dist[-1] + + percentile_5th = total_energy * 0.05 + percentile_95th = total_energy * 0.95 + + lower_idx = torch.argmin(torch.abs(percentile_5th - freq_energy_dist)) + higher_idx = torch.argmin(torch.abs(percentile_95th - freq_energy_dist)) + + latent_t_size = config["model"]["params"]["latent_t_size"] + latent_f_size = config["model"]["params"]["latent_f_size"] + + lower_idx = int(latent_f_size * float((lower_idx / freq_dimensions))) + higher_idx = int(latent_f_size * float((higher_idx / freq_dimensions))) + + bandwidth_condition = torch.zeros((latent_t_size, latent_f_size)) + bandwidth_condition[:, lower_idx:higher_idx] += 1.0 + + return { + "mel_spec_bandwidth_cond_extra_channel": bandwidth_condition, + "freq_energy_percentile": torch.LongTensor([lower_idx, higher_idx]), + } + + +def waveform_rs_48k(config, dl_output, metadata): + waveform = dl_output["waveform"] # [1, samples] + sampling_rate = dl_output["sampling_rate"] + + if sampling_rate != 48000: + waveform_48k = torchaudio.functional.resample( + waveform, orig_freq=sampling_rate, new_freq=48000 + ) + else: + waveform_48k = waveform + + return {"waveform_48k": waveform_48k} + + +def extract_vits_phoneme_and_flant5_text(config, dl_output, metadata): + assert ( + "phoneme" not in metadata.keys() + ), "The metadata of speech you use seems belong to fastspeech. 
Please check dataset_root.json" + + if "phonemes" in metadata.keys(): + new_item = get_vits_phoneme_ids_no_padding(config, dl_output, metadata) + new_item["text"] = "" # We assume TTS data does not have text description + else: + fake_metadata = {"phonemes": ""} # Add empty phoneme sequence + new_item = get_vits_phoneme_ids_no_padding(config, dl_output, fake_metadata) + + return new_item + + +def extract_fs2_phoneme_and_flant5_text(config, dl_output, metadata): + if "phoneme" in metadata.keys(): + new_item = extract_fs2_phoneme_g2p_en_feature(config, dl_output, metadata) + new_item["text"] = "" + else: + fake_metadata = {"phoneme": []} + new_item = extract_fs2_phoneme_g2p_en_feature(config, dl_output, fake_metadata) + return new_item + + +def extract_fs2_phoneme_g2p_en_feature(config, dl_output, metadata): + PAD_LENGTH = 135 + + phonemes_lookup_dict = { + "K": 0, + "IH2": 1, + "NG": 2, + "OW2": 3, + "AH2": 4, + "F": 5, + "AE0": 6, + "IY0": 7, + "SH": 8, + "G": 9, + "W": 10, + "UW1": 11, + "AO2": 12, + "AW2": 13, + "UW0": 14, + "EY2": 15, + "UW2": 16, + "AE2": 17, + "IH0": 18, + "P": 19, + "D": 20, + "ER1": 21, + "AA1": 22, + "EH0": 23, + "UH1": 24, + "N": 25, + "V": 26, + "AY1": 27, + "EY1": 28, + "UH2": 29, + "EH1": 30, + "L": 31, + "AA2": 32, + "R": 33, + "OY1": 34, + "Y": 35, + "ER2": 36, + "S": 37, + "AE1": 38, + "AH1": 39, + "JH": 40, + "ER0": 41, + "EH2": 42, + "IY2": 43, + "OY2": 44, + "AW1": 45, + "IH1": 46, + "IY1": 47, + "OW0": 48, + "AO0": 49, + "AY0": 50, + "EY0": 51, + "AY2": 52, + "UH0": 53, + "M": 54, + "TH": 55, + "T": 56, + "OY0": 57, + "AW0": 58, + "DH": 59, + "Z": 60, + "spn": 61, + "AH0": 62, + "sp": 63, + "AO1": 64, + "OW1": 65, + "ZH": 66, + "B": 67, + "AA0": 68, + "CH": 69, + "HH": 70, + } + pad_token_id = len(phonemes_lookup_dict.keys()) + + assert ( + "phoneme" in metadata.keys() + ), "The dataloader add-on extract_phoneme_g2p_en_feature will output phoneme id, which is not specified in your dataset" + + phonemes = [ + phonemes_lookup_dict[x] + for x in metadata["phoneme"] + if (x in phonemes_lookup_dict.keys()) + ] + + if (len(phonemes) / PAD_LENGTH) > 5: + print( + "Warning: Phonemes length is too long and is truncated too much! %s" + % metadata + ) + + phonemes = phonemes[:PAD_LENGTH] + + def _pad_phonemes(phonemes_list): + return phonemes_list + [pad_token_id] * (PAD_LENGTH - len(phonemes_list)) + + return {"phoneme_idx": torch.LongTensor(_pad_phonemes(phonemes))} + + +def extract_phoneme_g2p_en_feature(config, dl_output, metadata): + PAD_LENGTH = 250 + + phonemes_lookup_dict = { + " ": 0, + "AA": 1, + "AE": 2, + "AH": 3, + "AO": 4, + "AW": 5, + "AY": 6, + "B": 7, + "CH": 8, + "D": 9, + "DH": 10, + "EH": 11, + "ER": 12, + "EY": 13, + "F": 14, + "G": 15, + "HH": 16, + "IH": 17, + "IY": 18, + "JH": 19, + "K": 20, + "L": 21, + "M": 22, + "N": 23, + "NG": 24, + "OW": 25, + "OY": 26, + "P": 27, + "R": 28, + "S": 29, + "SH": 30, + "T": 31, + "TH": 32, + "UH": 33, + "UW": 34, + "V": 35, + "W": 36, + "Y": 37, + "Z": 38, + "ZH": 39, + } + pad_token_id = len(phonemes_lookup_dict.keys()) + + assert ( + "phoneme" in metadata.keys() + ), "The dataloader add-on extract_phoneme_g2p_en_feature will output phoneme id, which is not specified in your dataset" + phonemes = [ + phonemes_lookup_dict[x] + for x in metadata["phoneme"] + if (x in phonemes_lookup_dict.keys()) + ] + + if (len(phonemes) / PAD_LENGTH) > 5: + print( + "Warning: Phonemes length is too long and is truncated too much! 
%s" + % metadata + ) + + phonemes = phonemes[:PAD_LENGTH] + + def _pad_phonemes(phonemes_list): + return phonemes_list + [pad_token_id] * (PAD_LENGTH - len(phonemes_list)) + + return {"phoneme_idx": torch.LongTensor(_pad_phonemes(phonemes))} + + +def extract_kaldi_fbank_feature(config, dl_output, metadata): + norm_mean = -4.2677393 + norm_std = 4.5689974 + + waveform = dl_output["waveform"] # [1, samples] + sampling_rate = dl_output["sampling_rate"] + log_mel_spec_hifigan = dl_output["log_mel_spec"] + + if sampling_rate != 16000: + waveform_16k = torchaudio.functional.resample( + waveform, orig_freq=sampling_rate, new_freq=16000 + ) + else: + waveform_16k = waveform + + waveform_16k = waveform_16k - waveform_16k.mean() + fbank = torchaudio.compliance.kaldi.fbank( + waveform_16k, + htk_compat=True, + sample_frequency=16000, + use_energy=False, + window_type="hanning", + num_mel_bins=128, + dither=0.0, + frame_shift=10, + ) + + TARGET_LEN = log_mel_spec_hifigan.size(0) + + # cut and pad + n_frames = fbank.shape[0] + p = TARGET_LEN - n_frames + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + fbank = m(fbank) + elif p < 0: + fbank = fbank[:TARGET_LEN, :] + + fbank = (fbank - norm_mean) / (norm_std * 2) + + return {"ta_kaldi_fbank": fbank} # [1024, 128] + + +def extract_kaldi_fbank_feature_32k(config, dl_output, metadata): + norm_mean = -4.2677393 + norm_std = 4.5689974 + + waveform = dl_output["waveform"] # [1, samples] + sampling_rate = dl_output["sampling_rate"] + log_mel_spec_hifigan = dl_output["log_mel_spec"] + + if sampling_rate != 32000: + waveform_32k = torchaudio.functional.resample( + waveform, orig_freq=sampling_rate, new_freq=32000 + ) + else: + waveform_32k = waveform + + waveform_32k = waveform_32k - waveform_32k.mean() + fbank = torchaudio.compliance.kaldi.fbank( + waveform_32k, + htk_compat=True, + sample_frequency=32000, + use_energy=False, + window_type="hanning", + num_mel_bins=128, + dither=0.0, + frame_shift=10, + ) + + TARGET_LEN = log_mel_spec_hifigan.size(0) + + # cut and pad + n_frames = fbank.shape[0] + p = TARGET_LEN - n_frames + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + fbank = m(fbank) + elif p < 0: + fbank = fbank[:TARGET_LEN, :] + + fbank = (fbank - norm_mean) / (norm_std * 2) + + return {"ta_kaldi_fbank": fbank} # [1024, 128] + + +# Use the beat and downbeat information as music conditions +def extract_drum_beat(config, dl_output, metadata): + def visualization(conditional_signal, mel_spectrogram, filename): + import soundfile as sf + + sf.write( + os.path.basename(dl_output["fname"]), + np.array(dl_output["waveform"])[0], + dl_output["sampling_rate"], + ) + plt.figure(figsize=(10, 10)) + + plt.subplot(211) + plt.imshow(np.array(conditional_signal).T, aspect="auto") + plt.title("Conditional Signal") + + plt.subplot(212) + plt.imshow(np.array(mel_spectrogram).T, aspect="auto") + plt.title("Mel Spectrogram") + + plt.savefig(filename) + plt.close() + + assert "sample_rate" in metadata and "beat" in metadata and "downbeat" in metadata + + sampling_rate = metadata["sample_rate"] + duration = dl_output["duration"] + # The dataloader segment length before performing torch resampling + original_segment_length_before_resample = int(sampling_rate * duration) + + random_start_sample = int(dl_output["random_start_sample_in_original_audio_file"]) + + # The sample idx for beat and downbeat, relatively to the segmented audio + beat = [ + x - random_start_sample + for x in metadata["beat"] + if ( + x - random_start_sample >= 0 + and x - random_start_sample <= 
original_segment_length_before_resample + ) + ] + downbeat = [ + x - random_start_sample + for x in metadata["downbeat"] + if ( + x - random_start_sample >= 0 + and x - random_start_sample <= original_segment_length_before_resample + ) + ] + + latent_shape = ( + config["model"]["params"]["latent_t_size"], + config["model"]["params"]["latent_f_size"], + ) + conditional_signal = torch.zeros(latent_shape) + + # beat: -0.5 + # downbeat: +1.0 + # 0: none; -0.5: beat; 1.0: downbeat; 0.5: downbeat+beat + for each in beat: + beat_index = int( + (each / original_segment_length_before_resample) * latent_shape[0] + ) + beat_index = min(beat_index, conditional_signal.size(0) - 1) + + conditional_signal[beat_index, :] -= 0.5 + + for each in downbeat: + beat_index = int( + (each / original_segment_length_before_resample) * latent_shape[0] + ) + beat_index = min(beat_index, conditional_signal.size(0) - 1) + + conditional_signal[beat_index, :] += 1.0 + + # visualization(conditional_signal, dl_output["log_mel_spec"], filename = os.path.basename(dl_output["fname"])+".png") + + return {"cond_beat_downbeat": conditional_signal} diff --git a/qa_mdt/audioldm_train/losses/__init__.py b/qa_mdt/audioldm_train/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..51f52025c46f6c1c8fe54d4bafff1610eecfee0e --- /dev/null +++ b/qa_mdt/audioldm_train/losses/__init__.py @@ -0,0 +1 @@ +from .contperceptual import LPIPSWithDiscriminator diff --git a/qa_mdt/audioldm_train/losses/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/losses/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..644ee32348f2ff385534d32e3fbdce8567658525 Binary files /dev/null and b/qa_mdt/audioldm_train/losses/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/losses/__pycache__/contperceptual.cpython-310.pyc b/qa_mdt/audioldm_train/losses/__pycache__/contperceptual.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..488c4e43068cc043b95b3e9be2aa06160310b05b Binary files /dev/null and b/qa_mdt/audioldm_train/losses/__pycache__/contperceptual.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/losses/contperceptual.py b/qa_mdt/audioldm_train/losses/contperceptual.py new file mode 100644 index 0000000000000000000000000000000000000000..867adad821447c2707b3325e9f0af435963c77b5 --- /dev/null +++ b/qa_mdt/audioldm_train/losses/contperceptual.py @@ -0,0 +1,160 @@ +import torch +import torch.nn as nn + +import sys +sys.path.append("/train20/intern/permanent/changli7/dataset_ptm") +from taming.modules.losses.vqperceptual import * # TODO: taming dependency yes/no? 
+ + +class LPIPSWithDiscriminator(nn.Module): + def __init__( + self, + disc_start, + logvar_init=0.0, + kl_weight=1.0, + pixelloss_weight=1.0, + disc_num_layers=3, + disc_in_channels=3, + disc_factor=1.0, + disc_weight=1.0, + perceptual_weight=1.0, + use_actnorm=False, + disc_conditional=False, + disc_loss="hinge", + ): + super().__init__() + assert disc_loss in ["hinge", "vanilla"] + self.kl_weight = kl_weight + self.pixel_weight = pixelloss_weight + self.perceptual_loss = LPIPS().eval() + self.perceptual_weight = perceptual_weight + # output log variance + self.logvar = nn.Parameter(torch.ones(size=()) * logvar_init) + + self.discriminator = NLayerDiscriminator( + input_nc=disc_in_channels, n_layers=disc_num_layers, use_actnorm=use_actnorm + ).apply(weights_init) + self.discriminator_iter_start = disc_start + self.disc_loss = hinge_d_loss if disc_loss == "hinge" else vanilla_d_loss + self.disc_factor = disc_factor + self.discriminator_weight = disc_weight + self.disc_conditional = disc_conditional + + def calculate_adaptive_weight(self, nll_loss, g_loss, last_layer=None): + if last_layer is not None: + nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0] + g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0] + else: + nll_grads = torch.autograd.grad( + nll_loss, self.last_layer[0], retain_graph=True + )[0] + g_grads = torch.autograd.grad( + g_loss, self.last_layer[0], retain_graph=True + )[0] + + d_weight = torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4) + d_weight = torch.clamp(d_weight, 0.0, 1e4).detach() + d_weight = d_weight * self.discriminator_weight + return d_weight + + def forward( + self, + inputs, + reconstructions, + posteriors, + optimizer_idx, + global_step, + waveform=None, + rec_waveform=None, + last_layer=None, + cond=None, + split="train", + weights=None, + ): + rec_loss = torch.abs(inputs.contiguous() - reconstructions.contiguous()) + + # Always true + if self.perceptual_weight > 0: + p_loss = self.perceptual_loss( + inputs.contiguous(), reconstructions.contiguous() + ) + rec_loss = rec_loss + self.perceptual_weight * p_loss + + nll_loss = rec_loss / torch.exp(self.logvar) + self.logvar + weighted_nll_loss = nll_loss + if weights is not None: + weighted_nll_loss = weights * nll_loss + weighted_nll_loss = torch.sum(weighted_nll_loss) / weighted_nll_loss.shape[0] + nll_loss = torch.sum(nll_loss) / nll_loss.shape[0] + kl_loss = posteriors.kl() + kl_loss = torch.sum(kl_loss) / kl_loss.shape[0] + + # now the GAN part + if optimizer_idx == 0: + # generator update + if cond is None: + assert not self.disc_conditional + logits_fake = self.discriminator(reconstructions.contiguous()) + else: + assert self.disc_conditional + logits_fake = self.discriminator( + torch.cat((reconstructions.contiguous(), cond), dim=1) + ) + g_loss = -torch.mean(logits_fake) + + if self.disc_factor > 0.0: + try: + d_weight = self.calculate_adaptive_weight( + nll_loss, g_loss, last_layer=last_layer + ) + except RuntimeError: + assert not self.training + d_weight = torch.tensor(0.0) + else: + d_weight = torch.tensor(0.0) + + disc_factor = adopt_weight( + self.disc_factor, global_step, threshold=self.discriminator_iter_start + ) + loss = ( + weighted_nll_loss + + self.kl_weight * kl_loss + + d_weight * disc_factor * g_loss + ) + + log = { + "{}/total_loss".format(split): loss.clone().detach().mean(), + "{}/logvar".format(split): self.logvar.detach(), + "{}/kl_loss".format(split): kl_loss.detach().mean(), + "{}/nll_loss".format(split): 
nll_loss.detach().mean(), + "{}/rec_loss".format(split): rec_loss.detach().mean(), + "{}/d_weight".format(split): d_weight.detach(), + "{}/disc_factor".format(split): torch.tensor(disc_factor), + "{}/g_loss".format(split): g_loss.detach().mean(), + } + return loss, log + + if optimizer_idx == 1: + # second pass for discriminator update + if cond is None: + logits_real = self.discriminator(inputs.contiguous().detach()) + logits_fake = self.discriminator(reconstructions.contiguous().detach()) + else: + logits_real = self.discriminator( + torch.cat((inputs.contiguous().detach(), cond), dim=1) + ) + logits_fake = self.discriminator( + torch.cat((reconstructions.contiguous().detach(), cond), dim=1) + ) + + disc_factor = adopt_weight( + self.disc_factor, global_step, threshold=self.discriminator_iter_start + ) + d_loss = disc_factor * self.disc_loss(logits_real, logits_fake) + + log = { + "{}/disc_loss".format(split): d_loss.clone().detach().mean(), + "{}/logits_real".format(split): logits_real.detach().mean(), + "{}/logits_fake".format(split): logits_fake.detach().mean(), + } + return d_loss, log \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/.DS_Store b/qa_mdt/audioldm_train/modules/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..d7cd554dcd7f346914ab959bffdd5562e89426c8 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/.DS_Store differ diff --git a/qa_mdt/audioldm_train/modules/__init__.py b/qa_mdt/audioldm_train/modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/audioldm_train/modules/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..42439fc436a7a6b66505b9040baacbd084bff473 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/AudioMAE.py b/qa_mdt/audioldm_train/modules/audiomae/AudioMAE.py new file mode 100644 index 0000000000000000000000000000000000000000..f07aeb1240b678c50de7be2d8c57273bb1182ddf --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/AudioMAE.py @@ -0,0 +1,151 @@ +""" +Reference Repo: https://github.com/facebookresearch/AudioMAE +""" + +import torch +import torch.nn as nn +from timm.models.layers import to_2tuple +import qa_mdt.audioldm_train.modules.audiomae.models_vit as models_vit +import qa_mdt.audioldm_train.modules.audiomae.models_mae as models_mae + +# model = mae_vit_base_patch16(in_chans=1, audio_exp=True, img_size=(1024, 128)) + + +class PatchEmbed_new(nn.Module): + """Flexible Image to Patch Embedding""" + + def __init__( + self, img_size=224, patch_size=16, in_chans=3, embed_dim=768, stride=10 + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + stride = to_2tuple(stride) + + self.img_size = img_size + self.patch_size = patch_size + + self.proj = nn.Conv2d( + in_chans, embed_dim, kernel_size=patch_size, stride=stride + ) # with overlapped patches + # self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size) + + # self.patch_hw = (img_size[1] // patch_size[1], img_size[0] // patch_size[0]) + # self.num_patches = (img_size[1] // patch_size[1]) * (img_size[0] // patch_size[0]) + _, _, h, w = self.get_output_shape(img_size) # n, emb_dim, h, w + self.patch_hw = (h, w) + self.num_patches = h * w + + def 
get_output_shape(self, img_size): + # todo: don't be lazy.. + return self.proj(torch.randn(1, 1, img_size[0], img_size[1])).shape + + def forward(self, x): + B, C, H, W = x.shape + # FIXME look at relaxing size constraints + # assert H == self.img_size[0] and W == self.img_size[1], \ + # f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})." + x = self.proj(x) + x = x.flatten(2).transpose(1, 2) + return x + + +class AudioMAE(nn.Module): + """Audio Masked Autoencoder (MAE) pre-trained and finetuned on AudioSet (for SoundCLIP)""" + + def __init__( + self, + ): + super().__init__() + model = models_vit.__dict__["vit_base_patch16"]( + num_classes=527, + drop_path_rate=0.1, + global_pool=True, + mask_2d=True, + use_custom_patch=False, + ) + + img_size = (1024, 128) + emb_dim = 768 + + model.patch_embed = PatchEmbed_new( + img_size=img_size, + patch_size=(16, 16), + in_chans=1, + embed_dim=emb_dim, + stride=16, + ) + num_patches = model.patch_embed.num_patches + # num_patches = 512 # assume audioset, 1024//16=64, 128//16=8, 512=64x8 + model.pos_embed = nn.Parameter( + torch.zeros(1, num_patches + 1, emb_dim), requires_grad=False + ) # fixed sin-cos embedding + + checkpoint_path = ( + "/mnt/bn/data-xubo/project/Masked_AudioEncoder/checkpoint/finetuned.pth" + ) + checkpoint = torch.load(checkpoint_path, map_location="cpu") + msg = model.load_state_dict(checkpoint["model"], strict=False) + # print(f'Load AudioMAE from {checkpoint_path} / message: {msg}') + + self.model = model + + def forward(self, x, mask_t_prob=0.0, mask_f_prob=0.0): + """ + x: mel fbank [Batch, 1, T, F] + mask_t_prob: 'T masking ratio (percentage of removed patches).' + mask_f_prob: 'F masking ratio (percentage of removed patches).' + """ + return self.model(x=x, mask_t_prob=mask_t_prob, mask_f_prob=mask_f_prob) + + +class Vanilla_AudioMAE(nn.Module): + """Audio Masked Autoencoder (MAE) pre-trained on AudioSet (for AudioLDM)""" + + def __init__( + self, + ): + super().__init__() + model = models_mae.__dict__["mae_vit_base_patch16"]( + in_chans=1, audio_exp=True, img_size=(1024, 128) + ) + + checkpoint_path = "data/checkpoints/audiomae_16k_128bins.ckpt" + checkpoint = torch.load(checkpoint_path, map_location="cpu") + msg = model.load_state_dict(checkpoint["model"], strict=False) + + # Skip the missing keys of decoder modules (not required) + # print(f'Load AudioMAE from {checkpoint_path} / message: {msg}') + + self.model = model.eval() + + def forward(self, x, mask_ratio=0.0, no_mask=False, no_average=False): + """ + x: mel fbank [Batch, 1, 1024 (T), 128 (F)] + mask_ratio: 'masking ratio (percentage of removed patches).' 
+ """ + with torch.no_grad(): + # embed: [B, 513, 768] for mask_ratio=0.0 + if no_mask: + if no_average: + raise RuntimeError("This function is deprecated") + embed = self.model.forward_encoder_no_random_mask_no_average( + x + ) # mask_ratio + else: + embed = self.model.forward_encoder_no_mask(x) # mask_ratio + else: + raise RuntimeError("This function is deprecated") + embed, _, _, _ = self.model.forward_encoder(x, mask_ratio=mask_ratio) + return embed + + +if __name__ == "__main__": + model = Vanilla_AudioMAE().cuda() + input = torch.randn(4, 1, 1024, 128).cuda() + print("The first run") + embed = model(input, mask_ratio=0.0, no_mask=True) + print(embed) + print("The second run") + embed = model(input, mask_ratio=0.0) + print(embed) diff --git a/qa_mdt/audioldm_train/modules/audiomae/README.md b/qa_mdt/audioldm_train/modules/audiomae/README.md new file mode 100644 index 0000000000000000000000000000000000000000..1e04ededf5456b6e0e9eca190478d62d3eefb754 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/README.md @@ -0,0 +1,24 @@ +# A simple use of Audio Masked AutoEncoder (AudioMAE) +Reference code: https://github.com/facebookresearch/AudioMAE + +Paper: https://arxiv.org/abs/2207.06405 + +Install the required python packages: +``` +pip install -r requirments.txt +``` + + +See the usage in example.py + + + + ``` + python example.py + + """ + Load AudioMAE from /mnt/bn/data-xubo/project/Masked_AudioEncoder checkpoint/finetuned.pth / message: + Start evaluation on AudioSet ... + mAP: 0.463003 + """ + ``` \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/audiomae/__init__.py b/qa_mdt/audioldm_train/modules/audiomae/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/audioldm_train/modules/audiomae/__pycache__/AudioMAE.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/__pycache__/AudioMAE.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d9b0aaee3a9af7c4d3da073f7c2ba2d835537356 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/__pycache__/AudioMAE.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e498fa70caf550841eacaf6df3dd52b1bb6dd8d Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/__pycache__/models_mae.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/__pycache__/models_mae.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..156772b039bc7cfa9b9538fc0ca2b2fe9775af2f Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/__pycache__/models_mae.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/__pycache__/models_vit.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/__pycache__/models_vit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0525dde7420ef0fd0ab0485041272fe4d6529f43 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/__pycache__/models_vit.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/audiovisual_dataset.py b/qa_mdt/audioldm_train/modules/audiomae/audiovisual_dataset.py new file mode 100644 index 
0000000000000000000000000000000000000000..3f000e7736b58da744a5d999c92a5e9eb98b1405 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/audiovisual_dataset.py @@ -0,0 +1,256 @@ +import json +import random +from tqdm import tqdm +import torch +import decord + +decord.bridge.set_bridge("torch") +import torchaudio +from math import ceil +from torch.utils.data import Dataset, DataLoader +import pandas as pd +import numpy as np + + +class AudioVisualDataset(Dataset): + """Can sample data from audio-visual databases + Params: + min_video_frames: used to drop short video clips + video_resize: resize for CLIP processing + sampling_rate: audio sampling rate + max_clip_len: max length (seconds) of audiovisual clip to be sampled + num_sample_frames: number of image frames to be uniformly sampled from video + """ + + def __init__( + self, + datafiles=[ + "/mnt/bn/data-xubo/dataset/audioset_videos/datafiles/audioset_balanced_train.json", + ], + min_video_frames=30, + video_resize=[224, 224], + sampling_rate=16000, + sample_av_clip=True, + max_clip_len=10, + num_sample_frames=10, + # hyparameters used for SpecAug + freqm=48, + timem=192, + return_label=False, + ): + all_data_json = [] + for datafile in datafiles: + with open(datafile, "r") as fp: + data_json = json.load(fp)["data"] + all_data_json.extend(data_json) + + # drop short video clips + self.all_data_json = [ + data + for data in all_data_json + if int(data["video_shape"][0]) >= min_video_frames + ] + + self.max_clip_len = max_clip_len + self.video_resize = video_resize + self.sampling_rate = sampling_rate + self.sample_av_clip = sample_av_clip + self.num_sample_frames = num_sample_frames + self.corresponding_audio_len = self.sampling_rate * self.max_clip_len + + # hyparameters used for AudioMAE + self.freqm = freqm + self.timem = timem + self.norm_mean = -4.2677393 + self.norm_std = 4.5689974 + self.melbins = 128 + self.TARGET_LEN = 1024 + + self.return_label = return_label + if self.return_label: + self.audioset_label2idx = self._prepare_audioset() + + def __len__(self): + return len(self.all_data_json) + + def _read_audio_video(self, index): + try: + video_path = self.all_data_json[index]["mp4"] + # read audio + ar = decord.AudioReader( + video_path, sample_rate=self.sampling_rate, mono=True + ) + # read video frames + vr = decord.VideoReader( + video_path, + height=self.video_resize[0], + width=self.video_resize[1], + ) + + labels = self.all_data_json[index]["labels"] + return vr, ar, labels + + except Exception as e: + print(f"error: {e} occurs, when loading {video_path}") + random_index = random.randint(0, len(self.all_data_json) - 1) + return self._read_audio_video(index=random_index) + + def _prepare_audioset(self): + df1 = pd.read_csv( + "/mnt/bn/lqhaoheliu/datasets/audioset/metadata/class_labels_indices.csv", + delimiter=",", + skiprows=0, + ) + label_set = df1.to_numpy() + code2id = {} + for i in range(len(label_set)): + code2id[label_set[i][1]] = label_set[i][0] + return code2id + + def __getitem__(self, index): + # read audio and video + vr, ar, labels = self._read_audio_video(index) + + # create a audio tensor + audio_data = ar[:] # [1, samples] + audio_len = audio_data.shape[1] / self.sampling_rate + audio_data = audio_data.squeeze(0) # [samples] + + # create a video tensor + full_vid_length = len(vr) + video_rate = ceil(vr.get_avg_fps()) + samples_per_frame = float(self.sampling_rate) / video_rate + start_frame = 0 + + # sample video clip + if audio_len > self.max_clip_len and self.sample_av_clip: + start_frame = 
random.randint( + 0, max(full_vid_length - video_rate * self.max_clip_len, 0) + ) + end_frame = min(start_frame + video_rate * self.max_clip_len, full_vid_length) + video_data = vr.get_batch(range(start_frame, end_frame)) + + # sample audio clip + if audio_len > self.max_clip_len and self.sample_av_clip: + # corresponding_audio_len = int(video_data.size()[0] * samples_per_frame) + corresponding_audio_start = int(start_frame * samples_per_frame) + audio_data = audio_data[corresponding_audio_start:] + + # cut or pad audio clip with respect to the sampled video clip + if audio_data.shape[0] < self.corresponding_audio_len: + zero_data = torch.zeros(self.corresponding_audio_len) + zero_data[: audio_data.shape[0]] = audio_data + audio_data = zero_data + elif audio_data.shape[0] > self.corresponding_audio_len: + audio_data = audio_data[: self.corresponding_audio_len] + + # uniformly sample image frames from video [tentative solution] + interval = video_data.shape[0] // self.num_sample_frames + video_data = video_data[::interval][: self.num_sample_frames] + + assert ( + video_data.shape[0] == self.num_sample_frames + ), f"number of sampled image frames is {video_data.shape[0]}" + + assert ( + audio_data.shape[0] == self.corresponding_audio_len + ), f"number of audio samples is {audio_data.shape[0]}" + + # video transformation + video_data = video_data / 255.0 + video_data = video_data.permute(0, 3, 1, 2) # [N, H, W, C] -> [N, C, H, W] + + # calculate mel fbank of waveform for audio encoder + audio_data = audio_data.unsqueeze(0) # [1, samples] + audio_data = audio_data - audio_data.mean() + fbank = torchaudio.compliance.kaldi.fbank( + audio_data, + htk_compat=True, + sample_frequency=self.sampling_rate, + use_energy=False, + window_type="hanning", + num_mel_bins=self.melbins, + dither=0.0, + frame_shift=10, + ) + # cut and pad + n_frames = fbank.shape[0] + p = self.TARGET_LEN - n_frames + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + fbank = m(fbank) + elif p < 0: + fbank = fbank[0 : self.TARGET_LEN, :] + + # SpecAug for training (not for eval) + freqm = torchaudio.transforms.FrequencyMasking(self.freqm) + timem = torchaudio.transforms.TimeMasking(self.timem) + fbank = fbank.transpose(0, 1).unsqueeze(0) # 1, 128, 1024 (...,freq,time) + if self.freqm != 0: + fbank = freqm(fbank) + if self.timem != 0: + fbank = timem(fbank) # (..., freq, time) + fbank = torch.transpose(fbank.squeeze(), 0, 1) # time, freq + fbank = (fbank - self.norm_mean) / (self.norm_std * 2) + fbank = fbank.unsqueeze(0) + + if self.return_label: + # get audioset lebel indexes + label_indices = np.zeros(527) + + for label_str in labels.split(","): + label_indices[int(self.audioset_label2idx[label_str])] = 1.0 + + label_indices = torch.FloatTensor(label_indices) + + data_dict = { + "labels": label_indices, + "images": video_data, + "fbank": fbank, + # 'modality': 'audio_visual' + } + + else: + data_dict = { + "images": video_data, + "fbank": fbank, + # 'modality': 'audio_visual' + } + + return data_dict + + +def collate_fn(list_data_dict): + r"""Collate mini-batch data to inputs and targets for training. + + Args: + list_data_dict: e.g., [ + {'vocals': (channels_num, segment_samples), + 'accompaniment': (channels_num, segment_samples), + 'mixture': (channels_num, segment_samples) + }, + {'vocals': (channels_num, segment_samples), + 'accompaniment': (channels_num, segment_samples), + 'mixture': (channels_num, segment_samples) + }, + ...] + + Returns: + data_dict: e.g. 
{ + 'vocals': (batch_size, channels_num, segment_samples), + 'accompaniment': (batch_size, channels_num, segment_samples), + 'mixture': (batch_size, channels_num, segment_samples) + } + """ + + data_dict = {} + for key in list_data_dict[0].keys(): + # for key in ['waveform']: + # try: + data_dict[key] = [data_dict[key] for data_dict in list_data_dict] + # except: + # from IPython import embed; embed(using=False); os._exit(0) + + data_dict[key] = torch.stack(data_dict[key]) + + return data_dict diff --git a/qa_mdt/audioldm_train/modules/audiomae/example.py b/qa_mdt/audioldm_train/modules/audiomae/example.py new file mode 100644 index 0000000000000000000000000000000000000000..78da1186393cc0b3028b883bdf2da10c9060c324 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/example.py @@ -0,0 +1,52 @@ +import torch +import torch.nn as nn +import numpy as np +from timm.models.layers import to_2tuple +import models_vit +from audiovisual_dataset import AudioVisualDataset, collate_fn +from torch.utils.data import DataLoader +from util.stat import calculate_stats +from tqdm import tqdm +from AudioMAE import AudioMAE + +if __name__ == "__main__": + device = "cuda" + dataset = AudioVisualDataset( + datafiles=[ + "/mnt/bn/data-xubo/dataset/audioset_videos/datafiles/audioset_eval.json" + ], + # disable SpecAug during evaluation + freqm=0, + timem=0, + return_label=True, + ) + + model = AudioMAE().to(device) + model.eval() + + outputs = [] + targets = [] + + dataloader = DataLoader( + dataset, batch_size=64, num_workers=8, shuffle=False, collate_fn=collate_fn + ) + + print("Start evaluation on AudioSet ...") + with torch.no_grad(): + for data in tqdm(dataloader): + fbank = data["fbank"] # [B, 1, T, F] + fbank = fbank.to(device) + output = model(fbank, mask_t_prob=0.0, mask_f_prob=0.0) + target = data["labels"] + outputs.append(output) + targets.append(target) + + outputs = torch.cat(outputs).cpu().numpy() + targets = torch.cat(targets).cpu().numpy() + stats = calculate_stats(outputs, targets) + + AP = [stat["AP"] for stat in stats] + mAP = np.mean([stat["AP"] for stat in stats]) + print("Done ... mAP: {:.6f}".format(mAP)) + + # mAP: 0.463003 diff --git a/qa_mdt/audioldm_train/modules/audiomae/models_mae.py b/qa_mdt/audioldm_train/modules/audiomae/models_mae.py new file mode 100644 index 0000000000000000000000000000000000000000..f6a3db9e80e57e995a7a8673bb4a33cfef8ac8b0 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/models_mae.py @@ -0,0 +1,615 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
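+#
+# This file defines MaskedAutoencoderViT, an MAE adapted to audio spectrograms:
+# the encoder masks patches either uniformly at random (random_masking) or
+# separately along the time and frequency axes (random_masking_2d, controlled by
+# mask_t_prob / mask_f_prob), and the decoder is either a stack of plain
+# Transformer blocks or Swin blocks when decoder_mode == 1.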
+# -------------------------------------------------------- +# References: +# timm: https://github.com/rwightman/pytorch-image-models/tree/master/timm +# DeiT: https://github.com/facebookresearch/deit +# -------------------------------------------------------- + +from functools import partial +from json import encoder + +import torch +import torch.nn as nn + +from timm.models.vision_transformer import Block +from qa_mdt.audioldm_train.modules.audiomae.util.pos_embed import ( + get_2d_sincos_pos_embed, + get_2d_sincos_pos_embed_flexible, + get_1d_sincos_pos_embed_from_grid, +) +from qa_mdt.audioldm_train.modules.audiomae.util.patch_embed import ( + PatchEmbed_new, + PatchEmbed_org, +) + + +class MaskedAutoencoderViT(nn.Module): + """Masked Autoencoder with VisionTransformer backbone""" + + def __init__( + self, + img_size=224, + patch_size=16, + stride=10, + in_chans=3, + embed_dim=1024, + depth=24, + num_heads=16, + decoder_embed_dim=512, + decoder_depth=8, + decoder_num_heads=16, + mlp_ratio=4.0, + norm_layer=nn.LayerNorm, + norm_pix_loss=False, + audio_exp=False, + alpha=0.0, + temperature=0.2, + mode=0, + contextual_depth=8, + use_custom_patch=False, + split_pos=False, + pos_trainable=False, + use_nce=False, + beta=4.0, + decoder_mode=0, + mask_t_prob=0.6, + mask_f_prob=0.5, + mask_2d=False, + epoch=0, + no_shift=False, + ): + super().__init__() + + self.audio_exp = audio_exp + self.embed_dim = embed_dim + self.decoder_embed_dim = decoder_embed_dim + # -------------------------------------------------------------------------- + # MAE encoder specifics + if use_custom_patch: + print( + f"Use custom patch_emb with patch size: {patch_size}, stride: {stride}" + ) + self.patch_embed = PatchEmbed_new( + img_size=img_size, + patch_size=patch_size, + in_chans=in_chans, + embed_dim=embed_dim, + stride=stride, + ) + else: + self.patch_embed = PatchEmbed_org(img_size, patch_size, in_chans, embed_dim) + self.use_custom_patch = use_custom_patch + num_patches = self.patch_embed.num_patches + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) + + # self.split_pos = split_pos # not useful + self.pos_embed = nn.Parameter( + torch.zeros(1, num_patches + 1, embed_dim), requires_grad=pos_trainable + ) # fixed sin-cos embedding + + self.encoder_depth = depth + self.contextual_depth = contextual_depth + self.blocks = nn.ModuleList( + [ + Block( + embed_dim, + num_heads, + mlp_ratio, + qkv_bias=True, + norm_layer=norm_layer, + ) # qk_scale=None + for i in range(depth) + ] + ) + self.norm = norm_layer(embed_dim) + + # -------------------------------------------------------------------------- + # MAE decoder specifics + self.decoder_embed = nn.Linear(embed_dim, decoder_embed_dim, bias=True) + + self.mask_token = nn.Parameter(torch.zeros(1, 1, decoder_embed_dim)) + self.decoder_pos_embed = nn.Parameter( + torch.zeros(1, num_patches + 1, decoder_embed_dim), + requires_grad=pos_trainable, + ) # fixed sin-cos embedding + + self.no_shift = no_shift + + self.decoder_mode = decoder_mode + if ( + self.use_custom_patch + ): # overlapped patches as in AST. 
Similar performance yet compute heavy + window_size = (6, 6) + feat_size = (102, 12) + else: + window_size = (4, 4) + feat_size = (64, 8) + if self.decoder_mode == 1: + decoder_modules = [] + for index in range(16): + if self.no_shift: + shift_size = (0, 0) + else: + if (index % 2) == 0: + shift_size = (0, 0) + else: + shift_size = (2, 0) + # shift_size = tuple([0 if ((index % 2) == 0) else w // 2 for w in window_size]) + decoder_modules.append( + SwinTransformerBlock( + dim=decoder_embed_dim, + num_heads=16, + feat_size=feat_size, + window_size=window_size, + shift_size=shift_size, + mlp_ratio=mlp_ratio, + drop=0.0, + drop_attn=0.0, + drop_path=0.0, + extra_norm=False, + sequential_attn=False, + norm_layer=norm_layer, # nn.LayerNorm, + ) + ) + self.decoder_blocks = nn.ModuleList(decoder_modules) + else: + # Transfomer + self.decoder_blocks = nn.ModuleList( + [ + Block( + decoder_embed_dim, + decoder_num_heads, + mlp_ratio, + qkv_bias=True, + norm_layer=norm_layer, + ) # qk_scale=None, + for i in range(decoder_depth) + ] + ) + + self.decoder_norm = norm_layer(decoder_embed_dim) + self.decoder_pred = nn.Linear( + decoder_embed_dim, patch_size**2 * in_chans, bias=True + ) # decoder to patch + + # -------------------------------------------------------------------------- + + self.norm_pix_loss = norm_pix_loss + + self.patch_size = patch_size + self.stride = stride + + # audio exps + self.alpha = alpha + self.T = temperature + self.mode = mode + self.use_nce = use_nce + self.beta = beta + + self.log_softmax = nn.LogSoftmax(dim=-1) + + self.mask_t_prob = mask_t_prob + self.mask_f_prob = mask_f_prob + self.mask_2d = mask_2d + + self.epoch = epoch + + self.initialize_weights() + + def initialize_weights(self): + # initialization + # initialize (and freeze) pos_embed by sin-cos embedding + if self.audio_exp: + pos_embed = get_2d_sincos_pos_embed_flexible( + self.pos_embed.shape[-1], self.patch_embed.patch_hw, cls_token=True + ) + else: + pos_embed = get_2d_sincos_pos_embed( + self.pos_embed.shape[-1], + int(self.patch_embed.num_patches**0.5), + cls_token=True, + ) + self.pos_embed.data.copy_(torch.from_numpy(pos_embed).float().unsqueeze(0)) + + if self.audio_exp: + decoder_pos_embed = get_2d_sincos_pos_embed_flexible( + self.decoder_pos_embed.shape[-1], + self.patch_embed.patch_hw, + cls_token=True, + ) + else: + decoder_pos_embed = get_2d_sincos_pos_embed( + self.decoder_pos_embed.shape[-1], + int(self.patch_embed.num_patches**0.5), + cls_token=True, + ) + self.decoder_pos_embed.data.copy_( + torch.from_numpy(decoder_pos_embed).float().unsqueeze(0) + ) + + # initialize patch_embed like nn.Linear (instead of nn.Conv2d) + w = self.patch_embed.proj.weight.data + torch.nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # timm's trunc_normal_(std=.02) is effectively normal_(std=0.02) as cutoff is too big (2.) 
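+    # cls_token and mask_token are the learnable token embeddings; pos_embed and
+    # decoder_pos_embed keep the fixed sin-cos values copied above and only become
+    # trainable when pos_trainable=True.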
+ torch.nn.init.normal_(self.cls_token, std=0.02) + torch.nn.init.normal_(self.mask_token, std=0.02) + + # initialize nn.Linear and nn.LayerNorm + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + # we use xavier_uniform following official JAX ViT: + torch.nn.init.xavier_uniform_(m.weight) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def patchify(self, imgs): + """ + imgs: (N, 3, H, W) + x: (N, L, patch_size**2 *3) + L = (H/p)*(W/p) + """ + p = self.patch_embed.patch_size[0] + # assert imgs.shape[2] == imgs.shape[3] and imgs.shape[2] % p == 0 + + if self.audio_exp: + if self.use_custom_patch: # overlapped patch + h, w = self.patch_embed.patch_hw + # todo: fixed h/w patch size and stride size. Make hw custom in the future + x = imgs.unfold(2, self.patch_size, self.stride).unfold( + 3, self.patch_size, self.stride + ) # n,1,H,W -> n,1,h,w,p,p + x = x.reshape(shape=(imgs.shape[0], h * w, p**2 * 1)) + # x = imgs.reshape(shape=(imgs.shape[0], 1, h, p, w, p)) + # x = torch.einsum('nchpwq->nhwpqc', x) + # x = x.reshape(shape=(imgs.shape[0], h * w, p**2 * 1)) + else: + h = imgs.shape[2] // p + w = imgs.shape[3] // p + # h,w = self.patch_embed.patch_hw + x = imgs.reshape(shape=(imgs.shape[0], 1, h, p, w, p)) + x = torch.einsum("nchpwq->nhwpqc", x) + x = x.reshape(shape=(imgs.shape[0], h * w, p**2 * 1)) + else: + h = w = imgs.shape[2] // p + x = imgs.reshape(shape=(imgs.shape[0], 3, h, p, w, p)) + x = torch.einsum("nchpwq->nhwpqc", x) + x = x.reshape(shape=(imgs.shape[0], h * w, p**2 * 3)) + + return x + + def unpatchify(self, x): + """ + x: (N, L, patch_size**2 *3) + specs: (N, 1, H, W) + """ + p = self.patch_embed.patch_size[0] + h = 1024 // p + w = 128 // p + x = x.reshape(shape=(x.shape[0], h, w, p, p, 1)) + x = torch.einsum("nhwpqc->nchpwq", x) + specs = x.reshape(shape=(x.shape[0], 1, h * p, w * p)) + return specs + + def random_masking(self, x, mask_ratio): + """ + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. + x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = torch.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + ids_shuffle = torch.argsort( + noise, dim=1 + ) # ascend: small is keep, large is remove + ids_restore = torch.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = torch.gather(x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = torch.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = torch.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore + + def random_masking_2d(self, x, mask_t_prob, mask_f_prob): + """ + 2D: Spectrogram (msking t and f under mask_t_prob and mask_f_prob) + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. 
+ x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + if self.use_custom_patch: # overlapped patch + T = 101 + F = 12 + else: + T = 64 + F = 8 + # x = x.reshape(N, T, F, D) + len_keep_t = int(T * (1 - mask_t_prob)) + len_keep_f = int(F * (1 - mask_f_prob)) + + # noise for mask in time + noise_t = torch.rand(N, T, device=x.device) # noise in [0, 1] + # sort noise for each sample aling time + ids_shuffle_t = torch.argsort( + noise_t, dim=1 + ) # ascend: small is keep, large is remove + ids_restore_t = torch.argsort(ids_shuffle_t, dim=1) + ids_keep_t = ids_shuffle_t[:, :len_keep_t] + # noise mask in freq + noise_f = torch.rand(N, F, device=x.device) # noise in [0, 1] + ids_shuffle_f = torch.argsort( + noise_f, dim=1 + ) # ascend: small is keep, large is remove + ids_restore_f = torch.argsort(ids_shuffle_f, dim=1) + ids_keep_f = ids_shuffle_f[:, :len_keep_f] # + + # generate the binary mask: 0 is keep, 1 is remove + # mask in freq + mask_f = torch.ones(N, F, device=x.device) + mask_f[:, :len_keep_f] = 0 + mask_f = ( + torch.gather(mask_f, dim=1, index=ids_restore_f) + .unsqueeze(1) + .repeat(1, T, 1) + ) # N,T,F + # mask in time + mask_t = torch.ones(N, T, device=x.device) + mask_t[:, :len_keep_t] = 0 + mask_t = ( + torch.gather(mask_t, dim=1, index=ids_restore_t) + .unsqueeze(1) + .repeat(1, F, 1) + .permute(0, 2, 1) + ) # N,T,F + mask = 1 - (1 - mask_t) * (1 - mask_f) # N, T, F + + # get masked x + id2res = torch.Tensor(list(range(N * T * F))).reshape(N, T, F).to(x.device) + id2res = id2res + 999 * mask # add a large value for masked elements + id2res2 = torch.argsort(id2res.flatten(start_dim=1)) + ids_keep = id2res2.flatten(start_dim=1)[:, : len_keep_f * len_keep_t] + x_masked = torch.gather(x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + ids_restore = torch.argsort(id2res2.flatten(start_dim=1)) + mask = mask.flatten(start_dim=1) + + return x_masked, mask, ids_restore + + def forward_encoder(self, x, mask_ratio, mask_2d=False): + # embed patches + x = self.patch_embed(x) + # add pos embed w/o cls token + x = x + self.pos_embed[:, 1:, :] + + # masking: length -> length * mask_ratio + if mask_2d: + x, mask, ids_restore = self.random_masking_2d( + x, mask_t_prob=self.mask_t_prob, mask_f_prob=self.mask_f_prob + ) + else: + x, mask, ids_restore = self.random_masking(x, mask_ratio) + + # append cls token + cls_token = self.cls_token + self.pos_embed[:, :1, :] + cls_tokens = cls_token.expand(x.shape[0], -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + # apply Transformer blocks + for blk in self.blocks: + x = blk(x) + x = self.norm(x) + + return x, mask, ids_restore, None + + def forward_encoder_no_random_mask_no_average(self, x): + # embed patches + x = self.patch_embed(x) + # add pos embed w/o cls token + x = x + self.pos_embed[:, 1:, :] + + # masking: length -> length * mask_ratio + # if mask_2d: + # x, mask, ids_restore = self.random_masking_2d(x, mask_t_prob=self.mask_t_prob, mask_f_prob=self.mask_f_prob) + # else: + # x, mask, ids_restore = self.random_masking(x, mask_ratio) + + # append cls token + cls_token = self.cls_token + self.pos_embed[:, :1, :] + cls_tokens = cls_token.expand(x.shape[0], -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + # apply Transformer blocks + for blk in self.blocks: + x = blk(x) + x = self.norm(x) + + return x + + def forward_encoder_no_mask(self, x): + # embed patches + x = self.patch_embed(x) + + # add pos embed w/o cls token + x = x + self.pos_embed[:, 1:, :] + + # masking: length -> length * mask_ratio + # x, mask, 
ids_restore = self.random_masking(x, mask_ratio) + # append cls token + cls_token = self.cls_token + self.pos_embed[:, :1, :] + cls_tokens = cls_token.expand(x.shape[0], -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + # apply Transformer blocks + contextual_embs = [] + for n, blk in enumerate(self.blocks): + x = blk(x) + if n > self.contextual_depth: + contextual_embs.append(self.norm(x)) + # x = self.norm(x) + contextual_emb = torch.stack(contextual_embs, dim=0).mean(dim=0) + + return contextual_emb + + def forward_decoder(self, x, ids_restore): + # embed tokens + x = self.decoder_embed(x) + + # append mask tokens to sequence + mask_tokens = self.mask_token.repeat( + x.shape[0], ids_restore.shape[1] + 1 - x.shape[1], 1 + ) + x_ = torch.cat([x[:, 1:, :], mask_tokens], dim=1) # no cls token + x_ = torch.gather( + x_, dim=1, index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2]) + ) # unshuffle + x = torch.cat([x[:, :1, :], x_], dim=1) # append cls token + + # add pos embed + x = x + self.decoder_pos_embed + + if self.decoder_mode != 0: + B, L, D = x.shape + x = x[:, 1:, :] + if self.use_custom_patch: + x = x.reshape(B, 101, 12, D) + x = torch.cat([x, x[:, -1, :].unsqueeze(1)], dim=1) # hack + x = x.reshape(B, 1224, D) + if self.decoder_mode > 3: # mvit + x = self.decoder_blocks(x) + else: + # apply Transformer blocks + for blk in self.decoder_blocks: + x = blk(x) + x = self.decoder_norm(x) + + # predictor projection + pred = self.decoder_pred(x) + + # remove cls token + if self.decoder_mode != 0: + if self.use_custom_patch: + pred = pred.reshape(B, 102, 12, 256) + pred = pred[:, :101, :, :] + pred = pred.reshape(B, 1212, 256) + else: + pred = pred + else: + pred = pred[:, 1:, :] + return pred, None, None # emb, emb_pixel + + def forward_loss(self, imgs, pred, mask, norm_pix_loss=False): + """ + imgs: [N, 3, H, W] + pred: [N, L, p*p*3] + mask: [N, L], 0 is keep, 1 is remove, + """ + target = self.patchify(imgs) + if norm_pix_loss: + mean = target.mean(dim=-1, keepdim=True) + var = target.var(dim=-1, keepdim=True) + target = (target - mean) / (var + 1.0e-6) ** 0.5 + + loss = (pred - target) ** 2 + loss = loss.mean(dim=-1) # [N, L], mean loss per patch + + loss = (loss * mask).sum() / mask.sum() # mean loss on removed patches + return loss + + def forward(self, imgs, mask_ratio=0.8): + emb_enc, mask, ids_restore, _ = self.forward_encoder( + imgs, mask_ratio, mask_2d=self.mask_2d + ) + pred, _, _ = self.forward_decoder(emb_enc, ids_restore) # [N, L, p*p*3] + loss_recon = self.forward_loss( + imgs, pred, mask, norm_pix_loss=self.norm_pix_loss + ) + loss_contrastive = torch.FloatTensor([0.0]).cuda() + return loss_recon, pred, mask, loss_contrastive + + +def mae_vit_small_patch16_dec512d8b(**kwargs): + model = MaskedAutoencoderViT( + patch_size=16, + embed_dim=384, + depth=12, + num_heads=6, + decoder_embed_dim=512, + decoder_num_heads=16, + mlp_ratio=4, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + **kwargs, + ) + return model + + +def mae_vit_base_patch16_dec512d8b(**kwargs): + model = MaskedAutoencoderViT( + patch_size=16, + embed_dim=768, + depth=12, + num_heads=12, + decoder_embed_dim=512, + decoder_num_heads=16, + mlp_ratio=4, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + **kwargs, + ) + return model + + +def mae_vit_large_patch16_dec512d8b(**kwargs): + model = MaskedAutoencoderViT( + patch_size=16, + embed_dim=1024, + depth=24, + num_heads=16, + decoder_embed_dim=512, + decoder_num_heads=16, + mlp_ratio=4, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + **kwargs, + ) + return 
model + + +def mae_vit_huge_patch14_dec512d8b(**kwargs): + model = MaskedAutoencoderViT( + patch_size=14, + embed_dim=1280, + depth=32, + num_heads=16, + decoder_embed_dim=512, + decoder_num_heads=16, + mlp_ratio=4, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + **kwargs, + ) + return model + + +# set recommended archs +mae_vit_base_patch16 = mae_vit_base_patch16_dec512d8b # decoder: 512 dim, 8 blocks +mae_vit_large_patch16 = mae_vit_large_patch16_dec512d8b # decoder: 512 dim, 8 blocks +mae_vit_huge_patch14 = mae_vit_huge_patch14_dec512d8b # decoder: 512 dim, 8 blocks +mae_vit_small_patch16 = mae_vit_small_patch16_dec512d8b # decoder: 512 dim, 8 blocks diff --git a/qa_mdt/audioldm_train/modules/audiomae/models_vit.py b/qa_mdt/audioldm_train/modules/audiomae/models_vit.py new file mode 100644 index 0000000000000000000000000000000000000000..1587630a2327387d6b7b6353e9a7d623c148cfb2 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/models_vit.py @@ -0,0 +1,252 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# References: +# timm: https://github.com/rwightman/pytorch-image-models/tree/master/timm +# DeiT: https://github.com/facebookresearch/deit +# -------------------------------------------------------- + +from functools import partial + +import torch +import torch.nn as nn +import numpy as np +import timm.models.vision_transformer +from timm.models.vision_transformer import PatchEmbed, Block +from qa_mdt.audioldm_train.modules.audiomae.util.patch_embed import ( + PatchEmbed_new, + PatchEmbed3D_new, +) + + +class VisionTransformer(timm.models.vision_transformer.VisionTransformer): + """Vision Transformer with support for global average pooling""" + + def __init__( + self, global_pool=False, mask_2d=True, use_custom_patch=False, **kwargs + ): + super(VisionTransformer, self).__init__(**kwargs) + + self.global_pool = global_pool + if self.global_pool: + norm_layer = kwargs["norm_layer"] + embed_dim = kwargs["embed_dim"] + self.fc_norm = norm_layer(embed_dim) + del self.norm # remove the original norm + self.mask_2d = mask_2d + self.use_custom_patch = use_custom_patch + num_heads = 12 + depth = 12 + mlp_ratio = 4 + + def forward_features(self, x): + B = x.shape[0] + x = self.patch_embed(x) + x = x + self.pos_embed[:, 1:, :] + cls_token = self.cls_token + self.pos_embed[:, :1, :] + cls_tokens = cls_token.expand( + B, -1, -1 + ) # stole cls_tokens impl from Phil Wang, thanks + x = torch.cat((cls_tokens, x), dim=1) + x = self.pos_drop(x) + + for blk in self.blocks: + x = blk(x) + + if self.global_pool: + x = x[:, 1:, :].mean(dim=1) # global pool without cls token + outcome = self.fc_norm(x) + else: + x = self.norm(x) + outcome = x[:, 0] + + return outcome + + def random_masking(self, x, mask_ratio): + """ + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. 
+ x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = torch.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + ids_shuffle = torch.argsort( + noise, dim=1 + ) # ascend: small is keep, large is remove + ids_restore = torch.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = torch.gather(x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = torch.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = torch.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore + + def random_masking_2d(self, x, mask_t_prob, mask_f_prob): + """ + 2D: Spectrogram (msking t and f under mask_t_prob and mask_f_prob) + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. + x: [N, L, D], sequence + """ + + N, L, D = x.shape # batch, length, dim + if self.use_custom_patch: + # # for AS + T = 101 # 64,101 + F = 12 # 8,12 + # # for ESC + # T=50 + # F=12 + # for SPC + # T=12 + # F=12 + else: + # ## for AS + T = 64 + F = 8 + # ## for ESC + # T=32 + # F=8 + ## for SPC + # T=8 + # F=8 + + # mask T + x = x.reshape(N, T, F, D) + len_keep_T = int(T * (1 - mask_t_prob)) + noise = torch.rand(N, T, device=x.device) # noise in [0, 1] + # sort noise for each sample + ids_shuffle = torch.argsort( + noise, dim=1 + ) # ascend: small is keep, large is remove + ids_keep = ids_shuffle[:, :len_keep_T] + index = ids_keep.unsqueeze(-1).unsqueeze(-1).repeat(1, 1, F, D) + # x_masked = torch.gather(x, dim=1, index=index) + # x_masked = x_masked.reshape(N,len_keep_T*F,D) + x = torch.gather(x, dim=1, index=index) # N, len_keep_T(T'), F, D + + # mask F + # x = x.reshape(N, T, F, D) + x = x.permute(0, 2, 1, 3) # N T' F D => N F T' D + len_keep_F = int(F * (1 - mask_f_prob)) + noise = torch.rand(N, F, device=x.device) # noise in [0, 1] + # sort noise for each sample + ids_shuffle = torch.argsort( + noise, dim=1 + ) # ascend: small is keep, large is remove + ids_keep = ids_shuffle[:, :len_keep_F] + # index = ids_keep.unsqueeze(-1).unsqueeze(-1).repeat(1, 1, T, D) + index = ids_keep.unsqueeze(-1).unsqueeze(-1).repeat(1, 1, len_keep_T, D) + x_masked = torch.gather(x, dim=1, index=index) + x_masked = x_masked.permute(0, 2, 1, 3) # N F' T' D => N T' F' D + # x_masked = x_masked.reshape(N,len_keep*T,D) + x_masked = x_masked.reshape(N, len_keep_F * len_keep_T, D) + + return x_masked, None, None + + def forward_features_mask(self, x, mask_t_prob, mask_f_prob): + B = x.shape[0] # 4,1,1024,128 + x = self.patch_embed(x) # 4, 512, 768 + + x = x + self.pos_embed[:, 1:, :] + if self.random_masking_2d: + x, mask, ids_restore = self.random_masking_2d(x, mask_t_prob, mask_f_prob) + else: + x, mask, ids_restore = self.random_masking(x, mask_t_prob) + cls_token = self.cls_token + self.pos_embed[:, :1, :] + cls_tokens = cls_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + x = self.pos_drop(x) + + # apply Transformer blocks + for blk in self.blocks: + x = blk(x) + + if self.global_pool: + x = x[:, 1:, :].mean(dim=1) # global pool without cls token + outcome = self.fc_norm(x) + else: + x = self.norm(x) + outcome = x[:, 0] + + return outcome + + # overwrite original timm + def forward(self, x, v=None, mask_t_prob=0.0, mask_f_prob=0.0): + if mask_t_prob > 0.0 or mask_f_prob > 0.0: + x = 
self.forward_features_mask( + x, mask_t_prob=mask_t_prob, mask_f_prob=mask_f_prob + ) + else: + x = self.forward_features(x) + x = self.head(x) + return x + + +def vit_small_patch16(**kwargs): + model = VisionTransformer( + patch_size=16, + embed_dim=384, + depth=12, + num_heads=6, + mlp_ratio=4, + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + **kwargs + ) + return model + + +def vit_base_patch16(**kwargs): + model = VisionTransformer( + patch_size=16, + embed_dim=768, + depth=12, + num_heads=12, + mlp_ratio=4, + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + **kwargs + ) + return model + + +def vit_large_patch16(**kwargs): + model = VisionTransformer( + patch_size=16, + embed_dim=1024, + depth=24, + num_heads=16, + mlp_ratio=4, + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + **kwargs + ) + return model + + +def vit_huge_patch14(**kwargs): + model = VisionTransformer( + patch_size=14, + embed_dim=1280, + depth=32, + num_heads=16, + mlp_ratio=4, + qkv_bias=True, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + **kwargs + ) + return model diff --git a/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__init__.py b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8c95422ae05e4f5ce0ec2ca3cbfbca860860bf53 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__init__.py @@ -0,0 +1,2 @@ +from .sequence_input import Sequence2AudioMAE +from .model import CLAP2AudioMAE diff --git a/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..11a0e39e7100cb0fb7a1195d08cab66d4c16bf34 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/model.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a3adf280f5e9d6295b4417cde18dbde348d9b94 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/model.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/sequence_input.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/sequence_input.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9642f32a570e9574eaca5b08457fdf5076ac490 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/__pycache__/sequence_input.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/model.py b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/model.py new file mode 100644 index 0000000000000000000000000000000000000000..fd4a4ffdcb30ec79e5ddd41c6c5035d9c1c4f09a --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/model.py @@ -0,0 +1,329 @@ +import torch +import torch.nn as nn +import pytorch_lightning as pl +from qa_mdt.audioldm_train.utilities.model_util import ( + exists, + default, + mean_flat, + count_params, + instantiate_from_config, +) + +from transformers import GPT2Config, GPT2Model +import torch.optim.lr_scheduler as lr_scheduler + + +class Prenet(nn.Module): + def __init__(self, in_dim, sizes=[256, 
128], dropout_rate=0.5): + super(Prenet, self).__init__() + in_sizes = [in_dim] + sizes[:-1] + self.layers = nn.ModuleList( + [ + nn.Linear(in_size, out_size) + for (in_size, out_size) in zip(in_sizes, sizes) + ] + ) + self.relu = nn.ReLU() + self.dropout = nn.Dropout(dropout_rate) + + def forward(self, inputs): + for linear in self.layers: + inputs = self.dropout(self.relu(linear(inputs))) + return inputs + + +class CLAP2AudioMAE(pl.LightningModule): + def __init__( + self, + sequence_gen_length, + base_learning_rate, + cond_stage_config, + use_audiomae_linear=False, + **kwargs + ): + + super().__init__() + assert use_audiomae_linear == False + self.learning_rate = base_learning_rate + self.cond_stage_config = cond_stage_config + self.use_audiomae_linear = use_audiomae_linear + + self.mae_token_num = sequence_gen_length # 4*4 pooling of the audiomae latent + + self.cond_stage_models = nn.ModuleList([]) + self.instantiate_cond_stage(cond_stage_config) + + self.model = GPT2Model.from_pretrained("gpt2") + + self.linear_clap = nn.Linear(512, 768) + + if use_audiomae_linear: + # self.linear_audiomae = nn.Linear(768, 768) # TODO remove linear_audiomae + self.linear_audiomae = None # TODO remove linear_audiomae + + self.loss_fn = nn.MSELoss() + + self.logger_save_dir = None + self.logger_exp_name = None + self.logger_exp_group_name = None + self.logger_version = None + + def set_log_dir(self, save_dir, exp_group_name, exp_name): + self.logger_save_dir = save_dir + self.logger_exp_group_name = exp_group_name + self.logger_exp_name = exp_name + + def cfg_uncond(self, batch_size): + unconditional_conditioning = {} + for key in self.cond_stage_model_metadata: + model_idx = self.cond_stage_model_metadata[key]["model_idx"] + unconditional_conditioning[key] = self.cond_stage_models[ + model_idx + ].get_unconditional_condition(batch_size) + assert ( + "crossattn_audiomae_pooled" in unconditional_conditioning.keys() + ), "The module is not initialized with AudioMAE" + unconditional_conditioning[ + "crossattn_clap_to_audiomae_feature" + ] = unconditional_conditioning["crossattn_audiomae_pooled"] + return unconditional_conditioning + + def configure_optimizers(self): + lr = float(self.learning_rate) + params = list(self.model.parameters()) + list(self.linear_clap.parameters()) + + if self.use_audiomae_linear: + params += list(self.linear_audiomae.parameters()) + + opt = torch.optim.AdamW(params, lr=lr) + scheduler = lr_scheduler.StepLR(opt, step_size=1, gamma=0.9) + return [opt], [scheduler] + + def training_step(self, batch, batch_idx=None, cond_dict=None): + if cond_dict is None: + cond_dict = self.get_input(batch) + + input_embeds, target_embeds = ( + cond_dict["film_clap_cond1"], + cond_dict["crossattn_audiomae_pooled"][0], + ) + + # Some times if the pooling factor is random, the length of crossattn_audiomae_pooled is not necessary 32, so need to calculate separately + if "crossattn_audiomae_pooled_44" in cond_dict.keys(): + target_embeds = cond_dict["crossattn_audiomae_pooled_44"][0] + + if self.use_audiomae_linear: + input_embeds = torch.cat( + [self.linear_clap(input_embeds), self.linear_audiomae(target_embeds)], + dim=1, + ) + else: + input_embeds = torch.cat( + [self.linear_clap(input_embeds), target_embeds], dim=1 + ) + + output_embeds = self.model(inputs_embeds=input_embeds)["last_hidden_state"] + + target = target_embeds + output = output_embeds[:, :-1] + + loss = self.loss_fn(output, target) + + self.log( + "train/loss_clap_2_audiomae", + loss, + prog_bar=True, + logger=True, + on_step=True, 
+ on_epoch=False, + sync_dist=True, + ) + + self.log( + "global_step_audiomae", + float(self.global_step), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + sync_dist=True, + ) + + return loss + + def generate(self, batch, cond_dict=None, no_grad=False): + if cond_dict is None: + cond_dict = self.get_input(batch) + input_embeds = cond_dict["film_clap_cond1"] + steps = self.mae_token_num + + if no_grad: + with torch.no_grad(): + model_input = self.linear_clap(input_embeds) + for _ in range(steps): + output = self.model(inputs_embeds=model_input)["last_hidden_state"] + model_input = torch.cat([model_input, output[:, -1:, :]], dim=1) + else: + model_input = self.linear_clap(input_embeds) + for _ in range(steps): + output = self.model(inputs_embeds=model_input)["last_hidden_state"] + model_input = torch.cat([model_input, output[:, -1:, :]], dim=1) + + return model_input[:, 1:], cond_dict + + # def on_validation_epoch_start(self) -> None: + # # Use text as condition during validation + # for key in self.cond_stage_model_metadata.keys(): + # metadata = self.cond_stage_model_metadata[key] + # model_idx, cond_stage_key, conditioning_key = metadata["model_idx"], metadata["cond_stage_key"], metadata["conditioning_key"] + + # # If we use CLAP as condition, we might use audio for training, but we also must use text for evaluation + # # if(isinstance(self.cond_stage_models[model_idx], CLAPAudioEmbeddingClassifierFreev2)): + # # self.cond_stage_model_metadata[key]["cond_stage_key_orig"] = self.cond_stage_model_metadata[key]["cond_stage_key"] + # # self.cond_stage_model_metadata[key]["embed_mode_orig"] = self.cond_stage_models[model_idx].embed_mode + # # print("Change the model original cond_keyand embed_mode %s, %s to text during evaluation" % (self.cond_stage_model_metadata[key]["cond_stage_key_orig"], self.cond_stage_model_metadata[key]["embed_mode_orig"])) + # # self.cond_stage_model_metadata[key]["cond_stage_key"] = "text" + # # self.cond_stage_models[model_idx].embed_mode = "text" + + # return super().on_validation_epoch_start() + + def validation_step(self, batch, batch_idx): + cond_dict = self.get_input(batch) + # cond_dict['film_clap_cond1']: [2,1,512] + # cond_dict['crossattn_audiomae_pooled']: [2, 128, 768] + + input_embeds, target_embeds = ( + cond_dict["film_clap_cond1"], + cond_dict["crossattn_audiomae_pooled"][0], + ) + + # Some times if the pooling factor is random, the length of crossattn_audiomae_pooled is not necessary 32, so need to calculate separately + if "crossattn_audiomae_pooled_44" in cond_dict.keys(): + target_embeds = cond_dict["crossattn_audiomae_pooled_44"][0] + + if self.use_audiomae_linear: + input_embeds = torch.cat( + [self.linear_clap(input_embeds), self.linear_audiomae(target_embeds)], + dim=1, + ) + else: + input_embeds = torch.cat( + [self.linear_clap(input_embeds), target_embeds], dim=1 + ) + + output_embeds = self.model(inputs_embeds=input_embeds)["last_hidden_state"] + + target = target_embeds + output = output_embeds[:, :-1] + + loss = self.loss_fn(output, target) + + self.log( + "val/loss", + loss, + prog_bar=True, + logger=True, + on_step=True, + sync_dist=True, + on_epoch=True, + ) + + generation_output, _ = self.generate(batch) + ar_gen_loss = self.loss_fn(generation_output, target) + + self.log( + "val/ar_gen_loss", + ar_gen_loss, + prog_bar=True, + logger=True, + on_step=True, + sync_dist=True, + on_epoch=True, + ) + + return {"loss": loss, "ar_gen_loss": ar_gen_loss} + + def get_input_item(self, batch, k): + fname, text, label_indices, 
waveform, stft, fbank = ( + batch["fname"], + batch["text"], + batch["label_vector"], + batch["waveform"], + batch["stft"], + batch["log_mel_spec"], + ) + ret = {} + + ret["fbank"] = ( + fbank.unsqueeze(1).to(memory_format=torch.contiguous_format).float() + ) + ret["stft"] = stft.to(memory_format=torch.contiguous_format).float() + # ret["clip_label"] = clip_label.to(memory_format=torch.contiguous_format).float() + ret["waveform"] = waveform.to(memory_format=torch.contiguous_format).float() + ret["text"] = list(text) + ret["fname"] = fname + + for key in batch.keys(): + if key not in ret.keys(): + ret[key] = batch[key] + + return ret[k] + + def get_input(self, batch): + cond_dict = {} + if len(self.cond_stage_model_metadata.keys()) > 0: + unconditional_cfg = False + + for cond_model_key in self.cond_stage_model_metadata.keys(): + cond_stage_key = self.cond_stage_model_metadata[cond_model_key][ + "cond_stage_key" + ] + + # if(not self.training): + # if(isinstance(self.cond_stage_models[self.cond_stage_model_metadata[cond_model_key]["model_idx"]], CLAPAudioEmbeddingClassifierFreev2)): + # assert cond_stage_key == "text" # CLAP model should use text for evaluation + + # The original data for conditioning + xc = self.get_input_item(batch, cond_stage_key) + if type(xc) == torch.Tensor: + xc = xc.to(self.device) + + c = self.get_learned_conditioning( + xc, key=cond_model_key, unconditional_cfg=unconditional_cfg + ) + cond_dict[cond_model_key] = c + + return cond_dict + + def instantiate_cond_stage(self, config): + self.cond_stage_model_metadata = {} + + for i, cond_model_key in enumerate(config.keys()): + model = instantiate_from_config(config[cond_model_key]) + self.cond_stage_models.append(model) + self.cond_stage_model_metadata[cond_model_key] = { + "model_idx": i, + "cond_stage_key": config[cond_model_key]["cond_stage_key"], + "conditioning_key": config[cond_model_key]["conditioning_key"], + } + + def get_learned_conditioning(self, c, key, unconditional_cfg): + assert key in self.cond_stage_model_metadata.keys() + + # Classifier-free guidance + if not unconditional_cfg: + c = self.cond_stage_models[ + self.cond_stage_model_metadata[key]["model_idx"] + ](c) + else: + if isinstance(c, torch.Tensor): + batchsize = c.size(0) + elif isinstance(c, list): + batchsize = len(c) + else: + raise NotImplementedError() + c = self.cond_stage_models[ + self.cond_stage_model_metadata[key]["model_idx"] + ].get_unconditional_condition(batchsize) + + return c diff --git a/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/sequence_input.py b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/sequence_input.py new file mode 100644 index 0000000000000000000000000000000000000000..de0d212c088d212123ed2177a9708785c143c640 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/sequence_gen/sequence_input.py @@ -0,0 +1,737 @@ +import torch +import torch.nn as nn +import numpy as np +import pytorch_lightning as pl +from qa_mdt.audioldm_train.utilities.model_util import ( + exists, + default, + mean_flat, + count_params, + instantiate_from_config, +) +from torch.optim import * + +from transformers import GPT2Config, GPT2Model, GPTJConfig, GPTJModel +import torch.optim.lr_scheduler as lr_scheduler + + +class Sequence2AudioMAE(pl.LightningModule): + def __init__( + self, + base_learning_rate, + sequence_gen_length, + sequence_input_key, + sequence_input_embed_dim, + cond_stage_config, + optimizer_type="AdamW", + use_warmup=True, + use_ar_gen_loss=False, + use_audiomae_linear=False, + target_tokens_mask_ratio=0.0, + 
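# target_tokens_mask_ratio / random_mask_ratio: fraction of the AudioMAE target
+ # tokens that mask_target_sequence may zero out; when random_mask_ratio is True
+ # a per-step ratio is drawn uniformly from [0, target_tokens_mask_ratio].
+ 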
random_mask_ratio=False, + **kwargs + ): + + super().__init__() + assert use_audiomae_linear == False + self.random_mask_ratio = random_mask_ratio + self.learning_rate = base_learning_rate + self.cond_stage_config = cond_stage_config + self.use_audiomae_linear = use_audiomae_linear + self.optimizer_type = optimizer_type + self.use_warmup = use_warmup + self.use_ar_gen_loss = use_ar_gen_loss + # Even though the LDM can be conditioned on mutliple pooling rate + # Our model always predict the higest pooling rate + + self.mae_token_num = sequence_gen_length + self.sequence_input_key = sequence_input_key + self.sequence_input_embed_dim = sequence_input_embed_dim + self.target_tokens_mask_ratio = target_tokens_mask_ratio + + self.start_of_sequence_tokens = nn.Embedding(32, 768) + self.end_of_sequence_tokens = nn.Embedding(32, 768) + + self.input_sequence_embed_linear = nn.ModuleList([]) + self.initial_learning_rate = None + + for dim in self.sequence_input_embed_dim: + self.input_sequence_embed_linear.append(nn.Linear(dim, 768)) + + self.cond_stage_models = nn.ModuleList([]) + self.instantiate_cond_stage(cond_stage_config) + self.initialize_param_check_toolkit() + + self.private_training_step = 0 + + # configuration = GPT2Config(n_layer=1) # TODO + # self.model=GPT2Model(configuration) + ################### + # self.model=nn.Linear(768,768, bias=False) # TODO change the model + # with torch.no_grad(): + # self.model.weight.copy_(torch.eye(768)) + ################### + self.model = GPT2Model.from_pretrained("gpt2") + ################### + # self.model = nn.LSTM(input_size=768, hidden_size=768, num_layers=1,bias=False) # TODO + + # self.loss_fn = nn.MSELoss() + self.loss_fn = nn.L1Loss() + + self.logger_save_dir = None + self.logger_exp_name = None + self.logger_exp_group_name = None + self.logger_version = None + + def set_log_dir(self, save_dir, exp_group_name, exp_name): + self.logger_save_dir = save_dir + self.logger_exp_group_name = exp_group_name + self.logger_exp_name = exp_name + + def cfg_uncond(self, batch_size): + unconditional_conditioning = {} + for key in self.cond_stage_model_metadata: + model_idx = self.cond_stage_model_metadata[key]["model_idx"] + unconditional_conditioning[key] = self.cond_stage_models[ + model_idx + ].get_unconditional_condition(batch_size) + assert ( + "crossattn_audiomae_pooled" in unconditional_conditioning.keys() + ), "The module is not initialized with AudioMAE" + unconditional_conditioning[ + "crossattn_clap_to_audiomae_feature" + ] = unconditional_conditioning["crossattn_audiomae_pooled"] + return unconditional_conditioning + + def configure_optimizers(self): + lr = float(self.learning_rate) + # params = list(self.model.parameters()) + list(self.input_sequence_embed_linear.parameters()) + params = list(self.parameters()) + + # opt = torch.optim.Adam(params, lr=lr, betas=(0.9, 0.98), eps=1e-9) + opt = eval(self.optimizer_type)(params, lr=lr) + scheduler = lr_scheduler.StepLR(opt, step_size=10, gamma=0.8) + return [opt], [scheduler] + + def add_sos_eos_tokens(self, _id, sequence, attn_mask): + batchsize = sequence.size(0) + + new_attn_mask_step = torch.ones((batchsize, 1)).to(sequence.device) + key_id = torch.tensor([_id]).to(sequence.device) + + # Add two more steps to attn mask + new_attn_mask = torch.cat( + [new_attn_mask_step, attn_mask, new_attn_mask_step], dim=1 + ) + + # Add two more tokens in the sequence + sos_token = self.start_of_sequence_tokens(key_id).expand(batchsize, 1, -1) + eos_token = self.end_of_sequence_tokens(key_id).expand(batchsize, 
1, -1) + new_sequence = torch.cat([sos_token, sequence, eos_token], dim=1) + return new_sequence, new_attn_mask + + def truncate_sequence_and_mask(self, sequence, mask, max_len=512): + if sequence.size(1) > max_len: + print( + "The input sequence length to GPT-2 model is too long:", + sequence.size(1), + ) + return sequence[:, :max_len], mask[:, :max_len] + else: + return sequence, mask + + def get_input_sequence_and_mask(self, cond_dict): + input_embeds = None + input_embeds_attn_mask = None + for _id, sequence_key in enumerate(self.sequence_input_key): + assert sequence_key in cond_dict.keys(), ( + "Invalid sequence key %s" % sequence_key + ) + cond_embed = cond_dict[sequence_key] + if isinstance(cond_embed, list): + assert ( + len(cond_embed) == 2 + ), "The crossattn returned list should have length 2, including embed and attn_mask" + item_input_embeds, item_attn_mask = cond_embed + + item_input_embeds = self.input_sequence_embed_linear[_id]( + item_input_embeds + ) + + item_input_embeds, item_attn_mask = self.add_sos_eos_tokens( + _id, item_input_embeds, item_attn_mask + ) + + if input_embeds is None and input_embeds_attn_mask is None: + input_embeds, input_embeds_attn_mask = ( + item_input_embeds, + item_attn_mask, + ) + else: + input_embeds = torch.cat( + [input_embeds, item_input_embeds], dim=1 + ) # The 1-st dimension is time steps + input_embeds_attn_mask = torch.cat( + [input_embeds_attn_mask, item_attn_mask], dim=1 + ) # The 1-st dimension is time steps + else: + assert isinstance(cond_embed, torch.Tensor) + cond_embed = self.input_sequence_embed_linear[_id](cond_embed) + attn_mask = torch.ones((cond_embed.size(0), cond_embed.size(1))).to( + cond_embed.device + ) + + item_input_embeds, item_attn_mask = self.add_sos_eos_tokens( + _id, cond_embed, attn_mask + ) + + if input_embeds is None and input_embeds_attn_mask is None: + input_embeds, input_embeds_attn_mask = ( + item_input_embeds, + item_attn_mask, + ) + else: + input_embeds, input_embeds_attn_mask = torch.cat( + [input_embeds, item_input_embeds], dim=1 + ), torch.cat([input_embeds_attn_mask, item_attn_mask], dim=1) + + assert input_embeds is not None and input_embeds_attn_mask is not None + + input_embeds, input_embeds_attn_mask = self.truncate_sequence_and_mask( + input_embeds, input_embeds_attn_mask, int(1024 - self.mae_token_num) + ) + cond_sequence_end_time_idx = input_embeds.size( + 1 + ) # The index that we start to collect the output embeds + + return input_embeds, input_embeds_attn_mask, cond_sequence_end_time_idx + + def warmup_step(self): + if self.initial_learning_rate is None: + self.initial_learning_rate = float(self.learning_rate) + + # Only the first parameter group + if self.global_step <= 1000: + if self.global_step == 0: + print( + "Warming up learning rate start with %s" + % self.initial_learning_rate + ) + self.trainer.optimizers[0].param_groups[0]["lr"] = ( + self.global_step / 1000 + ) * self.initial_learning_rate + else: + # TODO set learning rate here + self.trainer.optimizers[0].param_groups[0][ + "lr" + ] = self.initial_learning_rate + + def mask_target_sequence(self, target_embeds, target_embeds_attn_mask): + time_seq_mask = None + if self.target_tokens_mask_ratio > 1e-4: + batchsize, time_seq_len, embed_dim = target_embeds.size() + _, time_seq_len = target_embeds_attn_mask.size() + # Generate random mask + if self.random_mask_ratio: + mask_ratio = torch.rand(1).item() * self.target_tokens_mask_ratio + else: + mask_ratio = self.target_tokens_mask_ratio + + time_seq_mask = (torch.rand((batchsize, 
time_seq_len)) > mask_ratio).to( + target_embeds.device + ) + # Mask the target embedding + target_embeds = target_embeds * time_seq_mask.unsqueeze(-1) + target_embeds_attn_mask = target_embeds_attn_mask * time_seq_mask + return target_embeds, target_embeds_attn_mask, time_seq_mask + + def training_step(self, batch, batch_idx=None, cond_dict=None, return_output=False): + # cond_dict['film_clap_cond1']: [2,1,512] + # cond_dict['crossattn_audiomae_pooled']: [2, 128, 768] + + if self.use_warmup: + self.warmup_step() + + if cond_dict is None: + cond_dict = self.get_input(batch) + + # param_list = list(self.model.parameters()) + target_embeds, target_embeds_attn_mask = ( + cond_dict["crossattn_audiomae_pooled"][0], + cond_dict["crossattn_audiomae_pooled"][1], + ) + + ( + input_embeds, + input_embeds_attn_mask, + cond_sequence_end_time_idx, + ) = self.get_input_sequence_and_mask(cond_dict) + + # Some times if the pooling factor is random, the length of crossattn_audiomae_pooled is not necessary 32, so need to calculate separately + if "crossattn_audiomae_pooled_44" in cond_dict.keys(): + target_embeds = cond_dict["crossattn_audiomae_pooled_44"][0] + + # target_embeds, target_embeds_attn_mask, time_seq_mask = self.mask_target_sequence(target_embeds, target_embeds_attn_mask) + + final_input_embeds = torch.cat([input_embeds, target_embeds], dim=1) + final_input_embeds_attn_mask = torch.cat( + [input_embeds_attn_mask, target_embeds_attn_mask], dim=1 + ) + + ########################### GPT-2 + output_embeds = self.model( + inputs_embeds=final_input_embeds, + attention_mask=final_input_embeds_attn_mask, + )["last_hidden_state"] + ########################### DNN + # output_embeds = self.model(final_input_embeds) + ########################### LSTM + # output_embeds,_ = self.model(final_input_embeds) + + target = target_embeds + output = output_embeds[:, cond_sequence_end_time_idx - 1 : -1] + + # output = output_embeds[:, cond_sequence_end_time_idx: ] # TODO bug here intentionally + + assert target.size(1) == self.mae_token_num + + # if(batch_idx % 1000 == 0): + # print(output[0], target[0]) + loss = self.loss_fn(output, target) + + if self.use_ar_gen_loss: + ar_gen_loss = self.calculate_ahead_k_step_loss(batch, batch_idx, cond_dict) + else: + ar_gen_loss = loss + + if self.private_training_step % 500 == 0: + print( + "AudioMAE prediction module:", "loss", loss, "ar_gen_loss", ar_gen_loss + ) + + try: + learning_rate = self.trainer.optimizers[0].param_groups[0]["lr"] + + self.log( + "train/lr_audiomae_pred", + learning_rate, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + sync_dist=True, + ) + except: + pass + + self.log( + "train/loss_clap_2_audiomae", + loss, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + sync_dist=True, + ) + + self.log( + "train/loss_ar_gen_loss", + ar_gen_loss, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + sync_dist=True, + ) + + self.log( + "global_step_audiomae", + float(self.global_step), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + sync_dist=True, + ) + self.private_training_step += 1 + if return_output: + return loss + ar_gen_loss, output + else: + return loss + ar_gen_loss + + def calculate_ahead_k_step_loss(self, batch, batch_idx=None, cond_dict=None): + if cond_dict is None: + cond_dict = self.get_input(batch) + + target_embeds, target_embeds_attn_mask = ( + cond_dict["crossattn_audiomae_pooled"][0], + cond_dict["crossattn_audiomae_pooled"][1], + ) + + assert ( + 
torch.sum(target_embeds_attn_mask < 0.1) < 1 + ), "This function only works for AudioMAE prediction, which should have all one atten_mask" + + ( + input_embeds, + input_embeds_attn_mask, + cond_sequence_end_time_idx, + ) = self.get_input_sequence_and_mask(cond_dict) + + target_total_time_steps = target_embeds.size(1) + + steps = min(round(torch.rand(1).item() * 8), target_total_time_steps) + + if steps < 2: + steps = 2 + + start_idx = max( + 0, round(torch.rand(1).item() * (target_total_time_steps - steps)) - 1 + ) + + model_input = input_embeds + model_input_mask = input_embeds_attn_mask + target_embeds_ar_gen = target_embeds[:, start_idx : start_idx + steps, :] + generation = [] + + if start_idx > 0: + model_input = torch.cat( + [input_embeds, target_embeds[:, :start_idx, :]], dim=1 + ) + attention_mask_known_steps = torch.ones( + (model_input_mask.size(0), start_idx) + ).to(model_input.device) + model_input_mask = torch.cat( + [input_embeds_attn_mask, attention_mask_known_steps], dim=1 + ) + + for _ in range(steps): + output = self.model( + inputs_embeds=model_input, attention_mask=model_input_mask + )["last_hidden_state"] + # Update the model input + generation.append(output[:, -1:, :]) + model_input = torch.cat([model_input, output[:, -1:, :]], dim=1) + # Update the attention mask + attention_mask_new_step = torch.ones((model_input_mask.size(0), 1)).to( + model_input.device + ) + model_input_mask = torch.cat( + [model_input_mask, attention_mask_new_step], dim=1 + ) + + generation = torch.cat(generation, dim=1) + + return self.loss_fn(generation, target_embeds_ar_gen) + + def generate_partial(self, batch, cond_dict=None, no_grad=False): + if cond_dict is None: + cond_dict = self.get_input(batch) + + print("Generate partially prompted audio with in-context learning") + # self.model.train() + # assert self.model.training==True + + target_embeds, target_embeds_attn_mask = ( + cond_dict["crossattn_audiomae_pooled"][0], + cond_dict["crossattn_audiomae_pooled"][1], + ) + + target_time_steps = target_embeds.size(1) + + ( + input_embeds, + input_embeds_attn_mask, + cond_sequence_end_time_idx, + ) = self.get_input_sequence_and_mask(cond_dict) + + model_input = torch.cat( + [input_embeds, target_embeds[:, : target_time_steps // 4, :]], dim=1 + ) + model_input_mask = torch.cat( + [ + input_embeds_attn_mask, + target_embeds_attn_mask[:, : target_time_steps // 4], + ], + dim=1, + ) + + steps = self.mae_token_num + + for _ in range(3 * steps // 4): + output = self.model( + inputs_embeds=model_input, attention_mask=model_input_mask + )["last_hidden_state"] + # Update the model input + model_input = torch.cat([model_input, output[:, -1:, :]], dim=1) + # Update the attention mask + attention_mask_new_step = torch.ones((model_input_mask.size(0), 1)).to( + model_input.device + ) + model_input_mask = torch.cat( + [model_input_mask, attention_mask_new_step], dim=1 + ) + + output = model_input[:, cond_sequence_end_time_idx:] + + return output, cond_dict + + def generate(self, batch, cond_dict=None, no_grad=False): + if cond_dict is None: + cond_dict = self.get_input(batch) + + # self.model.train() + # print("!!!!!!!!!!!!!train") + + ( + input_embeds, + input_embeds_attn_mask, + cond_sequence_end_time_idx, + ) = self.get_input_sequence_and_mask(cond_dict) + model_input = input_embeds + model_input_mask = input_embeds_attn_mask + + steps = self.mae_token_num + + for _ in range(steps): + output = self.model( + inputs_embeds=model_input, attention_mask=model_input_mask + )["last_hidden_state"] + # Update the 
model input + model_input = torch.cat([model_input, output[:, -1:, :]], dim=1) + # Update the attention mask + attention_mask_new_step = torch.ones((model_input_mask.size(0), 1)).to( + model_input.device + ) + model_input_mask = torch.cat( + [model_input_mask, attention_mask_new_step], dim=1 + ) + + return model_input[:, cond_sequence_end_time_idx:], cond_dict + + # def on_validation_epoch_start(self) -> None: + # # Use text as condition during validation + # for key in self.cond_stage_model_metadata.keys(): + # metadata = self.cond_stage_model_metadata[key] + # model_idx, cond_stage_key, conditioning_key = metadata["model_idx"], metadata["cond_stage_key"], metadata["conditioning_key"] + + # # If we use CLAP as condition, we might use audio for training, but we also must use text for evaluation + # # if(isinstance(self.cond_stage_models[model_idx], CLAPAudioEmbeddingClassifierFreev2)): + # # self.cond_stage_model_metadata[key]["cond_stage_key_orig"] = self.cond_stage_model_metadata[key]["cond_stage_key"] + # # self.cond_stage_model_metadata[key]["embed_mode_orig"] = self.cond_stage_models[model_idx].embed_mode + # # print("Change the model original cond_keyand embed_mode %s, %s to text during evaluation" % (self.cond_stage_model_metadata[key]["cond_stage_key_orig"], self.cond_stage_model_metadata[key]["embed_mode_orig"])) + # # self.cond_stage_model_metadata[key]["cond_stage_key"] = "text" + # # self.cond_stage_models[model_idx].embed_mode = "text" + + # return super().on_validation_epoch_start() + + def validation_step(self, batch, batch_idx): + cond_dict = self.get_input(batch) + # cond_dict['film_clap_cond1']: [2,1,512] + # cond_dict['crossattn_audiomae_pooled']: [2, 128, 768] + + target_embeds, target_embeds_attn_mask = ( + cond_dict["crossattn_audiomae_pooled"][0], + cond_dict["crossattn_audiomae_pooled"][1], + ) + + ( + input_embeds, + input_embeds_attn_mask, + cond_sequence_end_time_idx, + ) = self.get_input_sequence_and_mask(cond_dict) + + # Some times if the pooling factor is random, the length of crossattn_audiomae_pooled is not necessary 32, so need to calculate separately + if "crossattn_audiomae_pooled_44" in cond_dict.keys(): + target_embeds = cond_dict["crossattn_audiomae_pooled_44"][0] + + final_input_embeds = torch.cat([input_embeds, target_embeds], dim=1) + final_input_embeds_attn_mask = torch.cat( + [input_embeds_attn_mask, target_embeds_attn_mask], dim=1 + ) + + output_embeds = self.model( + inputs_embeds=final_input_embeds, + attention_mask=final_input_embeds_attn_mask, + )["last_hidden_state"] + + target = target_embeds + output = output_embeds[:, cond_sequence_end_time_idx - 1 : -1] + + loss = self.loss_fn(output, target) + + self.log( + "val/loss", + loss, + prog_bar=True, + logger=True, + on_step=True, + sync_dist=True, + on_epoch=True, + ) + + generation_output, _ = self.generate(batch) + ar_gen_loss = self.loss_fn(generation_output, target) + + self.log( + "val/ar_gen_loss", + ar_gen_loss, + prog_bar=True, + logger=True, + on_step=True, + sync_dist=True, + on_epoch=True, + ) + + return {"loss": loss, "ar_gen_loss": ar_gen_loss} + + def get_input_item(self, batch, k): + fname, text, label_indices, waveform, stft, fbank = ( + batch["fname"], + batch["text"], + batch["label_vector"], + batch["waveform"], + batch["stft"], + batch["log_mel_spec"], + ) + ret = {} + + ret["fbank"] = ( + fbank.unsqueeze(1).to(memory_format=torch.contiguous_format).float() + ) + ret["stft"] = stft.to(memory_format=torch.contiguous_format).float() + # ret["clip_label"] = 
clip_label.to(memory_format=torch.contiguous_format).float() + ret["waveform"] = waveform.to(memory_format=torch.contiguous_format).float() + ret["text"] = list(text) + ret["fname"] = fname + + for key in batch.keys(): + if key not in ret.keys(): + ret[key] = batch[key] + + return ret[k] + + def get_input(self, batch): + cond_dict = {} + if len(self.cond_stage_model_metadata.keys()) > 0: + unconditional_cfg = False + + for cond_model_key in self.cond_stage_model_metadata.keys(): + cond_stage_key = self.cond_stage_model_metadata[cond_model_key][ + "cond_stage_key" + ] + + # if(not self.training): + # if(isinstance(self.cond_stage_models[self.cond_stage_model_metadata[cond_model_key]["model_idx"]], CLAPAudioEmbeddingClassifierFreev2)): + # assert cond_stage_key == "text" # CLAP model should use text for evaluation + + # The original data for conditioning + xc = self.get_input_item(batch, cond_stage_key) + if type(xc) == torch.Tensor: + xc = xc.to(self.device) + + c = self.get_learned_conditioning( + xc, key=cond_model_key, unconditional_cfg=unconditional_cfg + ) + cond_dict[cond_model_key] = c + + return cond_dict + + def instantiate_cond_stage(self, config): + self.cond_stage_model_metadata = {} + + for i, cond_model_key in enumerate(config.keys()): + model = instantiate_from_config(config[cond_model_key]) + self.cond_stage_models.append(model) + self.cond_stage_model_metadata[cond_model_key] = { + "model_idx": i, + "cond_stage_key": config[cond_model_key]["cond_stage_key"], + "conditioning_key": config[cond_model_key]["conditioning_key"], + } + + def get_learned_conditioning(self, c, key, unconditional_cfg): + assert key in self.cond_stage_model_metadata.keys() + + # Classifier-free guidance + if not unconditional_cfg: + c = self.cond_stage_models[ + self.cond_stage_model_metadata[key]["model_idx"] + ](c) + else: + if isinstance(c, torch.Tensor): + batchsize = c.size(0) + elif isinstance(c, list): + batchsize = len(c) + else: + raise NotImplementedError() + c = self.cond_stage_models[ + self.cond_stage_model_metadata[key]["model_idx"] + ].get_unconditional_condition(batchsize) + + return c + + def initialize_param_check_toolkit(self): + self.tracked_steps = 0 + self.param_dict = {} + + def statistic_require_grad_tensor_number(self, module, name=None): + requires_grad_num = 0 + total_num = 0 + require_grad_tensor = None + for p in module.parameters(): + if p.requires_grad: + requires_grad_num += 1 + if require_grad_tensor is None: + require_grad_tensor = p + total_num += 1 + print( + "Module: [%s] have %s trainable parameters out of %s total parameters (%.2f)" + % (name, requires_grad_num, total_num, requires_grad_num / total_num) + ) + return require_grad_tensor + + def check_module_param_update(self): + + if self.tracked_steps == 0: + print("Sequence2AudioMAE") + for name, module in self.named_children(): + try: + require_grad_tensor = self.statistic_require_grad_tensor_number( + module, name=name + ) + if require_grad_tensor is not None: + self.param_dict[name] = require_grad_tensor.clone() + else: + print("==> %s does not requires grad" % name) + except Exception as e: + print("%s does not have trainable parameters: %s" % (name, e)) + continue + + if self.tracked_steps % 5000 == 0: + print("Sequence2AudioMAE") + for name, module in self.named_children(): + try: + require_grad_tensor = self.statistic_require_grad_tensor_number( + module, name=name + ) + + if require_grad_tensor is not None: + print( + "===> Param diff %s: %s; Size: %s" + % ( + name, + torch.sum( + torch.abs( + 
self.param_dict[name] - require_grad_tensor + ) + ), + require_grad_tensor.size(), + ) + ) + else: + print("%s does not requires grad" % name) + except Exception as e: + print("%s does not have trainable parameters: %s" % (name, e)) + continue + + self.tracked_steps += 1 diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/__pycache__/patch_embed.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/util/__pycache__/patch_embed.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a79d1452d19ec36eab0a727736962f6875a1168a Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/util/__pycache__/patch_embed.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/__pycache__/pos_embed.cpython-310.pyc b/qa_mdt/audioldm_train/modules/audiomae/util/__pycache__/pos_embed.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e424e9374743c7521a6a41aa11d69bf3952be9b Binary files /dev/null and b/qa_mdt/audioldm_train/modules/audiomae/util/__pycache__/pos_embed.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/crop.py b/qa_mdt/audioldm_train/modules/audiomae/util/crop.py new file mode 100644 index 0000000000000000000000000000000000000000..525e3c783c3d348e593dc89c2b5fb8520918e9ea --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/crop.py @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import math + +import torch + +from torchvision import transforms +from torchvision.transforms import functional as F + + +class RandomResizedCrop(transforms.RandomResizedCrop): + """ + RandomResizedCrop for matching TF/TPU implementation: no for-loop is used. + This may lead to results different with torchvision's version. + Following BYOL's TF code: + https://github.com/deepmind/deepmind-research/blob/master/byol/utils/dataset.py#L206 + """ + + @staticmethod + def get_params(img, scale, ratio): + width, height = F._get_image_size(img) + area = height * width + + target_area = area * torch.empty(1).uniform_(scale[0], scale[1]).item() + log_ratio = torch.log(torch.tensor(ratio)) + aspect_ratio = torch.exp( + torch.empty(1).uniform_(log_ratio[0], log_ratio[1]) + ).item() + + w = int(round(math.sqrt(target_area * aspect_ratio))) + h = int(round(math.sqrt(target_area / aspect_ratio))) + + w = min(w, width) + h = min(h, height) + + i = torch.randint(0, height - h + 1, size=(1,)).item() + j = torch.randint(0, width - w + 1, size=(1,)).item() + + return i, j, h, w diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/datasets.py b/qa_mdt/audioldm_train/modules/audiomae/util/datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..b90f89a7d5f78c31bc9113dd88b632b0c234f10a --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/datasets.py @@ -0,0 +1,67 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
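+# Dataset helpers: build ImageFolder train/val datasets with timm's
+# ImageNet-style training augmentation and a resize + center-crop eval
+# transform (see build_transform below).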
+# -------------------------------------------------------- +# References: +# DeiT: https://github.com/facebookresearch/deit +# -------------------------------------------------------- + +import os +import PIL + +from torchvision import datasets, transforms + +from timm.data import create_transform +from timm.data.constants import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD + + +def build_dataset(is_train, args): + transform = build_transform(is_train, args) + + root = os.path.join(args.data_path, "train" if is_train else "val") + dataset = datasets.ImageFolder(root, transform=transform) + + print(dataset) + + return dataset + + +def build_transform(is_train, args): + mean = IMAGENET_DEFAULT_MEAN + std = IMAGENET_DEFAULT_STD + # train transform + if is_train: + # this should always dispatch to transforms_imagenet_train + transform = create_transform( + input_size=args.input_size, + is_training=True, + color_jitter=args.color_jitter, + auto_augment=args.aa, + interpolation="bicubic", + re_prob=args.reprob, + re_mode=args.remode, + re_count=args.recount, + mean=mean, + std=std, + ) + return transform + + # eval transform + t = [] + if args.input_size <= 224: + crop_pct = 224 / 256 + else: + crop_pct = 1.0 + size = int(args.input_size / crop_pct) + t.append( + transforms.Resize( + size, interpolation=PIL.Image.BICUBIC + ), # to maintain same ratio w.r.t. 224 images + ) + t.append(transforms.CenterCrop(args.input_size)) + + t.append(transforms.ToTensor()) + t.append(transforms.Normalize(mean, std)) + return transforms.Compose(t) diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/lars.py b/qa_mdt/audioldm_train/modules/audiomae/util/lars.py new file mode 100644 index 0000000000000000000000000000000000000000..fc43923d22cf2c9af4ae9166612c3f3477faf254 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/lars.py @@ -0,0 +1,60 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# LARS optimizer, implementation from MoCo v3: +# https://github.com/facebookresearch/moco-v3 +# -------------------------------------------------------- + +import torch + + +class LARS(torch.optim.Optimizer): + """ + LARS optimizer, no rate scaling or weight decay for parameters <= 1D. 
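+ For parameters with more than one dimension, weight decay is added to the
+ gradient and the update is rescaled by the trust ratio
+ trust_coefficient * ||p|| / ||grad + weight_decay * p|| before the momentum
+ buffer and learning rate are applied (see step below).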
+ """ + + def __init__( + self, params, lr=0, weight_decay=0, momentum=0.9, trust_coefficient=0.001 + ): + defaults = dict( + lr=lr, + weight_decay=weight_decay, + momentum=momentum, + trust_coefficient=trust_coefficient, + ) + super().__init__(params, defaults) + + @torch.no_grad() + def step(self): + for g in self.param_groups: + for p in g["params"]: + dp = p.grad + + if dp is None: + continue + + if p.ndim > 1: # if not normalization gamma/beta or bias + dp = dp.add(p, alpha=g["weight_decay"]) + param_norm = torch.norm(p) + update_norm = torch.norm(dp) + one = torch.ones_like(param_norm) + q = torch.where( + param_norm > 0.0, + torch.where( + update_norm > 0, + (g["trust_coefficient"] * param_norm / update_norm), + one, + ), + one, + ) + dp = dp.mul(q) + + param_state = self.state[p] + if "mu" not in param_state: + param_state["mu"] = torch.zeros_like(p) + mu = param_state["mu"] + mu.mul_(g["momentum"]).add_(dp) + p.add_(mu, alpha=-g["lr"]) diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/lr_decay.py b/qa_mdt/audioldm_train/modules/audiomae/util/lr_decay.py new file mode 100644 index 0000000000000000000000000000000000000000..41509d714328163feb4a0fa6234b095e59a12810 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/lr_decay.py @@ -0,0 +1,78 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# References: +# ELECTRA https://github.com/google-research/electra +# BEiT: https://github.com/microsoft/unilm/tree/master/beit +# -------------------------------------------------------- + +import json + + +def param_groups_lrd( + model, weight_decay=0.05, no_weight_decay_list=[], layer_decay=0.75 +): + """ + Parameter groups for layer-wise lr decay + Following BEiT: https://github.com/microsoft/unilm/blob/master/beit/optim_factory.py#L58 + """ + param_group_names = {} + param_groups = {} + + num_layers = len(model.blocks) + 1 + + layer_scales = list(layer_decay ** (num_layers - i) for i in range(num_layers + 1)) + + for n, p in model.named_parameters(): + if not p.requires_grad: + continue + + # no decay: all 1D parameters and model specific ones + if p.ndim == 1 or n in no_weight_decay_list: + g_decay = "no_decay" + this_decay = 0.0 + else: + g_decay = "decay" + this_decay = weight_decay + + layer_id = get_layer_id_for_vit(n, num_layers) + group_name = "layer_%d_%s" % (layer_id, g_decay) + + if group_name not in param_group_names: + this_scale = layer_scales[layer_id] + + param_group_names[group_name] = { + "lr_scale": this_scale, + "weight_decay": this_decay, + "params": [], + } + param_groups[group_name] = { + "lr_scale": this_scale, + "weight_decay": this_decay, + "params": [], + } + + param_group_names[group_name]["params"].append(n) + param_groups[group_name]["params"].append(p) + + # print("parameter groups: \n%s" % json.dumps(param_group_names, indent=2)) + + return list(param_groups.values()) + + +def get_layer_id_for_vit(name, num_layers): + """ + Assign a parameter with its layer id + Following BEiT: https://github.com/microsoft/unilm/blob/master/beit/optim_factory.py#L33 + """ + if name in ["cls_token", "pos_embed"]: + return 0 + elif name.startswith("patch_embed"): + return 0 + elif name.startswith("blocks"): + return int(name.split(".")[1]) + 1 + else: + return num_layers diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/lr_sched.py 
b/qa_mdt/audioldm_train/modules/audiomae/util/lr_sched.py new file mode 100644 index 0000000000000000000000000000000000000000..efe184d8e3fb63ec6b4f83375b6ea719985900de --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/lr_sched.py @@ -0,0 +1,28 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import math + + +def adjust_learning_rate(optimizer, epoch, args): + """Decay the learning rate with half-cycle cosine after warmup""" + if epoch < args.warmup_epochs: + lr = args.lr * epoch / args.warmup_epochs + else: + lr = args.min_lr + (args.lr - args.min_lr) * 0.5 * ( + 1.0 + + math.cos( + math.pi + * (epoch - args.warmup_epochs) + / (args.epochs - args.warmup_epochs) + ) + ) + for param_group in optimizer.param_groups: + if "lr_scale" in param_group: + param_group["lr"] = lr * param_group["lr_scale"] + else: + param_group["lr"] = lr + return lr diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/misc.py b/qa_mdt/audioldm_train/modules/audiomae/util/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..287eec7ed4ebbb35a3bcfbbe1262f50975ebd8dd --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/misc.py @@ -0,0 +1,454 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. +# -------------------------------------------------------- +# References: +# DeiT: https://github.com/facebookresearch/deit +# BEiT: https://github.com/microsoft/unilm/tree/master/beit +# -------------------------------------------------------- + +import builtins +import datetime +import os +import time +from collections import defaultdict, deque +from pathlib import Path + +import torch +import torch.distributed as dist +from torch._six import inf + + +class SmoothedValue(object): + """Track a series of values and provide access to smoothed values over a + window or the global series average. + """ + + def __init__(self, window_size=20, fmt=None): + if fmt is None: + fmt = "{median:.4f} ({global_avg:.4f})" + self.deque = deque(maxlen=window_size) + self.total = 0.0 + self.count = 0 + self.fmt = fmt + + def update(self, value, n=1): + self.deque.append(value) + self.count += n + self.total += value * n + + def synchronize_between_processes(self): + """ + Warning: does not synchronize the deque! 
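+ Only `count` and `total` are all-reduced across processes, so `global_avg`
+ becomes a true global average while `median`, `avg`, `max`, and `value`
+ remain local to the current process.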
+ """ + if not is_dist_avail_and_initialized(): + return + t = torch.tensor([self.count, self.total], dtype=torch.float64, device="cuda") + dist.barrier() + dist.all_reduce(t) + t = t.tolist() + self.count = int(t[0]) + self.total = t[1] + + @property + def median(self): + d = torch.tensor(list(self.deque)) + return d.median().item() + + @property + def avg(self): + d = torch.tensor(list(self.deque), dtype=torch.float32) + return d.mean().item() + + @property + def global_avg(self): + return self.total / self.count + + @property + def max(self): + return max(self.deque) + + @property + def value(self): + return self.deque[-1] + + def __str__(self): + return self.fmt.format( + median=self.median, + avg=self.avg, + global_avg=self.global_avg, + max=self.max, + value=self.value, + ) + + +class MetricLogger(object): + def __init__(self, delimiter="\t"): + self.meters = defaultdict(SmoothedValue) + self.delimiter = delimiter + + def update(self, **kwargs): + for k, v in kwargs.items(): + if v is None: + continue + if isinstance(v, torch.Tensor): + v = v.item() + assert isinstance(v, (float, int)) + self.meters[k].update(v) + + def __getattr__(self, attr): + if attr in self.meters: + return self.meters[attr] + if attr in self.__dict__: + return self.__dict__[attr] + raise AttributeError( + "'{}' object has no attribute '{}'".format(type(self).__name__, attr) + ) + + def __str__(self): + loss_str = [] + for name, meter in self.meters.items(): + loss_str.append("{}: {}".format(name, str(meter))) + return self.delimiter.join(loss_str) + + def synchronize_between_processes(self): + for meter in self.meters.values(): + meter.synchronize_between_processes() + + def add_meter(self, name, meter): + self.meters[name] = meter + + def log_every(self, iterable, print_freq, header=None): + i = 0 + if not header: + header = "" + start_time = time.time() + end = time.time() + iter_time = SmoothedValue(fmt="{avg:.4f}") + data_time = SmoothedValue(fmt="{avg:.4f}") + space_fmt = ":" + str(len(str(len(iterable)))) + "d" + log_msg = [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + ] + if torch.cuda.is_available(): + log_msg.append("max mem: {memory:.0f}") + log_msg = self.delimiter.join(log_msg) + MB = 1024.0 * 1024.0 + for obj in iterable: + data_time.update(time.time() - end) + yield obj + iter_time.update(time.time() - end) + if i % print_freq == 0 or i == len(iterable) - 1: + eta_seconds = iter_time.global_avg * (len(iterable) - i) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + if torch.cuda.is_available(): + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) + else: + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + ) + ) + i += 1 + end = time.time() + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print( + "{} Total time: {} ({:.4f} s / it)".format( + header, total_time_str, total_time / len(iterable) + ) + ) + + +def setup_for_distributed(is_master): + """ + This function disables printing when not in master process + """ + builtin_print = builtins.print + + def print(*args, **kwargs): + force = kwargs.pop("force", False) + force = force or (get_world_size() > 8) + if is_master or force: + now = datetime.datetime.now().time() + builtin_print("[{}] 
".format(now), end="") # print with time stamp + builtin_print(*args, **kwargs) + + builtins.print = print + + +def is_dist_avail_and_initialized(): + if not dist.is_available(): + return False + if not dist.is_initialized(): + return False + return True + + +def get_world_size(): + if not is_dist_avail_and_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank(): + if not is_dist_avail_and_initialized(): + return 0 + return dist.get_rank() + + +def is_main_process(): + return get_rank() == 0 + + +def save_on_master(*args, **kwargs): + if is_main_process(): + torch.save(*args, **kwargs) + + +def init_distributed_mode(args): + if args.dist_on_itp: + args.rank = int(os.environ["OMPI_COMM_WORLD_RANK"]) + args.world_size = int(os.environ["OMPI_COMM_WORLD_SIZE"]) + args.gpu = int(os.environ["OMPI_COMM_WORLD_LOCAL_RANK"]) + args.dist_url = "tcp://%s:%s" % ( + os.environ["MASTER_ADDR"], + os.environ["MASTER_PORT"], + ) + os.environ["LOCAL_RANK"] = str(args.gpu) + os.environ["RANK"] = str(args.rank) + os.environ["WORLD_SIZE"] = str(args.world_size) + # ["RANK", "WORLD_SIZE", "MASTER_ADDR", "MASTER_PORT", "LOCAL_RANK"] + elif "RANK" in os.environ and "WORLD_SIZE" in os.environ: + args.rank = int(os.environ["RANK"]) + args.world_size = int(os.environ["WORLD_SIZE"]) + args.gpu = int(os.environ["LOCAL_RANK"]) + elif "SLURM_PROCID" in os.environ: + args.rank = int(os.environ["SLURM_PROCID"]) + args.gpu = args.rank % torch.cuda.device_count() + else: + print("Not using distributed mode") + setup_for_distributed(is_master=True) # hack + args.distributed = False + return + + args.distributed = True + + torch.cuda.set_device(args.gpu) + args.dist_backend = "nccl" + print( + "| distributed init (rank {}): {}, gpu {}".format( + args.rank, args.dist_url, args.gpu + ), + flush=True, + ) + torch.distributed.init_process_group( + backend=args.dist_backend, + init_method=args.dist_url, + world_size=args.world_size, + rank=args.rank, + ) + torch.distributed.barrier() + setup_for_distributed(args.rank == 0) + + +class NativeScalerWithGradNormCount: + state_dict_key = "amp_scaler" + + def __init__(self): + self._scaler = torch.cuda.amp.GradScaler() + + def __call__( + self, + loss, + optimizer, + clip_grad=None, + parameters=None, + create_graph=False, + update_grad=True, + ): + self._scaler.scale(loss).backward(create_graph=create_graph) + if update_grad: + if clip_grad is not None: + assert parameters is not None + self._scaler.unscale_( + optimizer + ) # unscale the gradients of optimizer's assigned params in-place + norm = torch.nn.utils.clip_grad_norm_(parameters, clip_grad) + else: + self._scaler.unscale_(optimizer) + norm = get_grad_norm_(parameters) + self._scaler.step(optimizer) + self._scaler.update() + else: + norm = None + return norm + + def state_dict(self): + return self._scaler.state_dict() + + def load_state_dict(self, state_dict): + self._scaler.load_state_dict(state_dict) + + +def get_grad_norm_(parameters, norm_type: float = 2.0) -> torch.Tensor: + if isinstance(parameters, torch.Tensor): + parameters = [parameters] + parameters = [p for p in parameters if p.grad is not None] + norm_type = float(norm_type) + if len(parameters) == 0: + return torch.tensor(0.0) + device = parameters[0].grad.device + if norm_type == inf: + total_norm = max(p.grad.detach().abs().max().to(device) for p in parameters) + else: + total_norm = torch.norm( + torch.stack( + [torch.norm(p.grad.detach(), norm_type).to(device) for p in parameters] + ), + norm_type, + ) + return total_norm + + +def 
save_model(args, epoch, model, model_without_ddp, optimizer, loss_scaler): + output_dir = Path(args.output_dir) + epoch_name = str(epoch) + if loss_scaler is not None: + checkpoint_paths = [output_dir / ("checkpoint-%s.pth" % epoch_name)] + for checkpoint_path in checkpoint_paths: + to_save = { + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "epoch": epoch, + "scaler": loss_scaler.state_dict(), + "args": args, + } + + save_on_master(to_save, checkpoint_path) + else: + client_state = {"epoch": epoch} + model.save_checkpoint( + save_dir=args.output_dir, + tag="checkpoint-%s" % epoch_name, + client_state=client_state, + ) + + +def load_model(args, model_without_ddp, optimizer, loss_scaler): + if args.resume: + if args.resume.startswith("https"): + checkpoint = torch.hub.load_state_dict_from_url( + args.resume, map_location="cpu", check_hash=True + ) + else: + checkpoint = torch.load(args.resume, map_location="cpu") + model_without_ddp.load_state_dict(checkpoint["model"]) + print("Resume checkpoint %s" % args.resume) + if ( + "optimizer" in checkpoint + and "epoch" in checkpoint + and not (hasattr(args, "eval") and args.eval) + ): + optimizer.load_state_dict(checkpoint["optimizer"]) + args.start_epoch = checkpoint["epoch"] + 1 + if "scaler" in checkpoint: + loss_scaler.load_state_dict(checkpoint["scaler"]) + print("With optim & sched!") + + +def all_reduce_mean(x): + world_size = get_world_size() + if world_size > 1: + x_reduce = torch.tensor(x).cuda() + dist.all_reduce(x_reduce) + x_reduce /= world_size + return x_reduce.item() + else: + return x + + +# utils +@torch.no_grad() +def concat_all_gather(tensor): + """ + Performs all_gather operation on the provided tensors. + *** Warning ***: torch.distributed.all_gather has no gradient. 
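+ The per-rank tensors are concatenated along dim 0, so the returned tensor's
+ batch dimension is world_size times the local batch size.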
+ """ + tensors_gather = [ + torch.ones_like(tensor) for _ in range(torch.distributed.get_world_size()) + ] + torch.distributed.all_gather(tensors_gather, tensor, async_op=False) + + output = torch.cat(tensors_gather, dim=0) + return output + + +def merge_vmae_to_avmae(avmae_state_dict, vmae_ckpt): + # keys_to_copy=['pos_embed','patch_embed'] + # replaced=0 + + vmae_ckpt["cls_token"] = vmae_ckpt["cls_token_v"] + vmae_ckpt["mask_token"] = vmae_ckpt["mask_token_v"] + + # pos_emb % not trainable, use default + pos_embed_v = vmae_ckpt["pos_embed_v"] # 1,589,768 + pos_embed = pos_embed_v[:, 1:, :] # 1,588,768 + cls_embed = pos_embed_v[:, 0, :].unsqueeze(1) + pos_embed = pos_embed.reshape(1, 2, 14, 14, 768).sum(dim=1) # 1, 14, 14, 768 + print("Position interpolate from 14,14 to 64,8") + pos_embed = pos_embed.permute(0, 3, 1, 2) # 1, 14,14,768 -> 1,768,14,14 + pos_embed = torch.nn.functional.interpolate( + pos_embed, size=(64, 8), mode="bicubic", align_corners=False + ) + pos_embed = pos_embed.permute(0, 2, 3, 1).flatten( + 1, 2 + ) # 1, 14, 14, 768 => 1, 196,768 + pos_embed = torch.cat((cls_embed, pos_embed), dim=1) + assert vmae_ckpt["pos_embed"].shape == pos_embed.shape + vmae_ckpt["pos_embed"] = pos_embed + # patch_emb + # aggregate 3 channels in video-rgb ckpt to 1 channel for audio + v_weight = vmae_ckpt["patch_embed_v.proj.weight"] # 768,3,2,16,16 + new_proj_weight = torch.nn.Parameter(v_weight.sum(dim=2).sum(dim=1).unsqueeze(1)) + assert new_proj_weight.shape == vmae_ckpt["patch_embed.proj.weight"].shape + vmae_ckpt["patch_embed.proj.weight"] = new_proj_weight + vmae_ckpt["patch_embed.proj.bias"] = vmae_ckpt["patch_embed_v.proj.bias"] + + # hack + vmae_ckpt["norm.weight"] = vmae_ckpt["norm_v.weight"] + vmae_ckpt["norm.bias"] = vmae_ckpt["norm_v.bias"] + + # replace transformer encoder + for k, v in vmae_ckpt.items(): + if k.startswith("blocks."): + kk = k.replace("blocks.", "blocks_v.") + vmae_ckpt[k] = vmae_ckpt[kk] + elif k.startswith("blocks_v."): + pass + else: + print(k) + pass + print(k) diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/patch_embed.py b/qa_mdt/audioldm_train/modules/audiomae/util/patch_embed.py new file mode 100644 index 0000000000000000000000000000000000000000..ac1e4d436c6f79aef9bf1de32cdac5d4f037c775 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/patch_embed.py @@ -0,0 +1,127 @@ +import torch +import torch.nn as nn +from timm.models.layers import to_2tuple + + +class PatchEmbed_org(nn.Module): + """Image to Patch Embedding""" + + def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + num_patches = (img_size[1] // patch_size[1]) * (img_size[0] // patch_size[0]) + self.patch_hw = (img_size[1] // patch_size[1], img_size[0] // patch_size[0]) + self.img_size = img_size + self.patch_size = patch_size + self.num_patches = num_patches + + self.proj = nn.Conv2d( + in_chans, embed_dim, kernel_size=patch_size, stride=patch_size + ) + + def forward(self, x): + B, C, H, W = x.shape + # FIXME look at relaxing size constraints + # assert H == self.img_size[0] and W == self.img_size[1], \ + # f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})." 
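+ # Conv2d with kernel_size == stride == patch_size projects each
+ # non-overlapping patch to an embed_dim vector; flatten + transpose then
+ # yield a (B, num_patches, embed_dim) token sequence.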
+ x = self.proj(x) + y = x.flatten(2).transpose(1, 2) + return y + + +class PatchEmbed_new(nn.Module): + """Flexible Image to Patch Embedding""" + + def __init__( + self, img_size=224, patch_size=16, in_chans=3, embed_dim=768, stride=10 + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + stride = to_2tuple(stride) + + self.img_size = img_size + self.patch_size = patch_size + + self.proj = nn.Conv2d( + in_chans, embed_dim, kernel_size=patch_size, stride=stride + ) # with overlapped patches + # self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size) + + # self.patch_hw = (img_size[1] // patch_size[1], img_size[0] // patch_size[0]) + # self.num_patches = (img_size[1] // patch_size[1]) * (img_size[0] // patch_size[0]) + _, _, h, w = self.get_output_shape(img_size) # n, emb_dim, h, w + self.patch_hw = (h, w) + self.num_patches = h * w + + def get_output_shape(self, img_size): + # todo: don't be lazy.. + return self.proj(torch.randn(1, 1, img_size[0], img_size[1])).shape + + def forward(self, x): + B, C, H, W = x.shape + # FIXME look at relaxing size constraints + # assert H == self.img_size[0] and W == self.img_size[1], \ + # f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})." + # x = self.proj(x).flatten(2).transpose(1, 2) + x = self.proj(x) # 32, 1, 1024, 128 -> 32, 768, 101, 12 + x = x.flatten(2) # 32, 768, 101, 12 -> 32, 768, 1212 + x = x.transpose(1, 2) # 32, 768, 1212 -> 32, 1212, 768 + return x + + +class PatchEmbed3D_new(nn.Module): + """Flexible Image to Patch Embedding""" + + def __init__( + self, + video_size=(16, 224, 224), + patch_size=(2, 16, 16), + in_chans=3, + embed_dim=768, + stride=(2, 16, 16), + ): + super().__init__() + + self.video_size = video_size + self.patch_size = patch_size + self.in_chans = in_chans + + self.proj = nn.Conv3d( + in_chans, embed_dim, kernel_size=patch_size, stride=stride + ) + _, _, t, h, w = self.get_output_shape(video_size) # n, emb_dim, h, w + self.patch_thw = (t, h, w) + self.num_patches = t * h * w + + def get_output_shape(self, video_size): + # todo: don't be lazy.. + return self.proj( + torch.randn(1, self.in_chans, video_size[0], video_size[1], video_size[2]) + ).shape + + def forward(self, x): + B, C, T, H, W = x.shape + x = self.proj(x) # 32, 3, 16, 224, 224 -> 32, 768, 8, 14, 14 + x = x.flatten(2) # 32, 768, 1568 + x = x.transpose(1, 2) # 32, 768, 1568 -> 32, 1568, 768 + return x + + +if __name__ == "__main__": + # patch_emb = PatchEmbed_new(img_size=224, patch_size=16, in_chans=1, embed_dim=64, stride=(16,16)) + # input = torch.rand(8,1,1024,128) + # output = patch_emb(input) + # print(output.shape) # (8,512,64) + + patch_emb = PatchEmbed3D_new( + video_size=(6, 224, 224), + patch_size=(2, 16, 16), + in_chans=3, + embed_dim=768, + stride=(2, 16, 16), + ) + input = torch.rand(8, 3, 6, 224, 224) + output = patch_emb(input) + print(output.shape) # (8,64) diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/pos_embed.py b/qa_mdt/audioldm_train/modules/audiomae/util/pos_embed.py new file mode 100644 index 0000000000000000000000000000000000000000..785e3d3e6cd079f1a23b3fc6ba66f2992582dffa --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/pos_embed.py @@ -0,0 +1,205 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
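+# Provides fixed 2D sine-cosine position embeddings and helpers that
+# interpolate checkpoint position embeddings to new (possibly non-square)
+# grid sizes for audio spectrogram inputs.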
+# -------------------------------------------------------- +# Position embedding utils +# -------------------------------------------------------- + +import numpy as np + +import torch + +# -------------------------------------------------------- +# 2D sine-cosine position embedding +# References: +# Transformer: https://github.com/tensorflow/models/blob/master/official/nlp/transformer/model_utils.py +# MoCo v3: https://github.com/facebookresearch/moco-v3 +# -------------------------------------------------------- +def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + grid_h = np.arange(grid_size, dtype=np.float32) + grid_w = np.arange(grid_size, dtype=np.float32) + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + + grid = grid.reshape([2, 1, grid_size, grid_size]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token: + pos_embed = np.concatenate([np.zeros([1, embed_dim]), pos_embed], axis=0) + return pos_embed + + +def get_2d_sincos_pos_embed_flexible(embed_dim, grid_size, cls_token=False): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + grid_h = np.arange(grid_size[0], dtype=np.float32) + grid_w = np.arange(grid_size[1], dtype=np.float32) + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + + grid = grid.reshape([2, 1, grid_size[0], grid_size[1]]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token: + pos_embed = np.concatenate([np.zeros([1, embed_dim]), pos_embed], axis=0) + return pos_embed + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (M,) + out: (M, D) + """ + assert embed_dim % 2 == 0 + # omega = np.arange(embed_dim // 2, dtype=np.float) + omega = np.arange(embed_dim // 2, dtype=float) + omega /= embed_dim / 2.0 + omega = 1.0 / 10000**omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum("m,d->md", pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb + + +# -------------------------------------------------------- +# Interpolate position embeddings for high-resolution +# References: +# DeiT: https://github.com/facebookresearch/deit +# -------------------------------------------------------- +def interpolate_pos_embed(model, checkpoint_model): + if "pos_embed" in checkpoint_model: + pos_embed_checkpoint = checkpoint_model["pos_embed"] + embedding_size = pos_embed_checkpoint.shape[-1] + num_patches = model.patch_embed.num_patches + num_extra_tokens = model.pos_embed.shape[-2] - num_patches + # height (== width) for the checkpoint position embedding + orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 
0.5) + # height (== width) for the new position embedding + new_size = int(num_patches**0.5) + # class_token and dist_token are kept unchanged + if orig_size != new_size: + print( + "Position interpolate from %dx%d to %dx%d" + % (orig_size, orig_size, new_size, new_size) + ) + extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] + # only the position tokens are interpolated + pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:] + pos_tokens = pos_tokens.reshape( + -1, orig_size, orig_size, embedding_size + ).permute(0, 3, 1, 2) + pos_tokens = torch.nn.functional.interpolate( + pos_tokens, + size=(new_size, new_size), + mode="bicubic", + align_corners=False, + ) + pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) + new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1) + checkpoint_model["pos_embed"] = new_pos_embed + + +def interpolate_pos_embed_img2audio(model, checkpoint_model, orig_size, new_size): + if "pos_embed" in checkpoint_model: + pos_embed_checkpoint = checkpoint_model["pos_embed"] + embedding_size = pos_embed_checkpoint.shape[-1] + num_patches = model.patch_embed.num_patches + num_extra_tokens = model.pos_embed.shape[-2] - num_patches + # height (== width) for the checkpoint position embedding + # orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5) + # height (== width) for the new position embedding + # new_size = int(num_patches ** 0.5) + # class_token and dist_token are kept unchanged + if orig_size != new_size: + print( + "Position interpolate from %dx%d to %dx%d" + % (orig_size[0], orig_size[1], new_size[0], new_size[1]) + ) + extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] + # only the position tokens are interpolated + pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:] + pos_tokens = pos_tokens.reshape( + -1, orig_size[0], orig_size[1], embedding_size + ).permute(0, 3, 1, 2) + pos_tokens = torch.nn.functional.interpolate( + pos_tokens, + size=(new_size[0], new_size[1]), + mode="bicubic", + align_corners=False, + ) + pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) + new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1) + checkpoint_model["pos_embed"] = new_pos_embed + + +def interpolate_pos_embed_audio(model, checkpoint_model, orig_size, new_size): + if "pos_embed" in checkpoint_model: + pos_embed_checkpoint = checkpoint_model["pos_embed"] + embedding_size = pos_embed_checkpoint.shape[-1] + num_patches = model.patch_embed.num_patches + num_extra_tokens = model.pos_embed.shape[-2] - num_patches + if orig_size != new_size: + print( + "Position interpolate from %dx%d to %dx%d" + % (orig_size[0], orig_size[1], new_size[0], new_size[1]) + ) + # extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] + # only the position tokens are interpolated + cls_token = pos_embed_checkpoint[:, 0, :].unsqueeze(1) + pos_tokens = pos_embed_checkpoint[:, 1:, :] # remove + pos_tokens = pos_tokens.reshape( + -1, orig_size[0], orig_size[1], embedding_size + ) # .permute(0, 3, 1, 2) + # pos_tokens = torch.nn.functional.interpolate( + # pos_tokens, size=(new_size[0], new_size[1]), mode='bicubic', align_corners=False) + + # pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) + pos_tokens = pos_tokens[:, :, : new_size[1], :] # assume only time diff + pos_tokens = pos_tokens.flatten(1, 2) + new_pos_embed = torch.cat((cls_token, pos_tokens), dim=1) + checkpoint_model["pos_embed"] = new_pos_embed + + +def interpolate_patch_embed_audio( + model, + checkpoint_model, + orig_channel, + new_channel=1, + kernel_size=(16, 
16), + stride=(16, 16), + padding=(0, 0), +): + if orig_channel != new_channel: + if "patch_embed.proj.weight" in checkpoint_model: + # aggregate 3 channels in rgb ckpt to 1 channel for audio + new_proj_weight = torch.nn.Parameter( + torch.sum(checkpoint_model["patch_embed.proj.weight"], dim=1).unsqueeze( + 1 + ) + ) + checkpoint_model["patch_embed.proj.weight"] = new_proj_weight diff --git a/qa_mdt/audioldm_train/modules/audiomae/util/stat.py b/qa_mdt/audioldm_train/modules/audiomae/util/stat.py new file mode 100644 index 0000000000000000000000000000000000000000..1148af87ffcbcf3d5a589c632617e72f00b98b92 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/audiomae/util/stat.py @@ -0,0 +1,77 @@ +import numpy as np +from scipy import stats +from sklearn import metrics +import torch + + +def d_prime(auc): + standard_normal = stats.norm() + d_prime = standard_normal.ppf(auc) * np.sqrt(2.0) + return d_prime + + +@torch.no_grad() +def concat_all_gather(tensor): + """ + Performs all_gather operation on the provided tensors. + *** Warning ***: torch.distributed.all_gather has no gradient. + """ + tensors_gather = [ + torch.ones_like(tensor) for _ in range(torch.distributed.get_world_size()) + ] + torch.distributed.all_gather(tensors_gather, tensor, async_op=False) + + output = torch.cat(tensors_gather, dim=0) + return output + + +def calculate_stats(output, target): + """Calculate statistics including mAP, AUC, etc. + + Args: + output: 2d array, (samples_num, classes_num) + target: 2d array, (samples_num, classes_num) + + Returns: + stats: list of statistic of each class. + """ + + classes_num = target.shape[-1] + stats = [] + + # Accuracy, only used for single-label classification such as esc-50, not for multiple label one such as AudioSet + acc = metrics.accuracy_score(np.argmax(target, 1), np.argmax(output, 1)) + + # Class-wise statistics + for k in range(classes_num): + + # Average precision + avg_precision = metrics.average_precision_score( + target[:, k], output[:, k], average=None + ) + + # AUC + # auc = metrics.roc_auc_score(target[:, k], output[:, k], average=None) + + # Precisions, recalls + (precisions, recalls, thresholds) = metrics.precision_recall_curve( + target[:, k], output[:, k] + ) + + # FPR, TPR + (fpr, tpr, thresholds) = metrics.roc_curve(target[:, k], output[:, k]) + + save_every_steps = 1000 # Sample statistics to reduce size + dict = { + "precisions": precisions[0::save_every_steps], + "recalls": recalls[0::save_every_steps], + "AP": avg_precision, + "fpr": fpr[0::save_every_steps], + "fnr": 1.0 - tpr[0::save_every_steps], + # 'auc': auc, + # note acc is not class-wise, this is just to keep consistent with other metrics + "acc": acc, + } + stats.append(dict) + + return stats diff --git a/qa_mdt/audioldm_train/modules/clap/__init__.py b/qa_mdt/audioldm_train/modules/clap/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/audioldm_train/modules/clap/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ce15db9ac6078e2450dfc8e284cc499e5e4fabd Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__init__.py b/qa_mdt/audioldm_train/modules/clap/open_clip/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e9f728f2f273be5d5fdbec6c6cc41d737176a8c0 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/__init__.py @@ -0,0 +1,25 @@ +from .factory import ( + list_models, + create_model, + create_model_and_transforms, + add_model_config, +) +from .loss import ClipLoss, gather_features, LPLoss, lp_gather_features, LPMetrics +from .model import ( + CLAP, + CLAPTextCfg, + CLAPVisionCfg, + CLAPAudioCfp, + convert_weights_to_fp16, + trace_model, +) +from .openai import load_openai_model, list_openai_models +from .pretrained import ( + list_pretrained, + list_pretrained_tag_models, + list_pretrained_model_tags, + get_pretrained_url, + download_pretrained, +) +from .tokenizer import SimpleTokenizer, tokenize +from .transform import image_transform diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4af4d2e11e37923a70e8970b27ac4147220aca6b Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/__init__.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a51d84f782a3b877f12c5575c0b8c6add45fd376 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/__init__.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/factory.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/factory.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f20f1762fa111b36ed2ee95a29d91e86ff0b63a7 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/factory.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/factory.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/factory.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a0b04d56dadd20363286c8033a7fad87f688c39d Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/factory.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/feature_fusion.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/feature_fusion.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f1f429dfbe292da5246a7b54742dcf0a6daece66 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/feature_fusion.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/feature_fusion.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/feature_fusion.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4758cd953255cd047a7fbc956982a9b787cb0cc Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/feature_fusion.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/htsat.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/htsat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..998c43e4f049c7dcf43cc0cfa779bf09ffdfe0f2 Binary files 
/dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/htsat.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/htsat.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/htsat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6f888e272904c61355a37099e6e8cea935734001 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/htsat.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/loss.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/loss.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e4335954faa0b94dc6cae28c86ef50177dfee39 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/loss.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/loss.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/loss.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..334d1438862d7a768da35f91a8ebad40da841a57 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/loss.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/model.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..153564d5c39b30f0c83a6c74e3b9ce1b8ae6f2a2 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/model.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/model.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/model.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9baec6897022657c2f0291e95c8f55410c75a20f Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/model.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/openai.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/openai.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..193ade09157cc5c8e846c16433cfcf046a651257 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/openai.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/openai.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/openai.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e9d86255fd9d2d4d8dbb6ea2dac7775ca81f7e63 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/openai.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pann_model.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pann_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5e8465a30943f3927266c81f7da766efd39d5d8 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pann_model.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pann_model.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pann_model.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..fe275c1ed1587ba1ca28f4b03967dba06b9f0a33 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pann_model.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pretrained.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pretrained.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cf08c9bf6fe0e4483cbefa02628f934c813a3653 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pretrained.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pretrained.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pretrained.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38cff89a3be2fb65508697a7aa98427d8ec3b688 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/pretrained.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/timm_model.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/timm_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8c06fa3d2153d2dd5f6d07ab9d0d05ff11497fc0 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/timm_model.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/timm_model.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/timm_model.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb85aa2ed1be010bf8330005f9ac313be4d179ab Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/timm_model.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/tokenizer.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/tokenizer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..31ecca34d6550e57e48bef091a8361e0c6c0346c Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/tokenizer.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/tokenizer.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/tokenizer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8dc01f908477293b4e8e507a39ace9e2493cf8ac Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/tokenizer.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/transform.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/transform.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb29f3b57b17bea42078003fd83d4ecc3ae459fa Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/transform.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/transform.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/transform.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..15f4f1f0052c10252abe6686f23c2226d4966adc Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/transform.cpython-38.pyc differ diff --git 
a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/utils.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a338f120c6d3827f4209da224e9750c9ca4558aa Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/utils.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/utils.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..552d4ca3ab982ec564f5e47dd1ef90a6c3646d35 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/open_clip/__pycache__/utils.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/bert.py b/qa_mdt/audioldm_train/modules/clap/open_clip/bert.py new file mode 100644 index 0000000000000000000000000000000000000000..a83d96d2a77ed05198efc05837522bc88d2499cc --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/bert.py @@ -0,0 +1,40 @@ +from transformers import BertTokenizer, BertModel + +tokenizer = BertTokenizer.from_pretrained("bert-base-uncased") +model = BertModel.from_pretrained("bert-base-uncased") +text = "Replace me by any text you'd like." + + +def bert_embeddings(text): + # text = "Replace me by any text you'd like." + encoded_input = tokenizer(text, return_tensors="pt") + output = model(**encoded_input) + return output + + +from transformers import RobertaTokenizer, RobertaModel + +tokenizer = RobertaTokenizer.from_pretrained("roberta-base") +model = RobertaModel.from_pretrained("roberta-base") +text = "Replace me by any text you'd like." + + +def Roberta_embeddings(text): + # text = "Replace me by any text you'd like." + encoded_input = tokenizer(text, return_tensors="pt") + output = model(**encoded_input) + return output + + +from transformers import BartTokenizer, BartModel + +tokenizer = BartTokenizer.from_pretrained("facebook/bart-base") +model = BartModel.from_pretrained("facebook/bart-base") +text = "Replace me by any text you'd like." + + +def bart_embeddings(text): + # text = "Replace me by any text you'd like." 
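The helpers in `bert.py` above simply run a caption through a frozen Hugging Face encoder loaded at module import time. A usage sketch of the same pattern with the RoBERTa pair (the caption is an arbitrary example; weights are downloaded on first use):

```python
from transformers import RobertaTokenizer, RobertaModel

tokenizer = RobertaTokenizer.from_pretrained("roberta-base")
model = RobertaModel.from_pretrained("roberta-base")

# Mirrors Roberta_embeddings above: tokenize, then forward through the frozen encoder.
out = model(**tokenizer("a calm piano melody over soft rain", return_tensors="pt"))
print(out.last_hidden_state.shape)   # (1, seq_len, 768) token-level features
print(out.pooler_output.shape)       # (1, 768) pooled sentence embedding
```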
+ encoded_input = tokenizer(text, return_tensors="pt") + output = model(**encoded_input) + return output diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/bpe_simple_vocab_16e6.txt.gz b/qa_mdt/audioldm_train/modules/clap/open_clip/bpe_simple_vocab_16e6.txt.gz new file mode 100644 index 0000000000000000000000000000000000000000..36a15856e00a06a9fbed8cdd34d2393fea4a3113 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/bpe_simple_vocab_16e6.txt.gz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:924691ac288e54409236115652ad4aa250f48203de50a9e4722a6ecd48d6804a +size 1356917 diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/factory.py b/qa_mdt/audioldm_train/modules/clap/open_clip/factory.py new file mode 100644 index 0000000000000000000000000000000000000000..1ff7f6ef13384db126120c74bdc9048f19174896 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/factory.py @@ -0,0 +1,284 @@ +import json +import logging +import os +import pathlib +import re +from copy import deepcopy +from pathlib import Path + +import torch + +from .model import CLAP, convert_weights_to_fp16 +from .openai import load_openai_model +from .pretrained import get_pretrained_url, download_pretrained +from .transform import image_transform + +_MODEL_CONFIG_PATHS = [Path(__file__).parent / f"model_configs/"] +_MODEL_CONFIGS = {} # directory (model_name: config) of model architecture configs + + +def _natural_key(string_): + return [int(s) if s.isdigit() else s for s in re.split(r"(\d+)", string_.lower())] + + +def _rescan_model_configs(): + global _MODEL_CONFIGS + + config_ext = (".json",) + config_files = [] + for config_path in _MODEL_CONFIG_PATHS: + if config_path.is_file() and config_path.suffix in config_ext: + config_files.append(config_path) + elif config_path.is_dir(): + for ext in config_ext: + config_files.extend(config_path.glob(f"*{ext}")) + + for cf in config_files: + if os.path.basename(cf)[0] == ".": + continue # Ignore hidden files + + with open(cf, "r") as f: + model_cfg = json.load(f) + if all(a in model_cfg for a in ("embed_dim", "audio_cfg", "text_cfg")): + _MODEL_CONFIGS[cf.stem] = model_cfg + + _MODEL_CONFIGS = { + k: v + for k, v in sorted(_MODEL_CONFIGS.items(), key=lambda x: _natural_key(x[0])) + } + + +_rescan_model_configs() # initial populate of model config registry + + +def load_state_dict(checkpoint_path: str, map_location="cpu", skip_params=True): + checkpoint = torch.load(checkpoint_path, map_location=map_location) + if isinstance(checkpoint, dict) and "state_dict" in checkpoint: + state_dict = checkpoint["state_dict"] + else: + state_dict = checkpoint + if skip_params: + if next(iter(state_dict.items()))[0].startswith("module"): + state_dict = {k[7:]: v for k, v in state_dict.items()} + # for k in state_dict: + # if k.startswith('transformer'): + # v = state_dict.pop(k) + # state_dict['text_branch.' 
+ k[12:]] = v + return state_dict + + +def create_model( + amodel_name: str, + tmodel_name: str, + pretrained: str = "", + precision: str = "fp32", + device: torch.device = torch.device("cpu"), + jit: bool = False, + force_quick_gelu: bool = False, + openai_model_cache_dir: str = os.path.expanduser("~/.cache/clip"), + skip_params=True, + pretrained_audio: str = "", + pretrained_text: str = "", + enable_fusion: bool = False, + fusion_type: str = "None" + # pretrained_image: bool = False, +): + amodel_name = amodel_name.replace( + "/", "-" + ) # for callers using old naming with / in ViT names + pretrained_orig = pretrained + pretrained = pretrained.lower() + if pretrained == "openai": + if amodel_name in _MODEL_CONFIGS: + logging.info(f"Loading {amodel_name} model config.") + model_cfg = deepcopy(_MODEL_CONFIGS[amodel_name]) + else: + logging.error( + f"Model config for {amodel_name} not found; available models {list_models()}." + ) + raise RuntimeError(f"Model config for {amodel_name} not found.") + + logging.info(f"Loading pretrained ViT-B-16 text encoder from OpenAI.") + # Hard Code in model name + model_cfg["text_cfg"]["model_type"] = tmodel_name + model = load_openai_model( + "ViT-B-16", + model_cfg, + device=device, + jit=jit, + cache_dir=openai_model_cache_dir, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + # See https://discuss.pytorch.org/t/valueerror-attemting-to-unscale-fp16-gradients/81372 + if precision == "amp" or precision == "fp32": + model = model.float() + else: + if amodel_name in _MODEL_CONFIGS: + logging.info(f"Loading {amodel_name} model config.") + model_cfg = deepcopy(_MODEL_CONFIGS[amodel_name]) + else: + logging.error( + f"Model config for {amodel_name} not found; available models {list_models()}." + ) + raise RuntimeError(f"Model config for {amodel_name} not found.") + + if force_quick_gelu: + # override for use of QuickGELU on non-OpenAI transformer models + model_cfg["quick_gelu"] = True + + # if pretrained_image: + # if 'timm_amodel_name' in model_cfg.get('vision_cfg', {}): + # # pretrained weight loading for timm models set via vision_cfg + # model_cfg['vision_cfg']['timm_model_pretrained'] = True + # else: + # assert False, 'pretrained image towers currently only supported for timm models' + model_cfg["text_cfg"]["model_type"] = tmodel_name + model_cfg["enable_fusion"] = enable_fusion + model_cfg["fusion_type"] = fusion_type + model = CLAP(**model_cfg) + + if pretrained: + checkpoint_path = "" + url = get_pretrained_url(amodel_name, pretrained) + if url: + checkpoint_path = download_pretrained(url, root=openai_model_cache_dir) + elif os.path.exists(pretrained_orig): + checkpoint_path = pretrained_orig + if checkpoint_path: + logging.info( + f"Loading pretrained {amodel_name}-{tmodel_name} weights ({pretrained})." + ) + # import pdb + # pdb.set_trace() + ckpt = load_state_dict(checkpoint_path, skip_params=True) + from collections import OrderedDict + new_state_dict = OrderedDict() + for k, v in ckpt.items(): + if k in model.state_dict(): + new_state_dict[k] = v + model.load_state_dict(new_state_dict) + param_names = [n for n, p in model.named_parameters()] + # for n in param_names: + # print(n, "\t", "Loaded" if n in ckpt else "Unloaded") + else: + logging.warning( + f"Pretrained weights ({pretrained}) not found for model {amodel_name}." + ) + raise RuntimeError( + f"Pretrained weights ({pretrained}) not found for model {amodel_name}." 
+ ) + + if pretrained_audio: + if amodel_name.startswith("PANN"): + if "Cnn14_mAP" in pretrained_audio: # official checkpoint + audio_ckpt = torch.load(pretrained_audio, map_location="cpu") + audio_ckpt = audio_ckpt["model"] + keys = list(audio_ckpt.keys()) + for key in keys: + if ( + "spectrogram_extractor" not in key + and "logmel_extractor" not in key + ): + v = audio_ckpt.pop(key) + audio_ckpt["audio_branch." + key] = v + elif os.path.basename(pretrained_audio).startswith( + "PANN" + ): # checkpoint trained via HTSAT codebase + audio_ckpt = torch.load(pretrained_audio, map_location="cpu") + audio_ckpt = audio_ckpt["state_dict"] + keys = list(audio_ckpt.keys()) + for key in keys: + if key.startswith("sed_model"): + v = audio_ckpt.pop(key) + audio_ckpt["audio_branch." + key[10:]] = v + elif os.path.basename(pretrained_audio).startswith( + "finetuned" + ): # checkpoint trained via linear probe codebase + audio_ckpt = torch.load(pretrained_audio, map_location="cpu") + else: + raise ValueError("Unknown audio checkpoint") + elif amodel_name.startswith("HTSAT"): + if "HTSAT_AudioSet_Saved" in pretrained_audio: # official checkpoint + audio_ckpt = torch.load(pretrained_audio, map_location="cpu") + audio_ckpt = audio_ckpt["state_dict"] + keys = list(audio_ckpt.keys()) + for key in keys: + if key.startswith("sed_model") and ( + "spectrogram_extractor" not in key + and "logmel_extractor" not in key + ): + v = audio_ckpt.pop(key) + audio_ckpt["audio_branch." + key[10:]] = v + elif os.path.basename(pretrained_audio).startswith( + "HTSAT" + ): # checkpoint trained via HTSAT codebase + audio_ckpt = torch.load(pretrained_audio, map_location="cpu") + audio_ckpt = audio_ckpt["state_dict"] + keys = list(audio_ckpt.keys()) + for key in keys: + if key.startswith("sed_model"): + v = audio_ckpt.pop(key) + audio_ckpt["audio_branch." + key[10:]] = v + elif os.path.basename(pretrained_audio).startswith( + "finetuned" + ): # checkpoint trained via linear probe codebase + audio_ckpt = torch.load(pretrained_audio, map_location="cpu") + else: + raise ValueError("Unknown audio checkpoint") + else: + raise f"this audio encoder pretrained checkpoint is not support" + + model.load_state_dict(audio_ckpt, strict=False) + logging.info( + f"Loading pretrained {amodel_name} weights ({pretrained_audio})." 
+ ) + param_names = [n for n, p in model.named_parameters()] + for n in param_names: + print(n, "\t", "Loaded" if n in audio_ckpt else "Unloaded") + + model.to(device=device) + if precision == "fp16": + assert device.type != "cpu" + convert_weights_to_fp16(model) + + if jit: + model = torch.jit.script(model) + + return model, model_cfg + + +def create_model_and_transforms( + model_name: str, + pretrained: str = "", + precision: str = "fp32", + device: torch.device = torch.device("cpu"), + jit: bool = False, + force_quick_gelu: bool = False, + # pretrained_image: bool = False, +): + model = create_model( + model_name, + pretrained, + precision, + device, + jit, + force_quick_gelu=force_quick_gelu, + # pretrained_image=pretrained_image + ) + preprocess_train = image_transform(model.visual.image_size, is_train=True) + preprocess_val = image_transform(model.visual.image_size, is_train=False) + return model, preprocess_train, preprocess_val + + +def list_models(): + """enumerate available model architectures based on config files""" + return list(_MODEL_CONFIGS.keys()) + + +def add_model_config(path): + """add model config path or file and update registry""" + if not isinstance(path, Path): + path = Path(path) + _MODEL_CONFIG_PATHS.append(path) + _rescan_model_configs() diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/feature_fusion.py b/qa_mdt/audioldm_train/modules/clap/open_clip/feature_fusion.py new file mode 100644 index 0000000000000000000000000000000000000000..dbe4e170e05894c12ebdc36ba1dc1de65e441b89 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/feature_fusion.py @@ -0,0 +1,192 @@ +""" +Feature Fusion for Varible-Length Data Processing +AFF/iAFF is referred and modified from https://github.com/YimianDai/open-aff/blob/master/aff_pytorch/aff_net/fusion.py +According to the paper: Yimian Dai et al, Attentional Feature Fusion, IEEE Winter Conference on Applications of Computer Vision, WACV 2021 +""" + +import torch +import torch.nn as nn + + +class DAF(nn.Module): + """ + 直接相加 DirectAddFuse + """ + + def __init__(self): + super(DAF, self).__init__() + + def forward(self, x, residual): + return x + residual + + +class iAFF(nn.Module): + """ + 多特征融合 iAFF + """ + + def __init__(self, channels=64, r=4, type="2D"): + super(iAFF, self).__init__() + inter_channels = int(channels // r) + + if type == "1D": + # 本地注意力 + self.local_att = nn.Sequential( + nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(channels), + ) + + # 全局注意力 + self.global_att = nn.Sequential( + nn.AdaptiveAvgPool1d(1), + nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(channels), + ) + + # 第二次本地注意力 + self.local_att2 = nn.Sequential( + nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(channels), + ) + # 第二次全局注意力 + self.global_att2 = nn.Sequential( + nn.AdaptiveAvgPool1d(1), + nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + 
nn.BatchNorm1d(channels), + ) + elif type == "2D": + # 本地注意力 + self.local_att = nn.Sequential( + nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(channels), + ) + + # 全局注意力 + self.global_att = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(channels), + ) + + # 第二次本地注意力 + self.local_att2 = nn.Sequential( + nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(channels), + ) + # 第二次全局注意力 + self.global_att2 = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(channels), + ) + else: + raise f"the type is not supported" + + self.sigmoid = nn.Sigmoid() + + def forward(self, x, residual): + flag = False + xa = x + residual + if xa.size(0) == 1: + xa = torch.cat([xa, xa], dim=0) + flag = True + xl = self.local_att(xa) + xg = self.global_att(xa) + xlg = xl + xg + wei = self.sigmoid(xlg) + xi = x * wei + residual * (1 - wei) + + xl2 = self.local_att2(xi) + xg2 = self.global_att(xi) + xlg2 = xl2 + xg2 + wei2 = self.sigmoid(xlg2) + xo = x * wei2 + residual * (1 - wei2) + if flag: + xo = xo[0].unsqueeze(0) + return xo + + +class AFF(nn.Module): + """ + 多特征融合 AFF + """ + + def __init__(self, channels=64, r=4, type="2D"): + super(AFF, self).__init__() + inter_channels = int(channels // r) + + if type == "1D": + self.local_att = nn.Sequential( + nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(channels), + ) + self.global_att = nn.Sequential( + nn.AdaptiveAvgPool1d(1), + nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm1d(channels), + ) + elif type == "2D": + self.local_att = nn.Sequential( + nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(channels), + ) + self.global_att = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(inter_channels), + nn.ReLU(inplace=True), + nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0), + nn.BatchNorm2d(channels), + ) + else: + raise f"the type is not supported." 
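A small smoke test of the fusion modules defined above (`iAFF` is complete at this point; `AFF` exposes the same interface). The tensors are random stand-ins for the global/local mel features that the HTSAT patch embedding later fuses, and the import path assumes the repository root is on `sys.path`:

```python
import torch

from qa_mdt.audioldm_train.modules.clap.open_clip.feature_fusion import DAF, iAFF

fusion = iAFF(channels=64, r=4, type="2D")
x = torch.randn(2, 64, 32, 32)          # e.g. features of the full-length clip
residual = torch.randn(2, 64, 32, 32)   # e.g. features of the longer, cropped view

out = fusion(x, residual)               # attention-weighted blend, same shape as the inputs
print(out.shape)                        # torch.Size([2, 64, 32, 32])

assert DAF()(x, residual).shape == out.shape   # DAF is the plain additive baseline
```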
+ + self.sigmoid = nn.Sigmoid() + + def forward(self, x, residual): + flag = False + xa = x + residual + if xa.size(0) == 1: + xa = torch.cat([xa, xa], dim=0) + flag = True + xl = self.local_att(xa) + xg = self.global_att(xa) + xlg = xl + xg + wei = self.sigmoid(xlg) + xo = 2 * x * wei + 2 * residual * (1 - wei) + if flag: + xo = xo[0].unsqueeze(0) + return xo diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/htsat.py b/qa_mdt/audioldm_train/modules/clap/open_clip/htsat.py new file mode 100644 index 0000000000000000000000000000000000000000..f7c6742c9c2b9ef1ea6afa5bad3572fcc2e31189 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/htsat.py @@ -0,0 +1,1305 @@ +# Ke Chen +# knutchen@ucsd.edu +# HTS-AT: A HIERARCHICAL TOKEN-SEMANTIC AUDIO TRANSFORMER FOR SOUND CLASSIFICATION AND DETECTION +# Some layers designed on the model +# below codes are based and referred from https://github.com/microsoft/Swin-Transformer +# Swin Transformer for Computer Vision: https://arxiv.org/pdf/2103.14030.pdf + +import torch +import torch.nn as nn +import torch.nn.functional as F +from itertools import repeat +import collections.abc +import math +import warnings + +from torch.nn.init import _calculate_fan_in_and_fan_out +import torch.utils.checkpoint as checkpoint + +import random + +from torchlibrosa.stft import Spectrogram, LogmelFilterBank +from torchlibrosa.augmentation import SpecAugmentation + +from itertools import repeat +from .utils import do_mixup, interpolate + +from .feature_fusion import iAFF, AFF, DAF + + +# from PyTorch internals +def _ntuple(n): + def parse(x): + if isinstance(x, collections.abc.Iterable): + return x + return tuple(repeat(x, n)) + + return parse + + +to_1tuple = _ntuple(1) +to_2tuple = _ntuple(2) +to_3tuple = _ntuple(3) +to_4tuple = _ntuple(4) +to_ntuple = _ntuple + + +def drop_path(x, drop_prob: float = 0.0, training: bool = False): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). + This is the same as the DropConnect impl I created for EfficientNet, etc networks, however, + the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper... + See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for + changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use + 'survival rate' as the argument. 
+ """ + if drop_prob == 0.0 or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * ( + x.ndim - 1 + ) # work with diff dim tensors, not just 2D ConvNets + random_tensor = keep_prob + torch.rand(shape, dtype=x.dtype, device=x.device) + random_tensor.floor_() # binarize + output = x.div(keep_prob) * random_tensor + return output + + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).""" + + def __init__(self, drop_prob=None): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training) + + +class PatchEmbed(nn.Module): + """2D Image to Patch Embedding""" + + def __init__( + self, + img_size=224, + patch_size=16, + in_chans=3, + embed_dim=768, + norm_layer=None, + flatten=True, + patch_stride=16, + enable_fusion=False, + fusion_type="None", + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + patch_stride = to_2tuple(patch_stride) + self.img_size = img_size + self.patch_size = patch_size + self.patch_stride = patch_stride + self.grid_size = ( + img_size[0] // patch_stride[0], + img_size[1] // patch_stride[1], + ) + self.num_patches = self.grid_size[0] * self.grid_size[1] + self.flatten = flatten + self.in_chans = in_chans + self.embed_dim = embed_dim + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + padding = ( + (patch_size[0] - patch_stride[0]) // 2, + (patch_size[1] - patch_stride[1]) // 2, + ) + + if (self.enable_fusion) and (self.fusion_type == "channel_map"): + self.proj = nn.Conv2d( + in_chans * 4, + embed_dim, + kernel_size=patch_size, + stride=patch_stride, + padding=padding, + ) + else: + self.proj = nn.Conv2d( + in_chans, + embed_dim, + kernel_size=patch_size, + stride=patch_stride, + padding=padding, + ) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + if (self.enable_fusion) and ( + self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d"] + ): + self.mel_conv2d = nn.Conv2d( + in_chans, + embed_dim, + kernel_size=(patch_size[0], patch_size[1] * 3), + stride=(patch_stride[0], patch_stride[1] * 3), + padding=padding, + ) + if self.fusion_type == "daf_2d": + self.fusion_model = DAF() + elif self.fusion_type == "aff_2d": + self.fusion_model = AFF(channels=embed_dim, type="2D") + elif self.fusion_type == "iaff_2d": + self.fusion_model = iAFF(channels=embed_dim, type="2D") + + def forward(self, x, longer_idx=None): + if (self.enable_fusion) and ( + self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d"] + ): + global_x = x[:, 0:1, :, :] + + # global processing + B, C, H, W = global_x.shape + assert ( + H == self.img_size[0] and W == self.img_size[1] + ), f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})." 
+ global_x = self.proj(global_x) + TW = global_x.size(-1) + if len(longer_idx) > 0: + # local processing + local_x = x[longer_idx, 1:, :, :].contiguous() + B, C, H, W = local_x.shape + local_x = local_x.view(B * C, 1, H, W) + local_x = self.mel_conv2d(local_x) + local_x = local_x.view( + B, C, local_x.size(1), local_x.size(2), local_x.size(3) + ) + local_x = local_x.permute((0, 2, 3, 1, 4)).contiguous().flatten(3) + TB, TC, TH, _ = local_x.size() + if local_x.size(-1) < TW: + local_x = torch.cat( + [ + local_x, + torch.zeros( + (TB, TC, TH, TW - local_x.size(-1)), + device=global_x.device, + ), + ], + dim=-1, + ) + else: + local_x = local_x[:, :, :, :TW] + + global_x[longer_idx] = self.fusion_model(global_x[longer_idx], local_x) + x = global_x + else: + B, C, H, W = x.shape + assert ( + H == self.img_size[0] and W == self.img_size[1] + ), f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})." + x = self.proj(x) + + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x + + +class Mlp(nn.Module): + """MLP as used in Vision Transformer, MLP-Mixer and related networks""" + + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + drop=0.0, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +def _no_grad_trunc_normal_(tensor, mean, std, a, b): + # Cut & paste from PyTorch official master until it's in a few official releases - RW + # Method based on https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf + def norm_cdf(x): + # Computes standard normal cumulative distribution function + return (1.0 + math.erf(x / math.sqrt(2.0))) / 2.0 + + if (mean < a - 2 * std) or (mean > b + 2 * std): + warnings.warn( + "mean is more than 2 std from [a, b] in nn.init.trunc_normal_. " + "The distribution of values may be incorrect.", + stacklevel=2, + ) + + with torch.no_grad(): + # Values are generated by using a truncated uniform distribution and + # then using the inverse CDF for the normal distribution. + # Get upper and lower cdf values + l = norm_cdf((a - mean) / std) + u = norm_cdf((b - mean) / std) + + # Uniformly fill tensor with values from [l, u], then translate to + # [2l-1, 2u-1]. + tensor.uniform_(2 * l - 1, 2 * u - 1) + + # Use inverse cdf transform for normal distribution to get truncated + # standard normal + tensor.erfinv_() + + # Transform to proper mean, std + tensor.mul_(std * math.sqrt(2.0)) + tensor.add_(mean) + + # Clamp to ensure it's in the proper range + tensor.clamp_(min=a, max=b) + return tensor + + +def trunc_normal_(tensor, mean=0.0, std=1.0, a=-2.0, b=2.0): + # type: (Tensor, float, float, float, float) -> Tensor + r"""Fills the input Tensor with values drawn from a truncated + normal distribution. The values are effectively drawn from the + normal distribution :math:`\mathcal{N}(\text{mean}, \text{std}^2)` + with values outside :math:`[a, b]` redrawn until they are within + the bounds. The method used for generating the random values works + best when :math:`a \leq \text{mean} \leq b`. 
+ Args: + tensor: an n-dimensional `torch.Tensor` + mean: the mean of the normal distribution + std: the standard deviation of the normal distribution + a: the minimum cutoff value + b: the maximum cutoff value + Examples: + >>> w = torch.empty(3, 5) + >>> nn.init.trunc_normal_(w) + """ + return _no_grad_trunc_normal_(tensor, mean, std, a, b) + + +def variance_scaling_(tensor, scale=1.0, mode="fan_in", distribution="normal"): + fan_in, fan_out = _calculate_fan_in_and_fan_out(tensor) + if mode == "fan_in": + denom = fan_in + elif mode == "fan_out": + denom = fan_out + elif mode == "fan_avg": + denom = (fan_in + fan_out) / 2 + + variance = scale / denom + + if distribution == "truncated_normal": + # constant is stddev of standard normal truncated to (-2, 2) + trunc_normal_(tensor, std=math.sqrt(variance) / 0.87962566103423978) + elif distribution == "normal": + tensor.normal_(std=math.sqrt(variance)) + elif distribution == "uniform": + bound = math.sqrt(3 * variance) + tensor.uniform_(-bound, bound) + else: + raise ValueError(f"invalid distribution {distribution}") + + +def lecun_normal_(tensor): + variance_scaling_(tensor, mode="fan_in", distribution="truncated_normal") + + +def window_partition(x, window_size): + """ + Args: + x: (B, H, W, C) + window_size (int): window size + Returns: + windows: (num_windows*B, window_size, window_size, C) + """ + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) + windows = ( + x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + ) + return windows + + +def window_reverse(windows, window_size, H, W): + """ + Args: + windows: (num_windows*B, window_size, window_size, C) + window_size (int): Window size + H (int): Height of image + W (int): Width of image + Returns: + x: (B, H, W, C) + """ + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view( + B, H // window_size, W // window_size, window_size, window_size, -1 + ) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + +class WindowAttention(nn.Module): + r"""Window based multi-head self attention (W-MSA) module with relative position bias. + It supports both of shifted and non-shifted window. + Args: + dim (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set + attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0 + proj_drop (float, optional): Dropout ratio of output. 
Default: 0.0 + """ + + def __init__( + self, + dim, + window_size, + num_heads, + qkv_bias=True, + qk_scale=None, + attn_drop=0.0, + proj_drop=0.0, + ): + super().__init__() + self.dim = dim + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim**-0.5 + + # define a parameter table of relative position bias + self.relative_position_bias_table = nn.Parameter( + torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads) + ) # 2*Wh-1 * 2*Ww-1, nH + + # get pair-wise relative position index for each token inside the window + coords_h = torch.arange(self.window_size[0]) + coords_w = torch.arange(self.window_size[1]) + coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww + coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww + relative_coords = ( + coords_flatten[:, :, None] - coords_flatten[:, None, :] + ) # 2, Wh*Ww, Wh*Ww + relative_coords = relative_coords.permute( + 1, 2, 0 + ).contiguous() # Wh*Ww, Wh*Ww, 2 + relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0 + relative_coords[:, :, 1] += self.window_size[1] - 1 + relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1 + relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww + self.register_buffer("relative_position_index", relative_position_index) + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + trunc_normal_(self.relative_position_bias_table, std=0.02) + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, mask=None): + """ + Args: + x: input features with shape of (num_windows*B, N, C) + mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None + """ + B_, N, C = x.shape + qkv = ( + self.qkv(x) + .reshape(B_, N, 3, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = ( + qkv[0], + qkv[1], + qkv[2], + ) # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + attn = q @ k.transpose(-2, -1) + + relative_position_bias = self.relative_position_bias_table[ + self.relative_position_index.view(-1) + ].view( + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1, + ) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1 + ).contiguous() # nH, Wh*Ww, Wh*Ww + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze( + 1 + ).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x, attn + + def extra_repr(self): + return f"dim={self.dim}, window_size={self.window_size}, num_heads={self.num_heads}" + + +# We use the model based on Swintransformer Block, therefore we can use the swin-transformer pretrained model +class SwinTransformerBlock(nn.Module): + r"""Swin Transformer Block. + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resulotion. + num_heads (int): Number of attention heads. + window_size (int): Window size. + shift_size (int): Shift size for SW-MSA. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. 
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float, optional): Stochastic depth rate. Default: 0.0 + act_layer (nn.Module, optional): Activation layer. Default: nn.GELU + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + """ + + def __init__( + self, + dim, + input_resolution, + num_heads, + window_size=7, + shift_size=0, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + norm_before_mlp="ln", + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.num_heads = num_heads + self.window_size = window_size + self.shift_size = shift_size + self.mlp_ratio = mlp_ratio + self.norm_before_mlp = norm_before_mlp + if min(self.input_resolution) <= self.window_size: + # if window size is larger than input resolution, we don't partition windows + self.shift_size = 0 + self.window_size = min(self.input_resolution) + assert ( + 0 <= self.shift_size < self.window_size + ), "shift_size must in 0-window_size" + + self.norm1 = norm_layer(dim) + self.attn = WindowAttention( + dim, + window_size=to_2tuple(self.window_size), + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop, + ) + + self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + if self.norm_before_mlp == "ln": + self.norm2 = nn.LayerNorm(dim) + elif self.norm_before_mlp == "bn": + self.norm2 = lambda x: nn.BatchNorm1d(dim)(x.transpose(1, 2)).transpose( + 1, 2 + ) + else: + raise NotImplementedError + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + ) + + if self.shift_size > 0: + # calculate attention mask for SW-MSA + H, W = self.input_resolution + img_mask = torch.zeros((1, H, W, 1)) # 1 H W 1 + h_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + w_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + mask_windows = window_partition( + img_mask, self.window_size + ) # nW, window_size, window_size, 1 + mask_windows = mask_windows.view(-1, self.window_size * self.window_size) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = attn_mask.masked_fill( + attn_mask != 0, float(-100.0) + ).masked_fill(attn_mask == 0, float(0.0)) + else: + attn_mask = None + + self.register_buffer("attn_mask", attn_mask) + + def forward(self, x): + # pdb.set_trace() + H, W = self.input_resolution + # print("H: ", H) + # print("W: ", W) + # pdb.set_trace() + B, L, C = x.shape + # assert L == H * W, "input feature has wrong size" + + shortcut = x + x = self.norm1(x) + x = x.view(B, H, W, C) + + # cyclic shift + if self.shift_size > 0: + shifted_x = torch.roll( + x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2) + ) + else: + shifted_x = x + + # partition windows + x_windows = window_partition( + shifted_x, self.window_size + ) # nW*B, window_size, window_size, C + x_windows = x_windows.view( + -1, 
self.window_size * self.window_size, C + ) # nW*B, window_size*window_size, C + + # W-MSA/SW-MSA + attn_windows, attn = self.attn( + x_windows, mask=self.attn_mask + ) # nW*B, window_size*window_size, C + + # merge windows + attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C) + shifted_x = window_reverse(attn_windows, self.window_size, H, W) # B H' W' C + + # reverse cyclic shift + if self.shift_size > 0: + x = torch.roll( + shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2) + ) + else: + x = shifted_x + x = x.view(B, H * W, C) + + # FFN + x = shortcut + self.drop_path(x) + x = x + self.drop_path(self.mlp(self.norm2(x))) + + return x, attn + + def extra_repr(self): + return ( + f"dim={self.dim}, input_resolution={self.input_resolution}, num_heads={self.num_heads}, " + f"window_size={self.window_size}, shift_size={self.shift_size}, mlp_ratio={self.mlp_ratio}" + ) + + +class PatchMerging(nn.Module): + r"""Patch Merging Layer. + Args: + input_resolution (tuple[int]): Resolution of input feature. + dim (int): Number of input channels. + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + """ + + def __init__(self, input_resolution, dim, norm_layer=nn.LayerNorm): + super().__init__() + self.input_resolution = input_resolution + self.dim = dim + self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False) + self.norm = norm_layer(4 * dim) + + def forward(self, x): + """ + x: B, H*W, C + """ + H, W = self.input_resolution + B, L, C = x.shape + assert L == H * W, "input feature has wrong size" + assert H % 2 == 0 and W % 2 == 0, f"x size ({H}*{W}) are not even." + + x = x.view(B, H, W, C) + + x0 = x[:, 0::2, 0::2, :] # B H/2 W/2 C + x1 = x[:, 1::2, 0::2, :] # B H/2 W/2 C + x2 = x[:, 0::2, 1::2, :] # B H/2 W/2 C + x3 = x[:, 1::2, 1::2, :] # B H/2 W/2 C + x = torch.cat([x0, x1, x2, x3], -1) # B H/2 W/2 4*C + x = x.view(B, -1, 4 * C) # B H/2*W/2 4*C + + x = self.norm(x) + x = self.reduction(x) + + return x + + def extra_repr(self): + return f"input_resolution={self.input_resolution}, dim={self.dim}" + + +class BasicLayer(nn.Module): + """A basic Swin Transformer layer for one stage. + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resolution. + depth (int): Number of blocks. + num_heads (int): Number of attention heads. + window_size (int): Local window size. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. 
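        Example (a minimal usage sketch, assuming an 8x8 token grid so a single
        8x8 window covers the whole input):
            >>> layer = BasicLayer(dim=96, input_resolution=(8, 8), depth=2,
            ...                    num_heads=4, window_size=8)
            >>> x, attn = layer(torch.randn(1, 64, 96))  # (B, H*W, C) in and out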
+ """ + + def __init__( + self, + dim, + input_resolution, + depth, + num_heads, + window_size, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + norm_before_mlp="ln", + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.depth = depth + self.use_checkpoint = use_checkpoint + + # build blocks + self.blocks = nn.ModuleList( + [ + SwinTransformerBlock( + dim=dim, + input_resolution=input_resolution, + num_heads=num_heads, + window_size=window_size, + shift_size=0 if (i % 2 == 0) else window_size // 2, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path[i] + if isinstance(drop_path, list) + else drop_path, + norm_layer=norm_layer, + norm_before_mlp=norm_before_mlp, + ) + for i in range(depth) + ] + ) + + # patch merging layer + if downsample is not None: + self.downsample = downsample( + input_resolution, dim=dim, norm_layer=norm_layer + ) + else: + self.downsample = None + + def forward(self, x): + attns = [] + for blk in self.blocks: + if self.use_checkpoint: + x = checkpoint.checkpoint(blk, x) + else: + x, attn = blk(x) + if not self.training: + attns.append(attn.unsqueeze(0)) + if self.downsample is not None: + x = self.downsample(x) + if not self.training: + attn = torch.cat(attns, dim=0) + attn = torch.mean(attn, dim=0) + return x, attn + + def extra_repr(self): + return f"dim={self.dim}, input_resolution={self.input_resolution}, depth={self.depth}" + + +# The Core of HTSAT +class HTSAT_Swin_Transformer(nn.Module): + r"""HTSAT based on the Swin Transformer + Args: + spec_size (int | tuple(int)): Input Spectrogram size. Default 256 + patch_size (int | tuple(int)): Patch size. Default: 4 + path_stride (iot | tuple(int)): Patch Stride for Frequency and Time Axis. Default: 4 + in_chans (int): Number of input image channels. Default: 1 (mono) + num_classes (int): Number of classes for classification head. Default: 527 + embed_dim (int): Patch embedding dimension. Default: 96 + depths (tuple(int)): Depth of each HTSAT-Swin Transformer layer. + num_heads (tuple(int)): Number of attention heads in different layers. + window_size (int): Window size. Default: 8 + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4 + qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float): Override default qk scale of head_dim ** -0.5 if set. Default: None + drop_rate (float): Dropout rate. Default: 0 + attn_drop_rate (float): Attention dropout rate. Default: 0 + drop_path_rate (float): Stochastic depth rate. Default: 0.1 + norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm. + ape (bool): If True, add absolute position embedding to the patch embedding. Default: False + patch_norm (bool): If True, add normalization after patch embedding. Default: True + use_checkpoint (bool): Whether to use checkpointing to save memory. 
Default: False + config (module): The configuration Module from config.py + """ + + def __init__( + self, + spec_size=256, + patch_size=4, + patch_stride=(4, 4), + in_chans=1, + num_classes=527, + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[4, 8, 16, 32], + window_size=8, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop_rate=0.0, + attn_drop_rate=0.0, + drop_path_rate=0.1, + norm_layer=nn.LayerNorm, + ape=False, + patch_norm=True, + use_checkpoint=False, + norm_before_mlp="ln", + config=None, + enable_fusion=False, + fusion_type="None", + **kwargs, + ): + super(HTSAT_Swin_Transformer, self).__init__() + + self.config = config + self.spec_size = spec_size + self.patch_stride = patch_stride + self.patch_size = patch_size + self.window_size = window_size + self.embed_dim = embed_dim + self.depths = depths + self.ape = ape + self.in_chans = in_chans + self.num_classes = num_classes + self.num_heads = num_heads + self.num_layers = len(self.depths) + self.num_features = int(self.embed_dim * 2 ** (self.num_layers - 1)) + + self.drop_rate = drop_rate + self.attn_drop_rate = attn_drop_rate + self.drop_path_rate = drop_path_rate + + self.qkv_bias = qkv_bias + self.qk_scale = None + + self.patch_norm = patch_norm + self.norm_layer = norm_layer if self.patch_norm else None + self.norm_before_mlp = norm_before_mlp + self.mlp_ratio = mlp_ratio + + self.use_checkpoint = use_checkpoint + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + # process mel-spec ; used only once + self.freq_ratio = self.spec_size // self.config.mel_bins + window = "hann" + center = True + pad_mode = "reflect" + ref = 1.0 + amin = 1e-10 + top_db = None + self.interpolate_ratio = 32 # Downsampled ratio + # Spectrogram extractor + self.spectrogram_extractor = Spectrogram( + n_fft=config.window_size, + hop_length=config.hop_size, + win_length=config.window_size, + window=window, + center=center, + pad_mode=pad_mode, + freeze_parameters=True, + ) + # Logmel feature extractor + self.logmel_extractor = LogmelFilterBank( + sr=config.sample_rate, + n_fft=config.window_size, + n_mels=config.mel_bins, + fmin=config.fmin, + fmax=config.fmax, + ref=ref, + amin=amin, + top_db=top_db, + freeze_parameters=True, + ) + # Spec augmenter + self.spec_augmenter = SpecAugmentation( + time_drop_width=64, + time_stripes_num=2, + freq_drop_width=8, + freq_stripes_num=2, + ) # 2 2 + self.bn0 = nn.BatchNorm2d(self.config.mel_bins) + + # split spctrogram into non-overlapping patches + self.patch_embed = PatchEmbed( + img_size=self.spec_size, + patch_size=self.patch_size, + in_chans=self.in_chans, + embed_dim=self.embed_dim, + norm_layer=self.norm_layer, + patch_stride=patch_stride, + enable_fusion=self.enable_fusion, + fusion_type=self.fusion_type, + ) + + num_patches = self.patch_embed.num_patches + patches_resolution = self.patch_embed.grid_size + self.patches_resolution = patches_resolution + + # absolute position embedding + if self.ape: + self.absolute_pos_embed = nn.Parameter( + torch.zeros(1, num_patches, self.embed_dim) + ) + trunc_normal_(self.absolute_pos_embed, std=0.02) + + self.pos_drop = nn.Dropout(p=self.drop_rate) + + # stochastic depth + dpr = [ + x.item() for x in torch.linspace(0, self.drop_path_rate, sum(self.depths)) + ] # stochastic depth decay rule + + # build layers + self.layers = nn.ModuleList() + for i_layer in range(self.num_layers): + layer = BasicLayer( + dim=int(self.embed_dim * 2**i_layer), + input_resolution=( + patches_resolution[0] // (2**i_layer), + patches_resolution[1] // 
(2**i_layer), + ), + depth=self.depths[i_layer], + num_heads=self.num_heads[i_layer], + window_size=self.window_size, + mlp_ratio=self.mlp_ratio, + qkv_bias=self.qkv_bias, + qk_scale=self.qk_scale, + drop=self.drop_rate, + attn_drop=self.attn_drop_rate, + drop_path=dpr[ + sum(self.depths[:i_layer]) : sum(self.depths[: i_layer + 1]) + ], + norm_layer=self.norm_layer, + downsample=PatchMerging if (i_layer < self.num_layers - 1) else None, + use_checkpoint=use_checkpoint, + norm_before_mlp=self.norm_before_mlp, + ) + self.layers.append(layer) + + self.norm = self.norm_layer(self.num_features) + self.avgpool = nn.AdaptiveAvgPool1d(1) + self.maxpool = nn.AdaptiveMaxPool1d(1) + + SF = ( + self.spec_size + // (2 ** (len(self.depths) - 1)) + // self.patch_stride[0] + // self.freq_ratio + ) + self.tscam_conv = nn.Conv2d( + in_channels=self.num_features, + out_channels=self.num_classes, + kernel_size=(SF, 3), + padding=(0, 1), + ) + self.head = nn.Linear(num_classes, num_classes) + + if (self.enable_fusion) and ( + self.fusion_type in ["daf_1d", "aff_1d", "iaff_1d"] + ): + self.mel_conv1d = nn.Sequential( + nn.Conv1d(64, 64, kernel_size=5, stride=3, padding=2), + nn.BatchNorm1d(64), + ) + if self.fusion_type == "daf_1d": + self.fusion_model = DAF() + elif self.fusion_type == "aff_1d": + self.fusion_model = AFF(channels=64, type="1D") + elif self.fusion_type == "iaff_1d": + self.fusion_model = iAFF(channels=64, type="1D") + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {"absolute_pos_embed"} + + @torch.jit.ignore + def no_weight_decay_keywords(self): + return {"relative_position_bias_table"} + + def forward_features(self, x, longer_idx=None): + # A deprecated optimization for using a hierarchical output from different blocks + + frames_num = x.shape[2] + x = self.patch_embed(x, longer_idx=longer_idx) + if self.ape: + x = x + self.absolute_pos_embed + x = self.pos_drop(x) + for i, layer in enumerate(self.layers): + x, attn = layer(x) + # for x + x = self.norm(x) + B, N, C = x.shape + SF = frames_num // (2 ** (len(self.depths) - 1)) // self.patch_stride[0] + ST = frames_num // (2 ** (len(self.depths) - 1)) // self.patch_stride[1] + x = x.permute(0, 2, 1).contiguous().reshape(B, C, SF, ST) + B, C, F, T = x.shape + # group 2D CNN + c_freq_bin = F // self.freq_ratio + x = x.reshape(B, C, F // c_freq_bin, c_freq_bin, T) + x = x.permute(0, 1, 3, 2, 4).contiguous().reshape(B, C, c_freq_bin, -1) + # get latent_output + fine_grained_latent_output = torch.mean(x, dim=2) + fine_grained_latent_output = interpolate( + fine_grained_latent_output.permute(0, 2, 1).contiguous(), + 8 * self.patch_stride[1], + ) + + latent_output = self.avgpool(torch.flatten(x, 2)) + latent_output = torch.flatten(latent_output, 1) + + # display the attention map, if needed + + x = self.tscam_conv(x) + x = torch.flatten(x, 2) # B, C, T + + fpx = interpolate( + torch.sigmoid(x).permute(0, 2, 1).contiguous(), 8 * self.patch_stride[1] + ) + + x = self.avgpool(x) + x = torch.flatten(x, 1) + + output_dict = { + "framewise_output": fpx, # already sigmoided + "clipwise_output": torch.sigmoid(x), + "fine_grained_embedding": fine_grained_latent_output, + "embedding": latent_output, + } + + return output_dict + + def 
crop_wav(self, x, crop_size, spe_pos=None): + time_steps = x.shape[2] + tx = torch.zeros(x.shape[0], x.shape[1], crop_size, x.shape[3]).to(x.device) + for i in range(len(x)): + if spe_pos is None: + crop_pos = random.randint(0, time_steps - crop_size - 1) + else: + crop_pos = spe_pos + tx[i][0] = x[i, 0, crop_pos : crop_pos + crop_size, :] + return tx + + # Reshape the wavform to a img size, if you want to use the pretrained swin transformer model + def reshape_wav2img(self, x): + B, C, T, F = x.shape + target_T = int(self.spec_size * self.freq_ratio) + target_F = self.spec_size // self.freq_ratio + assert ( + T <= target_T and F <= target_F + ), "the wav size should less than or equal to the swin input size" + # to avoid bicubic zero error + if T < target_T: + x = nn.functional.interpolate( + x, (target_T, x.shape[3]), mode="bicubic", align_corners=True + ) + if F < target_F: + x = nn.functional.interpolate( + x, (x.shape[2], target_F), mode="bicubic", align_corners=True + ) + x = x.permute(0, 1, 3, 2).contiguous() + x = x.reshape( + x.shape[0], + x.shape[1], + x.shape[2], + self.freq_ratio, + x.shape[3] // self.freq_ratio, + ) + # print(x.shape) + x = x.permute(0, 1, 3, 2, 4).contiguous() + x = x.reshape(x.shape[0], x.shape[1], x.shape[2] * x.shape[3], x.shape[4]) + return x + + # Repeat the wavform to a img size, if you want to use the pretrained swin transformer model + def repeat_wat2img(self, x, cur_pos): + B, C, T, F = x.shape + target_T = int(self.spec_size * self.freq_ratio) + target_F = self.spec_size // self.freq_ratio + assert ( + T <= target_T and F <= target_F + ), "the wav size should less than or equal to the swin input size" + # to avoid bicubic zero error + if T < target_T: + x = nn.functional.interpolate( + x, (target_T, x.shape[3]), mode="bicubic", align_corners=True + ) + if F < target_F: + x = nn.functional.interpolate( + x, (x.shape[2], target_F), mode="bicubic", align_corners=True + ) + x = x.permute(0, 1, 3, 2).contiguous() # B C F T + x = x[:, :, :, cur_pos : cur_pos + self.spec_size] + x = x.repeat(repeats=(1, 1, 4, 1)) + return x + + def forward( + self, x: torch.Tensor, mixup_lambda=None, infer_mode=False, device=None + ): # out_feat_keys: List[str] = None): + if self.enable_fusion and x["longer"].sum() == 0: + # if no audio is longer than 10s, then randomly select one audio to be longer + x["longer"][torch.randint(0, x["longer"].shape[0], (1,))] = True + + if not self.enable_fusion: + x = x["waveform"].to(device=device, non_blocking=True) + x = self.spectrogram_extractor(x) # (batch_size, 1, time_steps, freq_bins) + x = self.logmel_extractor(x) # (batch_size, 1, time_steps, mel_bins) + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + if self.training: + x = self.spec_augmenter(x) + + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + + x = self.reshape_wav2img(x) + output_dict = self.forward_features(x) + else: + longer_list = x["longer"].to(device=device, non_blocking=True) + x = x["mel_fusion"].to(device=device, non_blocking=True) + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + longer_list_idx = torch.where(longer_list)[0] + if self.fusion_type in ["daf_1d", "aff_1d", "iaff_1d"]: + new_x = x[:, 0:1, :, :].clone().contiguous() + if len(longer_list_idx) > 0: + # local processing + fusion_x_local = x[longer_list_idx, 1:, :, :].clone().contiguous() + FB, FC, FT, FF = fusion_x_local.size() + fusion_x_local = fusion_x_local.view(FB * FC, FT, FF) + fusion_x_local = torch.permute( + fusion_x_local, 
(0, 2, 1) + ).contiguous() + fusion_x_local = self.mel_conv1d(fusion_x_local) + fusion_x_local = fusion_x_local.view( + FB, FC, FF, fusion_x_local.size(-1) + ) + fusion_x_local = ( + torch.permute(fusion_x_local, (0, 2, 1, 3)) + .contiguous() + .flatten(2) + ) + if fusion_x_local.size(-1) < FT: + fusion_x_local = torch.cat( + [ + fusion_x_local, + torch.zeros( + (FB, FF, FT - fusion_x_local.size(-1)), + device=device, + ), + ], + dim=-1, + ) + else: + fusion_x_local = fusion_x_local[:, :, :FT] + # 1D fusion + new_x = new_x.squeeze(1).permute((0, 2, 1)).contiguous() + new_x[longer_list_idx] = self.fusion_model( + new_x[longer_list_idx], fusion_x_local + ) + x = new_x.permute((0, 2, 1)).contiguous()[:, None, :, :] + else: + x = new_x + + elif self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d", "channel_map"]: + x = x # no change + + if self.training: + x = self.spec_augmenter(x) + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + + x = self.reshape_wav2img(x) + output_dict = self.forward_features(x, longer_idx=longer_list_idx) + + # if infer_mode: + # # in infer mode. we need to handle different length audio input + # frame_num = x.shape[2] + # target_T = int(self.spec_size * self.freq_ratio) + # repeat_ratio = math.floor(target_T / frame_num) + # x = x.repeat(repeats=(1,1,repeat_ratio,1)) + # x = self.reshape_wav2img(x) + # output_dict = self.forward_features(x) + # else: + # if x.shape[2] > self.freq_ratio * self.spec_size: + # if self.training: + # x = self.crop_wav(x, crop_size=self.freq_ratio * self.spec_size) + # x = self.reshape_wav2img(x) + # output_dict = self.forward_features(x) + # else: + # # Change: Hard code here + # overlap_size = (x.shape[2] - 1) // 4 + # output_dicts = [] + # crop_size = (x.shape[2] - 1) // 2 + # for cur_pos in range(0, x.shape[2] - crop_size - 1, overlap_size): + # tx = self.crop_wav(x, crop_size = crop_size, spe_pos = cur_pos) + # tx = self.reshape_wav2img(tx) + # output_dicts.append(self.forward_features(tx)) + # clipwise_output = torch.zeros_like(output_dicts[0]["clipwise_output"]).float().to(x.device) + # framewise_output = torch.zeros_like(output_dicts[0]["framewise_output"]).float().to(x.device) + # for d in output_dicts: + # clipwise_output += d["clipwise_output"] + # framewise_output += d["framewise_output"] + # clipwise_output = clipwise_output / len(output_dicts) + # framewise_output = framewise_output / len(output_dicts) + # output_dict = { + # 'framewise_output': framewise_output, + # 'clipwise_output': clipwise_output + # } + # else: # this part is typically used, and most easy one + # x = self.reshape_wav2img(x) + # output_dict = self.forward_features(x) + # x = self.head(x) + + # We process the data in the dataloader part, in that here we only consider the input_T < fixed_T + + return output_dict + + +def create_htsat_model(audio_cfg, enable_fusion=False, fusion_type="None"): + try: + assert audio_cfg.model_name in [ + "tiny", + "base", + "large", + ], "model name for HTS-AT is wrong!" 
+ if audio_cfg.model_name == "tiny": + model = HTSAT_Swin_Transformer( + spec_size=256, + patch_size=4, + patch_stride=(4, 4), + num_classes=audio_cfg.class_num, + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[4, 8, 16, 32], + window_size=8, + config=audio_cfg, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + elif audio_cfg.model_name == "base": + model = HTSAT_Swin_Transformer( + spec_size=256, + patch_size=4, + patch_stride=(4, 4), + num_classes=audio_cfg.class_num, + embed_dim=128, + depths=[2, 2, 12, 2], + num_heads=[4, 8, 16, 32], + window_size=8, + config=audio_cfg, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + elif audio_cfg.model_name == "large": + model = HTSAT_Swin_Transformer( + spec_size=256, + patch_size=4, + patch_stride=(4, 4), + num_classes=audio_cfg.class_num, + embed_dim=256, + depths=[2, 2, 12, 2], + num_heads=[4, 8, 16, 32], + window_size=8, + config=audio_cfg, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + + return model + except: + raise RuntimeError( + f"Import Model for {audio_cfg.model_name} not found, or the audio cfg parameters are not enough." + ) diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/linear_probe.py b/qa_mdt/audioldm_train/modules/clap/open_clip/linear_probe.py new file mode 100644 index 0000000000000000000000000000000000000000..9d7e23b6b67a53e16d050d675a99d01d7d04d581 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/linear_probe.py @@ -0,0 +1,66 @@ +import numpy as np +import torch.nn.functional as F +from torch import nn +from .model import MLPLayers + + +class LinearProbe(nn.Module): + def __init__(self, model, mlp, freeze, in_ch, out_ch, act=None): + """ + Args: + model: nn.Module + mlp: bool, if True, then use the MLP layer as the linear probe module + freeze: bool, if Ture, then freeze all the CLAP model's layers when training the linear probe + in_ch: int, the output channel from CLAP model + out_ch: int, the output channel from linear probe (class_num) + act: torch.nn.functional, the activation function before the loss function + """ + super().__init__() + in_ch = 512 + self.clap_model = model + self.clap_model.text_branch = None # to save memory + self.freeze = freeze + if mlp: + self.lp_layer = MLPLayers(units=[in_ch, in_ch * 2, out_ch]) + else: + self.lp_layer = nn.Linear(in_ch, out_ch) + + if self.freeze: + for param in self.clap_model.parameters(): + param.requires_grad = False + + if act == "None": + self.act = None + elif act == "relu": + self.act = nn.ReLU() + elif act == "elu": + self.act = nn.ELU() + elif act == "prelu": + self.act = nn.PReLU(num_parameters=in_ch) + elif act == "softmax": + self.act = nn.Softmax(dim=-1) + elif act == "sigmoid": + self.act = nn.Sigmoid() + + def forward(self, x, mix_lambda=None, device=None): + """ + Args: + x: waveform, torch.tensor [batch, t_samples] / batch of mel_spec and longer list + mix_lambda: torch.tensor [batch], the mixup lambda + Returns: + class_prob: torch.tensor [batch, class_num] + + """ + # batchnorm cancel grandient + if self.freeze: + self.clap_model.eval() + + x = self.clap_model.audio_projection( + self.clap_model.audio_branch(x, mixup_lambda=mix_lambda, device=device)[ + "embedding" + ] + ) + out = self.lp_layer(x) + if self.act is not None: + out = self.act(out) + return out diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/loss.py b/qa_mdt/audioldm_train/modules/clap/open_clip/loss.py new file mode 100644 index 
0000000000000000000000000000000000000000..cc66298a14997da4aa2efc71e37c0a6bcda53fd1 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/loss.py @@ -0,0 +1,398 @@ +from multiprocessing.sharedctypes import Value +import torch +import torch.distributed.nn +from torch import distributed as dist, nn as nn +from torch.nn import functional as F +import numpy as np +from sklearn.metrics import average_precision_score, roc_auc_score, accuracy_score + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + + +def gather_features( + audio_features, + text_features, + audio_features_mlp=None, + text_features_mlp=None, + local_loss=False, + gather_with_grad=False, + rank=0, + world_size=1, + use_horovod=False, + mlp_loss=False, +): + if use_horovod: + assert hvd is not None, "Please install horovod" + if gather_with_grad: + all_audio_features = hvd.allgather(audio_features) + all_text_features = hvd.allgather(text_features) + if mlp_loss: + all_audio_features_mlp = hvd.allgather(audio_features_mlp) + all_text_features_mlp = hvd.allgather(text_features_mlp) + else: + with torch.no_grad(): + all_audio_features = hvd.allgather(audio_features) + all_text_features = hvd.allgather(text_features) + if mlp_loss: + all_audio_features_mlp = hvd.allgather(audio_features_mlp) + all_text_features_mlp = hvd.allgather(text_features_mlp) + if not local_loss: + # ensure grads for local rank when all_* features don't have a gradient + gathered_audio_features = list( + all_audio_features.chunk(world_size, dim=0) + ) + gathered_text_features = list( + all_text_features.chunk(world_size, dim=0) + ) + gathered_audio_features[rank] = audio_features + gathered_text_features[rank] = text_features + all_audio_features = torch.cat(gathered_audio_features, dim=0) + all_text_features = torch.cat(gathered_text_features, dim=0) + if mlp_loss: + gathered_audio_features_mlp = list( + all_audio_features_mlp.chunk(world_size, dim=0) + ) + gathered_text_features_mlp = list( + all_text_features_mlp.chunk(world_size, dim=0) + ) + gathered_audio_features_mlp[rank] = audio_features_mlp + gathered_text_features_mlp[rank] = text_features_mlp + all_audio_features_mlp = torch.cat( + gathered_audio_features_mlp, dim=0 + ) + all_text_features_mlp = torch.cat(gathered_text_features_mlp, dim=0) + else: + # We gather tensors from all gpus + if gather_with_grad: + all_audio_features = torch.cat( + torch.distributed.nn.all_gather(audio_features), dim=0 + ) + all_text_features = torch.cat( + torch.distributed.nn.all_gather(text_features), dim=0 + ) + if mlp_loss: + all_audio_features_mlp = torch.cat( + torch.distributed.nn.all_gather(audio_features_mlp), dim=0 + ) + all_text_features_mlp = torch.cat( + torch.distributed.nn.all_gather(text_features_mlp), dim=0 + ) + else: + gathered_audio_features = [ + torch.zeros_like(audio_features) for _ in range(world_size) + ] + gathered_text_features = [ + torch.zeros_like(text_features) for _ in range(world_size) + ] + dist.all_gather(gathered_audio_features, audio_features) + dist.all_gather(gathered_text_features, text_features) + if mlp_loss: + gathered_audio_features_mlp = [ + torch.zeros_like(audio_features_mlp) for _ in range(world_size) + ] + gathered_text_features_mlp = [ + torch.zeros_like(text_features_mlp) for _ in range(world_size) + ] + dist.all_gather(gathered_audio_features_mlp, audio_features_mlp) + dist.all_gather(gathered_text_features_mlp, text_features_mlp) + if not local_loss: + # ensure grads for local rank when all_* features don't have a gradient + 
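                    # torch.distributed.all_gather returns detached copies, so the
                    # locally computed shard is put back in place here to keep a
                    # gradient path through this rank's own features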
gathered_audio_features[rank] = audio_features + gathered_text_features[rank] = text_features + if mlp_loss: + gathered_audio_features_mlp[rank] = audio_features_mlp + gathered_text_features_mlp[rank] = text_features_mlp + + all_audio_features = torch.cat(gathered_audio_features, dim=0) + all_text_features = torch.cat(gathered_text_features, dim=0) + if mlp_loss: + all_audio_features_mlp = torch.cat(gathered_audio_features_mlp, dim=0) + all_text_features_mlp = torch.cat(gathered_text_features_mlp, dim=0) + if mlp_loss: + return ( + all_audio_features, + all_text_features, + all_audio_features_mlp, + all_text_features_mlp, + ) + else: + return all_audio_features, all_text_features + + +class ClipLoss(nn.Module): + def __init__( + self, + local_loss=False, + gather_with_grad=False, + cache_labels=False, + rank=0, + world_size=1, + use_horovod=False, + mlp_loss=False, + weight_loss_kappa=0, + ): + super().__init__() + self.local_loss = local_loss + self.gather_with_grad = gather_with_grad + self.cache_labels = cache_labels + self.rank = rank + self.world_size = world_size + self.use_horovod = use_horovod + self.mlp_loss = mlp_loss + self.weighted_loss = bool(weight_loss_kappa != 0) + self.weight_loss_kappa = weight_loss_kappa + # cache state + self.prev_num_logits = 0 + self.labels = {} + + def forward( + self, + audio_features, + text_features, + logit_scale_a, + logit_scale_t=None, + audio_features_mlp=None, + text_features_mlp=None, + ): + device = audio_features.device + if self.mlp_loss: + if self.world_size > 1: + ( + all_audio_features, + all_text_features, + all_audio_features_mlp, + all_text_features_mlp, + ) = gather_features( + audio_features=audio_features, + text_features=text_features, + audio_features_mlp=audio_features_mlp, + text_features_mlp=text_features_mlp, + local_loss=self.local_loss, + gather_with_grad=self.gather_with_grad, + rank=self.rank, + world_size=self.world_size, + use_horovod=self.use_horovod, + mlp_loss=self.mlp_loss, + ) + if self.local_loss: + a_logits_per_audio = ( + logit_scale_a * audio_features @ all_text_features_mlp.T + ) + a_logits_per_text = ( + logit_scale_a * text_features_mlp @ all_audio_features.T + ) + t_logits_per_audio = ( + logit_scale_t * audio_features_mlp @ all_text_features.T + ) + t_logits_per_text = ( + logit_scale_t * text_features @ all_audio_features_mlp.T + ) + else: + a_logits_per_audio = ( + logit_scale_a * all_audio_features @ all_text_features_mlp.T + ) + a_logits_per_text = a_logits_per_audio.T + t_logits_per_audio = ( + logit_scale_t * all_audio_features_mlp @ all_text_features.T + ) + t_logits_per_text = t_logits_per_audio.T + else: + a_logits_per_audio = ( + logit_scale_a * audio_features @ text_features_mlp.T + ) + a_logits_per_text = logit_scale_a * text_features_mlp @ audio_features.T + t_logits_per_audio = ( + logit_scale_t * audio_features_mlp @ text_features.T + ) + t_logits_per_text = logit_scale_t * text_features @ audio_features_mlp.T + + # calculated ground-truth and cache if enabled + num_logits = a_logits_per_audio.shape[0] + if self.prev_num_logits != num_logits or device not in self.labels: + labels = torch.arange(num_logits, device=device, dtype=torch.long) + if self.world_size > 1 and self.local_loss: + labels = labels + num_logits * self.rank + if self.cache_labels: + self.labels[device] = labels + self.prev_num_logits = num_logits + else: + labels = self.labels[device] + + if not self.weighted_loss: + total_loss = ( + F.cross_entropy(a_logits_per_audio, labels) + + F.cross_entropy(a_logits_per_text, 
labels) + + F.cross_entropy(t_logits_per_audio, labels) + + F.cross_entropy(t_logits_per_text, labels) + ) / 4 + else: + audio_weight = (audio_features @ audio_features.T).detach() + audio_weight = ( + torch.exp( + torch.sum(audio_weight, axis=1) + / (self.weight_loss_kappa * len(audio_weight)) + ) + ).detach() + text_weight = (text_features @ text_features.T).detach() + text_weight = ( + torch.exp( + torch.sum(text_weight, axis=1) + / (self.weight_loss_kappa * len(text_features)) + ) + ).detach() + total_loss = ( + F.cross_entropy(a_logits_per_audio, labels, weight=audio_weight) + + F.cross_entropy(a_logits_per_text, labels, weight=audio_weight) + + F.cross_entropy(t_logits_per_audio, labels, weight=text_weight) + + F.cross_entropy(t_logits_per_text, labels, weight=text_weight) + ) / 4 + else: + if self.world_size > 1: + all_audio_features, all_text_features = gather_features( + audio_features=audio_features, + text_features=text_features, + local_loss=self.local_loss, + gather_with_grad=self.gather_with_grad, + rank=self.rank, + world_size=self.world_size, + use_horovod=self.use_horovod, + mlp_loss=self.mlp_loss, + ) + + if self.local_loss: + logits_per_audio = ( + logit_scale_a * audio_features @ all_text_features.T + ) + logits_per_text = ( + logit_scale_a * text_features @ all_audio_features.T + ) + else: + logits_per_audio = ( + logit_scale_a * all_audio_features @ all_text_features.T + ) + logits_per_text = logits_per_audio.T + else: + logits_per_audio = logit_scale_a * audio_features @ text_features.T + logits_per_text = logit_scale_a * text_features @ audio_features.T + + # calculated ground-truth and cache if enabled + num_logits = logits_per_audio.shape[0] + if self.prev_num_logits != num_logits or device not in self.labels: + labels = torch.arange(num_logits, device=device, dtype=torch.long) + if self.world_size > 1 and self.local_loss: + labels = labels + num_logits * self.rank + if self.cache_labels: + self.labels[device] = labels + self.prev_num_logits = num_logits + else: + labels = self.labels[device] + if not self.weighted_loss: + total_loss = ( + F.cross_entropy(logits_per_audio, labels) + + F.cross_entropy(logits_per_text, labels) + ) / 2 + else: + audio_weight = (all_audio_features @ all_audio_features.T).detach() + audio_weight = ( + torch.exp( + torch.sum(audio_weight, axis=1) + / (self.weight_loss_kappa * len(all_audio_features)) + ) + ).detach() + text_weight = (all_text_features @ all_text_features.T).detach() + text_weight = ( + torch.exp( + torch.sum(text_weight, axis=1) + / (self.weight_loss_kappa * len(all_text_features)) + ) + ).detach() + total_loss = ( + F.cross_entropy(logits_per_audio, labels, weight=text_weight) + + F.cross_entropy(logits_per_text, labels, weight=audio_weight) + ) / 2 + return total_loss + + +def lp_gather_features(pred, target, world_size=1, use_horovod=False): + if use_horovod: + assert hvd is not None, "Please install horovod" + with torch.no_grad(): + all_preds = hvd.allgather(pred) + all_targets = hvd.allgath(target) + else: + gathered_preds = [torch.zeros_like(pred) for _ in range(world_size)] + gathered_targets = [torch.zeros_like(target) for _ in range(world_size)] + + dist.all_gather(gathered_preds, pred) + dist.all_gather(gathered_targets, target) + all_preds = torch.cat(gathered_preds, dim=0) + all_targets = torch.cat(gathered_targets, dim=0) + + return all_preds, all_targets + + +def get_map(pred, target): + pred = torch.sigmoid(pred).numpy() + target = target.numpy() + return np.mean(average_precision_score(target, pred, 
average=None)) + + +def get_acc(pred, target): + pred = torch.argmax(pred, 1).numpy() + target = torch.argmax(target, 1).numpy() + return accuracy_score(target, pred) + + +def get_mauc(pred, target): + pred = torch.sigmoid(pred).numpy() + target = target.numpy() + return np.mean(roc_auc_score(target, pred, average=None)) + + +class LPMetrics(object): + def __init__(self, metric_names=["map", "acc", "mauc"]): + self.metrics = [] + for name in metric_names: + self.metrics.append(self.get_metric(name)) + self.metric_names = metric_names + + def get_metric(self, name): + if name == "map": + return get_map + elif name == "acc": + return get_acc + elif name == "mauc": + return get_mauc + else: + raise ValueError(f"the metric should be at least one of [map, acc, mauc]") + + def evaluate_mertics(self, pred, target): + metric_dict = {} + for i in range(len(self.metric_names)): + metric_dict[self.metric_names[i]] = self.metrics[i](pred, target) + return metric_dict + + +def calc_celoss(pred, target): + target = torch.argmax(target, 1).long() + return nn.CrossEntropyLoss()(pred, target) + + +class LPLoss(nn.Module): + def __init__(self, loss_name): + super().__init__() + if loss_name == "bce": + self.loss_func = nn.BCEWithLogitsLoss() + elif loss_name == "ce": + self.loss_func = calc_celoss + elif loss_name == "mse": + self.loss_func = nn.MSELoss() + else: + raise ValueError(f"the loss func should be at least one of [bce, ce, mse]") + + def forward(self, pred, target): + loss = self.loss_func(pred, target) + return loss diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model.py b/qa_mdt/audioldm_train/modules/clap/open_clip/model.py new file mode 100644 index 0000000000000000000000000000000000000000..75c5a955cef74cb9e2f0cc32aeaecdad48dc9c8a --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model.py @@ -0,0 +1,935 @@ +""" CLAP Model + +Adapted from CLIP: https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI. +Adapted to the Audio Task. +""" + +from collections import OrderedDict +from dataclasses import dataclass +from email.mime import audio +from typing import Tuple, Union, Callable, Optional + +import numpy as np +import torch +import torch.nn.functional as F +from torch import nn + +from .timm_model import TimmModel +import logging +from .utils import freeze_batch_norm_2d + +from .pann_model import create_pann_model +from .htsat import create_htsat_model +from transformers import BertModel, RobertaModel, BartModel +from transformers.tokenization_utils_base import BatchEncoding + +import json +with open('./qa_mdt/offset_pretrained_checkpoints.json', 'r') as config_file: + config_data = json.load(config_file) + +class MLPLayers(nn.Module): + def __init__(self, units=[512, 512, 512], nonlin=nn.ReLU(), dropout=0.1): + super(MLPLayers, self).__init__() + self.nonlin = nonlin + self.dropout = dropout + + sequence = [] + for u0, u1 in zip(units[:-1], units[1:]): + sequence.append(nn.Linear(u0, u1)) + sequence.append(self.nonlin) + sequence.append(nn.Dropout(self.dropout)) + sequence = sequence[:-2] + + self.sequential = nn.Sequential(*sequence) + + def forward(self, X): + X = self.sequential(X) + return X + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1): + super().__init__() + + # all conv layers have stride 1. 
an avgpool is performed after the second convolution when stride > 1 + self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + + self.conv2 = nn.Conv2d(planes, planes, 3, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + + self.avgpool = nn.AvgPool2d(stride) if stride > 1 else nn.Identity() + + self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + + self.relu = nn.ReLU(inplace=True) + self.downsample = None + self.stride = stride + + if stride > 1 or inplanes != planes * Bottleneck.expansion: + # downsampling layer is prepended with an avgpool, and the subsequent convolution has stride 1 + self.downsample = nn.Sequential( + OrderedDict( + [ + ("-1", nn.AvgPool2d(stride)), + ( + "0", + nn.Conv2d( + inplanes, + planes * self.expansion, + 1, + stride=1, + bias=False, + ), + ), + ("1", nn.BatchNorm2d(planes * self.expansion)), + ] + ) + ) + + def forward(self, x: torch.Tensor): + identity = x + + out = self.relu(self.bn1(self.conv1(x))) + out = self.relu(self.bn2(self.conv2(out))) + out = self.avgpool(out) + out = self.bn3(self.conv3(out)) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + return out + + +class AttentionPool2d(nn.Module): + def __init__( + self, spacial_dim: int, embed_dim: int, num_heads: int, output_dim: int = None + ): + super().__init__() + self.positional_embedding = nn.Parameter( + torch.randn(spacial_dim**2 + 1, embed_dim) / embed_dim**0.5 + ) + self.k_proj = nn.Linear(embed_dim, embed_dim) + self.q_proj = nn.Linear(embed_dim, embed_dim) + self.v_proj = nn.Linear(embed_dim, embed_dim) + self.c_proj = nn.Linear(embed_dim, output_dim or embed_dim) + self.num_heads = num_heads + + def forward(self, x): + x = x.reshape(x.shape[0], x.shape[1], x.shape[2] * x.shape[3]).permute( + 2, 0, 1 + ) # NCHW -> (HW)NC + x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (HW+1)NC + x = x + self.positional_embedding[:, None, :].to(x.dtype) # (HW+1)NC + x, _ = F.multi_head_attention_forward( + query=x, + key=x, + value=x, + embed_dim_to_check=x.shape[-1], + num_heads=self.num_heads, + q_proj_weight=self.q_proj.weight, + k_proj_weight=self.k_proj.weight, + v_proj_weight=self.v_proj.weight, + in_proj_weight=None, + in_proj_bias=torch.cat( + [self.q_proj.bias, self.k_proj.bias, self.v_proj.bias] + ), + bias_k=None, + bias_v=None, + add_zero_attn=False, + dropout_p=0, + out_proj_weight=self.c_proj.weight, + out_proj_bias=self.c_proj.bias, + use_separate_proj_weight=True, + training=self.training, + need_weights=False, + ) + + return x[0] + + +class ModifiedResNet(nn.Module): + """ + A ResNet class that is similar to torchvision's but contains the following changes: + - There are now 3 "stem" convolutions as opposed to 1, with an average pool instead of a max pool. 
+ - Performs anti-aliasing strided convolutions, where an avgpool is prepended to convolutions with stride > 1 + - The final pooling layer is a QKV attention instead of an average pool + """ + + def __init__(self, layers, output_dim, heads, image_size=224, width=64): + super().__init__() + self.output_dim = output_dim + self.image_size = image_size + + # the 3-layer stem + self.conv1 = nn.Conv2d( + 3, width // 2, kernel_size=3, stride=2, padding=1, bias=False + ) + self.bn1 = nn.BatchNorm2d(width // 2) + self.conv2 = nn.Conv2d( + width // 2, width // 2, kernel_size=3, padding=1, bias=False + ) + self.bn2 = nn.BatchNorm2d(width // 2) + self.conv3 = nn.Conv2d(width // 2, width, kernel_size=3, padding=1, bias=False) + self.bn3 = nn.BatchNorm2d(width) + self.avgpool = nn.AvgPool2d(2) + self.relu = nn.ReLU(inplace=True) + + # residual layers + self._inplanes = width # this is a *mutable* variable used during construction + self.layer1 = self._make_layer(width, layers[0]) + self.layer2 = self._make_layer(width * 2, layers[1], stride=2) + self.layer3 = self._make_layer(width * 4, layers[2], stride=2) + self.layer4 = self._make_layer(width * 8, layers[3], stride=2) + + embed_dim = width * 32 # the ResNet feature dimension + self.attnpool = AttentionPool2d(image_size // 32, embed_dim, heads, output_dim) + + self.init_parameters() + + def _make_layer(self, planes, blocks, stride=1): + layers = [Bottleneck(self._inplanes, planes, stride)] + + self._inplanes = planes * Bottleneck.expansion + for _ in range(1, blocks): + layers.append(Bottleneck(self._inplanes, planes)) + + return nn.Sequential(*layers) + + def init_parameters(self): + if self.attnpool is not None: + std = self.attnpool.c_proj.in_features**-0.5 + nn.init.normal_(self.attnpool.q_proj.weight, std=std) + nn.init.normal_(self.attnpool.k_proj.weight, std=std) + nn.init.normal_(self.attnpool.v_proj.weight, std=std) + nn.init.normal_(self.attnpool.c_proj.weight, std=std) + + for resnet_block in [self.layer1, self.layer2, self.layer3, self.layer4]: + for name, param in resnet_block.named_parameters(): + if name.endswith("bn3.weight"): + nn.init.zeros_(param) + + def lock(self, unlocked_groups=0, freeze_bn_stats=False): + assert ( + unlocked_groups == 0 + ), "partial locking not currently supported for this model" + for param in self.parameters(): + param.requires_grad = False + if freeze_bn_stats: + freeze_batch_norm_2d(self) + + def stem(self, x): + for conv, bn in [ + (self.conv1, self.bn1), + (self.conv2, self.bn2), + (self.conv3, self.bn3), + ]: + x = self.relu(bn(conv(x))) + x = self.avgpool(x) + return x + + def forward(self, x): + x = self.stem(x) + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.attnpool(x) + + return x + + +class LayerNorm(nn.LayerNorm): + """Subclass torch's LayerNorm to handle fp16.""" + + def forward(self, x: torch.Tensor): + orig_type = x.dtype + x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps) + return x.to(orig_type) + + +class QuickGELU(nn.Module): + # NOTE This is slower than nn.GELU or nn.SiLU and uses more GPU memory + def forward(self, x: torch.Tensor): + return x * torch.sigmoid(1.702 * x) + + +class ResidualAttentionBlock(nn.Module): + def __init__(self, d_model: int, n_head: int, act_layer: Callable = nn.GELU): + super().__init__() + + self.attn = nn.MultiheadAttention(d_model, n_head) + self.ln_1 = LayerNorm(d_model) + self.mlp = nn.Sequential( + OrderedDict( + [ + ("c_fc", nn.Linear(d_model, d_model * 4)), + ("gelu", act_layer()), 
+ ("c_proj", nn.Linear(d_model * 4, d_model)), + ] + ) + ) + self.ln_2 = LayerNorm(d_model) + + def attention(self, x: torch.Tensor, attn_mask: Optional[torch.Tensor] = None): + return self.attn(x, x, x, need_weights=False, attn_mask=attn_mask)[0] + + def forward(self, x: torch.Tensor, attn_mask: Optional[torch.Tensor] = None): + x = x + self.attention(self.ln_1(x), attn_mask=attn_mask) + x = x + self.mlp(self.ln_2(x)) + return x + + +class Transformer(nn.Module): + def __init__( + self, width: int, layers: int, heads: int, act_layer: Callable = nn.GELU + ): + super().__init__() + self.width = width + self.layers = layers + self.resblocks = nn.ModuleList( + [ + ResidualAttentionBlock(width, heads, act_layer=act_layer) + for _ in range(layers) + ] + ) + + def forward(self, x: torch.Tensor, attn_mask: Optional[torch.Tensor] = None): + for r in self.resblocks: + x = r(x, attn_mask=attn_mask) + return x + + +class VisualTransformer(nn.Module): + def __init__( + self, + image_size: int, + patch_size: int, + width: int, + layers: int, + heads: int, + output_dim: int, + act_layer: Callable = nn.GELU, + ): + super().__init__() + self.image_size = image_size + self.output_dim = output_dim + self.conv1 = nn.Conv2d( + in_channels=3, + out_channels=width, + kernel_size=patch_size, + stride=patch_size, + bias=False, + ) + + scale = width**-0.5 + self.class_embedding = nn.Parameter(scale * torch.randn(width)) + self.positional_embedding = nn.Parameter( + scale * torch.randn((image_size // patch_size) ** 2 + 1, width) + ) + self.ln_pre = LayerNorm(width) + + self.text_branch = Transformer(width, layers, heads, act_layer=act_layer) + + self.ln_post = LayerNorm(width) + self.proj = nn.Parameter(scale * torch.randn(width, output_dim)) + + def lock(self, unlocked_groups=0, freeze_bn_stats=False): + assert ( + unlocked_groups == 0 + ), "partial locking not currently supported for this model" + for param in self.parameters(): + param.requires_grad = False + + def forward(self, x: torch.Tensor): + x = self.conv1(x) # shape = [*, width, grid, grid] + x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2] + x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width] + x = torch.cat( + [ + self.class_embedding.to(x.dtype) + + torch.zeros( + x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device + ), + x, + ], + dim=1, + ) # shape = [*, grid ** 2 + 1, width] + x = x + self.positional_embedding.to(x.dtype) + x = self.ln_pre(x) + + x = x.permute(1, 0, 2) # NLD -> LND + x = self.text_branch(x) + x = x.permute(1, 0, 2) # LND -> NLD + + x = self.ln_post(x[:, 0, :]) + + if self.proj is not None: + x = x @ self.proj + + return x + + +@dataclass +class CLAPVisionCfg: + layers: Union[Tuple[int, int, int, int], int] = 12 + width: int = 768 + patch_size: int = 16 + image_size: Union[Tuple[int, int], int] = 224 + timm_model_name: str = ( + None # a valid model name overrides layers, width, patch_size + ) + timm_model_pretrained: bool = ( + False # use (imagenet) pretrained weights for named model + ) + timm_pool: str = ( + "avg" # feature pooling for timm model ('abs_attn', 'rot_attn', 'avg', '') + ) + timm_proj: str = ( + "linear" # linear projection for timm model output ('linear', 'mlp', '') + ) + + +# Audio Config Class +@dataclass +class CLAPAudioCfp: + model_type: str = "PANN" + model_name: str = "Cnn14" + sample_rate: int = 48000 + # Param + audio_length: int = 1024 + window_size: int = 1024 + hop_size: int = 1024 + fmin: int = 50 + fmax: int = 14000 + class_num: int = 527 + mel_bins: int = 64 + 
clip_samples: int = 480000 + + +@dataclass +class CLAPTextCfg: + context_length: int + vocab_size: int + width: int + heads: int + layers: int + model_type: str + + +class CLAP(nn.Module): + def __init__( + self, + embed_dim: int, + audio_cfg: CLAPAudioCfp, + text_cfg: CLAPTextCfg, + quick_gelu: bool = False, + enable_fusion: bool = False, + fusion_type: str = "None", + joint_embed_shape: int = 512, + mlp_act: str = "relu", + ): + super().__init__() + if isinstance(audio_cfg, dict): + audio_cfg = CLAPAudioCfp(**audio_cfg) + if isinstance(text_cfg, dict): + text_cfg = CLAPTextCfg(**text_cfg) + + self.audio_cfg = audio_cfg + self.text_cfg = text_cfg + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + self.joint_embed_shape = joint_embed_shape + self.mlp_act = mlp_act + + self.context_length = text_cfg.context_length + + # OpenAI models are pretrained w/ QuickGELU but native nn.GELU is both faster and more + # memory efficient in recent PyTorch releases (>= 1.10). + # NOTE: timm models always use native GELU regardless of quick_gelu flag. + act_layer = QuickGELU if quick_gelu else nn.GELU + + if mlp_act == "relu": + mlp_act_layer = nn.ReLU() + elif mlp_act == "gelu": + mlp_act_layer = nn.GELU() + else: + raise NotImplementedError + + # audio branch + # audio branch parameters + if audio_cfg.model_type == "PANN": + self.audio_branch = create_pann_model(audio_cfg, enable_fusion, fusion_type) + elif audio_cfg.model_type == "HTSAT": + self.audio_branch = create_htsat_model( + audio_cfg, enable_fusion, fusion_type + ) + else: + logging.error(f"Model config for {audio_cfg.model_type} not found") + raise RuntimeError(f"Model config for {audio_cfg.model_type} not found.") + + # text branch + # text branch parameters + if text_cfg.model_type == "transformer": + self.text_branch = Transformer( + width=text_cfg.width, + layers=text_cfg.layers, + heads=text_cfg.heads, + act_layer=act_layer, + ) + self.vocab_size = text_cfg.vocab_size + self.token_embedding = nn.Embedding(text_cfg.vocab_size, text_cfg.width) + self.positional_embedding = nn.Parameter( + torch.empty(self.context_length, text_cfg.width) + ) + self.ln_final = LayerNorm(text_cfg.width) + self.text_transform = MLPLayers( + units=[ + self.joint_embed_shape, + self.joint_embed_shape, + self.joint_embed_shape, + ], + dropout=0.1, + ) + self.text_projection = nn.Sequential( + nn.Linear(text_cfg.width, self.joint_embed_shape), + mlp_act_layer, + nn.Linear(self.joint_embed_shape, self.joint_embed_shape), + ) + elif text_cfg.model_type == "bert": + self.text_branch = BertModel.from_pretrained("/train20/intern/permanent/changli7/dataset_ptm/bert_base_uncased") + self.text_transform = MLPLayers( + units=[ + self.joint_embed_shape, + self.joint_embed_shape, + self.joint_embed_shape, + ], + dropout=0.1, + ) + self.text_projection = nn.Sequential( + nn.Linear(768, self.joint_embed_shape), + mlp_act_layer, + nn.Linear(self.joint_embed_shape, self.joint_embed_shape), + ) + elif text_cfg.model_type == "roberta": + self.text_branch = RobertaModel.from_pretrained(config_data["roberta-base"]) + self.text_transform = MLPLayers( + units=[ + self.joint_embed_shape, + self.joint_embed_shape, + self.joint_embed_shape, + ], + dropout=0.1, + ) + self.text_projection = nn.Sequential( + nn.Linear(768, self.joint_embed_shape), + mlp_act_layer, + nn.Linear(self.joint_embed_shape, self.joint_embed_shape), + ) + elif text_cfg.model_type == "bart": + self.text_branch = BartModel.from_pretrained("/train20/intern/permanent/changli7/dataset_ptm/bart-base") + 
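            # As in the bert/roberta branches above, the 768-d backbone output is
            # mapped into the joint audio-text space by a two-layer projection;
            # text_transform is the extra MLP head that feeds the mlp_loss variant
            # of the contrastive objective (see ClipLoss in open_clip/loss.py).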
self.text_transform = MLPLayers( + units=[ + self.joint_embed_shape, + self.joint_embed_shape, + self.joint_embed_shape, + ], + dropout=0.1, + ) + self.text_projection = nn.Sequential( + nn.Linear(768, self.joint_embed_shape), + mlp_act_layer, + nn.Linear(self.joint_embed_shape, self.joint_embed_shape), + ) + else: + logging.error(f"Model config for {text_cfg.model_type} not found") + raise RuntimeError(f"Model config for {text_cfg.model_type} not found.") + self.text_branch_type = text_cfg.model_type + # text branch parameters + + # audio branch parameters + self.audio_transform = MLPLayers( + units=[ + self.joint_embed_shape, + self.joint_embed_shape, + self.joint_embed_shape, + ], + dropout=0.1, + ) + + # below here is text branch parameters + + # ============================================================================================================ + self.audio_projection = nn.Sequential( + nn.Linear(embed_dim, self.joint_embed_shape), + mlp_act_layer, + nn.Linear(self.joint_embed_shape, self.joint_embed_shape), + ) + + self.logit_scale_a = nn.Parameter(torch.ones([]) * np.log(1 / 0.07)) + self.logit_scale_t = nn.Parameter(torch.ones([]) * np.log(1 / 0.07)) + self.register_buffer("attn_mask", self.build_attention_mask(), persistent=False) + + self.init_text_branch_parameters() + + def init_text_branch_parameters(self): + if self.text_branch_type == "transformer": + nn.init.normal_(self.token_embedding.weight, std=0.02) + nn.init.normal_(self.positional_embedding, std=0.01) + proj_std = (self.text_branch.width**-0.5) * ( + (2 * self.text_branch.layers) ** -0.5 + ) + attn_std = self.text_branch.width**-0.5 + fc_std = (2 * self.text_branch.width) ** -0.5 + for block in self.text_branch.resblocks: + nn.init.normal_(block.attn.in_proj_weight, std=attn_std) + nn.init.normal_(block.attn.out_proj.weight, std=proj_std) + nn.init.normal_(block.mlp.c_fc.weight, std=fc_std) + nn.init.normal_(block.mlp.c_proj.weight, std=proj_std) + if self.text_branch_type == "bert" or self.text_branch_type == "roberta": + width = self.text_branch.embeddings.word_embeddings.weight.shape[-1] + elif self.text_branch_type == "bart": + width = self.text_branch.shared.weight.shape[-1] + else: + width = self.text_branch.width + nn.init.constant_(self.logit_scale_a, np.log(1 / 0.07)) + nn.init.constant_(self.logit_scale_t, np.log(1 / 0.07)) + + # deprecated + # if hasattr(self.visual, 'init_parameters'): + # self.visual.init_parameters() + + # if self.text_projection is not None: + # nn.init.normal_(self.text_projection, std=width**-0.5) + + def build_attention_mask(self): + # lazily create causal attention mask, with full attention between the vision tokens + # pytorch uses additive attention mask; fill with -inf + mask = torch.empty(self.context_length, self.context_length) + mask.fill_(float("-inf")) + mask.triu_(1) # zero out the lower diagonal + return mask + + def encode_audio(self, audio, device): + return self.audio_branch( + audio, mixup_lambda=None, device=device + ) # mix lambda needs to add + + # def list_of_dict_of_tensor2dict_of_tensor(self, x, device): + # tmp = {} + # for k in x[0].keys(): + # tmp[k] = [] + # for i in range(len(x)): + # tmp[k].append(x[i][k][:77]) + # for k in x[0].keys(): + # tmp[k] = torch.tensor(tmp[k]).to(device=device, non_blocking=True) + # return tmp + + def encode_text(self, text, device): + if self.text_branch_type == "transformer": + text = text.to(device=device, non_blocking=True) + x = self.token_embedding(text) # [batch_size, n_ctx, d_model] + + x = x + 
self.positional_embedding + x = x.permute(1, 0, 2) # NLD -> LND + x = self.text_branch(x, attn_mask=self.attn_mask) + x = x.permute(1, 0, 2) # LND -> NLD + x = self.ln_final(x) + + # x.shape = [batch_size, n_ctx, transformer.width] + # take features from the eot embedding (eot_token is the highest number in each sequence) + x = self.text_projection(x[torch.arange(x.shape[0]), text.argmax(dim=-1)]) + elif self.text_branch_type == "bert": + # text = self.list_of_dict_of_tensor2dict_of_tensor(text, device) + # text = BatchEncoding(text) + x = self.text_branch( + input_ids=text["input_ids"].to(device=device, non_blocking=True), + attention_mask=text["attention_mask"].to( + device=device, non_blocking=True + ), + token_type_ids=text["token_type_ids"].to( + device=device, non_blocking=True + ), + )["pooler_output"] + x = self.text_projection(x) + elif self.text_branch_type == "roberta": + x = self.text_branch( + input_ids=text["input_ids"].to(device=device, non_blocking=True), + attention_mask=text["attention_mask"].to( + device=device, non_blocking=True + ), + )["pooler_output"] + x = self.text_projection(x) + elif self.text_branch_type == "bart": + x = torch.mean( + self.text_branch( + input_ids=text["input_ids"].to(device=device, non_blocking=True), + attention_mask=text["attention_mask"].to( + device=device, non_blocking=True + ), + )["encoder_last_hidden_state"], + axis=1, + ) + x = self.text_projection(x) + else: + logging.error(f"Model type {self.text_branch_type} not found") + raise RuntimeError(f"Model type {self.text_branch_type} not found.") + return x + + def forward(self, audio, text, device=None): + """Forward audio and text into the CLAP + + Parameters + ---------- + audio: torch.Tensor (batch_size, audio_length) + the time-domain audio input / the batch of mel_spec and longer list. 
+ text: torch.Tensor () // need to add + the text token input + """ + if device is None: + if audio is not None: + device = audio.device + elif text is not None: + device = text.device + if audio is None and text is None: + # a hack to get the logit scale + return self.logit_scale_a.exp(), self.logit_scale_t.exp() + elif audio is None: + return self.encode_text(text, device=device) + elif text is None: + return self.audio_projection( + self.encode_audio(audio, device=device)["embedding"] + ) + audio_features = self.audio_projection( + self.encode_audio(audio, device=device)["embedding"] + ) + audio_features = F.normalize(audio_features, dim=-1) + + text_features = self.encode_text(text, device=device) + # print("text_features", text_features) + # print("text_features.shape", text_features.shape) + # print("text_features.type", type(text_features)) + text_features = F.normalize(text_features, dim=-1) + + audio_features_mlp = self.audio_transform(audio_features) + text_features_mlp = self.text_transform(text_features) + # Four outputs: audio features (basic & MLP), text features (basic & MLP) + return ( + audio_features, + text_features, + audio_features_mlp, + text_features_mlp, + self.logit_scale_a.exp(), + self.logit_scale_t.exp(), + ) + + def get_logit_scale(self): + return self.logit_scale_a.exp(), self.logit_scale_t.exp() + + def get_text_embedding(self, data): + """Get the text embedding from the model + + Parameters + ---------- + data: torch.Tensor + a tensor of text embedding + + Returns + ---------- + text_embed: torch.Tensor + a tensor of text_embeds (N, D) + + """ + device = next(self.parameters()).device + for k in data: + data[k] = data[k].to(device) + text_embeds = self.encode_text(data, device=device) + text_embeds = F.normalize(text_embeds, dim=-1) + + return text_embeds + + def get_audio_embedding(self, data): + """Get the audio embedding from the model + + Parameters + ---------- + data: a list of dict + the audio input dict list from 'get_audio_feature' method + + Returns + ---------- + audio_embed: torch.Tensor + a tensor of audio_embeds (N, D) + + """ + device = next(self.parameters()).device + # input_dict = {} + # keys = data[0].keys() + # for k in keys: + # input_dict[k] = torch.cat([d[k].unsqueeze(0) for d in data], dim=0).to( + # device + # ) + audio_embeds = self.audio_projection( + self.encode_audio(data, device=device)["embedding"] + ) + audio_embeds = F.normalize(audio_embeds, dim=-1) + + return audio_embeds + + def audio_infer(self, audio, hopsize=None, device=None): + """Forward one audio and produce the audio embedding + + Parameters + ---------- + audio: (audio_length) + the time-domain audio input, notice that it must be only one input + hopsize: int + the overlap hopsize as the sliding window + + Returns + ---------- + output_dict: { + key: [n, (embedding_shape)] if "HTS-AT" + or + key: [(embedding_shape)] if "PANN" + } + the list of key values of the audio branch + + """ + + assert not self.training, "the inference mode must be run at eval stage" + output_dict = {} + # PANN + if self.audio_cfg.model_type == "PANN": + audio_input = audio.unsqueeze(dim=0) + output_dict[key] = self.encode_audio(audio_input, device=device)[ + key + ].squeeze(dim=0) + elif self.audio_cfg.model_type == "HTSAT": + # repeat + audio_len = len(audio) + k = self.audio_cfg.clip_samples // audio_len + if k > 1: + audio = audio.repeat(k) + audio_len = len(audio) + + if hopsize is None: + hopsize = min(hopsize, audio_len) + + if audio_len > self.audio_cfg.clip_samples: + audio_input = 
[ + audio[pos : pos + self.audio_cfg.clip_samples].clone() + for pos in range( + 0, audio_len - self.audio_cfg.clip_samples, hopsize + ) + ] + audio_input.append(audio[-self.audio_cfg.clip_samples :].clone()) + audio_input = torch.stack(audio_input) + output_dict[key] = self.encode_audio(audio_input, device=device)[key] + else: + audio_input = audio.unsqueeze(dim=0) + output_dict[key] = self.encode_audio(audio_input, device=device)[ + key + ].squeeze(dim=0) + + return output_dict + + +def convert_weights_to_fp16(model: nn.Module): + """Convert applicable model parameters to fp16""" + + def _convert_weights_to_fp16(l): + if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)): + l.weight.data = l.weight.data.half() + if l.bias is not None: + l.bias.data = l.bias.data.half() + + if isinstance(l, nn.MultiheadAttention): + for attr in [ + *[f"{s}_proj_weight" for s in ["in", "q", "k", "v"]], + "in_proj_bias", + "bias_k", + "bias_v", + ]: + tensor = getattr(l, attr) + if tensor is not None: + tensor.data = tensor.data.half() + + for name in ["text_projection", "proj"]: + if hasattr(l, name): + attr = getattr(l, name) + if attr is not None: + attr.data = attr.data.half() + + model.apply(_convert_weights_to_fp16) + + +# Ignore the state dict of the vision part +def build_model_from_openai_state_dict( + state_dict: dict, model_cfg, enable_fusion: bool = False, fusion_type: str = "None" +): + embed_dim = model_cfg["embed_dim"] + audio_cfg = model_cfg["audio_cfg"] + text_cfg = model_cfg["text_cfg"] + context_length = state_dict["positional_embedding"].shape[0] + vocab_size = state_dict["token_embedding.weight"].shape[0] + transformer_width = state_dict["ln_final.weight"].shape[0] + transformer_heads = transformer_width // 64 + transformer_layers = len( + set( + k.split(".")[2] + for k in state_dict + if k.startswith(f"transformer.resblocks") + ) + ) + + audio_cfg = CLAPAudioCfp(**audio_cfg) + text_cfg = CLAPTextCfg(**text_cfg) + + model = CLAP( + embed_dim, + audio_cfg=audio_cfg, + text_cfg=text_cfg, + quick_gelu=True, # OpenAI models were trained with QuickGELU + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + state_dict["logit_scale_a"] = state_dict["logit_scale"] + state_dict["logit_scale_t"] = state_dict["logit_scale"] + pop_keys = list(state_dict.keys())[::] + # pop the visual branch saved weights + for key in pop_keys: + if key.startswith("visual."): + state_dict.pop(key, None) + + for key in ["logit_scale", "input_resolution", "context_length", "vocab_size"]: + state_dict.pop(key, None) + + # not use fp16 + # convert_weights_to_fp16(model) + model.load_state_dict(state_dict, strict=False) + return model.eval() + + +def trace_model(model, batch_size=256, device=torch.device("cpu")): + model.eval() + audio_length = model.audio_cfg.audio_length + example_audio = torch.ones((batch_size, audio_length), device=device) + example_text = torch.zeros( + (batch_size, model.context_length), dtype=torch.int, device=device + ) + model = torch.jit.trace_module( + model, + inputs=dict( + forward=(example_audio, example_text), + encode_text=(example_text,), + encode_image=(example_audio,), + ), + ) + model.audio_cfg.audio_length = audio_length # Question: what does this do? 
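+ # presumably this re-attaches audio_length because torch.jit.trace_module may not carry
+ # the non-tensor audio_cfg attribute onto the traced module; it is a no-op when tracing
+ # preserves audio_cfg, since the same value was read from it just above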
+ return model diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-base.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-base.json new file mode 100644 index 0000000000000000000000000000000000000000..6cef625a89daf4431f1c9f72e10bc9640eef2ba8 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-base.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 1024, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "HTSAT", + "model_name": "base" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-large.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-large.json new file mode 100644 index 0000000000000000000000000000000000000000..699cdb1b16855582606551e4196b24aba2ffd871 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-large.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 2048, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "HTSAT", + "model_name": "large" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-tiny-win-1536.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-tiny-win-1536.json new file mode 100644 index 0000000000000000000000000000000000000000..73e42990fe8361a0df502e7f93d29f19f58c9ecb --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-tiny-win-1536.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 768, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1536, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "HTSAT", + "model_name": "tiny" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-tiny.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-tiny.json new file mode 100644 index 0000000000000000000000000000000000000000..a6e7821163d9afa81c27345a1e472475b92af169 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/HTSAT-tiny.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 768, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "HTSAT", + "model_name": "tiny" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-10.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-10.json new file mode 100644 index 0000000000000000000000000000000000000000..954ddf62921aed7dde9c37ffffec98a2e96a4ee7 --- 
/dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-10.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 1024, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "PANN", + "model_name": "Cnn10" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-fmax-18k.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-fmax-18k.json new file mode 100644 index 0000000000000000000000000000000000000000..b7989bc0cd95d0d39049b7524eba508b3e386439 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-fmax-18k.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 2048, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 18000, + "class_num": 527, + "model_type": "PANN", + "model_name": "Cnn14" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-fmax-8k-20s.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-fmax-8k-20s.json new file mode 100644 index 0000000000000000000000000000000000000000..56bdb56bedc304ffa52d8bf5988cea2c1d82d14e --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-fmax-8k-20s.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 2048, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 960000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 360, + "fmin": 50, + "fmax": 8000, + "class_num": 527, + "model_type": "PANN", + "model_name": "Cnn14" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-tiny-transformer.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-tiny-transformer.json new file mode 100644 index 0000000000000000000000000000000000000000..5756e3bebc97cc985f512cb081930fee4e49bec1 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-tiny-transformer.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 2048, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "PANN", + "model_name": "Cnn14" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 4 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-win-1536.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-win-1536.json new file mode 100644 index 0000000000000000000000000000000000000000..5a9e7e208b661619d5e26625e849da1adda8a475 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14-win-1536.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 2048, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + 
"sample_rate": 48000, + "window_size": 1536, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "PANN", + "model_name": "Cnn14" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14.json new file mode 100644 index 0000000000000000000000000000000000000000..39a5134cde1d8c50f4758377c952ef22f07bab41 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-14.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 2048, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "PANN", + "model_name": "Cnn14" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-6.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-6.json new file mode 100644 index 0000000000000000000000000000000000000000..21ebc344326de260c386ba77e0ad63cf9b04febf --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/PANN-6.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 512, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "PANN", + "model_name": "Cnn6" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN101-quickgelu.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN101-quickgelu.json new file mode 100644 index 0000000000000000000000000000000000000000..d0db2c161d13138788c4609d373b023b8454d624 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN101-quickgelu.json @@ -0,0 +1,22 @@ +{ + "embed_dim": 512, + "quick_gelu": true, + "vision_cfg": { + "image_size": 224, + "layers": [ + 3, + 4, + 23, + 3 + ], + "width": 64, + "patch_size": null + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN101.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN101.json new file mode 100644 index 0000000000000000000000000000000000000000..b88b4d3acbaa701c614ab0ea65fc88fcfe289c32 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN101.json @@ -0,0 +1,21 @@ +{ + "embed_dim": 512, + "vision_cfg": { + "image_size": 224, + "layers": [ + 3, + 4, + 23, + 3 + ], + "width": 64, + "patch_size": null + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50-quickgelu.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50-quickgelu.json new file mode 100644 index 0000000000000000000000000000000000000000..8c2f91260cdeb043434dc1e893cce81d4ce7f0d1 
--- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50-quickgelu.json @@ -0,0 +1,22 @@ +{ + "embed_dim": 1024, + "quick_gelu": true, + "vision_cfg": { + "image_size": 224, + "layers": [ + 3, + 4, + 6, + 3 + ], + "width": 64, + "patch_size": null + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50.json new file mode 100644 index 0000000000000000000000000000000000000000..33aa884d54fee0076c33676831e49d5e1ffcb8f2 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50.json @@ -0,0 +1,21 @@ +{ + "embed_dim": 1024, + "vision_cfg": { + "image_size": 224, + "layers": [ + 3, + 4, + 6, + 3 + ], + "width": 64, + "patch_size": null + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50x16.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50x16.json new file mode 100644 index 0000000000000000000000000000000000000000..3161e1a2c9a839161e652a4d729c2cdc971161db --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50x16.json @@ -0,0 +1,21 @@ +{ + "embed_dim": 768, + "vision_cfg": { + "image_size": 384, + "layers": [ + 6, + 8, + 18, + 8 + ], + "width": 96, + "patch_size": null + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 768, + "heads": 12, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50x4.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50x4.json new file mode 100644 index 0000000000000000000000000000000000000000..e155237f8ce1026aaaeecc80751eabe6f329f0bb --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/RN50x4.json @@ -0,0 +1,21 @@ +{ + "embed_dim": 640, + "vision_cfg": { + "image_size": 288, + "layers": [ + 4, + 6, + 10, + 6 + ], + "width": 80, + "patch_size": null + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 640, + "heads": 10, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-16.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-16.json new file mode 100644 index 0000000000000000000000000000000000000000..395eea77ec3907c0611531aba63459b193e67b9c --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-16.json @@ -0,0 +1,16 @@ +{ + "embed_dim": 512, + "vision_cfg": { + "image_size": 224, + "layers": 12, + "width": 768, + "patch_size": 16 + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-32-quickgelu.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-32-quickgelu.json new file mode 100644 index 0000000000000000000000000000000000000000..ce6bd923593293ed50dfcfb28b73ca7403bcf3c5 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-32-quickgelu.json @@ -0,0 +1,17 @@ +{ + "embed_dim": 512, + "quick_gelu": true, + "vision_cfg": { + "image_size": 224, + "layers": 12, + "width": 768, + 
"patch_size": 32 + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-32.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-32.json new file mode 100644 index 0000000000000000000000000000000000000000..07c8e28eb06fa1813ba932fe4eec668262d1c47f --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-B-32.json @@ -0,0 +1,16 @@ +{ + "embed_dim": 512, + "vision_cfg": { + "image_size": 224, + "layers": 12, + "width": 768, + "patch_size": 32 + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-L-14.json b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-L-14.json new file mode 100644 index 0000000000000000000000000000000000000000..d4a4bbb1dd4ed4edb317d3ace4f3ad13b211c241 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/model_configs/ViT-L-14.json @@ -0,0 +1,16 @@ +{ + "embed_dim": 768, + "vision_cfg": { + "image_size": 224, + "layers": 24, + "width": 1024, + "patch_size": 14 + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 768, + "heads": 12, + "layers": 12 + } +} \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/openai.py b/qa_mdt/audioldm_train/modules/clap/open_clip/openai.py new file mode 100644 index 0000000000000000000000000000000000000000..3f4eb8b55fe960e1792b3da804b60b3d8f70fe26 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/openai.py @@ -0,0 +1,156 @@ +""" OpenAI pretrained model functions + +Adapted from https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI. +""" + +import os +import warnings +from typing import Union, List + +import torch + +from .model import build_model_from_openai_state_dict +from .pretrained import ( + get_pretrained_url, + list_pretrained_tag_models, + download_pretrained, +) + +__all__ = ["list_openai_models", "load_openai_model"] + + +def list_openai_models() -> List[str]: + """Returns the names of available CLIP models""" + return list_pretrained_tag_models("openai") + + +def load_openai_model( + name: str, + model_cfg, + device: Union[str, torch.device] = "cuda" if torch.cuda.is_available() else "cpu", + jit=True, + cache_dir=os.path.expanduser("~/.cache/clip"), + enable_fusion: bool = False, + fusion_type: str = "None", +): + """Load a CLIP model, preserve its text pretrained part, and set in the CLAP model + + Parameters + ---------- + name : str + A model name listed by `clip.available_models()`, or the path to a model checkpoint containing the state_dict + device : Union[str, torch.device] + The device to put the loaded model + jit : bool + Whether to load the optimized JIT model (default) or more hackable non-JIT model. 
+ + Returns + ------- + model : torch.nn.Module + The CLAP model + preprocess : Callable[[PIL.Image], torch.Tensor] + A torchvision transform that converts a PIL image into a tensor that the returned model can take as its input + """ + if get_pretrained_url(name, "openai"): + model_path = download_pretrained( + get_pretrained_url(name, "openai"), root=cache_dir + ) + elif os.path.isfile(name): + model_path = name + else: + raise RuntimeError( + f"Model {name} not found; available models = {list_openai_models()}" + ) + + try: + # loading JIT archive + model = torch.jit.load(model_path, map_location=device if jit else "cpu").eval() + state_dict = None + except RuntimeError: + # loading saved state dict + if jit: + warnings.warn( + f"File {model_path} is not a JIT archive. Loading as a state dict instead" + ) + jit = False + state_dict = torch.load(model_path, map_location="cpu") + + if not jit: + try: + model = build_model_from_openai_state_dict( + state_dict or model.state_dict(), model_cfg, enable_fusion, fusion_type + ).to(device) + except KeyError: + sd = {k[7:]: v for k, v in state_dict["state_dict"].items()} + model = build_model_from_openai_state_dict( + sd, model_cfg, enable_fusion, fusion_type + ).to(device) + + if str(device) == "cpu": + model.float() + return model + + # patch the device names + device_holder = torch.jit.trace( + lambda: torch.ones([]).to(torch.device(device)), example_inputs=[] + ) + device_node = [ + n + for n in device_holder.graph.findAllNodes("prim::Constant") + if "Device" in repr(n) + ][-1] + + def patch_device(module): + try: + graphs = [module.graph] if hasattr(module, "graph") else [] + except RuntimeError: + graphs = [] + + if hasattr(module, "forward1"): + graphs.append(module.forward1.graph) + + for graph in graphs: + for node in graph.findAllNodes("prim::Constant"): + if "value" in node.attributeNames() and str(node["value"]).startswith( + "cuda" + ): + node.copyAttributes(device_node) + + model.apply(patch_device) + patch_device(model.encode_audio) + patch_device(model.encode_text) + + # patch dtype to float32 on CPU + if str(device) == "cpu": + float_holder = torch.jit.trace( + lambda: torch.ones([]).float(), example_inputs=[] + ) + float_input = list(float_holder.graph.findNode("aten::to").inputs())[1] + float_node = float_input.node() + + def patch_float(module): + try: + graphs = [module.graph] if hasattr(module, "graph") else [] + except RuntimeError: + graphs = [] + + if hasattr(module, "forward1"): + graphs.append(module.forward1.graph) + + for graph in graphs: + for node in graph.findAllNodes("aten::to"): + inputs = list(node.inputs()) + for i in [ + 1, + 2, + ]: # dtype can be the second or third argument to aten::to() + if inputs[i].node()["value"] == 5: + inputs[i].node().copyAttributes(float_node) + + model.apply(patch_float) + patch_float(model.encode_audio) + patch_float(model.encode_text) + model.float() + + model.audio_branch.audio_length = model.audio_cfg.audio_length + return model diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/pann_model.py b/qa_mdt/audioldm_train/modules/clap/open_clip/pann_model.py new file mode 100644 index 0000000000000000000000000000000000000000..c96170c7c16dc352c99d87d02fc38ba4b3527f15 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/pann_model.py @@ -0,0 +1,697 @@ +# PANNs: Large-Scale Pretrained Audio Neural Networks for Audio Pattern Recognition +# Reference from https://github.com/qiuqiangkong/audioset_tagging_cnn +# Some layers are re-designed for CLAP +import os + 
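+# librosa/torchlibrosa pull in numba, whose JIT cache defaults to a per-package directory;
+# redirecting it to /tmp presumably avoids write-permission errors in read-only installs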
+os.environ["NUMBA_CACHE_DIR"] = "/tmp/" + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torchlibrosa.stft import Spectrogram, LogmelFilterBank +from torchlibrosa.augmentation import SpecAugmentation + +from .utils import do_mixup, interpolate, pad_framewise_output +from .feature_fusion import iAFF, AFF, DAF + + +def init_layer(layer): + """Initialize a Linear or Convolutional layer.""" + nn.init.xavier_uniform_(layer.weight) + + if hasattr(layer, "bias"): + if layer.bias is not None: + layer.bias.data.fill_(0.0) + + +def init_bn(bn): + """Initialize a Batchnorm layer.""" + bn.bias.data.fill_(0.0) + bn.weight.data.fill_(1.0) + + +class ConvBlock(nn.Module): + def __init__(self, in_channels, out_channels): + super(ConvBlock, self).__init__() + + self.conv1 = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=(3, 3), + stride=(1, 1), + padding=(1, 1), + bias=False, + ) + + self.conv2 = nn.Conv2d( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=(3, 3), + stride=(1, 1), + padding=(1, 1), + bias=False, + ) + + self.bn1 = nn.BatchNorm2d(out_channels) + self.bn2 = nn.BatchNorm2d(out_channels) + + self.init_weight() + + def init_weight(self): + init_layer(self.conv1) + init_layer(self.conv2) + init_bn(self.bn1) + init_bn(self.bn2) + + def forward(self, input, pool_size=(2, 2), pool_type="avg"): + x = input + x = F.relu_(self.bn1(self.conv1(x))) + x = F.relu_(self.bn2(self.conv2(x))) + if pool_type == "max": + x = F.max_pool2d(x, kernel_size=pool_size) + elif pool_type == "avg": + x = F.avg_pool2d(x, kernel_size=pool_size) + elif pool_type == "avg+max": + x1 = F.avg_pool2d(x, kernel_size=pool_size) + x2 = F.max_pool2d(x, kernel_size=pool_size) + x = x1 + x2 + else: + raise Exception("Incorrect argument!") + + return x + + +class ConvBlock5x5(nn.Module): + def __init__(self, in_channels, out_channels): + super(ConvBlock5x5, self).__init__() + + self.conv1 = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=(5, 5), + stride=(1, 1), + padding=(2, 2), + bias=False, + ) + + self.bn1 = nn.BatchNorm2d(out_channels) + + self.init_weight() + + def init_weight(self): + init_layer(self.conv1) + init_bn(self.bn1) + + def forward(self, input, pool_size=(2, 2), pool_type="avg"): + x = input + x = F.relu_(self.bn1(self.conv1(x))) + if pool_type == "max": + x = F.max_pool2d(x, kernel_size=pool_size) + elif pool_type == "avg": + x = F.avg_pool2d(x, kernel_size=pool_size) + elif pool_type == "avg+max": + x1 = F.avg_pool2d(x, kernel_size=pool_size) + x2 = F.max_pool2d(x, kernel_size=pool_size) + x = x1 + x2 + else: + raise Exception("Incorrect argument!") + + return x + + +class AttBlock(nn.Module): + def __init__(self, n_in, n_out, activation="linear", temperature=1.0): + super(AttBlock, self).__init__() + + self.activation = activation + self.temperature = temperature + self.att = nn.Conv1d( + in_channels=n_in, + out_channels=n_out, + kernel_size=1, + stride=1, + padding=0, + bias=True, + ) + self.cla = nn.Conv1d( + in_channels=n_in, + out_channels=n_out, + kernel_size=1, + stride=1, + padding=0, + bias=True, + ) + + self.bn_att = nn.BatchNorm1d(n_out) + self.init_weights() + + def init_weights(self): + init_layer(self.att) + init_layer(self.cla) + init_bn(self.bn_att) + + def forward(self, x): + # x: (n_samples, n_in, n_time) + norm_att = torch.softmax(torch.clamp(self.att(x), -10, 10), dim=-1) + cla = self.nonlinear_transform(self.cla(x)) + x = torch.sum(norm_att * cla, dim=2) + return x, norm_att, 
cla + + def nonlinear_transform(self, x): + if self.activation == "linear": + return x + elif self.activation == "sigmoid": + return torch.sigmoid(x) + + +class Cnn14(nn.Module): + def __init__( + self, + sample_rate, + window_size, + hop_size, + mel_bins, + fmin, + fmax, + classes_num, + enable_fusion=False, + fusion_type="None", + ): + super(Cnn14, self).__init__() + + window = "hann" + center = True + pad_mode = "reflect" + ref = 1.0 + amin = 1e-10 + top_db = None + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + # Spectrogram extractor + self.spectrogram_extractor = Spectrogram( + n_fft=window_size, + hop_length=hop_size, + win_length=window_size, + window=window, + center=center, + pad_mode=pad_mode, + freeze_parameters=True, + ) + + # Logmel feature extractor + self.logmel_extractor = LogmelFilterBank( + sr=sample_rate, + n_fft=window_size, + n_mels=mel_bins, + fmin=fmin, + fmax=fmax, + ref=ref, + amin=amin, + top_db=top_db, + freeze_parameters=True, + ) + + # Spec augmenter + self.spec_augmenter = SpecAugmentation( + time_drop_width=64, + time_stripes_num=2, + freq_drop_width=8, + freq_stripes_num=2, + ) + + self.bn0 = nn.BatchNorm2d(64) + + if (self.enable_fusion) and (self.fusion_type == "channel_map"): + self.conv_block1 = ConvBlock(in_channels=4, out_channels=64) + else: + self.conv_block1 = ConvBlock(in_channels=1, out_channels=64) + self.conv_block2 = ConvBlock(in_channels=64, out_channels=128) + self.conv_block3 = ConvBlock(in_channels=128, out_channels=256) + self.conv_block4 = ConvBlock(in_channels=256, out_channels=512) + self.conv_block5 = ConvBlock(in_channels=512, out_channels=1024) + self.conv_block6 = ConvBlock(in_channels=1024, out_channels=2048) + + self.fc1 = nn.Linear(2048, 2048, bias=True) + self.fc_audioset = nn.Linear(2048, classes_num, bias=True) + + if (self.enable_fusion) and ( + self.fusion_type in ["daf_1d", "aff_1d", "iaff_1d"] + ): + self.mel_conv1d = nn.Sequential( + nn.Conv1d(64, 64, kernel_size=5, stride=3, padding=2), + nn.BatchNorm1d(64), # No Relu + ) + if self.fusion_type == "daf_1d": + self.fusion_model = DAF() + elif self.fusion_type == "aff_1d": + self.fusion_model = AFF(channels=64, type="1D") + elif self.fusion_type == "iaff_1d": + self.fusion_model = iAFF(channels=64, type="1D") + + if (self.enable_fusion) and ( + self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d"] + ): + self.mel_conv2d = nn.Sequential( + nn.Conv2d(1, 64, kernel_size=(5, 5), stride=(6, 2), padding=(2, 2)), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + ) + + if self.fusion_type == "daf_2d": + self.fusion_model = DAF() + elif self.fusion_type == "aff_2d": + self.fusion_model = AFF(channels=64, type="2D") + elif self.fusion_type == "iaff_2d": + self.fusion_model = iAFF(channels=64, type="2D") + self.init_weight() + + def init_weight(self): + init_bn(self.bn0) + init_layer(self.fc1) + init_layer(self.fc_audioset) + + def forward(self, input, mixup_lambda=None, device=None): + """ + Input: (batch_size, data_length)""" + + if self.enable_fusion and input["longer"].sum() == 0: + # if no audio is longer than 10s, then randomly select one audio to be longer + input["longer"][torch.randint(0, input["longer"].shape[0], (1,))] = True + + if not self.enable_fusion: + x = self.spectrogram_extractor( + input["waveform"].to(device=device, non_blocking=True) + ) # (batch_size, 1, time_steps, freq_bins) + x = self.logmel_extractor(x) # (batch_size, 1, time_steps, mel_bins) + + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + else: + longer_list = 
input["longer"].to(device=device, non_blocking=True) + x = input["mel_fusion"].to(device=device, non_blocking=True) + longer_list_idx = torch.where(longer_list)[0] + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + if self.fusion_type in ["daf_1d", "aff_1d", "iaff_1d"]: + new_x = x[:, 0:1, :, :].clone().contiguous() + # local processing + if len(longer_list_idx) > 0: + fusion_x_local = x[longer_list_idx, 1:, :, :].clone().contiguous() + FB, FC, FT, FF = fusion_x_local.size() + fusion_x_local = fusion_x_local.view(FB * FC, FT, FF) + fusion_x_local = torch.permute( + fusion_x_local, (0, 2, 1) + ).contiguous() + fusion_x_local = self.mel_conv1d(fusion_x_local) + fusion_x_local = fusion_x_local.view( + FB, FC, FF, fusion_x_local.size(-1) + ) + fusion_x_local = ( + torch.permute(fusion_x_local, (0, 2, 1, 3)) + .contiguous() + .flatten(2) + ) + if fusion_x_local.size(-1) < FT: + fusion_x_local = torch.cat( + [ + fusion_x_local, + torch.zeros( + (FB, FF, FT - fusion_x_local.size(-1)), + device=device, + ), + ], + dim=-1, + ) + else: + fusion_x_local = fusion_x_local[:, :, :FT] + # 1D fusion + new_x = new_x.squeeze(1).permute((0, 2, 1)).contiguous() + new_x[longer_list_idx] = self.fusion_model( + new_x[longer_list_idx], fusion_x_local + ) + x = new_x.permute((0, 2, 1)).contiguous()[:, None, :, :] + else: + x = new_x + elif self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d", "channel_map"]: + x = x # no change + + if self.training: + x = self.spec_augmenter(x) + # Mixup on spectrogram + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + if (self.enable_fusion) and ( + self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d"] + ): + global_x = x[:, 0:1, :, :] + + # global processing + B, C, H, W = global_x.shape + global_x = self.conv_block1(global_x, pool_size=(2, 2), pool_type="avg") + if len(longer_list_idx) > 0: + local_x = x[longer_list_idx, 1:, :, :].contiguous() + TH = global_x.size(-2) + # local processing + B, C, H, W = local_x.shape + local_x = local_x.view(B * C, 1, H, W) + local_x = self.mel_conv2d(local_x) + local_x = local_x.view( + B, C, local_x.size(1), local_x.size(2), local_x.size(3) + ) + local_x = local_x.permute((0, 2, 1, 3, 4)).contiguous().flatten(2, 3) + TB, TC, _, TW = local_x.size() + if local_x.size(-2) < TH: + local_x = torch.cat( + [ + local_x, + torch.zeros( + (TB, TC, TH - local_x.size(-2), TW), + device=global_x.device, + ), + ], + dim=-2, + ) + else: + local_x = local_x[:, :, :TH, :] + + global_x[longer_list_idx] = self.fusion_model( + global_x[longer_list_idx], local_x + ) + x = global_x + else: + x = self.conv_block1(x, pool_size=(2, 2), pool_type="avg") + + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block2(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block3(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block4(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block5(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block6(x, pool_size=(1, 1), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = torch.mean(x, dim=3) + + latent_x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x = latent_x1 + latent_x2 + latent_x = latent_x.transpose(1, 2) + latent_x = F.relu_(self.fc1(latent_x)) + 
latent_output = interpolate(latent_x, 32) + + (x1, _) = torch.max(x, dim=2) + x2 = torch.mean(x, dim=2) + x = x1 + x2 + x = F.dropout(x, p=0.5, training=self.training) + x = F.relu_(self.fc1(x)) + embedding = F.dropout(x, p=0.5, training=self.training) + clipwise_output = torch.sigmoid(self.fc_audioset(x)) + + output_dict = { + "clipwise_output": clipwise_output, + "embedding": embedding, + "fine_grained_embedding": latent_output, + } + return output_dict + + +class Cnn6(nn.Module): + def __init__( + self, + sample_rate, + window_size, + hop_size, + mel_bins, + fmin, + fmax, + classes_num, + enable_fusion=False, + fusion_type="None", + ): + super(Cnn6, self).__init__() + + window = "hann" + center = True + pad_mode = "reflect" + ref = 1.0 + amin = 1e-10 + top_db = None + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + # Spectrogram extractor + self.spectrogram_extractor = Spectrogram( + n_fft=window_size, + hop_length=hop_size, + win_length=window_size, + window=window, + center=center, + pad_mode=pad_mode, + freeze_parameters=True, + ) + + # Logmel feature extractor + self.logmel_extractor = LogmelFilterBank( + sr=sample_rate, + n_fft=window_size, + n_mels=mel_bins, + fmin=fmin, + fmax=fmax, + ref=ref, + amin=amin, + top_db=top_db, + freeze_parameters=True, + ) + + # Spec augmenter + self.spec_augmenter = SpecAugmentation( + time_drop_width=64, + time_stripes_num=2, + freq_drop_width=8, + freq_stripes_num=2, + ) + + self.bn0 = nn.BatchNorm2d(64) + + self.conv_block1 = ConvBlock5x5(in_channels=1, out_channels=64) + self.conv_block2 = ConvBlock5x5(in_channels=64, out_channels=128) + self.conv_block3 = ConvBlock5x5(in_channels=128, out_channels=256) + self.conv_block4 = ConvBlock5x5(in_channels=256, out_channels=512) + + self.fc1 = nn.Linear(512, 512, bias=True) + self.fc_audioset = nn.Linear(512, classes_num, bias=True) + + self.init_weight() + + def init_weight(self): + init_bn(self.bn0) + init_layer(self.fc1) + init_layer(self.fc_audioset) + + def forward(self, input, mixup_lambda=None, device=None): + """ + Input: (batch_size, data_length)""" + + x = self.spectrogram_extractor(input) # (batch_size, 1, time_steps, freq_bins) + x = self.logmel_extractor(x) # (batch_size, 1, time_steps, mel_bins) + + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + + if self.training: + x = self.spec_augmenter(x) + + # Mixup on spectrogram + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + + x = self.conv_block1(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block2(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block3(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block4(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = torch.mean(x, dim=3) + + latent_x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x = latent_x1 + latent_x2 + latent_x = latent_x.transpose(1, 2) + latent_x = F.relu_(self.fc1(latent_x)) + latent_output = interpolate(latent_x, 16) + + (x1, _) = torch.max(x, dim=2) + x2 = torch.mean(x, dim=2) + x = x1 + x2 + x = F.dropout(x, p=0.5, training=self.training) + x = F.relu_(self.fc1(x)) + embedding = F.dropout(x, p=0.5, training=self.training) + clipwise_output = torch.sigmoid(self.fc_audioset(x)) + + output_dict = { + 
"clipwise_output": clipwise_output, + "embedding": embedding, + "fine_grained_embedding": latent_output, + } + + return output_dict + + +class Cnn10(nn.Module): + def __init__( + self, + sample_rate, + window_size, + hop_size, + mel_bins, + fmin, + fmax, + classes_num, + enable_fusion=False, + fusion_type="None", + ): + super(Cnn10, self).__init__() + + window = "hann" + center = True + pad_mode = "reflect" + ref = 1.0 + amin = 1e-10 + top_db = None + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + # Spectrogram extractor + self.spectrogram_extractor = Spectrogram( + n_fft=window_size, + hop_length=hop_size, + win_length=window_size, + window=window, + center=center, + pad_mode=pad_mode, + freeze_parameters=True, + ) + + # Logmel feature extractor + self.logmel_extractor = LogmelFilterBank( + sr=sample_rate, + n_fft=window_size, + n_mels=mel_bins, + fmin=fmin, + fmax=fmax, + ref=ref, + amin=amin, + top_db=top_db, + freeze_parameters=True, + ) + + # Spec augmenter + self.spec_augmenter = SpecAugmentation( + time_drop_width=64, + time_stripes_num=2, + freq_drop_width=8, + freq_stripes_num=2, + ) + + self.bn0 = nn.BatchNorm2d(64) + + self.conv_block1 = ConvBlock(in_channels=1, out_channels=64) + self.conv_block2 = ConvBlock(in_channels=64, out_channels=128) + self.conv_block3 = ConvBlock(in_channels=128, out_channels=256) + self.conv_block4 = ConvBlock(in_channels=256, out_channels=512) + self.conv_block5 = ConvBlock(in_channels=512, out_channels=1024) + + self.fc1 = nn.Linear(1024, 1024, bias=True) + self.fc_audioset = nn.Linear(1024, classes_num, bias=True) + + self.init_weight() + + def init_weight(self): + init_bn(self.bn0) + init_layer(self.fc1) + init_layer(self.fc_audioset) + + def forward(self, input, mixup_lambda=None, device=None): + """ + Input: (batch_size, data_length)""" + + x = self.spectrogram_extractor(input) # (batch_size, 1, time_steps, freq_bins) + x = self.logmel_extractor(x) # (batch_size, 1, time_steps, mel_bins) + + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + + if self.training: + x = self.spec_augmenter(x) + + # Mixup on spectrogram + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + + x = self.conv_block1(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block2(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block3(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block4(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block5(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = torch.mean(x, dim=3) + + latent_x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x = latent_x1 + latent_x2 + latent_x = latent_x.transpose(1, 2) + latent_x = F.relu_(self.fc1(latent_x)) + latent_output = interpolate(latent_x, 32) + + (x1, _) = torch.max(x, dim=2) + x2 = torch.mean(x, dim=2) + x = x1 + x2 + x = F.dropout(x, p=0.5, training=self.training) + x = F.relu_(self.fc1(x)) + embedding = F.dropout(x, p=0.5, training=self.training) + clipwise_output = torch.sigmoid(self.fc_audioset(x)) + + output_dict = { + "clipwise_output": clipwise_output, + "embedding": embedding, + "fine_grained_embedding": latent_output, + } + + return output_dict + + +def create_pann_model(audio_cfg, 
enable_fusion=False, fusion_type="None"): + try: + ModelProto = eval(audio_cfg.model_name) + model = ModelProto( + sample_rate=audio_cfg.sample_rate, + window_size=audio_cfg.window_size, + hop_size=audio_cfg.hop_size, + mel_bins=audio_cfg.mel_bins, + fmin=audio_cfg.fmin, + fmax=audio_cfg.fmax, + classes_num=audio_cfg.class_num, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + return model + except: + raise RuntimeError( + f"Import Model for {audio_cfg.model_name} not found, or the audio cfg parameters are not enough." + ) diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/pretrained.py b/qa_mdt/audioldm_train/modules/clap/open_clip/pretrained.py new file mode 100644 index 0000000000000000000000000000000000000000..e211d8b5b59320a599e62605f1dee6199f317253 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/pretrained.py @@ -0,0 +1,167 @@ +import hashlib +import os +import urllib +import warnings + +from tqdm import tqdm + +_RN50 = dict( + openai="https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt", + yfcc15m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-yfcc15m-455df137.pt", + cc12m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-cc12m-f000538c.pt", +) + +_RN50_quickgelu = dict( + openai="https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt", + yfcc15m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-yfcc15m-455df137.pt", + cc12m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-cc12m-f000538c.pt", +) + +_RN101 = dict( + openai="https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt", + yfcc15m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn101-quickgelu-yfcc15m-3e04b30e.pt", +) + +_RN101_quickgelu = dict( + openai="https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt", + yfcc15m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn101-quickgelu-yfcc15m-3e04b30e.pt", +) + +_RN50x4 = dict( + openai="https://openaipublic.azureedge.net/clip/models/7e526bd135e493cef0776de27d5f42653e6b4c8bf9e0f653bb11773263205fdd/RN50x4.pt", +) + +_RN50x16 = dict( + openai="https://openaipublic.azureedge.net/clip/models/52378b407f34354e150460fe41077663dd5b39c54cd0bfd2b27167a4a06ec9aa/RN50x16.pt", +) + +_RN50x64 = dict( + openai="https://openaipublic.azureedge.net/clip/models/be1cfb55d75a9666199fb2206c106743da0f6468c9d327f3e0d0a543a9919d9c/RN50x64.pt", +) + +_VITB32 = dict( + openai="https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt", + laion400m_e31="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e31-d867053b.pt", + laion400m_e32="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e32-46683a32.pt", + laion400m_avg="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_avg-8a00ab3c.pt", +) + +_VITB32_quickgelu = dict( + openai="https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt", + 
laion400m_e31="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e31-d867053b.pt", + laion400m_e32="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e32-46683a32.pt", + laion400m_avg="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_avg-8a00ab3c.pt", +) + +_VITB16 = dict( + openai="https://openaipublic.azureedge.net/clip/models/5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f/ViT-B-16.pt", +) + +_VITL14 = dict( + openai="https://openaipublic.azureedge.net/clip/models/b8cca3fd41ae0c99ba7e8951adf17d267cdb84cd88be6f7c2e0eca1737a03836/ViT-L-14.pt", +) + +_PRETRAINED = { + "RN50": _RN50, + "RN50-quickgelu": _RN50_quickgelu, + "RN101": _RN101, + "RN101-quickgelu": _RN101_quickgelu, + "RN50x4": _RN50x4, + "RN50x16": _RN50x16, + "ViT-B-32": _VITB32, + "ViT-B-32-quickgelu": _VITB32_quickgelu, + "ViT-B-16": _VITB16, + "ViT-L-14": _VITL14, +} + + +def list_pretrained(as_str: bool = False): + """returns list of pretrained models + Returns a tuple (model_name, pretrain_tag) by default or 'name:tag' if as_str == True + """ + return [ + ":".join([k, t]) if as_str else (k, t) + for k in _PRETRAINED.keys() + for t in _PRETRAINED[k].keys() + ] + + +def list_pretrained_tag_models(tag: str): + """return all models having the specified pretrain tag""" + models = [] + for k in _PRETRAINED.keys(): + if tag in _PRETRAINED[k]: + models.append(k) + return models + + +def list_pretrained_model_tags(model: str): + """return all pretrain tags for the specified model architecture""" + tags = [] + if model in _PRETRAINED: + tags.extend(_PRETRAINED[model].keys()) + return tags + + +def get_pretrained_url(model: str, tag: str): + if model not in _PRETRAINED: + return "" + model_pretrained = _PRETRAINED[model] + if tag not in model_pretrained: + return "" + return model_pretrained[tag] + + +def download_pretrained(url: str, root: str = os.path.expanduser("~/.cache/clip")): + os.makedirs(root, exist_ok=True) + filename = os.path.basename(url) + + if "openaipublic" in url: + expected_sha256 = url.split("/")[-2] + else: + expected_sha256 = "" + + download_target = os.path.join(root, filename) + + if os.path.exists(download_target) and not os.path.isfile(download_target): + raise RuntimeError(f"{download_target} exists and is not a regular file") + + if os.path.isfile(download_target): + if expected_sha256: + if ( + hashlib.sha256(open(download_target, "rb").read()).hexdigest() + == expected_sha256 + ): + return download_target + else: + warnings.warn( + f"{download_target} exists, but the SHA256 checksum does not match; re-downloading the file" + ) + else: + return download_target + + with urllib.request.urlopen(url) as source, open(download_target, "wb") as output: + with tqdm( + total=int(source.info().get("Content-Length")), + ncols=80, + unit="iB", + unit_scale=True, + ) as loop: + while True: + buffer = source.read(8192) + if not buffer: + break + + output.write(buffer) + loop.update(len(buffer)) + + if ( + expected_sha256 + and hashlib.sha256(open(download_target, "rb").read()).hexdigest() + != expected_sha256 + ): + raise RuntimeError( + f"Model has been downloaded but the SHA256 checksum does not not match" + ) + + return download_target diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/timm_model.py b/qa_mdt/audioldm_train/modules/clap/open_clip/timm_model.py new file mode 100644 index 
0000000000000000000000000000000000000000..c9d1ab4666b5bab5038d44b90c9ddca5087de460 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/timm_model.py @@ -0,0 +1,112 @@ +""" timm model adapter + +Wraps timm (https://github.com/rwightman/pytorch-image-models) models for use as a vision tower in CLIP model. +""" +from collections import OrderedDict + +import torch.nn as nn + +try: + import timm + from timm.models.layers import Mlp, to_2tuple + from timm.models.layers.attention_pool2d import RotAttentionPool2d + from timm.models.layers.attention_pool2d import ( + AttentionPool2d as AbsAttentionPool2d, + ) +except ImportError as e: + timm = None + +from .utils import freeze_batch_norm_2d + + +class TimmModel(nn.Module): + """timm model adapter + # FIXME this adapter is a work in progress, may change in ways that break weight compat + """ + + def __init__( + self, + model_name, + embed_dim, + image_size=224, + pool="avg", + proj="linear", + drop=0.0, + pretrained=False, + ): + super().__init__() + if timm is None: + raise RuntimeError("Please `pip install timm` to use timm models.") + + self.image_size = to_2tuple(image_size) + self.trunk = timm.create_model(model_name, pretrained=pretrained) + feat_size = self.trunk.default_cfg.get("pool_size", None) + feature_ndim = 1 if not feat_size else 2 + if pool in ("abs_attn", "rot_attn"): + assert feature_ndim == 2 + # if attn pooling used, remove both classifier and default pool + self.trunk.reset_classifier(0, global_pool="") + else: + # reset global pool if pool config set, otherwise leave as network default + reset_kwargs = dict(global_pool=pool) if pool else {} + self.trunk.reset_classifier(0, **reset_kwargs) + prev_chs = self.trunk.num_features + + head_layers = OrderedDict() + if pool == "abs_attn": + head_layers["pool"] = AbsAttentionPool2d( + prev_chs, feat_size=feat_size, out_features=embed_dim + ) + prev_chs = embed_dim + elif pool == "rot_attn": + head_layers["pool"] = RotAttentionPool2d(prev_chs, out_features=embed_dim) + prev_chs = embed_dim + else: + assert proj, "projection layer needed if non-attention pooling is used." 
+ + # NOTE attention pool ends with a projection layer, so proj should usually be set to '' if such pooling is used + if proj == "linear": + head_layers["drop"] = nn.Dropout(drop) + head_layers["proj"] = nn.Linear(prev_chs, embed_dim) + elif proj == "mlp": + head_layers["mlp"] = Mlp(prev_chs, 2 * embed_dim, embed_dim, drop=drop) + + self.head = nn.Sequential(head_layers) + + def lock(self, unlocked_groups=0, freeze_bn_stats=False): + """lock modules + Args: + unlocked_groups (int): leave last n layer groups unlocked (default: 0) + """ + if not unlocked_groups: + # lock full model + for param in self.trunk.parameters(): + param.requires_grad = False + if freeze_bn_stats: + freeze_batch_norm_2d(self.trunk) + else: + # NOTE: partial freeze requires latest timm (master) branch and is subject to change + try: + # FIXME import here until API stable and in an official release + from timm.models.helpers import group_parameters, group_modules + except ImportError: + raise RuntimeError( + "Please install latest timm `pip install git+https://github.com/rwightman/pytorch-image-models`" + ) + matcher = self.trunk.group_matcher() + gparams = group_parameters(self.trunk, matcher) + max_layer_id = max(gparams.keys()) + max_layer_id = max_layer_id - unlocked_groups + for group_idx in range(max_layer_id + 1): + group = gparams[group_idx] + for param in group: + self.trunk.get_parameter(param).requires_grad = False + if freeze_bn_stats: + gmodules = group_modules(self.trunk, matcher, reverse=True) + gmodules = {k for k, v in gmodules.items() if v <= max_layer_id} + freeze_batch_norm_2d(self.trunk, gmodules) + + def forward(self, x): + x = self.trunk(x) + x = self.head(x) + return x diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/tokenizer.py b/qa_mdt/audioldm_train/modules/clap/open_clip/tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..ee4d28450ec5dd12a79daf38cf3088e9e73c2cd5 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/tokenizer.py @@ -0,0 +1,197 @@ +""" CLIP tokenizer + +Copied from https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI. +""" +import gzip +import html +import os +from functools import lru_cache +from typing import Union, List + +import ftfy +import regex as re +import torch + + +@lru_cache() +def default_bpe(): + return os.path.join( + os.path.dirname(os.path.abspath(__file__)), "bpe_simple_vocab_16e6.txt.gz" + ) + + +@lru_cache() +def bytes_to_unicode(): + """ + Returns list of utf-8 byte and a corresponding list of unicode strings. + The reversible bpe codes work on unicode strings. + This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. + When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. + This is a signficant percentage of your normal, say, 32K bpe vocab. + To avoid that, we want lookup tables between utf-8 bytes and unicode strings. + And avoids mapping to whitespace/control characters the bpe code barfs on. + """ + bs = ( + list(range(ord("!"), ord("~") + 1)) + + list(range(ord("¡"), ord("¬") + 1)) + + list(range(ord("®"), ord("ÿ") + 1)) + ) + cs = bs[:] + n = 0 + for b in range(2**8): + if b not in bs: + bs.append(b) + cs.append(2**8 + n) + n += 1 + cs = [chr(n) for n in cs] + return dict(zip(bs, cs)) + + +def get_pairs(word): + """Return set of symbol pairs in a word. + Word is represented as tuple of symbols (symbols being variable-length strings). 
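+ e.g. get_pairs(("l", "o", "w")) returns {("l", "o"), ("o", "w")}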
+ """ + pairs = set() + prev_char = word[0] + for char in word[1:]: + pairs.add((prev_char, char)) + prev_char = char + return pairs + + +def basic_clean(text): + text = ftfy.fix_text(text) + text = html.unescape(html.unescape(text)) + return text.strip() + + +def whitespace_clean(text): + text = re.sub(r"\s+", " ", text) + text = text.strip() + return text + + +class SimpleTokenizer(object): + def __init__(self, bpe_path: str = default_bpe(), special_tokens=None): + self.byte_encoder = bytes_to_unicode() + self.byte_decoder = {v: k for k, v in self.byte_encoder.items()} + merges = gzip.open(bpe_path).read().decode("utf-8").split("\n") + merges = merges[1 : 49152 - 256 - 2 + 1] + merges = [tuple(merge.split()) for merge in merges] + vocab = list(bytes_to_unicode().values()) + vocab = vocab + [v + "" for v in vocab] + for merge in merges: + vocab.append("".join(merge)) + if not special_tokens: + special_tokens = ["", ""] + else: + special_tokens = ["", ""] + special_tokens + vocab.extend(special_tokens) + self.encoder = dict(zip(vocab, range(len(vocab)))) + self.decoder = {v: k for k, v in self.encoder.items()} + self.bpe_ranks = dict(zip(merges, range(len(merges)))) + self.cache = {t: t for t in special_tokens} + special = "|".join(special_tokens) + self.pat = re.compile( + special + r"""|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+""", + re.IGNORECASE, + ) + + self.vocab_size = len(self.encoder) + self.all_special_ids = [self.encoder[t] for t in special_tokens] + + def bpe(self, token): + if token in self.cache: + return self.cache[token] + word = tuple(token[:-1]) + (token[-1] + "",) + pairs = get_pairs(word) + + if not pairs: + return token + "" + + while True: + bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(pair, float("inf"))) + if bigram not in self.bpe_ranks: + break + first, second = bigram + new_word = [] + i = 0 + while i < len(word): + try: + j = word.index(first, i) + new_word.extend(word[i:j]) + i = j + except: + new_word.extend(word[i:]) + break + + if word[i] == first and i < len(word) - 1 and word[i + 1] == second: + new_word.append(first + second) + i += 2 + else: + new_word.append(word[i]) + i += 1 + new_word = tuple(new_word) + word = new_word + if len(word) == 1: + break + else: + pairs = get_pairs(word) + word = " ".join(word) + self.cache[token] = word + return word + + def encode(self, text): + bpe_tokens = [] + text = whitespace_clean(basic_clean(text)).lower() + for token in re.findall(self.pat, text): + token = "".join(self.byte_encoder[b] for b in token.encode("utf-8")) + bpe_tokens.extend( + self.encoder[bpe_token] for bpe_token in self.bpe(token).split(" ") + ) + return bpe_tokens + + def decode(self, tokens): + text = "".join([self.decoder[token] for token in tokens]) + text = ( + bytearray([self.byte_decoder[c] for c in text]) + .decode("utf-8", errors="replace") + .replace("", " ") + ) + return text + + +_tokenizer = SimpleTokenizer() + + +def tokenize( + texts: Union[str, List[str]], context_length: int = 77 +) -> torch.LongTensor: + """ + Returns the tokenized representation of given input string(s) + + Parameters + ---------- + texts : Union[str, List[str]] + An input string or a list of input strings to tokenize + context_length : int + The context length to use; all CLIP models use 77 as the context length + + Returns + ------- + A two-dimensional tensor containing the resulting tokens, shape = [number of input strings, context_length] + """ + if isinstance(texts, str): + texts = [texts] + + sot_token = _tokenizer.encoder[""] + 
eot_token = _tokenizer.encoder["<end_of_text>"] + all_tokens = [[sot_token] + _tokenizer.encode(text) + [eot_token] for text in texts] + result = torch.zeros(len(all_tokens), context_length, dtype=torch.long) + + for i, tokens in enumerate(all_tokens): + if len(tokens) > context_length: + tokens = tokens[:context_length] # Truncate + result[i, : len(tokens)] = torch.tensor(tokens) + + return result diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/transform.py b/qa_mdt/audioldm_train/modules/clap/open_clip/transform.py new file mode 100644 index 0000000000000000000000000000000000000000..77aaa722c4a5544ac50de6df35d3e922f63b111d --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/transform.py @@ -0,0 +1,45 @@ +from torchvision.transforms import ( + Normalize, + Compose, + RandomResizedCrop, + InterpolationMode, + ToTensor, + Resize, + CenterCrop, +) + + +def _convert_to_rgb(image): + return image.convert("RGB") + + +def image_transform( + image_size: int, + is_train: bool, + mean=(0.48145466, 0.4578275, 0.40821073), + std=(0.26862954, 0.26130258, 0.27577711), +): + normalize = Normalize(mean=mean, std=std) + if is_train: + return Compose( + [ + RandomResizedCrop( + image_size, + scale=(0.9, 1.0), + interpolation=InterpolationMode.BICUBIC, + ), + _convert_to_rgb, + ToTensor(), + normalize, + ] + ) + else: + return Compose( + [ + Resize(image_size, interpolation=InterpolationMode.BICUBIC), + CenterCrop(image_size), + _convert_to_rgb, + ToTensor(), + normalize, + ] + ) diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/utils.py b/qa_mdt/audioldm_train/modules/clap/open_clip/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..8d6a6b7ea29d9edfc0a69debbfcd11cc88c98a28 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/utils.py @@ -0,0 +1,361 @@ +import numpy as np +import torch +from torch import nn as nn +from torchvision.ops.misc import FrozenBatchNorm2d +import logging +import h5py +from tqdm import tqdm +import random +import json +import os +import pathlib + +# TODO: (yusong) this not a good place to store those information and does not scale. Need to be fixed later. +dataset_split = { + "audiocaps": ["train", "valid", "test"], + "audioset": ["balanced_train", "unbalanced_train", "eval"], + "BBCSoundEffects": ["train", "test"], + "Clotho": ["train", "test", "valid"], + "free_to_use_sounds": ["train", "test"], + "paramount_motion": ["train", "test"], + "sonniss_game_effects": ["train", "test"], + "wesoundeffects": ["train", "test"], + "MACS": ["train", "test"], + "freesound": ["train", "test"], + "FSD50K": ["train", "test", "valid"], + "fsd50k_class_label": ["train", "test", "valid"], + "esc50": ["train", "test"], + "audiostock": ["train", "test"], + "freesound_no_overlap_noesc50": ["train", "test"], + "epidemic_sound_effects": ["train", "test"], + "VGGSound": ["train", "test"], + "urbansound8k_class_label": ["train", "test"], + "audioset_t5": ["balanced_train", "unbalanced_train", "eval"], + "epidemic_sound_effects_t5": ["train", "test"], + "WavText5K": ["train", "test"], + "esc50_no_overlap": ["train", "test"], + "usd8k_no_overlap": ["train", "test"], + "fsd50k_200_class_label": ["train", "test", "valid"], +} + + +def freeze_batch_norm_2d(module, module_match={}, name=""): + """ + Converts all `BatchNorm2d` and `SyncBatchNorm` layers of provided module into `FrozenBatchNorm2d`. If `module` is + itself an instance of either `BatchNorm2d` or `SyncBatchNorm`, it is converted into `FrozenBatchNorm2d` and + returned.
Otherwise, the module is walked recursively and submodules are converted in place. + + Args: + module (torch.nn.Module): Any PyTorch module. + module_match (dict): Dictionary of full module names to freeze (all if empty) + name (str): Full module name (prefix) + + Returns: + torch.nn.Module: Resulting module + + Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762 + """ + res = module + is_match = True + if module_match: + is_match = name in module_match + if is_match and isinstance( + module, (nn.modules.batchnorm.BatchNorm2d, nn.modules.batchnorm.SyncBatchNorm) + ): + res = FrozenBatchNorm2d(module.num_features) + res.num_features = module.num_features + res.affine = module.affine + if module.affine: + res.weight.data = module.weight.data.clone().detach() + res.bias.data = module.bias.data.clone().detach() + res.running_mean.data = module.running_mean.data + res.running_var.data = module.running_var.data + res.eps = module.eps + else: + for child_name, child in module.named_children(): + full_child_name = ".".join([name, child_name]) if name else child_name + new_child = freeze_batch_norm_2d(child, module_match, full_child_name) + if new_child is not child: + res.add_module(child_name, new_child) + return res + + +def exist(dataset_name, dataset_type): + """ + Check if dataset exists + """ + if dataset_type in dataset_split[dataset_name]: + return True + else: + return False + + +def get_tar_path_from_dataset_name( + dataset_names, dataset_types, islocal, dataset_path, proportion=1, full_dataset=None +): + """ + Get tar path from dataset name and type + """ + output = [] + for n in dataset_names: + if full_dataset is not None and n in full_dataset: + current_dataset_types = dataset_split[n] + else: + current_dataset_types = dataset_types + for s in current_dataset_types: + tmp = [] + if islocal: + sizefilepath_ = f"{dataset_path}/{n}/{s}/sizes.json" + if not os.path.exists(sizefilepath_): + sizefilepath_ = f"./json_files/{n}/{s}/sizes.json" + else: + sizefilepath_ = f"./json_files/{n}/{s}/sizes.json" + if not os.path.exists(sizefilepath_): + continue + sizes = json.load(open(sizefilepath_, "r")) + for k in sizes.keys(): + if islocal: + tmp.append(f"{dataset_path}/{n}/{s}/{k}") + else: + tmp.append( + f"pipe:aws s3 --cli-connect-timeout 0 cp s3://s-laion-audio/webdataset_tar/{n}/{s}/{k} -" + ) + if proportion != 1: + tmp = random.sample(tmp, int(proportion * len(tmp))) + output.append(tmp) + return sum(output, []) + + +def get_tar_path_from_txts(txt_path, islocal, proportion=1): + """ + Get tar path from txt path + """ + if isinstance(txt_path, (list, tuple)): + return sum( + [ + get_tar_path_from_txts( + txt_path[i], islocal=islocal, proportion=proportion + ) + for i in range(len(txt_path)) + ], + [], + ) + if isinstance(txt_path, str): + with open(txt_path) as f: + lines = f.readlines() + if islocal: + lines = [ + lines[i] + .split("\n")[0] + .replace("pipe:aws s3 cp s3://s-laion-audio/", "/mnt/audio_clip/") + for i in range(len(lines)) + ] + else: + lines = [ + lines[i].split("\n")[0].replace(".tar", ".tar -") + for i in range(len(lines)) + ] + if proportion != 1: + print("Sampling tars with proportion of {}".format(proportion)) + lines = random.sample(lines, int(proportion * len(lines))) + return lines + + +def get_mix_lambda(mixup_alpha, batch_size): + mixup_lambdas = [ + np.random.beta(mixup_alpha, mixup_alpha, 1)[0] for _ in range(batch_size) + ] + return np.array(mixup_lambdas).astype(np.float32) + + +def 
do_mixup(x, mixup_lambda): + """ + Args: + x: (batch_size , ...) + mixup_lambda: (batch_size,) + Returns: + out: (batch_size, ...) + """ + out = ( + x.transpose(0, -1) * mixup_lambda + + torch.flip(x, dims=[0]).transpose(0, -1) * (1 - mixup_lambda) + ).transpose(0, -1) + return out + + +def interpolate(x, ratio): + """Interpolate data in time domain. This is used to compensate the + resolution reduction in downsampling of a CNN. + + Args: + x: (batch_size, time_steps, classes_num) + ratio: int, ratio to interpolate + Returns: + upsampled: (batch_size, time_steps * ratio, classes_num) + """ + (batch_size, time_steps, classes_num) = x.shape + upsampled = x[:, :, None, :].repeat(1, 1, ratio, 1) + upsampled = upsampled.reshape(batch_size, time_steps * ratio, classes_num) + return upsampled + + +def pad_framewise_output(framewise_output, frames_num): + """Pad framewise_output to the same length as input frames. The pad value + is the same as the value of the last frame. + Args: + framewise_output: (batch_size, frames_num, classes_num) + frames_num: int, number of frames to pad + Outputs: + output: (batch_size, frames_num, classes_num) + """ + pad = framewise_output[:, -1:, :].repeat( + 1, frames_num - framewise_output.shape[1], 1 + ) + """tensor for padding""" + + output = torch.cat((framewise_output, pad), dim=1) + """(batch_size, frames_num, classes_num)""" + + +def process_ipc(index_path, classes_num, filename): + # load data + logging.info("Load Data...............") + ipc = [[] for _ in range(classes_num)] + with h5py.File(index_path, "r") as f: + for i in tqdm(range(len(f["target"]))): + t_class = np.where(f["target"][i])[0] + for t in t_class: + ipc[t].append(i) + print(ipc) + np.save(filename, ipc) + logging.info("Load Data Succeed...............") + + +def save_to_dict(s, o_={}): + sp = s.split(": ") + o_.update({sp[0]: float(sp[1])}) + return o_ + + +def get_data_from_log(txt_path): + """ + Output dictionary from out.txt log file + """ + with open(txt_path) as f: + lines = f.readlines() + val_data = {} + train_data = {} + train_losses = [] + train_losses_epoch = [] + for i in range(len(lines)): + if "| INFO |" in lines[i]: + if "Eval Epoch" in lines[i]: + if "val_loss" in lines[i]: + # float(regex.sub("", lines[310].split(" ")[-1]).replace(" ", "")) + line = lines[i].split("Eval Epoch: ")[-1] + num_epoch = int(line.split(" ")[0].split(" ")[0]) + d = { + line.split(" ")[0] + .split(" ")[1] + .replace(":", ""): float(line.split(" ")[0].split(" ")[-1]) + } + for i in range(1, len(line.split(" "))): + d = save_to_dict(line.split(" ")[i], d) + val_data[num_epoch] = d + elif "Train Epoch" in lines[i]: + num_epoch = int(lines[i].split("Train Epoch: ")[1][0]) + loss = float(lines[i].split("Loss: ")[-1].split(" (")[0]) + train_losses.append(loss) + train_losses_epoch.append(num_epoch) + for i in range(len(train_losses)): + train_data[i] = { + "num_epoch": train_losses_epoch[i], + "train_loss": train_losses[i], + } + return train_data, val_data + + +def save_p(obj, filename): + import pickle + + try: + from deepdiff import DeepDiff + except: + os.system("pip install deepdiff") + from deepdiff import DeepDiff + with open(filename, "wb") as file: + pickle.dump(obj, file, protocol=pickle.HIGHEST_PROTOCOL) # highest protocol + with open(filename, "rb") as file: + z = pickle.load(file) + assert ( + DeepDiff(obj, z, ignore_string_case=True) == {} + ), "there is something wrong with the saving process" + return + + +def load_p(filename): + import pickle + + with open(filename, "rb") as file: + z = 
pickle.load(file) + return z + + +def save_json(data, name="data.json"): + import json + + with open(name, "w") as fp: + json.dump(data, fp) + return + + +def load_json(name): + import json + + with open(name, "r") as fp: + data = json.load(fp) + return data + + +from multiprocessing import Process, Manager +from multiprocessing import Process, Value, Array +from ctypes import c_wchar + + +def load_class_label(path): + # https://stackoverflow.com/questions/48004243/how-to-share-large-read-only-dictionary-list-across-processes-in-multiprocessing + # https://stackoverflow.com/questions/45693949/storing-strings-in-a-multiprocessing-sharedctypes-array + out = None + if path is not None: + if pathlib.Path(path).suffix in [".pkl", ".pickle"]: + out = load_p(path) + elif pathlib.Path(path).suffix in [".json", ".txt"]: + out = load_json(path) + elif pathlib.Path(path).suffix in [".npy", ".npz"]: + out = np.load(path) + elif pathlib.Path(path).suffix in [".csv"]: + import pandas as pd + + out = pd.read_csv(path) + return out + # if out is None: + # return None + # else: + # key = Array(c_wchar, '\n'.join(list(out.keys())), lock=False) + # val = Array('i', out.values(), lock=False) + # return (key, val) + + +from torch import optim + + +def get_optimizer(params, lr, betas, eps, momentum, optimizer_name): + if optimizer_name.lower() == "adamw": + optimizer = optim.AdamW(params, lr=lr, betas=betas, eps=eps) + elif optimizer_name.lower() == "sgd": + optimizer = optim.SGD(params, lr=lr, momentum=momentum) + elif optimizer_name.lower() == "adam": + optimizer = optim.Adam(params, lr=lr, betas=betas, eps=eps) + else: + raise ValueError("optimizer name is not correct") + return optimizer diff --git a/qa_mdt/audioldm_train/modules/clap/open_clip/version.py b/qa_mdt/audioldm_train/modules/clap/open_clip/version.py new file mode 100644 index 0000000000000000000000000000000000000000..3ced3581bb601ae91b1e1da4b8f4f520855a065e --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/open_clip/version.py @@ -0,0 +1 @@ +__version__ = "0.2.1" diff --git a/qa_mdt/audioldm_train/modules/clap/training/__init__.py b/qa_mdt/audioldm_train/modules/clap/training/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/audioldm_train/modules/clap/training/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb3cf8aef7f3ed6d9195011231eaaf46de4ffdaf Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/training/__pycache__/__init__.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae6c3d44183c9ccc9ca66db8e5e46a799297e0a5 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/__init__.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/training/__pycache__/data.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/data.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4960916871130811fbed19d913c5132c60a9700f Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/data.cpython-310.pyc differ diff --git 
a/qa_mdt/audioldm_train/modules/clap/training/__pycache__/data.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/data.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2c43d7e9bcb68f819498eb45dee5e110076d241d Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/data.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/training/__pycache__/params.cpython-310.pyc b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/params.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..808b504ed4842c517aa54b135411c561a6a253c3 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/params.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/training/__pycache__/params.cpython-38.pyc b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/params.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5648d54568a371c60aa9c27aa33f948607b62f19 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/clap/training/__pycache__/params.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/clap/training/audioset_textmap.npy b/qa_mdt/audioldm_train/modules/clap/training/audioset_textmap.npy new file mode 100644 index 0000000000000000000000000000000000000000..3da4c92d3819aaec11e5f576464a9973a6df811b --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/audioset_textmap.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bada103070d92f9eadd33e1b4f45ec8583f59080ef218c966b43294bd4c86d5b +size 84448 diff --git a/qa_mdt/audioldm_train/modules/clap/training/data.py b/qa_mdt/audioldm_train/modules/clap/training/data.py new file mode 100644 index 0000000000000000000000000000000000000000..b1defc6d75198b6806d723a24ac7c66e1fae2433 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/data.py @@ -0,0 +1,881 @@ +import ast +import json +import logging +import math +import os +import random +import h5py +from dataclasses import dataclass +from qa_mdt.audioldm_train.modules.clap.training.params import parse_args +import braceexpand +import numpy as np +import pandas as pd +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.datasets as datasets +import torchvision.transforms +import webdataset as wds +from PIL import Image +from torch.utils.data import Dataset, DataLoader, SubsetRandomSampler +from torch.utils.data.distributed import DistributedSampler +from functools import partial +import soundfile as sf +import io +from pathlib import Path +import wget + +from qa_mdt.audioldm_train.modules.clap.open_clip.utils import ( + get_tar_path_from_dataset_name, + dataset_split, +) +from qa_mdt.audioldm_train.modules.clap.open_clip.utils import load_p, load_class_label +import tempfile +import copy + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + +try: + import torchaudio +except ImportError: + torchaudio = None + +from qa_mdt.audioldm_train.modules.clap.open_clip import tokenize + + +def tokenizer(text): + return tokenize(text).squeeze(0) + + +from transformers import RobertaTokenizer +with open('./qa_mdt/offset_pretrained_checkpoints.json', 'r') as config_file: + config_data = json.load(config_file) + +tokenize = RobertaTokenizer.from_pretrained(config_data["roberta-base"]) + + +def tokenizer(text): + result = tokenize( + text, + padding="max_length", + truncation=True, + max_length=77, + 
return_tensors="pt", + ) + return {k: v.squeeze(0) for k, v in result.items()} + + +# initizlied the audioset map +_AUDIOSET_MAP_PATH = os.path.join(Path(__file__).parent, "audioset_textmap.npy") +_AUDIOSET_MAP = np.load(_AUDIOSET_MAP_PATH, allow_pickle=True) + + +def int16_to_float32(x): + return (x / 32767.0).astype(np.float32) + + +def float32_to_int16(x): + x = np.clip(x, a_min=-1.0, a_max=1.0) + return (x * 32767.0).astype(np.int16) + + +# For Toy Dataset +class ToyDataset(Dataset): + def __init__(self, index_path, ipc, config, eval_mode=False): + """Toy Dataset for testing the audioset input with text labels + Parameters + ---------- + index_path: str + the link to the h5 file of each audio + idc: str + the link to the npy file, the number of samples in each class + config: dict + the audio cfg file + eval_model (bool): to indicate if the dataset is a testing dataset + """ + self.audio_cfg = config["audio_cfg"] + self.text_cfg = config["text_cfg"] + self.fp = h5py.File(index_path, "r") + self.ipc = np.load(ipc, allow_pickle=True) + self.total_size = len(self.fp["audio_name"]) + self.classes_num = self.audio_cfg["class_num"] + self.eval_mode = eval_mode + + if not eval_mode: + self.generate_queue() + else: + self.queue = [] + for i in range(self.total_size): + target = self.fp["target"][i] + if np.sum(target) > 0: + self.queue.append(i) + self.total_size = len(self.queue) + logging.info("total dataset size: %d" % (self.total_size)) + logging.info("class num: %d" % (self.classes_num)) + + def time_shifting(self, x): + frame_num = len(x) + shift_len = random.randint(0, frame_num - 1) + new_sample = np.concatenate([x[shift_len:], x[:shift_len]], axis=0) + return new_sample + + def generate_queue(self): + self.queue = [] + while len(self.queue) < self.total_size: + class_set = [*range(self.classes_num)] + random.shuffle(class_set) + self.queue += [ + self.ipc[d][random.randint(0, len(self.ipc[d]) - 1)] for d in class_set + ] + self.queue = self.queue[: self.total_size] + + logging.info("queue regenerated:%s" % (self.queue[-5:])) + + def crop_wav(self, x): + crop_size = self.audio_cfg["crop_size"] + crop_pos = random.randint(0, len(x) - crop_size - 1) + return x[crop_pos : crop_pos + crop_size] + + def prompt_text(self, target): + events = _AUDIOSET_MAP[np.where(target > 0)] + event_text = "The sounds of " + ", ".join(events[:-1]) + " and " + events[-1] + text = tokenize(event_text)[0] + return text + + def __getitem__(self, index): + """Load waveform, text, and target of an audio clip + + Parameters + ---------- + index: int + the index number + Return + ------ + output: dict { + "hdf5_path": str, + "index_in_hdf5": int, + "audio_name": str, + "waveform": list (audio_length,), + "target": list (class_num, ), + "text": torch.tensor (context_length,) + } + the output dictionary + """ + s_index = self.queue[index] + + audio_name = self.fp["audio_name"][s_index].decode() + # Hardcode here CHANGE + hdf5_path = ( + self.fp["hdf5_path"][s_index] + .decode() + .replace( + "../workspace", + "/home/la/kechen/Research/ke_zsasp/workspace", + ) + ) + r_idx = self.fp["index_in_hdf5"][s_index] + target = self.fp["target"][s_index].astype(np.float32) + text = self.prompt_text(target) + with h5py.File(hdf5_path, "r") as f: + waveform = int16_to_float32(f["waveform"][r_idx])[ + : self.audio_cfg["clip_samples"] + ] + assert ( + len(waveform) == self.audio_cfg["clip_samples"] + ), "The sample length is not match" + # Time shift + # if (self.config.enable_time_shift) and (not self.eval_mode): + # waveform = 
self.time_shifting(waveform) + # # Label Enhance + # if (self.config.crop_size is not None) and (not self.eval_mode): + # waveform = self.crop_wav(waveform) + # # the label enhance rate is fixed 0.5 + # if (self.config.enable_label_enhance) and (not self.eval_mode) and random.random() < 0.5: + # kidx = np.where(target)[0] + # for k in kidx: + # for add_key in self.class_map[k][1]: + # target[add_key] = 1.0 + # if len(self.class_map[k][2]) > 0: + # add_key = random.choice(self.class_map[k][2]) + # target[add_key] = 1.0 + + # missing the text input + mel_spec = get_mel(torch.from_numpy(waveform), self.audio_cfg)[None, :, :] + mel_spec = ( + torch.cat( + [mel_spec, mel_spec.clone(), mel_spec.clone(), mel_spec.clone()], dim=0 + ) + .cpu() + .numpy() + ) + longer = random.choice([True, False]) + if longer == False: + mel_spec[1:, :, :] = 0.0 + data_dict = { + "hdf5_path": hdf5_path, + "index_in_hdf5": r_idx, + "audio_name": audio_name, + "waveform": waveform, + "class_label": target, + "text": text, + "longer": longer, + "mel_fusion": mel_spec, + } + return data_dict + + def __len__(self): + return self.total_size + + +class CsvDataset(Dataset): + def __init__(self, input_filename, transforms, img_key, caption_key, sep="\t"): + logging.debug(f"Loading csv data from {input_filename}.") + df = pd.read_csv(input_filename, sep=sep) + + self.images = df[img_key].tolist() + self.captions = df[caption_key].tolist() + self.transforms = transforms + logging.debug("Done loading data.") + + def __len__(self): + return len(self.captions) + + def __getitem__(self, idx): + images = self.transforms(Image.open(str(self.images[idx]))) + texts = tokenize([str(self.captions[idx])])[0] + return images, texts + + +@dataclass +class DataInfo: + dataloader: DataLoader + sampler: DistributedSampler + + +def preprocess_txt(text): + return tokenize([str(text)])[0] + + +def get_dataset_size(shards, sizefilepath_=None, is_local=True): + if isinstance(shards, list): + size_list = [] + for s in shards: + size_list.append( + get_dataset_size(s, sizefilepath_=sizefilepath_, is_local=is_local)[0] + ) + else: + if not is_local: + for n in dataset_split.keys(): + if n in shards.split("/"): + break + for s in dataset_split[n]: + if s in shards.split("/"): + break + sizefilepath_ = f"./json_files/{n}/{s}/sizes.json" + shards_list = list(braceexpand.braceexpand(shards)) + dir_path = os.path.dirname(shards) + if sizefilepath_ is not None: + sizes = json.load(open(sizefilepath_, "r")) + total_size = sum( + [ + int(sizes[os.path.basename(shard.replace(".tar -", ".tar"))]) + for shard in shards_list + ] + ) + else: + sizes_filename = os.path.join(dir_path, "sizes.json") + len_filename = os.path.join(dir_path, "__len__") + if os.path.exists(sizes_filename): + sizes = json.load(open(sizes_filename, "r")) + total_size = sum( + [int(sizes[os.path.basename(shard)]) for shard in shards_list] + ) + elif os.path.exists(len_filename): + # FIXME this used to be eval(open(...)) but that seemed rather unsafe + total_size = ast.literal_eval(open(len_filename, "r").read()) + else: + raise Exception( + "Cannot find sizes file for dataset. Please specify the path to the file." 
+ ) + # total_size = None # num samples undefined + # some common dataset sizes (at time of authors last download) + # cc3m-train: 2905954 + # cc12m: 10968539 + # LAION-400m: 407332084 + num_shards = len(shards_list) + if isinstance(shards, list): + return sum(size_list), len(shards) + else: + return total_size, num_shards + + +def get_imagenet(args, preprocess_fns, split): + assert split in ["train", "val", "v2"] + is_train = split == "train" + preprocess_train, preprocess_val = preprocess_fns + + if split == "v2": + from imagenetv2_pytorch import ImageNetV2Dataset + + dataset = ImageNetV2Dataset(location=args.imagenet_v2, transform=preprocess_val) + else: + if is_train: + data_path = args.imagenet_train + preprocess_fn = preprocess_train + else: + data_path = args.imagenet_val + preprocess_fn = preprocess_val + assert data_path + + dataset = datasets.ImageFolder(data_path, transform=preprocess_fn) + + if is_train: + idxs = np.zeros(len(dataset.targets)) + target_array = np.array(dataset.targets) + k = 50 + for c in range(1000): + m = target_array == c + n = len(idxs[m]) + arr = np.zeros(n) + arr[:k] = 1 + np.random.shuffle(arr) + idxs[m] = arr + + idxs = idxs.astype("int") + sampler = SubsetRandomSampler(np.where(idxs)[0]) + else: + sampler = None + + dataloader = torch.utils.data.DataLoader( + dataset, + batch_size=args.batch_size, + num_workers=args.workers, + sampler=sampler, + ) + + return DataInfo(dataloader, sampler) + + +def count_samples(dataloader): + os.environ["WDS_EPOCH"] = "0" + n_elements, n_batches = 0, 0 + for images, texts in dataloader: + n_batches += 1 + n_elements += len(images) + assert len(images) == len(texts) + return n_elements, n_batches + + +def filter_no_caption(sample): + return "txt" in sample + + +def log_and_continue(exn): + """Call in an exception handler to ignore any exception, isssue a warning, and continue.""" + logging.warning(f"Handling webdataset error ({repr(exn)}). Ignoring.") + return True + + +_SHARD_SHUFFLE_SIZE = 2000 +_SHARD_SHUFFLE_INITIAL = 500 +_SAMPLE_SHUFFLE_SIZE = 5000 +_SAMPLE_SHUFFLE_INITIAL = 1000 + + +def sample_prop(sizefile, inputs, proportion, is_local=True): + """ + Sample a proportion of the data. 
+ """ + file_path_dict = { + os.path.split(inputs[i])[1]: os.path.split(inputs[i])[0] + for i in range(len(inputs)) + } + sampled_filepath_dict = {} + sampled_size_dict = {} + if not is_local: + if os.path.exists("sizes.json"): + os.remove("sizes.json") + wget.download(sizefile, "sizes.json") + sizefile = "sizes.json" + with open(sizefile, "r", encoding="UTF-8") as f: + load_dict = json.load(f) + L = int(len(file_path_dict) * proportion) + subkeys = random.sample(file_path_dict.keys(), L) + for k in subkeys: + sampled_size_dict[k] = load_dict[k] + sampled_filepath_dict[k] = file_path_dict[k] + return ( + sum(sampled_size_dict.values()), + L, + [os.path.join(v, k) for k, v in sampled_filepath_dict.items()], + sampled_size_dict, + ) + + +def get_mel(audio_data, audio_cfg): + # mel shape: (n_mels, T) + mel = torchaudio.transforms.MelSpectrogram( + sample_rate=audio_cfg["sample_rate"], + n_fft=audio_cfg["window_size"], + win_length=audio_cfg["window_size"], + hop_length=audio_cfg["hop_size"], + center=True, + pad_mode="reflect", + power=2.0, + norm=None, + onesided=True, + n_mels=64, + f_min=audio_cfg["fmin"], + f_max=audio_cfg["fmax"], + ).to(audio_data.device) + mel = mel(audio_data) + # we use log mel spectrogram as input + mel = torchaudio.transforms.AmplitudeToDB(top_db=None)(mel) + return mel.T # (T, n_mels) + + +def get_audio_features( + audio_data, mel, max_len, data_truncating, data_filling, audio_cfg +): + """ + Calculate and add audio features to sample. + Sample: a dict containing all the data of current sample. + audio_data: a tensor of shape (T) containing audio data. + max_len: the maximum length of audio data. + data_truncating: the method of truncating data. + data_filling: the method of filling data. + audio_cfg: a dict containing audio configuration. Comes from model_cfg['audio_cfg']. + """ + sample = {} + + assert audio_data.size(-1) > max_len, str(audio_data.size()) + + # split to three parts + chunk_frames = ( + max_len // audio_cfg["hop_size"] + 1 + ) # the +1 related to how the spectrogram is computed + mel = mel[:chunk_frames] + + audio_data = audio_data[..., :max_len] + sample["mel_fusion"] = mel + longer = torch.tensor([True]) + + sample["longer"] = longer + sample["waveform"] = audio_data + + return sample + + +def preprocess( + sample, + audio_ext, + text_ext, + max_len, + audio_cfg, + class_index_dict=None, + data_filling="pad", + data_truncating="rand_trunc", + text_augment_selection=None, +): + """ + Preprocess a single sample for wdsdataloader. 
+ """ + audio_data, orig_sr = sf.read(io.BytesIO(sample[audio_ext])) + audio_data = int16_to_float32(float32_to_int16(audio_data)) + audio_data = torch.tensor(audio_data).float() + + # TODO: (yusong) to be include in the future + # # if torchaudio not installed, use soundfile to load audio + # if torchaudio is None: + # audio_data, orig_sr = sf.read(io.BytesIO(sample[audio_ext])) + # audio_data = torch.tensor(audio_data).float() + # else: + # # https://github.com/webdataset/webdataset/blob/main/webdataset/autodecode.py + # with tempfile.TemporaryDirectory() as dirname: + # os.makedirs(dirname, exist_ok=True) + # fname = os.path.join(dirname, f"file.flac") + # with open(fname, "wb") as stream: + # stream.write(sample[audio_ext]) + # audio_data, orig_sr = torchaudio.load(fname) + # audio_data = audio_data[0, :].float() + + sample = get_audio_features( + sample, audio_data, max_len, data_truncating, data_filling, audio_cfg + ) + del sample[audio_ext] + + try: + json_dict_raw = json.loads(sample[text_ext].decode("utf-8")) + except: + print("sample[__url__]:", sample["__url__"]) + + # For selecting augmented text from dataset + if text_augment_selection is None or text_augment_selection == "none": + texts = json_dict_raw["text"] + elif text_augment_selection == "all": + if "text_augment_all" in json_dict_raw.keys(): + texts = json_dict_raw["text_augment_all"] + else: + texts = json_dict_raw["text"] + elif text_augment_selection == "augment_only": + if "text_augment_all" in json_dict_raw.keys(): + if json_dict_raw["text_augment_t5"] is None: + texts = json_dict_raw["text"] + else: + texts = json_dict_raw["text_augment_t5"] + else: + texts = json_dict_raw["text"] + else: + raise NotImplementedError( + f"text_augment_selection {text_augment_selection} not implemented" + ) + sample["full_text"] = texts + + if isinstance(texts, list) and isinstance(texts[0], str) and len(texts) > 1: + texts = random.choice(texts) + sample["raw_text"] = texts + sample["text"] = tokenizer(texts) # text shape: [num_token] + if class_index_dict is not None: + # https://stackoverflow.com/questions/48004243/how-to-share-large-read-only-dictionary-list-across-processes-in-multiprocessing + # https://stackoverflow.com/questions/45693949/storing-strings-in-a-multiprocessing-sharedctypes-array + # key, val = class_index_dict + # key = key[:].split('\n') + # _dict = {k: v for k, v in zip(key, val)} + sample["class_label"] = np.zeros(len(class_index_dict.keys())) + for x in json_dict_raw["tag"]: + sample["class_label"][class_index_dict[x]] = 1 + sample["class_label"] = torch.tensor(sample["class_label"]).float() + del sample[text_ext] + sample["audio_name"] = sample["__key__"].split("/")[-1] + "." + audio_ext + sample["text_name"] = sample["__key__"].split("/")[-1] + "." + text_ext + sample["audio_orig_sr"] = orig_sr + return sample + + +def collate_fn(batch): + """ + Collate function for wdsdataloader. + batch: a list of dict, each dict is a sample + """ + # concatenate values in each dictionary. if it is a tensor, concatenate. if it is a list, extend. 
+ batch_dict = {} + for k in batch[0].keys(): + if isinstance(batch[0][k], dict): # dealwith bert tokenizer output + batch_dict[k] = {} + for kk in batch[0][k].keys(): + tmp = [] + for i in range(len(batch)): + tmp.append(batch[i][k][kk]) + batch_dict[k][kk] = torch.vstack(tmp) + elif isinstance(batch[0][k], torch.Tensor): + batch_dict[k] = torch.stack([sample[k] for sample in batch]) + elif isinstance(batch[0][k], np.ndarray): + batch_dict[k] = torch.tensor(np.stack([sample[k] for sample in batch])) + else: + batch_dict[k] = [sample[k] for sample in batch] + return batch_dict + + +def get_wds_dataset( + args, + model_cfg, + is_train, + audio_ext="flac", + text_ext="json", + max_len=480000, + proportion=1.0, + sizefilepath_=None, + is_local=None, +): + """ + Get a dataset for wdsdataloader. + """ + if is_local is None and (not args.remotedata is None): + is_local = not args.remotedata + + input_shards = args.train_data if is_train else args.val_data + assert input_shards is not None + + if not sizefilepath_ is None: + sizefilepath = sizefilepath_ + else: + sizefilepath = os.path.join(os.path.dirname(input_shards[0]), "sizes.json") + + if proportion != 1.0: + num_samples, num_shards, input_shards, _ = sample_prop( + sizefilepath, input_shards, proportion, is_local=is_local + ) + else: + num_samples, num_shards = get_dataset_size( + input_shards, sizefilepath_=sizefilepath_, is_local=is_local + ) + + if not num_samples: + if is_train: + num_samples = args.train_num_samples + if not num_samples: + raise RuntimeError( + "Currently, number of dataset samples must be specified for training dataset. " + "Please specify via `--train-num-samples` if no dataset length info present." + ) + else: + num_samples = ( + args.val_num_samples or 0 + ) # eval will just exhaust the iterator if not specified + + pipeline = [wds.SimpleShardList(input_shards)] + # at this point we have an iterator over all the shards + # TODO: (yusong): add a if statement of distributed. If not, we don't need to split_by_node + if is_train or args.parallel_eval: + pipeline.extend( + [ + wds.detshuffle( + bufsize=_SHARD_SHUFFLE_SIZE, + initial=_SHARD_SHUFFLE_INITIAL, + seed=args.seed, + ), + wds.split_by_node, + wds.split_by_worker, + # at this point, we have an iterator over the shards assigned to each worker at each node + wds.tarfile_to_samples(handler=log_and_continue), + wds.shuffle( + bufsize=_SAMPLE_SHUFFLE_SIZE, + initial=_SAMPLE_SHUFFLE_INITIAL, + rng=random.Random(args.seed), + ), + # wds.repeatedly, # FIXME determine if this is beneficial + ] + ) + else: + pipeline.extend( + [ + wds.split_by_worker, + # at this point, we have an iterator over the shards assigned to each worker + wds.tarfile_to_samples(handler=log_and_continue), + ] + ) + pipeline.append( + wds.map( + partial( + preprocess, + audio_ext=audio_ext, + text_ext=text_ext, + max_len=max_len, + audio_cfg=model_cfg["audio_cfg"], + class_index_dict=copy.deepcopy(args.class_index_dict), + data_filling=args.data_filling, + data_truncating=args.data_truncating, + text_augment_selection=args.text_augment_selection, + ) + ), + ) + + pipeline.append( + wds.batched( + args.batch_size, + partial=not (is_train or args.parallel_eval), + collation_fn=collate_fn, + ) + ) + + dataset = wds.DataPipeline(*pipeline) + if is_train or args.parallel_eval: + # (yusong): Currently parallel evaluation will be not precise as we are repeat the last few samples. + # (yusong): See comments below. 
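The rollover arithmetic on the following lines rounds the sample count up so that every node and every dataloader worker iterates over the same number of full batches (a few samples repeat at the end of each epoch). With hypothetical numbers:

```python
import math

# Hypothetical run: 100_000 samples, batch size 32 per process, 8 processes, 4 dataloader workers.
num_samples, batch_size, world_size, workers = 100_000, 32, 8, 4

global_batch_size = batch_size * world_size               # 256
num_batches = math.ceil(num_samples / global_batch_size)  # 391
num_worker_batches = math.ceil(num_batches / workers)     # 98 batches per worker
num_batches = num_worker_batches * workers                # 392 after rounding up
num_samples = num_batches * global_batch_size             # 100_352, so 352 samples repeat
print(num_batches, num_samples)
```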
+ # roll over and repeat a few samples to get same number of full batches on each node + global_batch_size = args.batch_size * args.world_size + num_batches = math.ceil(num_samples / global_batch_size) + num_workers = max(1, args.workers) + num_worker_batches = math.ceil( + num_batches / num_workers + ) # per dataloader worker + num_batches = num_worker_batches * num_workers + num_samples = num_batches * global_batch_size + dataset = dataset.with_epoch( + num_worker_batches + ) # each worker is iterating over this + else: + # last batches are partial, eval is done on single (master) node + num_batches = math.ceil(num_samples / args.batch_size) + + kwargs = {} + if args.horovod: # multi-node training on summit + kwargs["multiprocessing_context"] = "forkserver" + + dataloader = wds.WebLoader( + dataset, batch_size=None, shuffle=False, num_workers=args.workers, **kwargs + ) + + # FIXME not clear which approach is better, with_epoch before vs after dataloader? + # hoping to resolve via https://github.com/webdataset/webdataset/issues/169 + # if is_train: + # # roll over and repeat a few samples to get same number of full batches on each node + # global_batch_size = args.batch_size * args.world_size + # num_batches = math.ceil(num_samples / global_batch_size) + # num_workers = max(1, args.workers) + # num_batches = math.ceil(num_batches / num_workers) * num_workers + # num_samples = num_batches * global_batch_size + # dataloader = dataloader.with_epoch(num_batches) + # else: + # # last batches are partial, eval is done on single (master) node + # num_batches = math.ceil(num_samples / args.batch_size) + + # add meta-data to dataloader instance for convenience + dataloader.num_batches = num_batches + dataloader.num_samples = num_samples + + return DataInfo(dataloader, None) + + +def wds_batch_list2dict( + batch, + keys=[ + "__url__", + "__key__", + "waveform", + "text", + "raw_text", + "audio_name", + "text_name", + "audio_orig_sr", + ], +): + """ + Return a dictionary of the batch, with keys as the names of the fields. 
+ """ + assert len(keys) == len( + batch + ), "batch must have same number of keys as keys argument" + return {keys[i]: batch[i] for i in range(len(batch))} + + +def get_csv_dataset(args, preprocess_fn, is_train): + input_filename = args.train_data if is_train else args.val_data + assert input_filename + dataset = CsvDataset( + input_filename, + preprocess_fn, + img_key=args.csv_img_key, + caption_key=args.csv_caption_key, + sep=args.csv_separator, + ) + num_samples = len(dataset) + sampler = DistributedSampler(dataset) if args.distributed and is_train else None + shuffle = is_train and sampler is None + + dataloader = DataLoader( + dataset, + batch_size=args.batch_size, + shuffle=shuffle, + num_workers=args.workers, + pin_memory=True, + sampler=sampler, + drop_last=is_train, + ) + dataloader.num_samples = num_samples + dataloader.num_batches = len(dataloader) + + return DataInfo(dataloader, sampler) + + +def get_toy_dataset(args, model_cfg, is_train): + index_path = args.train_data if is_train else args.val_data + ipc_path = args.train_ipc if is_train else args.val_ipc + assert index_path and ipc_path + eval_mode = not is_train + dataset = ToyDataset(index_path, ipc_path, model_cfg, eval_mode=eval_mode) + + num_samples = len(dataset) + sampler = ( + DistributedSampler(dataset, shuffle=False) + if args.distributed and is_train + else None + ) + + dataloader = DataLoader( + dataset, + batch_size=args.batch_size, + shuffle=False, + num_workers=args.workers, + sampler=sampler, + drop_last=is_train, + ) + dataloader.num_samples = num_samples + dataloader.num_batches = len(dataloader) + + return DataInfo(dataloader, sampler) + + +def get_dataset_fn(data_path, dataset_type): + if dataset_type == "webdataset": + return get_wds_dataset + elif dataset_type == "csv": + return get_csv_dataset + elif dataset_type == "auto": + ext = data_path.split(".")[-1] + if ext in ["csv", "tsv"]: + return get_csv_dataset + elif ext in ["tar"]: + return get_wds_dataset + else: + raise ValueError( + f"Tried to figure out dataset type, but failed for extention {ext}." 
+ ) + elif dataset_type == "toy": + return get_toy_dataset + else: + raise ValueError(f"Unsupported dataset type: {dataset_type}") + + +def get_data(args, model_cfg): + data = {} + + args.class_index_dict = load_class_label(args.class_label_path) + + if args.datasetinfos is None: + args.datasetinfos = ["train", "unbalanced_train", "balanced_train"] + if args.dataset_type == "webdataset": + args.train_data = get_tar_path_from_dataset_name( + args.datasetnames, + args.datasetinfos, + islocal=not args.remotedata, + proportion=args.dataset_proportion, + dataset_path=args.datasetpath, + full_dataset=args.full_train_dataset, + ) + + if args.full_train_dataset is None: + args.full_train_dataset = [] + if args.exclude_eval_dataset is None: + args.exclude_eval_dataset = [] + excluded_eval_datasets = args.full_train_dataset + args.exclude_eval_dataset + + val_dataset_names = ( + [n for n in args.datasetnames if n not in excluded_eval_datasets] + if excluded_eval_datasets + else args.datasetnames + ) + args.val_dataset_names = val_dataset_names + args.val_data = get_tar_path_from_dataset_name( + val_dataset_names, + ["valid", "test", "eval"], + islocal=not args.remotedata, + proportion=1, + dataset_path=args.datasetpath, + full_dataset=None, + ) + + if args.train_data: + data["train"] = get_dataset_fn(args.train_data, args.dataset_type)( + args, model_cfg, is_train=True + ) + + if args.val_data: + data["val"] = get_dataset_fn(args.val_data, args.dataset_type)( + args, model_cfg, is_train=False + ) + + return data diff --git a/qa_mdt/audioldm_train/modules/clap/training/distributed.py b/qa_mdt/audioldm_train/modules/clap/training/distributed.py new file mode 100644 index 0000000000000000000000000000000000000000..2fa61f76c5cc3ab9f6a9643042afa8e1f2e1cb7f --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/distributed.py @@ -0,0 +1,150 @@ +import os + +import torch +import socket + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + + +def is_global_master(args): + return args.rank == 0 + + +def is_local_master(args): + return args.local_rank == 0 + + +def is_master(args, local=False): + return is_local_master(args) if local else is_global_master(args) + + +def is_using_horovod(): + # NOTE w/ horovod run, OMPI vars should be set, but w/ SLURM PMI vars will be set + # Differentiating between horovod and DDP use via SLURM may not be possible, so horovod arg still required... 
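The check that follows looks for OpenMPI-style rank variables (set by a horovodrun/mpirun launch) or PMI variables (set by SLURM). A standalone version of the same test, using a hypothetical OpenMPI environment:

```python
import os

def looks_like_horovod_env() -> bool:
    # OpenMPI exports OMPI_COMM_WORLD_*, while SLURM's PMI exports PMI_RANK / PMI_SIZE.
    ompi_vars = ("OMPI_COMM_WORLD_RANK", "OMPI_COMM_WORLD_SIZE")
    pmi_vars = ("PMI_RANK", "PMI_SIZE")
    return all(v in os.environ for v in ompi_vars) or all(v in os.environ for v in pmi_vars)

os.environ.update({"OMPI_COMM_WORLD_RANK": "0", "OMPI_COMM_WORLD_SIZE": "4"})  # pretend mpirun launch
print(looks_like_horovod_env())   # True
```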
+ ompi_vars = ["OMPI_COMM_WORLD_RANK", "OMPI_COMM_WORLD_SIZE"] + pmi_vars = ["PMI_RANK", "PMI_SIZE"] + if all([var in os.environ for var in ompi_vars]) or all( + [var in os.environ for var in pmi_vars] + ): + return True + else: + return False + + +def is_using_distributed(): + if "WORLD_SIZE" in os.environ: + return int(os.environ["WORLD_SIZE"]) > 1 + if "SLURM_NTASKS" in os.environ: + return int(os.environ["SLURM_NTASKS"]) > 1 + return False + + +def world_info_from_env(): + local_rank = 0 + for v in ( + "SLURM_LOCALID", + "MPI_LOCALRANKID", + "OMPI_COMM_WORLD_LOCAL_RANK", + "LOCAL_RANK", + ): + if v in os.environ: + local_rank = int(os.environ[v]) + break + global_rank = 0 + for v in ("SLURM_PROCID", "PMI_RANK", "OMPI_COMM_WORLD_RANK", "RANK"): + if v in os.environ: + global_rank = int(os.environ[v]) + break + world_size = 1 + for v in ("SLURM_NTASKS", "PMI_SIZE", "OMPI_COMM_WORLD_SIZE", "WORLD_SIZE"): + if v in os.environ: + world_size = int(os.environ[v]) + break + + return local_rank, global_rank, world_size + + +def init_distributed_device(args): + # Distributed training = training on more than one GPU. + # Works in both single and multi-node scenarios. + args.distributed = False + args.world_size = 1 + args.rank = 0 # global rank + args.local_rank = 0 + if args.horovod: + assert hvd is not None, "Horovod is not installed" + hvd.init() + world_size = int(os.environ["OMPI_COMM_WORLD_SIZE"]) + world_rank = int(os.environ["OMPI_COMM_WORLD_RANK"]) + local_rank = int(os.environ["OMPI_COMM_WORLD_LOCAL_RANK"]) + args.local_rank = local_rank + args.rank = world_rank + args.world_size = world_size + # args.local_rank = int(hvd.local_rank()) + # args.rank = hvd.rank() + # args.world_size = hvd.size() + args.distributed = True + os.environ["LOCAL_RANK"] = str(args.local_rank) + os.environ["RANK"] = str(args.rank) + os.environ["WORLD_SIZE"] = str(args.world_size) + print( + f"Distributed training: local_rank={args.local_rank}, " + f"rank={args.rank}, world_size={args.world_size}, " + f"hostname={socket.gethostname()}, pid={os.getpid()}" + ) + elif is_using_distributed(): + if "SLURM_PROCID" in os.environ: + # DDP via SLURM + args.local_rank, args.rank, args.world_size = world_info_from_env() + # SLURM var -> torch.distributed vars in case needed + os.environ["LOCAL_RANK"] = str(args.local_rank) + os.environ["RANK"] = str(args.rank) + os.environ["WORLD_SIZE"] = str(args.world_size) + torch.distributed.init_process_group( + backend=args.dist_backend, + init_method=args.dist_url, + world_size=args.world_size, + rank=args.rank, + ) + elif "OMPI_COMM_WORLD_SIZE" in os.environ: # using Summit cluster + world_size = int(os.environ["OMPI_COMM_WORLD_SIZE"]) + world_rank = int(os.environ["OMPI_COMM_WORLD_RANK"]) + local_rank = int(os.environ["OMPI_COMM_WORLD_LOCAL_RANK"]) + args.local_rank = local_rank + args.rank = world_rank + args.world_size = world_size + torch.distributed.init_process_group( + backend=args.dist_backend, + init_method=args.dist_url, + world_size=args.world_size, + rank=args.rank, + ) + else: + # DDP via torchrun, torch.distributed.launch + args.local_rank, _, _ = world_info_from_env() + torch.distributed.init_process_group( + backend=args.dist_backend, init_method=args.dist_url + ) + args.world_size = torch.distributed.get_world_size() + args.rank = torch.distributed.get_rank() + args.distributed = True + print( + f"Distributed training: local_rank={args.local_rank}, " + f"rank={args.rank}, world_size={args.world_size}, " + f"hostname={socket.gethostname()}, pid={os.getpid()}" + ) + 
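Once the ranks are known, each process pins its own GPU (the device-selection logic on the next lines). Distilled into a small helper with hypothetical arguments:

```python
import torch

def pick_device(distributed: bool, local_rank: int, no_set_device_rank: bool = False) -> str:
    # One GPU per local rank when running distributed, unless per-rank device pinning is disabled.
    if torch.cuda.is_available():
        if distributed and not no_set_device_rank:
            return "cuda:%d" % local_rank
        return "cuda:0"
    return "cpu"

print(pick_device(distributed=True, local_rank=2))  # 'cuda:2' if CUDA is available, otherwise 'cpu'
```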
+ if torch.cuda.is_available(): + if args.distributed and not args.no_set_device_rank: + device = "cuda:%d" % args.local_rank + else: + device = "cuda:0" + torch.cuda.set_device(device) + else: + device = "cpu" + args.device = device + device = torch.device(device) + return device diff --git a/qa_mdt/audioldm_train/modules/clap/training/imagenet_zeroshot_data.py b/qa_mdt/audioldm_train/modules/clap/training/imagenet_zeroshot_data.py new file mode 100644 index 0000000000000000000000000000000000000000..d32e55328d6799ccb8d61625f43abb80a33d6c17 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/imagenet_zeroshot_data.py @@ -0,0 +1,1088 @@ +# NOTE: This script is currently not supported for CLAP. + +imagenet_classnames = [ + "tench", + "goldfish", + "great white shark", + "tiger shark", + "hammerhead shark", + "electric ray", + "stingray", + "rooster", + "hen", + "ostrich", + "brambling", + "goldfinch", + "house finch", + "junco", + "indigo bunting", + "American robin", + "bulbul", + "jay", + "magpie", + "chickadee", + "American dipper", + "kite (bird of prey)", + "bald eagle", + "vulture", + "great grey owl", + "fire salamander", + "smooth newt", + "newt", + "spotted salamander", + "axolotl", + "American bullfrog", + "tree frog", + "tailed frog", + "loggerhead sea turtle", + "leatherback sea turtle", + "mud turtle", + "terrapin", + "box turtle", + "banded gecko", + "green iguana", + "Carolina anole", + "desert grassland whiptail lizard", + "agama", + "frilled-necked lizard", + "alligator lizard", + "Gila monster", + "European green lizard", + "chameleon", + "Komodo dragon", + "Nile crocodile", + "American alligator", + "triceratops", + "worm snake", + "ring-necked snake", + "eastern hog-nosed snake", + "smooth green snake", + "kingsnake", + "garter snake", + "water snake", + "vine snake", + "night snake", + "boa constrictor", + "African rock python", + "Indian cobra", + "green mamba", + "sea snake", + "Saharan horned viper", + "eastern diamondback rattlesnake", + "sidewinder rattlesnake", + "trilobite", + "harvestman", + "scorpion", + "yellow garden spider", + "barn spider", + "European garden spider", + "southern black widow", + "tarantula", + "wolf spider", + "tick", + "centipede", + "black grouse", + "ptarmigan", + "ruffed grouse", + "prairie grouse", + "peafowl", + "quail", + "partridge", + "african grey parrot", + "macaw", + "sulphur-crested cockatoo", + "lorikeet", + "coucal", + "bee eater", + "hornbill", + "hummingbird", + "jacamar", + "toucan", + "duck", + "red-breasted merganser", + "goose", + "black swan", + "tusker", + "echidna", + "platypus", + "wallaby", + "koala", + "wombat", + "jellyfish", + "sea anemone", + "brain coral", + "flatworm", + "nematode", + "conch", + "snail", + "slug", + "sea slug", + "chiton", + "chambered nautilus", + "Dungeness crab", + "rock crab", + "fiddler crab", + "red king crab", + "American lobster", + "spiny lobster", + "crayfish", + "hermit crab", + "isopod", + "white stork", + "black stork", + "spoonbill", + "flamingo", + "little blue heron", + "great egret", + "bittern bird", + "crane bird", + "limpkin", + "common gallinule", + "American coot", + "bustard", + "ruddy turnstone", + "dunlin", + "common redshank", + "dowitcher", + "oystercatcher", + "pelican", + "king penguin", + "albatross", + "grey whale", + "killer whale", + "dugong", + "sea lion", + "Chihuahua", + "Japanese Chin", + "Maltese", + "Pekingese", + "Shih Tzu", + "King Charles Spaniel", + "Papillon", + "toy terrier", + "Rhodesian Ridgeback", + "Afghan Hound", + "Basset Hound", + 
"Beagle", + "Bloodhound", + "Bluetick Coonhound", + "Black and Tan Coonhound", + "Treeing Walker Coonhound", + "English foxhound", + "Redbone Coonhound", + "borzoi", + "Irish Wolfhound", + "Italian Greyhound", + "Whippet", + "Ibizan Hound", + "Norwegian Elkhound", + "Otterhound", + "Saluki", + "Scottish Deerhound", + "Weimaraner", + "Staffordshire Bull Terrier", + "American Staffordshire Terrier", + "Bedlington Terrier", + "Border Terrier", + "Kerry Blue Terrier", + "Irish Terrier", + "Norfolk Terrier", + "Norwich Terrier", + "Yorkshire Terrier", + "Wire Fox Terrier", + "Lakeland Terrier", + "Sealyham Terrier", + "Airedale Terrier", + "Cairn Terrier", + "Australian Terrier", + "Dandie Dinmont Terrier", + "Boston Terrier", + "Miniature Schnauzer", + "Giant Schnauzer", + "Standard Schnauzer", + "Scottish Terrier", + "Tibetan Terrier", + "Australian Silky Terrier", + "Soft-coated Wheaten Terrier", + "West Highland White Terrier", + "Lhasa Apso", + "Flat-Coated Retriever", + "Curly-coated Retriever", + "Golden Retriever", + "Labrador Retriever", + "Chesapeake Bay Retriever", + "German Shorthaired Pointer", + "Vizsla", + "English Setter", + "Irish Setter", + "Gordon Setter", + "Brittany dog", + "Clumber Spaniel", + "English Springer Spaniel", + "Welsh Springer Spaniel", + "Cocker Spaniel", + "Sussex Spaniel", + "Irish Water Spaniel", + "Kuvasz", + "Schipperke", + "Groenendael dog", + "Malinois", + "Briard", + "Australian Kelpie", + "Komondor", + "Old English Sheepdog", + "Shetland Sheepdog", + "collie", + "Border Collie", + "Bouvier des Flandres dog", + "Rottweiler", + "German Shepherd Dog", + "Dobermann", + "Miniature Pinscher", + "Greater Swiss Mountain Dog", + "Bernese Mountain Dog", + "Appenzeller Sennenhund", + "Entlebucher Sennenhund", + "Boxer", + "Bullmastiff", + "Tibetan Mastiff", + "French Bulldog", + "Great Dane", + "St. 
Bernard", + "husky", + "Alaskan Malamute", + "Siberian Husky", + "Dalmatian", + "Affenpinscher", + "Basenji", + "pug", + "Leonberger", + "Newfoundland dog", + "Great Pyrenees dog", + "Samoyed", + "Pomeranian", + "Chow Chow", + "Keeshond", + "brussels griffon", + "Pembroke Welsh Corgi", + "Cardigan Welsh Corgi", + "Toy Poodle", + "Miniature Poodle", + "Standard Poodle", + "Mexican hairless dog (xoloitzcuintli)", + "grey wolf", + "Alaskan tundra wolf", + "red wolf or maned wolf", + "coyote", + "dingo", + "dhole", + "African wild dog", + "hyena", + "red fox", + "kit fox", + "Arctic fox", + "grey fox", + "tabby cat", + "tiger cat", + "Persian cat", + "Siamese cat", + "Egyptian Mau", + "cougar", + "lynx", + "leopard", + "snow leopard", + "jaguar", + "lion", + "tiger", + "cheetah", + "brown bear", + "American black bear", + "polar bear", + "sloth bear", + "mongoose", + "meerkat", + "tiger beetle", + "ladybug", + "ground beetle", + "longhorn beetle", + "leaf beetle", + "dung beetle", + "rhinoceros beetle", + "weevil", + "fly", + "bee", + "ant", + "grasshopper", + "cricket insect", + "stick insect", + "cockroach", + "praying mantis", + "cicada", + "leafhopper", + "lacewing", + "dragonfly", + "damselfly", + "red admiral butterfly", + "ringlet butterfly", + "monarch butterfly", + "small white butterfly", + "sulphur butterfly", + "gossamer-winged butterfly", + "starfish", + "sea urchin", + "sea cucumber", + "cottontail rabbit", + "hare", + "Angora rabbit", + "hamster", + "porcupine", + "fox squirrel", + "marmot", + "beaver", + "guinea pig", + "common sorrel horse", + "zebra", + "pig", + "wild boar", + "warthog", + "hippopotamus", + "ox", + "water buffalo", + "bison", + "ram (adult male sheep)", + "bighorn sheep", + "Alpine ibex", + "hartebeest", + "impala (antelope)", + "gazelle", + "arabian camel", + "llama", + "weasel", + "mink", + "European polecat", + "black-footed ferret", + "otter", + "skunk", + "badger", + "armadillo", + "three-toed sloth", + "orangutan", + "gorilla", + "chimpanzee", + "gibbon", + "siamang", + "guenon", + "patas monkey", + "baboon", + "macaque", + "langur", + "black-and-white colobus", + "proboscis monkey", + "marmoset", + "white-headed capuchin", + "howler monkey", + "titi monkey", + "Geoffroy's spider monkey", + "common squirrel monkey", + "ring-tailed lemur", + "indri", + "Asian elephant", + "African bush elephant", + "red panda", + "giant panda", + "snoek fish", + "eel", + "silver salmon", + "rock beauty fish", + "clownfish", + "sturgeon", + "gar fish", + "lionfish", + "pufferfish", + "abacus", + "abaya", + "academic gown", + "accordion", + "acoustic guitar", + "aircraft carrier", + "airliner", + "airship", + "altar", + "ambulance", + "amphibious vehicle", + "analog clock", + "apiary", + "apron", + "trash can", + "assault rifle", + "backpack", + "bakery", + "balance beam", + "balloon", + "ballpoint pen", + "Band-Aid", + "banjo", + "baluster / handrail", + "barbell", + "barber chair", + "barbershop", + "barn", + "barometer", + "barrel", + "wheelbarrow", + "baseball", + "basketball", + "bassinet", + "bassoon", + "swimming cap", + "bath towel", + "bathtub", + "station wagon", + "lighthouse", + "beaker", + "military hat (bearskin or shako)", + "beer bottle", + "beer glass", + "bell tower", + "baby bib", + "tandem bicycle", + "bikini", + "ring binder", + "binoculars", + "birdhouse", + "boathouse", + "bobsleigh", + "bolo tie", + "poke bonnet", + "bookcase", + "bookstore", + "bottle cap", + "hunting bow", + "bow tie", + "brass memorial plaque", + "bra", + "breakwater", + 
"breastplate", + "broom", + "bucket", + "buckle", + "bulletproof vest", + "high-speed train", + "butcher shop", + "taxicab", + "cauldron", + "candle", + "cannon", + "canoe", + "can opener", + "cardigan", + "car mirror", + "carousel", + "tool kit", + "cardboard box / carton", + "car wheel", + "automated teller machine", + "cassette", + "cassette player", + "castle", + "catamaran", + "CD player", + "cello", + "mobile phone", + "chain", + "chain-link fence", + "chain mail", + "chainsaw", + "storage chest", + "chiffonier", + "bell or wind chime", + "china cabinet", + "Christmas stocking", + "church", + "movie theater", + "cleaver", + "cliff dwelling", + "cloak", + "clogs", + "cocktail shaker", + "coffee mug", + "coffeemaker", + "spiral or coil", + "combination lock", + "computer keyboard", + "candy store", + "container ship", + "convertible", + "corkscrew", + "cornet", + "cowboy boot", + "cowboy hat", + "cradle", + "construction crane", + "crash helmet", + "crate", + "infant bed", + "Crock Pot", + "croquet ball", + "crutch", + "cuirass", + "dam", + "desk", + "desktop computer", + "rotary dial telephone", + "diaper", + "digital clock", + "digital watch", + "dining table", + "dishcloth", + "dishwasher", + "disc brake", + "dock", + "dog sled", + "dome", + "doormat", + "drilling rig", + "drum", + "drumstick", + "dumbbell", + "Dutch oven", + "electric fan", + "electric guitar", + "electric locomotive", + "entertainment center", + "envelope", + "espresso machine", + "face powder", + "feather boa", + "filing cabinet", + "fireboat", + "fire truck", + "fire screen", + "flagpole", + "flute", + "folding chair", + "football helmet", + "forklift", + "fountain", + "fountain pen", + "four-poster bed", + "freight car", + "French horn", + "frying pan", + "fur coat", + "garbage truck", + "gas mask or respirator", + "gas pump", + "goblet", + "go-kart", + "golf ball", + "golf cart", + "gondola", + "gong", + "gown", + "grand piano", + "greenhouse", + "radiator grille", + "grocery store", + "guillotine", + "hair clip", + "hair spray", + "half-track", + "hammer", + "hamper", + "hair dryer", + "hand-held computer", + "handkerchief", + "hard disk drive", + "harmonica", + "harp", + "combine harvester", + "hatchet", + "holster", + "home theater", + "honeycomb", + "hook", + "hoop skirt", + "gymnastic horizontal bar", + "horse-drawn vehicle", + "hourglass", + "iPod", + "clothes iron", + "carved pumpkin", + "jeans", + "jeep", + "T-shirt", + "jigsaw puzzle", + "rickshaw", + "joystick", + "kimono", + "knee pad", + "knot", + "lab coat", + "ladle", + "lampshade", + "laptop computer", + "lawn mower", + "lens cap", + "letter opener", + "library", + "lifeboat", + "lighter", + "limousine", + "ocean liner", + "lipstick", + "slip-on shoe", + "lotion", + "music speaker", + "loupe magnifying glass", + "sawmill", + "magnetic compass", + "messenger bag", + "mailbox", + "tights", + "one-piece bathing suit", + "manhole cover", + "maraca", + "marimba", + "mask", + "matchstick", + "maypole", + "maze", + "measuring cup", + "medicine cabinet", + "megalith", + "microphone", + "microwave oven", + "military uniform", + "milk can", + "minibus", + "miniskirt", + "minivan", + "missile", + "mitten", + "mixing bowl", + "mobile home", + "ford model t", + "modem", + "monastery", + "monitor", + "moped", + "mortar and pestle", + "graduation cap", + "mosque", + "mosquito net", + "vespa", + "mountain bike", + "tent", + "computer mouse", + "mousetrap", + "moving van", + "muzzle", + "metal nail", + "neck brace", + "necklace", + "baby pacifier", + "notebook 
computer", + "obelisk", + "oboe", + "ocarina", + "odometer", + "oil filter", + "pipe organ", + "oscilloscope", + "overskirt", + "bullock cart", + "oxygen mask", + "product packet / packaging", + "paddle", + "paddle wheel", + "padlock", + "paintbrush", + "pajamas", + "palace", + "pan flute", + "paper towel", + "parachute", + "parallel bars", + "park bench", + "parking meter", + "railroad car", + "patio", + "payphone", + "pedestal", + "pencil case", + "pencil sharpener", + "perfume", + "Petri dish", + "photocopier", + "plectrum", + "Pickelhaube", + "picket fence", + "pickup truck", + "pier", + "piggy bank", + "pill bottle", + "pillow", + "ping-pong ball", + "pinwheel", + "pirate ship", + "drink pitcher", + "block plane", + "planetarium", + "plastic bag", + "plate rack", + "farm plow", + "plunger", + "Polaroid camera", + "pole", + "police van", + "poncho", + "pool table", + "soda bottle", + "plant pot", + "potter's wheel", + "power drill", + "prayer rug", + "printer", + "prison", + "missile", + "projector", + "hockey puck", + "punching bag", + "purse", + "quill", + "quilt", + "race car", + "racket", + "radiator", + "radio", + "radio telescope", + "rain barrel", + "recreational vehicle", + "fishing casting reel", + "reflex camera", + "refrigerator", + "remote control", + "restaurant", + "revolver", + "rifle", + "rocking chair", + "rotisserie", + "eraser", + "rugby ball", + "ruler measuring stick", + "sneaker", + "safe", + "safety pin", + "salt shaker", + "sandal", + "sarong", + "saxophone", + "scabbard", + "weighing scale", + "school bus", + "schooner", + "scoreboard", + "CRT monitor", + "screw", + "screwdriver", + "seat belt", + "sewing machine", + "shield", + "shoe store", + "shoji screen / room divider", + "shopping basket", + "shopping cart", + "shovel", + "shower cap", + "shower curtain", + "ski", + "balaclava ski mask", + "sleeping bag", + "slide rule", + "sliding door", + "slot machine", + "snorkel", + "snowmobile", + "snowplow", + "soap dispenser", + "soccer ball", + "sock", + "solar thermal collector", + "sombrero", + "soup bowl", + "keyboard space bar", + "space heater", + "space shuttle", + "spatula", + "motorboat", + "spider web", + "spindle", + "sports car", + "spotlight", + "stage", + "steam locomotive", + "through arch bridge", + "steel drum", + "stethoscope", + "scarf", + "stone wall", + "stopwatch", + "stove", + "strainer", + "tram", + "stretcher", + "couch", + "stupa", + "submarine", + "suit", + "sundial", + "sunglasses", + "sunglasses", + "sunscreen", + "suspension bridge", + "mop", + "sweatshirt", + "swim trunks / shorts", + "swing", + "electrical switch", + "syringe", + "table lamp", + "tank", + "tape player", + "teapot", + "teddy bear", + "television", + "tennis ball", + "thatched roof", + "front curtain", + "thimble", + "threshing machine", + "throne", + "tile roof", + "toaster", + "tobacco shop", + "toilet seat", + "torch", + "totem pole", + "tow truck", + "toy store", + "tractor", + "semi-trailer truck", + "tray", + "trench coat", + "tricycle", + "trimaran", + "tripod", + "triumphal arch", + "trolleybus", + "trombone", + "hot tub", + "turnstile", + "typewriter keyboard", + "umbrella", + "unicycle", + "upright piano", + "vacuum cleaner", + "vase", + "vaulted or arched ceiling", + "velvet fabric", + "vending machine", + "vestment", + "viaduct", + "violin", + "volleyball", + "waffle iron", + "wall clock", + "wallet", + "wardrobe", + "military aircraft", + "sink", + "washing machine", + "water bottle", + "water jug", + "water tower", + "whiskey jug", + "whistle", + "hair 
wig", + "window screen", + "window shade", + "Windsor tie", + "wine bottle", + "airplane wing", + "wok", + "wooden spoon", + "wool", + "split-rail fence", + "shipwreck", + "sailboat", + "yurt", + "website", + "comic book", + "crossword", + "traffic or street sign", + "traffic light", + "dust jacket", + "menu", + "plate", + "guacamole", + "consomme", + "hot pot", + "trifle", + "ice cream", + "popsicle", + "baguette", + "bagel", + "pretzel", + "cheeseburger", + "hot dog", + "mashed potatoes", + "cabbage", + "broccoli", + "cauliflower", + "zucchini", + "spaghetti squash", + "acorn squash", + "butternut squash", + "cucumber", + "artichoke", + "bell pepper", + "cardoon", + "mushroom", + "Granny Smith apple", + "strawberry", + "orange", + "lemon", + "fig", + "pineapple", + "banana", + "jackfruit", + "cherimoya (custard apple)", + "pomegranate", + "hay", + "carbonara", + "chocolate syrup", + "dough", + "meatloaf", + "pizza", + "pot pie", + "burrito", + "red wine", + "espresso", + "tea cup", + "eggnog", + "mountain", + "bubble", + "cliff", + "coral reef", + "geyser", + "lakeshore", + "promontory", + "sandbar", + "beach", + "valley", + "volcano", + "baseball player", + "bridegroom", + "scuba diver", + "rapeseed", + "daisy", + "yellow lady's slipper", + "corn", + "acorn", + "rose hip", + "horse chestnut seed", + "coral fungus", + "agaric", + "gyromitra", + "stinkhorn mushroom", + "earth star fungus", + "hen of the woods mushroom", + "bolete", + "corn cob", + "toilet paper", +] + + +openai_imagenet_template = [ + lambda c: f"a bad photo of a {c}.", + lambda c: f"a photo of many {c}.", + lambda c: f"a sculpture of a {c}.", + lambda c: f"a photo of the hard to see {c}.", + lambda c: f"a low resolution photo of the {c}.", + lambda c: f"a rendering of a {c}.", + lambda c: f"graffiti of a {c}.", + lambda c: f"a bad photo of the {c}.", + lambda c: f"a cropped photo of the {c}.", + lambda c: f"a tattoo of a {c}.", + lambda c: f"the embroidered {c}.", + lambda c: f"a photo of a hard to see {c}.", + lambda c: f"a bright photo of a {c}.", + lambda c: f"a photo of a clean {c}.", + lambda c: f"a photo of a dirty {c}.", + lambda c: f"a dark photo of the {c}.", + lambda c: f"a drawing of a {c}.", + lambda c: f"a photo of my {c}.", + lambda c: f"the plastic {c}.", + lambda c: f"a photo of the cool {c}.", + lambda c: f"a close-up photo of a {c}.", + lambda c: f"a black and white photo of the {c}.", + lambda c: f"a painting of the {c}.", + lambda c: f"a painting of a {c}.", + lambda c: f"a pixelated photo of the {c}.", + lambda c: f"a sculpture of the {c}.", + lambda c: f"a bright photo of the {c}.", + lambda c: f"a cropped photo of a {c}.", + lambda c: f"a plastic {c}.", + lambda c: f"a photo of the dirty {c}.", + lambda c: f"a jpeg corrupted photo of a {c}.", + lambda c: f"a blurry photo of the {c}.", + lambda c: f"a photo of the {c}.", + lambda c: f"a good photo of the {c}.", + lambda c: f"a rendering of the {c}.", + lambda c: f"a {c} in a video game.", + lambda c: f"a photo of one {c}.", + lambda c: f"a doodle of a {c}.", + lambda c: f"a close-up photo of the {c}.", + lambda c: f"a photo of a {c}.", + lambda c: f"the origami {c}.", + lambda c: f"the {c} in a video game.", + lambda c: f"a sketch of a {c}.", + lambda c: f"a doodle of the {c}.", + lambda c: f"a origami {c}.", + lambda c: f"a low resolution photo of a {c}.", + lambda c: f"the toy {c}.", + lambda c: f"a rendition of the {c}.", + lambda c: f"a photo of the clean {c}.", + lambda c: f"a photo of a large {c}.", + lambda c: f"a rendition of a {c}.", + 
lambda c: f"a photo of a nice {c}.", + lambda c: f"a photo of a weird {c}.", + lambda c: f"a blurry photo of a {c}.", + lambda c: f"a cartoon {c}.", + lambda c: f"art of a {c}.", + lambda c: f"a sketch of the {c}.", + lambda c: f"a embroidered {c}.", + lambda c: f"a pixelated photo of a {c}.", + lambda c: f"itap of the {c}.", + lambda c: f"a jpeg corrupted photo of the {c}.", + lambda c: f"a good photo of a {c}.", + lambda c: f"a plushie {c}.", + lambda c: f"a photo of the nice {c}.", + lambda c: f"a photo of the small {c}.", + lambda c: f"a photo of the weird {c}.", + lambda c: f"the cartoon {c}.", + lambda c: f"art of the {c}.", + lambda c: f"a drawing of the {c}.", + lambda c: f"a photo of the large {c}.", + lambda c: f"a black and white photo of a {c}.", + lambda c: f"the plushie {c}.", + lambda c: f"a dark photo of a {c}.", + lambda c: f"itap of a {c}.", + lambda c: f"graffiti of the {c}.", + lambda c: f"a toy {c}.", + lambda c: f"itap of my {c}.", + lambda c: f"a photo of a cool {c}.", + lambda c: f"a photo of a small {c}.", + lambda c: f"a tattoo of the {c}.", +] diff --git a/qa_mdt/audioldm_train/modules/clap/training/infer_demo.py b/qa_mdt/audioldm_train/modules/clap/training/infer_demo.py new file mode 100644 index 0000000000000000000000000000000000000000..03cf4a0a8854096d8faa10c0d78099baffc48897 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/infer_demo.py @@ -0,0 +1,106 @@ +import sys + +sys.path.append("src/clap") + +import os +import torch +import librosa +from open_clip import create_model +from training.data import get_audio_features +from training.data import int16_to_float32, float32_to_int16 +from transformers import RobertaTokenizer + +tokenize = RobertaTokenizer.from_pretrained("roberta-base") + + +def tokenizer(text): + result = tokenize( + text, + padding="max_length", + truncation=True, + max_length=77, + return_tensors="pt", + ) + return {k: v.squeeze(0) for k, v in result.items()} + + +PRETRAINED_PATH = "/mnt/fast/nobackup/users/hl01486/projects/contrastive_pretraining/CLAP/assets/checkpoints/epoch_top_0_audioset_no_fusion.pt" +WAVE_48k_PATH = "/mnt/fast/nobackup/users/hl01486/projects/contrastive_pretraining/CLAP/assets/audio/machine.wav" + + +def infer_text(): + device = "cuda:0" if torch.cuda.is_available() else "cpu" + precision = "fp32" + amodel = "HTSAT-tiny" # or 'PANN-14' + tmodel = "roberta" # the best text encoder in our training + enable_fusion = False # False if you do not want to use the fusion model + fusion_type = "aff_2d" + pretrained = PRETRAINED_PATH + + model, model_cfg = create_model( + amodel, + tmodel, + pretrained, + precision=precision, + device=device, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + # load the text, can be a list (i.e. 
batch size) + text_data = ["I love the contrastive learning", "I love the pretrain model"] + # tokenize for roberta, if you want to tokenize for another text encoder, please refer to data.py#L43-90 + text_data = tokenizer(text_data) + + text_embed = model.get_text_embedding(text_data) + print(text_embed.size()) + + +def infer_audio(): + device = "cuda:0" if torch.cuda.is_available() else "cpu" + precision = "fp32" + amodel = "HTSAT-tiny" # or 'PANN-14' + tmodel = "roberta" # the best text encoder in our training + enable_fusion = False # False if you do not want to use the fusion model + fusion_type = "aff_2d" + pretrained = PRETRAINED_PATH + + model, model_cfg = create_model( + amodel, + tmodel, + pretrained, + precision=precision, + device=device, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + + # load the waveform of the shape (T,), should resample to 48000 + audio_waveform, sr = librosa.load(WAVE_48k_PATH, sr=48000) + # quantize + audio_waveform = int16_to_float32(float32_to_int16(audio_waveform)) + audio_waveform = torch.from_numpy(audio_waveform).float() + audio_dict = {} + + # the 'fusion' truncate mode can be changed to 'rand_trunc' if run in unfusion mode + import ipdb + + ipdb.set_trace() + audio_dict = get_audio_features( + audio_dict, + audio_waveform, + 480000, + data_truncating="fusion", + data_filling="repeatpad", + audio_cfg=model_cfg["audio_cfg"], + ) + # can send a list to the model, to process many audio tracks in one time (i.e. batch size) + audio_embed = model.get_audio_embedding([audio_dict]) + print(audio_embed.size()) + import ipdb + + ipdb.set_trace() + + +if __name__ == "__main__": + infer_text() + infer_audio() diff --git a/qa_mdt/audioldm_train/modules/clap/training/logger.py b/qa_mdt/audioldm_train/modules/clap/training/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..ac4634970fae6aacde2b7b808355dbd50c90ce73 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/logger.py @@ -0,0 +1,30 @@ +import logging + + +def setup_logging(log_file, level, include_host=False): + if include_host: + import socket + + hostname = socket.gethostname() + formatter = logging.Formatter( + f"%(asctime)s | {hostname} | %(levelname)s | %(message)s", + datefmt="%Y-%m-%d,%H:%M:%S", + ) + else: + formatter = logging.Formatter( + "%(asctime)s | %(levelname)s | %(message)s", datefmt="%Y-%m-%d,%H:%M:%S" + ) + + logging.root.setLevel(level) + loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict] + for logger in loggers: + logger.setLevel(level) + + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(formatter) + logging.root.addHandler(stream_handler) + + if log_file: + file_handler = logging.FileHandler(filename=log_file) + file_handler.setFormatter(formatter) + logging.root.addHandler(file_handler) diff --git a/qa_mdt/audioldm_train/modules/clap/training/lp_main.py b/qa_mdt/audioldm_train/modules/clap/training/lp_main.py new file mode 100644 index 0000000000000000000000000000000000000000..abc4effc4f9ae5f831aa13ca29eb9df6898da7e2 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/lp_main.py @@ -0,0 +1,669 @@ +from cmath import cos +from inspect import getargs +import logging +import os +import random +from datetime import datetime +import bisect +import copy +from sched import scheduler +import numpy as np +import torch +import torch.backends.cudnn as cudnn +from torch import optim +from torch.cuda.amp import GradScaler +import faulthandler +import pathlib +import argparse 
+import time + +try: + import wandb +except ImportError: + wandb = None + +try: + import torch.utils.tensorboard as tensorboard +except ImportError: + tensorboard = None + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + +from open_clip import create_model_and_transforms, trace_model, create_model +from training.data import get_data +from training.params import parse_args +from training.distributed import is_master, init_distributed_device, world_info_from_env +from training.logger import setup_logging +from training.scheduler import cosine_lr +from training.lp_train import train_one_epoch, evaluate +from open_clip.utils import get_tar_path_from_dataset_name, dataset_split, get_optimizer +from open_clip.utils import load_p, load_class_label +from open_clip.linear_probe import LinearProbe + + +def maintain_ckpts(args, startidx, all_idx_len): + for i in reversed(range(startidx, all_idx_len)): + if os.path.exists(os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt")): + os.rename( + os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt"), + os.path.join(args.checkpoint_path, f"epoch_top_{i+1}.pt"), + ) + if os.path.exists( + os.path.join(args.checkpoint_path, f"epoch_top_{all_idx_len}.pt") + ): + os.remove(os.path.join(args.checkpoint_path, f"epoch_top_{all_idx_len}.pt")) + return + + +def update_top_k_performance( + new_metrics_inputs, current_top_k_ckpt_metrics, args, ckpt, bignumbetter=True +): + """ + Record the top-k performance of the current epoch. + current_top_k_metrics is a dictionary of the form: {1: top_1_ckpt_measure, 2: top_2_ckpt_measure, ...} + """ + if isinstance(new_metrics_inputs, (list, tuple)): + new_metrics_inputs = np.mean(new_metrics_inputs) + return update_top_k_performance( + new_metrics_inputs, + current_top_k_ckpt_metrics, + args=args, + ckpt=ckpt, + bignumbetter=bignumbetter, + ) + elif isinstance(new_metrics_inputs, dict): + new_metrics_inputs = np.mean(list(new_metrics_inputs.values())) + return update_top_k_performance( + new_metrics_inputs, + current_top_k_ckpt_metrics, + args=args, + ckpt=ckpt, + bignumbetter=bignumbetter, + ) + elif isinstance(new_metrics_inputs, (float, int)): + update_flag = {k: False for k in current_top_k_ckpt_metrics.keys()} + sorted_keys = sorted(current_top_k_ckpt_metrics.keys()) + sorted_values = sorted( + current_top_k_ckpt_metrics.values(), reverse=bignumbetter + ) + sorted_values_ = copy.deepcopy(sorted_values) + sorted_values.append(new_metrics_inputs) + sorted_values = sorted(sorted_values, reverse=bignumbetter) + sorted_values = sorted_values[:-1] + + if sorted_values == sorted_values_: + return current_top_k_ckpt_metrics, new_metrics_inputs + else: + for i in range(len(sorted_keys)): + if current_top_k_ckpt_metrics[sorted_keys[i]] != sorted_values[i]: + current_top_k_ckpt_metrics[sorted_keys[i]] = sorted_values[i] + update_flag[sorted_keys[i]] = True + for i in range(len(update_flag)): + if update_flag[i]: + maintain_ckpts(args, i, len(sorted_keys)) + torch.save( + ckpt, + os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt"), + ) + break + return current_top_k_ckpt_metrics, new_metrics_inputs + + +# def updateifNone(a, b): +# a = b if None else a +# return a + + +def is_pretrained_params(n): + return ( + n.startswith("clap_model.transformer") + or n in ["clap_model.positional_embedding", "clap_model.text_projection"] + or n.startswith("clap_model.token_embedding") + or n.startswith("clap_model.ln_final") + or n.startswith("clap_model.logit_scale_t") + ) + + +def random_seed(seed=42, rank=0): + 
torch.manual_seed(seed + rank) + np.random.seed(seed + rank) + random.seed(seed + rank) + + +def config_lp_optimizer(model, data, args): + # set wd-related params to 0 if use adam optimizer + if args.optimizer == "adam": + args.wd = 0 + args.wd_pretrained = 0 + args.wd_new = 0 + + in_clap = lambda n, p: n.startswith("clap_model") + + named_parameters = list(model.named_parameters()) + + optimizer = {} + scheduler = {} + + # freeze text encoder + text_freeze_parameters = [ + p + for n, p in named_parameters + if n.startswith("clap_model.transformer") + or n in ["clap_model.positional_embedding", "clap_model.text_projection"] + or n.startswith("clap_model.token_embedding") + or n.startswith("clap_model.ln_final") + ] + + if args.freeze_text: + logging.info("Freeze Text!!!!") + for k in text_freeze_parameters: + k.requires_grad = False + + if not args.lp_freeze: + exclude = ( + lambda n, p: p.ndim < 2 + or "bn" in n + or "ln" in n + or "bias" in n + or "logit_scale" in n + ) + include = lambda n, p: not exclude(n, p) + + # (yusong): we do not split the learning rate anymore + # p for n, p in named_parameters if in_clap(n,p) and exclude(n, p) and p.requires_grad + gain_or_bias_params = [ + p for n, p in named_parameters if exclude(n, p) and p.requires_grad + ] + # rest_params = [p for n, p in named_parameters if in_clap(n,p) and include(n, p) and p.requires_grad] + rest_params = [ + p for n, p in named_parameters if include(n, p) and p.requires_grad + ] + + if args.train_data is None: + optimizer = None + scheduler = None + else: + total_steps = data["train"].dataloader.num_batches * args.epochs + + if args.split_opt: + for x in ["lr", "beta1", "beta2", "eps", "wd"]: + for y in ["_new", "_pretrained"]: + if getattr(args, x + y) is None: + setattr(args, x + y, getattr(args, x)) + + gain_or_bias_pretrained_params = [ + p + for n, p in named_parameters + if (exclude(n, p) and p.requires_grad) and is_pretrained_params(n) + ] + rest_pretrained_params = [ + p + for n, p in named_parameters + if (include(n, p) and p.requires_grad) and is_pretrained_params(n) + ] + gain_or_bias_new_params = [ + p + for n, p in named_parameters + if (exclude(n, p) and p.requires_grad) + and (not is_pretrained_params(n)) + ] + rest_new_params = [ + p + for n, p in named_parameters + if (include(n, p) and p.requires_grad) + and (not is_pretrained_params(n)) + ] + + pretrained_params_optimizer = get_optimizer( + [ + {"params": gain_or_bias_pretrained_params, "weight_decay": 0.0}, + { + "params": rest_pretrained_params, + "weight_decay": args.wd_pretrained, + }, + ], + lr=args.lr_pretrained, + betas=(args.beta1_pretrained, args.beta2_pretrained), + eps=args.eps_pretrained, + momentum=args.momentum_pretrained, + optimizer_name=args.optimizer, + ) + pretrained_params_scheduler = cosine_lr( + pretrained_params_optimizer, + args.lr_pretrained, + args.warmup, + total_steps, + ) + + new_params_optimizer = get_optimizer( + [ + {"params": gain_or_bias_new_params, "weight_decay": 0.0}, + {"params": rest_new_params, "weight_decay": args.wd_new}, + ], + lr=args.lr_new, + betas=(args.beta1_new, args.beta2_new), + eps=args.eps_new, + momentum=args.momentum_new, + optimizer_name=args.optimizer, + ) + new_params_scheduler = cosine_lr( + new_params_optimizer, args.lr_new, args.warmup, total_steps + ) + + optimizer["text"] = pretrained_params_optimizer + optimizer["audio"] = new_params_optimizer + scheduler["text"] = pretrained_params_scheduler + scheduler["audio"] = new_params_scheduler + + if args.horovod: + pretrained_params_optimizer = 
hvd.DistributedOptimizer( + pretrained_params_optimizer, + named_parameters=model.named_parameters(), + ) + new_params_optimizer = hvd.DistributedOptimizer( + new_params_optimizer, named_parameters=model.named_parameters() + ) + hvd.broadcast_parameters(model.state_dict(), root_rank=0) + hvd.broadcast_optimizer_state( + pretrained_params_optimizer, root_rank=0 + ) + hvd.broadcast_optimizer_state(new_params_optimizer, root_rank=0) + else: + optimizer["clap"] = get_optimizer( + [ + {"params": gain_or_bias_params, "weight_decay": 0.0}, + {"params": rest_params, "weight_decay": args.wd}, + ], + lr=args.lr, + betas=(args.beta1, args.beta2), + eps=args.eps, + momentum=args.momentum, + optimizer_name=args.optimizer, + ) + scheduler["clap"] = cosine_lr( + optimizer["clap"], args.lr, args.warmup, total_steps + ) + + if args.horovod: + optimizer["clap"] = hvd.DistributedOptimizer( + optimizer["clap"], named_parameters=model.named_parameters() + ) + hvd.broadcast_parameters(model.state_dict(), root_rank=0) + hvd.broadcast_optimizer_state(optimizer["clap"], root_rank=0) + + # linear probe optimizer + else: + lp_params = [ + p for n, p in named_parameters if (not in_clap(n, p)) and p.requires_grad + ] + lp_optim = get_optimizer( + lp_params, + lr=args.lp_lr, + betas=(args.beta1, args.beta2), + eps=args.eps, + momentum=0.9, + optimizer_name=args.optimizer, + ) + optimizer["lp"] = lp_optim + + return optimizer, scheduler, text_freeze_parameters + + +def main(): + args = parse_args() + + time.sleep(args.sleep) + + # sanitize model name for filesystem / uri use, easier if we don't use / in name as a rule? + args.amodel = args.amodel.replace("/", "-") + # download sizes.json file + + # (yusong): the below two lines are for debug + # print("setting up faulthandler") + # faulthandler.register(10) + + random.seed(args.seed) + torch.manual_seed(args.seed) + torch.cuda.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + np.random.seed(args.seed) + args.class_index_dict = load_class_label(args.class_label_path) + + # get the name of the experiments + if args.name is None: + args.name = "-".join( + [ + datetime.now().strftime("%Y_%m_%d-%H_%M_%S"), + f"linear_probe" f"model_{args.amodel}", + f"lr_{args.lr}", + f"b_{args.batch_size}", + f"j_{args.workers}", + f"p_{args.precision}", + ] + ) + + # discover initial world args early so we can log properly + args.distributed = False + args.local_rank, args.rank, args.world_size = world_info_from_env() + + if args.remotedata and is_master(args): + for dataset_name in args.datasetnames: + for split in dataset_split[dataset_name]: + if not os.path.exists(f"./json_files/{dataset_name}/{split}"): + os.makedirs(f"./json_files/{dataset_name}/{split}") + os.system( + f"aws s3 cp s3://s-laion-audio/webdataset_tar/{dataset_name}/{split}/sizes.json ./json_files/{dataset_name}/{split}/sizes.json" + ) + + args.log_path = None + if is_master(args, local=args.log_local): + log_base_path = os.path.join(args.logs, args.name) + os.makedirs(log_base_path, exist_ok=True) + log_filename = f"out-{args.rank}" if args.log_local else "out.log" + args.log_path = os.path.join(log_base_path, log_filename) + + # avoid log dir in same name: + postfix = 0 + while os.path.exists(args.log_path): + postfix += 1 + log_base_path_new = log_base_path + "-" + str(postfix) + os.makedirs(log_base_path_new, exist_ok=True) + log_filename = f"out-{args.rank}" if args.log_local else "out.log" + args.log_path = os.path.join(log_base_path_new, log_filename) + # print( + # "Error. 
Experiment already exists. Use --name {} to specify a new experiment." + # ) + # return -1 + + # Set logger + args.log_level = logging.DEBUG if args.debug else logging.INFO + setup_logging(args.log_path, args.log_level) + + # fully initialize distributed device environment + device = init_distributed_device(args) + + args.wandb = "wandb" in args.report_to or "all" in args.report_to + args.tensorboard = "tensorboard" in args.report_to or "all" in args.report_to + if is_master(args): + args.tensorboard_path = ( + os.path.join(args.logs, args.name, "tensorboard") + if args.tensorboard + else "" + ) + args.checkpoint_path = os.path.join(args.logs, args.name, "checkpoints") + for dirname in [args.tensorboard_path, args.checkpoint_path]: + if dirname: + os.makedirs(dirname, exist_ok=True) + else: + args.tensorboard_path = "" + args.checkpoint_path = "" + + if args.copy_codebase: + copy_codebase(args) + + assert args.precision in ["amp", "fp16", "fp32"] + if args.precision == "fp16": + logging.warning( + "It is recommended to use AMP mixed-precision instead of FP16. " + "FP16 support needs further verification and tuning, especially for train." + ) + + if args.horovod: + logging.info( + f"Running in horovod mode with multiple processes / nodes. Device: {args.device}." + f"Process (global: {args.rank}, local {args.local_rank}), total {args.world_size}." + ) + elif args.distributed: + logging.info( + f"Running in distributed mode with multiple processes. Device: {args.device}." + f"Process (global: {args.rank}, local {args.local_rank}), total {args.world_size}." + ) + else: + logging.info(f"Running with a single process. Device {args.device}.") + + logging.info(f"openai cache dir: {os.path.expanduser(args.openai_model_cache_dir)}") + + # Create CLAP model + clap_model, clap_model_cfg = create_model( + args.amodel, + args.tmodel, + args.pretrained, + precision=args.precision, + device=device, + jit=args.torchscript, + force_quick_gelu=args.force_quick_gelu, + openai_model_cache_dir=os.path.expanduser(args.openai_model_cache_dir), + skip_params=False, + pretrained_audio=args.pretrained_audio, + pretrained_text=args.pretrained_text, + enable_fusion=args.enable_fusion, + fusion_type=args.fusion_type, + ) + + args.lp_out_ch = len(list(args.class_index_dict.keys())) + # Linear Probe + logging.info(f"linear probe using mlp: {args.lp_mlp}") + logging.info(f"linear probe using freeze: {args.lp_freeze}") + logging.info(f"linear probe act layer: {args.lp_act}") + logging.info(f"linear probe out ch: {args.lp_out_ch}") + logging.info(f"linear probe learning rate (if applicable): {args.lp_lr}") + logging.info(f"linear probe loss func: {args.lp_loss}") + logging.info(f"linear probe lp_metrics: {args.lp_metrics}") + + model = LinearProbe( + clap_model, + mlp=args.lp_mlp, + freeze=args.lp_freeze, + in_ch=512, + out_ch=args.lp_out_ch, + act=args.lp_act, + ) # in_ch is fixed (i.e., 512) + model = model.to(device) + + if args.horovod: + with torch.no_grad(): + for param in model.parameters(): + param.set_(param.contiguous()) + + if args.trace: + model = trace_model(model, batch_size=args.batch_size, device=device) + + if is_master(args): + logging.info("Linear Probe CLAP Model:") + logging.info(f"{str(clap_model)}") + logging.info("Params:") + params_file = os.path.join(args.logs, args.name, "params.txt") + with open(params_file, "w") as f: + for name in sorted(vars(args)): + val = getattr(args, name) + logging.info(f" {name}: {val}") + f.write(f"{name}: {val}\n") + + if args.distributed and not args.horovod: + if 
args.use_bn_sync: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) + ddp_args = {} + if args.ddp_static_graph: + # this doesn't exist in older PyTorch, arg only added if enabled + ddp_args["static_graph"] = True + model = torch.nn.parallel.DistributedDataParallel( + model, device_ids=[device], find_unused_parameters=True, **ddp_args + ) + + data = get_data(args, clap_model_cfg) + assert len(data), "At least one train or eval dataset must be specified." + if args.trace: + assert "train" not in data, "Cannot train with traced model" + + optimizer, scheduler, text_freeze_parameters = config_lp_optimizer( + model, data, args + ) + + scaler = GradScaler() if args.precision == "amp" else None + + # optionally resume from a checkpoint + start_epoch = 0 + if args.resume is not None: + if os.path.isfile(args.resume): + checkpoint = torch.load(args.resume, map_location=device) + if "epoch" in checkpoint: + # resuming a train checkpoint w/ epoch and optimizer state + start_epoch = checkpoint["epoch"] + sd = checkpoint["state_dict"] + if not args.distributed and next(iter(sd.items()))[0].startswith( + "module" + ): + sd = {k[len("module.") :]: v for k, v in sd.items()} + model.load_state_dict(sd) + if args.split_opt: + if optimizer is not None: + for k, o_ in optimizer.items(): + o_.load_state_dict(checkpoint[k + "_" + "optimizer"]) + if optimizer is not None: + optimizer.load_state_dict(checkpoint["optimizer"]) + if scaler is not None and "scaler" in checkpoint: + scaler.load_state_dict(checkpoint["scaler"]) + logging.info( + f"=> resuming checkpoint '{args.resume}' (epoch {start_epoch})" + ) + else: + # loading a bare (model only) checkpoint for fine-tune or evaluation + model.load_state_dict(checkpoint) + logging.info( + f"=> loaded checkpoint '{args.resume}' (epoch {start_epoch})" + ) + if args.freeze_text: + print("Freeze Text!!!!") + for k in text_freeze_parameters: + k.requires_grad = False + else: + logging.info("=> no checkpoint found at '{}'".format(args.resume)) + + cudnn.benchmark = True + cudnn.deterministic = False + + # determine if this worker should save logs and checkpoints. only do so if it is rank == 0 + args.save_logs = args.logs and args.logs.lower() != "none" and is_master(args) + writer = None + if args.save_logs and args.tensorboard: + assert tensorboard is not None, "Please install tensorboard." + writer = tensorboard.SummaryWriter(args.tensorboard_path) + + if args.wandb and is_master(args): + assert wandb is not None, "Please install wandb." + logging.debug("Starting wandb.") + args.train_sz = data["train"].dataloader.num_samples + if args.val_data is not None: + args.val_sz = data["val"].dataloader.num_samples + # you will have to configure this for your project! 
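+        # illustrative only: the usual edits here are the W&B project name and, for team accounts, an entity, e.g. wandb.init(project="my-clap-linear-probe", entity="my-team")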
+ wandb.init( + project="clap", + notes=args.wandb_notes, + name=args.wandb_notes, + tags=[], + config=vars(args), + ) + if args.debug: + wandb.watch(model, log="all") + wandb.save(params_file) + logging.debug("Finished loading wandb.") + + if "train" not in data: + evaluate(model, data, start_epoch, args, writer) + return + elif start_epoch == 0 and "val" in data and not args.no_eval: + evaluate(model, data, 0, args, writer) + if args.save_top_performance: + current_top_k_ckpt_metrics = { + i: 0 for i in range(args.save_top_performance) + } # initialize the top-k metric for ckpts to 0 + + for epoch in range(start_epoch, args.epochs): + # freeze the text param after (include) args.freeze_text_after, this is -1 by default + if epoch == args.freeze_text_after: + print("Text pretrained parameters are freezed since this epoch.") + for k in text_freeze_parameters: + k.requires_grad = False + if is_master(args): + logging.info(f"Start epoch {epoch}") + + train_one_epoch(model, data, epoch, optimizer, scaler, scheduler, args, writer) + completed_epoch = epoch + 1 + + if ( + any(v in data for v in ("val", "imagenet-val", "imagenet-v2")) + and not args.no_eval + ): + metrics = evaluate(model, data, completed_epoch, args, writer) + if args.save_top_performance: + top_k_dataset = args.top_k_checkpoint_select_dataset + top_k_metric = args.top_k_checkpoint_select_metric + filtered_metrics = [ + v + for k, v in metrics.items() + if top_k_metric in k and top_k_dataset in k + ] # check all R@10 metrics (all dataset) and use it to update the ckpt + # Saving checkpoints. + if args.save_logs: + opt_dict = { + k + "_" + "optimizer": v.state_dict() for k, v in optimizer.items() + } + checkpoint_dict = { + "epoch": completed_epoch, + "name": args.name, + "state_dict": model.state_dict(), + } + checkpoint_dict.update(opt_dict) + if scaler is not None: + checkpoint_dict["scaler"] = scaler.state_dict() + + if completed_epoch == args.epochs or ( + args.save_frequency > 0 and (completed_epoch % args.save_frequency) == 0 + ): + torch.save( + checkpoint_dict, + os.path.join(args.checkpoint_path, f"epoch_{completed_epoch}.pt"), + ) + if args.save_most_recent: + torch.save( + checkpoint_dict, + os.path.join(args.checkpoint_path, f"epoch_latest.pt"), + ) + if args.save_top_performance and not args.no_eval: + update_top_k_performance( + filtered_metrics, + current_top_k_ckpt_metrics, + args, + checkpoint_dict, + bignumbetter=True, + ) + + if args.wandb and is_master(args): + wandb.finish() + + +def copy_codebase(args): + from shutil import copytree, ignore_patterns + + new_code_path = os.path.join(args.logs, args.name, "code") + if os.path.exists(new_code_path): + print( + f"Error. Experiment already exists at {new_code_path}. Use --name to specify a new experiment." 
+ ) + return -1 + print(f"Copying codebase to {new_code_path}") + current_code_path = os.path.realpath(__file__) + for _ in range(3): + current_code_path = os.path.dirname(current_code_path) + copytree( + current_code_path, new_code_path, ignore=ignore_patterns("log", "logs", "wandb") + ) + print("Done copying code.") + return 1 + + +if __name__ == "__main__": + main() diff --git a/qa_mdt/audioldm_train/modules/clap/training/lp_train.py b/qa_mdt/audioldm_train/modules/clap/training/lp_train.py new file mode 100644 index 0000000000000000000000000000000000000000..24a19bacd0a4b789415cfccbce1f8bc99bc493ed --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/lp_train.py @@ -0,0 +1,301 @@ +import json +import logging +import math +import os +import time +from contextlib import suppress + +import numpy as np +import torch +import torch.nn.functional as F + +try: + import wandb +except ImportError: + wandb = None + +from open_clip import LPLoss, LPMetrics, lp_gather_features +from open_clip.utils import do_mixup, get_mix_lambda +from .distributed import is_master +from .zero_shot import zero_shot_eval + + +class AverageMeter(object): + """Computes and stores the average and current value""" + + def __init__(self): + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + +def unwrap_model(model): + if hasattr(model, "module"): + return model.module + else: + return model + + +def train_one_epoch( + model, + data, + epoch, + optimizer, + scaler, + scheduler, + args, + tb_writer=None, + extra_suffix="", +): + device = torch.device(args.device) + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + model.train() + loss = LPLoss(args.lp_loss) + + dataloader, sampler = data["train"].dataloader, data["train"].sampler + if args.distributed and sampler is not None: + sampler.set_epoch(epoch) + num_batches_per_epoch = dataloader.num_batches + sample_digits = math.ceil(math.log(dataloader.num_samples + 1, 10)) + + # for toy dataset + if args.dataset_type == "toy": + dataloader.dataset.generate_queue() + + loss_m = AverageMeter() + batch_time_m = AverageMeter() + data_time_m = AverageMeter() + end = time.time() + + for i, batch in enumerate(dataloader): + step = num_batches_per_epoch * epoch + i + + if isinstance(scheduler, dict): + for s in scheduler.values(): + s(step) + else: + scheduler(step) + + audio = batch # contains mel_spec, wavform, and longer list + class_label = batch["class_label"] + # audio = audio.to(device=device, non_blocking=True) + class_label = class_label.to(device=device, non_blocking=True) + + if args.mixup: + # https://github.com/RetroCirce/HTS-Audio-Transformer/blob/main/utils.py#L146 + mix_lambda = torch.from_numpy( + get_mix_lambda(0.5, len(audio["waveform"])) + ).to(device) + class_label = do_mixup(class_label, mix_lambda) + else: + mix_lambda = None + + data_time_m.update(time.time() - end) + if isinstance(optimizer, dict): + for o_ in optimizer.values(): + o_.zero_grad() + else: + optimizer.zero_grad() + + with autocast(): + pred = model(audio, mix_lambda=mix_lambda, device=device) + total_loss = loss(pred, class_label) + + if isinstance(optimizer, dict): + if scaler is not None: + scaler.scale(total_loss).backward() + for o_ in optimizer.values(): + if args.horovod: + o_.synchronize() + scaler.unscale_(o_) + with o_.skip_synchronize(): + scaler.step(o_) + else: + 
scaler.step(o_) + scaler.update() + else: + total_loss.backward() + for o_ in optimizer.values(): + o_.step() + else: + if scaler is not None: + scaler.scale(total_loss).backward() + if args.horovod: + optimizer.synchronize() + scaler.unscale_(optimizer) + with optimizer.skip_synchronize(): + scaler.step(optimizer) + else: + scaler.step(optimizer) + scaler.update() + else: + total_loss.backward() + optimizer.step() + + # Note: we clamp to 4.6052 = ln(100), as in the original paper. + with torch.no_grad(): + unwrap_model(model).clap_model.logit_scale_a.clamp_(0, math.log(100)) + unwrap_model(model).clap_model.logit_scale_t.clamp_(0, math.log(100)) + + batch_time_m.update(time.time() - end) + end = time.time() + batch_count = i + 1 + + if is_master(args) and (i % 100 == 0 or batch_count == num_batches_per_epoch): + if isinstance(audio, dict): + batch_size = len(audio["waveform"]) + else: + batch_size = len(audio) + num_samples = batch_count * batch_size * args.world_size + samples_per_epoch = dataloader.num_samples + percent_complete = 100.0 * batch_count / num_batches_per_epoch + + # NOTE loss is coarsely sampled, just master node and per log update + loss_m.update(total_loss.item(), batch_size) + if isinstance(optimizer, dict): + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {[o_.param_groups[0]['lr'] for o_ in optimizer.values()]}" + ) + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "lr": [o_.param_groups[0]["lr"] for o_ in optimizer.values()], + } + else: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {optimizer.param_groups[0]['lr']:5f} " + ) + + # Save train loss / etc. Using non avg meter values as loggers have their own smoothing + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "lr": optimizer.param_groups[0]["lr"], + } + for name, val in log_data.items(): + name = f"train{extra_suffix}/{name}" + if tb_writer is not None: + tb_writer.add_scalar(name, val, step) + if args.wandb: + assert wandb is not None, "Please install wandb." 
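+                    # push each scalar in log_data to W&B individually, keyed by the global optimization step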
+ wandb.log({name: val, "step": step}) + + # resetting batch / data time meters per log window + batch_time_m.reset() + data_time_m.reset() + # end for + + +def evaluate(model, data, epoch, args, tb_writer=None, extra_suffix=""): + metrics = {} + if not args.parallel_eval: + if not is_master(args): + return metrics + device = torch.device(args.device) + model.eval() + + # CHANGE + # zero_shot_metrics = zero_shot_eval(model, data, epoch, args) + # metrics.update(zero_shot_metrics) + if is_master(args): + print("Evaluating...") + metric_names = args.lp_metrics.split(",") + eval_tool = LPMetrics(metric_names=metric_names) + + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + if "val" in data and ( + args.val_frequency + and ((epoch % args.val_frequency) == 0 or epoch == args.epochs) + ): + if args.parallel_eval: + dataloader, sampler = data["val"].dataloader, data["val"].sampler + if args.distributed and sampler is not None: + sampler.set_epoch(epoch) + samples_per_val = dataloader.num_samples + else: + dataloader = data["val"].dataloader + num_samples = 0 + samples_per_val = dataloader.num_samples + + eval_info = {"pred": [], "target": []} + with torch.no_grad(): + for i, batch in enumerate(dataloader): + audio = batch # contains mel_spec, wavform, and longer list + class_label = batch["class_label"] + + # audio = audio.to(device=device, non_blocking=True) + class_label = class_label.to(device=device, non_blocking=True) + + with autocast(): + pred = model(audio, device=device) + if args.parallel_eval: + pred, class_label = lp_gather_features( + pred, class_label, args.world_size, args.horovod + ) + eval_info["pred"].append(pred) + eval_info["target"].append(class_label) + + num_samples += class_label.shape[0] + + if (i % 100) == 0: # and i != 0: + logging.info( + f"Eval Epoch: {epoch} [{num_samples} / {samples_per_val}]" + ) + + if is_master(args): + eval_info["pred"] = torch.cat(eval_info["pred"], 0).cpu() + eval_info["target"] = torch.cat(eval_info["target"], 0).cpu() + metric_dict = eval_tool.evaluate_mertics( + eval_info["pred"], eval_info["target"] + ) + metrics.update(metric_dict) + if "epoch" not in metrics.keys(): + metrics.update({"epoch": epoch}) + + if is_master(args): + if not metrics: + return metrics + + logging.info( + f"Eval Epoch: {epoch} " + + "\n".join( + ["\t".join([f"{m}: {round(metrics[m], 4):.4f}"]) for m in metrics] + ) + ) + if args.save_logs: + for name, val in metrics.items(): + if tb_writer is not None: + tb_writer.add_scalar(f"val{extra_suffix}/{name}", val, epoch) + + with open(os.path.join(args.checkpoint_path, "results.jsonl"), "a+") as f: + f.write(json.dumps(metrics)) + f.write("\n") + + if args.wandb: + assert wandb is not None, "Please install wandb." 
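+            # mirror every evaluation metric to W&B under the val/ prefix, tagged with the current epoch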
+ for name, val in metrics.items(): + wandb.log({f"val{extra_suffix}/{name}": val, "epoch": epoch}) + + return metrics + else: + return metrics diff --git a/qa_mdt/audioldm_train/modules/clap/training/main.py b/qa_mdt/audioldm_train/modules/clap/training/main.py new file mode 100644 index 0000000000000000000000000000000000000000..3b563a5d001be7adfbe779dee7ad8ac49aadc50d --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/main.py @@ -0,0 +1,596 @@ +from inspect import getargs +import logging +import os +import random +from datetime import datetime +import bisect +import copy +import numpy as np +import torch +import torch.backends.cudnn as cudnn +from torch import optim +from torch.cuda.amp import GradScaler +import faulthandler +import pathlib + +try: + import wandb +except ImportError: + wandb = None + +try: + import torch.utils.tensorboard as tensorboard +except ImportError: + tensorboard = None + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + +from open_clip import create_model_and_transforms, trace_model, create_model +from training.data import get_data +from training.distributed import is_master, init_distributed_device, world_info_from_env +from training.logger import setup_logging +from training.params import parse_args +from training.scheduler import cosine_lr +from training.train import train_one_epoch, evaluate +from open_clip.utils import dataset_split, get_optimizer + + +def maintain_ckpts(args, startidx, all_idx_len): + for i in reversed(range(startidx, all_idx_len)): + if os.path.exists(os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt")): + os.rename( + os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt"), + os.path.join(args.checkpoint_path, f"epoch_top_{i+1}.pt"), + ) + if os.path.exists( + os.path.join(args.checkpoint_path, f"epoch_top_{all_idx_len}.pt") + ): + os.remove(os.path.join(args.checkpoint_path, f"epoch_top_{all_idx_len}.pt")) + return + + +def update_top_k_performance( + new_metrics_inputs, current_top_k_ckpt_metrics, args, ckpt, bignumbetter=True +): + """ + Record the top-k performance of the current epoch. 
+ current_top_k_metrics is a dictionary of the form: {1: top_1_ckpt_measure, 2: top_2_ckpt_measure, ...} + """ + if isinstance(new_metrics_inputs, (list, tuple)): + new_metrics_inputs = np.mean(new_metrics_inputs) + return update_top_k_performance( + new_metrics_inputs, + current_top_k_ckpt_metrics, + args=args, + ckpt=ckpt, + bignumbetter=bignumbetter, + ) + elif isinstance(new_metrics_inputs, dict): + new_metrics_inputs = np.mean(list(new_metrics_inputs.values())) + return update_top_k_performance( + new_metrics_inputs, + current_top_k_ckpt_metrics, + args=args, + ckpt=ckpt, + bignumbetter=bignumbetter, + ) + elif isinstance(new_metrics_inputs, (float, int)): + update_flag = {k: False for k in current_top_k_ckpt_metrics.keys()} + sorted_keys = sorted(current_top_k_ckpt_metrics.keys()) + sorted_values = sorted( + current_top_k_ckpt_metrics.values(), reverse=bignumbetter + ) + sorted_values_ = copy.deepcopy(sorted_values) + sorted_values.append(new_metrics_inputs) + sorted_values = sorted(sorted_values, reverse=bignumbetter) + sorted_values = sorted_values[:-1] + + if sorted_values == sorted_values_: + return current_top_k_ckpt_metrics, new_metrics_inputs + else: + for i in range(len(sorted_keys)): + if current_top_k_ckpt_metrics[sorted_keys[i]] != sorted_values[i]: + current_top_k_ckpt_metrics[sorted_keys[i]] = sorted_values[i] + update_flag[sorted_keys[i]] = True + for i in range(len(update_flag)): + if update_flag[i]: + maintain_ckpts(args, i, len(sorted_keys)) + torch.save( + ckpt, + os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt"), + ) + break + return current_top_k_ckpt_metrics, new_metrics_inputs + + +# def updateifNone(a, b): +# a = b if None else a +# return a + + +def is_pretrained_params(n): + return ( + n.startswith("transformer") + or n in ["positional_embedding", "text_projection"] + or n.startswith("token_embedding") + or n.startswith("ln_final") + or n.startswith("logit_scale_t") + ) + + +def random_seed(seed=42, rank=0): + torch.manual_seed(seed + rank) + np.random.seed(seed + rank) + random.seed(seed + rank) + + +def main(): + args = parse_args() + # sanitize model name for filesystem / uri use, easier if we don't use / in name as a rule? + args.amodel = args.amodel.replace("/", "-") + # download sizes.json file + + # (yusong): the below two lines are for debug + # print("setting up faulthandler") + # faulthandler.register(10) + + random.seed(args.seed) + torch.manual_seed(args.seed) + torch.cuda.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + np.random.seed(args.seed) + if args.tmodel == "bert" or args.tmodel == "roberta" or args.tmodel == "bart": + assert ( + args.pretrained == "" or args.pretrained is None + ), "bert/roberta/bart text encoder does not support pretrained models." 
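+    # (create_model below still receives args.pretrained_audio / args.pretrained_text, so per-branch encoder weights can be loaded; this assert only rejects a joint args.pretrained checkpoint for these text encoders)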
+ + # get the name of the experiments + if args.name is None: + args.name = "-".join( + [ + datetime.now().strftime("%Y_%m_%d-%H_%M_%S"), + f"model_{args.amodel}", + f"lr_{args.lr}", + f"b_{args.batch_size}", + f"j_{args.workers}", + f"p_{args.precision}", + ] + ) + + # discover initial world args early so we can log properly + args.distributed = False + args.local_rank, args.rank, args.world_size = world_info_from_env() + + if args.remotedata and is_master(args): + for dataset_name in args.datasetnames: + for split in dataset_split[dataset_name]: + if not os.path.exists(f"./json_files/{dataset_name}/{split}"): + os.makedirs(f"./json_files/{dataset_name}/{split}") + os.system( + f"aws s3 cp s3://s-laion-audio/webdataset_tar/{dataset_name}/{split}/sizes.json ./json_files/{dataset_name}/{split}/sizes.json" + ) + + args.log_path = None + if is_master(args, local=args.log_local): + log_base_path = os.path.join(args.logs, args.name) + os.makedirs(log_base_path, exist_ok=True) + log_filename = f"out-{args.rank}" if args.log_local else "out.log" + args.log_path = os.path.join(log_base_path, log_filename) + if os.path.exists(args.log_path): + print( + "Error. Experiment already exists. Use --name {} to specify a new experiment." + ) + return -1 + + # Set logger + args.log_level = logging.DEBUG if args.debug else logging.INFO + setup_logging(args.log_path, args.log_level) + + # fully initialize distributed device environment + device = init_distributed_device(args) + + args.wandb = "wandb" in args.report_to or "all" in args.report_to + args.tensorboard = "tensorboard" in args.report_to or "all" in args.report_to + if is_master(args): + args.tensorboard_path = ( + os.path.join(args.logs, args.name, "tensorboard") + if args.tensorboard + else "" + ) + args.checkpoint_path = os.path.join(args.logs, args.name, "checkpoints") + for dirname in [args.tensorboard_path, args.checkpoint_path]: + if dirname: + os.makedirs(dirname, exist_ok=True) + else: + args.tensorboard_path = "" + args.checkpoint_path = "" + + if args.copy_codebase: + copy_codebase(args) + + assert args.precision in ["amp", "fp16", "fp32"] + if args.precision == "fp16": + logging.warning( + "It is recommended to use AMP mixed-precision instead of FP16. " + "FP16 support needs further verification and tuning, especially for train." + ) + + if args.horovod: + logging.info( + f"Running in horovod mode with multiple processes / nodes. Device: {args.device}." + f"Process (global: {args.rank}, local {args.local_rank}), total {args.world_size}." + ) + elif args.distributed: + logging.info( + f"Running in distributed mode with multiple processes. Device: {args.device}." + f"Process (global: {args.rank}, local {args.local_rank}), total {args.world_size}." + ) + else: + logging.info(f"Running with a single process. 
Device {args.device}.") + + logging.info(f"openai cache dir: {os.path.expanduser(args.openai_model_cache_dir)}") + + model, model_cfg = create_model( + args.amodel, + args.tmodel, + args.pretrained, + precision=args.precision, + device=device, + jit=args.torchscript, + force_quick_gelu=args.force_quick_gelu, + openai_model_cache_dir=os.path.expanduser(args.openai_model_cache_dir), + skip_params=True, + pretrained_audio=args.pretrained_audio, + pretrained_text=args.pretrained_text, + enable_fusion=args.enable_fusion, + fusion_type=args.fusion_type, + ) + + if args.horovod: + with torch.no_grad(): + for param in model.parameters(): + param.set_(param.contiguous()) + + if args.trace: + model = trace_model(model, batch_size=args.batch_size, device=device) + + if is_master(args): + logging.info("Model:") + logging.info(f"{str(model)}") + logging.info("Params:") + params_file = os.path.join(args.logs, args.name, "params.txt") + with open(params_file, "w") as f: + for name in sorted(vars(args)): + val = getattr(args, name) + logging.info(f" {name}: {val}") + f.write(f"{name}: {val}\n") + + if args.distributed and not args.horovod: + if args.use_bn_sync: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) + ddp_args = {} + if args.ddp_static_graph: + # this doesn't exist in older PyTorch, arg only added if enabled + ddp_args["static_graph"] = True + model = torch.nn.parallel.DistributedDataParallel( + model, device_ids=[device], find_unused_parameters=True, **ddp_args + ) + + data = get_data(args, model_cfg) + assert len(data), "At least one train or eval dataset must be specified." + if args.trace: + assert "train" not in data, "Cannot train with traced model" + + exclude = ( + lambda n, p: p.ndim < 2 + or "bn" in n + or "ln" in n + or "bias" in n + or "logit_scale" in n + ) + include = lambda n, p: not exclude(n, p) + + named_parameters = list(model.named_parameters()) + + # freeze text encoder + text_freeze_parameters = [p for n, p in named_parameters if "text_branch" in n] + + if args.freeze_text: + print("Freeze Text!!!!") + for k in text_freeze_parameters: + k.requires_grad = False + + gain_or_bias_params = [ + p for n, p in named_parameters if exclude(n, p) and p.requires_grad + ] + rest_params = [p for n, p in named_parameters if include(n, p) and p.requires_grad] + + # set wd-related params to 0 if use adam optimizer + if args.optimizer == "adam": + args.wd = 0 + args.wd_pretrained = 0 + args.wd_new = 0 + + if args.train_data is None: + optimizer = None + scheduler = None + else: + total_steps = data["train"].dataloader.num_batches * args.epochs + + if args.split_opt: + for x in ["lr", "beta1", "beta2", "eps", "wd"]: + for y in ["_new", "_pretrained"]: + if getattr(args, x + y) is None: + setattr(args, x + y, getattr(args, x)) + + gain_or_bias_pretrained_params = [ + p + for n, p in named_parameters + if (exclude(n, p) and p.requires_grad) and is_pretrained_params(n) + ] + rest_pretrained_params = [ + p + for n, p in named_parameters + if (include(n, p) and p.requires_grad) and is_pretrained_params(n) + ] + gain_or_bias_new_params = [ + p + for n, p in named_parameters + if (exclude(n, p) and p.requires_grad) and (not is_pretrained_params(n)) + ] + rest_new_params = [ + p + for n, p in named_parameters + if (include(n, p) and p.requires_grad) and (not is_pretrained_params(n)) + ] + pretrained_params_optimizer = get_optimizer( + [ + {"params": gain_or_bias_pretrained_params, "weight_decay": 0.0}, + { + "params": rest_pretrained_params, + "weight_decay": args.wd_pretrained, + }, 
+ ], + lr=args.lr_pretrained, + betas=(args.beta1_pretrained, args.beta2_pretrained), + eps=args.eps_pretrained, + momentum=args.momentum_pretrained, + optimizer_name=args.optimizer, + ) + pretrained_params_scheduler = cosine_lr( + pretrained_params_optimizer, + args.lr_pretrained, + args.warmup, + total_steps, + ) + new_params_optimizer = get_optimizer( + [ + {"params": gain_or_bias_new_params, "weight_decay": 0.0}, + {"params": rest_new_params, "weight_decay": args.wd_new}, + ], + lr=args.lr_new, + betas=(args.beta1_new, args.beta2_new), + eps=args.eps_new, + momentum=args.momentum_new, + optimizer_name=args.optimizer, + ) + + new_params_scheduler = cosine_lr( + new_params_optimizer, args.lr_new, args.warmup, total_steps + ) + + optimizer = { + "pretrained": pretrained_params_optimizer, + "new": new_params_optimizer, + } + scheduler = { + "pretrained": pretrained_params_scheduler, + "new": new_params_scheduler, + } + + if args.horovod: + pretrained_params_optimizer = hvd.DistributedOptimizer( + pretrained_params_optimizer, + named_parameters=model.named_parameters(), + ) + new_params_optimizer = hvd.DistributedOptimizer( + new_params_optimizer, named_parameters=model.named_parameters() + ) + hvd.broadcast_parameters(model.state_dict(), root_rank=0) + hvd.broadcast_optimizer_state(pretrained_params_optimizer, root_rank=0) + hvd.broadcast_optimizer_state(new_params_optimizer, root_rank=0) + else: + optimizer = get_optimizer( + [ + {"params": gain_or_bias_params, "weight_decay": 0.0}, + {"params": rest_params, "weight_decay": args.wd}, + ], + lr=args.lr, + betas=(args.beta1, args.beta2), + eps=args.eps, + momentum=args.momentum, + optimizer_name=args.optimizer, + ) + + scheduler = cosine_lr(optimizer, args.lr, args.warmup, total_steps) + + if args.horovod: + optimizer = hvd.DistributedOptimizer( + optimizer, named_parameters=model.named_parameters() + ) + hvd.broadcast_parameters(model.state_dict(), root_rank=0) + hvd.broadcast_optimizer_state(optimizer, root_rank=0) + + scaler = GradScaler() if args.precision == "amp" else None + + # optionally resume from a checkpoint + start_epoch = 0 + if args.resume is not None: + if os.path.isfile(args.resume): + checkpoint = torch.load(args.resume, map_location=device) + if "epoch" in checkpoint: + # resuming a train checkpoint w/ epoch and optimizer state + start_epoch = checkpoint["epoch"] + sd = checkpoint["state_dict"] + if not args.distributed and next(iter(sd.items()))[0].startswith( + "module" + ): + sd = {k[len("module.") :]: v for k, v in sd.items()} + model.load_state_dict(sd) + if args.split_opt: + if optimizer is not None: + for k, o_ in optimizer.items(): + o_.load_state_dict(checkpoint[k + "_" + "optimizer"]) + if optimizer is not None: + optimizer.load_state_dict(checkpoint["optimizer"]) + if scaler is not None and "scaler" in checkpoint: + scaler.load_state_dict(checkpoint["scaler"]) + logging.info( + f"=> resuming checkpoint '{args.resume}' (epoch {start_epoch})" + ) + else: + # loading a bare (model only) checkpoint for fine-tune or evaluation + model.load_state_dict(checkpoint) + logging.info( + f"=> loaded checkpoint '{args.resume}' (epoch {start_epoch})" + ) + if args.freeze_text: + print("Freeze Text!!!!") + for k in text_freeze_parameters: + k.requires_grad = False + else: + logging.info("=> no checkpoint found at '{}'".format(args.resume)) + + cudnn.benchmark = True + cudnn.deterministic = False + + # determine if this worker should save logs and checkpoints. 
only do so if it is rank == 0 + args.save_logs = args.logs and args.logs.lower() != "none" and is_master(args) + writer = None + if args.save_logs and args.tensorboard: + assert tensorboard is not None, "Please install tensorboard." + writer = tensorboard.SummaryWriter(args.tensorboard_path) + + if args.wandb and is_master(args): + assert wandb is not None, "Please install wandb." + logging.debug("Starting wandb.") + args.train_sz = data["train"].dataloader.num_samples + if args.val_data is not None: + args.val_sz = data["val"].dataloader.num_samples + # you will have to configure this for your project! + wandb.init( + project="clap", + notes=args.wandb_notes, + name=args.wandb_notes, + tags=[], + config=vars(args), + ) + if args.debug: + wandb.watch(model, log="all") + wandb.save(params_file) + logging.debug("Finished loading wandb.") + + if "train" not in data: + evaluate(model, data, start_epoch, args, writer) + return + elif start_epoch == 0 and "val" in data and not args.no_eval: + evaluate(model, data, 0, args, writer) + # print(f'rank {args.rank}, Start First Evaluation')# (yusong): for debug + if args.save_top_performance: + current_top_k_ckpt_metrics = { + i: 0 for i in range(args.save_top_performance) + } # initialize the top-k metric for ckpts to 0 + + # print(f'rank {args.rank}, Start Training') # (yusong): for debug + for epoch in range(start_epoch, args.epochs): + # freeze the text param after (include) args.freeze_text_after, this is -1 by default + if epoch == args.freeze_text_after: + print("Text pretrained parameters are freezed since this epoch.") + for k in text_freeze_parameters: + k.requires_grad = False + if is_master(args): + logging.info(f"Start epoch {epoch}") + + train_one_epoch(model, data, epoch, optimizer, scaler, scheduler, args, writer) + completed_epoch = epoch + 1 + + if ( + any(v in data for v in ("val", "imagenet-val", "imagenet-v2")) + and not args.no_eval + ): + metrics = evaluate(model, data, completed_epoch, args, writer) + if args.save_top_performance: + top_k_dataset = args.top_k_checkpoint_select_dataset + top_k_metric = args.top_k_checkpoint_select_metric + filtered_metrics = [ + v + for k, v in metrics.items() + if top_k_metric in k and top_k_dataset in k + ] # check all R@10 metrics (all dataset) and use it to update the ckpt + # Saving checkpoints. + if args.save_logs: + if args.split_opt: + opt_dict = { + k + "_" + "optimizer": v.state_dict() for k, v in optimizer.items() + } + else: + opt_dict = {"optimizer": optimizer.state_dict()} + checkpoint_dict = { + "epoch": completed_epoch, + "name": args.name, + "state_dict": model.state_dict(), + } + checkpoint_dict.update(opt_dict) + if scaler is not None: + checkpoint_dict["scaler"] = scaler.state_dict() + + if completed_epoch == args.epochs or ( + args.save_frequency > 0 and (completed_epoch % args.save_frequency) == 0 + ): + torch.save( + checkpoint_dict, + os.path.join(args.checkpoint_path, f"epoch_{completed_epoch}.pt"), + ) + if args.save_most_recent: + torch.save( + checkpoint_dict, + os.path.join(args.checkpoint_path, f"epoch_latest.pt"), + ) + if args.save_top_performance and not args.no_eval: + update_top_k_performance( + filtered_metrics, + current_top_k_ckpt_metrics, + args, + checkpoint_dict, + bignumbetter=True, + ) + + if args.wandb and is_master(args): + wandb.finish() + + +def copy_codebase(args): + from shutil import copytree, ignore_patterns + + new_code_path = os.path.join(args.logs, args.name, "code") + if os.path.exists(new_code_path): + print( + f"Error. 
Experiment already exists at {new_code_path}. Use --name to specify a new experiment." + ) + return -1 + print(f"Copying codebase to {new_code_path}") + current_code_path = os.path.realpath(__file__) + for _ in range(3): + current_code_path = os.path.dirname(current_code_path) + copytree( + current_code_path, new_code_path, ignore=ignore_patterns("log", "logs", "wandb") + ) + print("Done copying code.") + return 1 + + +if __name__ == "__main__": + main() diff --git a/qa_mdt/audioldm_train/modules/clap/training/params.py b/qa_mdt/audioldm_train/modules/clap/training/params.py new file mode 100644 index 0000000000000000000000000000000000000000..0cc1a0e2d982e900988cf5a4b24b2e59b093537b --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/params.py @@ -0,0 +1,563 @@ +import argparse + + +def get_default_params(model_name): + # Params from paper (https://arxiv.org/pdf/2103.00020.pdf) + model_name = model_name.lower() + if "vit" in model_name: + return {"lr": 5.0e-4, "beta1": 0.9, "beta2": 0.98, "eps": 1.0e-6} + else: + return {"lr": 5.0e-4, "beta1": 0.9, "beta2": 0.999, "eps": 1.0e-8} + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--train-data", + type=str, + default=None, + help="Path to h5 filewith training data", + ) + parser.add_argument( + "--val-data", + type=str, + default=None, + help="Path to h5 file with validation data", + ) + parser.add_argument( + "--freeze-text", + default=False, + action="store_true", + help="if you need to freeze the text encoder, make this True", + ) + parser.add_argument( + "--freeze-text-after", + type=int, + default=-1, + help="if you need to freeze the text encoder after (include) epoch x, set this param to x. Set -1 to disable it", + ) + parser.add_argument( + "--train-ipc", + type=str, + default=None, + help="Path to npy file of the number of instance per class in training data", + ) + parser.add_argument( + "--val-ipc", + type=str, + default=None, + help="Path to npy file of the number of instance per class in validation data", + ) + parser.add_argument( + "--train-num-samples", + type=int, + default=None, + help="Number of samples in dataset. Required for webdataset if not available in info file.", + ) + parser.add_argument( + "--val-num-samples", + type=int, + default=None, + help="Number of samples in dataset. Useful for webdataset if not available in info file.", + ) + parser.add_argument( + "--dataset-type", + choices=["webdataset", "csv", "auto", "toy"], + default="auto", + help="Which type of dataset to process.", + ) + parser.add_argument( + "--csv-separator", + type=str, + default="\t", + help="For csv-like datasets, which separator to use.", + ) + parser.add_argument( + "--csv-img-key", + type=str, + default="filepath", + help="For csv-like datasets, the name of the key for the image paths.", + ) + parser.add_argument( + "--csv-caption-key", + type=str, + default="title", + help="For csv-like datasets, the name of the key for the captions.", + ) + parser.add_argument( + "--imagenet-val", + type=str, + default=None, + help="Path to imagenet val set for conducting zero shot evaluation.", + ) + parser.add_argument( + "--imagenet-v2", + type=str, + default=None, + help="Path to imagenet v2 for conducting zero shot evaluation.", + ) + parser.add_argument( + "--datasetnames", + nargs="+", + default=None, + help="If loading webdataset, spedify the dataset names to load. 
Can be some of these: Clotho, audioset, audiocaps, BBCSoundEffects", + ) + parser.add_argument( + "--full-train-dataset", + nargs="+", + default=None, + help="Which dataset will be trained with all the subsets. (train+test)", + ) + parser.add_argument( + "--exclude-eval-dataset", + nargs="+", + default=None, + help="Which dataset will be excluded with evaluation", + ) + parser.add_argument( + "--datasetinfos", + nargs="+", + default=None, + help="If loading webdataset, spedify the dataset types to load. Can be some of these: train, test, valid, unbalanced_train, balanced_train, eval", + ) + parser.add_argument( + "--dataset-proportion", + type=float, + default=1.0, + help="How much proportion of dataset we want to train.", + ) + parser.add_argument( + "--remotedata", + default=False, + action="store_true", + help="if the dataset is remote, set this flag", + ) + parser.add_argument( + "--class-label-path", + type=str, + default=None, + help="The path of the class label pickle or csv.", + ) + parser.add_argument( + "--datasetpath", + type=str, + default="/mnt/audio_clip/webdataset_tar", + help="The path to the dataset", + ) + parser.add_argument( + "--logs", + type=str, + default="./logs/", + help="Where to store tensorboard logs. Use None to avoid storing logs.", + ) + parser.add_argument( + "--log-local", + action="store_true", + default=False, + help="log files on local master, otherwise global master only.", + ) + parser.add_argument( + "--name", + type=str, + default=None, + help="Optional identifier for the experiment when storing logs. Otherwise use current time.", + ) + parser.add_argument( + "--workers", type=int, default=1, help="Number of workers per GPU." + ) + parser.add_argument( + "--batch-size", type=int, default=64, help="Batch size per GPU." + ) + parser.add_argument( + "--epochs", type=int, default=32, help="Number of epochs to train for." + ) + parser.add_argument("--lr", type=float, default=None, help="Learning rate.") + parser.add_argument("--beta1", type=float, default=None, help="Adam beta 1.") + parser.add_argument("--beta2", type=float, default=None, help="Adam beta 2.") + parser.add_argument("--eps", type=float, default=None, help="Adam epsilon.") + parser.add_argument("--momentum", type=float, default=None, help="SGD epsilon.") + parser.add_argument("--wd", type=float, default=0.2, help="Weight decay.") + + parser.add_argument( + "--split-opt", + action="store_true", + default=False, + help="Use this flag to skip the learning rate decay.", + ) + parser.add_argument( + "--lr-pretrained", type=float, default=None, help="Learning rate for text." + ) + parser.add_argument( + "--beta1-pretrained", type=float, default=None, help="Adam beta 1 for text." + ) + parser.add_argument( + "--beta2-pretrained", type=float, default=None, help="Adam beta 2 for text." + ) + parser.add_argument( + "--eps-pretrained", type=float, default=None, help="Adam epsilon for text." + ) + parser.add_argument( + "--wd-pretrained", type=float, default=0.2, help="Weight decay for text." + ) + parser.add_argument( + "--momentum-pretrained", type=float, default=0.9, help="Momentum for text." + ) + parser.add_argument( + "--lr-new", type=float, default=None, help="Learning rate for audio." + ) + parser.add_argument( + "--beta1-new", type=float, default=None, help="Adam beta 1 for audio." + ) + parser.add_argument( + "--beta2-new", type=float, default=None, help="Adam beta 2 for audio." + ) + parser.add_argument( + "--eps-new", type=float, default=None, help="Adam epsilon for audio." 
+ ) + parser.add_argument( + "--wd-new", type=float, default=0.2, help="Weight decay for audio." + ) + parser.add_argument( + "--momentum-new", type=float, default=0.9, help="Momentum for audio." + ) + parser.add_argument( + "--warmup", type=int, default=10000, help="Number of steps to warmup for." + ) + parser.add_argument( + "--use-bn-sync", + default=False, + action="store_true", + help="Whether to use batch norm sync.", + ) + parser.add_argument( + "--skip-scheduler", + action="store_true", + default=False, + help="Use this flag to skip the learning rate decay.", + ) + parser.add_argument( + "--save-frequency", type=int, default=1, help="How often to save checkpoints." + ) + parser.add_argument( + "--save-top-performance", + type=int, + default=0, + help="Save the top x performance weights if the value >0", + ) + parser.add_argument( + "--save-most-recent", + action="store_true", + default=False, + help="Always save the most recent model trained to epoch_latest.pt.", + ) + parser.add_argument( + "--zeroshot-frequency", type=int, default=2, help="How often to run zero shot." + ) + parser.add_argument( + "--val-frequency", + type=int, + default=1, + help="How often to run evaluation with val data.", + ) + parser.add_argument( + "--resume", + default=None, + type=str, + help="path to latest checkpoint (default: none)", + ) + parser.add_argument( + "--precision", + choices=["amp", "fp16", "fp32"], + default="amp", + help="Floating point precision.", + ) + parser.add_argument( + "--amodel", + type=str, + default="RN50", + help="Name of the audio backbone to use.", + ) + parser.add_argument( + "--tmodel", + type=str, + default="transformer", + help="Name of the text backbone to use. Can be [transformer, bert, roberta, bart]", + ) + parser.add_argument( + "--pretrained-audio", + default="", + type=str, + help="Use a pretrained audio model weights for the audio encoder of CLAP", + ) + parser.add_argument( + "--pretrained-text", + default="", + type=str, + help="Use a pretrained text model weights for the text encoder of CLAP", + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + help="Use a pretrained CLIP model weights with the specified tag or file path.", + ) + parser.add_argument( + "--pretrained-image", + default=False, + action="store_true", + help="Load imagenet pretrained weights for image tower backbone if available.", + ) + parser.add_argument( + "--lock-image", + default=False, + action="store_true", + help="Lock full image tower by disabling gradients.", + ) + parser.add_argument( + "--lock-image-unlocked-groups", + type=int, + default=0, + help="Leave last n image tower layer groups unlocked.", + ) + parser.add_argument( + "--lock-image-freeze-bn-stats", + default=False, + action="store_true", + help="Freeze BatchNorm running stats in image tower for any locked layers.", + ) + parser.add_argument( + "--local-loss", + default=False, + action="store_true", + help="calculate loss w/ local features @ global (instead of realizing full global @ global matrix)", + ) + parser.add_argument( + "--gather-with-grad", + default=False, + action="store_true", + help="enable full distributed gradient for feature gather", + ) + parser.add_argument( + "--force-quick-gelu", + default=False, + action="store_true", + help="Force use of QuickGELU activation for non-OpenAI transformer models.", + ) + parser.add_argument( + "--torchscript", + default=False, + action="store_true", + help="torch.jit.script the model, also uses jit version of OpenAI models if pretrained=='openai'", + ) + 
parser.add_argument( + "--trace", + default=False, + action="store_true", + help="torch.jit.trace the model for inference / eval only", + ) + # arguments for distributed training + parser.add_argument( + "--dist-url", + default="env://", + type=str, + help="url used to set up distributed training", + ) + parser.add_argument( + "--dist-backend", default="nccl", type=str, help="distributed backend" + ) + parser.add_argument( + "--report-to", + default="", + type=str, + help="Options are ['wandb', 'tensorboard', 'wandb,tensorboard']", + ) + parser.add_argument( + "--wandb-notes", default="", type=str, help="Notes if logging with wandb" + ) + parser.add_argument( + "--C", type=float, default=3.16, help="inverse regularizer for logistic reg." + ) + parser.add_argument( + "--debug", + default=False, + action="store_true", + help="If true, more information is logged.", + ) + parser.add_argument( + "--copy-codebase", + default=False, + action="store_true", + help="If true, we copy the entire base on the log diretory, and execute from there.", + ) + parser.add_argument( + "--horovod", + default=False, + action="store_true", + help="Use horovod for distributed training.", + ) + parser.add_argument( + "--ddp-static-graph", + default=False, + action="store_true", + help="Enable static graph optimization for DDP in PyTorch >= 1.11.", + ) + parser.add_argument( + "--no-set-device-rank", + default=False, + action="store_true", + help="Don't set device index from local rank (when CUDA_VISIBLE_DEVICES restricted to one per proc).", + ) + parser.add_argument("--seed", type=int, default=4242, help="Default random seed.") + + parser.add_argument( + "--top-k-checkpoint-select-dataset", + type=str, + default="all", + help="The dataset of selecting top-k checkpoint.", + ) + + # @R10, @R@5, @R1, mAP@10 + parser.add_argument( + "--top-k-checkpoint-select-metric", + type=str, + default="_R@10", + help="The metric for selecting top-k checkpoint.", + ) + parser.add_argument( + "--openai-model-cache-dir", + type=str, + default="~/.cache/clip", + help="Directory to download OpenAI models.", + ) + parser.add_argument( + "--optimizer", + type=str, + default="adamw", + help="can be AdamW or SGD", + ) + parser.add_argument( + "--parallel-eval", + default=False, + action="store_true", + help="Eval in parallel (multi-GPU, multi-node).", + ) + + parser.add_argument( + "--no-eval", + default=False, + action="store_true", + help="Training without evaluation.", + ) + + parser.add_argument( + "--lp-mlp", + default=False, + action="store_true", + help="Linear Probe using MLP layer or not.", + ) + + parser.add_argument( + "--lp-freeze", + default=False, + action="store_true", + help="Linear Probe using Freeze CLAP or not", + ) + + parser.add_argument( + "--lp-act", + default="None", + type=str, + help="Options are ['relu','elu','prelu','softmax','sigmoid']", + ) + + parser.add_argument( + "--lp-loss", type=str, default="bce", help="Loss func of Linear Probe." + ) + + parser.add_argument( + "--lp-metrics", + type=str, + default="map,mauc,acc", + help="Metrics of Linear Probe.", + ) + + parser.add_argument( + "--lp-lr", type=float, default=1e-4, help="learning rate of linear probe" + ) + parser.add_argument( + "--kappa", + type=float, + default=0, + help="the kappa in the weighted contrastive loss, default is to turn off the weighted contrastive loss", + ) + + parser.add_argument( + "--data-filling", + type=str, + default="pad", + help="type of data filling when the audio length is shorter than the max length." 
+ "Can be one of the following: repeat, repeatpad, pad", + ) + parser.add_argument( + "--data-truncating", + type=str, + default="rand_trunc", + help="type of data truncation when the audio length is longer than the max length." + "Can be one of the following: rand_trunc, fusion", + ) + + parser.add_argument( + "--clap-mlploss", + default=False, + action="store_true", + help="Using MLP loss for CLAP model or not", + ) + + parser.add_argument( + "--wandb-id", + type=str, + default=None, + help="the id of wandb experiment to restore.", + ) + + parser.add_argument( + "--sleep", type=float, default=0, help="sleep n seconds before start training" + ) + + # variable length processing + parser.add_argument( + "--enable-fusion", + default=False, + action="store_true", + help="Enable feature funsion for variable-length data", + ) + + parser.add_argument( + "--fusion-type", + type=str, + default="None", + help="Type is among ['channel_map', 'daf_1d','aff_1d','iaff_1d','daf_2d','aff_2d','iaff_2d']", + ) + + parser.add_argument( + "--mixup", + default=False, + action="store_true", + help="Enable mixup in finetuning training.", + ) + parser.add_argument( + "--text-augment-selection", + type=str, + default=None, + help="For selecting levels of augmented text. Type is among ['all', 'augment_only', 'none']", + ) + + args = parser.parse_args() + + # If some params are not passed, we use the default values based on model name. + default_params = get_default_params(args.amodel) + for name, val in default_params.items(): + if getattr(args, name) is None: + setattr(args, name, val) + + return args diff --git a/qa_mdt/audioldm_train/modules/clap/training/scheduler.py b/qa_mdt/audioldm_train/modules/clap/training/scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..7151ffbab25a113673b7627027b443b27f22cb0f --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/scheduler.py @@ -0,0 +1,24 @@ +import numpy as np + + +def assign_learning_rate(optimizer, new_lr): + for param_group in optimizer.param_groups: + param_group["lr"] = new_lr + + +def _warmup_lr(base_lr, warmup_length, step): + return base_lr * (step + 1) / warmup_length + + +def cosine_lr(optimizer, base_lr, warmup_length, steps): + def _lr_adjuster(step): + if step < warmup_length: + lr = _warmup_lr(base_lr, warmup_length, step) + else: + e = step - warmup_length + es = steps - warmup_length + lr = 0.5 * (1 + np.cos(np.pi * e / es)) * base_lr + assign_learning_rate(optimizer, lr) + return lr + + return _lr_adjuster diff --git a/qa_mdt/audioldm_train/modules/clap/training/train.py b/qa_mdt/audioldm_train/modules/clap/training/train.py new file mode 100644 index 0000000000000000000000000000000000000000..f5759c4679d2ee9c0748444adf66b8453cf09728 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/train.py @@ -0,0 +1,838 @@ +import json +import logging +import math +import os +import time +from contextlib import suppress + +import numpy as np +import torch +import torch.nn.functional as F + +try: + import wandb +except ImportError: + wandb = None + +from open_clip import ClipLoss, gather_features +from .distributed import is_master +from .zero_shot import zero_shot_eval + + +class AverageMeter(object): + """Computes and stores the average and current value""" + + def __init__(self): + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + +def 
unwrap_model(model): + if hasattr(model, "module"): + return model.module + else: + return model + + +def train_one_epoch( + model, data, epoch, optimizer, scaler, scheduler, args, tb_writer=None +): + device = torch.device(args.device) + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + model.train() + loss = ClipLoss( + local_loss=args.local_loss, + gather_with_grad=args.gather_with_grad, + cache_labels=True, + rank=args.rank, + world_size=args.world_size, + use_horovod=args.horovod, + mlp_loss=args.clap_mlploss, + weight_loss_kappa=args.kappa, + ) + + dataloader, sampler = data["train"].dataloader, data["train"].sampler + if args.distributed and sampler is not None: + sampler.set_epoch(epoch) + num_batches_per_epoch = dataloader.num_batches + sample_digits = math.ceil(math.log(dataloader.num_samples + 1, 10)) + + # for toy dataset + if args.dataset_type == "toy": + dataloader.dataset.generate_queue() + + loss_m = AverageMeter() + batch_time_m = AverageMeter() + data_time_m = AverageMeter() + end = time.time() + + for i, batch in enumerate(dataloader): + # logging.info(f"batch {i} of {num_batches_per_epoch}") + step = num_batches_per_epoch * epoch + i + if isinstance(scheduler, dict): + for s in scheduler.values(): + s(step) + else: + scheduler(step) + audios = batch # contains mel_spec, wavform, and longer list + texts = batch["text"] + # audios = audios.to(device=device, non_blocking=True) + # texts = texts.to(device=device, non_blocking=True) + + data_time_m.update(time.time() - end) + if isinstance(optimizer, dict): + for o_ in optimizer.values(): + o_.zero_grad() + else: + optimizer.zero_grad() + + with autocast(): + ( + audio_features, + text_features, + audio_features_mlp, + text_features_mlp, + logit_scale_a, + logit_scale_t, + ) = model(audios, texts, device) + + if args.clap_mlploss: + total_loss = loss( + audio_features=audio_features, + text_features=text_features, + logit_scale_a=logit_scale_a, + logit_scale_t=logit_scale_t, + audio_features_mlp=audio_features_mlp, + text_features_mlp=text_features_mlp, + ) + else: + total_loss = loss( + audio_features=audio_features, + text_features=text_features, + logit_scale_a=logit_scale_a, + ) + if isinstance(optimizer, dict): + if scaler is not None: + scaler.scale(total_loss).backward() + for o_ in optimizer.values(): + if args.horovod: + o_.synchronize() + scaler.unscale_(o_) + with o_.skip_synchronize(): + scaler.step(o_) + else: + scaler.step(o_) + scaler.update() + else: + total_loss.backward() + for o_ in optimizer.values(): + o_.step() + else: + if scaler is not None: + scaler.scale(total_loss).backward() + if args.horovod: + optimizer.synchronize() + scaler.unscale_(optimizer) + with optimizer.skip_synchronize(): + scaler.step(optimizer) + else: + scaler.step(optimizer) + scaler.update() + else: + total_loss.backward() + optimizer.step() + + # Note: we clamp to 4.6052 = ln(100), as in the original paper. 
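+        # (clamping the raw log-scale parameter to [0, ln 100] keeps the
+        # effective logit multiplier exp(logit_scale) within [1, 100])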
+ with torch.no_grad(): + unwrap_model(model).logit_scale_a.clamp_(0, math.log(100)) + if args.clap_mlploss: + unwrap_model(model).logit_scale_t.clamp_(0, math.log(100)) + + batch_time_m.update(time.time() - end) + end = time.time() + batch_count = i + 1 + if is_master(args) and (i % 100 == 0 or batch_count == num_batches_per_epoch): + if isinstance(audios, dict): + batch_size = len(audios["waveform"]) + else: + batch_size = len(audios) + num_samples = batch_count * batch_size * args.world_size + samples_per_epoch = dataloader.num_samples + percent_complete = 100.0 * batch_count / num_batches_per_epoch + + # NOTE loss is coarsely sampled, just master node and per log update + loss_m.update(total_loss.item(), batch_size) + logit_scale_scalar_a = logit_scale_a.item() + logit_scale_scalar_t = logit_scale_t.item() + if isinstance(optimizer, dict): + if args.clap_mlploss: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {[o_.param_groups[0]['lr'] for o_ in optimizer.values()]} " + f"Logit Scale Audio: {logit_scale_scalar_a:.3f}" + f"Logit Scale Text: {logit_scale_scalar_t:.3f}" + ) + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "scale_audio": logit_scale_scalar_a, + "scale_text": logit_scale_scalar_t, + "lr": [o_.param_groups[0]["lr"] for o_ in optimizer.values()], + } + else: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {[o_.param_groups[0]['lr'] for o_ in optimizer.values()]} " + f"Logit Scale Audio: {logit_scale_scalar_a:.3f}" + ) + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "scale_audio": logit_scale_scalar_a, + "lr": [o_.param_groups[0]["lr"] for o_ in optimizer.values()], + } + + else: + if args.clap_mlploss: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {optimizer.param_groups[0]['lr']:5f} " + f"Logit Scale Audio: {logit_scale_scalar_a:.3f}" + f"Logit Scale Text: {logit_scale_scalar_t:.3f}" + ) + + # Save train loss / etc. Using non avg meter values as loggers have their own smoothing + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "scale_audio": logit_scale_scalar_a, + "scale_text": logit_scale_scalar_t, + "lr": optimizer.param_groups[0]["lr"], + } + else: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {optimizer.param_groups[0]['lr']:5f} " + f"Logit Scale Audio: {logit_scale_scalar_a:.3f}" + ) + + # Save train loss / etc. 
Using non avg meter values as loggers have their own smoothing + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "scale_audio": logit_scale_scalar_a, + "lr": optimizer.param_groups[0]["lr"], + } + for name, val in log_data.items(): + name = "train/" + name + if tb_writer is not None: + tb_writer.add_scalar(name, val, step) + if args.wandb: + assert wandb is not None, "Please install wandb." + wandb.log({name: val, "step": step}) + + # resetting batch / data time meters per log window + batch_time_m.reset() + data_time_m.reset() + # end for + + +def evaluate(model, data, epoch, args, tb_writer=None): + metrics = {} + if not args.parallel_eval: + if not is_master(args): + return metrics + device = torch.device(args.device) + model.eval() + + # CHANGE + # zero_shot_metrics = zero_shot_eval(model, data, epoch, args) + # metrics.update(zero_shot_metrics) + if is_master(args): + print("Evaluating...") + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + if args.val_dataset_names == ["Clotho", "audiocaps"]: + # if only clotho and audiocaps are used, then we will use a different evaluation function. + # This is because in the Clotho and audiocaps valid and test set, there are 5 text for 1 audio. + if args.parallel_eval: + # (yusong): just a hack here. Don't use parallel eval when evaluating only clotho and audiocaps. + raise NotImplementedError( + "Parallel evaluation not supported for eval only Clotho and audiocaps." + ) + val_metrics_per_dataset = evaluate_clotho_audiocaps( + model, data, epoch, args, autocast, device, tb_writer + ) + for m in val_metrics_per_dataset.values(): + metrics.update(m) + if "epoch" not in metrics.keys(): + metrics.update({"epoch": epoch}) + metrics = select_top_metric_clotho_audiocaps( + metrics, val_metrics_per_dataset, args + ) + elif "val" in data and ( + args.val_frequency + and ((epoch % args.val_frequency) == 0 or epoch == args.epochs) + ): + dataloader = data["val"].dataloader + num_samples = 0 + samples_per_val = dataloader.num_samples + + # FIXME this does not scale past small eval datasets + # all_audio_features @ all_text_features will blow up memory and compute very quickly + eval_info = {} + if args.clap_mlploss: + eval_info["all"] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + "all_audio_features_mlp": [], + "all_text_features_mlp": [], + } # cumulative_loss = 0.0 + else: + eval_info["all"] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + } # cumu + # all_audio_features, all_text_features, all_audio_features_mlp, all_text_features_mlp = [], [], [], [] + with torch.no_grad(): + for i, batch in enumerate(dataloader): + audios = batch # contains mel_spec, wavform, and longer list + texts = batch["text"] + # audios = audios.to(device=device, non_blocking=True) + + all_names = list( + set(["-".join(b.split("/")[-3:-1]) for b in batch["__url__"]]) + ) + for name in all_names: + if name not in eval_info.keys(): + if args.clap_mlploss: + eval_info[name] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + "all_audio_features_mlp": [], + "all_text_features_mlp": [], + } + else: + eval_info[name] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + } + with autocast(): + ( + audio_features, + text_features, + audio_features_mlp, + text_features_mlp, + 
logit_scale_a, + logit_scale_t, + ) = model(audios, texts, device) + + if args.parallel_eval: + # multi-GPU eval + if args.clap_mlploss: + ( + audio_features, + text_features, + audio_features_mlp, + text_features_mlp, + ) = gather_features( + audio_features=audio_features, + text_features=text_features, + audio_features_mlp=audio_features_mlp, + text_features_mlp=text_features_mlp, + local_loss=False, + gather_with_grad=False, + rank=args.rank, + world_size=args.world_size, + use_horovod=args.horovod, + mlp_loss=args.clap_mlploss, + ) + else: + (audio_features, text_features,) = gather_features( + audio_features=audio_features, + text_features=text_features, + local_loss=False, + gather_with_grad=False, + rank=args.rank, + world_size=args.world_size, + use_horovod=args.horovod, + mlp_loss=args.clap_mlploss, + ) + + if is_master(args): + num_samples += audio_features.shape[0] + for n in [*all_names, "all"]: + if n == "all": + eval_info[n]["all_audio_features"].append( + audio_features.cpu() + ) + eval_info[n]["all_text_features"].append( + text_features.cpu() + ) + if args.clap_mlploss: + eval_info[n]["all_audio_features_mlp"].append( + audio_features_mlp.cpu() + ) + eval_info[n]["all_text_features_mlp"].append( + text_features_mlp.cpu() + ) + else: + idx = np.where( + np.array( + [ + "-".join(b.split("/")[-3:-1]) + for b in batch["__url__"] + ] + ) + == n + )[0] + eval_info[n]["all_audio_features"].append( + audio_features.cpu().index_select( + 0, torch.tensor(idx).long() + ) + ) + eval_info[n]["all_text_features"].append( + text_features.cpu().index_select( + 0, torch.tensor(idx).long() + ) + ) + if args.clap_mlploss: + eval_info[n]["all_audio_features_mlp"].append( + audio_features_mlp.cpu().index_select( + 0, torch.tensor(idx).long() + ) + ) + eval_info[n]["all_text_features_mlp"].append( + text_features_mlp.cpu().index_select( + 0, torch.tensor(idx).long() + ) + ) + # print(f'eval step {i}') # (yusong): for debug + + # cumulative_loss += total_loss * batch_size + # num_samples += batch_size + if is_master(args) and (i % 100) == 0: # and i != 0: + logging.info( + f"Eval Epoch: {epoch} [{num_samples} / {samples_per_val}]" + ) + if is_master(args): + val_metrics_per_dataset = {} + for n in eval_info.keys(): + if args.clap_mlploss: + metrics_single_dataset = get_metrics( + audio_features=torch.cat( + eval_info[n]["all_audio_features"] + ), + text_features=torch.cat(eval_info[n]["all_text_features"]), + logit_scale_a=logit_scale_a.cpu(), + audio_features_mlp=torch.cat( + eval_info[n]["all_audio_features_mlp"] + ), + text_features_mlp=torch.cat( + eval_info[n]["all_text_features_mlp"] + ), + logit_scale_t=logit_scale_t.cpu(), + mlp_loss=args.clap_mlploss, + ) + else: + metrics_single_dataset = get_metrics( + audio_features=torch.cat( + eval_info[n]["all_audio_features"] + ), + text_features=torch.cat(eval_info[n]["all_text_features"]), + logit_scale_a=logit_scale_a.cpu(), + mlp_loss=args.clap_mlploss, + ) + val_metrics_per_dataset[n] = { + n + "/" + k: v for k, v in metrics_single_dataset.items() + } + metrics.update(val_metrics_per_dataset[n]) + if "epoch" not in metrics.keys(): + metrics.update({"epoch": epoch}) + if is_master(args): + if not metrics: + return metrics + + logging.info( + f"Eval Epoch: {epoch} " + + "\n".join( + [ + "\t".join([f"{k}: {round(v, 4):.4f}" for k, v in m.items()]) + for m in val_metrics_per_dataset.values() + ] + ) + ) + + if args.save_logs: + for name, val in metrics.items(): + if tb_writer is not None: + tb_writer.add_scalar(f"val/{name}", val, epoch) + + 
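+            # append this epoch's metrics as a single JSON line to
+            # results.jsonl under the checkpoint directory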
with open(os.path.join(args.checkpoint_path, "results.jsonl"), "a+") as f: + f.write(json.dumps(metrics)) + f.write("\n") + + if args.wandb: + assert wandb is not None, "Please install wandb." + for name, val in metrics.items(): + wandb.log({f"val/{name}": val, "epoch": epoch}) + + return metrics + else: + return metrics + + +def get_metrics( + audio_features, + text_features, + logit_scale_a, + audio_features_mlp=None, + text_features_mlp=None, + logit_scale_t=None, + mlp_loss=False, +): + metrics = {} + if mlp_loss: + # Set up audio to text & text to audio similary matrice + a_logits_per_audio = ( + (logit_scale_a * audio_features @ text_features_mlp.t()).detach().cpu() + ) + a_logits_per_text = a_logits_per_audio.t().detach().cpu() + t_logits_per_audio = ( + (logit_scale_t * audio_features_mlp @ text_features.t()).detach().cpu() + ) + t_logits_per_text = t_logits_per_audio.t().detach().cpu() + + labels = torch.arange(audio_features.shape[0]).long() + # Change the loss from two terms into four terms with 2x2 combined CE loss + total_loss = ( + F.cross_entropy(a_logits_per_audio, labels) + + F.cross_entropy(a_logits_per_text, labels) + + F.cross_entropy(t_logits_per_audio, labels) + + F.cross_entropy(t_logits_per_text, labels) + ) / 4 + + metrics[f"cumulative_loss"] = total_loss.item() + metrics[f"num_samples"] = audio_features.shape[0] + + logits = { + "audio_to_text": (a_logits_per_audio + t_logits_per_audio) / 2, + "text_to_audio": (a_logits_per_text + t_logits_per_text) / 2, + } + ground_truth = torch.arange(len(text_features)).view(-1, 1) + + else: + # print("text_features", text_features) + # print("text_features.shape", text_features.shape) + logits_per_audio = ( + (logit_scale_a * audio_features @ text_features.t()).detach().cpu() + ) + logits_per_text = logits_per_audio.t().detach().cpu() + + labels = torch.arange(audio_features.shape[0]).long() + # Change the loss from two terms into four terms with 2x2 combined CE loss + total_loss = ( + F.cross_entropy(logits_per_audio, labels) + + F.cross_entropy(logits_per_text, labels) + ) / 2 + + metrics[f"cumulative_loss"] = total_loss.item() + metrics[f"num_samples"] = audio_features.shape[0] + + logits = {"audio_to_text": logits_per_audio, "text_to_audio": logits_per_text} + + ground_truth = torch.arange(len(text_features)).view(-1, 1) + + for name, logit in logits.items(): + ranking = torch.argsort(logit, descending=True) + preds = torch.where(ranking == ground_truth)[ + 1 + ] # (yusong) this line is slow because it uses single thread + preds = preds.detach().cpu().numpy() + metrics[f"{name}_mean_rank"] = preds.mean() + 1 + metrics[f"{name}_median_rank"] = np.floor(np.median(preds)) + 1 + for k in [1, 5, 10]: + metrics[f"{name}_R@{k}"] = np.mean(preds < k) + # map@10 + metrics[f"{name}_mAP@10"] = np.mean(np.where(preds < 10, 1 / (preds + 1), 0.0)) + + return metrics + + +def evaluate_clotho_audiocaps( + model, data, epoch, args, autocast, device, tb_writer=None +): + """ + Adapted from https://github.com/XinhaoMei/audio-text_retrieval/blob/main/tools/utils.py. + 1. for text-to-audio retrieval, do 5 times and average the results + 2. for R@1, R@5, R@10 in audio-to-text retrieval, take the best rank among 5 text + 3. for map@10 in audio-to-text retrieval: + 3.1: sort the rank of 5 text + 3.2: exclude the rank >=10 (0-index) + 3.3: compute the map regarding the remaining ranks: np.mean(np.arange(1, len(ranks)+1) / ranks). + (3.3) That is, take the top ranks of 5 text that is < 10, and assign the descending number as ground truth. 
+ (3.3) E.g.: the ground truth of first rank of the 5 text should be 1, the second rank should be 2, etc. + """ + # TODO: (yusong) only support single GPU evaluation and only support non-mlp case for now. + dataloader = data["val"].dataloader + with torch.no_grad(): + eval_info = {} + for i, batch in enumerate(dataloader): + audios = batch # contains mel_spec, wavform, and longer list + + # each item in the list has 5 texts + if args.tmodel == "transformer": + from open_clip import tokenize + + texts = [tokenize(t) for t in batch["full_text"]] + texts = torch.cat(texts) + else: + from .data import tokenizer + + texts = [ + tokenizer(t) for t in batch["full_text"] + ] # 5 texts for each audio + texts = { + k: torch.cat([t[k] for t in texts]) for k in texts[0].keys() + } # 5 x batch + + # audios = audios.to(device=device, non_blocking=True) + + all_names = list( + set(["-".join(b.split("/")[-3:-1]) for b in batch["__url__"]]) + ) + for name in all_names: + if name not in eval_info.keys(): + # we will not use mlp outputs even if args.clap_mlploss=True + eval_info[name] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + } + with autocast(): + audio_features = model(audios, None, device) + text_features = model(None, texts, device) + audio_features = F.normalize(audio_features, dim=-1) + text_features = F.normalize(text_features, dim=-1) + + all_names = list( + set(["-".join(b.split("/")[-3:-1]) for b in batch["__url__"]]) + ) + for n in all_names: + idx = np.where( + np.array( + ["-".join(b.split("/")[-3:-1]) for b in batch["__url__"]] + ) + == n + )[0] + eval_info[n]["all_audio_features"].append( + audio_features.cpu().index_select(0, torch.tensor(idx).long()) + ) + # (yusong) please double-check. This is for selecting 5 text features at once. + # because idx is a list of indices in size of num_samples, + # and text_features is a tensor of size (5*num_samples, dim) + # so we need to select 5 consecutive indices at once for a single index in idx. + eval_info[n]["all_text_features"].append( + text_features.cpu() + .reshape([-1, 5, text_features.shape[1]]) + .index_select(0, torch.tensor(idx).long()) + .reshape([-1, text_features.shape[1]]) + ) + + val_metrics_all = {} + + for n in eval_info.keys(): + logit_scale_a, logit_scale_t = model(None, None, device) + logit_scale_a = logit_scale_a.cpu() + + audio_features = torch.cat(eval_info[n]["all_audio_features"], dim=0) + text_features = torch.cat(eval_info[n]["all_text_features"], dim=0) + + logits_per_audio = ( + (logit_scale_a * audio_features @ text_features.t()).detach().cpu() + ) + logits_per_text = logits_per_audio.t().detach().cpu() + + # logits_per_audio shape: [num_samples, num_samples*5] + # logits_per_text shape: [num_samples*5, num_samples] + + logging.info( + f"dataset {n}, logits_per_audio shape: {logits_per_audio.shape}, " + f"logits_per_text shape: {logits_per_text.shape}" + ) + + metrics = {} + num_samples = audio_features.shape[0] + metrics[f"num_samples"] = num_samples + + # (yusong) the following code is very important, please double-check: + # logits_per_audio.reshape(num_samples, num_samples, 5)[:, :, d] + # logits_per_text.reshape(num_samples, 5, num_samples)[:, d, :] + # Those two are retrieving one of the 5 text for each audio. 
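+            # For illustration (assumed num_samples = 2): there are 2 audios and
+            # 10 captions, so logits_per_audio is [2, 10] and logits_per_text is
+            # [10, 2]; fixing one caption index d after the reshape gives a square
+            # [2, 2] logit matrix, matching labels = arange(num_samples) below.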
+ labels = torch.arange(audio_features.shape[0]).long() + audio_to_text_loss = [ + F.cross_entropy( + logits_per_audio.reshape(num_samples, num_samples, 5)[:, :, d], + labels, + ) + for d in range(5) + ] + text_to_audio_loss = [ + F.cross_entropy( + logits_per_text.reshape(num_samples, 5, num_samples)[:, d, :], + labels, + ) + for d in range(5) + ] + total_loss = (np.mean(audio_to_text_loss) + np.mean(text_to_audio_loss)) / 2 + + metrics[f"cumulative_loss"] = total_loss.item() + + # text to audio: do 5 times + pred_text = [] + for d in range(5): + logit = logits_per_text.reshape(num_samples, 5, num_samples)[:, d, :] + ground_truth = torch.arange(len(logit)).view(-1, 1) + ranking = torch.argsort( + logit, descending=True + ) # [num_samples, num_samples] + preds = torch.where(ranking == ground_truth)[1] + pred_text.append(preds.detach().cpu().numpy()) + pred_text_concat = np.concatenate(pred_text, axis=0) # [5*num_samples] + metrics[f"text_to_audio_mean_rank"] = pred_text_concat.mean() + 1 + metrics[f"text_to_audio_median_rank"] = ( + np.floor(np.median(pred_text_concat)) + 1 + ) + for k in [1, 5, 10]: + metrics[f"text_to_audio_R@{k}"] = np.mean(pred_text_concat < k) + # map@10 + metrics[f"text_to_audio_mAP@10"] = np.mean( + np.where(pred_text_concat < 10, 1 / (pred_text_concat + 1), 0.0) + ) + + # audio to text: take the best result + # for audio to text map 10, sort and assign descending ground truth. + # see https://github.com/XinhaoMei/audio-text_retrieval/blob/main/tools/utils.py#L103 + # map@10 + map_all = [] + pred_audio_all = [] + for d in range(num_samples): + # logits_per_audio: [num_samples, num_samples*5] + logit_single = logits_per_audio[d, :] # [5*num_samples] + # Ground-truth index: [d*5, d*5+1, d*5+2, d*5+3, d*5+4] + ranking = torch.argsort( + logit_single, descending=True + ) # [5*num_samples] + # ranking: the index of first match, second match, ... + ground_truth = torch.arange(d * 5, d * 5 + 5)[None] + all_pred = torch.where( + torch.stack([ranking] * 5) == ground_truth.view(-1, 1) + )[1] + min_pred = torch.min(all_pred) + pred_audio_all.append(min_pred.detach().cpu().numpy()) + all_pred_filter = all_pred[all_pred < 10].detach().cpu().numpy() + # /5 because we have 5 text, so it means for the text rank >=10 we count as 0. + map_single = ( + np.sum( + (np.arange(1, len(all_pred_filter) + 1) / (all_pred_filter + 1)) + ) + / 5 + ) + map_all.append(map_single) + metrics[f"audio_to_text_mAP@10"] = np.mean(map_all) + for k in [1, 5, 10]: + metrics[f"audio_to_text_R@{k}"] = np.mean(np.array(pred_audio_all) < k) + + val_metrics_all[n] = {n + "/" + k: v for k, v in metrics.items()} + return val_metrics_all + + +def calculate_selection_performance_clotho_audiocaps(val_metrics_per_dataset): + """ + Calculate performance for Clotho+AudioCaps for model selection. 
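+    The selection score is the mean over datasets of
+    (audio_to_text_mAP@10 + text_to_audio_mAP@10) / 2.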
+ """ + selection_performance_all = [] + for n in val_metrics_per_dataset.keys(): + selection_performance = ( + val_metrics_per_dataset[n][f"{n}/audio_to_text_mAP@10"] + + val_metrics_per_dataset[n][f"{n}/text_to_audio_mAP@10"] + ) / 2 + selection_performance_all.append(selection_performance) + return np.mean(selection_performance_all) + + +def select_top_metric_clotho_audiocaps(metrics, val_metrics_per_dataset, args): + # val_metrics_per_dataset: dict, key: dataset name, value: dict, key: metric name, value: metric value + # metrics: dict, key: metric name, value: metric value + # Hack: use args to save the top performance + if not hasattr(args, "top_selection_performance"): + selection_performance = calculate_selection_performance_clotho_audiocaps( + val_metrics_per_dataset + ) + # TODO: write the if and else together + metric_update = {} + for n in val_metrics_per_dataset.keys(): + for k in val_metrics_per_dataset[n].keys(): + metric_update[ + k.split("/")[0] + "-top" + "/" + k.split("/")[1] + ] = val_metrics_per_dataset[n][k] + metric_update["top_selection_performance"] = selection_performance + metric_update["top-selection-epoch"] = metrics["epoch"] + metrics.update(metric_update) + args.top_metric = metric_update + args.top_selection_performance = selection_performance + else: + selection_performance_new = calculate_selection_performance_clotho_audiocaps( + val_metrics_per_dataset + ) + selection_performance_old = args.top_selection_performance + if selection_performance_new > selection_performance_old: + metric_update = {} + for n in val_metrics_per_dataset.keys(): + for k in val_metrics_per_dataset[n].keys(): + metric_update[ + k.split("/")[0] + "-top" + "/" + k.split("/")[1] + ] = val_metrics_per_dataset[n][k] + metric_update["top_selection_performance"] = selection_performance_new + metric_update["top-selection-epoch"] = metrics["epoch"] + metrics.update(metric_update) + args.top_metric = metric_update + args.top_selection_performance = selection_performance_new + else: + metrics.update(args.top_metric) + return metrics diff --git a/qa_mdt/audioldm_train/modules/clap/training/zero_shot.py b/qa_mdt/audioldm_train/modules/clap/training/zero_shot.py new file mode 100644 index 0000000000000000000000000000000000000000..28b8fccc1af17fc69002857a7f529ac041c374f2 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/clap/training/zero_shot.py @@ -0,0 +1,95 @@ +# NOTE: This script is currently not supported for CLAP. 
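+# (It builds an ImageNet zero-shot classifier and calls model.encode_image,
+# so it only applies to image-text models, not this audio-text model.)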
+import logging +from contextlib import suppress + +import torch +import torch.nn.functional as F +from tqdm import tqdm + +from open_clip import tokenize +from .imagenet_zeroshot_data import imagenet_classnames, openai_imagenet_template + + +def zero_shot_classifier(model, classnames, templates, args): + with torch.no_grad(): + zeroshot_weights = [] + for classname in tqdm(classnames): + texts = [template(classname) for template in templates] # format with class + texts = tokenize(texts).to(args.device) # tokenize + if args.distributed and not args.horovod: + class_embeddings = model.module.encode_text(texts) + else: + class_embeddings = model.encode_text(texts) + class_embedding = F.normalize(class_embeddings, dim=-1).mean(dim=0) + class_embedding /= class_embedding.norm() + zeroshot_weights.append(class_embedding) + zeroshot_weights = torch.stack(zeroshot_weights, dim=1).to(args.device) + return zeroshot_weights + + +def accuracy(output, target, topk=(1,)): + pred = output.topk(max(topk), 1, True, True)[1].t() + correct = pred.eq(target.view(1, -1).expand_as(pred)) + return [ + float(correct[:k].reshape(-1).float().sum(0, keepdim=True).cpu().numpy()) + for k in topk + ] + + +def run(model, classifier, dataloader, args): + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + with torch.no_grad(): + top1, top5, n = 0.0, 0.0, 0.0 + for images, target in tqdm(dataloader, unit_scale=args.batch_size): + images = images.to(args.device) + target = target.to(args.device) + + with autocast(): + # predict + if args.distributed and not args.horovod: + image_features = model.module.encode_image(images) + else: + image_features = model.encode_image(images) + image_features = F.normalize(image_features, dim=-1) + logits = 100.0 * image_features @ classifier + + # measure accuracy + acc1, acc5 = accuracy(logits, target, topk=(1, 5)) + top1 += acc1 + top5 += acc5 + n += images.size(0) + + top1 = top1 / n + top5 = top5 / n + return top1, top5 + + +def zero_shot_eval(model, data, epoch, args): + if "imagenet-val" not in data and "imagenet-v2" not in data: + return {} + if args.zeroshot_frequency == 0: + return {} + if (epoch % args.zeroshot_frequency) != 0 and epoch != args.epochs: + return {} + + logging.info("Starting zero-shot imagenet.") + + logging.info("Building zero-shot classifier") + classifier = zero_shot_classifier( + model, imagenet_classnames, openai_imagenet_template, args + ) + + logging.info("Using classifier") + results = {} + if "imagenet-val" in data: + top1, top5 = run(model, classifier, data["imagenet-val"].dataloader, args) + results["imagenet-zeroshot-val-top1"] = top1 + results["imagenet-zeroshot-val-top5"] = top5 + if "imagenet-v2" in data: + top1, top5 = run(model, classifier, data["imagenet-v2"].dataloader, args) + results["imagenetv2-zeroshot-val-top1"] = top1 + results["imagenetv2-zeroshot-val-top5"] = top5 + + logging.info("Finished zero-shot imagenet.") + + return results diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/PixArt.py b/qa_mdt/audioldm_train/modules/diffusionmodules/PixArt.py new file mode 100644 index 0000000000000000000000000000000000000000..735dc33d542252879600c154902d8ab2cd9c7bb6 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/PixArt.py @@ -0,0 +1,3059 @@ +from abc import abstractmethod +from functools import partial +import math +from typing import Iterable + +import numpy as np +import torch as th +#from .utils_pos_embedding.pos_embed import RoPE2D +import torch.nn as nn +import torch.nn.functional as F 
+import sys +from fairscale.nn.model_parallel.layers import ( + ColumnParallelLinear, + ParallelEmbedding, + RowParallelLinear, +) + +from timm.models.layers import DropPath +from .utils import auto_grad_checkpoint, to_2tuple +from .PixArt_blocks import t2i_modulate, WindowAttention, MultiHeadCrossAttention, T2IFinalLayer, TimestepEmbedder, FinalLayer +import xformers.ops + +import math +class PatchEmbed(nn.Module): + """ 2D Image to Patch Embedding + """ + def __init__( + self, + img_size=(256, 16), + patch_size=(16, 4), + overlap = (0, 0), + in_chans=128, + embed_dim=768, + norm_layer=None, + flatten=True, + bias=True, + ): + super().__init__() + + # img_size=(256, 16) + # patch_size=(16, 4) + # overlap = (2, 2) + # in_chans=128 + # embed_dim=768 + # import pdb + # pdb.set_trace() + self.img_size = img_size + self.patch_size = patch_size + self.ol = overlap + self.grid_size = (math.ceil((img_size[0] - patch_size[0]) / (patch_size[0]-overlap[0])) + 1, + math.ceil((img_size[1] - patch_size[1]) / (patch_size[1]-overlap[1])) + 1) + self.pad_size = ((self.grid_size[0]-1) * (self.patch_size[0]-overlap[0])+self.patch_size[0]-self.img_size[0], + +(self.grid_size[1]-1)*(self.patch_size[1]-overlap[1])+self.patch_size[1]-self.img_size[1]) + self.pad_size = (self.pad_size[0] // 2, self.pad_size[1] // 2) + # self.p-ad_size = (((img_size[0] - patch_size[0]) // ), ) + self.num_patches = self.grid_size[0] * self.grid_size[1] + self.flatten = flatten + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=(patch_size[0]-overlap[0], patch_size[1]-overlap[1]), bias=bias) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x): + # B, C, H, W = x.shape + # _assert(H == self.img_size[0], f"Input image height ({H}) doesn't match model ({self.img_size[0]}).") + # _assert(W == self.img_size[1], f"Input image width ({W}) doesn't match model ({self.img_size[1]}).") + + + x = F.pad(x, (self.pad_size[-1], self.pad_size[-1], self.pad_size[-2], self.pad_size[-2]), "constant", 0) + x = self.proj(x) + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x + +class PatchEmbed_1D(nn.Module): + def __init__( + self, + img_size=(256, 16), + # patch_size=(16, 4), + # overlap = (0, 0), + in_chans=8, + embed_dim=1152, + norm_layer=None, + # flatten=True, + bias=True, + ): + super().__init__() + + self.proj = nn.Linear(in_chans*img_size[1], embed_dim, bias=bias) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x): + # B, C, H, W = x.shape + # _assert(H == self.img_size[0], f"Input image height ({H}) doesn't match model ({self.img_size[0]}).") + # _assert(W == self.img_size[1], f"Input image width ({W}) doesn't match model ({self.img_size[1]}).") + + # x = F.pad(x, (self.pad_size[-1], self.pad_size[-1], self.pad_size[-2], self.pad_size[-2]), "constant", 0) + # x = self.proj(x) + # if self.flatten: + # x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = th.einsum('bctf->btfc', x) + x = x.flatten(2) # BTFC -> BTD + x = self.proj(x) + x = self.norm(x) + return x + +# if __name__ == '__main__': +# x = th.rand(1, 256, 16).unsqueeze(0) +# model = PatchEmbed(in_chans=1) +# y = model(x) +from timm.models.vision_transformer import Attention, Mlp + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + +from positional_encodings.torch_encodings import PositionalEncoding1D + +def t2i_modulate(x, shift, scale): + return x * (1 + scale) + shift + +class 
PixArtBlock(nn.Module): + """ + A PixArt block with adaptive layer norm (adaLN-single) conditioning. + """ + + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, drop_path=0., window_size=0, input_size=None, use_rel_pos=False, **block_kwargs): + super().__init__() + self.hidden_size = hidden_size + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = WindowAttention(hidden_size, num_heads=num_heads, qkv_bias=True, + input_size=input_size if window_size == 0 else (window_size, window_size), + use_rel_pos=use_rel_pos, **block_kwargs) + self.cross_attn = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.window_size = window_size + self.scale_shift_table = nn.Parameter(th.randn(6, hidden_size) / hidden_size ** 0.5) + + def forward(self, x, y, t, mask=None, **kwargs): + B, N, C = x.shape + # x [B, T, D] + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x = x + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa)).reshape(B, N, C)) + x = x + self.cross_attn(x, y, mask) + x = x + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))) + + return x + +from qa_mdt.audioldm_train.modules.diffusionmodules.attention import CrossAttention_1D + +class PixArtBlock_Slow(nn.Module): + """ + A PixArt block with adaptive layer norm (adaLN-single) conditioning. + """ + + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, drop_path=0., window_size=0, input_size=None, use_rel_pos=False, **block_kwargs): + super().__init__() + self.hidden_size = hidden_size + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = CrossAttention_1D(query_dim=hidden_size, context_dim=hidden_size, heads=num_heads, dim_head=int(hidden_size/num_heads)) + self.cross_attn = CrossAttention_1D(query_dim=hidden_size, context_dim=hidden_size, heads=num_heads, dim_head=int(hidden_size/num_heads)) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.window_size = window_size + self.scale_shift_table = nn.Parameter(th.randn(6, hidden_size) / hidden_size ** 0.5) + + def forward(self, x, y, t, mask=None, **kwargs): + B, N, C = x.shape + # x [B, T, D] + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x = x + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa)).reshape(B, N, C)) + x = x + self.cross_attn(x, y, mask) + x = x + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))) + + return x + +class PixArt(nn.Module): + """ + Diffusion model with a Transformer backbone. 
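+    The backbone patchifies the (N, in_channels, H, W) latent with optionally
+    overlapping patches, runs PixArt blocks with adaLN-single timestep
+    conditioning plus cross-attention to the text condition, and unpatchifies
+    back to a spatial latent (channels are doubled when pred_sigma is True).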
+ """ + + def __init__(self, input_size=(256,16), patch_size=(16,4), overlap=(0, 0), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=True, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, **kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if pred_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + + self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + # self.x_embedder = PatchEmbed_1D(input) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size[0] // self.patch_size[0] * 2 + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", th.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = nn.Linear(cond_dim, hidden_size) + drop_path = [x.item() for x in th.linspace(0, drop_path, depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + PixArtBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False) + for i in range(depth) + ]) + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + self.initialize_weights() + + # if config: + # logger = get_root_logger(os.path.join(config.work_dir, 'train_log.log')) + # logger.warning(f"lewei scale: {self.lewei_scale}, base size: {self.base_size}") + # else: + # print(f'Warning: lewei scale: {self.lewei_scale}, base size: {self.base_size}') + + def forward(self, x, timestep, context_list, context_mask_list=None, **kwargs): + """ + Forward pass of PixArt. 
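+        Conditioning is passed via context_list / context_mask_list; only the
+        first entry of each is used as the cross-attention condition and its
+        padding mask.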
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y) # (N, L, D) + mask = context_mask_list[0] # (N, L) + + assert mask is not None + # if mask is not None: + + y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + y_lens = [int(_) for _ in y_lens] + for block in self.blocks: + x = auto_grad_checkpoint(block, x, y, t0, y_lens) # (N, T, D) #support grad checkpoint + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, y, cfg_scale, mask=None, **kwargs): + """ + Forward pass of PixArt, but also batches the unconditional forward pass for classifier-free guidance. + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, y, mask) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :8], model_out[:, 8:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify(self, x): + + """ + x: (N, T, patch_size 0 * patch_size 1 * C) + imgs: (Bs. 256. 16. 
8) + """ + + # torch_map = th.zeros(self.x_embedder.img_size[0]+2*self.x_embedder.pad_size[0], + # self.x_embedder.img_size[1]+2*self.x_embedder.pad_size[1]).to(x.device) + # lf = self.x_embedder.grid_size[0] + # rf = self.x_embedder.grid_size[1] + + # for i in range(lf): + # for j in range(rf): + + # xx = (i) * (self.x_embedder.patch_size[0]-self.x_embedder.ol[0]) + # yy = (j) * (self.x_embedder.patch_size[1]-self.x_embedder.ol[1]) + + # torch_map[xx:(xx+self.x_embedder.patch_size[0]), yy:(yy+self.x_embedder.patch_size[1])]+=1 + # torch_map = torch_map[self.x_embedder.pad_size[0]:self.x_embedder.pad_size[0]+self.x_embedder.img_size[0], + # self.x_embedder.pad_size[1]:self.x_embedder.pad_size[1]+self.x_embedder.img_size[1]] + # torch_map = th.reciprocal(torch_map) + # c = self.out_channels + # p0, p1 = self.x_embedder.patch_size[0], self.x_embedder.patch_size[1] + + # x = x.reshape(shape=(x.shape[0], self.x_embedder.grid_size[0], + # self.x_embedder.grid_size[1], p0, p1, c)) + # x = th.einsum('nhwpqc->nchwpq', x) + # added_map = th.zeros(x.shape[0], c, + # self.x_embedder.img_size[0]+2*self.x_embedder.pad_size[0], + # self.x_embedder.img_size[1]+2*self.x_embedder.pad_size[1]).to(x.device) + + # for b_id in range(x.shape[0]): + # for i in range(lf): + # for j in range(rf): + # for c_id in range(c): + # xx = (i) * (self.x_embedder.patch_size[0]-self.x_embedder.ol[0]) + # yy = (j) * (self.x_embedder.patch_size[1]-self.x_embedder.ol[1]) + # added_map[b_id][c_id][xx:(xx+self.x_embedder.patch_size[0]), yy:(yy+self.x_embedder.patch_size[1])] += \ + # x[b_id, c_id, i, j] + # ret_map = th.zeros(x.shape[0], c, self.x_embedder.img_size[0], + # self.x_embedder.img_size[1]).to(x.device) + # for b_id in range(x.shape[0]): + # for id_c in range(c): + # ret_map[b_id, id_c, :, :] = th.mul(added_map[b_id][id_c][self.x_embedder.pad_size[0]:self.x_embedder.pad_size[0]+self.x_embedder.img_size[0], + # self.x_embedder.pad_size[1]:self.x_embedder.pad_size[1]+self.x_embedder.img_size[1]], torch_map) + c = self.out_channels + p0 = self.x_embedder.patch_size[0] + p1 = self.x_embedder.patch_size[1] + h, w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + # h = w = int(x.shape[1] ** 0.5) + # print(x.shape, h, w, p0, p1) + # import pdb + # pdb.set_trace() + + x = x.reshape(shape=(x.shape[0], h, w, p0, p1, c)) + x = th.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p0, w * p1)) + return imgs + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN 
modulation layers in PixArt blocks: + for block in self.blocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + @property + def dtype(self): + return next(self.parameters()).dtype + +class SwiGLU(nn.Module): + def __init__( + self, + dim: int, + hidden_dim: int, + multiple_of: int, + ): + super().__init__() + hidden_dim = int(2 * hidden_dim / 3) + hidden_dim = multiple_of * ((hidden_dim + multiple_of - 1) // multiple_of) + + self.w1 = ColumnParallelLinear( + dim, hidden_dim, bias=False, gather_output=False, init_method=lambda x: x + ) + self.w2 = RowParallelLinear( + hidden_dim, dim, bias=False, input_is_parallel=True, init_method=lambda x: x + ) + self.w3 = ColumnParallelLinear( + dim, hidden_dim, bias=False, gather_output=False, init_method=lambda x: x + ) + + def forward(self, x): + return self.w2(F.silu(self.w1(x)) * self.w3(x)) +class DEBlock(nn.Module): + """ + Decoder block with added SpecTNT transformer + """ + + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, FFN_type='SwiGLU', drop_path=0., window_size=0, input_size=None, use_rel_pos=False, skip=False, num_f=None, num_t=None, **block_kwargs): + super().__init__() + self.hidden_size = hidden_size + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = WindowAttention(hidden_size, num_heads=num_heads, qkv_bias=True, + input_size=input_size if window_size == 0 else (window_size, window_size), + use_rel_pos=use_rel_pos, **block_kwargs) + self.cross_attn = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + # self.cross_attn_f = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + # self.cross_attn_t = MultiHeadCrossAttention(hidden_size*num_f, num_heads, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.norm3 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.norm4 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.norm5 = nn.LayerNorm(hidden_size * num_f, elementwise_affine=False, eps=1e-6) + self.norm6 = nn.LayerNorm(hidden_size * num_f, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + if FFN_type == 'mlp': + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + # self.mlp2 = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + # self.mlp3 = Mlp(in_features=hidden_size*num_f, hidden_features=int(hidden_size*num_f * mlp_ratio), act_layer=approx_gelu, drop=0) + elif FFN_type == 'SwiGLU': + self.mlp = SwiGLU(hidden_size, int(hidden_size * mlp_ratio), 1) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.window_size = window_size + self.scale_shift_table = nn.Parameter(th.randn(6, hidden_size) / hidden_size ** 0.5) + # self.scale_shift_table_2 = nn.Parameter(th.randn(6, hidden_size) / hidden_size ** 0.5) + # self.scale_shift_table_3 = nn.Parameter(th.randn(6, hidden_size) / hidden_size ** 0.5) + self.skip_linear = nn.Linear(2 * hidden_size, hidden_size) if skip else None + + self.F_transformer = WindowAttention(hidden_size, num_heads=4, qkv_bias=True, + input_size=input_size if window_size == 0 else (window_size, window_size), + use_rel_pos=use_rel_pos, **block_kwargs) + + self.T_transformer = WindowAttention(hidden_size * num_f, num_heads=16, qkv_bias=True, + input_size=input_size if window_size == 0 else (window_size, window_size), + use_rel_pos=use_rel_pos, **block_kwargs) + + self.f_pos = nn.Embedding(num_f, hidden_size) + self.t_pos = nn.Embedding(num_t, hidden_size * num_f) + self.num_f = num_f + self.num_t = num_t + + def forward(self, x_normal, end, y, t, mask=None, skip=None, ids_keep=None, **kwargs): + # import pdb + # pdb.set_trace() + B, D, C = x_normal.shape + T = self.num_t + F_add_1 = self.num_f + # B, T, F_add_1, C = x.shape + # F_add_1 = F_add_1 + 1 + # x_normal = th.reshape() + # # x_end [B, T, 1, C] + # x_end = x[:, :, -1, :].unsqueeze(2) + if self.skip_linear is not None: + x_normal = self.skip_linear(th.cat([x_normal, skip], dim=-1)) + + D = T * (F_add_1 - 1) + # x_normal [B, D, C] + # import pdb + # pdb.set_trace() + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x_normal = x_normal + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x_normal), shift_msa, scale_msa)).reshape(B, D, C)) + + x_normal = x_normal.reshape(B, T, F_add_1-1, C) + x_normal = th.cat((x_normal, end), 2) + + # x_normal [B*T, F+1, C] + x_normal = x_normal.reshape(B*T, F_add_1, C) + pos_f = th.arange(self.num_f, device=x.device).unsqueeze(0).expand(B*T, -1) + # import pdb; pdb.set_trace() + x_normal = x_normal + self.f_pos(pos_f) + + x_normal = x_normal + self.F_transformer(self.norm3(x_normal)) + # x_normal = x_normal + self.cross_attn_f(x_normal, y, mask) + # x_normal = x_normal + self.mlp2(self.norm4(x_normal)) + + # x_normal [B, T, (F+1) * C] + x_normal = x_normal.reshape(B, T, F_add_1 * C) + pos_t = th.arange(self.num_t, device=x.device).unsqueeze(0).expand(B, -1) + x_normal = x_normal + self.t_pos(pos_t) + x_normal = x_normal + self.T_transformer(self.norm5(x_normal)) + # x_normal = x_normal + self.cross_attn_t(x_normal, y, mask) + + + x_normal = x_normal.reshape(B, T ,F_add_1, C) + end = x_normal[:, :, -1, :].unsqueeze(2) + x_normal = x_normal[:, :, :-1, :] + x_normal = x_normal.reshape(B, T*(F_add_1 - 1), C) + + x_normal = x_normal + self.cross_attn(x_normal, y, mask) + x_normal = x_normal + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x_normal), shift_mlp, scale_mlp))) + + # x_normal = th.cat + return x_normal, end #.reshape(B, ) +class MDTBlock(nn.Module): + """ + A PixArt block with adaptive layer norm (adaLN-single) conditioning. 
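+    The timestep embedding is projected once outside the block (``t_block``) to a
+    6 * hidden_size vector; each block adds its own learned ``scale_shift_table``
+    and splits the sum into shift/scale/gate triples for the self-attention and
+    MLP branches, with text conditioning injected through cross-attention in
+    between. Modulation sketch (drop_path omitted), assuming
+    ``t2i_modulate(x, shift, scale)`` is the PixArt helper ``x * (1 + scale) + shift``:
+
+        x = x + gate_msa * attn(norm1(x) * (1 + scale_msa) + shift_msa)
+        x = x + cross_attn(x, y, mask)
+        x = x + gate_mlp * mlp(norm2(x) * (1 + scale_mlp) + shift_mlp)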
+ """ + + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, FFN_type='mlp', drop_path=0., window_size=0, input_size=None, use_rel_pos=False, skip=False, **block_kwargs): + super().__init__() + self.hidden_size = hidden_size + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = WindowAttention(hidden_size, num_heads=num_heads, qkv_bias=True, + input_size=input_size if window_size == 0 else (window_size, window_size), + use_rel_pos=use_rel_pos, **block_kwargs) + self.cross_attn = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + if FFN_type == 'mlp': + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + elif FFN_type == 'SwiGLU': + self.mlp = SwiGLU(hidden_size, int(hidden_size * mlp_ratio), 1) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.window_size = window_size + self.scale_shift_table = nn.Parameter(th.randn(6, hidden_size) / hidden_size ** 0.5) + + self.skip_linear = nn.Linear(2 * hidden_size, hidden_size) if skip else None + + def forward(self, x, y, t, mask=None, skip=None, ids_keep=None, **kwargs): + B, N, C = x.shape + if self.skip_linear is not None: + x = self.skip_linear(th.cat([x, skip], dim=-1)) + # x [B, T, D] + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x = x + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa)).reshape(B, N, C)) + x = x + self.cross_attn(x, y, mask) + x = x + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))) + + return x +class PixArt_MDT_MASK_TF(nn.Module): + """ + Diffusion model with a Transformer backbone. 
+ """ + + def __init__(self, input_size=(256,16), patch_size=(16,4), overlap=(0, 0), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=False, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, mask_t=0.17, mask_f=0.15, decode_layer=4,**kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + decode_layer = int(decode_layer) + + self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + # self.x_embedder = PatchEmbed_1D(input) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size[0] // self.patch_size[0] * 2 + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", th.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = nn.Linear(cond_dim, hidden_size) + + half_depth = (depth - decode_layer)//2 + self.half_depth=half_depth + + drop_path_half = [x.item() for x in th.linspace(0, drop_path, half_depth)] # stochastic depth decay rule + drop_path_decode = [x.item() for x in th.linspace(0, drop_path, decode_layer)] + self.en_inblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, FFN_type='mlp') for i in range(half_depth) + ]) + self.en_outblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True, FFN_type='mlp') for i in range(half_depth) + ]) + self.de_blocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_decode[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True, FFN_type='mlp') for i in range(decode_layer) + ]) + self.sideblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, FFN_type='mlp') for _ in range(1) + ]) + + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + self.decoder_pos_embed = nn.Parameter(th.zeros( + 1, num_patches, hidden_size), requires_grad=True) + # if mask_ratio is not None: + # self.mask_token = nn.Parameter(th.zeros(1, 1, hidden_size)) + # self.mask_ratio = float(mask_ratio) + # self.decode_layer = int(decode_layer) + # else: + # self.mask_token = nn.Parameter(th.zeros( + # 1, 1, hidden_size), requires_grad=False) + # self.mask_ratio = None + # self.decode_layer = int(decode_layer) + assert mask_t != 0 and mask_f != 0 + self.mask_token = nn.Parameter(th.zeros(1, 1, hidden_size)) + self.mask_t = mask_t + self.mask_f = mask_f + self.decode_layer = int(decode_layer) + 
print(f"mask ratio: T-{self.mask_t} F-{self.mask_f}", "decode_layer:", self.decode_layer) + self.initialize_weights() + + + def forward(self, x, timestep, context_list, context_mask_list=None, enable_mask=False, **kwargs): + """ + Forward pass of PixArt. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + # import pdb + # pdb.set_trace() + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y) # (N, L, D) + # if not self.training: + try: + mask = context_mask_list[0] # (N, L) + except: + mask = th.ones(x.shape[0], 1).to(x.device) + print("MASK !!!!!!!!!!!!!!!!!!!!!!!!!") + + assert mask is not None + # if mask is not None: + + y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + y_lens = [int(_) for _ in y_lens] + + input_skip = x + + masked_stage = False + skips = [] + # TODO : masking op for training + if self.mask_t is not None and self.training: + # masking: length -> length * mask_ratio + rand_mask_ratio = th.rand(1, device=x.device) # noise in [0, 1] + rand_mask_ratio_t = rand_mask_ratio * 0.13 + self.mask_t # mask_ratio, mask_ratio + 0.2 + rand_mask_ratio_f = rand_mask_ratio * 0.13 + self.mask_f + # print(rand_mask_ratio) + x, mask, ids_restore, ids_keep = self.random_masking_2d( + x, rand_mask_ratio_t, rand_mask_ratio_f) + masked_stage = True + + + for block in self.en_inblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=None) + skips.append(x) + + for block in self.en_outblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=None) + + if self.mask_t is not None and self.mask_f is not None and self.training: + x = self.forward_side_interpolater(x, y, t0, y_lens, mask, ids_restore) + masked_stage = False + else: + # add pos embed + x = x + self.decoder_pos_embed + + for i in range(len(self.de_blocks)): + block = self.de_blocks[i] + this_skip = input_skip + + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=this_skip, ids_keep=None) + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, context_list, context_mask_list=None, cfg_scale=4.0, **kwargs): + """ + Forward pass of PixArt, but also batches the unconditional forward pass for classifier-free guidance. 
+ """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + # import pdb + # pdb.set_trace() + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, context_list, context_mask_list=None) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :8], model_out[:, 8:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify(self, x): + + """ + x: (N, T, patch_size 0 * patch_size 1 * C) + imgs: (Bs. 256. 16. 8) + """ + if self.x_embedder.ol == (0, 0) or self.x_embedder.ol == [0, 0]: + c = self.out_channels + p0 = self.x_embedder.patch_size[0] + p1 = self.x_embedder.patch_size[1] + h, w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + + x = x.reshape(shape=(x.shape[0], h, w, p0, p1, c)) + x = th.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p0, w * p1)) + return imgs + + lf = self.x_embedder.grid_size[0] + rf = self.x_embedder.grid_size[1] + lp = self.x_embedder.patch_size[0] + rp = self.x_embedder.patch_size[1] + lo = self.x_embedder.ol[0] + ro = self.x_embedder.ol[1] + lm = self.x_embedder.img_size[0] + rm = self.x_embedder.img_size[1] + lpad = self.x_embedder.pad_size[0] + rpad = self.x_embedder.pad_size[1] + bs = x.shape[0] + + torch_map = self.torch_map + + c = self.out_channels + + x = x.reshape(shape=(bs, lf, rf, lp, rp, c)) + x = th.einsum('nhwpqc->nchwpq', x) + + added_map = th.zeros(bs, c, lm+2*lpad, rm+2*rpad).to(x.device) + + for i in range(lf): + for j in range(rf): + xx = (i) * (lp - lo) + yy = (j) * (rp - ro) + added_map[:, :, xx:(xx+lp), yy:(yy+rp)] += \ + x[:, :, i, j, :, :] + # import pdb + # pdb.set_trace() + added_map = added_map[:][:][lpad:lm+lpad, rpad:rm+rpad] + return th.mul(added_map.to(x.device), torch_map.to(x.device)) + + def random_masking_2d(self, x, mask_t_prob, mask_f_prob): + """ + 2D: Spectrogram (msking t and f under mask_t_prob and mask_f_prob) + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. 
+ x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + # if self.use_custom_patch: # overlapped patch + # T=101 + # F=12 + # else: + # T=64 + # F=8 + T = self.x_embedder.grid_size[0] + F = self.x_embedder.grid_size[1] + #x = x.reshape(N, T, F, D) + len_keep_t = int(T * (1 - mask_t_prob)) + len_keep_f = int(F * (1 - mask_f_prob)) + + # noise for mask in time + noise_t = th.rand(N, T, device=x.device) # noise in [0, 1] + # sort noise for each sample aling time + ids_shuffle_t = th.argsort(noise_t, dim=1) # ascend: small is keep, large is remove + ids_restore_t = th.argsort(ids_shuffle_t, dim=1) + ids_keep_t = ids_shuffle_t[:,:len_keep_t] + # noise mask in freq + noise_f = th.rand(N, F, device=x.device) # noise in [0, 1] + ids_shuffle_f = th.argsort(noise_f, dim=1) # ascend: small is keep, large is remove + ids_restore_f = th.argsort(ids_shuffle_f, dim=1) + ids_keep_f = ids_shuffle_f[:,:len_keep_f] # + + # generate the binary mask: 0 is keep, 1 is remove + # mask in freq + mask_f = th.ones(N, F, device=x.device) + mask_f[:,:len_keep_f] = 0 + mask_f = th.gather(mask_f, dim=1, index=ids_restore_f).unsqueeze(1).repeat(1,T,1) # N,T,F + # mask in time + mask_t = th.ones(N, T, device=x.device) + mask_t[:,:len_keep_t] = 0 + mask_t = th.gather(mask_t, dim=1, index=ids_restore_t).unsqueeze(1).repeat(1,F,1).permute(0,2,1) # N,T,F + mask = 1-(1-mask_t)*(1-mask_f) # N, T, F + + # get masked x + id2res=th.Tensor(list(range(N*T*F))).reshape(N,T,F).to(x.device) + id2res = id2res + 999*mask # add a large value for masked elements + id2res2 = th.argsort(id2res.flatten(start_dim=1)) + ids_keep=id2res2.flatten(start_dim=1)[:,:len_keep_f*len_keep_t] + x_masked = th.gather(x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + ids_restore = th.argsort(id2res2.flatten(start_dim=1)) + mask = mask.flatten(start_dim=1) + + return x_masked, mask, ids_restore, ids_keep + + def random_masking(self, x, mask_ratio): + """ + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. 
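+        Returns (x_masked, mask, ids_restore, ids_keep): the visible tokens, a
+        binary mask over the full sequence (0 = keep, 1 = removed), and the
+        permutations that forward_side_interpolater uses to scatter mask tokens
+        back into their original positions.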
+ x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = th.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + # ascend: small is keep, large is remove + ids_shuffle = th.argsort(noise, dim=1) + ids_restore = th.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = th.gather( + x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = th.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = th.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore, ids_keep + + def forward_side_interpolater(self, x, y, t0, y_lens, mask, ids_restore): + # append mask tokens to sequence + mask_tokens = self.mask_token.repeat( + x.shape[0], ids_restore.shape[1] - x.shape[1], 1) + x_ = th.cat([x, mask_tokens], dim=1) + x = th.gather( + x_, dim=1, index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2])) # unshuffle + + # add pos embed + x = x + self.decoder_pos_embed + + # pass to the basic block + x_before = x + for sideblock in self.sideblocks: + x = sideblock(x, y, t0, y_lens, ids_keep=None) + + # masked shortcut + mask = mask.unsqueeze(dim=-1) + x = x*mask + (1-mask)*x_before + + return x + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN modulation layers in PixArt blocks: + for block in self.en_inblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.en_outblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.de_blocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.sideblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + if self.x_embedder.ol == [0, 0] or self.x_embedder.ol == (0, 0): + return + + lf = self.x_embedder.grid_size[0] + rf = self.x_embedder.grid_size[1] + lp = self.x_embedder.patch_size[0] + rp = self.x_embedder.patch_size[1] + lo = self.x_embedder.ol[0] + ro = self.x_embedder.ol[1] + lm = self.x_embedder.img_size[0] + rm = self.x_embedder.img_size[1] + lpad 
= self.x_embedder.pad_size[0] + rpad = self.x_embedder.pad_size[1] + + torch_map = th.zeros(lm+2*lpad, rm+2*rpad).to('cuda') + for i in range(lf): + for j in range(rf): + xx = (i) * (lp - lo) + yy = (j) * (rp - ro) + torch_map[xx:(xx+lp), yy:(yy+rp)]+=1 + torch_map = torch_map[lpad:lm+lpad, rpad:rm+rpad] + self.torch_map = th.reciprocal(torch_map) + + @property + def dtype(self): + return next(self.parameters()).dtype + +class PixArt_MDT_MOS_AS_TOKEN(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + + def __init__(self, input_size=(256,16), patch_size=(16,4), overlap=(0, 0), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=False, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, mask_ratio=None, decode_layer=4,**kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + decode_layer = int(decode_layer) + + self.mos_embed = nn.Embedding(num_embeddings=5, embedding_dim=hidden_size) + + self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + # self.x_embedder = PatchEmbed_1D(input) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size[0] // self.patch_size[0] * 2 + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", th.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + # self.mos_block = nn.Sequential( + + # ) + + self.y_embedder = nn.Linear(cond_dim, hidden_size) + + half_depth = (depth - decode_layer)//2 + self.half_depth=half_depth + + drop_path_half = [x.item() for x in th.linspace(0, drop_path, half_depth)] # stochastic depth decay rule + drop_path_decode = [x.item() for x in th.linspace(0, drop_path, decode_layer)] + self.en_inblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, FFN_type='mlp') for i in range(half_depth) + ]) + self.en_outblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True, FFN_type='mlp') for i in range(half_depth) + ]) + self.de_blocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_decode[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True, FFN_type='mlp') for i in range(decode_layer) + ]) + self.sideblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, FFN_type='mlp') for _ in range(1) + ]) + + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + 
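+        # Quality conditioning: an integer MOS level in {1, ..., 5} is shifted to
+        # {0, ..., 4}, looked up in self.mos_embed and prepended to the patch
+        # sequence in forward(). Usage sketch (illustrative names):
+        #
+        #     mos = th.full((x.shape[0],), 5)            # request highest quality
+        #     out = model(x, t, [y], [y_mask], mos=mos)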
self.decoder_pos_embed = nn.Parameter(th.zeros( + 1, num_patches, hidden_size), requires_grad=True) + if mask_ratio is not None: + self.mask_token = nn.Parameter(th.zeros(1, 1, hidden_size)) + self.mask_ratio = float(mask_ratio) + self.decode_layer = int(decode_layer) + else: + self.mask_token = nn.Parameter(th.zeros( + 1, 1, hidden_size), requires_grad=False) + self.mask_ratio = None + self.decode_layer = int(decode_layer) + print("mask ratio:", self.mask_ratio, "decode_layer:", self.decode_layer) + self.initialize_weights() + + + def forward(self, x, timestep, context_list, context_mask_list=None, enable_mask=False, mos=None, **kwargs): + """ + Forward pass of PixArt. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + # mos = th.ones(x.shape[0], dtype=th.int).to(x.device) + #print(f'DEBUG! {x}, {mos}') + assert mos.shape[0] == x.shape[0] + #import pdb; pdb.set_trace() + mos = mos - 1 + mos = self.mos_embed(mos.to(x.device).to(th.int)) # [N, dim] + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + # import pdb + # pdb.set_trace() + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y) # (N, L, D) + # if not self.training: + try: + mask = context_mask_list[0] # (N, L) + except: + mask = th.ones(x.shape[0], 1).to(x.device) + + assert mask is not None + # if mask is not None: + + y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + y_lens = [int(_) for _ in y_lens] + + masked_stage = False + skips = [] + # TODO : masking op for training + try: + x = th.cat([mos, x], dim=1) # [N, L+1, dim] + except: + x = th.cat([mos.unsqueeze(1), x], dim=1) + input_skip = x + + if self.mask_ratio is not None and self.training: + # masking: length -> length * mask_ratio + rand_mask_ratio = th.rand(1, device=x.device) # noise in [0, 1] + rand_mask_ratio = rand_mask_ratio * 0.2 + self.mask_ratio # mask_ratio, mask_ratio + 0.2 + # print(rand_mask_ratio) + x, mask, ids_restore, ids_keep = self.random_masking( + x, rand_mask_ratio) + masked_stage = True + for block in self.en_inblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=None) + skips.append(x) + + for block in self.en_outblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=None) + + if self.mask_ratio is not None and self.training: + x = self.forward_side_interpolater(x, y, t0, y_lens, mask, ids_restore) + masked_stage = False + else: + # add pos embed + x[:, 1:, :] = x[:, 1:, :] + self.decoder_pos_embed + # x = x + self.decoder_pos_embed + # import pdb + # pdb.set_trace() + for i in range(len(self.de_blocks)): + block = self.de_blocks[i] + this_skip = input_skip + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=this_skip, ids_keep=None) + x = x[:, 1:, :] + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + # import pdb + # pdb.set_trace() + 
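+        # The MOS token occupies position 0 throughout this pass: decoder_pos_embed
+        # is added to x[:, 1:, :] only, and the token is dropped again before
+        # final_layer, so the unpatchified output keeps the latent shape
+        # (N, out_channels, H, W).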
return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, context_list, context_mask_list=None, cfg_scale=4.0, **kwargs): + """ + Forward pass of PixArt, but also batches the unconditional forward pass for classifier-free guidance. + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + # import pdb + # pdb.set_trace() + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, context_list, context_mask_list=None) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :8], model_out[:, 8:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify(self, x): + + """ + x: (N, T, patch_size 0 * patch_size 1 * C) + imgs: (Bs. 256. 16. 8) + """ + if self.x_embedder.ol == (0, 0) or self.x_embedder.ol == [0, 0]: + c = self.out_channels + p0 = self.x_embedder.patch_size[0] + p1 = self.x_embedder.patch_size[1] + h, w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + + x = x.reshape(shape=(x.shape[0], h, w, p0, p1, c)) + x = th.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p0, w * p1)) + return imgs + + lf = self.x_embedder.grid_size[0] + rf = self.x_embedder.grid_size[1] + lp = self.x_embedder.patch_size[0] + rp = self.x_embedder.patch_size[1] + lo = self.x_embedder.ol[0] + ro = self.x_embedder.ol[1] + lm = self.x_embedder.img_size[0] + rm = self.x_embedder.img_size[1] + lpad = self.x_embedder.pad_size[0] + rpad = self.x_embedder.pad_size[1] + bs = x.shape[0] + + torch_map = self.torch_map + + c = self.out_channels + + x = x.reshape(shape=(bs, lf, rf, lp, rp, c)) + x = th.einsum('nhwpqc->nchwpq', x) + + added_map = th.zeros(bs, c, lm+2*lpad, rm+2*rpad).to(x.device) + + for i in range(lf): + for j in range(rf): + xx = (i) * (lp - lo) + yy = (j) * (rp - ro) + added_map[:, :, xx:(xx+lp), yy:(yy+rp)] += \ + x[:, :, i, j, :, :] + # import pdb + # pdb.set_trace() + added_map = added_map[:][:][lpad:lm+lpad, rpad:rm+rpad] + return th.mul(added_map.to(x.device), torch_map.to(x.device)) + + + def random_masking(self, x, mask_ratio): + """ + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. 
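+        Unlike the plain variant, the first token (the prepended MOS quality token)
+        is never masked: the shuffle runs over the remaining L - 1 patch tokens and
+        the quality token is re-attached at position 0 afterwards.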
+ x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + L = L - 1 + len_keep = int(L * (1 - mask_ratio)) + + noise = th.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + # ascend: small is keep, large is remove + ids_shuffle = th.argsort(noise, dim=1) + ids_restore = th.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = th.gather( + x[:, 1:, :], dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + x_masked = th.cat([x[:, 0, :].unsqueeze(1), x_masked], dim=1) + # import pdb + # pdb.set_trace() + # generate the binary mask: 0 is keep, 1 is remove + mask = th.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = th.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore, ids_keep + + def forward_side_interpolater(self, x, y, t0, y_lens, mask, ids_restore): + # append mask tokens to sequence + mask_tokens = self.mask_token.repeat( + x.shape[0], ids_restore.shape[1] - x.shape[1] + 1, 1) + x_ = th.cat([x[:, 1:, :], mask_tokens], dim=1) + + x_ = th.gather( + x_, dim=1, index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2])) # unshuffle + + # add pos embed + x_ = x_ + self.decoder_pos_embed + x = th.cat([x[:, 0, :].unsqueeze(1), x_], dim=1) + # import pdb + # pdb.set_trace() + # pass to the basic block + x_before = x + for sideblock in self.sideblocks: + x = sideblock(x, y, t0, y_lens, ids_keep=None) + + # masked shortcut + mask = mask.unsqueeze(dim=-1) + # import pdb;pdb.set_trace() + mask = th.cat([th.ones(mask.shape[0], 1, 1).to(mask.device), mask], dim=1) + x = x*mask + (1-mask)*x_before + + return x + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN modulation layers in PixArt blocks: + for block in self.en_inblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.en_outblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.de_blocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.sideblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + if self.x_embedder.ol == 
[0, 0] or self.x_embedder.ol == (0, 0): + return + + lf = self.x_embedder.grid_size[0] + rf = self.x_embedder.grid_size[1] + lp = self.x_embedder.patch_size[0] + rp = self.x_embedder.patch_size[1] + lo = self.x_embedder.ol[0] + ro = self.x_embedder.ol[1] + lm = self.x_embedder.img_size[0] + rm = self.x_embedder.img_size[1] + lpad = self.x_embedder.pad_size[0] + rpad = self.x_embedder.pad_size[1] + + torch_map = th.zeros(lm+2*lpad, rm+2*rpad).to('cuda') + for i in range(lf): + for j in range(rf): + xx = (i) * (lp - lo) + yy = (j) * (rp - ro) + torch_map[xx:(xx+lp), yy:(yy+rp)]+=1 + torch_map = torch_map[lpad:lm+lpad, rpad:rm+rpad] + self.torch_map = th.reciprocal(torch_map) + + @property + def dtype(self): + return next(self.parameters()).dtype + + +class PixArt_MDT_LC(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + + def __init__(self, input_size=(256,16), patch_size=(16,4), overlap=(0, 0), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=False, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, mask_ratio=None, decode_layer=4,**kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + decode_layer = int(decode_layer) + + self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + # self.x_embedder = PatchEmbed_1D(input) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size[0] // self.patch_size[0] * 2 + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", th.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = nn.Linear(cond_dim, hidden_size) + + half_depth = (depth - decode_layer)//2 + self.half_depth=half_depth + + drop_path_half = [x.item() for x in th.linspace(0, drop_path, half_depth)] # stochastic depth decay rule + drop_path_decode = [x.item() for x in th.linspace(0, drop_path, decode_layer)] + self.en_inblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, FFN_type='mlp') for i in range(half_depth) + ]) + self.en_outblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True, FFN_type='mlp') for i in range(half_depth) + ]) + self.de_blocks = nn.ModuleList([ + DEBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_decode[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True, FFN_type='mlp', num_f=self.x_embedder.grid_size[1]+1, num_t=self.x_embedder.grid_size[0]) for i in range(decode_layer) + ]) + self.sideblocks = nn.ModuleList([ + MDTBlock(hidden_size, 
num_heads, mlp_ratio=mlp_ratio, + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, FFN_type='mlp') for _ in range(1) + ]) + + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + self.decoder_pos_embed = nn.Parameter(th.zeros( + 1, num_patches, hidden_size), requires_grad=True) + if mask_ratio is not None: + self.mask_token = nn.Parameter(th.zeros(1, 1, hidden_size)) + self.mask_ratio = float(mask_ratio) + self.decode_layer = int(decode_layer) + else: + self.mask_token = nn.Parameter(th.zeros( + 1, 1, hidden_size), requires_grad=False) + self.mask_ratio = None + self.decode_layer = int(decode_layer) + print("mask ratio:", self.mask_ratio, "decode_layer:", self.decode_layer) + self.initialize_weights() + + + def forward(self, x, timestep, context_list, context_mask_list=None, enable_mask=False, **kwargs): + """ + Forward pass of PixArt. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + # import pdb + # pdb.set_trace() + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y) # (N, L, D) + # if not self.training: + try: + mask = context_mask_list[0] # (N, L) + except: + mask = th.ones(x.shape[0], 1).to(x.device) + print("MASK !!!!!!!!!!!!!!!!!!!!!!!!!") + + assert mask is not None + # if mask is not None: + + y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + y_lens = [int(_) for _ in y_lens] + + input_skip = x + + masked_stage = False + skips = [] + # TODO : masking op for training + if self.mask_ratio is not None and self.training: + # masking: length -> length * mask_ratio + rand_mask_ratio = th.rand(1, device=x.device) # noise in [0, 1] + rand_mask_ratio = rand_mask_ratio * 0.2 + self.mask_ratio # mask_ratio, mask_ratio + 0.2 + # print(rand_mask_ratio) + x, mask, ids_restore, ids_keep = self.random_masking( + x, rand_mask_ratio) + masked_stage = True + + + for block in self.en_inblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=None) + skips.append(x) + + for block in self.en_outblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=None) + + if self.mask_ratio is not None and self.training: + x = self.forward_side_interpolater(x, y, t0, y_lens, mask, ids_restore) + masked_stage = False + else: + # add pos embed + x = x + self.decoder_pos_embed + bs = x.shape[0] + + bs, D, L = x.shape + T, F = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + # reshaped = x.reshape(bs, T, F, L).to(x.device) + end = th.zeros(bs, T, 1, L).to(x.device) + # x = th.cat((reshaped, zero_tensor), 2) + # import pdb;pdb.set_trace() + # assert x.shape == [bs, T, F + 1, L] + for i in range(len(self.de_blocks)): + block = self.de_blocks[i] + this_skip = input_skip + x, end = auto_grad_checkpoint(block, x, end, y, t0, y_lens, 
skip=this_skip, ids_keep=None) + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, context_list, context_mask_list=None, cfg_scale=4.0, **kwargs): + """ + Forward pass of PixArt, but also batches the unconditional forward pass for classifier-free guidance. + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + # import pdb + # pdb.set_trace() + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, context_list, context_mask_list=None) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :8], model_out[:, 8:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify(self, x): + + """ + x: (N, T, patch_size 0 * patch_size 1 * C) + imgs: (Bs. 256. 16. 8) + """ + if self.x_embedder.ol == (0, 0) or self.x_embedder.ol == [0, 0]: + c = self.out_channels + p0 = self.x_embedder.patch_size[0] + p1 = self.x_embedder.patch_size[1] + h, w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + + x = x.reshape(shape=(x.shape[0], h, w, p0, p1, c)) + x = th.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p0, w * p1)) + return imgs + + lf = self.x_embedder.grid_size[0] + rf = self.x_embedder.grid_size[1] + lp = self.x_embedder.patch_size[0] + rp = self.x_embedder.patch_size[1] + lo = self.x_embedder.ol[0] + ro = self.x_embedder.ol[1] + lm = self.x_embedder.img_size[0] + rm = self.x_embedder.img_size[1] + lpad = self.x_embedder.pad_size[0] + rpad = self.x_embedder.pad_size[1] + bs = x.shape[0] + + torch_map = self.torch_map + + c = self.out_channels + + x = x.reshape(shape=(bs, lf, rf, lp, rp, c)) + x = th.einsum('nhwpqc->nchwpq', x) + + added_map = th.zeros(bs, c, lm+2*lpad, rm+2*rpad).to(x.device) + + for i in range(lf): + for j in range(rf): + xx = (i) * (lp - lo) + yy = (j) * (rp - ro) + added_map[:, :, xx:(xx+lp), yy:(yy+rp)] += \ + x[:, :, i, j, :, :] + # import pdb + # pdb.set_trace() + added_map = added_map[:][:][lpad:lm+lpad, rpad:rm+rpad] + return th.mul(added_map.to(x.device), torch_map.to(x.device)) + + def random_masking(self, x, mask_ratio): + """ + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. 
+ x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = th.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + # ascend: small is keep, large is remove + ids_shuffle = th.argsort(noise, dim=1) + ids_restore = th.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = th.gather( + x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = th.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = th.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore, ids_keep + + def forward_side_interpolater(self, x, y, t0, y_lens, mask, ids_restore): + # append mask tokens to sequence + mask_tokens = self.mask_token.repeat( + x.shape[0], ids_restore.shape[1] - x.shape[1], 1) + x_ = th.cat([x, mask_tokens], dim=1) + x = th.gather( + x_, dim=1, index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2])) # unshuffle + + # add pos embed + x = x + self.decoder_pos_embed + + # pass to the basic block + x_before = x + for sideblock in self.sideblocks: + x = sideblock(x, y, t0, y_lens, ids_keep=None) + + # masked shortcut + mask = mask.unsqueeze(dim=-1) + x = x*mask + (1-mask)*x_before + + return x + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN modulation layers in PixArt blocks: + for block in self.en_inblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.en_outblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.de_blocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.sideblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + if self.x_embedder.ol == [0, 0] or self.x_embedder.ol == (0, 0): + return + + lf = self.x_embedder.grid_size[0] + rf = self.x_embedder.grid_size[1] + lp = self.x_embedder.patch_size[0] + rp = self.x_embedder.patch_size[1] + lo = self.x_embedder.ol[0] + ro = self.x_embedder.ol[1] + lm = self.x_embedder.img_size[0] + rm = self.x_embedder.img_size[1] + lpad 
= self.x_embedder.pad_size[0] + rpad = self.x_embedder.pad_size[1] + + torch_map = th.zeros(lm+2*lpad, rm+2*rpad).to('cuda') + for i in range(lf): + for j in range(rf): + xx = (i) * (lp - lo) + yy = (j) * (rp - ro) + torch_map[xx:(xx+lp), yy:(yy+rp)]+=1 + torch_map = torch_map[lpad:lm+lpad, rpad:rm+rpad] + self.torch_map = th.reciprocal(torch_map) + + @property + def dtype(self): + return next(self.parameters()).dtype + + +class PixArt_MDT(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + + def __init__(self, input_size=(256,16), patch_size=(16,4), overlap=(0, 0), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=False, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, mask_ratio=None, decode_layer=4,**kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + decode_layer = int(decode_layer) + + self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + # self.x_embedder = PatchEmbed_1D(input) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size[0] // self.patch_size[0] * 2 + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", th.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = nn.Linear(cond_dim, hidden_size) + + half_depth = (depth - decode_layer)//2 + self.half_depth=half_depth + + drop_path_half = [x.item() for x in th.linspace(0, drop_path, half_depth)] # stochastic depth decay rule + drop_path_decode = [x.item() for x in th.linspace(0, drop_path, decode_layer)] + self.en_inblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False) for i in range(half_depth) + ]) + self.en_outblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True) for i in range(half_depth) + ]) + self.de_blocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_decode[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True) for i in range(decode_layer) + ]) + self.sideblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False) for _ in range(1) + ]) + + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + self.decoder_pos_embed = nn.Parameter(th.zeros( + 1, num_patches, hidden_size), requires_grad=True) + if mask_ratio is not None: + self.mask_token = nn.Parameter(th.zeros(1, 1, hidden_size)) + 
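+            # At train time the effective ratio is re-sampled each step as
+            # mask_ratio + U(0, 1) * 0.2, i.e. uniform in [mask_ratio, mask_ratio + 0.2);
+            # e.g. mask_ratio=0.3 masks roughly 30-50% of the patch tokens.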
self.mask_ratio = float(mask_ratio) + self.decode_layer = int(decode_layer) + else: + self.mask_token = nn.Parameter(th.zeros( + 1, 1, hidden_size), requires_grad=False) + self.mask_ratio = None + self.decode_layer = int(decode_layer) + print("mask ratio:", self.mask_ratio, "decode_layer:", self.decode_layer) + self.initialize_weights() + + + def forward(self, x, timestep, context_list, context_mask_list=None, enable_mask=False, **kwargs): + """ + Forward pass of PixArt. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + # print(f'debug_MDT : {x.shape[0]}') + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + # import pdb + # print(f'debug_MDT : {x.shape[0]}') + # pdb.set_trace() + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + # print(f'debug_MDT : {x.shape[0]}') + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + # print(f'debug_MDT : {x.shape[0]}') + y = self.y_embedder(y) # (N, L, D) + # if not self.training: + try: + mask = context_mask_list[0] # (N, L) + except: + mask = th.ones(x.shape[0], 1).to(x.device) + print("MASK !!!!!!!!!!!!!!!!!!!!!!!!!") + + assert mask is not None + # if mask is not None: + + y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + y_lens = [int(_) for _ in y_lens] + # print(f'debug_MDT : {x.shape[0]}') + input_skip = x + + masked_stage = False + skips = [] + # TODO : masking op for training + if self.mask_ratio is not None and self.training: + # masking: length -> length * mask_ratio + rand_mask_ratio = th.rand(1, device=x.device) # noise in [0, 1] + rand_mask_ratio = rand_mask_ratio * 0.2 + self.mask_ratio # mask_ratio, mask_ratio + 0.2 + # print(rand_mask_ratio) + x, mask, ids_restore, ids_keep = self.random_masking( + x, rand_mask_ratio) + masked_stage = True + + + for block in self.en_inblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=None) + skips.append(x) + + for block in self.en_outblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=None) + + if self.mask_ratio is not None and self.training: + x = self.forward_side_interpolater(x, y, t0, y_lens, mask, ids_restore) + masked_stage = False + else: + # add pos embed + x = x + self.decoder_pos_embed + + for i in range(len(self.de_blocks)): + block = self.de_blocks[i] + this_skip = input_skip + + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=this_skip, ids_keep=None) + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + + x = self.unpatchify(x) # (N, out_channels, H, W) + + return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, y, cfg_scale, mask=None, **kwargs): + """ + Forward pass of PixArt, but also batches the 
unconditional forward pass for classifier-free guidance. + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, y, mask) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :8], model_out[:, 8:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify(self, x): + + """ + x: (N, T, patch_size 0 * patch_size 1 * C) + imgs: (Bs. 256. 16. 8) + """ + if self.x_embedder.ol == (0, 0) or self.x_embedder.ol == [0, 0]: + + c = self.out_channels + + p0 = self.x_embedder.patch_size[0] + p1 = self.x_embedder.patch_size[1] + h, w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + + x = x.reshape(shape=(x.shape[0], h, w, p0, p1, c)) + + x = th.einsum('nhwpqc->nchpwq', x) + + imgs = x.reshape(shape=(x.shape[0], c, h * p0, w * p1)) + + return imgs + + lf = self.x_embedder.grid_size[0] + rf = self.x_embedder.grid_size[1] + lp = self.x_embedder.patch_size[0] + rp = self.x_embedder.patch_size[1] + lo = self.x_embedder.ol[0] + ro = self.x_embedder.ol[1] + lm = self.x_embedder.img_size[0] + rm = self.x_embedder.img_size[1] + lpad = self.x_embedder.pad_size[0] + rpad = self.x_embedder.pad_size[1] + bs = x.shape[0] + + torch_map = self.torch_map + + c = self.out_channels + + x = x.reshape(shape=(bs, lf, rf, lp, rp, c)) + x = th.einsum('nhwpqc->nchwpq', x) + + added_map = th.zeros(bs, c, lm+2*lpad, rm+2*rpad).to(x.device) + + for i in range(lf): + for j in range(rf): + xx = (i) * (lp - lo) + yy = (j) * (rp - ro) + added_map[:, :, xx:(xx+lp), yy:(yy+rp)] += \ + x[:, :, i, j, :, :] + + added_map = added_map[:, :, lpad:lm+lpad, rpad:rm+rpad] + return th.mul(added_map, torch_map.to(added_map.device)) + + + def random_masking(self, x, mask_ratio): + """ + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. 
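The MDT-style masking used during training keeps only a random subset of the patch tokens: per-sample noise is argsorted once to decide which tokens survive and argsorted again to remember how to restore the original order, and `forward_side_interpolater` later scatters learned mask tokens back into the dropped slots. A minimal standalone sketch of that keep/restore bookkeeping, with toy sizes (not the class method itself):

```python
import torch

N, L, D, mask_ratio = 2, 8, 4, 0.5
x = torch.arange(N * L * D, dtype=torch.float32).reshape(N, L, D)
len_keep = int(L * (1 - mask_ratio))

noise = torch.rand(N, L)
ids_shuffle = torch.argsort(noise, dim=1)        # a random permutation per sample
ids_restore = torch.argsort(ids_shuffle, dim=1)  # its inverse permutation

ids_keep = ids_shuffle[:, :len_keep]
x_masked = torch.gather(x, 1, ids_keep.unsqueeze(-1).repeat(1, 1, D))   # (N, len_keep, D)

# Later, mask tokens are appended and the inverse permutation puts every token back in place.
mask_tokens = torch.zeros(N, L - len_keep, D)
x_full = torch.cat([x_masked, mask_tokens], dim=1)
x_restored = torch.gather(x_full, 1, ids_restore.unsqueeze(-1).repeat(1, 1, D))
print(x_masked.shape, x_restored.shape)          # torch.Size([2, 4, 4]) torch.Size([2, 8, 4])
```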
+ x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = th.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + # ascend: small is keep, large is remove + ids_shuffle = th.argsort(noise, dim=1) + ids_restore = th.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = th.gather( + x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = th.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = th.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore, ids_keep + + def forward_side_interpolater(self, x, y, t0, y_lens, mask, ids_restore): + # append mask tokens to sequence + mask_tokens = self.mask_token.repeat( + x.shape[0], ids_restore.shape[1] - x.shape[1], 1) + x_ = th.cat([x, mask_tokens], dim=1) + x = th.gather( + x_, dim=1, index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2])) # unshuffle + + # add pos embed + x = x + self.decoder_pos_embed + + # pass to the basic block + x_before = x + for sideblock in self.sideblocks: + x = sideblock(x, y, t0, y_lens, ids_keep=None) + + # masked shortcut + mask = mask.unsqueeze(dim=-1) + x = x*mask + (1-mask)*x_before + + return x + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN modulation layers in PixArt blocks: + for block in self.en_inblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.en_outblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.de_blocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.sideblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + if self.x_embedder.ol == [0, 0] or self.x_embedder.ol == (0, 0): + return + + lf = self.x_embedder.grid_size[0] + rf = self.x_embedder.grid_size[1] + lp = self.x_embedder.patch_size[0] + rp = self.x_embedder.patch_size[1] + lo = self.x_embedder.ol[0] + ro = self.x_embedder.ol[1] + lm = self.x_embedder.img_size[0] + rm = self.x_embedder.img_size[1] + lpad 
= self.x_embedder.pad_size[0] + rpad = self.x_embedder.pad_size[1] + + torch_map = th.zeros(lm+2*lpad, rm+2*rpad).to('cuda') + for i in range(lf): + for j in range(rf): + xx = (i) * (lp - lo) + yy = (j) * (rp - ro) + torch_map[xx:(xx+lp), yy:(yy+rp)]+=1 + torch_map = torch_map[lpad:lm+lpad, rpad:rm+rpad] + self.torch_map = th.reciprocal(torch_map) + + @property + def dtype(self): + return next(self.parameters()).dtype + +class PixArt_MDT_FIT(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + + def __init__(self, input_size=(256,16), patch_size=(16,4), overlap=(0, 0), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=False, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, mask_ratio=None, decode_layer=4,**kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + decode_layer = int(decode_layer) + + self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + # self.x_embedder = PatchEmbed_1D(input) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size[0] // self.patch_size[0] * 2 + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", th.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = nn.Linear(cond_dim, hidden_size) + + half_depth = (depth - decode_layer)//2 + self.half_depth=half_depth + + drop_path_half = [x.item() for x in th.linspace(0, drop_path, half_depth)] # stochastic depth decay rule + drop_path_decode = [x.item() for x in th.linspace(0, drop_path, decode_layer)] + self.en_inblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False) for i in range(half_depth) + ]) + self.en_outblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_half[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True) for i in range(half_depth) + ]) + self.de_blocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path_decode[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False, skip=True) for i in range(decode_layer) + ]) + self.sideblocks = nn.ModuleList([ + MDTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio, + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False) for _ in range(1) + ]) + + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + self.decoder_pos_embed = nn.Parameter(th.zeros( + 1, num_patches, hidden_size), requires_grad=True) + if mask_ratio is not None: + self.mask_token = nn.Parameter(th.zeros(1, 1, hidden_size)) + 
self.mask_ratio = float(mask_ratio) + self.decode_layer = int(decode_layer) + else: + self.mask_token = nn.Parameter(th.zeros( + 1, 1, hidden_size), requires_grad=False) + self.mask_ratio = None + self.decode_layer = int(decode_layer) + print("mask ratio:", self.mask_ratio, "decode_layer:", self.decode_layer) + self.initialize_weights() + + + def forward(self, x, timestep, context_list, context_mask_list=None, enable_mask=False, **kwargs): + """ + Forward pass of PixArt. + x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + # import pdb + # pdb.set_trace() + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y) # (N, L, D) + # if not self.training: + try: + mask = context_mask_list[0] # (N, L) + except: + mask = th.ones(x.shape[0], 1).to(x.device) + print("MASK !!!!!!!!!!!!!!!!!!!!!!!!!") + + assert mask is not None + # if mask is not None: + + y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + y_lens = [int(_) for _ in y_lens] + + input_skip = x + + masked_stage = False + skips = [] + # TODO : masking op for training + if self.mask_ratio is not None and self.training: + # masking: length -> length * mask_ratio + rand_mask_ratio = th.rand(1, device=x.device) # noise in [0, 1] + rand_mask_ratio = rand_mask_ratio * 0.2 + self.mask_ratio # mask_ratio, mask_ratio + 0.2 + # print(rand_mask_ratio) + x, mask, ids_restore, ids_keep = self.random_masking( + x, rand_mask_ratio) + masked_stage = True + + + for block in self.en_inblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, ids_keep=None) + skips.append(x) + + for block in self.en_outblocks: + if masked_stage: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=ids_keep) + else: + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=skips.pop(), ids_keep=None) + + if self.mask_ratio is not None and self.training: + x = self.forward_side_interpolater(x, y, t0, y_lens, mask, ids_restore) + masked_stage = False + else: + # add pos embed + x = x + self.decoder_pos_embed + + for i in range(len(self.de_blocks)): + block = self.de_blocks[i] + this_skip = input_skip + + x = auto_grad_checkpoint(block, x, y, t0, y_lens, skip=this_skip, ids_keep=None) + + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, y, cfg_scale, mask=None, **kwargs): + """ + Forward pass of PixArt, but also batches the unconditional forward pass for classifier-free guidance. 
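`forward_with_cfg` duplicates the first half of the batch so the conditional and unconditional branches share one forward pass, then recombines the two noise predictions with the guidance scale. A minimal sketch of just that recombination step, with a random tensor standing in for the model output (it assumes, as in the code, that the conditional and unconditional copies occupy the two halves of the batch and that no variance channels are predicted):

```python
import torch

cfg_scale = 4.0
batch, channels, h, w = 4, 8, 256, 16            # 2 prompts, duplicated to 4 samples

# Stand-in for self.forward(th.cat([half, half]), ...): first half conditional, second unconditional.
model_out = torch.randn(batch, channels, h, w)

eps, rest = model_out[:, :channels], model_out[:, channels:]   # rest is empty when pred_sigma=False
cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)

# Classifier-free guidance: push the prediction away from the unconditional estimate.
half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
eps = torch.cat([half_eps, half_eps], dim=0)     # same duplicated layout as the input batch
print(eps.shape)                                  # torch.Size([4, 8, 256, 16])
```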
+ """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, y, mask) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :8], model_out[:, 8:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify(self, x): + + """ + x: (N, T, patch_size 0 * patch_size 1 * C) + imgs: (Bs. 256. 16. 8) + """ + + c = self.out_channels + p0 = self.x_embedder.patch_size[0] + p1 = self.x_embedder.patch_size[1] + h, w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + + x = x.reshape(shape=(x.shape[0], h, w, p0, p1, c)) + x = th.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p0, w * p1)) + return imgs + + def random_masking(self, x, mask_ratio): + """ + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. + x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = th.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + # ascend: small is keep, large is remove + ids_shuffle = th.argsort(noise, dim=1) + ids_restore = th.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = th.gather( + x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = th.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = th.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore, ids_keep + + def forward_side_interpolater(self, x, y, t0, y_lens, mask, ids_restore): + # append mask tokens to sequence + mask_tokens = self.mask_token.repeat( + x.shape[0], ids_restore.shape[1] - x.shape[1], 1) + x_ = th.cat([x, mask_tokens], dim=1) + x = th.gather( + x_, dim=1, index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2])) # unshuffle + + # add pos embed + x = x + self.decoder_pos_embed + + # pass to the basic block + x_before = x + for sideblock in self.sideblocks: + x = sideblock(x, y, t0, y_lens, ids_keep=None) + + # masked shortcut + mask = mask.unsqueeze(dim=-1) + x = x*mask + (1-mask)*x_before + + return x + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + + # Replace the absolute embedding with 2d-rope position embedding: + # pos_embed = + self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # 
Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN modulation layers in PixArt blocks: + for block in self.en_inblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.en_outblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.de_blocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + for block in self.sideblocks: + nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + @property + def dtype(self): + return next(self.parameters()).dtype + +class PixArt_Slow(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + + def __init__(self, input_size=(256,16), patch_size=(16,4), overlap=(0, 0), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=True, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, **kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if pred_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + + self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + # self.x_embedder = PatchEmbed_1D(input) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.base_size = input_size[0] // self.patch_size[0] * 2 + # Will use fixed sin-cos embedding: + self.register_buffer("pos_embed", th.zeros(1, num_patches, hidden_size)) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = nn.Linear(cond_dim, hidden_size) + drop_path = [x.item() for x in th.linspace(0, drop_path, depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + PixArtBlock_Slow(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path[i], + input_size=(self.x_embedder.grid_size[0], self.x_embedder.grid_size[1]), + window_size=0, + use_rel_pos=False) + for i in range(depth) + ]) + self.final_layer = T2IFinalLayer(hidden_size, patch_size, self.out_channels) + + self.initialize_weights() + + def forward(self, x, timestep, context_list, context_mask_list=None, **kwargs): + """ + Forward pass of PixArt. 
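The per-block `drop_path` rates built in `__init__` follow the stochastic depth decay rule: probabilities grow linearly from 0 to the requested maximum across the transformer depth, so early blocks are almost never dropped and the deepest block is dropped with the full probability. A quick illustration of that schedule with assumed values (depth 28, maximum 0.1):

```python
import torch

depth, max_drop_path = 28, 0.1
# Same construction as in __init__: one DropPath probability per block.
drop_path = [x.item() for x in torch.linspace(0, max_drop_path, depth)]
print(drop_path[0], round(drop_path[depth // 2], 3), drop_path[-1])
# 0.0  ~0.052  0.1  -> deeper blocks are skipped more often during training
```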
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + pos_embed = self.pos_embed.to(self.dtype) + self.h, self.w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + + x = self.x_embedder(x) + pos_embed # (N, T, D), where T = H * W / patch_size ** 2 + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y) # (N, L, D) + mask = context_mask_list[0] # (N, L) + + assert mask is not None + # if mask is not None: + + # y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + # y_lens = mask.sum(dim=1).tolist() + # y_lens = [int(_) for _ in y_lens] + for block in self.blocks: + x = auto_grad_checkpoint(block, x, y, t0, mask) # (N, T, D) #support grad checkpoint + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, y, cfg_scale, mask=None, **kwargs): + """ + Forward pass of PixArt, but also batches the unconditional forward pass for classifier-free guidance. + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, y, mask) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :8], model_out[:, 8:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify(self, x): + + """ + x: (N, T, patch_size 0 * patch_size 1 * C) + imgs: (Bs. 256. 16. 
8) + """ + c = self.out_channels + p0 = self.x_embedder.patch_size[0] + p1 = self.x_embedder.patch_size[1] + h, w = self.x_embedder.grid_size[0], self.x_embedder.grid_size[1] + + x = x.reshape(shape=(x.shape[0], h, w, p0, p1, c)) + x = th.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h * p0, w * p1)) + return imgs + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN modulation layers in PixArt blocks: + # for block in self.blocks: + # nn.init.constant_(block.cross_attn.proj.weight, 0) + # nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + @property + def dtype(self): + return next(self.parameters()).dtype + +class PixArtBlock_1D(nn.Module): + """ + A PixArt block with adaptive layer norm (adaLN-single) conditioning. + """ + + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, drop_path=0., window_size=0, use_rel_pos=False, **block_kwargs): + super().__init__() + self.hidden_size = hidden_size + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = WindowAttention(hidden_size, num_heads=num_heads, qkv_bias=True, + input_size=None, + use_rel_pos=use_rel_pos, **block_kwargs) + # self.attn = Attention(hidden_size, num_heads=num_heads, qkv_bias=True, **block_kwargs) + self.cross_attn = MultiHeadCrossAttention(hidden_size, num_heads, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + # to be compatible with lower version pytorch + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.mlp = Mlp(in_features=hidden_size, hidden_features=int(hidden_size * mlp_ratio), act_layer=approx_gelu, drop=0) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.window_size = window_size + self.scale_shift_table = nn.Parameter(th.randn(6, hidden_size) / hidden_size ** 0.5) + + def forward(self, x, y, t, mask=None, **kwargs): + B, N, C = x.shape + # x [3, 133, 1152] + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = (self.scale_shift_table[None] + t.reshape(B, 6, -1)).chunk(6, dim=1) + x = x + self.drop_path(gate_msa * self.attn(t2i_modulate(self.norm1(x), shift_msa, scale_msa)).reshape(B, N, C)) + x = x + self.cross_attn(x, y, mask) + x = x + self.drop_path(gate_mlp * self.mlp(t2i_modulate(self.norm2(x), shift_mlp, scale_mlp))) + + return x + +class PixArt_1D(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + + def __init__(self, input_size=(256,16), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=True, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, **kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + + self.x_embedder = PatchEmbed_1D(input_size, in_channels, hidden_size) + # self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + self.t_embedder = TimestepEmbedder(hidden_size) + self.p_enc_1d_model = PositionalEncoding1D(hidden_size) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = nn.Linear(cond_dim, hidden_size) + drop_path = [x.item() for x in th.linspace(0, drop_path, depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + PixArtBlock_1D(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path[i], + window_size=0, + use_rel_pos=False) + for i in range(depth) + ]) + self.final_layer = T2IFinalLayer(hidden_size, (1, input_size[1]), self.out_channels) + + self.initialize_weights() + + # if config: + # logger = get_root_logger(os.path.join(config.work_dir, 'train_log.log')) + # logger.warning(f"lewei scale: {self.lewei_scale}, base size: {self.base_size}") + # else: + # print(f'Warning: lewei scale: {self.lewei_scale}, base size: {self.base_size}') + + def forward(self, x, timestep, context_list, context_mask_list=None, **kwargs): + """ + Forward pass of PixArt. 
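Inside each block, the adaLN-single conditioning adds the timestep signal `t0` (from `t_block`) to a learned `scale_shift_table` and splits the result into six per-sample vectors: shift/scale/gate for the attention branch and for the MLP branch, applied through `t2i_modulate`. A minimal sketch of that split, with made-up batch and token sizes:

```python
import torch
import torch.nn as nn

hidden_size, B, N = 1152, 2, 64
scale_shift_table = nn.Parameter(torch.randn(6, hidden_size) / hidden_size ** 0.5)
t0 = torch.randn(B, 6 * hidden_size)             # stand-in for the t_block output

# As in PixArtBlock_1D.forward: learned table + timestep conditioning, chunked six ways.
shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = \
    (scale_shift_table[None] + t0.reshape(B, 6, -1)).chunk(6, dim=1)

def t2i_modulate(x, shift, scale):
    return x * (1 + scale) + shift

x = torch.randn(B, N, hidden_size)
x_mod = t2i_modulate(x, shift_msa, scale_msa)    # what is fed to self-attention
print(x_mod.shape, gate_msa.shape)               # torch.Size([2, 64, 1152]) torch.Size([2, 1, 1152])
```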
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + + x = self.x_embedder(x) # (N, T, D) + pos_embed = self.p_enc_1d_model(x) + x = x + pos_embed + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y) # (N, L, D) + try: + mask = context_mask_list[0] # (N, L) + except: + mask = th.ones(x.shape[0], 1).to(x.device) + print("MASK !!!!!!!!!!!!!!!!!!!!!!!!!") + + assert mask is not None + # if mask is not None: + y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + y_lens = mask.sum(dim=1).tolist() + y_lens = [int(_) for _ in y_lens] + for block in self.blocks: + x = auto_grad_checkpoint(block, x, y, t0, y_lens) # (N, T, D) #support grad checkpoint + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify_1D(x) # (N, out_channels, H, W) + return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, y, cfg_scale, mask=None, **kwargs): + """ + Forward pass of PixArt, but also batches the unconditional forward pass for classifier-free guidance. + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, y, mask) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :8], model_out[:, 8:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify_1D(self, x): + + """ + """ + c = self.out_channels + + x = x.reshape(shape=(x.shape[0], self.input_size[0], self.input_size[1], c)) + x = th.einsum('btfc->bctf', x) + # imgs = x.reshape(shape=(x.shape[0], c, h * p, h * p)) + return x + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + # pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + # self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN modulation layers in PixArt blocks: + for block in self.blocks: + 
nn.init.constant_(block.cross_attn.proj.weight, 0) + nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + @property + def dtype(self): + return next(self.parameters()).dtype + +class PixArt_Slow_1D(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + + def __init__(self, input_size=(256,16), in_channels=8, hidden_size=1152, depth=28, num_heads=16, mlp_ratio=4.0, class_dropout_prob=0.1, pred_sigma=True, drop_path: float = 0., window_size=0, window_block_indexes=None, use_rel_pos=False, cond_dim=1024, lewei_scale=1.0, + use_cfg=True, cfg_scale=4.0, config=None, model_max_length=120, **kwargs): + if window_block_indexes is None: + window_block_indexes = [] + super().__init__() + self.use_cfg = use_cfg + self.cfg_scale = cfg_scale + self.input_size = input_size + self.pred_sigma = pred_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if pred_sigma else in_channels + self.num_heads = num_heads + self.lewei_scale = lewei_scale, + + self.x_embedder = PatchEmbed_1D(input_size, in_channels, hidden_size) + # self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, hidden_size, bias=True) + self.t_embedder = TimestepEmbedder(hidden_size) + self.p_enc_1d_model = PositionalEncoding1D(hidden_size) + + approx_gelu = lambda: nn.GELU(approximate="tanh") + self.t_block = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + self.y_embedder = nn.Linear(cond_dim, hidden_size) + drop_path = [x.item() for x in th.linspace(0, drop_path, depth)] # stochastic depth decay rule + self.blocks = nn.ModuleList([ + PixArtBlock_Slow(hidden_size, num_heads, mlp_ratio=mlp_ratio, drop_path=drop_path[i], + window_size=0, + use_rel_pos=False) + for i in range(depth) + ]) + self.final_layer = T2IFinalLayer(hidden_size, (1, input_size[1]), self.out_channels) + + self.initialize_weights() + + # if config: + # logger = get_root_logger(os.path.join(config.work_dir, 'train_log.log')) + # logger.warning(f"lewei scale: {self.lewei_scale}, base size: {self.base_size}") + # else: + # print(f'Warning: lewei scale: {self.lewei_scale}, base size: {self.base_size}') + + def forward(self, x, timestep, context_list, context_mask_list=None, **kwargs): + """ + Forward pass of PixArt. 
+ x: (N, C, H, W) tensor of spatial inputs (images or latent representations of images) + t: (N,) tensor of diffusion timesteps + y: (N, 1, 120, C) tensor of class labels + """ + x = x.to(self.dtype) + timestep = timestep.to(self.dtype) + y = context_list[0].to(self.dtype) + + x = self.x_embedder(x) # (N, T, D) + pos_embed = self.p_enc_1d_model(x) + x = x + pos_embed + t = self.t_embedder(timestep.to(x.dtype)) # (N, D) + t0 = self.t_block(t) + y = self.y_embedder(y) # (N, L, D) + mask = context_mask_list[0] # (N, L) + + assert mask is not None + # if mask is not None: + # y = y.masked_select(mask.unsqueeze(-1) != 0).view(1, -1, x.shape[-1]) + # y_lens = mask.sum(dim=1).tolist() + # y_lens = [int(_) for _ in y_lens] + for block in self.blocks: + x = auto_grad_checkpoint(block, x, y, t0, mask) # (N, T, D) #support grad checkpoint + x = self.final_layer(x, t) # (N, T, patch_size ** 2 * out_channels) + x = self.unpatchify_1D(x) # (N, out_channels, H, W) + return x + + def forward_with_dpmsolver(self, x, timestep, y, mask=None, **kwargs): + """ + dpm solver donnot need variance prediction + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + model_out = self.forward(x, timestep, y, mask) + return model_out.chunk(2, dim=1)[0] + + def forward_with_cfg(self, x, timestep, y, cfg_scale, mask=None, **kwargs): + """ + Forward pass of PixArt, but also batches the unconditional forward pass for classifier-free guidance. + """ + # https://github.com/openai/glide-text2im/blob/main/notebooks/text2im.ipynb + half = x[: len(x) // 2] + combined = th.cat([half, half], dim=0) + model_out = self.forward(combined, timestep, y, mask) + model_out = model_out['x'] if isinstance(model_out, dict) else model_out + eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:] + cond_eps, uncond_eps = th.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = th.cat([half_eps, half_eps], dim=0) + return eps + # return th.cat([eps, rest], dim=1) + + def unpatchify_1D(self, x): + + """ + """ + c = self.out_channels + + x = x.reshape(shape=(x.shape[0], self.input_size[0], self.input_size[1], c)) + x = th.einsum('btfc->bctf', x) + # imgs = x.reshape(shape=(x.shape[0], c, h * p, h * p)) + return x + + def initialize_weights(self): + # Initialize transformer layers: + def _basic_init(module): + if isinstance(module, nn.Linear): + th.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + nn.init.constant_(module.bias, 0) + + self.apply(_basic_init) + + # Initialize (and freeze) pos_embed by sin-cos embedding: + # pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], self.x_embedder.grid_size, lewei_scale=self.lewei_scale, base_size=self.base_size) + # self.pos_embed.data.copy_(th.from_numpy(pos_embed).float().unsqueeze(0)) + + # Initialize patch_embed like nn.Linear (instead of nn.Conv2d): + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + # Initialize timestep embedding MLP: + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + nn.init.normal_(self.t_block[1].weight, std=0.02) + + # Initialize caption embedding MLP: + nn.init.normal_(self.y_embedder.weight, std=0.02) + # nn.init.normal_(self.y_embedder.y_proj.fc2.weight, std=0.02) + + # Zero-out adaLN modulation layers in PixArt blocks: + # for block in self.blocks: + # nn.init.constant_(block.cross_attn.proj.weight, 0) + # 
nn.init.constant_(block.cross_attn.proj.bias, 0) + + # Zero-out output layers: + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + @property + def dtype(self): + return next(self.parameters()).dtype + +def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0, lewei_scale=1.0, base_size=16): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + # import pdb + # pdb.set_trace() + if isinstance(grid_size, int): + grid_size = to_2tuple(grid_size) + grid_h = np.arange(grid_size[0], dtype=np.float32) / (grid_size[0]/base_size) / lewei_scale + grid_w = np.arange(grid_size[1], dtype=np.float32) / (grid_size[1]/base_size) / lewei_scale + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + grid = grid.reshape([2, 1, grid_size[1], grid_size[0]]) + + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0) + return pos_embed + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + return np.concatenate([emb_h, emb_w], axis=1) + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (M,) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2. + omega = 1. / 10000 ** omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + return np.concatenate([emb_sin, emb_cos], axis=1) + +# if __name__ == '__main__' : +# import pdb +# pdb.set_trace() +# model = PixArt_1D().to('cuda') +# # x: (N, T, patch_size 0 * patch_size 1 * C) +# th.manual_seed(233) +# # x = th.rand(1, 4*16, 16*4*16).to('cuda') +# x = th.rand(3, 8, 256, 16).to('cuda') + +# t = th.tensor([1, 2, 3]).to('cuda') +# c = th.rand(3, 20, 1024).to('cuda') +# c_mask = th.ones(3, 20).to('cuda') +# c_list = [c] +# c_mask_list = [c_mask] +# y = model.forward(x, t, c_list, c_mask_list) + # res = model.unpatchify(x) +# class DiTModel(nn.Module): +# """ +# The full UNet model with attention and timestep embedding. +# :param in_channels: channels in the input Tensor. +# :param model_channels: base channel count for the model. +# :param out_channels: channels in the output Tensor. +# :param num_res_blocks: number of residual blocks per downsample. +# :param attention_resolutions: a collection of downsample rates at which +# attention will take place. May be a set, list, or tuple. +# For example, if this contains 4, then at 4x downsampling, attention +# will be used. +# :param dropout: the dropout probability. +# :param channel_mult: channel multiplier for each level of the UNet. +# :param conv_resample: if True, use learned convolutions for upsampling and +# downsampling. +# :param dims: determines if the signal is 1D, 2D, or 3D. +# :param num_classes: if specified (as an int), then this model will be +# class-conditional with `num_classes` classes. 
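The sin-cos positional-embedding helpers above pair each grid coordinate with a bank of frequencies 1/10000^(2i/d) and concatenate sines and cosines; the 2-D variant encodes the height and width axes with half the channels each. A self-contained NumPy check of the 1-D building block (same formula as `get_1d_sincos_pos_embed_from_grid`, toy sizes):

```python
import numpy as np

def sincos_1d(embed_dim, pos):
    # omega_i = 1 / 10000**(2i/d), as in get_1d_sincos_pos_embed_from_grid above.
    omega = np.arange(embed_dim // 2, dtype=np.float64) / (embed_dim / 2.0)
    omega = 1.0 / 10000 ** omega                                  # (D/2,)
    out = np.einsum("m,d->md", pos.reshape(-1), omega)            # outer product of positions and freqs
    return np.concatenate([np.sin(out), np.cos(out)], axis=1)     # (M, D)

emb = sincos_1d(1152 // 2, np.arange(16, dtype=np.float32))
print(emb.shape)   # (16, 576) -> one axis' half of the 2-D embedding; the other axis fills the rest
```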
+# :param use_checkpoint: use gradient checkpointing to reduce memory usage. +# :param num_heads: the number of attention heads in each attention layer. +# :param num_heads_channels: if specified, ignore num_heads and instead use +# a fixed channel width per attention head. +# :param num_heads_upsample: works with num_heads to set a different number +# of heads for upsampling. Deprecated. +# :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. +# :param resblock_updown: use residual blocks for up/downsampling. +# :param use_new_attention_order: use a different attention pattern for potentially +# increased efficiency. +# """ + +# def __init__( +# self, +# input_size, +# patch_size, +# overlap, +# in_channels, +# embed_dim, +# model_channels, +# out_channels, +# dims=2, +# extra_film_condition_dim=None, +# use_checkpoint=False, +# use_fp16=False, +# num_heads=-1, +# num_head_channels=-1, +# use_scale_shift_norm=False, +# use_new_attention_order=False, +# transformer_depth=1, # custom transformer support +# context_dim=None, # custom transformer support +# n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model +# legacy=True, +# ): +# super().__init__() +# self.x_embedder = PatchEmbed(input_size, patch_size, overlap, in_channels, embed_dim, bias=True) +# num_patches = self.x_embedder.num_patches +# self.pos_embed = nn.Parameter(th.zeros(1, num_patches, embed_dim), requires_grad=False) +# self.blocks = nn.ModuleList([ +# DiTBlock_crossattn +# ]) + +# def convert_to_fp16(self): +# """ +# Convert the torso of the model to float16. +# """ +# # self.input_blocks.apply(convert_module_to_f16) +# # self.middle_block.apply(convert_module_to_f16) +# # self.output_blocks.apply(convert_module_to_f16) + +# def convert_to_fp32(self): +# """ +# Convert the torso of the model to float32. +# """ +# # self.input_blocks.apply(convert_module_to_f32) +# # self.middle_block.apply(convert_module_to_f32) +# # self.output_blocks.apply(convert_module_to_f32) + +# def forward( +# self, +# x, +# timesteps=None, +# y=None, +# context_list=None, +# context_attn_mask_list=None, +# **kwargs, +# ): +# """ +# Apply the model to an input batch. +# :param x: an [N x C x ...] Tensor of inputs. +# :param timesteps: a 1-D batch of timesteps. +# :param context: conditioning plugged in via crossattn +# :param y: an [N] Tensor of labels, if class-conditional. an [N, extra_film_condition_dim] Tensor if film-embed conditional +# :return: an [N x C x ...] Tensor of outputs. +# """ + +# x = self.x_embedder(x) + self.pos_embed # (N, T, D), where T = H * W / patch_size ** 2 +# t = self.t_embedder(timesteps) # (N, D) +# y = self.y_embedder(y, self.training) # (N, D) +# c = t + y # (N, D) +# for block in self.blocks: +# x = block(x, c) # (N, T, D) +# x = self.final_layer(x, c) # (N, T, patch_size ** 2 * out_channels) +# x = self.unpatchify(x) # (N, out_channels, H, W) + + diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/PixArt_blocks.py b/qa_mdt/audioldm_train/modules/diffusionmodules/PixArt_blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..4e73a207119912cd9217832b0f57c71ca28c2275 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/PixArt_blocks.py @@ -0,0 +1,419 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
+# -------------------------------------------------------- +# References: +# GLIDE: https://github.com/openai/glide-text2im +# MAE: https://github.com/facebookresearch/mae/blob/main/models_mae.py +# -------------------------------------------------------- +import math +import torch +import torch.nn as nn +from timm.models.vision_transformer import Mlp, Attention as Attention_ +from einops import rearrange, repeat +import xformers.ops + +from .utils import add_decomposed_rel_pos + + +def modulate(x, shift, scale): + return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1) + + +def t2i_modulate(x, shift, scale): + return x * (1 + scale) + shift + + +class MultiHeadCrossAttention(nn.Module): + def __init__(self, d_model, num_heads, attn_drop=0., proj_drop=0., **block_kwargs): + super(MultiHeadCrossAttention, self).__init__() + assert d_model % num_heads == 0, "d_model must be divisible by num_heads" + + self.d_model = d_model + self.num_heads = num_heads + self.head_dim = d_model // num_heads + + self.q_linear = nn.Linear(d_model, d_model) + self.kv_linear = nn.Linear(d_model, d_model*2) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(d_model, d_model) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x, cond, mask=None): + # query/value: img tokens; key: condition; mask: if padding tokens + B, N, C = x.shape + + q = self.q_linear(x).view(1, -1, self.num_heads, self.head_dim) + # import pdb + # pdb.set_trace() + kv = self.kv_linear(cond).view(1, -1, 2, self.num_heads, self.head_dim) + k, v = kv.unbind(2) + attn_bias = None + assert mask is not None + # import pdb + # pdb.set_trace() + attn_bias = xformers.ops.fmha.BlockDiagonalMask.from_seqlens([N] * B, mask) + + # attn_bias = torch.zeros([B * self.num_heads, q.shape[1], k.shape[1]], dtype=q.dtype, device=q.device) + # import pdb + # pdb.set_trace() + # attn_bias.masked_fill_(mask.squeeze(1).repeat(self.num_heads, 1, 1) == 0, float('-inf')) + + x = xformers.ops.memory_efficient_attention(q, k, v, p=self.attn_drop.p, attn_bias=attn_bias) + + x = x.view(B, -1, C) + x = self.proj(x) + x = self.proj_drop(x) + + # q = self.q_linear(x).reshape(B, -1, self.num_heads, self.head_dim) + # kv = self.kv_linear(cond).reshape(B, -1, 2, self.num_heads, self.head_dim) + # k, v = kv.unbind(2) + # attn_bias = None + # if mask is not None: + + + + # x = xformers.ops.memory_efficient_attention(q, k, v, p=self.attn_drop.p, attn_bias=attn_bias) + # x = x.contiguous().reshape(B, -1, C) + # x = self.proj(x) + # x = self.proj_drop(x) + + return x + + +class WindowAttention(Attention_): + """Multi-head Attention block with relative position embeddings.""" + + def __init__( + self, + dim, + num_heads=8, + qkv_bias=True, + use_rel_pos=False, + rel_pos_zero_init=True, + input_size=None, + **block_kwargs, + ): + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + qkv_bias (bool: If True, add a learnable bias to query, key, value. + rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + input_size (int or None): Input resolution for calculating the relative positional + parameter size. 
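`MultiHeadCrossAttention` flattens the whole batch into a single packed sequence (batch dimension 1) and relies on a block-diagonal attention bias so that the image tokens of sample i can only attend to the caption tokens of sample i; this is how captions of different lengths are handled without padding. A rough pure-PyTorch sketch of the bias that the xformers `BlockDiagonalMask.from_seqlens` call builds, with assumed toy lengths:

```python
import torch

N = 3                                 # image tokens per sample (equal across the batch)
y_lens = [5, 2]                       # caption lengths per sample after masking
B = len(y_lens)

# Additive bias of shape (B*N, sum(y_lens)): 0 inside a sample's own block, -inf elsewhere.
bias = torch.full((B * N, sum(y_lens)), float("-inf"))
col = 0
for i, length in enumerate(y_lens):
    bias[i * N:(i + 1) * N, col:col + length] = 0.0
    col += length

scores = torch.randn(B * N, sum(y_lens)) + bias      # pretend these are q·kᵀ logits
attn = scores.softmax(dim=-1)
# Sample 0's image tokens put zero weight on sample 1's caption tokens:
print(torch.allclose(attn[:N, y_lens[0]:].sum(), torch.tensor(0.0)))   # True
```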
+ """ + super().__init__(dim, num_heads=num_heads, qkv_bias=qkv_bias, **block_kwargs) + + self.use_rel_pos = use_rel_pos + if self.use_rel_pos: + # initialize relative positional embeddings + self.rel_pos_h = nn.Parameter(torch.zeros(2 * input_size[0] - 1, self.head_dim)) + self.rel_pos_w = nn.Parameter(torch.zeros(2 * input_size[1] - 1, self.head_dim)) + + if not rel_pos_zero_init: + nn.init.trunc_normal_(self.rel_pos_h, std=0.02) + nn.init.trunc_normal_(self.rel_pos_w, std=0.02) + + def forward(self, x, mask=None): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads) + q, k, v = qkv.unbind(2) + if use_fp32_attention := getattr(self, 'fp32_attention', False): + q, k, v = q.float(), k.float(), v.float() + + attn_bias = None + if mask is not None: + attn_bias = torch.zeros([B * self.num_heads, q.shape[1], k.shape[1]], dtype=q.dtype, device=q.device) + attn_bias.masked_fill_(mask.squeeze(1).repeat(self.num_heads, 1, 1) == 0, float('-inf')) + # import pdb + # pdb.set_trace() + x = xformers.ops.memory_efficient_attention(q, k, v, p=self.attn_drop.p, attn_bias=attn_bias) + + x = x.view(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +################################################################################# +# AMP attention with fp32 softmax to fix loss NaN problem during training # +################################################################################# +class Attention(Attention_): + def forward(self, x): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv.unbind(0) # make torchscript happy (cannot use tensor as tuple) + use_fp32_attention = getattr(self, 'fp32_attention', False) + if use_fp32_attention: + q, k = q.float(), k.float() + with torch.cuda.amp.autocast(enabled=not use_fp32_attention): + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class FinalLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class T2IFinalLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size[0] * patch_size[1] * out_channels, bias=True) + self.scale_shift_table = nn.Parameter(torch.randn(2, hidden_size) / hidden_size ** 0.5) + self.out_channels = out_channels + + def forward(self, x, t): + shift, scale = (self.scale_shift_table[None] + t[:, None]).chunk(2, dim=1) + x = t2i_modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + +class MaskFinalLayer(nn.Module): + """ + The final layer of PixArt. 
+ """ + + def __init__(self, final_hidden_size, c_emb_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(final_hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(final_hidden_size, patch_size * patch_size * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(c_emb_size, 2 * final_hidden_size, bias=True) + ) + def forward(self, x, t): + shift, scale = self.adaLN_modulation(t).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + + +class DecoderLayer(nn.Module): + """ + The final layer of PixArt. + """ + + def __init__(self, hidden_size, decoder_hidden_size): + super().__init__() + self.norm_decoder = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, decoder_hidden_size, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + def forward(self, x, t): + shift, scale = self.adaLN_modulation(t).chunk(2, dim=1) + x = modulate(self.norm_decoder(x), shift, scale) + x = self.linear(x) + return x + + +################################################################################# +# Embedding Layers for Timesteps and Class Labels # +################################################################################# +class TimestepEmbedder(nn.Module): + """ + Embeds scalar timesteps into vector representations. + """ + + def __init__(self, hidden_size, frequency_embedding_size=256): + super().__init__() + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, hidden_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + + @staticmethod + def timestep_embedding(t, dim, max_period=10000): + """ + Create sinusoidal timestep embeddings. + :param t: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an (N, D) Tensor of positional embeddings. + """ + # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32, device=t.device) / half) + args = t[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + return embedding + + def forward(self, t): + t_freq = self.timestep_embedding(t, self.frequency_embedding_size).to(self.dtype) + return self.mlp(t_freq) + + @property + def dtype(self): + # 返回模型参数的数据类型 + return next(self.parameters()).dtype + + +class SizeEmbedder(TimestepEmbedder): + """ + Embeds scalar timesteps into vector representations. 
+ """ + + def __init__(self, hidden_size, frequency_embedding_size=256): + super().__init__(hidden_size=hidden_size, frequency_embedding_size=frequency_embedding_size) + self.mlp = nn.Sequential( + nn.Linear(frequency_embedding_size, hidden_size, bias=True), + nn.SiLU(), + nn.Linear(hidden_size, hidden_size, bias=True), + ) + self.frequency_embedding_size = frequency_embedding_size + self.outdim = hidden_size + + def forward(self, s, bs): + if s.ndim == 1: + s = s[:, None] + assert s.ndim == 2 + if s.shape[0] != bs: + s = s.repeat(bs//s.shape[0], 1) + assert s.shape[0] == bs + b, dims = s.shape[0], s.shape[1] + s = rearrange(s, "b d -> (b d)") + s_freq = self.timestep_embedding(s, self.frequency_embedding_size).to(self.dtype) + s_emb = self.mlp(s_freq) + s_emb = rearrange(s_emb, "(b d) d2 -> b (d d2)", b=b, d=dims, d2=self.outdim) + return s_emb + + @property + def dtype(self): + # 返回模型参数的数据类型 + return next(self.parameters()).dtype + + +class LabelEmbedder(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. + """ + + def __init__(self, num_classes, hidden_size, dropout_prob): + super().__init__() + use_cfg_embedding = dropout_prob > 0 + self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding, hidden_size) + self.num_classes = num_classes + self.dropout_prob = dropout_prob + + def token_drop(self, labels, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. + """ + if force_drop_ids is None: + drop_ids = torch.rand(labels.shape[0]).cuda() < self.dropout_prob + else: + drop_ids = force_drop_ids == 1 + labels = torch.where(drop_ids, self.num_classes, labels) + return labels + + def forward(self, labels, train, force_drop_ids=None): + use_dropout = self.dropout_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + labels = self.token_drop(labels, force_drop_ids) + return self.embedding_table(labels) + + +class CaptionEmbedder(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. + """ + + def __init__(self, in_channels, hidden_size, uncond_prob, act_layer=nn.GELU(approximate='tanh'), token_num=120): + super().__init__() + self.y_proj = Mlp(in_features=in_channels, hidden_features=hidden_size, out_features=hidden_size, act_layer=act_layer, drop=0) + self.register_buffer("y_embedding", nn.Parameter(torch.randn(token_num, in_channels) / in_channels ** 0.5)) + self.uncond_prob = uncond_prob + + def token_drop(self, caption, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. + """ + if force_drop_ids is None: + drop_ids = torch.rand(caption.shape[0]).cuda() < self.uncond_prob + else: + drop_ids = force_drop_ids == 1 + caption = torch.where(drop_ids[:, None, None, None], self.y_embedding, caption) + return caption + + def forward(self, caption, train, force_drop_ids=None): + if train: + assert caption.shape[2:] == self.y_embedding.shape + use_dropout = self.uncond_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + caption = self.token_drop(caption, force_drop_ids) + caption = self.y_proj(caption) + return caption + + +class CaptionEmbedderDoubleBr(nn.Module): + """ + Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance. 
+ """ + + def __init__(self, in_channels, hidden_size, uncond_prob, act_layer=nn.GELU(approximate='tanh'), token_num=120): + super().__init__() + self.proj = Mlp(in_features=in_channels, hidden_features=hidden_size, out_features=hidden_size, act_layer=act_layer, drop=0) + self.embedding = nn.Parameter(torch.randn(1, in_channels) / 10 ** 0.5) + self.y_embedding = nn.Parameter(torch.randn(token_num, in_channels) / 10 ** 0.5) + self.uncond_prob = uncond_prob + + def token_drop(self, global_caption, caption, force_drop_ids=None): + """ + Drops labels to enable classifier-free guidance. + """ + if force_drop_ids is None: + drop_ids = torch.rand(global_caption.shape[0]).cuda() < self.uncond_prob + else: + drop_ids = force_drop_ids == 1 + global_caption = torch.where(drop_ids[:, None], self.embedding, global_caption) + caption = torch.where(drop_ids[:, None, None, None], self.y_embedding, caption) + return global_caption, caption + + def forward(self, caption, train, force_drop_ids=None): + assert caption.shape[2: ] == self.y_embedding.shape + global_caption = caption.mean(dim=2).squeeze() + use_dropout = self.uncond_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + global_caption, caption = self.token_drop(global_caption, caption, force_drop_ids) + y_embed = self.proj(global_caption) + return y_embed, caption diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__init__.py b/qa_mdt/audioldm_train/modules/diffusionmodules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/PixArt.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/PixArt.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..87088ed0906ce005870deff18185dc3ce166cdc5 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/PixArt.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/PixArt_blocks.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/PixArt_blocks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..31c5077345eb8e7e6a6be1df6b5ea8b37b68ec54 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/PixArt_blocks.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7a14fb13a2781f1af4238b7a00f8934b53030f05 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/attention.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/attention.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3d3e9143b7b10572ada8457fa2b542e99d45abb8 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/attention.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/distributions.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/distributions.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..c850006ea6798f724a9a47652f6b86c0938a1ec9 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/distributions.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/ema.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/ema.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b14ceb8d176b17356f281c43925ba09b87f9a377 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/ema.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/model.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e95dcd5f6d13f2e4bd136faa8b4dc0dde2d3751 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/model.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/utils.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5a13dc54209148099b09d0a265e05082ced30274 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/__pycache__/utils.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/attention.py b/qa_mdt/audioldm_train/modules/diffusionmodules/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..f0a073f70887693918f25bf67d305ef0551699c0 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/attention.py @@ -0,0 +1,512 @@ +from inspect import isfunction +import math +import torch +import torch.nn.functional as F +from torch import nn, einsum +from einops import rearrange, repeat + +from qa_mdt.audioldm_train.utilities.diffusion_util import checkpoint + + +def exists(val): + return val is not None + + +def uniq(arr): + return {el: True for el in arr}.keys() + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def max_neg_value(t): + return -torch.finfo(t.dtype).max + + +def init_(tensor): + dim = tensor.shape[-1] + std = 1 / math.sqrt(dim) + tensor.uniform_(-std, std) + return tensor + + +# feedforward +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.proj = nn.Linear(dim_in, dim_out * 2) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.0): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = ( + nn.Sequential(nn.Linear(dim, inner_dim), nn.GELU()) + if not glu + else GEGLU(dim, inner_dim) + ) + + self.net = nn.Sequential( + project_in, nn.Dropout(dropout), nn.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. 
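+
+    Example (sketch, assuming a plain conv layer):
+        proj = zero_module(nn.Conv2d(64, 64, kernel_size=1))
+        # proj.weight and proj.bias are now all zeros; the same module is returned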
+ """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def Normalize(in_channels): + return torch.nn.GroupNorm( + num_groups=32, num_channels=in_channels, eps=1e-6, affine=True + ) + + +class LinearAttention(nn.Module): + def __init__(self, dim, heads=4, dim_head=32): + super().__init__() + self.heads = heads + hidden_dim = dim_head * heads + self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) + self.to_out = nn.Conv2d(hidden_dim, dim, 1) + + def forward(self, x): + b, c, h, w = x.shape + qkv = self.to_qkv(x) + q, k, v = rearrange( + qkv, "b (qkv heads c) h w -> qkv b heads c (h w)", heads=self.heads, qkv=3 + ) + k = k.softmax(dim=-1) + context = torch.einsum("bhdn,bhen->bhde", k, v) + out = torch.einsum("bhde,bhdn->bhen", context, q) + out = rearrange( + out, "b heads c (h w) -> b (heads c) h w", heads=self.heads, h=h, w=w + ) + return self.to_out(out) + + +class SpatialSelfAttention(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.k = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.v = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.proj_out = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = rearrange(q, "b c h w -> b (h w) c") + k = rearrange(k, "b c h w -> b c (h w)") + w_ = torch.einsum("bij,bjk->bik", q, k) + + w_ = w_ * (int(c) ** (-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = rearrange(v, "b c h w -> b c (h w)") + w_ = rearrange(w_, "b i j -> b j i") + h_ = torch.einsum("bij,bjk->bik", v, w_) + h_ = rearrange(h_, "b c (h w) -> b c h w", h=h) + h_ = self.proj_out(h_) + + return x + h_ + + +# class CrossAttention(nn.Module): +# """ +# ### Cross Attention Layer +# This falls-back to self-attention when conditional embeddings are not specified. 
+# """ + +# use_flash_attention: bool = True + +# # use_flash_attention: bool = False +# def __init__( +# self, +# query_dim, +# context_dim=None, +# heads=8, +# dim_head=64, +# dropout=0.0, +# is_inplace: bool = True, +# ): +# # def __init__(self, d_model: int, d_cond: int, n_heads: int, d_head: int, is_inplace: bool = True): +# """ +# :param d_model: is the input embedding size +# :param n_heads: is the number of attention heads +# :param d_head: is the size of a attention head +# :param d_cond: is the size of the conditional embeddings +# :param is_inplace: specifies whether to perform the attention softmax computation inplace to +# save memory +# """ +# super().__init__() + +# self.is_inplace = is_inplace +# self.n_heads = heads +# self.d_head = dim_head + +# # Attention scaling factor +# self.scale = dim_head**-0.5 + +# # The normal self-attention layer +# if context_dim is None: +# context_dim = query_dim + +# # Query, key and value mappings +# d_attn = dim_head * heads +# self.to_q = nn.Linear(query_dim, d_attn, bias=False) +# self.to_k = nn.Linear(context_dim, d_attn, bias=False) +# self.to_v = nn.Linear(context_dim, d_attn, bias=False) + +# # Final linear layer +# self.to_out = nn.Sequential(nn.Linear(d_attn, query_dim), nn.Dropout(dropout)) + +# # Setup [flash attention](https://github.com/HazyResearch/flash-attention). +# # Flash attention is only used if it's installed +# # and `CrossAttention.use_flash_attention` is set to `True`. +# try: +# # You can install flash attention by cloning their Github repo, +# # [https://github.com/HazyResearch/flash-attention](https://github.com/HazyResearch/flash-attention) +# # and then running `python setup.py install` +# from flash_attn.flash_attention import FlashAttention + +# self.flash = FlashAttention() +# # Set the scale for scaled dot-product attention. 
+# self.flash.softmax_scale = self.scale +# # Set to `None` if it's not installed +# except ImportError: +# self.flash = None + +# def forward(self, x, context=None, mask=None): +# """ +# :param x: are the input embeddings of shape `[batch_size, height * width, d_model]` +# :param cond: is the conditional embeddings of shape `[batch_size, n_cond, d_cond]` +# """ + +# # If `cond` is `None` we perform self attention +# has_cond = context is not None +# if not has_cond: +# context = x + +# # Get query, key and value vectors +# q = self.to_q(x) +# k = self.to_k(context) +# v = self.to_v(context) + +# # Use flash attention if it's available and the head size is less than or equal to `128` +# if ( +# CrossAttention.use_flash_attention +# and self.flash is not None +# and not has_cond +# and self.d_head <= 128 +# ): +# return self.flash_attention(q, k, v) +# # Otherwise, fallback to normal attention +# else: +# return self.normal_attention(q, k, v) + +# def flash_attention(self, q: torch.Tensor, k: torch.Tensor, v: torch.Tensor): +# """ +# #### Flash Attention +# :param q: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` +# :param k: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` +# :param v: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` +# """ + +# # Get batch size and number of elements along sequence axis (`width * height`) +# batch_size, seq_len, _ = q.shape + +# # Stack `q`, `k`, `v` vectors for flash attention, to get a single tensor of +# # shape `[batch_size, seq_len, 3, n_heads * d_head]` +# qkv = torch.stack((q, k, v), dim=2) +# # Split the heads +# qkv = qkv.view(batch_size, seq_len, 3, self.n_heads, self.d_head) + +# # Flash attention works for head sizes `32`, `64` and `128`, so we have to pad the heads to +# # fit this size. 
+# if self.d_head <= 32: +# pad = 32 - self.d_head +# elif self.d_head <= 64: +# pad = 64 - self.d_head +# elif self.d_head <= 128: +# pad = 128 - self.d_head +# else: +# raise ValueError(f"Head size ${self.d_head} too large for Flash Attention") + +# # Pad the heads +# if pad: +# qkv = torch.cat( +# (qkv, qkv.new_zeros(batch_size, seq_len, 3, self.n_heads, pad)), dim=-1 +# ) + +# # Compute attention +# # $$\underset{seq}{softmax}\Bigg(\frac{Q K^\top}{\sqrt{d_{key}}}\Bigg)V$$ +# # This gives a tensor of shape `[batch_size, seq_len, n_heads, d_padded]` +# # TODO here I add the dtype changing +# out, _ = self.flash(qkv.type(torch.float16)) +# # Truncate the extra head size +# out = out[:, :, :, : self.d_head].float() +# # Reshape to `[batch_size, seq_len, n_heads * d_head]` +# out = out.reshape(batch_size, seq_len, self.n_heads * self.d_head) + +# # Map to `[batch_size, height * width, d_model]` with a linear layer +# return self.to_out(out) + +# def normal_attention(self, q: torch.Tensor, k: torch.Tensor, v: torch.Tensor): +# """ +# #### Normal Attention + +# :param q: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` +# :param k: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` +# :param v: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` +# """ + +# # Split them to heads of shape `[batch_size, seq_len, n_heads, d_head]` +# q = q.view(*q.shape[:2], self.n_heads, -1) # [bs, 64, 20, 32] +# k = k.view(*k.shape[:2], self.n_heads, -1) # [bs, 1, 20, 32] +# v = v.view(*v.shape[:2], self.n_heads, -1) + +# # Calculate attention $\frac{Q K^\top}{\sqrt{d_{key}}}$ +# attn = torch.einsum("bihd,bjhd->bhij", q, k) * self.scale + +# # Compute softmax +# # $$\underset{seq}{softmax}\Bigg(\frac{Q K^\top}{\sqrt{d_{key}}}\Bigg)$$ +# if self.is_inplace: +# half = attn.shape[0] // 2 +# attn[half:] = attn[half:].softmax(dim=-1) +# attn[:half] = attn[:half].softmax(dim=-1) +# else: +# attn = attn.softmax(dim=-1) + +# # Compute attention output +# # $$\underset{seq}{softmax}\Bigg(\frac{Q K^\top}{\sqrt{d_{key}}}\Bigg)V$$ +# # attn: [bs, 20, 64, 1] +# # v: [bs, 1, 20, 32] +# out = torch.einsum("bhij,bjhd->bihd", attn, v) +# # Reshape to `[batch_size, height * width, n_heads * d_head]` +# out = out.reshape(*out.shape[:2], -1) +# # Map to `[batch_size, height * width, d_model]` with a linear layer +# return self.to_out(out) + + +class CrossAttention(nn.Module): + def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.0): + super().__init__() + inner_dim = dim_head * heads + self.id=inner_dim + context_dim = default(context_dim, query_dim) + self.context_dim = context_dim + self.qdim=query_dim + self.scale = dim_head**-0.5 + self.heads = heads + + self.to_q = nn.Linear(query_dim, inner_dim, bias=False) + self.to_k = nn.Linear(context_dim, inner_dim, bias=False) + self.to_v = nn.Linear(context_dim, inner_dim, bias=False) + + self.to_out = nn.Sequential( + nn.Linear(inner_dim, query_dim), nn.Dropout(dropout) + ) + + def forward(self, x, context=None, mask=None): + h = self.heads + q = self.to_q(x) + context = default(context, x) + + k = self.to_k(context) + v = self.to_v(context) + + q, k, v = map(lambda t: rearrange(t, "b n (h d) -> (b h) n d", h=h), (q, k, v)) + + sim = einsum("b i d, b j d -> b i j", q, k) * self.scale + + if exists(mask): + mask = rearrange(mask, "b ... 
-> b (...)") + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, "b j -> (b h) () j", h=h) + sim.masked_fill_(~(mask == 1), max_neg_value) + + # attention, what we cannot get enough of + attn = sim.softmax(dim=-1) + + out = einsum("b i j, b j d -> b i d", attn, v) + out = rearrange(out, "(b h) n d -> b n (h d)", h=h) + return self.to_out(out) + +class CrossAttention_1D(nn.Module): + def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.0): + super().__init__() + inner_dim = dim_head * heads + self.id=inner_dim + context_dim = default(context_dim, query_dim) + self.context_dim = context_dim + self.qdim=query_dim + self.scale = dim_head**-0.5 + self.heads = heads + + self.to_q = nn.Linear(query_dim, inner_dim, bias=False) + self.to_k = nn.Linear(context_dim, inner_dim, bias=False) + self.to_v = nn.Linear(context_dim, inner_dim, bias=False) + + self.to_out = nn.Sequential( + nn.Linear(inner_dim, query_dim), nn.Dropout(dropout) + ) + + def forward(self, x, context=None, mask=None): + h = self.heads + q = self.to_q(x) + context = default(context, x) + + k = self.to_k(context) + v = self.to_v(context) + + q, k, v = map(lambda t: rearrange(t, "b n (h d) -> (b h) n d", h=h), (q, k, v)) + + sim = einsum("b i d, b j d -> b i j", q, k) * self.scale + + if exists(mask): + mask = rearrange(mask, "b ... -> b (...)") + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, "b j -> (b h) () j", h=h) + sim.masked_fill_(~(mask == 1), max_neg_value) + + # attention, what we cannot get enough of + attn = sim.softmax(dim=-1) + + out = einsum("b i j, b j d -> b i d", attn, v) + out = rearrange(out, "(b h) n d -> b n (h d)", h=h) + return self.to_out(out) + + +class BasicTransformerBlock(nn.Module): + def __init__( + self, + dim, + n_heads, + d_head, + dropout=0.0, + context_dim=None, + gated_ff=True, + checkpoint=True, + ): + super().__init__() + self.attn1 = CrossAttention( + query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout + ) # is a self-attention + self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff) + self.attn2 = CrossAttention( + query_dim=dim, + context_dim=context_dim, + heads=n_heads, + dim_head=d_head, + dropout=dropout, + ) # is self-attn if context is none + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + self.norm3 = nn.LayerNorm(dim) + self.checkpoint = checkpoint + + def forward(self, x, context=None, mask=None): + if context is None: + return checkpoint(self._forward, (x,), self.parameters(), self.checkpoint) + else: + return checkpoint( + self._forward, (x, context, mask), self.parameters(), self.checkpoint + ) + + def _forward(self, x, context=None, mask=None): + x = self.attn1(self.norm1(x)) + x + x = self.attn2(self.norm2(x), context=context, mask=mask) + x + x = self.ff(self.norm3(x)) + x + return x + + +class SpatialTransformer(nn.Module): + """ + Transformer block for image-like data. + First, project the input (aka embedding) + and reshape to b, t, d. + Then apply standard transformer action. 
+ Finally, reshape to image + """ + + def __init__( + self, + in_channels, + n_heads, + d_head, + depth=1, + dropout=0.0, + context_dim=None, + ): + super().__init__() + + context_dim = context_dim + + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = Normalize(in_channels) + + self.proj_in = nn.Conv2d( + in_channels, inner_dim, kernel_size=1, stride=1, padding=0 + ) + + self.transformer_blocks = nn.ModuleList( + [ + BasicTransformerBlock( + inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim + ) + for d in range(depth) + ] + ) + + self.proj_out = zero_module( + nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0) + ) + + def forward(self, x, context=None, mask=None): + # note: if no context is given, cross-attention defaults to self-attention + b, c, h, w = x.shape + x_in = x + x = self.norm(x) + x = self.proj_in(x) + x = rearrange(x, "b c h w -> b (h w) c") + for block in self.transformer_blocks: + x = block(x, context=context, mask=mask) + x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w) + x = self.proj_out(x) + return x + x_in diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/distributions.py b/qa_mdt/audioldm_train/modules/diffusionmodules/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..58eb535e7769f402169ddff77ee45c96ba3650d9 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/distributions.py @@ -0,0 +1,102 @@ +import torch +import numpy as np + + +class AbstractDistribution: + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + +class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to( + device=self.parameters.device + ) + + def sample(self): + x = self.mean + self.std * torch.randn(self.mean.shape).to( + device=self.parameters.device + ) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.0]) + else: + if other is None: + return 0.5 * torch.mean( + torch.pow(self.mean, 2) + self.var - 1.0 - self.logvar, + dim=[1, 2, 3], + ) + else: + return 0.5 * torch.mean( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var + - 1.0 + - self.logvar + + other.logvar, + dim=[1, 2, 3], + ) + + def nll(self, sample, dims=[1, 2, 3]): + if self.deterministic: + return torch.Tensor([0.0]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims, + ) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. 
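+
+    Example (sketch; at least one argument must already be a tensor):
+        kl = normal_kl(torch.zeros(4), torch.zeros(4), torch.zeros(4), torch.zeros(4))
+        # KL between two identical unit Gaussians is 0 elementwise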
+ """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). + logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/ema.py b/qa_mdt/audioldm_train/modules/diffusionmodules/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..880ca3d205d9b4d7450e146930a93f2e63c58b70 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/ema.py @@ -0,0 +1,82 @@ +import torch +from torch import nn + + +class LitEma(nn.Module): + def __init__(self, model, decay=0.9999, use_num_upates=True): + super().__init__() + if decay < 0.0 or decay > 1.0: + raise ValueError("Decay must be between 0 and 1") + + self.m_name2s_name = {} + self.register_buffer("decay", torch.tensor(decay, dtype=torch.float32)) + self.register_buffer( + "num_updates", + torch.tensor(0, dtype=torch.int) + if use_num_upates + else torch.tensor(-1, dtype=torch.int), + ) + + for name, p in model.named_parameters(): + if p.requires_grad: + # remove as '.'-character is not allowed in buffers + s_name = name.replace(".", "") + self.m_name2s_name.update({name: s_name}) + self.register_buffer(s_name, p.clone().detach().data) + + self.collected_params = [] + + def forward(self, model): + decay = self.decay + + if self.num_updates >= 0: + self.num_updates += 1 + decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates)) + + one_minus_decay = 1.0 - decay + + with torch.no_grad(): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + + for key in m_param: + if m_param[key].requires_grad: + sname = self.m_name2s_name[key] + shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) + shadow_params[sname].sub_( + one_minus_decay * (shadow_params[sname] - m_param[key]) + ) + else: + assert not key in self.m_name2s_name + + def copy_to(self, model): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + for key in m_param: + if m_param[key].requires_grad: + m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) + else: + assert not key in self.m_name2s_name + + def store(self, parameters): + """ + Save the current parameters for restoring later. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + temporarily stored. + """ + self.collected_params = [param.clone() for param in parameters] + + def restore(self, parameters): + """ + Restore the parameters stored with the `store` method. + Useful to validate the model with EMA parameters without affecting the + original optimization process. Store the parameters before the + `copy_to` method. After validation (or model saving), use this to + restore the former parameters. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + updated with the stored parameters. 
+ """ + for c_param, param in zip(self.collected_params, parameters): + param.data.copy_(c_param.data) diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/model.py b/qa_mdt/audioldm_train/modules/diffusionmodules/model.py new file mode 100644 index 0000000000000000000000000000000000000000..2cc13905b3a997938e50dbdf1e33dcf6b50502a6 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/model.py @@ -0,0 +1,1069 @@ +# pytorch_diffusion + derived encoder decoder +import math +import torch +import torch.nn as nn +import numpy as np +from einops import rearrange + +from qa_mdt.audioldm_train.utilities.model_util import instantiate_from_config +from qa_mdt.audioldm_train.modules.diffusionmodules.attention import LinearAttention + + +def get_timestep_embedding(timesteps, embedding_dim): + """ + This matches the implementation in Denoising Diffusion Probabilistic Models: + From Fairseq. + Build sinusoidal embeddings. + This matches the implementation in tensor2tensor, but differs slightly + from the description in Section 3.5 of "Attention Is All You Need". + """ + assert len(timesteps.shape) == 1 + + half_dim = embedding_dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb) + emb = emb.to(device=timesteps.device) + emb = timesteps.float()[:, None] * emb[None, :] + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) + if embedding_dim % 2 == 1: # zero pad + emb = torch.nn.functional.pad(emb, (0, 1, 0, 0)) + return emb + + +def nonlinearity(x): + # swish + return x * torch.sigmoid(x) + + +def Normalize(in_channels, num_groups=32): + return torch.nn.GroupNorm( + num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True + ) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") + if self.with_conv: + x = self.conv(x) + return x + + +class UpsampleTimeStride4(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=5, stride=1, padding=2 + ) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=(4.0, 2.0), mode="nearest") + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # Do time downsampling here + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=2, padding=0 + ) + + def forward(self, x): + if self.with_conv: + pad = (0, 1, 0, 1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class DownsampleTimeStride4(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # Do time downsampling here + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=5, stride=(4, 2), padding=1 + ) + + def forward(self, x): + if self.with_conv: + pad = (0, 1, 
0, 1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=(4, 2), stride=(4, 2)) + return x + + +class ResnetBlock(nn.Module): + def __init__( + self, + *, + in_channels, + out_channels=None, + conv_shortcut=False, + dropout, + temb_channels=512, + ): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + if temb_channels > 0: + self.temb_proj = torch.nn.Linear(temb_channels, out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + else: + self.nin_shortcut = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:, :, None, None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x + h + + +class LinAttnBlock(LinearAttention): + """to match AttnBlock usage""" + + def __init__(self, in_channels): + super().__init__(dim=in_channels, heads=1, dim_head=in_channels) + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.k = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.v = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.proj_out = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = q.reshape(b, c, h * w).contiguous() + q = q.permute(0, 2, 1).contiguous() # b,hw,c + k = k.reshape(b, c, h * w).contiguous() # b,c,hw + w_ = torch.bmm(q, k).contiguous() # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + w_ = w_ * (int(c) ** (-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b, c, h * w).contiguous() + w_ = w_.permute(0, 2, 1).contiguous() # b,hw,hw (first hw of k, second of q) + h_ = torch.bmm( + v, w_ + ).contiguous() # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = h_.reshape(b, c, h, w).contiguous() + + h_ = self.proj_out(h_) + + return x + h_ + + +def make_attn(in_channels, attn_type="vanilla"): + assert attn_type in ["vanilla", "linear", "none"], f"attn_type {attn_type} unknown" + print(f"making attention of type '{attn_type}' with {in_channels} in_channels") + if attn_type == "vanilla": + return AttnBlock(in_channels) + elif attn_type == "none": + return nn.Identity(in_channels) + else: + return 
LinAttnBlock(in_channels) + + +class Model(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + use_timestep=True, + use_linear_attn=False, + attn_type="vanilla", + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = self.ch * 4 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + self.use_timestep = use_timestep + if self.use_timestep: + # timestep embedding + self.temb = nn.Module() + self.temb.dense = nn.ModuleList( + [ + torch.nn.Linear(self.ch, self.temb_ch), + torch.nn.Linear(self.temb_ch, self.temb_ch), + ] + ) + + # downsampling + self.conv_in = torch.nn.Conv2d( + in_channels, self.ch, kernel_size=3, stride=1, padding=1 + ) + + curr_res = resolution + in_ch_mult = (1,) + tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch * in_ch_mult[i_level] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions - 1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + skip_in = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + if i_block == self.num_res_blocks: + skip_in = ch * in_ch_mult[i_level] + block.append( + ResnetBlock( + in_channels=block_in + skip_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, out_ch, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x, t=None, context=None): + # assert x.shape[2] == x.shape[3] == self.resolution + if context is not None: + # assume aligned context, cat along channel axis + x = torch.cat((x, context), dim=1) + if self.use_timestep: + # timestep embedding + assert t is not None + temb = get_timestep_embedding(t, self.ch) + temb = self.temb.dense[0](temb) + temb = nonlinearity(temb) + temb = self.temb.dense[1](temb) + else: + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in 
range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions - 1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block]( + torch.cat([h, hs.pop()], dim=1), temb + ) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + def get_last_layer(self): + return self.conv_out.weight + + +class Encoder(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + z_channels, + double_z=True, + use_linear_attn=False, + attn_type="vanilla", + downsample_time_stride4_levels=[], + **ignore_kwargs, + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.downsample_time_stride4_levels = downsample_time_stride4_levels + + if len(self.downsample_time_stride4_levels) > 0: + assert max(self.downsample_time_stride4_levels) < self.num_resolutions, ( + "The level to perform downsample 4 operation need to be smaller than the total resolution number %s" + % str(self.num_resolutions) + ) + + # downsampling + self.conv_in = torch.nn.Conv2d( + in_channels, self.ch, kernel_size=3, stride=1, padding=1 + ) + + curr_res = resolution + in_ch_mult = (1,) + tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch * in_ch_mult[i_level] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions - 1: + if i_level in self.downsample_time_stride4_levels: + down.downsample = DownsampleTimeStride4(block_in, resamp_with_conv) + else: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, + 2 * z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1, + ) + + def forward(self, x): + # timestep embedding + temb = None + # downsampling + hs = 
[self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions - 1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + z_channels, + give_pre_end=False, + tanh_out=False, + use_linear_attn=False, + downsample_time_stride4_levels=[], + attn_type="vanilla", + **ignorekwargs, + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + self.downsample_time_stride4_levels = downsample_time_stride4_levels + + if len(self.downsample_time_stride4_levels) > 0: + assert max(self.downsample_time_stride4_levels) < self.num_resolutions, ( + "The level to perform downsample 4 operation need to be smaller than the total resolution number %s" + % str(self.num_resolutions) + ) + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,) + tuple(ch_mult) + block_in = ch * ch_mult[self.num_resolutions - 1] + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.z_shape = (1, z_channels, curr_res, curr_res) + print( + "Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape) + ) + ) + + # z to block_in + self.conv_in = torch.nn.Conv2d( + z_channels, block_in, kernel_size=3, stride=1, padding=1 + ) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + if i_level - 1 in self.downsample_time_stride4_levels: + up.upsample = UpsampleTimeStride4(block_in, resamp_with_conv) + else: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, out_ch, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, z): + # assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to 
block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + if self.tanh_out: + h = torch.tanh(h) + return h + + +class SimpleDecoder(nn.Module): + def __init__(self, in_channels, out_channels, *args, **kwargs): + super().__init__() + self.model = nn.ModuleList( + [ + nn.Conv2d(in_channels, in_channels, 1), + ResnetBlock( + in_channels=in_channels, + out_channels=2 * in_channels, + temb_channels=0, + dropout=0.0, + ), + ResnetBlock( + in_channels=2 * in_channels, + out_channels=4 * in_channels, + temb_channels=0, + dropout=0.0, + ), + ResnetBlock( + in_channels=4 * in_channels, + out_channels=2 * in_channels, + temb_channels=0, + dropout=0.0, + ), + nn.Conv2d(2 * in_channels, in_channels, 1), + Upsample(in_channels, with_conv=True), + ] + ) + # end + self.norm_out = Normalize(in_channels) + self.conv_out = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x): + for i, layer in enumerate(self.model): + if i in [1, 2, 3]: + x = layer(x, None) + else: + x = layer(x) + + h = self.norm_out(x) + h = nonlinearity(h) + x = self.conv_out(h) + return x + + +class UpsampleDecoder(nn.Module): + def __init__( + self, + in_channels, + out_channels, + ch, + num_res_blocks, + resolution, + ch_mult=(2, 2), + dropout=0.0, + ): + super().__init__() + # upsampling + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + block_in = in_channels + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.res_blocks = nn.ModuleList() + self.upsample_blocks = nn.ModuleList() + for i_level in range(self.num_resolutions): + res_block = [] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + res_block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + self.res_blocks.append(nn.ModuleList(res_block)) + if i_level != self.num_resolutions - 1: + self.upsample_blocks.append(Upsample(block_in, True)) + curr_res = curr_res * 2 + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, out_channels, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x): + # upsampling + h = x + for k, i_level in enumerate(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.res_blocks[i_level][i_block](h, None) + if i_level != self.num_resolutions - 1: + h = self.upsample_blocks[k](h) + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class LatentRescaler(nn.Module): + def __init__(self, factor, in_channels, mid_channels, out_channels, depth=2): + super().__init__() + # residual block, interpolate, residual block + self.factor = factor + self.conv_in = nn.Conv2d( + in_channels, mid_channels, kernel_size=3, stride=1, padding=1 + ) + self.res_block1 = nn.ModuleList( + [ + ResnetBlock( + in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0, + ) + for _ in range(depth) + ] + ) + 
self.attn = AttnBlock(mid_channels) + self.res_block2 = nn.ModuleList( + [ + ResnetBlock( + in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0, + ) + for _ in range(depth) + ] + ) + + self.conv_out = nn.Conv2d( + mid_channels, + out_channels, + kernel_size=1, + ) + + def forward(self, x): + x = self.conv_in(x) + for block in self.res_block1: + x = block(x, None) + x = torch.nn.functional.interpolate( + x, + size=( + int(round(x.shape[2] * self.factor)), + int(round(x.shape[3] * self.factor)), + ), + ) + x = self.attn(x).contiguous() + for block in self.res_block2: + x = block(x, None) + x = self.conv_out(x) + return x + + +class MergedRescaleEncoder(nn.Module): + def __init__( + self, + in_channels, + ch, + resolution, + out_ch, + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + ch_mult=(1, 2, 4, 8), + rescale_factor=1.0, + rescale_module_depth=1, + ): + super().__init__() + intermediate_chn = ch * ch_mult[-1] + self.encoder = Encoder( + in_channels=in_channels, + num_res_blocks=num_res_blocks, + ch=ch, + ch_mult=ch_mult, + z_channels=intermediate_chn, + double_z=False, + resolution=resolution, + attn_resolutions=attn_resolutions, + dropout=dropout, + resamp_with_conv=resamp_with_conv, + out_ch=None, + ) + self.rescaler = LatentRescaler( + factor=rescale_factor, + in_channels=intermediate_chn, + mid_channels=intermediate_chn, + out_channels=out_ch, + depth=rescale_module_depth, + ) + + def forward(self, x): + x = self.encoder(x) + x = self.rescaler(x) + return x + + +class MergedRescaleDecoder(nn.Module): + def __init__( + self, + z_channels, + out_ch, + resolution, + num_res_blocks, + attn_resolutions, + ch, + ch_mult=(1, 2, 4, 8), + dropout=0.0, + resamp_with_conv=True, + rescale_factor=1.0, + rescale_module_depth=1, + ): + super().__init__() + tmp_chn = z_channels * ch_mult[-1] + self.decoder = Decoder( + out_ch=out_ch, + z_channels=tmp_chn, + attn_resolutions=attn_resolutions, + dropout=dropout, + resamp_with_conv=resamp_with_conv, + in_channels=None, + num_res_blocks=num_res_blocks, + ch_mult=ch_mult, + resolution=resolution, + ch=ch, + ) + self.rescaler = LatentRescaler( + factor=rescale_factor, + in_channels=z_channels, + mid_channels=tmp_chn, + out_channels=tmp_chn, + depth=rescale_module_depth, + ) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Upsampler(nn.Module): + def __init__(self, in_size, out_size, in_channels, out_channels, ch_mult=2): + super().__init__() + assert out_size >= in_size + num_blocks = int(np.log2(out_size // in_size)) + 1 + factor_up = 1.0 + (out_size % in_size) + print( + f"Building {self.__class__.__name__} with in_size: {in_size} --> out_size {out_size} and factor {factor_up}" + ) + self.rescaler = LatentRescaler( + factor=factor_up, + in_channels=in_channels, + mid_channels=2 * in_channels, + out_channels=in_channels, + ) + self.decoder = Decoder( + out_ch=out_channels, + resolution=out_size, + z_channels=in_channels, + num_res_blocks=2, + attn_resolutions=[], + in_channels=None, + ch=in_channels, + ch_mult=[ch_mult for _ in range(num_blocks)], + ) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Resize(nn.Module): + def __init__(self, in_channels=None, learned=False, mode="bilinear"): + super().__init__() + self.with_conv = learned + self.mode = mode + if self.with_conv: + print( + f"Note: {self.__class__.__name} uses learned downsampling and will ignore the fixed {mode} mode" + ) + raise 
NotImplementedError() + assert in_channels is not None + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=4, stride=2, padding=1 + ) + + def forward(self, x, scale_factor=1.0): + if scale_factor == 1.0: + return x + else: + x = torch.nn.functional.interpolate( + x, mode=self.mode, align_corners=False, scale_factor=scale_factor + ) + return x + + +class FirstStagePostProcessor(nn.Module): + def __init__( + self, + ch_mult: list, + in_channels, + pretrained_model: nn.Module = None, + reshape=False, + n_channels=None, + dropout=0.0, + pretrained_config=None, + ): + super().__init__() + if pretrained_config is None: + assert ( + pretrained_model is not None + ), 'Either "pretrained_model" or "pretrained_config" must not be None' + self.pretrained_model = pretrained_model + else: + assert ( + pretrained_config is not None + ), 'Either "pretrained_model" or "pretrained_config" must not be None' + self.instantiate_pretrained(pretrained_config) + + self.do_reshape = reshape + + if n_channels is None: + n_channels = self.pretrained_model.encoder.ch + + self.proj_norm = Normalize(in_channels, num_groups=in_channels // 2) + self.proj = nn.Conv2d( + in_channels, n_channels, kernel_size=3, stride=1, padding=1 + ) + + blocks = [] + downs = [] + ch_in = n_channels + for m in ch_mult: + blocks.append( + ResnetBlock( + in_channels=ch_in, out_channels=m * n_channels, dropout=dropout + ) + ) + ch_in = m * n_channels + downs.append(Downsample(ch_in, with_conv=False)) + + self.model = nn.ModuleList(blocks) + self.downsampler = nn.ModuleList(downs) + + def instantiate_pretrained(self, config): + model = instantiate_from_config(config) + self.pretrained_model = model.eval() + # self.pretrained_model.train = False + for param in self.pretrained_model.parameters(): + param.requires_grad = False + + @torch.no_grad() + def encode_with_pretrained(self, x): + c = self.pretrained_model.encode(x) + if isinstance(c, DiagonalGaussianDistribution): + c = c.mode() + return c + + def forward(self, x): + z_fs = self.encode_with_pretrained(x) + z = self.proj_norm(z_fs) + z = self.proj(z) + z = nonlinearity(z) + + for submodel, downmodel in zip(self.model, self.downsampler): + z = submodel(z, temb=None) + z = downmodel(z) + + if self.do_reshape: + z = rearrange(z, "b c h w -> b (h w) c") + return z diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/nn.py b/qa_mdt/audioldm_train/modules/diffusionmodules/nn.py new file mode 100644 index 0000000000000000000000000000000000000000..373d5cd2debb71f3bcc47fc7ccbcb49c67458b36 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/nn.py @@ -0,0 +1,200 @@ +""" +Various utilities for neural networks. +""" + +import math + +import torch as th +import torch.nn as nn +import torch.nn.functional as F + + +class GroupNorm32(nn.GroupNorm): + def __init__(self, num_groups, num_channels, swish, eps=1e-5): + super().__init__(num_groups=num_groups, num_channels=num_channels, eps=eps) + self.swish = swish + + def forward(self, x): + y = super().forward(x.float()).to(x.dtype) + if self.swish == 1.0: + y = F.silu(y) + elif self.swish: + y = y * F.sigmoid(y * float(self.swish)) + return y + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. 
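+
+    Example (sketch): conv_nd(2, 64, 128, 3, padding=1) is equivalent to
+    nn.Conv2d(64, 128, kernel_size=3, padding=1).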
+ """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. + """ + return nn.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def update_ema(target_params, source_params, rate=0.99): + """ + Update target parameters to be closer to those of source parameters using + an exponential moving average. + + :param target_params: the target parameter sequence. + :param source_params: the source parameter sequence. + :param rate: the EMA rate (closer to 1 means slower). + """ + for targ, src in zip(target_params, source_params): + targ.detach().mul_(rate).add_(src, alpha=1 - rate) + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def normalization(channels, swish=0.0): + """ + Make a standard normalization layer, with an optional swish activation. + + :param channels: number of input channels. + :return: an nn.Module for normalization. + """ + return GroupNorm32(num_channels=channels, num_groups=32, swish=swish) + + +# def timestep_embedding(timesteps, dim, max_period=10000): +# """ +# Create sinusoidal timestep embeddings. + +# :param timesteps: a 1-D Tensor of N indices, one per batch element. +# These may be fractional. +# :param dim: the dimension of the output. +# :param max_period: controls the minimum frequency of the embeddings. +# :return: an [N x dim] Tensor of positional embeddings. +# """ +# half = dim // 2 +# freqs = th.exp( +# -math.log(max_period) * th.arange(start=0, end=half, dtype=th.float32) / half +# ).to(device=timesteps.device) +# args = timesteps[:, None].float() * freqs[None] +# embedding = th.cat([th.cos(args), th.sin(args)], dim=-1) +# if dim % 2: +# embedding = th.cat([embedding, th.zeros_like(embedding[:, :1])], dim=-1) +# return embedding + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. 
+ """ + if not repeat_only: + half = dim // 2 + freqs = th.exp( + -math.log(max_period) + * th.arange(start=0, end=half, dtype=th.float32) + / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = th.cat([th.cos(args), th.sin(args)], dim=-1) + if dim % 2: + embedding = th.cat([embedding, th.zeros_like(embedding[:, :1])], dim=-1) + else: + embedding = repeat(timesteps, "b -> b d", d=dim) + return embedding + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + # flag = False + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(th.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + with th.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with th.enable_grad(): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = th.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/openaimodel.py b/qa_mdt/audioldm_train/modules/diffusionmodules/openaimodel.py new file mode 100644 index 0000000000000000000000000000000000000000..659ab12bc2825727c9ba645f82e2f8063af2e4a2 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/openaimodel.py @@ -0,0 +1,1108 @@ +from abc import abstractmethod +from functools import partial +import math +from typing import Iterable + +import numpy as np +import torch as th +import torch.nn as nn +import torch.nn.functional as F + +from qa_mdt.audioldm_train.utilities.diffusion_util import ( + checkpoint, + conv_nd, + linear, + avg_pool_nd, + zero_module, + normalization, + timestep_embedding, +) +from qa_mdt.audioldm_train.modules.diffusionmodules.attention import SpatialTransformer + + +# dummy replace +def convert_module_to_f16(x): + pass + + +def convert_module_to_f32(x): + pass + + +## go +class AttentionPool2d(nn.Module): + """ + Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py + """ + + def __init__( + self, + spacial_dim: int, + embed_dim: int, + num_heads_channels: int, + output_dim: int = None, + ): + super().__init__() + self.positional_embedding = nn.Parameter( + th.randn(embed_dim, spacial_dim**2 + 1) / embed_dim**0.5 + ) + self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1) + self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1) + self.num_heads = embed_dim // num_heads_channels + 
self.attention = QKVAttention(self.num_heads) + + def forward(self, x): + b, c, *_spatial = x.shape + x = x.reshape(b, c, -1).contiguous() # NC(HW) + x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1) # NC(HW+1) + x = x + self.positional_embedding[None, :, :].to(x.dtype) # NC(HW+1) + x = self.qkv_proj(x) + x = self.attention(x) + x = self.c_proj(x) + return x[:, :, 0] + + +class TimestepBlock(nn.Module): + """ + Any module where forward() takes timestep embeddings as a second argument. + """ + + @abstractmethod + def forward(self, x, emb): + """ + Apply the module to `x` given `emb` timestep embeddings. + """ + + +class TimestepEmbedSequential(nn.Sequential, TimestepBlock): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. + """ + + def forward(self, x, emb, context_list=None, mask_list=None): + # The first spatial transformer block does not have context + spatial_transformer_id = 0 + context_list = [None] + context_list + mask_list = [None] + mask_list + + for layer in self: + if isinstance(layer, TimestepBlock): + x = layer(x, emb) + elif isinstance(layer, SpatialTransformer): + if spatial_transformer_id >= len(context_list): + context, mask = None, None + else: + context, mask = ( + context_list[spatial_transformer_id], + mask_list[spatial_transformer_id], + ) + + x = layer(x, context, mask=mask) + spatial_transformer_id += 1 + else: + x = layer(x) + return x + + +class Upsample(nn.Module): + """ + An upsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + upsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + if use_conv: + self.conv = conv_nd( + dims, self.channels, self.out_channels, 3, padding=padding + ) + + def forward(self, x): + assert x.shape[1] == self.channels + if self.dims == 3: + x = F.interpolate( + x, (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode="nearest" + ) + else: + x = F.interpolate(x, scale_factor=2, mode="nearest") + if self.use_conv: + x = self.conv(x) + return x + + +class TransposedUpsample(nn.Module): + "Learned 2x upsampling without padding" + + def __init__(self, channels, out_channels=None, ks=5): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + + self.up = nn.ConvTranspose2d( + self.channels, self.out_channels, kernel_size=ks, stride=2 + ) + + def forward(self, x): + return self.up(x) + + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. 
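+
+    Example (illustrative only, added for clarity; a stride-2 conv halves each
+    spatial dimension):
+        >>> down = Downsample(channels=64, use_conv=True, dims=2)
+        >>> down(th.randn(1, 64, 32, 32)).shape
+        torch.Size([1, 64, 16, 16])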
+ """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = conv_nd( + dims, + self.channels, + self.out_channels, + 3, + stride=stride, + padding=padding, + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + + +class ResBlock(TimestepBlock): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. + """ + + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_conv=False, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + conv_nd(dims, channels, self.out_channels, 3, padding=1), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims) + self.x_upd = Upsample(channels, False, dims) + elif down: + self.h_upd = Downsample(channels, False, dims) + self.x_upd = Downsample(channels, False, dims) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.emb_layers = nn.Sequential( + nn.SiLU(), + linear( + emb_channels, + 2 * self.out_channels if use_scale_shift_norm else self.out_channels, + ), + ) + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module( + conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1) + ), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, 3, padding=1 + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x, emb): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. 
+ """ + return checkpoint( + self._forward, (x, emb), self.parameters(), self.use_checkpoint + ) + + def _forward(self, x, emb): + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + scale, shift = th.chunk(emb_out, 2, dim=1) + h = out_norm(h) * (1 + scale) + shift + h = out_rest(h) + else: + h = h + emb_out + h = self.out_layers(h) + return self.skip_connection(x) + h + + +class AttentionBlock(nn.Module): + """ + An attention block that allows spatial positions to attend to each other. + Originally ported from here, but adapted to the N-d case. + https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66. + """ + + def __init__( + self, + channels, + num_heads=1, + num_head_channels=-1, + use_checkpoint=False, + use_new_attention_order=False, + ): + super().__init__() + self.channels = channels + if num_head_channels == -1: + self.num_heads = num_heads + else: + assert ( + channels % num_head_channels == 0 + ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}" + self.num_heads = channels // num_head_channels + self.use_checkpoint = use_checkpoint + self.norm = normalization(channels) + self.qkv = conv_nd(1, channels, channels * 3, 1) + if use_new_attention_order: + # split qkv before split heads + self.attention = QKVAttention(self.num_heads) + else: + # split heads before split qkv + self.attention = QKVAttentionLegacy(self.num_heads) + + self.proj_out = zero_module(conv_nd(1, channels, channels, 1)) + + def forward(self, x): + return checkpoint( + self._forward, (x,), self.parameters(), True + ) # TODO: check checkpoint usage, is True # TODO: fix the .half call!!! + # return pt_checkpoint(self._forward, x) # pytorch + + def _forward(self, x): + b, c, *spatial = x.shape + x = x.reshape(b, c, -1).contiguous() + qkv = self.qkv(self.norm(x)).contiguous() + h = self.attention(qkv).contiguous() + h = self.proj_out(h).contiguous() + return (x + h).reshape(b, c, *spatial).contiguous() + + +def count_flops_attn(model, _x, y): + """ + A counter for the `thop` package to count the operations in an + attention operation. + Meant to be used like: + macs, params = thop.profile( + model, + inputs=(inputs, timestamps), + custom_ops={QKVAttention: QKVAttention.count_flops}, + ) + """ + b, c, *spatial = y[0].shape + num_spatial = int(np.prod(spatial)) + # We perform two matmuls with the same number of ops. + # The first computes the weight matrix, the second computes + # the combination of the value vectors. + matmul_ops = 2 * b * (num_spatial**2) * c + model.total_ops += th.DoubleTensor([matmul_ops]) + + +class QKVAttentionLegacy(nn.Module): + """ + A module which performs QKV attention. Matches legacy QKVAttention + input/ouput heads shaping + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. 
+ """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = ( + qkv.reshape(bs * self.n_heads, ch * 3, length).contiguous().split(ch, dim=1) + ) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", q * scale, k * scale + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight.float(), dim=-1).type(weight.dtype) + a = th.einsum("bts,bcs->bct", weight, v) + return a.reshape(bs, -1, length).contiguous() + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class QKVAttention(nn.Module): + """ + A module which performs QKV attention and splits in a different order. + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. + """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = qkv.chunk(3, dim=1) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", + (q * scale).view(bs * self.n_heads, ch, length), + (k * scale).view(bs * self.n_heads, ch, length), + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight.float(), dim=-1).type(weight.dtype) + a = th.einsum( + "bts,bcs->bct", + weight, + v.reshape(bs * self.n_heads, ch, length).contiguous(), + ) + return a.reshape(bs, -1, length).contiguous() + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class UNetModel(nn.Module): + """ + The full UNet model with attention and timestep embedding. + :param in_channels: channels in the input Tensor. + :param model_channels: base channel count for the model. + :param out_channels: channels in the output Tensor. + :param num_res_blocks: number of residual blocks per downsample. + :param attention_resolutions: a collection of downsample rates at which + attention will take place. May be a set, list, or tuple. + For example, if this contains 4, then at 4x downsampling, attention + will be used. + :param dropout: the dropout probability. + :param channel_mult: channel multiplier for each level of the UNet. + :param conv_resample: if True, use learned convolutions for upsampling and + downsampling. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param num_classes: if specified (as an int), then this model will be + class-conditional with `num_classes` classes. + :param use_checkpoint: use gradient checkpointing to reduce memory usage. + :param num_heads: the number of attention heads in each attention layer. + :param num_heads_channels: if specified, ignore num_heads and instead use + a fixed channel width per attention head. + :param num_heads_upsample: works with num_heads to set a different number + of heads for upsampling. Deprecated. + :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. + :param resblock_updown: use residual blocks for up/downsampling. + :param use_new_attention_order: use a different attention pattern for potentially + increased efficiency. 
+ """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + extra_sa_layer=True, + num_classes=None, + extra_film_condition_dim=None, + use_checkpoint=False, + use_fp16=False, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=True, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + ): + super().__init__() + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert ( + num_head_channels != -1 + ), "Either num_heads or num_head_channels has to be set" + + if num_head_channels == -1: + assert ( + num_heads != -1 + ), "Either num_heads or num_head_channels has to be set" + + self.image_size = image_size + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + self.num_res_blocks = num_res_blocks + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.extra_film_condition_dim = extra_film_condition_dim + self.use_checkpoint = use_checkpoint + self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.predict_codebook_ids = n_embed is not None + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + + # assert not ( + # self.num_classes is not None and self.extra_film_condition_dim is not None + # ), "As for the condition of theh UNet model, you can only set using class label or an extra embedding vector (such as from CLAP). You cannot set both num_classes and extra_film_condition_dim." + + if self.num_classes is not None: + self.label_emb = nn.Embedding(num_classes, time_embed_dim) + + self.use_extra_film_by_concat = self.extra_film_condition_dim is not None + + if self.extra_film_condition_dim is not None: + self.film_emb = nn.Linear(self.extra_film_condition_dim, time_embed_dim) + print( + "+ Use extra condition on UNet channel using Film. Extra condition dimension is %s. " + % self.extra_film_condition_dim + ) + + if context_dim is not None and not use_spatial_transformer: + assert ( + use_spatial_transformer + ), "Fool!! You forgot to use the spatial transformer for your cross-attention conditioning..." 
+ + if context_dim is not None and not isinstance(context_dim, list): + context_dim = [context_dim] + elif context_dim is None: + context_dim = [None] # At least use one spatial transformer + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for _ in range(num_res_blocks): + layers = [ + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + dim_head = ( + ch // num_heads + if use_spatial_transformer + else num_head_channels + ) + if extra_sa_layer: + layers.append( + SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth, + context_dim=None, + ) + ) + for context_dim_id in range(len(context_dim)): + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + if not use_spatial_transformer + else SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth, + context_dim=context_dim[context_dim_id], + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + middle_layers = [ + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + if extra_sa_layer: + middle_layers.append( + SpatialTransformer( + ch, num_heads, dim_head, depth=transformer_depth, context_dim=None + ) + ) + for context_dim_id in range(len(context_dim)): + middle_layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + if not use_spatial_transformer + else SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth, + context_dim=context_dim[context_dim_id], + ) + ) + middle_layers.append( + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + 
) + self.middle_block = TimestepEmbedSequential(*middle_layers) + + self._feature_size += ch + + self.output_blocks = nn.ModuleList([]) + for level, mult in list(enumerate(channel_mult))[::-1]: + for i in range(num_res_blocks + 1): + ich = input_block_chans.pop() + layers = [ + ResBlock( + ch + ich, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + out_channels=model_channels * mult, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = model_channels * mult + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ( + ch // num_heads + if use_spatial_transformer + else num_head_channels + ) + if extra_sa_layer: + layers.append( + SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth, + context_dim=None, + ) + ) + for context_dim_id in range(len(context_dim)): + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads_upsample, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + if not use_spatial_transformer + else SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth, + context_dim=context_dim[context_dim_id], + ) + ) + if level and i == num_res_blocks: + out_ch = ch + layers.append( + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + up=True, + ) + if resblock_updown + else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch) + ) + ds //= 2 + self.output_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)), + ) + if self.predict_codebook_ids: + self.id_predictor = nn.Sequential( + normalization(ch), + conv_nd(dims, model_channels, n_embed, 1), + # nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits + ) + + self.shape_reported = False + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + self.output_blocks.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. + """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + self.output_blocks.apply(convert_module_to_f32) + + def forward( + self, + x, + timesteps=None, + y=None, + context_list=None, + context_attn_mask_list=None, + **kwargs, + ): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :param context: conditioning plugged in via crossattn + :param y: an [N] Tensor of labels, if class-conditional. an [N, extra_film_condition_dim] Tensor if film-embed conditional + :return: an [N x C x ...] Tensor of outputs. 
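+
+        Example call (a sketch with placeholder names, not from the original
+        source):
+
+            eps = model(x, timesteps=t, y=film_embedding,
+                        context_list=[text_hidden_states],
+                        context_attn_mask_list=[text_attention_mask])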
+ """ + if not self.shape_reported: + print("The shape of UNet input is", x.size()) + self.shape_reported = True + + print(f'check_y : {y}') + assert (y is not None) == ( + self.num_classes is not None or self.extra_film_condition_dim is not None + ), "must specify y if and only if the model is class-conditional or film embedding conditional" + hs = [] + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False) + emb = self.time_embed(t_emb) + + # if self.num_classes is not None: + # assert y.shape == (x.shape[0],) + # emb = emb + self.label_emb(y) + + if self.use_extra_film_by_concat: + emb = th.cat([emb, self.film_emb(y)], dim=-1) + + h = x.type(self.dtype) + for module in self.input_blocks: + h = module(h, emb, context_list, context_attn_mask_list) + hs.append(h) + h = self.middle_block(h, emb, context_list, context_attn_mask_list) + for module in self.output_blocks: + + concate_tensor = hs.pop() + h = th.cat([h, concate_tensor], dim=1) + h = module(h, emb, context_list, context_attn_mask_list) + + h = h.type(x.dtype) + if self.predict_codebook_ids: + return self.id_predictor(h) + else: + return self.out(h) + + +class EncoderUNetModel(nn.Module): + """ + The half UNet model with attention and timestep embedding. + For usage, see UNet. + """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + use_checkpoint=False, + use_fp16=False, + num_heads=1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + pool="adaptive", + *args, + **kwargs, + ): + super().__init__() + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + self.num_res_blocks = num_res_blocks + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.use_checkpoint = use_checkpoint + self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for _ in range(num_res_blocks): + layers = [ + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=num_head_channels, + use_new_attention_order=use_new_attention_order, + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + 
dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + self.middle_block = TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=num_head_channels, + use_new_attention_order=use_new_attention_order, + ), + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + ) + self._feature_size += ch + self.pool = pool + if pool == "adaptive": + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + nn.AdaptiveAvgPool2d((1, 1)), + zero_module(conv_nd(dims, ch, out_channels, 1)), + nn.Flatten(), + ) + elif pool == "attention": + assert num_head_channels != -1 + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + AttentionPool2d( + (image_size // ds), ch, num_head_channels, out_channels + ), + ) + elif pool == "spatial": + self.out = nn.Sequential( + nn.Linear(self._feature_size, 2048), + nn.ReLU(), + nn.Linear(2048, self.out_channels), + ) + elif pool == "spatial_v2": + self.out = nn.Sequential( + nn.Linear(self._feature_size, 2048), + normalization(2048), + nn.SiLU(), + nn.Linear(2048, self.out_channels), + ) + else: + raise NotImplementedError(f"Unexpected {pool} pooling") + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. + """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + + def forward(self, x, timesteps): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :return: an [N x K] Tensor of outputs. 
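+
+        Example (illustrative only; a deliberately small configuration with the
+        default ``pool="adaptive"``):
+            >>> enc = EncoderUNetModel(image_size=64, in_channels=3, model_channels=32,
+            ...                        out_channels=10, num_res_blocks=1,
+            ...                        attention_resolutions=[])
+            >>> enc(th.randn(2, 3, 64, 64), th.randint(0, 1000, (2,))).shape
+            torch.Size([2, 10])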
+ """ + emb = self.time_embed(timestep_embedding(timesteps, self.model_channels)) + + results = [] + h = x.type(self.dtype) + for module in self.input_blocks: + h = module(h, emb) + if self.pool.startswith("spatial"): + results.append(h.type(x.dtype).mean(dim=(2, 3))) + h = self.middle_block(h, emb) + if self.pool.startswith("spatial"): + results.append(h.type(x.dtype).mean(dim=(2, 3))) + h = th.cat(results, axis=-1) + return self.out(h) + else: + h = h.type(x.dtype) + return self.out(h) diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__init__.py b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..db68edd20c9716e74ef1c853e968227efe45be29 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__init__.py @@ -0,0 +1,63 @@ +from .transport import Transport, ModelType, WeightType, PathType, Sampler + +def create_transport( + path_type='Linear', + prediction="velocity", + loss_weight=None, + train_eps=None, + sample_eps=None, +): + """function for creating Transport object + **Note**: model prediction defaults to velocity + Args: + - path_type: type of path to use; default to linear + - learn_score: set model prediction to score + - learn_noise: set model prediction to noise + - velocity_weighted: weight loss by velocity weight + - likelihood_weighted: weight loss by likelihood weight + - train_eps: small epsilon for avoiding instability during training + - sample_eps: small epsilon for avoiding instability during sampling + """ + + if prediction == "noise": + model_type = ModelType.NOISE + elif prediction == "score": + model_type = ModelType.SCORE + else: + model_type = ModelType.VELOCITY + + if loss_weight == "velocity": + loss_type = WeightType.VELOCITY + elif loss_weight == "likelihood": + loss_type = WeightType.LIKELIHOOD + else: + loss_type = WeightType.NONE + + path_choice = { + "Linear": PathType.LINEAR, + "GVP": PathType.GVP, + "VP": PathType.VP, + } + + path_type = path_choice[path_type] + + if (path_type in [PathType.VP]): + train_eps = 1e-5 if train_eps is None else train_eps + sample_eps = 1e-3 if train_eps is None else sample_eps + elif (path_type in [PathType.GVP, PathType.LINEAR] and model_type != ModelType.VELOCITY): + train_eps = 1e-3 if train_eps is None else train_eps + sample_eps = 1e-3 if train_eps is None else sample_eps + else: # velocity & [GVP, LINEAR] is stable everywhere + train_eps = 0 + sample_eps = 0 + + # create flow state + state = Transport( + model_type=model_type, + path_type=path_type, + loss_type=loss_type, + train_eps=train_eps, + sample_eps=sample_eps, + ) + + return state \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1e0f1a770d6935c3e6c925e2f00d73256eb53760 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/integrators.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/integrators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..02700f8deafcef492d414cfa5d75e9b40d2cc97f Binary files /dev/null and 
b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/integrators.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/path.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/path.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5c9a188dd44af25ac808ac743907ecfce55019cb Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/path.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/transport.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/transport.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bfc06ed7c1ed5f326aa39b4b7416a808e7deb472 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/transport.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/utils.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4919e1505864464f7b64aaa585d42c561493917e Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/__pycache__/utils.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/integrators.py b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/integrators.py new file mode 100644 index 0000000000000000000000000000000000000000..adf7c7b4c50b6ff6c63973e0ddaa65b9759274c0 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/integrators.py @@ -0,0 +1,117 @@ +import numpy as np +import torch as th +import torch.nn as nn +from torchdiffeq import odeint +from functools import partial +from tqdm import tqdm + +class sde: + """SDE solver class""" + def __init__( + self, + drift, + diffusion, + *, + t0, + t1, + num_steps, + sampler_type, + ): + assert t0 < t1, "SDE sampler has to be in forward time" + + self.num_timesteps = num_steps + self.t = th.linspace(t0, t1, num_steps) + self.dt = self.t[1] - self.t[0] + self.drift = drift + self.diffusion = diffusion + self.sampler_type = sampler_type + + def __Euler_Maruyama_step(self, x, mean_x, t, model, **model_kwargs): + w_cur = th.randn(x.size()).to(x) + t = th.ones(x.size(0)).to(x) * t + dw = w_cur * th.sqrt(self.dt) + drift = self.drift(x, t, model, **model_kwargs) + diffusion = self.diffusion(x, t) + mean_x = x + drift * self.dt + x = mean_x + th.sqrt(2 * diffusion) * dw + return x, mean_x + + def __Heun_step(self, x, _, t, model, **model_kwargs): + w_cur = th.randn(x.size()).to(x) + dw = w_cur * th.sqrt(self.dt) + t_cur = th.ones(x.size(0)).to(x) * t + diffusion = self.diffusion(x, t_cur) + xhat = x + th.sqrt(2 * diffusion) * dw + K1 = self.drift(xhat, t_cur, model, **model_kwargs) + xp = xhat + self.dt * K1 + K2 = self.drift(xp, t_cur + self.dt, model, **model_kwargs) + return xhat + 0.5 * self.dt * (K1 + K2), xhat # at last time point we do not perform the heun step + + def __forward_fn(self): + """TODO: generalize here by adding all private functions ending with steps to it""" + sampler_dict = { + "Euler": self.__Euler_Maruyama_step, + "Heun": self.__Heun_step, + } + + try: + sampler = sampler_dict[self.sampler_type] + except: + raise NotImplementedError("Smapler type not implemented.") + + return sampler + + def sample(self, init, 
model, **model_kwargs): + """forward loop of sde""" + x = init + mean_x = init + samples = [] + sampler = self.__forward_fn() + for ti in self.t[:-1]: + with th.no_grad(): + x, mean_x = sampler(x, mean_x, ti, model, **model_kwargs) + samples.append(x) + + return samples + +class ode: + """ODE solver class""" + def __init__( + self, + drift, + *, + t0, + t1, + sampler_type, + num_steps, + atol, + rtol, + ): + assert t0 < t1, "ODE sampler has to be in forward time" + + self.drift = drift + self.t = th.linspace(t0, t1, num_steps) + self.atol = atol + self.rtol = rtol + self.sampler_type = sampler_type + + def sample(self, x, model, **model_kwargs): + + device = x[0].device if isinstance(x, tuple) else x.device + def _fn(t, x): + t = th.ones(x[0].size(0)).to(device) * t if isinstance(x, tuple) else th.ones(x.size(0)).to(device) * t + model_output = self.drift(x, t, model, **model_kwargs) + return model_output + + t = self.t.to(device) + atol = [self.atol] * len(x) if isinstance(x, tuple) else [self.atol] + rtol = [self.rtol] * len(x) if isinstance(x, tuple) else [self.rtol] + samples = odeint( + _fn, + x, + t, + method=self.sampler_type, + atol=atol, + rtol=rtol + ) + return samples \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/path.py b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/path.py new file mode 100644 index 0000000000000000000000000000000000000000..156a7b0dea03497a85306ebbeedfe4fbedf87c27 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/path.py @@ -0,0 +1,192 @@ +import torch as th +import numpy as np +from functools import partial + +def expand_t_like_x(t, x): + """Function to reshape time t to broadcastable dimension of x + Args: + t: [batch_dim,], time vector + x: [batch_dim,...], data point + """ + dims = [1] * (len(x.size()) - 1) + t = t.view(t.size(0), *dims) + return t + + +#################### Coupling Plans #################### + +class ICPlan: + """Linear Coupling Plan""" + def __init__(self, sigma=0.0): + self.sigma = sigma + + def compute_alpha_t(self, t): + """Compute the data coefficient along the path""" + return t, 1 + + def compute_sigma_t(self, t): + """Compute the noise coefficient along the path""" + return 1 - t, -1 + + def compute_d_alpha_alpha_ratio_t(self, t): + """Compute the ratio between d_alpha and alpha""" + return 1 / t + + def compute_drift(self, x, t): + """We always output sde according to score parametrization; """ + t = expand_t_like_x(t, x) + alpha_ratio = self.compute_d_alpha_alpha_ratio_t(t) + sigma_t, d_sigma_t = self.compute_sigma_t(t) + drift = alpha_ratio * x + diffusion = alpha_ratio * (sigma_t ** 2) - sigma_t * d_sigma_t + + return -drift, diffusion + + def compute_diffusion(self, x, t, form="constant", norm=1.0): + """Compute the diffusion term of the SDE + Args: + x: [batch_dim, ...], data point + t: [batch_dim,], time vector + form: str, form of the diffusion term + norm: float, norm of the diffusion term + """ + t = expand_t_like_x(t, x) + choices = { + "constant": norm, + "SBDM": norm * self.compute_drift(x, t)[1], + "sigma": norm * self.compute_sigma_t(t)[0], + "linear": norm * (1 - t), + "decreasing": 0.25 * (norm * th.cos(np.pi * t) + 1) ** 2, + "inccreasing-decreasing": norm * th.sin(np.pi * t) ** 2, + } + + try: + diffusion = choices[form] + except KeyError: + raise NotImplementedError(f"Diffusion form {form} not implemented") + + return diffusion + + def get_score_from_velocity(self, velocity, x, t): + """Wrapper function: transfrom 
velocity prediction model to score + Args: + velocity: [batch_dim, ...] shaped tensor; velocity model output + x: [batch_dim, ...] shaped tensor; x_t data point + t: [batch_dim,] time tensor + """ + t = expand_t_like_x(t, x) + alpha_t, d_alpha_t = self.compute_alpha_t(t) + sigma_t, d_sigma_t = self.compute_sigma_t(t) + mean = x + reverse_alpha_ratio = alpha_t / d_alpha_t + var = sigma_t**2 - reverse_alpha_ratio * d_sigma_t * sigma_t + score = (reverse_alpha_ratio * velocity - mean) / var + return score + + def get_noise_from_velocity(self, velocity, x, t): + """Wrapper function: transfrom velocity prediction model to denoiser + Args: + velocity: [batch_dim, ...] shaped tensor; velocity model output + x: [batch_dim, ...] shaped tensor; x_t data point + t: [batch_dim,] time tensor + """ + t = expand_t_like_x(t, x) + alpha_t, d_alpha_t = self.compute_alpha_t(t) + sigma_t, d_sigma_t = self.compute_sigma_t(t) + mean = x + reverse_alpha_ratio = alpha_t / d_alpha_t + var = reverse_alpha_ratio * d_sigma_t - sigma_t + noise = (reverse_alpha_ratio * velocity - mean) / var + return noise + + def get_velocity_from_score(self, score, x, t): + """Wrapper function: transfrom score prediction model to velocity + Args: + score: [batch_dim, ...] shaped tensor; score model output + x: [batch_dim, ...] shaped tensor; x_t data point + t: [batch_dim,] time tensor + """ + t = expand_t_like_x(t, x) + drift, var = self.compute_drift(x, t) + velocity = var * score - drift + return velocity + + def compute_mu_t(self, t, x0, x1): + """Compute the mean of time-dependent density p_t""" + t = expand_t_like_x(t, x1) + alpha_t, _ = self.compute_alpha_t(t) + sigma_t, _ = self.compute_sigma_t(t) + return alpha_t * x1 + sigma_t * x0 + + def compute_xt(self, t, x0, x1): + """Sample xt from time-dependent density p_t; rng is required""" + xt = self.compute_mu_t(t, x0, x1) + return xt + + def compute_ut(self, t, x0, x1, xt): + """Compute the vector field corresponding to p_t""" + t = expand_t_like_x(t, x1) + _, d_alpha_t = self.compute_alpha_t(t) + _, d_sigma_t = self.compute_sigma_t(t) + return d_alpha_t * x1 + d_sigma_t * x0 + + def plan(self, t, x0, x1): + xt = self.compute_xt(t, x0, x1) + ut = self.compute_ut(t, x0, x1, xt) + return t, xt, ut + + +class VPCPlan(ICPlan): + """class for VP path flow matching""" + + def __init__(self, sigma_min=0.1, sigma_max=20.0): + self.sigma_min = sigma_min + self.sigma_max = sigma_max + self.log_mean_coeff = lambda t: -0.25 * ((1 - t) ** 2) * (self.sigma_max - self.sigma_min) - 0.5 * (1 - t) * self.sigma_min + self.d_log_mean_coeff = lambda t: 0.5 * (1 - t) * (self.sigma_max - self.sigma_min) + 0.5 * self.sigma_min + + + def compute_alpha_t(self, t): + """Compute coefficient of x1""" + alpha_t = self.log_mean_coeff(t) + alpha_t = th.exp(alpha_t) + d_alpha_t = alpha_t * self.d_log_mean_coeff(t) + return alpha_t, d_alpha_t + + def compute_sigma_t(self, t): + """Compute coefficient of x0""" + p_sigma_t = 2 * self.log_mean_coeff(t) + sigma_t = th.sqrt(1 - th.exp(p_sigma_t)) + d_sigma_t = th.exp(p_sigma_t) * (2 * self.d_log_mean_coeff(t)) / (-2 * sigma_t) + return sigma_t, d_sigma_t + + def compute_d_alpha_alpha_ratio_t(self, t): + """Special purposed function for computing numerical stabled d_alpha_t / alpha_t""" + return self.d_log_mean_coeff(t) + + def compute_drift(self, x, t): + """Compute the drift term of the SDE""" + t = expand_t_like_x(t, x) + beta_t = self.sigma_min + (1 - t) * (self.sigma_max - self.sigma_min) + return -0.5 * beta_t * x, beta_t / 2 + + +class GVPCPlan(ICPlan): + def 
__init__(self, sigma=0.0): + super().__init__(sigma) + + def compute_alpha_t(self, t): + """Compute coefficient of x1""" + alpha_t = th.sin(t * np.pi / 2) + d_alpha_t = np.pi / 2 * th.cos(t * np.pi / 2) + return alpha_t, d_alpha_t + + def compute_sigma_t(self, t): + """Compute coefficient of x0""" + sigma_t = th.cos(t * np.pi / 2) + d_sigma_t = -np.pi / 2 * th.sin(t * np.pi / 2) + return sigma_t, d_sigma_t + + def compute_d_alpha_alpha_ratio_t(self, t): + """Special purposed function for computing numerical stabled d_alpha_t / alpha_t""" + return np.pi / (2 * th.tan(t * np.pi / 2)) \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/transport.py b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/transport.py new file mode 100644 index 0000000000000000000000000000000000000000..396c516cfc64516a39212d95ff895c98135eef17 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/transport.py @@ -0,0 +1,443 @@ +import torch as th +import numpy as np +import logging + +import enum + +from . import path +from .utils import EasyDict, log_state, mean_flat +from .integrators import ode, sde + +class ModelType(enum.Enum): + """ + Which type of output the model predicts. + """ + + NOISE = enum.auto() # the model predicts epsilon + SCORE = enum.auto() # the model predicts \nabla \log p(x) + VELOCITY = enum.auto() # the model predicts v(x) + +class PathType(enum.Enum): + """ + Which type of path to use. + """ + + LINEAR = enum.auto() + GVP = enum.auto() + VP = enum.auto() + +class WeightType(enum.Enum): + """ + Which type of weighting to use. + """ + + NONE = enum.auto() + VELOCITY = enum.auto() + LIKELIHOOD = enum.auto() + + +class Transport: + + def __init__( + self, + *, + model_type, + path_type, + loss_type, + train_eps, + sample_eps, + ): + path_options = { + PathType.LINEAR: path.ICPlan, + PathType.GVP: path.GVPCPlan, + PathType.VP: path.VPCPlan, + } + + self.loss_type = loss_type + self.model_type = model_type + self.path_sampler = path_options[path_type]() + self.train_eps = train_eps + self.sample_eps = sample_eps + + def prior_logp(self, z): + ''' + Standard multivariate normal prior + Assume z is batched + ''' + shape = th.tensor(z.size()) + N = th.prod(shape[1:]) + _fn = lambda x: -N / 2. * np.log(2 * np.pi) - th.sum(x ** 2) / 2. 
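+        # Per-sample log-density of a standard Gaussian:
+        #   log N(z; 0, I) = -(D / 2) * log(2 * pi) - ||z||^2 / 2,
+        # evaluated independently for each batch element via th.vmap.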
+ return th.vmap(_fn)(z) + + + def check_interval( + self, + train_eps, + sample_eps, + *, + diffusion_form="SBDM", + sde=False, + reverse=False, + eval=False, + last_step_size=0.0, + ): + t0 = 0 + t1 = 1 + eps = train_eps if not eval else sample_eps + if (type(self.path_sampler) in [path.VPCPlan]): + + t1 = 1 - eps if (not sde or last_step_size == 0) else 1 - last_step_size + + elif (type(self.path_sampler) in [path.ICPlan, path.GVPCPlan]) \ + and (self.model_type != ModelType.VELOCITY or sde): # avoid numerical issue by taking a first semi-implicit step + + t0 = eps if (diffusion_form == "SBDM" and sde) or self.model_type != ModelType.VELOCITY else 0 + t1 = 1 - eps if (not sde or last_step_size == 0) else 1 - last_step_size + + if reverse: + t0, t1 = 1 - t0, 1 - t1 + + return t0, t1 + + + def sample(self, x1): + """Sampling x0 & t based on shape of x1 (if needed) + Args: + x1 - data point; [batch, *dim] + """ + + x0 = th.randn_like(x1) + t0, t1 = self.check_interval(self.train_eps, self.sample_eps) + t = th.rand((x1.shape[0],)) * (t1 - t0) + t0 + t = t.to(x1) + return t, x0, x1 + + + def training_losses( + self, + model, + x1, + model_kwargs=None + ): + """Loss for training the score model + Args: + - model: backbone model; could be score, noise, or velocity + - x1: datapoint + - model_kwargs: additional arguments for the model + """ + if model_kwargs == None: + model_kwargs = {} + + t, x0, x1 = self.sample(x1) + t, xt, ut = self.path_sampler.plan(t, x0, x1) + model_output = model(xt, t, **model_kwargs) + B, *_, C = xt.shape + assert model_output.size() == (B, *xt.size()[1:-1], C) + + terms = {} + terms['pred'] = model_output + if self.model_type == ModelType.VELOCITY: + terms['loss'] = mean_flat(((model_output - ut) ** 2)) + else: + _, drift_var = self.path_sampler.compute_drift(xt, t) + sigma_t, _ = self.path_sampler.compute_sigma_t(path.expand_t_like_x(t, xt)) + if self.loss_type in [WeightType.VELOCITY]: + weight = (drift_var / sigma_t) ** 2 + elif self.loss_type in [WeightType.LIKELIHOOD]: + weight = drift_var / (sigma_t ** 2) + elif self.loss_type in [WeightType.NONE]: + weight = 1 + else: + raise NotImplementedError() + + if self.model_type == ModelType.NOISE: + terms['loss'] = mean_flat(weight * ((model_output - x0) ** 2)) + else: + terms['loss'] = mean_flat(weight * ((model_output * sigma_t + x0) ** 2)) + + return terms + + + def get_drift( + self + ): + """member function for obtaining the drift of the probability flow ODE""" + def score_ode(x, t, model, **model_kwargs): + drift_mean, drift_var = self.path_sampler.compute_drift(x, t) + model_output = model(x, t, **model_kwargs) + return (-drift_mean + drift_var * model_output) # by change of variable + + def noise_ode(x, t, model, **model_kwargs): + drift_mean, drift_var = self.path_sampler.compute_drift(x, t) + sigma_t, _ = self.path_sampler.compute_sigma_t(path.expand_t_like_x(t, x)) + model_output = model(x, t, **model_kwargs) + score = model_output / -sigma_t + return (-drift_mean + drift_var * score) + + def velocity_ode(x, t, model, **model_kwargs): + model_output = model(x, t, **model_kwargs) + return model_output + + if self.model_type == ModelType.NOISE: + drift_fn = noise_ode + elif self.model_type == ModelType.SCORE: + drift_fn = score_ode + else: + drift_fn = velocity_ode + + def body_fn(x, t, model, **model_kwargs): + model_output = drift_fn(x, t, model, **model_kwargs) + assert model_output.shape == x.shape, "Output shape from ODE solver must match input shape" + return model_output + + return body_fn + + + def 
get_score( + self, + ): + """member function for obtaining score of + x_t = alpha_t * x + sigma_t * eps""" + if self.model_type == ModelType.NOISE: + score_fn = lambda x, t, model, **kwargs: model(x, t, **kwargs) / -self.path_sampler.compute_sigma_t(path.expand_t_like_x(t, x))[0] + elif self.model_type == ModelType.SCORE: + score_fn = lambda x, t, model, **kwagrs: model(x, t, **kwagrs) + elif self.model_type == ModelType.VELOCITY: + score_fn = lambda x, t, model, **kwargs: self.path_sampler.get_score_from_velocity(model(x, t, **kwargs), x, t) + else: + raise NotImplementedError() + + return score_fn + + +class Sampler: + """Sampler class for the transport model""" + def __init__( + self, + transport, + ): + """Constructor for a general sampler; supporting different sampling methods + Args: + - transport: an tranport object specify model prediction & interpolant type + """ + + self.transport = transport + self.drift = self.transport.get_drift() + self.score = self.transport.get_score() + + def __get_sde_diffusion_and_drift( + self, + *, + diffusion_form="SBDM", + diffusion_norm=1.0, + ): + + def diffusion_fn(x, t): + diffusion = self.transport.path_sampler.compute_diffusion(x, t, form=diffusion_form, norm=diffusion_norm) + return diffusion + + sde_drift = \ + lambda x, t, model, **kwargs: \ + self.drift(x, t, model, **kwargs) + diffusion_fn(x, t) * self.score(x, t, model, **kwargs) + + sde_diffusion = diffusion_fn + + return sde_drift, sde_diffusion + + def __get_last_step( + self, + sde_drift, + *, + last_step, + last_step_size, + ): + """Get the last step function of the SDE solver""" + + if last_step is None: + last_step_fn = \ + lambda x, t, model, **model_kwargs: \ + x + elif last_step == "Mean": + last_step_fn = \ + lambda x, t, model, **model_kwargs: \ + x + sde_drift(x, t, model, **model_kwargs) * last_step_size + elif last_step == "Tweedie": + alpha = self.transport.path_sampler.compute_alpha_t # simple aliasing; the original name was too long + sigma = self.transport.path_sampler.compute_sigma_t + last_step_fn = \ + lambda x, t, model, **model_kwargs: \ + x / alpha(t)[0][0] + (sigma(t)[0][0] ** 2) / alpha(t)[0][0] * self.score(x, t, model, **model_kwargs) + elif last_step == "Euler": + last_step_fn = \ + lambda x, t, model, **model_kwargs: \ + x + self.drift(x, t, model, **model_kwargs) * last_step_size + else: + raise NotImplementedError() + + return last_step_fn + + def sample_sde( + self, + *, + sampling_method="Euler", + diffusion_form="SBDM", + diffusion_norm=1.0, + last_step="Mean", + last_step_size=0.04, + num_steps=250, + ): + """returns a sampling function with given SDE settings + Args: + - sampling_method: type of sampler used in solving the SDE; default to be Euler-Maruyama + - diffusion_form: function form of diffusion coefficient; default to be matching SBDM + - diffusion_norm: function magnitude of diffusion coefficient; default to 1 + - last_step: type of the last step; default to identity + - last_step_size: size of the last step; default to match the stride of 250 steps over [0,1] + - num_steps: total integration step of SDE + """ + + if last_step is None: + last_step_size = 0.0 + + sde_drift, sde_diffusion = self.__get_sde_diffusion_and_drift( + diffusion_form=diffusion_form, + diffusion_norm=diffusion_norm, + ) + + t0, t1 = self.transport.check_interval( + self.transport.train_eps, + self.transport.sample_eps, + diffusion_form=diffusion_form, + sde=True, + eval=True, + reverse=False, + last_step_size=last_step_size, + ) + + _sde = sde( + sde_drift, + 
sde_diffusion, + t0=t0, + t1=t1, + num_steps=num_steps, + sampler_type=sampling_method + ) + + last_step_fn = self.__get_last_step(sde_drift, last_step=last_step, last_step_size=last_step_size) + + + def _sample(init, model, **model_kwargs): + xs = _sde.sample(init, model, **model_kwargs) + ts = th.ones(init.size(0), device=init.device) * t1 + x = last_step_fn(xs[-1], ts, model, **model_kwargs) + xs.append(x) + + assert len(xs) == num_steps, "Samples does not match the number of steps" + + return xs + + return _sample + + def sample_ode( + self, + *, + sampling_method="dopri5", + num_steps=50, + atol=1e-6, + rtol=1e-3, + reverse=False, + ): + """returns a sampling function with given ODE settings + Args: + - sampling_method: type of sampler used in solving the ODE; default to be Dopri5 + - num_steps: + - fixed solver (Euler, Heun): the actual number of integration steps performed + - adaptive solver (Dopri5): the number of datapoints saved during integration; produced by interpolation + - atol: absolute error tolerance for the solver + - rtol: relative error tolerance for the solver + - reverse: whether solving the ODE in reverse (data to noise); default to False + """ + if reverse: + drift = lambda x, t, model, **kwargs: self.drift(x, th.ones_like(t) * (1 - t), model, **kwargs) + else: + drift = self.drift + + t0, t1 = self.transport.check_interval( + self.transport.train_eps, + self.transport.sample_eps, + sde=False, + eval=True, + reverse=reverse, + last_step_size=0.0, + ) + + _ode = ode( + drift=drift, + t0=t0, + t1=t1, + sampler_type=sampling_method, + num_steps=num_steps, + atol=atol, + rtol=rtol, + ) + + return _ode.sample + + def sample_ode_likelihood( + self, + *, + sampling_method="dopri5", + num_steps=50, + atol=1e-6, + rtol=1e-3, + ): + + """returns a sampling function for calculating likelihood with given ODE settings + Args: + - sampling_method: type of sampler used in solving the ODE; default to be Dopri5 + - num_steps: + - fixed solver (Euler, Heun): the actual number of integration steps performed + - adaptive solver (Dopri5): the number of datapoints saved during integration; produced by interpolation + - atol: absolute error tolerance for the solver + - rtol: relative error tolerance for the solver + """ + def _likelihood_drift(x, t, model, **model_kwargs): + x, _ = x + eps = th.randint(2, x.size(), dtype=th.float, device=x.device) * 2 - 1 + t = th.ones_like(t) * (1 - t) + with th.enable_grad(): + x.requires_grad = True + grad = th.autograd.grad(th.sum(self.drift(x, t, model, **model_kwargs) * eps), x)[0] + logp_grad = th.sum(grad * eps, dim=tuple(range(1, len(x.size())))) + drift = self.drift(x, t, model, **model_kwargs) + return (-drift, logp_grad) + + t0, t1 = self.transport.check_interval( + self.transport.train_eps, + self.transport.sample_eps, + sde=False, + eval=True, + reverse=False, + last_step_size=0.0, + ) + + _ode = ode( + drift=_likelihood_drift, + t0=t0, + t1=t1, + sampler_type=sampling_method, + num_steps=num_steps, + atol=atol, + rtol=rtol, + ) + + def _sample_fn(x, model, **model_kwargs): + init_logp = th.zeros(x.size(0)).to(x) + input = (x, init_logp) + drift, delta_logp = _ode.sample(input, model, **model_kwargs) + drift, delta_logp = drift[-1], delta_logp[-1] + prior_logp = self.transport.prior_logp(drift) + logp = prior_logp - delta_logp + return logp, drift + + return _sample_fn \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/transport/utils.py 
b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..44646035531326b81883727f973900edb4eac494 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/transport/utils.py @@ -0,0 +1,29 @@ +import torch as th + +class EasyDict: + + def __init__(self, sub_dict): + for k, v in sub_dict.items(): + setattr(self, k, v) + + def __getitem__(self, key): + return getattr(self, key) + +def mean_flat(x): + """ + Take the mean over all non-batch dimensions. + """ + return th.mean(x, dim=list(range(1, len(x.size())))) + +def log_state(state): + result = [] + + sorted_state = dict(sorted(state.items())) + for key, value in sorted_state.items(): + # Check if the value is an instance of a class + if " 0 or pad_w > 0: + x = F.pad(x, (0, 0, 0, pad_w, 0, pad_h)) + Hp, Wp = H + pad_h, W + pad_w + + x = x.view(B, Hp // window_size, window_size, Wp // window_size, window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + return windows, (Hp, Wp) + + +def window_unpartition(windows, window_size, pad_hw, hw): + """ + Window unpartition into original sequences and removing padding. + Args: + x (tensor): input tokens with [B * num_windows, window_size, window_size, C]. + window_size (int): window size. + pad_hw (Tuple): padded height and width (Hp, Wp). + hw (Tuple): original height and width (H, W) before padding. + + Returns: + x: unpartitioned sequences with [B, H, W, C]. + """ + Hp, Wp = pad_hw + H, W = hw + B = windows.shape[0] // (Hp * Wp // window_size // window_size) + x = windows.view(B, Hp // window_size, Wp // window_size, window_size, window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, Hp, Wp, -1) + + if Hp > H or Wp > W: + x = x[:, :H, :W, :].contiguous() + return x + + +def get_rel_pos(q_size, k_size, rel_pos): + """ + Get relative positional embeddings according to the relative positions of + query and key sizes. + Args: + q_size (int): size of query q. + k_size (int): size of key k. + rel_pos (Tensor): relative position embeddings (L, C). + + Returns: + Extracted positional embeddings according to relative positions. + """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. + rel_pos_resized = F.interpolate( + rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1), + size=max_rel_dist, + mode="linear", + ) + rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0) + else: + rel_pos_resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. + q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0) + k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0) + relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0) + + return rel_pos_resized[relative_coords.long()] + + +def add_decomposed_rel_pos(attn, q, rel_pos_h, rel_pos_w, q_size, k_size): + """ + Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`. + https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py # noqa B950 + Args: + attn (Tensor): attention map. + q (Tensor): query q in the attention layer with shape (B, q_h * q_w, C). + rel_pos_h (Tensor): relative position embeddings (Lh, C) for height axis. + rel_pos_w (Tensor): relative position embeddings (Lw, C) for width axis. 
+        q_size (Tuple): spatial sequence size of query q with (q_h, q_w).
+        k_size (Tuple): spatial sequence size of key k with (k_h, k_w).
+
+    Returns:
+        attn (Tensor): attention map with added relative positional embeddings.
+    """
+    q_h, q_w = q_size
+    k_h, k_w = k_size
+    Rh = get_rel_pos(q_h, k_h, rel_pos_h)
+    Rw = get_rel_pos(q_w, k_w, rel_pos_w)
+
+    B, _, dim = q.shape
+    r_q = q.reshape(B, q_h, q_w, dim)
+    rel_h = torch.einsum("bhwc,hkc->bhwk", r_q, Rh)
+    rel_w = torch.einsum("bhwc,wkc->bhwk", r_q, Rw)
+
+    attn = (
+        attn.view(B, q_h, q_w, k_h, k_w) + rel_h[:, :, :, :, None] + rel_w[:, :, :, None, :]
+    ).view(B, q_h * q_w, k_h * k_w)
+
+    return attn
+
+
+def mean_flat(tensor):
+    return tensor.mean(dim=list(range(1, tensor.ndim)))
+
+
+#################################################################################
+#                        Token Masking and Unmasking                            #
+#################################################################################
+def get_mask(batch, length, mask_ratio, device, mask_type=None, data_info=None, extra_len=0):
+    """
+    Get the binary mask for the input sequence.
+    Args:
+        - batch: batch size
+        - length: sequence length
+        - mask_ratio: ratio of tokens to mask
+        - data_info: dictionary with info for reconstruction
+    return:
+        mask_dict with following keys:
+        - mask: binary mask, 0 is keep, 1 is remove
+        - ids_keep: indices of tokens to keep
+        - ids_restore: indices to restore the original order
+    """
+    assert mask_type in ['random', 'fft', 'laplacian', 'group']
+    mask = torch.ones([batch, length], device=device)
+    len_keep = int(length * (1 - mask_ratio)) - extra_len
+
+    if mask_type in ['random', 'group']:
+        noise = torch.rand(batch, length, device=device)  # noise in [0, 1]
+        ids_shuffle = torch.argsort(noise, dim=1)  # ascend: small is keep, large is remove
+        ids_restore = torch.argsort(ids_shuffle, dim=1)
+        # keep the first subset
+        ids_keep = ids_shuffle[:, :len_keep]
+        ids_removed = ids_shuffle[:, len_keep:]
+
+    elif mask_type in ['fft', 'laplacian']:
+        if 'strength' in data_info:
+            strength = data_info['strength']
+
+        else:
+            N = data_info['N'][0]
+            img = data_info['ori_img']
+            # get the spatial dimensions of the original image
+            _, C, H, W = img.shape
+            if mask_type == 'fft':
+                # reshape the image into patches: (3, H/N, N, W/N, N)
+                reshaped_image = img.reshape((batch, -1, H // N, N, W // N, N))
+                fft_image = torch.fft.fftn(reshaped_image, dim=(3, 5))
+                # take the magnitude and sum over patch dims to get the frequency strength
+                strength = torch.sum(torch.abs(fft_image), dim=(1, 3, 5)).reshape((batch, -1,))
+            elif mask_type == 'laplacian':
+                laplacian_kernel = torch.tensor([[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]], dtype=torch.float32).reshape(1, 1, 3, 3)
+                laplacian_kernel = laplacian_kernel.repeat(C, 1, 1, 1)
+                # reshape the image into patches: (3, H/N, N, W/N, N)
+                reshaped_image = img.reshape(-1, C, H // N, N, W // N, N).permute(0, 2, 4, 1, 3, 5).reshape(-1, C, N, N)
+                laplacian_response = F.conv2d(reshaped_image, laplacian_kernel, padding=1, groups=C)
+                strength = laplacian_response.sum(dim=[1, 2, 3]).reshape((batch, -1,))
+
+        # normalize the strengths, then sample token indices with torch.multinomial
+        probabilities = strength / (strength.max(dim=1)[0][:, None] + 1e-5)
+        ids_shuffle = torch.multinomial(probabilities.clip(1e-5, 1), length, replacement=False)
+        ids_keep = ids_shuffle[:, :len_keep]
+        ids_restore = torch.argsort(ids_shuffle, dim=1)
+        ids_removed = ids_shuffle[:, len_keep:]
+
+    mask[:, :len_keep] = 0
+    mask = torch.gather(mask, dim=1, index=ids_restore)
+
+    return {'mask': mask,
+            'ids_keep': ids_keep,
+            'ids_restore': ids_restore,
+            'ids_removed': ids_removed}
+
+
+def mask_out_token(x, ids_keep, ids_removed=None):
+ """ + Mask out the tokens specified by ids_keep. + Args: + - x: input sequence, [N, L, D] + - ids_keep: indices of tokens to keep + return: + - x_masked: masked sequence + """ + N, L, D = x.shape # batch, length, dim + x_remain = torch.gather(x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + if ids_removed is not None: + x_masked = torch.gather(x, dim=1, index=ids_removed.unsqueeze(-1).repeat(1, 1, D)) + return x_remain, x_masked + else: + return x_remain + + +def mask_tokens(x, mask_ratio): + """ + Perform per-sample random masking by per-sample shuffling. + Per-sample shuffling is done by argsort random noise. + x: [N, L, D], sequence + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = torch.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + ids_shuffle = torch.argsort(noise, dim=1) # ascend: small is keep, large is remove + ids_restore = torch.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = torch.gather(x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = torch.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + mask = torch.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore + + +def unmask_tokens(x, ids_restore, mask_token): + # x: [N, T, D] if extras == 0 (i.e., no cls token) else x: [N, T+1, D] + mask_tokens = mask_token.repeat(x.shape[0], ids_restore.shape[1] - x.shape[1], 1) + x = torch.cat([x, mask_tokens], dim=1) + x = torch.gather(x, dim=1, index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2])) # unshuffle + return x + + +# Parse 'None' to None and others to float value +def parse_float_none(s): + assert isinstance(s, str) + return None if s == 'None' else float(s) + + +#---------------------------------------------------------------------------- +# Parse a comma separated list of numbers or ranges and return a list of ints. +# Example: '1,2,5-10' returns [1, 2, 5, 6, 7, 8, 9, 10] + +def parse_int_list(s): + if isinstance(s, list): return s + ranges = [] + range_re = re.compile(r'^(\d+)-(\d+)$') + for p in s.split(','): + if m := range_re.match(p): + ranges.extend(range(int(m.group(1)), int(m.group(2))+1)) + else: + ranges.append(int(p)) + return ranges + + +def init_processes(fn, args): + """ Initialize the distributed environment. """ + os.environ['MASTER_ADDR'] = args.master_address + os.environ['MASTER_PORT'] = str(random.randint(2000, 6000)) + print(f'MASTER_ADDR = {os.environ["MASTER_ADDR"]}') + print(f'MASTER_PORT = {os.environ["MASTER_PORT"]}') + torch.cuda.set_device(args.local_rank) + dist.init_process_group(backend='nccl', init_method='env://', rank=args.global_rank, world_size=args.global_size) + fn(args) + if args.global_size > 1: + cleanup() + + +def mprint(*args, **kwargs): + """ + Print only from rank 0. + """ + if dist.get_rank() == 0: + print(*args, **kwargs) + + +def cleanup(): + """ + End DDP training. + """ + dist.barrier() + mprint("Done!") + dist.barrier() + dist.destroy_process_group() + + +#---------------------------------------------------------------------------- +# logging info. +class Logger(object): + """ + Redirect stderr to stdout, optionally print stdout to a file, + and optionally force flushing on both stdout and the file. 
+ """ + + def __init__(self, file_name=None, file_mode="w", should_flush=True): + self.file = None + + if file_name is not None: + self.file = open(file_name, file_mode) + + self.should_flush = should_flush + self.stdout = sys.stdout + self.stderr = sys.stderr + + sys.stdout = self + sys.stderr = self + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def write(self, text): + """Write text to stdout (and a file) and optionally flush.""" + if len(text) == 0: # workaround for a bug in VSCode debugger: sys.stdout.write(''); sys.stdout.flush() => crash + return + + if self.file is not None: + self.file.write(text) + + self.stdout.write(text) + + if self.should_flush: + self.flush() + + def flush(self): + """Flush written text to both stdout and a file, if open.""" + if self.file is not None: + self.file.flush() + + self.stdout.flush() + + def close(self): + """Flush, close possible files, and remove stdout/stderr mirroring.""" + self.flush() + + # if using multiple loggers, prevent closing in wrong order + if sys.stdout is self: + sys.stdout = self.stdout + if sys.stderr is self: + sys.stderr = self.stderr + + if self.file is not None: + self.file.close() + + +class StackedRandomGenerator: + def __init__(self, device, seeds): + super().__init__() + self.generators = [torch.Generator(device).manual_seed(int(seed) % (1 << 32)) for seed in seeds] + + def randn(self, size, **kwargs): + assert size[0] == len(self.generators) + return torch.stack([torch.randn(size[1:], generator=gen, **kwargs) for gen in self.generators]) + + def randn_like(self, input): + return self.randn(input.shape, dtype=input.dtype, layout=input.layout, device=input.device) + + def randint(self, *args, size, **kwargs): + assert size[0] == len(self.generators) + return torch.stack([torch.randint(*args, size=size[1:], generator=gen, **kwargs) for gen in self.generators]) + + +def prepare_prompt_ar(prompt, ratios, device='cpu', show=True): + # get aspect_ratio or ar + aspect_ratios = re.findall(r"--aspect_ratio\s+(\d+:\d+)", prompt) + ars = re.findall(r"--ar\s+(\d+:\d+)", prompt) + custom_hw = re.findall(r"--hw\s+(\d+:\d+)", prompt) + if show: + print("aspect_ratios:", aspect_ratios, "ars:", ars, "hws:", custom_hw) + prompt_clean = prompt.split("--aspect_ratio")[0].split("--ar")[0].split("--hw")[0] + if len(aspect_ratios) + len(ars) + len(custom_hw) == 0 and show: + print( "Wrong prompt format. Set to default ar: 1. change your prompt into format '--ar h:w or --hw h:w' for correct generating") + if len(aspect_ratios) != 0: + ar = float(aspect_ratios[0].split(':')[0]) / float(aspect_ratios[0].split(':')[1]) + elif len(ars) != 0: + ar = float(ars[0].split(':')[0]) / float(ars[0].split(':')[1]) + else: + ar = 1. 
+ closest_ratio = min(ratios.keys(), key=lambda ratio: abs(float(ratio) - ar)) + if len(custom_hw) != 0: + custom_hw = [float(custom_hw[0].split(':')[0]), float(custom_hw[0].split(':')[1])] + else: + custom_hw = ratios[closest_ratio] + default_hw = ratios[closest_ratio] + prompt_show = f'prompt: {prompt_clean.strip()}\nSize: --ar {closest_ratio}, --bin hw {ratios[closest_ratio]}, --custom hw {custom_hw}' + return prompt_clean, prompt_show, torch.tensor(default_hw, device=device)[None], torch.tensor([float(closest_ratio)], device=device)[None], torch.tensor(custom_hw, device=device)[None] + + +def resize_and_crop_tensor(samples: torch.Tensor, new_width: int, new_height: int): + orig_hw = torch.tensor([samples.shape[2], samples.shape[3]], dtype=torch.int) + custom_hw = torch.tensor([int(new_height), int(new_width)], dtype=torch.int) + + if (orig_hw != custom_hw).all(): + ratio = max(custom_hw[0] / orig_hw[0], custom_hw[1] / orig_hw[1]) + resized_width = int(orig_hw[1] * ratio) + resized_height = int(orig_hw[0] * ratio) + + transform = T.Compose([ + T.Resize((resized_height, resized_width)), + T.CenterCrop(custom_hw.tolist()) + ]) + return transform(samples) + else: + return samples + + +def resize_and_crop_img(img: Image, new_width, new_height): + orig_width, orig_height = img.size + + ratio = max(new_width/orig_width, new_height/orig_height) + resized_width = int(orig_width * ratio) + resized_height = int(orig_height * ratio) + + img = img.resize((resized_width, resized_height), Image.LANCZOS) + + left = (resized_width - new_width)/2 + top = (resized_height - new_height)/2 + right = (resized_width + new_width)/2 + bottom = (resized_height + new_height)/2 + + img = img.crop((left, top, right, bottom)) + + return img + + + +def mask_feature(emb, mask): + if emb.shape[0] == 1: + keep_index = mask.sum().item() + return emb[:, :, :keep_index, :], keep_index + else: + masked_feature = emb * mask[:, None, :, None] + return masked_feature, emb.shape[2] \ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/__pycache__/pos_embed.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/__pycache__/pos_embed.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1f0aef8b3b52b312e61c99f578ff3b74beb494e2 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/__pycache__/pos_embed.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__init__.py b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..25e3d48a162760260826080f6366838e83e26878 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__init__.py @@ -0,0 +1,4 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
+
+from .curope2d import cuRoPE2D
diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cffface1ff17004eacfc3774fabb1fd22e24274f
Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__pycache__/__init__.cpython-310.pyc differ
diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__pycache__/curope2d.cpython-310.pyc b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__pycache__/curope2d.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..027cbe9c82beb7cb3d68d66518cb0adbb3f252e9
Binary files /dev/null and b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/__pycache__/curope2d.cpython-310.pyc differ
diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/curope.cpp b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/curope.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..8fe9058e05aa1bf3f37b0d970edc7312bc68455b
--- /dev/null
+++ b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/curope.cpp
@@ -0,0 +1,69 @@
+/*
+  Copyright (C) 2022-present Naver Corporation. All rights reserved.
+  Licensed under CC BY-NC-SA 4.0 (non-commercial use only).
+*/
+
+#include <torch/extension.h>
+
+// forward declaration
+void rope_2d_cuda( torch::Tensor tokens, const torch::Tensor pos, const float base, const float fwd );
+
+void rope_2d_cpu( torch::Tensor tokens, const torch::Tensor positions, const float base, const float fwd )
+{
+    const int B = tokens.size(0);
+    const int N = tokens.size(1);
+    const int H = tokens.size(2);
+    const int D = tokens.size(3) / 4;
+
+    auto tok = tokens.accessor<float, 4>();
+    auto pos = positions.accessor<int64_t, 3>();
+
+    for (int b = 0; b < B; b++) {
+        for (int x = 0; x < 2; x++) { // y and then x (2d)
+            for (int n = 0; n < N; n++) {
+
+                // grab the token position
+                const int p = pos[b][n][x];
+
+                for (int h = 0; h < H; h++) {
+                    for (int d = 0; d < D; d++) {
+                        // grab the two values
+                        float u = tok[b][n][h][d+0+x*2*D];
+                        float v = tok[b][n][h][d+D+x*2*D];
+
+                        // grab the cos,sin
+                        const float inv_freq = fwd * p / powf(base, d/float(D));
+                        float c = cosf(inv_freq);
+                        float s = sinf(inv_freq);
+
+                        // write the result
+                        tok[b][n][h][d+0+x*2*D] = u*c - v*s;
+                        tok[b][n][h][d+D+x*2*D] = v*c + u*s;
+                    }
+                }
+            }
+        }
+    }
+}
+
+void rope_2d( torch::Tensor tokens,          // B,N,H,D
+              const torch::Tensor positions, // B,N,2
+              const float base,
+              const float fwd )
+{
+    TORCH_CHECK(tokens.dim() == 4, "tokens must have 4 dimensions");
+    TORCH_CHECK(positions.dim() == 3, "positions must have 3 dimensions");
+    TORCH_CHECK(tokens.size(0) == positions.size(0), "batch size differs between tokens & positions");
+    TORCH_CHECK(tokens.size(1) == positions.size(1), "seq_length differs between tokens & positions");
+    TORCH_CHECK(positions.size(2) == 2, "positions.shape[2] must be equal to 2");
+    TORCH_CHECK(tokens.is_cuda() == positions.is_cuda(), "tokens and positions are not on the same device" );
+
+    if (tokens.is_cuda())
+        rope_2d_cuda( tokens, positions, base, fwd );
+    else
+        rope_2d_cpu( tokens, positions, base, fwd );
+}
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
+  m.def("rope_2d", &rope_2d, "RoPE 2d forward/backward");
+}
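For orientation, a minimal sketch of driving the `rope_2d` binding above directly from Python once the extension has been built (via the `setup.py` shown further below). The tensor shapes follow the `TORCH_CHECK`s in `rope_2d`; the concrete sizes are illustrative only:

```python
import torch
import curope  # the extension compiled from curope.cpp / kernels.cu

B, N, H, D = 2, 16, 8, 64  # D must be a multiple of 4: [u_Y | v_Y | u_X | v_X]
tokens = torch.randn(B, N, H, D, device="cuda")  # rotated in place
positions = torch.randint(0, 32, (B, N, 2), dtype=torch.int64, device="cuda")  # per-token (y, x)

# base=100.0 and fwd=1.0 mirror cuRoPE2D's defaults; fwd=-1.0 applies the inverse rotation
curope.rope_2d(tokens, positions, 100.0, 1.0)
```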
diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/curope2d.py b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/curope2d.py
new file mode 100644
index 0000000000000000000000000000000000000000..a49c12f8c529e9a889b5ac20c5767158f238e17d
--- /dev/null
+++ b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/curope2d.py
@@ -0,0 +1,40 @@
+# Copyright (C) 2022-present Naver Corporation. All rights reserved.
+# Licensed under CC BY-NC-SA 4.0 (non-commercial use only).
+
+import torch
+
+try:
+    import curope as _kernels  # run `python setup.py install`
+except ModuleNotFoundError:
+    from . import curope as _kernels  # run `python setup.py build_ext --inplace`
+
+
+class cuRoPE2D_func (torch.autograd.Function):
+
+    @staticmethod
+    def forward(ctx, tokens, positions, base, F0=1):
+        ctx.save_for_backward(positions)
+        ctx.saved_base = base
+        ctx.saved_F0 = F0
+        # tokens = tokens.clone() # uncomment this if inplace doesn't work
+        _kernels.rope_2d( tokens, positions, base, F0 )
+        ctx.mark_dirty(tokens)
+        return tokens
+
+    @staticmethod
+    def backward(ctx, grad_res):
+        positions, base, F0 = ctx.saved_tensors[0], ctx.saved_base, ctx.saved_F0
+        _kernels.rope_2d( grad_res, positions, base, -F0 )
+        ctx.mark_dirty(grad_res)
+        return grad_res, None, None, None
+
+
+class cuRoPE2D(torch.nn.Module):
+    def __init__(self, freq=100.0, F0=1.0):
+        super().__init__()
+        self.base = freq
+        self.F0 = F0
+
+    def forward(self, tokens, positions):
+        cuRoPE2D_func.apply( tokens.transpose(1,2), positions, self.base, self.F0 )
+        return tokens
\ No newline at end of file
diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/kernels.cu b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/kernels.cu
new file mode 100644
index 0000000000000000000000000000000000000000..7156cd1bb935cb1f0be45e58add53f9c21505c20
--- /dev/null
+++ b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/kernels.cu
@@ -0,0 +1,108 @@
+/*
+  Copyright (C) 2022-present Naver Corporation. All rights reserved.
+  Licensed under CC BY-NC-SA 4.0 (non-commercial use only).
+*/
+
+#include <torch/extension.h>
+#include <cuda.h>
+#include <cuda_runtime.h>
+#include <vector>
+
+#define CHECK_CUDA(tensor) {\
+    TORCH_CHECK((tensor).is_cuda(), #tensor " is not in cuda memory"); \
+    TORCH_CHECK((tensor).is_contiguous(), #tensor " is not contiguous"); }
+void CHECK_KERNEL() {auto error = cudaGetLastError(); TORCH_CHECK( error == cudaSuccess, cudaGetErrorString(error));}
+
+
+template < typename scalar_t >
+__global__ void rope_2d_cuda_kernel(
+        //scalar_t* __restrict__ tokens,
+        torch::PackedTensorAccessor32<scalar_t,4,torch::RestrictPtrTraits> tokens,
+        const int64_t* __restrict__ pos,
+        const float base,
+        const float fwd )
+        // const int N, const int H, const int D )
+{
+    // tokens shape = (B, N, H, D)
+    const int N = tokens.size(1);
+    const int H = tokens.size(2);
+    const int D = tokens.size(3);
+
+    // each block updates a single token, for all heads
+    // each thread takes care of a single output
+    extern __shared__ float shared[];
+    float* shared_inv_freq = shared + D;
+
+    const int b = blockIdx.x / N;
+    const int n = blockIdx.x % N;
+
+    const int Q = D / 4;
+    // one token = [0..Q : Q..2Q : 2Q..3Q : 3Q..D]
+    //              u_Y    v_Y    u_X      v_X
+
+    // shared memory: first, compute inv_freq
+    if (threadIdx.x < Q)
+        shared_inv_freq[threadIdx.x] = fwd / powf(base, threadIdx.x/float(Q));
+    __syncthreads();
+
+    // start of X or Y part
+    const int X = threadIdx.x < D/2 ? 0 : 1;
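+    // X == 0: this thread rotates a (u_Y, v_Y) pair using the token's y coordinate;
+    // X == 1: it rotates a (u_X, v_X) pair using the x coordinate (layout shown above).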
+    const int m = (X*D/2) + (threadIdx.x % Q); // index of u_Y or u_X
+
+    // grab the cos,sin appropriate for me
+    const float freq = pos[blockIdx.x*2+X] * shared_inv_freq[threadIdx.x % Q];
+    const float cos = cosf(freq);
+    const float sin = sinf(freq);
+    /*
+    float* shared_cos_sin = shared + D + D/4;
+    if ((threadIdx.x % (D/2)) < Q)
+        shared_cos_sin[m+0] = cosf(freq);
+    else
+        shared_cos_sin[m+Q] = sinf(freq);
+    __syncthreads();
+    const float cos = shared_cos_sin[m+0];
+    const float sin = shared_cos_sin[m+Q];
+    */
+
+    for (int h = 0; h < H; h++)
+    {
+        // then, load the whole token for this head into shared memory
+        shared[threadIdx.x] = tokens[b][n][h][threadIdx.x];
+        __syncthreads();
+
+        const float u = shared[m];
+        const float v = shared[m+Q];
+
+        // write output
+        if ((threadIdx.x % (D/2)) < Q)
+            tokens[b][n][h][threadIdx.x] = u*cos - v*sin;
+        else
+            tokens[b][n][h][threadIdx.x] = v*cos + u*sin;
+    }
+}
+
+void rope_2d_cuda( torch::Tensor tokens, const torch::Tensor pos, const float base, const float fwd )
+{
+    const int B = tokens.size(0); // batch size
+    const int N = tokens.size(1); // sequence length
+    const int H = tokens.size(2); // number of heads
+    const int D = tokens.size(3); // dimension per head
+
+    TORCH_CHECK(tokens.stride(3) == 1 && tokens.stride(2) == D, "tokens are not contiguous");
+    TORCH_CHECK(pos.is_contiguous(), "positions are not contiguous");
+    TORCH_CHECK(pos.size(0) == B && pos.size(1) == N && pos.size(2) == 2, "bad pos.shape");
+    TORCH_CHECK(D % 4 == 0, "token dim must be multiple of 4");
+
+    // one block per (batch, token) pair, one thread per channel of the head dimension
+    const int THREADS_PER_BLOCK = D;
+    const int N_BLOCKS = B * N; // each block takes care of H*D values
+    const int SHARED_MEM = sizeof(float) * (D + D/4);
+
+    AT_DISPATCH_FLOATING_TYPES_AND_HALF(tokens.type(), "rope_2d_cuda", ([&] {
+        rope_2d_cuda_kernel<scalar_t> <<<N_BLOCKS, THREADS_PER_BLOCK, SHARED_MEM>>> (
+            //tokens.data_ptr<scalar_t>(),
+            tokens.packed_accessor32<scalar_t,4,torch::RestrictPtrTraits>(),
+            pos.data_ptr<int64_t>(),
+            base, fwd); //, N, H, D );
+    }));
+}
diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/setup.py b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..230632ed05e309200e8f93a3a852072333975009
--- /dev/null
+++ b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/curope/setup.py
@@ -0,0 +1,34 @@
+# Copyright (C) 2022-present Naver Corporation. All rights reserved.
+# Licensed under CC BY-NC-SA 4.0 (non-commercial use only).
+ +from setuptools import setup +from torch import cuda +from torch.utils.cpp_extension import BuildExtension, CUDAExtension + +# compile for all possible CUDA architectures +all_cuda_archs = cuda.get_gencode_flags().replace('compute=','arch=').split() +# alternatively, you can list cuda archs that you want, eg: +# all_cuda_archs = [ + # '-gencode', 'arch=compute_70,code=sm_70', + # '-gencode', 'arch=compute_75,code=sm_75', + # '-gencode', 'arch=compute_80,code=sm_80', + # '-gencode', 'arch=compute_86,code=sm_86' +# ] + +setup( + name = 'curope', + ext_modules = [ + CUDAExtension( + name='curope', + sources=[ + "curope.cpp", + "kernels.cu", + ], + extra_compile_args = dict( + nvcc=['-O3','--ptxas-options=-v',"--use_fast_math"]+all_cuda_archs, + cxx=['-O3']) + ) + ], + cmdclass = { + 'build_ext': BuildExtension + }) diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/modules.py b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/modules.py new file mode 100644 index 0000000000000000000000000000000000000000..2c4106d4b4a35dc29d613a4c5998615c222e9af5 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/modules.py @@ -0,0 +1,1560 @@ +from importlib import import_module + +import numpy as np +import torch + +import os +import json + +from dataclasses import dataclass +from einops import rearrange, repeat +from typing import Any, Dict, Optional, Tuple, Callable +from diffusers.models import Transformer2DModel +from diffusers.utils import USE_PEFT_BACKEND, BaseOutput, deprecate, is_xformers_available +from diffusers.models.embeddings import get_1d_sincos_pos_embed_from_grid, ImagePositionalEmbeddings, CaptionProjection, \ + PatchEmbed, CombinedTimestepSizeEmbeddings +from diffusers.configuration_utils import ConfigMixin, register_to_config +from diffusers.models.modeling_utils import ModelMixin +from diffusers.models.attention import BasicTransformerBlock +from diffusers.models.lora import LoRACompatibleConv, LoRACompatibleLinear + +import torch +import torch.nn.functional as F +from torch import nn +from diffusers.utils.torch_utils import maybe_allow_in_graph +from diffusers.models.embeddings import SinusoidalPositionalEmbedding +from diffusers.models.normalization import AdaLayerNorm, AdaLayerNormZero +from diffusers.models.attention_processor import SpatialNorm, LORA_ATTENTION_PROCESSORS, \ + CustomDiffusionAttnProcessor, CustomDiffusionXFormersAttnProcessor, CustomDiffusionAttnProcessor2_0, \ + AttnAddedKVProcessor, AttnAddedKVProcessor2_0, SlicedAttnAddedKVProcessor, XFormersAttnAddedKVProcessor, \ + LoRAAttnAddedKVProcessor, LoRAXFormersAttnProcessor, XFormersAttnProcessor, LoRAAttnProcessor2_0, LoRAAttnProcessor, \ + AttnProcessor, SlicedAttnProcessor, logger +from diffusers.models.activations import GEGLU, GELU, ApproximateGELU + +from dataclasses import dataclass + +from torch import nn + +from ..utils.pos_embed import get_2d_sincos_pos_embed + +if is_xformers_available(): + import xformers + import xformers.ops +else: + xformers = None + + + +class PatchEmbed(nn.Module): + """2D Image to Patch Embedding""" + + def __init__( + self, + height=224, + width=224, + patch_size=16, + in_channels=3, + embed_dim=768, + layer_norm=False, + flatten=True, + bias=True, + interpolation_scale=1, + ): + super().__init__() + + num_patches = (height // patch_size) * (width // patch_size) + self.flatten = flatten + self.layer_norm = layer_norm + + self.proj = nn.Conv2d( + in_channels, embed_dim, kernel_size=(patch_size, 
patch_size), stride=patch_size, bias=bias + ) + if layer_norm: + self.norm = nn.LayerNorm(embed_dim, elementwise_affine=False, eps=1e-6) + else: + self.norm = None + + self.patch_size = patch_size + # See: + # https://github.com/PixArt-alpha/PixArt-alpha/blob/0f55e922376d8b797edd44d25d0e7464b260dcab/diffusion/model/nets/PixArtMS.py#L161 + self.height, self.width = height // patch_size, width // patch_size + self.base_size = height // patch_size + self.interpolation_scale = interpolation_scale + pos_embed = get_2d_sincos_pos_embed( + embed_dim, int(num_patches**0.5), base_size=self.base_size, interpolation_scale=self.interpolation_scale + ) + self.register_buffer("pos_embed", torch.from_numpy(pos_embed).float().unsqueeze(0), persistent=False) + + def forward(self, latent): + height, width = latent.shape[-2] // self.patch_size, latent.shape[-1] // self.patch_size + + latent = self.proj(latent) + if self.flatten: + latent = latent.flatten(2).transpose(1, 2) # BCHW -> BNC + if self.layer_norm: + latent = self.norm(latent) + + # Interpolate positional embeddings if needed. + # (For PixArt-Alpha: https://github.com/PixArt-alpha/PixArt-alpha/blob/0f55e922376d8b797edd44d25d0e7464b260dcab/diffusion/model/nets/PixArtMS.py#L162C151-L162C160) + if self.height != height or self.width != width: + pos_embed = get_2d_sincos_pos_embed( + embed_dim=self.pos_embed.shape[-1], + grid_size=(height, width), + base_size=self.base_size, + interpolation_scale=self.interpolation_scale, + ) + pos_embed = torch.from_numpy(pos_embed) + pos_embed = pos_embed.float().unsqueeze(0).to(latent.device) + else: + pos_embed = self.pos_embed + + return (latent + pos_embed).to(latent.dtype) + +@maybe_allow_in_graph +class Attention(nn.Module): + r""" + A cross attention layer. + + Parameters: + query_dim (`int`): + The number of channels in the query. + cross_attention_dim (`int`, *optional*): + The number of channels in the encoder_hidden_states. If not given, defaults to `query_dim`. + heads (`int`, *optional*, defaults to 8): + The number of heads to use for multi-head attention. + dim_head (`int`, *optional*, defaults to 64): + The number of channels in each head. + dropout (`float`, *optional*, defaults to 0.0): + The dropout probability to use. + bias (`bool`, *optional*, defaults to False): + Set to `True` for the query, key, and value linear layers to contain a bias parameter. + upcast_attention (`bool`, *optional*, defaults to False): + Set to `True` to upcast the attention computation to `float32`. + upcast_softmax (`bool`, *optional*, defaults to False): + Set to `True` to upcast the softmax computation to `float32`. + cross_attention_norm (`str`, *optional*, defaults to `None`): + The type of normalization to use for the cross attention. Can be `None`, `layer_norm`, or `group_norm`. + cross_attention_norm_num_groups (`int`, *optional*, defaults to 32): + The number of groups to use for the group norm in the cross attention. + added_kv_proj_dim (`int`, *optional*, defaults to `None`): + The number of channels to use for the added key and value projections. If `None`, no projection is used. + norm_num_groups (`int`, *optional*, defaults to `None`): + The number of groups to use for the group norm in the attention. + spatial_norm_dim (`int`, *optional*, defaults to `None`): + The number of channels to use for the spatial normalization. + out_bias (`bool`, *optional*, defaults to `True`): + Set to `True` to use a bias in the output linear layer. 
+ scale_qk (`bool`, *optional*, defaults to `True`): + Set to `True` to scale the query and key by `1 / sqrt(dim_head)`. + only_cross_attention (`bool`, *optional*, defaults to `False`): + Set to `True` to only use cross attention and not added_kv_proj_dim. Can only be set to `True` if + `added_kv_proj_dim` is not `None`. + eps (`float`, *optional*, defaults to 1e-5): + An additional value added to the denominator in group normalization that is used for numerical stability. + rescale_output_factor (`float`, *optional*, defaults to 1.0): + A factor to rescale the output by dividing it with this value. + residual_connection (`bool`, *optional*, defaults to `False`): + Set to `True` to add the residual connection to the output. + _from_deprecated_attn_block (`bool`, *optional*, defaults to `False`): + Set to `True` if the attention block is loaded from a deprecated state dict. + processor (`AttnProcessor`, *optional*, defaults to `None`): + The attention processor to use. If `None`, defaults to `AttnProcessor2_0` if `torch 2.x` is used and + `AttnProcessor` otherwise. + """ + + def __init__( + self, + query_dim: int, + cross_attention_dim: Optional[int] = None, + heads: int = 8, + dim_head: int = 64, + dropout: float = 0.0, + bias: bool = False, + upcast_attention: bool = False, + upcast_softmax: bool = False, + cross_attention_norm: Optional[str] = None, + cross_attention_norm_num_groups: int = 32, + added_kv_proj_dim: Optional[int] = None, + norm_num_groups: Optional[int] = None, + spatial_norm_dim: Optional[int] = None, + out_bias: bool = True, + scale_qk: bool = True, + only_cross_attention: bool = False, + eps: float = 1e-5, + rescale_output_factor: float = 1.0, + residual_connection: bool = False, + _from_deprecated_attn_block: bool = False, + processor: Optional["AttnProcessor"] = None, + attention_mode: str = 'xformers', + ): + super().__init__() + self.inner_dim = dim_head * heads + self.cross_attention_dim = cross_attention_dim if cross_attention_dim is not None else query_dim + self.upcast_attention = upcast_attention + self.upcast_softmax = upcast_softmax + self.rescale_output_factor = rescale_output_factor + self.residual_connection = residual_connection + self.dropout = dropout + + # we make use of this private variable to know whether this class is loaded + # with an deprecated state dict so that we can convert it on the fly + self._from_deprecated_attn_block = _from_deprecated_attn_block + + self.scale_qk = scale_qk + self.scale = dim_head**-0.5 if self.scale_qk else 1.0 + + self.heads = heads + # for slice_size > 0 the attention score computation + # is split across the batch axis to save memory + # You can set slice_size with `set_attention_slice` + self.sliceable_head_dim = heads + + self.added_kv_proj_dim = added_kv_proj_dim + self.only_cross_attention = only_cross_attention + + if self.added_kv_proj_dim is None and self.only_cross_attention: + raise ValueError( + "`only_cross_attention` can only be set to True if `added_kv_proj_dim` is not None. Make sure to set either `only_cross_attention=False` or define `added_kv_proj_dim`." 
+ ) + + if norm_num_groups is not None: + self.group_norm = nn.GroupNorm(num_channels=query_dim, num_groups=norm_num_groups, eps=eps, affine=True) + else: + self.group_norm = None + + if spatial_norm_dim is not None: + self.spatial_norm = SpatialNorm(f_channels=query_dim, zq_channels=spatial_norm_dim) + else: + self.spatial_norm = None + + if cross_attention_norm is None: + self.norm_cross = None + elif cross_attention_norm == "layer_norm": + self.norm_cross = nn.LayerNorm(self.cross_attention_dim) + elif cross_attention_norm == "group_norm": + if self.added_kv_proj_dim is not None: + # The given `encoder_hidden_states` are initially of shape + # (batch_size, seq_len, added_kv_proj_dim) before being projected + # to (batch_size, seq_len, cross_attention_dim). The norm is applied + # before the projection, so we need to use `added_kv_proj_dim` as + # the number of channels for the group norm. + norm_cross_num_channels = added_kv_proj_dim + else: + norm_cross_num_channels = self.cross_attention_dim + + self.norm_cross = nn.GroupNorm( + num_channels=norm_cross_num_channels, num_groups=cross_attention_norm_num_groups, eps=1e-5, affine=True + ) + else: + raise ValueError( + f"unknown cross_attention_norm: {cross_attention_norm}. Should be None, 'layer_norm' or 'group_norm'" + ) + + if USE_PEFT_BACKEND: + linear_cls = nn.Linear + else: + linear_cls = LoRACompatibleLinear + + self.to_q = linear_cls(query_dim, self.inner_dim, bias=bias) + + if not self.only_cross_attention: + # only relevant for the `AddedKVProcessor` classes + self.to_k = linear_cls(self.cross_attention_dim, self.inner_dim, bias=bias) + self.to_v = linear_cls(self.cross_attention_dim, self.inner_dim, bias=bias) + else: + self.to_k = None + self.to_v = None + + if self.added_kv_proj_dim is not None: + self.add_k_proj = linear_cls(added_kv_proj_dim, self.inner_dim) + self.add_v_proj = linear_cls(added_kv_proj_dim, self.inner_dim) + + self.to_out = nn.ModuleList([]) + self.to_out.append(linear_cls(self.inner_dim, query_dim, bias=out_bias)) + self.to_out.append(nn.Dropout(dropout)) + + # set attention processor + # We use the AttnProcessor2_0 by default when torch 2.x is used which uses + # torch.nn.functional.scaled_dot_product_attention for native Flash/memory_efficient_attention + # but only if it has the default `scale` argument. TODO remove scale_qk check when we move to torch 2.1 + if processor is None: + processor = ( + AttnProcessor2_0(attention_mode) if hasattr(F, "scaled_dot_product_attention") and self.scale_qk else AttnProcessor() + ) + self.set_processor(processor) + + def set_use_memory_efficient_attention_xformers( + self, use_memory_efficient_attention_xformers: bool, attention_op: Optional[Callable] = None + ) -> None: + r""" + Set whether to use memory efficient attention from `xformers` or not. + + Args: + use_memory_efficient_attention_xformers (`bool`): + Whether to use memory efficient attention from `xformers` or not. + attention_op (`Callable`, *optional*): + The attention operation to use. Defaults to `None` which uses the default attention operation from + `xformers`. 
+ """ + is_lora = hasattr(self, "processor") and isinstance( + self.processor, + LORA_ATTENTION_PROCESSORS, + ) + is_custom_diffusion = hasattr(self, "processor") and isinstance( + self.processor, + (CustomDiffusionAttnProcessor, CustomDiffusionXFormersAttnProcessor, CustomDiffusionAttnProcessor2_0), + ) + is_added_kv_processor = hasattr(self, "processor") and isinstance( + self.processor, + ( + AttnAddedKVProcessor, + AttnAddedKVProcessor2_0, + SlicedAttnAddedKVProcessor, + XFormersAttnAddedKVProcessor, + LoRAAttnAddedKVProcessor, + ), + ) + + if use_memory_efficient_attention_xformers: + if is_added_kv_processor and (is_lora or is_custom_diffusion): + raise NotImplementedError( + f"Memory efficient attention is currently not supported for LoRA or custom diffusion for attention processor type {self.processor}" + ) + if not is_xformers_available(): + raise ModuleNotFoundError( + ( + "Refer to https://github.com/facebookresearch/xformers for more information on how to install" + " xformers" + ), + name="xformers", + ) + elif not torch.cuda.is_available(): + raise ValueError( + "torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is" + " only available for GPU " + ) + else: + try: + # Make sure we can run the memory efficient attention + _ = xformers.ops.memory_efficient_attention( + torch.randn((1, 2, 40), device="cuda"), + torch.randn((1, 2, 40), device="cuda"), + torch.randn((1, 2, 40), device="cuda"), + ) + except Exception as e: + raise e + + if is_lora: + # TODO (sayakpaul): should we throw a warning if someone wants to use the xformers + # variant when using PT 2.0 now that we have LoRAAttnProcessor2_0? + processor = LoRAXFormersAttnProcessor( + hidden_size=self.processor.hidden_size, + cross_attention_dim=self.processor.cross_attention_dim, + rank=self.processor.rank, + attention_op=attention_op, + ) + processor.load_state_dict(self.processor.state_dict()) + processor.to(self.processor.to_q_lora.up.weight.device) + elif is_custom_diffusion: + processor = CustomDiffusionXFormersAttnProcessor( + train_kv=self.processor.train_kv, + train_q_out=self.processor.train_q_out, + hidden_size=self.processor.hidden_size, + cross_attention_dim=self.processor.cross_attention_dim, + attention_op=attention_op, + ) + processor.load_state_dict(self.processor.state_dict()) + if hasattr(self.processor, "to_k_custom_diffusion"): + processor.to(self.processor.to_k_custom_diffusion.weight.device) + elif is_added_kv_processor: + # TODO(Patrick, Suraj, William) - currently xformers doesn't work for UnCLIP + # which uses this type of cross attention ONLY because the attention mask of format + # [0, ..., -10.000, ..., 0, ...,] is not supported + # throw warning + logger.info( + "Memory efficient attention with `xformers` might currently not work correctly if an attention mask is required for the attention operation." 
+ ) + processor = XFormersAttnAddedKVProcessor(attention_op=attention_op) + else: + processor = XFormersAttnProcessor(attention_op=attention_op) + else: + if is_lora: + attn_processor_class = ( + LoRAAttnProcessor2_0 if hasattr(F, "scaled_dot_product_attention") else LoRAAttnProcessor + ) + processor = attn_processor_class( + hidden_size=self.processor.hidden_size, + cross_attention_dim=self.processor.cross_attention_dim, + rank=self.processor.rank, + ) + processor.load_state_dict(self.processor.state_dict()) + processor.to(self.processor.to_q_lora.up.weight.device) + elif is_custom_diffusion: + attn_processor_class = ( + CustomDiffusionAttnProcessor2_0 + if hasattr(F, "scaled_dot_product_attention") + else CustomDiffusionAttnProcessor + ) + processor = attn_processor_class( + train_kv=self.processor.train_kv, + train_q_out=self.processor.train_q_out, + hidden_size=self.processor.hidden_size, + cross_attention_dim=self.processor.cross_attention_dim, + ) + processor.load_state_dict(self.processor.state_dict()) + if hasattr(self.processor, "to_k_custom_diffusion"): + processor.to(self.processor.to_k_custom_diffusion.weight.device) + else: + # set attention processor + # We use the AttnProcessor2_0 by default when torch 2.x is used which uses + # torch.nn.functional.scaled_dot_product_attention for native Flash/memory_efficient_attention + # but only if it has the default `scale` argument. TODO remove scale_qk check when we move to torch 2.1 + processor = ( + AttnProcessor2_0() + if hasattr(F, "scaled_dot_product_attention") and self.scale_qk + else AttnProcessor() + ) + + self.set_processor(processor) + + def set_attention_slice(self, slice_size: int) -> None: + r""" + Set the slice size for attention computation. + + Args: + slice_size (`int`): + The slice size for attention computation. + """ + if slice_size is not None and slice_size > self.sliceable_head_dim: + raise ValueError(f"slice_size {slice_size} has to be smaller or equal to {self.sliceable_head_dim}.") + + if slice_size is not None and self.added_kv_proj_dim is not None: + processor = SlicedAttnAddedKVProcessor(slice_size) + elif slice_size is not None: + processor = SlicedAttnProcessor(slice_size) + elif self.added_kv_proj_dim is not None: + processor = AttnAddedKVProcessor() + else: + # set attention processor + # We use the AttnProcessor2_0 by default when torch 2.x is used which uses + # torch.nn.functional.scaled_dot_product_attention for native Flash/memory_efficient_attention + # but only if it has the default `scale` argument. TODO remove scale_qk check when we move to torch 2.1 + processor = ( + AttnProcessor2_0() if hasattr(F, "scaled_dot_product_attention") and self.scale_qk else AttnProcessor() + ) + + self.set_processor(processor) + + def set_processor(self, processor: "AttnProcessor", _remove_lora: bool = False) -> None: + r""" + Set the attention processor to use. + + Args: + processor (`AttnProcessor`): + The attention processor to use. + _remove_lora (`bool`, *optional*, defaults to `False`): + Set to `True` to remove LoRA layers from the model. + """ + if not USE_PEFT_BACKEND and hasattr(self, "processor") and _remove_lora and self.to_q.lora_layer is not None: + deprecate( + "set_processor to offload LoRA", + "0.26.0", + "In detail, removing LoRA layers via calling `set_default_attn_processor` is deprecated. 
Please make sure to call `pipe.unload_lora_weights()` instead.", + ) + # TODO(Patrick, Sayak) - this can be deprecated once PEFT LoRA integration is complete + # We need to remove all LoRA layers + # Don't forget to remove ALL `_remove_lora` from the codebase + for module in self.modules(): + if hasattr(module, "set_lora_layer"): + module.set_lora_layer(None) + + # if current processor is in `self._modules` and if passed `processor` is not, we need to + # pop `processor` from `self._modules` + if ( + hasattr(self, "processor") + and isinstance(self.processor, torch.nn.Module) + and not isinstance(processor, torch.nn.Module) + ): + logger.info(f"You are removing possibly trained weights of {self.processor} with {processor}") + self._modules.pop("processor") + + self.processor = processor + + def get_processor(self, return_deprecated_lora: bool = False) -> "AttentionProcessor": + r""" + Get the attention processor in use. + + Args: + return_deprecated_lora (`bool`, *optional*, defaults to `False`): + Set to `True` to return the deprecated LoRA attention processor. + + Returns: + "AttentionProcessor": The attention processor in use. + """ + if not return_deprecated_lora: + return self.processor + + # TODO(Sayak, Patrick). The rest of the function is needed to ensure backwards compatible + # serialization format for LoRA Attention Processors. It should be deleted once the integration + # with PEFT is completed. + is_lora_activated = { + name: module.lora_layer is not None + for name, module in self.named_modules() + if hasattr(module, "lora_layer") + } + + # 1. if no layer has a LoRA activated we can return the processor as usual + if not any(is_lora_activated.values()): + return self.processor + + # If doesn't apply LoRA do `add_k_proj` or `add_v_proj` + is_lora_activated.pop("add_k_proj", None) + is_lora_activated.pop("add_v_proj", None) + # 2. else it is not posssible that only some layers have LoRA activated + if not all(is_lora_activated.values()): + raise ValueError( + f"Make sure that either all layers or no layers have LoRA activated, but have {is_lora_activated}" + ) + + # 3. 
And we need to merge the current LoRA layers into the corresponding LoRA attention processor + non_lora_processor_cls_name = self.processor.__class__.__name__ + lora_processor_cls = getattr(import_module(__name__), "LoRA" + non_lora_processor_cls_name) + + hidden_size = self.inner_dim + + # now create a LoRA attention processor from the LoRA layers + if lora_processor_cls in [LoRAAttnProcessor, LoRAAttnProcessor2_0, LoRAXFormersAttnProcessor]: + kwargs = { + "cross_attention_dim": self.cross_attention_dim, + "rank": self.to_q.lora_layer.rank, + "network_alpha": self.to_q.lora_layer.network_alpha, + "q_rank": self.to_q.lora_layer.rank, + "q_hidden_size": self.to_q.lora_layer.out_features, + "k_rank": self.to_k.lora_layer.rank, + "k_hidden_size": self.to_k.lora_layer.out_features, + "v_rank": self.to_v.lora_layer.rank, + "v_hidden_size": self.to_v.lora_layer.out_features, + "out_rank": self.to_out[0].lora_layer.rank, + "out_hidden_size": self.to_out[0].lora_layer.out_features, + } + + if hasattr(self.processor, "attention_op"): + kwargs["attention_op"] = self.processor.attention_op + + lora_processor = lora_processor_cls(hidden_size, **kwargs) + lora_processor.to_q_lora.load_state_dict(self.to_q.lora_layer.state_dict()) + lora_processor.to_k_lora.load_state_dict(self.to_k.lora_layer.state_dict()) + lora_processor.to_v_lora.load_state_dict(self.to_v.lora_layer.state_dict()) + lora_processor.to_out_lora.load_state_dict(self.to_out[0].lora_layer.state_dict()) + elif lora_processor_cls == LoRAAttnAddedKVProcessor: + lora_processor = lora_processor_cls( + hidden_size, + cross_attention_dim=self.add_k_proj.weight.shape[0], + rank=self.to_q.lora_layer.rank, + network_alpha=self.to_q.lora_layer.network_alpha, + ) + lora_processor.to_q_lora.load_state_dict(self.to_q.lora_layer.state_dict()) + lora_processor.to_k_lora.load_state_dict(self.to_k.lora_layer.state_dict()) + lora_processor.to_v_lora.load_state_dict(self.to_v.lora_layer.state_dict()) + lora_processor.to_out_lora.load_state_dict(self.to_out[0].lora_layer.state_dict()) + + # only save if used + if self.add_k_proj.lora_layer is not None: + lora_processor.add_k_proj_lora.load_state_dict(self.add_k_proj.lora_layer.state_dict()) + lora_processor.add_v_proj_lora.load_state_dict(self.add_v_proj.lora_layer.state_dict()) + else: + lora_processor.add_k_proj_lora = None + lora_processor.add_v_proj_lora = None + else: + raise ValueError(f"{lora_processor_cls} does not exist.") + + return lora_processor + + def forward( + self, + hidden_states: torch.FloatTensor, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + **cross_attention_kwargs, + ) -> torch.Tensor: + r""" + The forward method of the `Attention` class. + + Args: + hidden_states (`torch.Tensor`): + The hidden states of the query. + encoder_hidden_states (`torch.Tensor`, *optional*): + The hidden states of the encoder. + attention_mask (`torch.Tensor`, *optional*): + The attention mask to use. If `None`, no mask is applied. + **cross_attention_kwargs: + Additional keyword arguments to pass along to the cross attention. + + Returns: + `torch.Tensor`: The output of the attention layer. 
+ """ + # The `Attention` class can call different attention processors / attention functions + # here we simply pass along all tensors to the selected processor class + # For standard processors that are defined here, `**cross_attention_kwargs` is empty + return self.processor( + self, + hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + + def batch_to_head_dim(self, tensor: torch.Tensor) -> torch.Tensor: + r""" + Reshape the tensor from `[batch_size, seq_len, dim]` to `[batch_size // heads, seq_len, dim * heads]`. `heads` + is the number of heads initialized while constructing the `Attention` class. + + Args: + tensor (`torch.Tensor`): The tensor to reshape. + + Returns: + `torch.Tensor`: The reshaped tensor. + """ + head_size = self.heads + batch_size, seq_len, dim = tensor.shape + tensor = tensor.reshape(batch_size // head_size, head_size, seq_len, dim) + tensor = tensor.permute(0, 2, 1, 3).reshape(batch_size // head_size, seq_len, dim * head_size) + return tensor + + def head_to_batch_dim(self, tensor: torch.Tensor, out_dim: int = 3) -> torch.Tensor: + r""" + Reshape the tensor from `[batch_size, seq_len, dim]` to `[batch_size, seq_len, heads, dim // heads]` `heads` is + the number of heads initialized while constructing the `Attention` class. + + Args: + tensor (`torch.Tensor`): The tensor to reshape. + out_dim (`int`, *optional*, defaults to `3`): The output dimension of the tensor. If `3`, the tensor is + reshaped to `[batch_size * heads, seq_len, dim // heads]`. + + Returns: + `torch.Tensor`: The reshaped tensor. + """ + head_size = self.heads + batch_size, seq_len, dim = tensor.shape + tensor = tensor.reshape(batch_size, seq_len, head_size, dim // head_size) + tensor = tensor.permute(0, 2, 1, 3) + + if out_dim == 3: + tensor = tensor.reshape(batch_size * head_size, seq_len, dim // head_size) + + return tensor + + def get_attention_scores( + self, query: torch.Tensor, key: torch.Tensor, attention_mask: torch.Tensor = None + ) -> torch.Tensor: + r""" + Compute the attention scores. + + Args: + query (`torch.Tensor`): The query tensor. + key (`torch.Tensor`): The key tensor. + attention_mask (`torch.Tensor`, *optional*): The attention mask to use. If `None`, no mask is applied. + + Returns: + `torch.Tensor`: The attention probabilities/scores. + """ + dtype = query.dtype + if self.upcast_attention: + query = query.float() + key = key.float() + + if attention_mask is None: + baddbmm_input = torch.empty( + query.shape[0], query.shape[1], key.shape[1], dtype=query.dtype, device=query.device + ) + beta = 0 + else: + baddbmm_input = attention_mask + beta = 1 + + attention_scores = torch.baddbmm( + baddbmm_input, + query, + key.transpose(-1, -2), + beta=beta, + alpha=self.scale, + ) + del baddbmm_input + + if self.upcast_softmax: + attention_scores = attention_scores.float() + + attention_probs = attention_scores.softmax(dim=-1) + del attention_scores + + attention_probs = attention_probs.to(dtype) + + return attention_probs + + def prepare_attention_mask( + self, attention_mask: torch.Tensor, target_length: int, batch_size: int, out_dim: int = 3 + ) -> torch.Tensor: + r""" + Prepare the attention mask for the attention computation. + + Args: + attention_mask (`torch.Tensor`): + The attention mask to prepare. + target_length (`int`): + The target length of the attention mask. This is the length of the attention mask after padding. 
+ batch_size (`int`): + The batch size, which is used to repeat the attention mask. + out_dim (`int`, *optional*, defaults to `3`): + The output dimension of the attention mask. Can be either `3` or `4`. + + Returns: + `torch.Tensor`: The prepared attention mask. + """ + head_size = self.heads + if attention_mask is None: + return attention_mask + + current_length: int = attention_mask.shape[-1] + if current_length != target_length: + if attention_mask.device.type == "mps": + # HACK: MPS: Does not support padding by greater than dimension of input tensor. + # Instead, we can manually construct the padding tensor. + padding_shape = (attention_mask.shape[0], attention_mask.shape[1], target_length) + padding = torch.zeros(padding_shape, dtype=attention_mask.dtype, device=attention_mask.device) + attention_mask = torch.cat([attention_mask, padding], dim=2) + else: + # TODO: for pipelines such as stable-diffusion, padding cross-attn mask: + # we want to instead pad by (0, remaining_length), where remaining_length is: + # remaining_length: int = target_length - current_length + # TODO: re-enable tests/models/test_models_unet_2d_condition.py#test_model_xattn_padding + attention_mask = F.pad(attention_mask, (0, target_length), value=0.0) + + if out_dim == 3: + if attention_mask.shape[0] < batch_size * head_size: + attention_mask = attention_mask.repeat_interleave(head_size, dim=0) + elif out_dim == 4: + attention_mask = attention_mask.unsqueeze(1) + attention_mask = attention_mask.repeat_interleave(head_size, dim=1) + + return attention_mask + + def norm_encoder_hidden_states(self, encoder_hidden_states: torch.Tensor) -> torch.Tensor: + r""" + Normalize the encoder hidden states. Requires `self.norm_cross` to be specified when constructing the + `Attention` class. + + Args: + encoder_hidden_states (`torch.Tensor`): Hidden states of the encoder. + + Returns: + `torch.Tensor`: The normalized encoder hidden states. + """ + assert self.norm_cross is not None, "self.norm_cross must be defined to call self.norm_encoder_hidden_states" + + if isinstance(self.norm_cross, nn.LayerNorm): + encoder_hidden_states = self.norm_cross(encoder_hidden_states) + elif isinstance(self.norm_cross, nn.GroupNorm): + # Group norm norms along the channels dimension and expects + # input to be in the shape of (N, C, *). In this case, we want + # to norm along the hidden dimension, so we need to move + # (batch_size, sequence_length, hidden_size) -> + # (batch_size, hidden_size, sequence_length) + encoder_hidden_states = encoder_hidden_states.transpose(1, 2) + encoder_hidden_states = self.norm_cross(encoder_hidden_states) + encoder_hidden_states = encoder_hidden_states.transpose(1, 2) + else: + assert False + + return encoder_hidden_states + +class AttnProcessor2_0: + r""" + Processor for implementing scaled dot-product attention (enabled by default if you're using PyTorch 2.0). 
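+    `attention_mode` selects the scaled-dot-product backend used in `__call__`: 'flash', 'xformers' (memory-efficient), or 'math'.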
+ """ + + def __init__(self, attention_mode='xformers'): + self.attention_mode = attention_mode + if not hasattr(F, "scaled_dot_product_attention"): + raise ImportError("AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.") + + def __call__( + self, + attn: Attention, + hidden_states: torch.FloatTensor, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + attention_mask: Optional[torch.FloatTensor] = None, + temb: Optional[torch.FloatTensor] = None, + scale: float = 1.0, + ) -> torch.FloatTensor: + residual = hidden_states + + args = () if USE_PEFT_BACKEND else (scale,) + + if attn.spatial_norm is not None: + hidden_states = attn.spatial_norm(hidden_states, temb) + + input_ndim = hidden_states.ndim + + if input_ndim == 4: + batch_size, channel, height, width = hidden_states.shape + hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2) + + batch_size, sequence_length, _ = ( + hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape + ) + + if attention_mask is not None: + attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size) + # scaled_dot_product_attention expects attention_mask shape to be + # (batch, heads, source_length, target_length) + attention_mask = attention_mask.view(batch_size, attn.heads, -1, attention_mask.shape[-1]) + + if attn.group_norm is not None: + hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2) + + args = () if USE_PEFT_BACKEND else (scale,) + query = attn.to_q(hidden_states, *args) + + if encoder_hidden_states is None: + encoder_hidden_states = hidden_states + elif attn.norm_cross: + encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states) + + key = attn.to_k(encoder_hidden_states, *args) + value = attn.to_v(encoder_hidden_states, *args) + + inner_dim = key.shape[-1] + head_dim = inner_dim // attn.heads + + query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2) + + # the output of sdp = (batch, num_heads, seq_len, head_dim) + # TODO: add support for attn.scale when we move to Torch 2.1 + if self.attention_mode == 'flash': + assert attention_mask is None or torch.all(attention_mask.bool()), 'flash-attn do not support attention_mask' + with torch.backends.cuda.sdp_kernel(enable_math=False, enable_flash=True, enable_mem_efficient=False): + hidden_states = F.scaled_dot_product_attention( + query, key, value, dropout_p=0.0, is_causal=False + ) + elif self.attention_mode == 'xformers': + with torch.backends.cuda.sdp_kernel(enable_math=False, enable_flash=False, enable_mem_efficient=True): + hidden_states = F.scaled_dot_product_attention( + query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False + ) + elif self.attention_mode == 'math': + hidden_states = F.scaled_dot_product_attention( + query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False + ) + else: + raise NotImplementedError(f'Found attention_mode: {self.attention_mode}') + hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim) + hidden_states = hidden_states.to(query.dtype) + + # linear proj + hidden_states = attn.to_out[0](hidden_states, *args) + # dropout + hidden_states = attn.to_out[1](hidden_states) + + if input_ndim == 4: + hidden_states = hidden_states.transpose(-1, 
-2).reshape(batch_size, channel, height, width) + + if attn.residual_connection: + hidden_states = hidden_states + residual + + hidden_states = hidden_states / attn.rescale_output_factor + + return hidden_states + +@maybe_allow_in_graph +class GatedSelfAttentionDense(nn.Module): + r""" + A gated self-attention dense layer that combines visual features and object features. + + Parameters: + query_dim (`int`): The number of channels in the query. + context_dim (`int`): The number of channels in the context. + n_heads (`int`): The number of heads to use for attention. + d_head (`int`): The number of channels in each head. + """ + + def __init__(self, query_dim: int, context_dim: int, n_heads: int, d_head: int): + super().__init__() + + # we need a linear projection since we need cat visual feature and obj feature + self.linear = nn.Linear(context_dim, query_dim) + + self.attn = Attention(query_dim=query_dim, heads=n_heads, dim_head=d_head) + self.ff = FeedForward(query_dim, activation_fn="geglu") + + self.norm1 = nn.LayerNorm(query_dim) + self.norm2 = nn.LayerNorm(query_dim) + + self.register_parameter("alpha_attn", nn.Parameter(torch.tensor(0.0))) + self.register_parameter("alpha_dense", nn.Parameter(torch.tensor(0.0))) + + self.enabled = True + + def forward(self, x: torch.Tensor, objs: torch.Tensor) -> torch.Tensor: + if not self.enabled: + return x + + n_visual = x.shape[1] + objs = self.linear(objs) + + x = x + self.alpha_attn.tanh() * self.attn(self.norm1(torch.cat([x, objs], dim=1)))[:, :n_visual, :] + x = x + self.alpha_dense.tanh() * self.ff(self.norm2(x)) + + return x + + +class FeedForward(nn.Module): + r""" + A feed-forward layer. + + Parameters: + dim (`int`): The number of channels in the input. + dim_out (`int`, *optional*): The number of channels in the output. If not given, defaults to `dim`. + mult (`int`, *optional*, defaults to 4): The multiplier to use for the hidden dimension. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. + activation_fn (`str`, *optional*, defaults to `"geglu"`): Activation function to be used in feed-forward. + final_dropout (`bool` *optional*, defaults to False): Apply a final dropout. + """ + + def __init__( + self, + dim: int, + dim_out: Optional[int] = None, + mult: int = 4, + dropout: float = 0.0, + activation_fn: str = "geglu", + final_dropout: bool = False, + ): + super().__init__() + inner_dim = int(dim * mult) + dim_out = dim_out if dim_out is not None else dim + linear_cls = LoRACompatibleLinear if not USE_PEFT_BACKEND else nn.Linear + + if activation_fn == "gelu": + act_fn = GELU(dim, inner_dim) + if activation_fn == "gelu-approximate": + act_fn = GELU(dim, inner_dim, approximate="tanh") + elif activation_fn == "geglu": + act_fn = GEGLU(dim, inner_dim) + elif activation_fn == "geglu-approximate": + act_fn = ApproximateGELU(dim, inner_dim) + + self.net = nn.ModuleList([]) + # project in + self.net.append(act_fn) + # project dropout + self.net.append(nn.Dropout(dropout)) + # project out + self.net.append(linear_cls(inner_dim, dim_out)) + # FF as used in Vision Transformer, MLP-Mixer, etc. 
have a final dropout + if final_dropout: + self.net.append(nn.Dropout(dropout)) + + def forward(self, hidden_states: torch.Tensor, scale: float = 1.0) -> torch.Tensor: + compatible_cls = (GEGLU,) if USE_PEFT_BACKEND else (GEGLU, LoRACompatibleLinear) + for module in self.net: + if isinstance(module, compatible_cls): + hidden_states = module(hidden_states, scale) + else: + hidden_states = module(hidden_states) + return hidden_states + + +@maybe_allow_in_graph +class BasicTransformerBlock_(nn.Module): + r""" + A basic Transformer block. + + Parameters: + dim (`int`): The number of channels in the input and output. + num_attention_heads (`int`): The number of heads to use for multi-head attention. + attention_head_dim (`int`): The number of channels in each head. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. + cross_attention_dim (`int`, *optional*): The size of the encoder_hidden_states vector for cross attention. + activation_fn (`str`, *optional*, defaults to `"geglu"`): Activation function to be used in feed-forward. + num_embeds_ada_norm (: + obj: `int`, *optional*): The number of diffusion steps used during training. See `Transformer2DModel`. + attention_bias (: + obj: `bool`, *optional*, defaults to `False`): Configure if the attentions should contain a bias parameter. + only_cross_attention (`bool`, *optional*): + Whether to use only cross-attention layers. In this case two cross attention layers are used. + double_self_attention (`bool`, *optional*): + Whether to use two self-attention layers. In this case no cross attention layers are used. + upcast_attention (`bool`, *optional*): + Whether to upcast the attention computation to float32. This is useful for mixed precision training. + norm_elementwise_affine (`bool`, *optional*, defaults to `True`): + Whether to use learnable elementwise affine parameters for normalization. + norm_type (`str`, *optional*, defaults to `"layer_norm"`): + The normalization layer to use. Can be `"layer_norm"`, `"ada_norm"` or `"ada_norm_zero"`. + final_dropout (`bool` *optional*, defaults to False): + Whether to apply a final dropout after the last feed-forward layer. + attention_type (`str`, *optional*, defaults to `"default"`): + The type of attention to use. Can be `"default"` or `"gated"` or `"gated-text-image"`. + positional_embeddings (`str`, *optional*, defaults to `None`): + The type of positional embeddings to apply to. + num_positional_embeddings (`int`, *optional*, defaults to `None`): + The maximum number of positional embeddings to apply. 
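+
+    Example (a minimal sketch added for illustration, not part of the reference code; the block
+    defined below keeps only the self-attention and feed-forward sub-layers, so no
+    `encoder_hidden_states` are passed):
+
+        block = BasicTransformerBlock_(dim=64, num_attention_heads=8, attention_head_dim=8)
+        hidden_states = torch.randn(2, 16, 64)
+        out = block(hidden_states)  # -> (2, 16, 64)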
+ """ + + def __init__( + self, + dim: int, + num_attention_heads: int, + attention_head_dim: int, + dropout=0.0, + cross_attention_dim: Optional[int] = None, + activation_fn: str = "geglu", + num_embeds_ada_norm: Optional[int] = None, + attention_bias: bool = False, + only_cross_attention: bool = False, + double_self_attention: bool = False, + upcast_attention: bool = False, + norm_elementwise_affine: bool = True, + norm_type: str = "layer_norm", # 'layer_norm', 'ada_norm', 'ada_norm_zero', 'ada_norm_single' + norm_eps: float = 1e-5, + final_dropout: bool = False, + attention_type: str = "default", + positional_embeddings: Optional[str] = None, + num_positional_embeddings: Optional[int] = None, + attention_mode: str = "xformers", + ): + super().__init__() + self.only_cross_attention = only_cross_attention + + self.use_ada_layer_norm_zero = (num_embeds_ada_norm is not None) and norm_type == "ada_norm_zero" + self.use_ada_layer_norm = (num_embeds_ada_norm is not None) and norm_type == "ada_norm" + self.use_ada_layer_norm_single = norm_type == "ada_norm_single" + self.use_layer_norm = norm_type == "layer_norm" + + if norm_type in ("ada_norm", "ada_norm_zero") and num_embeds_ada_norm is None: + raise ValueError( + f"`norm_type` is set to {norm_type}, but `num_embeds_ada_norm` is not defined. Please make sure to" + f" define `num_embeds_ada_norm` if setting `norm_type` to {norm_type}." + ) + + if positional_embeddings and (num_positional_embeddings is None): + raise ValueError( + "If `positional_embedding` type is defined, `num_positition_embeddings` must also be defined." + ) + + if positional_embeddings == "sinusoidal": + self.pos_embed = SinusoidalPositionalEmbedding(dim, max_seq_length=num_positional_embeddings) + else: + self.pos_embed = None + + # Define 3 blocks. Each block has its own normalization layer. + # 1. Self-Attn + if self.use_ada_layer_norm: + self.norm1 = AdaLayerNorm(dim, num_embeds_ada_norm) + elif self.use_ada_layer_norm_zero: + self.norm1 = AdaLayerNormZero(dim, num_embeds_ada_norm) + else: + self.norm1 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps) + + self.attn1 = Attention( + query_dim=dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + cross_attention_dim=cross_attention_dim if only_cross_attention else None, + upcast_attention=upcast_attention, + attention_mode=attention_mode + ) + + # # 2. Cross-Attn + # if cross_attention_dim is not None or double_self_attention: + # # We currently only use AdaLayerNormZero for self attention where there will only be one attention block. + # # I.e. the number of returned modulation chunks from AdaLayerZero would not make sense if returned during + # # the second cross attention block. + # self.norm2 = ( + # AdaLayerNorm(dim, num_embeds_ada_norm) + # if self.use_ada_layer_norm + # else nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps) + # ) + # self.attn2 = Attention( + # query_dim=dim, + # cross_attention_dim=cross_attention_dim if not double_self_attention else None, + # heads=num_attention_heads, + # dim_head=attention_head_dim, + # dropout=dropout, + # bias=attention_bias, + # upcast_attention=upcast_attention, + # ) # is self-attn if encoder_hidden_states is none + # else: + # self.norm2 = None + # self.attn2 = None + + # 3. 
Feed-forward + # if not self.use_ada_layer_norm_single: + # self.norm3 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps) + self.norm3 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps) + + self.ff = FeedForward(dim, dropout=dropout, activation_fn=activation_fn, final_dropout=final_dropout) + + # 4. Fuser + if attention_type == "gated" or attention_type == "gated-text-image": + self.fuser = GatedSelfAttentionDense(dim, cross_attention_dim, num_attention_heads, attention_head_dim) + + # 5. Scale-shift for PixArt-Alpha. + if self.use_ada_layer_norm_single: + self.scale_shift_table = nn.Parameter(torch.randn(6, dim) / dim ** 0.5) + + # let chunk size default to None + self._chunk_size = None + self._chunk_dim = 0 + + def set_chunk_feed_forward(self, chunk_size: Optional[int], dim: int): + # Sets chunk feed-forward + self._chunk_size = chunk_size + self._chunk_dim = dim + + def forward( + self, + hidden_states: torch.FloatTensor, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + timestep: Optional[torch.LongTensor] = None, + cross_attention_kwargs: Dict[str, Any] = None, + class_labels: Optional[torch.LongTensor] = None, + ) -> torch.FloatTensor: + # Notice that normalization is always applied before the real computation in the following blocks. + # 0. Self-Attention + batch_size = hidden_states.shape[0] + + if self.use_ada_layer_norm: + norm_hidden_states = self.norm1(hidden_states, timestep) + elif self.use_ada_layer_norm_zero: + norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1( + hidden_states, timestep, class_labels, hidden_dtype=hidden_states.dtype + ) + elif self.use_layer_norm: + norm_hidden_states = self.norm1(hidden_states) + elif self.use_ada_layer_norm_single: + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = ( + self.scale_shift_table[None] + timestep.reshape(batch_size, 6, -1) + ).chunk(6, dim=1) + norm_hidden_states = self.norm1(hidden_states) + norm_hidden_states = norm_hidden_states * (1 + scale_msa) + shift_msa + norm_hidden_states = norm_hidden_states.squeeze(1) + else: + raise ValueError("Incorrect norm used") + + if self.pos_embed is not None: + norm_hidden_states = self.pos_embed(norm_hidden_states) + + # 1. Retrieve lora scale. + lora_scale = cross_attention_kwargs.get("scale", 1.0) if cross_attention_kwargs is not None else 1.0 + + # 2. Prepare GLIGEN inputs + cross_attention_kwargs = cross_attention_kwargs.copy() if cross_attention_kwargs is not None else {} + gligen_kwargs = cross_attention_kwargs.pop("gligen", None) + + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + if self.use_ada_layer_norm_zero: + attn_output = gate_msa.unsqueeze(1) * attn_output + elif self.use_ada_layer_norm_single: + attn_output = gate_msa * attn_output + + hidden_states = attn_output + hidden_states + if hidden_states.ndim == 4: + hidden_states = hidden_states.squeeze(1) + + # 2.5 GLIGEN Control + if gligen_kwargs is not None: + hidden_states = self.fuser(hidden_states, gligen_kwargs["objs"]) + + # # 3. 
Cross-Attention + # if self.attn2 is not None: + # if self.use_ada_layer_norm: + # norm_hidden_states = self.norm2(hidden_states, timestep) + # elif self.use_ada_layer_norm_zero or self.use_layer_norm: + # norm_hidden_states = self.norm2(hidden_states) + # elif self.use_ada_layer_norm_single: + # # For PixArt norm2 isn't applied here: + # # https://github.com/PixArt-alpha/PixArt-alpha/blob/0f55e922376d8b797edd44d25d0e7464b260dcab/diffusion/model/nets/PixArtMS.py#L70C1-L76C103 + # norm_hidden_states = hidden_states + # else: + # raise ValueError("Incorrect norm") + + # if self.pos_embed is not None and self.use_ada_layer_norm_single is False: + # norm_hidden_states = self.pos_embed(norm_hidden_states) + + # attn_output = self.attn2( + # norm_hidden_states, + # encoder_hidden_states=encoder_hidden_states, + # attention_mask=encoder_attention_mask, + # **cross_attention_kwargs, + # ) + # hidden_states = attn_output + hidden_states + + # 4. Feed-forward + # if not self.use_ada_layer_norm_single: + # norm_hidden_states = self.norm3(hidden_states) + + if self.use_ada_layer_norm_zero: + norm_hidden_states = norm_hidden_states * (1 + scale_mlp[:, None]) + shift_mlp[:, None] + + if self.use_ada_layer_norm_single: + # norm_hidden_states = self.norm2(hidden_states) + norm_hidden_states = self.norm3(hidden_states) + norm_hidden_states = norm_hidden_states * (1 + scale_mlp) + shift_mlp + + if self._chunk_size is not None: + # "feed_forward_chunk_size" can be used to save memory + if norm_hidden_states.shape[self._chunk_dim] % self._chunk_size != 0: + raise ValueError( + f"`hidden_states` dimension to be chunked: {norm_hidden_states.shape[self._chunk_dim]} has to be divisible by chunk size: {self._chunk_size}. Make sure to set an appropriate `chunk_size` when calling `unet.enable_forward_chunking`." + ) + + num_chunks = norm_hidden_states.shape[self._chunk_dim] // self._chunk_size + ff_output = torch.cat( + [ + self.ff(hid_slice, scale=lora_scale) + for hid_slice in norm_hidden_states.chunk(num_chunks, dim=self._chunk_dim) + ], + dim=self._chunk_dim, + ) + else: + ff_output = self.ff(norm_hidden_states, scale=lora_scale) + + if self.use_ada_layer_norm_zero: + ff_output = gate_mlp.unsqueeze(1) * ff_output + elif self.use_ada_layer_norm_single: + ff_output = gate_mlp * ff_output + + hidden_states = ff_output + hidden_states + if hidden_states.ndim == 4: + hidden_states = hidden_states.squeeze(1) + + return hidden_states + + +@maybe_allow_in_graph +class BasicTransformerBlock(nn.Module): + r""" + A basic Transformer block. + + Parameters: + dim (`int`): The number of channels in the input and output. + num_attention_heads (`int`): The number of heads to use for multi-head attention. + attention_head_dim (`int`): The number of channels in each head. + dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use. + cross_attention_dim (`int`, *optional*): The size of the encoder_hidden_states vector for cross attention. + activation_fn (`str`, *optional*, defaults to `"geglu"`): Activation function to be used in feed-forward. + num_embeds_ada_norm (: + obj: `int`, *optional*): The number of diffusion steps used during training. See `Transformer2DModel`. + attention_bias (: + obj: `bool`, *optional*, defaults to `False`): Configure if the attentions should contain a bias parameter. + only_cross_attention (`bool`, *optional*): + Whether to use only cross-attention layers. In this case two cross attention layers are used. 
+ double_self_attention (`bool`, *optional*): + Whether to use two self-attention layers. In this case no cross attention layers are used. + upcast_attention (`bool`, *optional*): + Whether to upcast the attention computation to float32. This is useful for mixed precision training. + norm_elementwise_affine (`bool`, *optional*, defaults to `True`): + Whether to use learnable elementwise affine parameters for normalization. + norm_type (`str`, *optional*, defaults to `"layer_norm"`): + The normalization layer to use. Can be `"layer_norm"`, `"ada_norm"` or `"ada_norm_zero"`. + final_dropout (`bool` *optional*, defaults to False): + Whether to apply a final dropout after the last feed-forward layer. + attention_type (`str`, *optional*, defaults to `"default"`): + The type of attention to use. Can be `"default"` or `"gated"` or `"gated-text-image"`. + positional_embeddings (`str`, *optional*, defaults to `None`): + The type of positional embeddings to apply to. + num_positional_embeddings (`int`, *optional*, defaults to `None`): + The maximum number of positional embeddings to apply. + """ + + def __init__( + self, + dim: int, + num_attention_heads: int, + attention_head_dim: int, + dropout=0.0, + cross_attention_dim: Optional[int] = None, + activation_fn: str = "geglu", + num_embeds_ada_norm: Optional[int] = None, + attention_bias: bool = False, + only_cross_attention: bool = False, + double_self_attention: bool = False, + upcast_attention: bool = False, + norm_elementwise_affine: bool = True, + norm_type: str = "layer_norm", # 'layer_norm', 'ada_norm', 'ada_norm_zero', 'ada_norm_single' + norm_eps: float = 1e-5, + final_dropout: bool = False, + attention_type: str = "default", + positional_embeddings: Optional[str] = None, + num_positional_embeddings: Optional[int] = None, + attention_mode: str = "xformers" + ): + super().__init__() + self.only_cross_attention = only_cross_attention + + self.use_ada_layer_norm_zero = (num_embeds_ada_norm is not None) and norm_type == "ada_norm_zero" + self.use_ada_layer_norm = (num_embeds_ada_norm is not None) and norm_type == "ada_norm" + self.use_ada_layer_norm_single = norm_type == "ada_norm_single" + self.use_layer_norm = norm_type == "layer_norm" + + if norm_type in ("ada_norm", "ada_norm_zero") and num_embeds_ada_norm is None: + raise ValueError( + f"`norm_type` is set to {norm_type}, but `num_embeds_ada_norm` is not defined. Please make sure to" + f" define `num_embeds_ada_norm` if setting `norm_type` to {norm_type}." + ) + + if positional_embeddings and (num_positional_embeddings is None): + raise ValueError( + "If `positional_embedding` type is defined, `num_positition_embeddings` must also be defined." + ) + + if positional_embeddings == "sinusoidal": + self.pos_embed = SinusoidalPositionalEmbedding(dim, max_seq_length=num_positional_embeddings) + else: + self.pos_embed = None + + # Define 3 blocks. Each block has its own normalization layer. + # 1. Self-Attn + if self.use_ada_layer_norm: + self.norm1 = AdaLayerNorm(dim, num_embeds_ada_norm) + elif self.use_ada_layer_norm_zero: + self.norm1 = AdaLayerNormZero(dim, num_embeds_ada_norm) + else: + self.norm1 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps) + + self.attn1 = Attention( + query_dim=dim, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + cross_attention_dim=cross_attention_dim if only_cross_attention else None, + upcast_attention=upcast_attention, + attention_mode=attention_mode + ) + + # 2. 
Cross-Attn + if cross_attention_dim is not None or double_self_attention: + # We currently only use AdaLayerNormZero for self attention where there will only be one attention block. + # I.e. the number of returned modulation chunks from AdaLayerZero would not make sense if returned during + # the second cross attention block. + self.norm2 = ( + AdaLayerNorm(dim, num_embeds_ada_norm) + if self.use_ada_layer_norm + else nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps) + ) + self.attn2 = Attention( + query_dim=dim, + cross_attention_dim=cross_attention_dim if not double_self_attention else None, + heads=num_attention_heads, + dim_head=attention_head_dim, + dropout=dropout, + bias=attention_bias, + upcast_attention=upcast_attention, + attention_mode='xformers', # only xformers support attention_mask + ) # is self-attn if encoder_hidden_states is none + else: + self.norm2 = None + self.attn2 = None + + # 3. Feed-forward + if not self.use_ada_layer_norm_single: + self.norm3 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine, eps=norm_eps) + + self.ff = FeedForward( + dim, + dropout=dropout, + activation_fn=activation_fn, + final_dropout=final_dropout, + ) + + # 4. Fuser + if attention_type == "gated" or attention_type == "gated-text-image": + self.fuser = GatedSelfAttentionDense(dim, cross_attention_dim, num_attention_heads, attention_head_dim) + + # 5. Scale-shift for PixArt-Alpha. + if self.use_ada_layer_norm_single: + self.scale_shift_table = nn.Parameter(torch.randn(6, dim) / dim**0.5) + + # let chunk size default to None + self._chunk_size = None + self._chunk_dim = 0 + + def set_chunk_feed_forward(self, chunk_size: Optional[int], dim: int = 0): + # Sets chunk feed-forward + self._chunk_size = chunk_size + self._chunk_dim = dim + + def forward( + self, + hidden_states: torch.FloatTensor, + attention_mask: Optional[torch.FloatTensor] = None, + encoder_hidden_states: Optional[torch.FloatTensor] = None, + encoder_attention_mask: Optional[torch.FloatTensor] = None, + timestep: Optional[torch.LongTensor] = None, + cross_attention_kwargs: Dict[str, Any] = None, + class_labels: Optional[torch.LongTensor] = None, + ) -> torch.FloatTensor: + # Notice that normalization is always applied before the real computation in the following blocks. + # 0. Self-Attention + batch_size = hidden_states.shape[0] + + if self.use_ada_layer_norm: + norm_hidden_states = self.norm1(hidden_states, timestep) + elif self.use_ada_layer_norm_zero: + norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1( + hidden_states, timestep, class_labels, hidden_dtype=hidden_states.dtype + ) + elif self.use_layer_norm: + norm_hidden_states = self.norm1(hidden_states) + elif self.use_ada_layer_norm_single: + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = ( + self.scale_shift_table[None] + timestep.reshape(batch_size, 6, -1) + ).chunk(6, dim=1) + norm_hidden_states = self.norm1(hidden_states) + norm_hidden_states = norm_hidden_states * (1 + scale_msa) + shift_msa + norm_hidden_states = norm_hidden_states.squeeze(1) + else: + raise ValueError("Incorrect norm used") + + if self.pos_embed is not None: + norm_hidden_states = self.pos_embed(norm_hidden_states) + + # 1. Retrieve lora scale. + lora_scale = cross_attention_kwargs.get("scale", 1.0) if cross_attention_kwargs is not None else 1.0 + + # 2. 
Prepare GLIGEN inputs + cross_attention_kwargs = cross_attention_kwargs.copy() if cross_attention_kwargs is not None else {} + gligen_kwargs = cross_attention_kwargs.pop("gligen", None) + + attn_output = self.attn1( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None, + attention_mask=attention_mask, + **cross_attention_kwargs, + ) + if self.use_ada_layer_norm_zero: + attn_output = gate_msa.unsqueeze(1) * attn_output + elif self.use_ada_layer_norm_single: + attn_output = gate_msa * attn_output + + hidden_states = attn_output + hidden_states + if hidden_states.ndim == 4: + hidden_states = hidden_states.squeeze(1) + + # 2.5 GLIGEN Control + if gligen_kwargs is not None: + hidden_states = self.fuser(hidden_states, gligen_kwargs["objs"]) + + # 3. Cross-Attention + if self.attn2 is not None: + if self.use_ada_layer_norm: + norm_hidden_states = self.norm2(hidden_states, timestep) + elif self.use_ada_layer_norm_zero or self.use_layer_norm: + norm_hidden_states = self.norm2(hidden_states) + elif self.use_ada_layer_norm_single: + # For PixArt norm2 isn't applied here: + # https://github.com/PixArt-alpha/PixArt-alpha/blob/0f55e922376d8b797edd44d25d0e7464b260dcab/diffusion/model/nets/PixArtMS.py#L70C1-L76C103 + norm_hidden_states = hidden_states + else: + raise ValueError("Incorrect norm") + + if self.pos_embed is not None and self.use_ada_layer_norm_single is False: + norm_hidden_states = self.pos_embed(norm_hidden_states) + + attn_output = self.attn2( + norm_hidden_states, + encoder_hidden_states=encoder_hidden_states, + attention_mask=encoder_attention_mask, + **cross_attention_kwargs, + ) + hidden_states = attn_output + hidden_states + + # 4. Feed-forward + if not self.use_ada_layer_norm_single: + norm_hidden_states = self.norm3(hidden_states) + + if self.use_ada_layer_norm_zero: + norm_hidden_states = norm_hidden_states * (1 + scale_mlp[:, None]) + shift_mlp[:, None] + + if self.use_ada_layer_norm_single: + norm_hidden_states = self.norm2(hidden_states) + norm_hidden_states = norm_hidden_states * (1 + scale_mlp) + shift_mlp + + if self._chunk_size is not None: + # "feed_forward_chunk_size" can be used to save memory + ff_output = _chunked_feed_forward( + self.ff, norm_hidden_states, self._chunk_dim, self._chunk_size, lora_scale=lora_scale + ) + else: + ff_output = self.ff(norm_hidden_states, scale=lora_scale) + + if self.use_ada_layer_norm_zero: + ff_output = gate_mlp.unsqueeze(1) * ff_output + elif self.use_ada_layer_norm_single: + ff_output = gate_mlp * ff_output + + hidden_states = ff_output + hidden_states + if hidden_states.ndim == 4: + hidden_states = hidden_states.squeeze(1) + + return hidden_states + +class AdaLayerNormSingle(nn.Module): + r""" + Norm layer adaptive layer norm single (adaLN-single). + + As proposed in PixArt-Alpha (see: https://arxiv.org/abs/2310.00426; Section 2.3). + + Parameters: + embedding_dim (`int`): The size of each embedding vector. + use_additional_conditions (`bool`): To use additional conditions for normalization or not. 
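+
+    Example (a minimal sketch added for illustration, not part of the reference code; shapes
+    assume `embedding_dim=384` and the diffusers `CombinedTimestepSizeEmbeddings` timestep
+    embedder used by this class):
+
+        ada_norm = AdaLayerNormSingle(embedding_dim=384)
+        timestep = torch.tensor([10, 20])
+        conditioning, embedded_timestep = ada_norm(timestep, batch_size=2, hidden_dtype=torch.float32)
+        # conditioning: (2, 6 * 384), embedded_timestep: (2, 384)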
+ """ + + def __init__(self, embedding_dim: int, use_additional_conditions: bool = False): + super().__init__() + + self.emb = CombinedTimestepSizeEmbeddings( + embedding_dim, size_emb_dim=embedding_dim // 3, use_additional_conditions=use_additional_conditions + ) + + self.silu = nn.SiLU() + self.linear = nn.Linear(embedding_dim, 6 * embedding_dim, bias=True) + + def forward( + self, + timestep: torch.Tensor, + added_cond_kwargs: Dict[str, torch.Tensor] = None, + batch_size: int = None, + hidden_dtype: Optional[torch.dtype] = None, + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: + # No modulation happening here. + embedded_timestep = self.emb(timestep, batch_size=batch_size, hidden_dtype=hidden_dtype, resolution=None, + aspect_ratio=None) + return self.linear(self.silu(embedded_timestep)), embedded_timestep + + +@dataclass +class Transformer3DModelOutput(BaseOutput): + """ + The output of [`Transformer2DModel`]. + + Args: + sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` or `(batch size, num_vector_embeds - 1, num_latent_pixels)` if [`Transformer2DModel`] is discrete): + The hidden states output conditioned on the `encoder_hidden_states` input. If discrete, returns probability + distributions for the unnoised latent pixels. + """ + + sample: torch.FloatTensor diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/pos_embed.py b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/pos_embed.py new file mode 100644 index 0000000000000000000000000000000000000000..cb9cc978e16d8fcadf956bcbce12e19040826ddf --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/utils_pos_embedding/pos_embed.py @@ -0,0 +1,131 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
+ +# croco: https://github.com/naver/croco +# diffusers: https://github.com/huggingface/diffusers +# -------------------------------------------------------- +# Position embedding utils +# -------------------------------------------------------- + +import numpy as np +import torch + + +def get_2d_sincos_pos_embed( + embed_dim, grid_size, cls_token=False, extra_tokens=0, interpolation_scale=1.0, base_size=16 +): + """ + grid_size: int of the grid height and width return: pos_embed: [grid_size*grid_size, embed_dim] or + [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + if isinstance(grid_size, int): + grid_size = (grid_size, grid_size) + + grid_h = np.arange(grid_size[0], dtype=np.float32) / (grid_size[0] / base_size) / interpolation_scale + grid_w = np.arange(grid_size[1], dtype=np.float32) / (grid_size[1] / base_size) / interpolation_scale + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + + grid = grid.reshape([2, 1, grid_size[1], grid_size[0]]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0) + return pos_embed + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + if embed_dim % 2 != 0: + raise ValueError("embed_dim must be divisible by 2") + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position pos: a list of positions to be encoded: size (M,) out: (M, D) + """ + if embed_dim % 2 != 0: + raise ValueError("embed_dim must be divisible by 2") + + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2.0 + omega = 1.0 / 10000 ** omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum("m,d->md", pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb + + +# ---------------------------------------------------------- +# RoPE2D: RoPE implementation in 2D +# ---------------------------------------------------------- + +try: + from .curope import cuRoPE2D + + RoPE2D = cuRoPE2D +except ImportError: + print('Warning, cannot find cuda-compiled version of RoPE2D, using a slow pytorch version instead') + + + class RoPE2D(torch.nn.Module): + + def __init__(self, freq=100.0, F0=1.0): + super().__init__() + self.base = freq + self.F0 = F0 + self.cache = {} + + def get_cos_sin(self, D, seq_len, device, dtype): + if (D, seq_len, device, dtype) not in self.cache: + inv_freq = 1.0 / (self.base ** (torch.arange(0, D, 2).float().to(device) / D)) + t = torch.arange(seq_len, device=device, dtype=inv_freq.dtype) + freqs = torch.einsum("i,j->ij", t, inv_freq).to(dtype) + freqs = torch.cat((freqs, freqs), dim=-1) + cos = freqs.cos() # (Seq, Dim) + sin = freqs.sin() + self.cache[D, seq_len, device, dtype] = (cos, sin) + return self.cache[D, seq_len, device, dtype] + + @staticmethod + def rotate_half(x): + x1, x2 = x[..., : x.shape[-1] // 2], x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + def apply_rope1d(self, tokens, pos1d, cos, sin): + assert pos1d.ndim == 2 + # import pdb + # pdb.set_trace() + cos = 
torch.nn.functional.embedding(pos1d, cos)[:, None, :, :].squeeze(1) + sin = torch.nn.functional.embedding(pos1d, sin)[:, None, :, :].squeeze(1) + return (tokens * cos) + (self.rotate_half(tokens) * sin) + + def forward(self, tokens, positions): + """ + input: + * tokens: batch_size x nheads x ntokens x dim + * positions: batch_size x ntokens x 2 (y and x position of each token) + output: + * tokens after appplying RoPE2D (batch_size x nheads x ntokens x dim) + """ + positions = positions.to(torch.int).to(tokens.device) + assert tokens.size(2) % 2 == 0, "number of dimensions should be a multiple of two" + D = tokens.size(2) // 2 + assert positions.ndim == 3 and positions.shape[-1] == 2 # Batch, Seq, 2 + cos, sin = self.get_cos_sin(D, int(positions.max()) + 1, tokens.device, tokens.dtype) + # split features into two along the feature dimension, and apply rope1d on each half + y, x = tokens.chunk(2, dim=-1) + y = self.apply_rope1d(y, positions[:, :, 0], cos, sin) + x = self.apply_rope1d(x, positions[:, :, 1], cos, sin) + tokens = torch.cat((y, x), dim=-1) + return tokens diff --git a/qa_mdt/audioldm_train/modules/diffusionmodules/x_transformer.py b/qa_mdt/audioldm_train/modules/diffusionmodules/x_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..96041591aa327b95e4b6eb6a2877f8779055c664 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/diffusionmodules/x_transformer.py @@ -0,0 +1,704 @@ +"""shout-out to https://github.com/lucidrains/x-transformers/tree/main/x_transformers""" +import torch +from torch import nn, einsum +import torch.nn.functional as F +from functools import partial +from inspect import isfunction +from collections import namedtuple +from einops import rearrange, repeat, reduce + +# constants + +DEFAULT_DIM_HEAD = 64 + +Intermediates = namedtuple("Intermediates", ["pre_softmax_attn", "post_softmax_attn"]) + +LayerIntermediates = namedtuple("Intermediates", ["hiddens", "attn_intermediates"]) + + +class AbsolutePositionalEmbedding(nn.Module): + def __init__(self, dim, max_seq_len): + super().__init__() + self.emb = nn.Embedding(max_seq_len, dim) + self.init_() + + def init_(self): + nn.init.normal_(self.emb.weight, std=0.02) + + def forward(self, x): + n = torch.arange(x.shape[1], device=x.device) + return self.emb(n)[None, :, :] + + +class FixedPositionalEmbedding(nn.Module): + def __init__(self, dim): + super().__init__() + inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2).float() / dim)) + self.register_buffer("inv_freq", inv_freq) + + def forward(self, x, seq_dim=1, offset=0): + t = ( + torch.arange(x.shape[seq_dim], device=x.device).type_as(self.inv_freq) + + offset + ) + sinusoid_inp = torch.einsum("i , j -> i j", t, self.inv_freq) + emb = torch.cat((sinusoid_inp.sin(), sinusoid_inp.cos()), dim=-1) + return emb[None, :, :] + + +# helpers + + +def exists(val): + return val is not None + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def always(val): + def inner(*args, **kwargs): + return val + + return inner + + +def not_equals(val): + def inner(x): + return x != val + + return inner + + +def equals(val): + def inner(x): + return x == val + + return inner + + +def max_neg_value(tensor): + return -torch.finfo(tensor.dtype).max + + +# keyword argument helpers + + +def pick_and_pop(keys, d): + values = list(map(lambda key: d.pop(key), keys)) + return dict(zip(keys, values)) + + +def group_dict_by_key(cond, d): + return_val = [dict(), dict()] + for key in d.keys(): + match = bool(cond(key)) + 
ind = int(not match) + return_val[ind][key] = d[key] + return (*return_val,) + + +def string_begins_with(prefix, str): + return str.startswith(prefix) + + +def group_by_key_prefix(prefix, d): + return group_dict_by_key(partial(string_begins_with, prefix), d) + + +def groupby_prefix_and_trim(prefix, d): + kwargs_with_prefix, kwargs = group_dict_by_key( + partial(string_begins_with, prefix), d + ) + kwargs_without_prefix = dict( + map(lambda x: (x[0][len(prefix) :], x[1]), tuple(kwargs_with_prefix.items())) + ) + return kwargs_without_prefix, kwargs + + +# classes +class Scale(nn.Module): + def __init__(self, value, fn): + super().__init__() + self.value = value + self.fn = fn + + def forward(self, x, **kwargs): + x, *rest = self.fn(x, **kwargs) + return (x * self.value, *rest) + + +class Rezero(nn.Module): + def __init__(self, fn): + super().__init__() + self.fn = fn + self.g = nn.Parameter(torch.zeros(1)) + + def forward(self, x, **kwargs): + x, *rest = self.fn(x, **kwargs) + return (x * self.g, *rest) + + +class ScaleNorm(nn.Module): + def __init__(self, dim, eps=1e-5): + super().__init__() + self.scale = dim**-0.5 + self.eps = eps + self.g = nn.Parameter(torch.ones(1)) + + def forward(self, x): + norm = torch.norm(x, dim=-1, keepdim=True) * self.scale + return x / norm.clamp(min=self.eps) * self.g + + +class RMSNorm(nn.Module): + def __init__(self, dim, eps=1e-8): + super().__init__() + self.scale = dim**-0.5 + self.eps = eps + self.g = nn.Parameter(torch.ones(dim)) + + def forward(self, x): + norm = torch.norm(x, dim=-1, keepdim=True) * self.scale + return x / norm.clamp(min=self.eps) * self.g + + +class Residual(nn.Module): + def forward(self, x, residual): + return x + residual + + +class GRUGating(nn.Module): + def __init__(self, dim): + super().__init__() + self.gru = nn.GRUCell(dim, dim) + + def forward(self, x, residual): + gated_output = self.gru( + rearrange(x, "b n d -> (b n) d"), rearrange(residual, "b n d -> (b n) d") + ) + + return gated_output.reshape_as(x) + + +# feedforward + + +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.proj = nn.Linear(dim_in, dim_out * 2) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.0): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = ( + nn.Sequential(nn.Linear(dim, inner_dim), nn.GELU()) + if not glu + else GEGLU(dim, inner_dim) + ) + + self.net = nn.Sequential( + project_in, nn.Dropout(dropout), nn.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +# attention. +class Attention(nn.Module): + def __init__( + self, + dim, + dim_head=DEFAULT_DIM_HEAD, + heads=8, + causal=False, + mask=None, + talking_heads=False, + sparse_topk=None, + use_entmax15=False, + num_mem_kv=0, + dropout=0.0, + on_attn=False, + ): + super().__init__() + if use_entmax15: + raise NotImplementedError( + "Check out entmax activation instead of softmax activation!" 
+ ) + self.scale = dim_head**-0.5 + self.heads = heads + self.causal = causal + self.mask = mask + + inner_dim = dim_head * heads + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_k = nn.Linear(dim, inner_dim, bias=False) + self.to_v = nn.Linear(dim, inner_dim, bias=False) + self.dropout = nn.Dropout(dropout) + + # talking heads + self.talking_heads = talking_heads + if talking_heads: + self.pre_softmax_proj = nn.Parameter(torch.randn(heads, heads)) + self.post_softmax_proj = nn.Parameter(torch.randn(heads, heads)) + + # explicit topk sparse attention + self.sparse_topk = sparse_topk + + # entmax + # self.attn_fn = entmax15 if use_entmax15 else F.softmax + self.attn_fn = F.softmax + + # add memory key / values + self.num_mem_kv = num_mem_kv + if num_mem_kv > 0: + self.mem_k = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head)) + self.mem_v = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head)) + + # attention on attention + self.attn_on_attn = on_attn + self.to_out = ( + nn.Sequential(nn.Linear(inner_dim, dim * 2), nn.GLU()) + if on_attn + else nn.Linear(inner_dim, dim) + ) + + def forward( + self, + x, + context=None, + mask=None, + context_mask=None, + rel_pos=None, + sinusoidal_emb=None, + prev_attn=None, + mem=None, + ): + b, n, _, h, talking_heads, device = ( + *x.shape, + self.heads, + self.talking_heads, + x.device, + ) + kv_input = default(context, x) + + q_input = x + k_input = kv_input + v_input = kv_input + + if exists(mem): + k_input = torch.cat((mem, k_input), dim=-2) + v_input = torch.cat((mem, v_input), dim=-2) + + if exists(sinusoidal_emb): + # in shortformer, the query would start at a position offset depending on the past cached memory + offset = k_input.shape[-2] - q_input.shape[-2] + q_input = q_input + sinusoidal_emb(q_input, offset=offset) + k_input = k_input + sinusoidal_emb(k_input) + + q = self.to_q(q_input) + k = self.to_k(k_input) + v = self.to_v(v_input) + + q, k, v = map(lambda t: rearrange(t, "b n (h d) -> b h n d", h=h), (q, k, v)) + + input_mask = None + if any(map(exists, (mask, context_mask))): + q_mask = default(mask, lambda: torch.ones((b, n), device=device).bool()) + k_mask = q_mask if not exists(context) else context_mask + k_mask = default( + k_mask, lambda: torch.ones((b, k.shape[-2]), device=device).bool() + ) + q_mask = rearrange(q_mask, "b i -> b () i ()") + k_mask = rearrange(k_mask, "b j -> b () () j") + input_mask = q_mask * k_mask + + if self.num_mem_kv > 0: + mem_k, mem_v = map( + lambda t: repeat(t, "h n d -> b h n d", b=b), (self.mem_k, self.mem_v) + ) + k = torch.cat((mem_k, k), dim=-2) + v = torch.cat((mem_v, v), dim=-2) + if exists(input_mask): + input_mask = F.pad(input_mask, (self.num_mem_kv, 0), value=True) + + dots = einsum("b h i d, b h j d -> b h i j", q, k) * self.scale + mask_value = max_neg_value(dots) + + if exists(prev_attn): + dots = dots + prev_attn + + pre_softmax_attn = dots + + if talking_heads: + dots = einsum( + "b h i j, h k -> b k i j", dots, self.pre_softmax_proj + ).contiguous() + + if exists(rel_pos): + dots = rel_pos(dots) + + if exists(input_mask): + dots.masked_fill_(~input_mask, mask_value) + del input_mask + + if self.causal: + i, j = dots.shape[-2:] + r = torch.arange(i, device=device) + mask = rearrange(r, "i -> () () i ()") < rearrange(r, "j -> () () () j") + mask = F.pad(mask, (j - i, 0), value=False) + dots.masked_fill_(mask, mask_value) + del mask + + if exists(self.sparse_topk) and self.sparse_topk < dots.shape[-1]: + top, _ = dots.topk(self.sparse_topk, dim=-1) + vk = top[..., 
-1].unsqueeze(-1).expand_as(dots) + mask = dots < vk + dots.masked_fill_(mask, mask_value) + del mask + + attn = self.attn_fn(dots, dim=-1) + post_softmax_attn = attn + + attn = self.dropout(attn) + + if talking_heads: + attn = einsum( + "b h i j, h k -> b k i j", attn, self.post_softmax_proj + ).contiguous() + + out = einsum("b h i j, b h j d -> b h i d", attn, v) + out = rearrange(out, "b h n d -> b n (h d)") + + intermediates = Intermediates( + pre_softmax_attn=pre_softmax_attn, post_softmax_attn=post_softmax_attn + ) + + return self.to_out(out), intermediates + + +class AttentionLayers(nn.Module): + def __init__( + self, + dim, + depth, + heads=8, + causal=False, + cross_attend=False, + only_cross=False, + use_scalenorm=False, + use_rmsnorm=False, + use_rezero=False, + rel_pos_num_buckets=32, + rel_pos_max_distance=128, + position_infused_attn=False, + custom_layers=None, + sandwich_coef=None, + par_ratio=None, + residual_attn=False, + cross_residual_attn=False, + macaron=False, + pre_norm=True, + gate_residual=False, + **kwargs, + ): + super().__init__() + ff_kwargs, kwargs = groupby_prefix_and_trim("ff_", kwargs) + attn_kwargs, _ = groupby_prefix_and_trim("attn_", kwargs) + + dim_head = attn_kwargs.get("dim_head", DEFAULT_DIM_HEAD) + + self.dim = dim + self.depth = depth + self.layers = nn.ModuleList([]) + + self.has_pos_emb = position_infused_attn + self.pia_pos_emb = ( + FixedPositionalEmbedding(dim) if position_infused_attn else None + ) + self.rotary_pos_emb = always(None) + + assert ( + rel_pos_num_buckets <= rel_pos_max_distance + ), "number of relative position buckets must be less than the relative position max distance" + self.rel_pos = None + + self.pre_norm = pre_norm + + self.residual_attn = residual_attn + self.cross_residual_attn = cross_residual_attn + + norm_class = ScaleNorm if use_scalenorm else nn.LayerNorm + norm_class = RMSNorm if use_rmsnorm else norm_class + norm_fn = partial(norm_class, dim) + + norm_fn = nn.Identity if use_rezero else norm_fn + branch_fn = Rezero if use_rezero else None + + if cross_attend and not only_cross: + default_block = ("a", "c", "f") + elif cross_attend and only_cross: + default_block = ("c", "f") + else: + default_block = ("a", "f") + + if macaron: + default_block = ("f",) + default_block + + if exists(custom_layers): + layer_types = custom_layers + elif exists(par_ratio): + par_depth = depth * len(default_block) + assert 1 < par_ratio <= par_depth, "par ratio out of range" + default_block = tuple(filter(not_equals("f"), default_block)) + par_attn = par_depth // par_ratio + depth_cut = ( + par_depth * 2 // 3 + ) # 2 / 3 attention layer cutoff suggested by PAR paper + par_width = (depth_cut + depth_cut // par_attn) // par_attn + assert ( + len(default_block) <= par_width + ), "default block is too large for par_ratio" + par_block = default_block + ("f",) * (par_width - len(default_block)) + par_head = par_block * par_attn + layer_types = par_head + ("f",) * (par_depth - len(par_head)) + elif exists(sandwich_coef): + assert ( + sandwich_coef > 0 and sandwich_coef <= depth + ), "sandwich coefficient should be less than the depth" + layer_types = ( + ("a",) * sandwich_coef + + default_block * (depth - sandwich_coef) + + ("f",) * sandwich_coef + ) + else: + layer_types = default_block * depth + + self.layer_types = layer_types + self.num_attn_layers = len(list(filter(equals("a"), layer_types))) + + for layer_type in self.layer_types: + if layer_type == "a": + layer = Attention(dim, heads=heads, causal=causal, **attn_kwargs) + elif 
layer_type == "c": + layer = Attention(dim, heads=heads, **attn_kwargs) + elif layer_type == "f": + layer = FeedForward(dim, **ff_kwargs) + layer = layer if not macaron else Scale(0.5, layer) + else: + raise Exception(f"invalid layer type {layer_type}") + + if isinstance(layer, Attention) and exists(branch_fn): + layer = branch_fn(layer) + + if gate_residual: + residual_fn = GRUGating(dim) + else: + residual_fn = Residual() + + self.layers.append(nn.ModuleList([norm_fn(), layer, residual_fn])) + + def forward( + self, + x, + context=None, + mask=None, + context_mask=None, + mems=None, + return_hiddens=False, + ): + hiddens = [] + intermediates = [] + prev_attn = None + prev_cross_attn = None + + mems = mems.copy() if exists(mems) else [None] * self.num_attn_layers + + for ind, (layer_type, (norm, block, residual_fn)) in enumerate( + zip(self.layer_types, self.layers) + ): + is_last = ind == (len(self.layers) - 1) + + if layer_type == "a": + hiddens.append(x) + layer_mem = mems.pop(0) + + residual = x + + if self.pre_norm: + x = norm(x) + + if layer_type == "a": + out, inter = block( + x, + mask=mask, + sinusoidal_emb=self.pia_pos_emb, + rel_pos=self.rel_pos, + prev_attn=prev_attn, + mem=layer_mem, + ) + elif layer_type == "c": + out, inter = block( + x, + context=context, + mask=mask, + context_mask=context_mask, + prev_attn=prev_cross_attn, + ) + elif layer_type == "f": + out = block(x) + + x = residual_fn(out, residual) + + if layer_type in ("a", "c"): + intermediates.append(inter) + + if layer_type == "a" and self.residual_attn: + prev_attn = inter.pre_softmax_attn + elif layer_type == "c" and self.cross_residual_attn: + prev_cross_attn = inter.pre_softmax_attn + + if not self.pre_norm and not is_last: + x = norm(x) + + if return_hiddens: + intermediates = LayerIntermediates( + hiddens=hiddens, attn_intermediates=intermediates + ) + + return x, intermediates + + return x + + +class Encoder(AttentionLayers): + def __init__(self, **kwargs): + assert "causal" not in kwargs, "cannot set causality on encoder" + super().__init__(causal=False, **kwargs) + + +class TransformerWrapper(nn.Module): + def __init__( + self, + *, + num_tokens, + max_seq_len, + attn_layers, + emb_dim=None, + max_mem_len=0.0, + emb_dropout=0.0, + num_memory_tokens=None, + tie_embedding=False, + use_pos_emb=True, + ): + super().__init__() + assert isinstance( + attn_layers, AttentionLayers + ), "attention layers must be one of Encoder or Decoder" + + dim = attn_layers.dim + emb_dim = default(emb_dim, dim) + + self.max_seq_len = max_seq_len + self.max_mem_len = max_mem_len + self.num_tokens = num_tokens + + self.token_emb = nn.Embedding(num_tokens, emb_dim) + self.pos_emb = ( + AbsolutePositionalEmbedding(emb_dim, max_seq_len) + if (use_pos_emb and not attn_layers.has_pos_emb) + else always(0) + ) + self.emb_dropout = nn.Dropout(emb_dropout) + + self.project_emb = nn.Linear(emb_dim, dim) if emb_dim != dim else nn.Identity() + self.attn_layers = attn_layers + self.norm = nn.LayerNorm(dim) + + self.init_() + + self.to_logits = ( + nn.Linear(dim, num_tokens) + if not tie_embedding + else lambda t: t @ self.token_emb.weight.t() + ) + + # memory tokens (like [cls]) from Memory Transformers paper + num_memory_tokens = default(num_memory_tokens, 0) + self.num_memory_tokens = num_memory_tokens + if num_memory_tokens > 0: + self.memory_tokens = nn.Parameter(torch.randn(num_memory_tokens, dim)) + + # let funnel encoder know number of memory tokens, if specified + if hasattr(attn_layers, "num_memory_tokens"): + 
attn_layers.num_memory_tokens = num_memory_tokens + + def init_(self): + nn.init.normal_(self.token_emb.weight, std=0.02) + + def forward( + self, + x, + return_embeddings=False, + mask=None, + return_mems=False, + return_attn=False, + mems=None, + **kwargs, + ): + b, n, device, num_mem = *x.shape, x.device, self.num_memory_tokens + x = self.token_emb(x) + x += self.pos_emb(x) + x = self.emb_dropout(x) + + x = self.project_emb(x) + + if num_mem > 0: + mem = repeat(self.memory_tokens, "n d -> b n d", b=b) + x = torch.cat((mem, x), dim=1) + + # auto-handle masking after appending memory tokens + if exists(mask): + mask = F.pad(mask, (num_mem, 0), value=True) + + x, intermediates = self.attn_layers( + x, mask=mask, mems=mems, return_hiddens=True, **kwargs + ) + x = self.norm(x) + + mem, x = x[:, :num_mem], x[:, num_mem:] + + out = self.to_logits(x) if not return_embeddings else x + + if return_mems: + hiddens = intermediates.hiddens + new_mems = ( + list(map(lambda pair: torch.cat(pair, dim=-2), zip(mems, hiddens))) + if exists(mems) + else hiddens + ) + new_mems = list( + map(lambda t: t[..., -self.max_mem_len :, :].detach(), new_mems) + ) + return out, new_mems + + if return_attn: + attn_maps = list( + map(lambda t: t.post_softmax_attn, intermediates.attn_intermediates) + ) + return out, attn_maps + + return out diff --git a/qa_mdt/audioldm_train/modules/hifigan/LICENSE b/qa_mdt/audioldm_train/modules/hifigan/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5afae394d6b37da0e12ba6b290d2512687f421ac --- /dev/null +++ b/qa_mdt/audioldm_train/modules/hifigan/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Jungil Kong + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/qa_mdt/audioldm_train/modules/hifigan/__init__.py b/qa_mdt/audioldm_train/modules/hifigan/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d20ac10b41694550b00c5e92a6180c7dbb6cdb7f --- /dev/null +++ b/qa_mdt/audioldm_train/modules/hifigan/__init__.py @@ -0,0 +1,8 @@ +from .models_hifires import Generator_HiFiRes +from .models import Generator as Generator + + +class AttrDict(dict): + def __init__(self, *args, **kwargs): + super(AttrDict, self).__init__(*args, **kwargs) + self.__dict__ = self diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..64715afed537f34222684f5a10d014e268750c93 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-38.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f56d6e8bbb5550323218a9911b71df18a07d34ba Binary files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-39.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8ad50ab920d09eb725998ca060159edb9658de51 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/__init__.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-310.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa5d792aa66126ecbe39338012f577246287a8f2 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-38.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c90ae435cb40fddcda144f24f97c244c58a7b0af Binary files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-39.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9fd34f21569e8ca6e8864d2d42805727ea4ab028 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-310.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a68aad26fb8d897a3d76ce07c118d8b8d8327d9a Binary files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-38.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c383e2f08f20b14cf00419abbf2de78cc0737e8 Binary 
files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-39.pyc b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b8ba02a7faeb64b4bbaf1e2aa356c678a27a7847 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/hifigan/__pycache__/models_hifires.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/modules/hifigan/models.py b/qa_mdt/audioldm_train/modules/hifigan/models.py new file mode 100644 index 0000000000000000000000000000000000000000..ca815212ab96827ffdd40804e1ef8450228b9bfe --- /dev/null +++ b/qa_mdt/audioldm_train/modules/hifigan/models.py @@ -0,0 +1,174 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.nn import Conv1d, ConvTranspose1d +from torch.nn.utils import weight_norm, remove_weight_norm + +LRELU_SLOPE = 0.1 + + +def init_weights(m, mean=0.0, std=0.01): + classname = m.__class__.__name__ + if classname.find("Conv") != -1: + m.weight.data.normal_(mean, std) + + +def get_padding(kernel_size, dilation=1): + return int((kernel_size * dilation - dilation) / 2) + + +class ResBlock(torch.nn.Module): + def __init__(self, h, channels, kernel_size=3, dilation=(1, 3, 5)): + super(ResBlock, self).__init__() + self.h = h + self.convs1 = nn.ModuleList( + [ + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[0], + padding=get_padding(kernel_size, dilation[0]), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[1], + padding=get_padding(kernel_size, dilation[1]), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[2], + padding=get_padding(kernel_size, dilation[2]), + ) + ), + ] + ) + self.convs1.apply(init_weights) + + self.convs2 = nn.ModuleList( + [ + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + padding=get_padding(kernel_size, 1), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + padding=get_padding(kernel_size, 1), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + padding=get_padding(kernel_size, 1), + ) + ), + ] + ) + self.convs2.apply(init_weights) + + def forward(self, x): + for c1, c2 in zip(self.convs1, self.convs2): + xt = F.leaky_relu(x, LRELU_SLOPE) + xt = c1(xt) + xt = F.leaky_relu(xt, LRELU_SLOPE) + xt = c2(xt) + x = xt + x + return x + + def remove_weight_norm(self): + for l in self.convs1: + remove_weight_norm(l) + for l in self.convs2: + remove_weight_norm(l) + + +class Generator(torch.nn.Module): + def __init__(self, h): + super(Generator, self).__init__() + self.h = h + self.num_kernels = len(h.resblock_kernel_sizes) + self.num_upsamples = len(h.upsample_rates) + self.conv_pre = weight_norm( + Conv1d(h.num_mels, h.upsample_initial_channel, 7, 1, padding=3) + ) + resblock = ResBlock + + self.ups = nn.ModuleList() + for i, (u, k) in enumerate(zip(h.upsample_rates, h.upsample_kernel_sizes)): + self.ups.append( + weight_norm( + ConvTranspose1d( + h.upsample_initial_channel // (2**i), + h.upsample_initial_channel // (2 ** (i + 1)), + k, + u, + padding=(k - u) // 2, + ) + ) + ) + + self.resblocks = nn.ModuleList() + for i in range(len(self.ups)): + ch = h.upsample_initial_channel // (2 ** (i + 1)) + for j, (k, d) in enumerate( + 
zip(h.resblock_kernel_sizes, h.resblock_dilation_sizes) + ): + self.resblocks.append(resblock(h, ch, k, d)) + + self.conv_post = weight_norm(Conv1d(ch, 1, 7, 1, padding=3)) + self.ups.apply(init_weights) + self.conv_post.apply(init_weights) + + def forward(self, x): + x = self.conv_pre(x) + for i in range(self.num_upsamples): + x = F.leaky_relu(x, LRELU_SLOPE) + x = self.ups[i](x) + xs = None + for j in range(self.num_kernels): + if xs is None: + xs = self.resblocks[i * self.num_kernels + j](x) + else: + xs += self.resblocks[i * self.num_kernels + j](x) + x = xs / self.num_kernels + x = F.leaky_relu(x) + x = self.conv_post(x) + x = torch.tanh(x) + + return x + + def remove_weight_norm(self): + print("Removing weight norm...") + for l in self.ups: + remove_weight_norm(l) + for l in self.resblocks: + l.remove_weight_norm() + remove_weight_norm(self.conv_pre) + remove_weight_norm(self.conv_post) diff --git a/qa_mdt/audioldm_train/modules/hifigan/models_hifires.py b/qa_mdt/audioldm_train/modules/hifigan/models_hifires.py new file mode 100644 index 0000000000000000000000000000000000000000..c510fca72032affde06b3c3679f82e228d071ad4 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/hifigan/models_hifires.py @@ -0,0 +1,217 @@ +import torch +import torch.nn.functional as F +import torch.nn as nn +from torch.nn import Conv1d, ConvTranspose1d +from torch.nn.utils import weight_norm, remove_weight_norm + +LRELU_SLOPE = 0.1 + + +def init_weights(m, mean=0.0, std=0.01): + classname = m.__class__.__name__ + if classname.find("Conv") != -1: + m.weight.data.normal_(mean, std) + + +def get_padding(kernel_size, dilation=1): + return int((kernel_size * dilation - dilation) / 2) + + +class ResBlock1(torch.nn.Module): + def __init__(self, h, channels, kernel_size=3, dilation=(1, 3, 5)): + super(ResBlock1, self).__init__() + self.h = h + self.convs1 = nn.ModuleList( + [ + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[0], + padding=get_padding(kernel_size, dilation[0]), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[1], + padding=get_padding(kernel_size, dilation[1]), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[2], + padding=get_padding(kernel_size, dilation[2]), + ) + ), + ] + ) + self.convs1.apply(init_weights) + + self.convs2 = nn.ModuleList( + [ + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + padding=get_padding(kernel_size, 1), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + padding=get_padding(kernel_size, 1), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + padding=get_padding(kernel_size, 1), + ) + ), + ] + ) + self.convs2.apply(init_weights) + + def forward(self, x): + for c1, c2 in zip(self.convs1, self.convs2): + xt = F.leaky_relu(x, LRELU_SLOPE) + xt = c1(xt) + xt = F.leaky_relu(xt, LRELU_SLOPE) + xt = c2(xt) + x = xt + x + return x + + def remove_weight_norm(self): + for l in self.convs1: + remove_weight_norm(l) + for l in self.convs2: + remove_weight_norm(l) + + +class ResBlock2(torch.nn.Module): + def __init__(self, h, channels, kernel_size=3, dilation=(1, 3)): + super(ResBlock2, self).__init__() + self.h = h + self.convs = nn.ModuleList( + [ + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[0], + padding=get_padding(kernel_size, dilation[0]), + ) + ), + weight_norm( + Conv1d( + channels, + 
channels, + kernel_size, + 1, + dilation=dilation[1], + padding=get_padding(kernel_size, dilation[1]), + ) + ), + ] + ) + self.convs.apply(init_weights) + + def forward(self, x): + for c in self.convs: + xt = F.leaky_relu(x, LRELU_SLOPE) + xt = c(xt) + x = xt + x + return x + + def remove_weight_norm(self): + for l in self.convs: + remove_weight_norm(l) + + +class Generator_HiFiRes(torch.nn.Module): + def __init__(self, h): + super(Generator_HiFiRes, self).__init__() + self.h = h + self.num_kernels = len(h.resblock_kernel_sizes) + self.num_upsamples = len(h.upsample_rates) + self.conv_pre = weight_norm( + Conv1d(256, h.upsample_initial_channel, 7, 1, padding=3) + ) + resblock = ResBlock1 if h.resblock == "1" else ResBlock2 + + self.ups = nn.ModuleList() + for i, (u, k) in enumerate(zip(h.upsample_rates, h.upsample_kernel_sizes)): + self.ups.append( + weight_norm( + ConvTranspose1d( + h.upsample_initial_channel // (2**i), + h.upsample_initial_channel // (2 ** (i + 1)), + u * 2, + u, + padding=u // 2 + u % 2, + output_padding=u % 2, + ) + ) + ) + + self.resblocks = nn.ModuleList() + for i in range(len(self.ups)): + ch = h.upsample_initial_channel // (2 ** (i + 1)) + for j, (k, d) in enumerate( + zip(h.resblock_kernel_sizes, h.resblock_dilation_sizes) + ): + self.resblocks.append(resblock(h, ch, k, d)) + + self.conv_post = weight_norm(Conv1d(ch, 1, 7, 1, padding=3)) + self.ups.apply(init_weights) + self.conv_post.apply(init_weights) + + def forward(self, x): + x = self.conv_pre(x) + for i in range(self.num_upsamples): + x = F.leaky_relu(x, LRELU_SLOPE) + x = self.ups[i](x) + xs = None + for j in range(self.num_kernels): + if xs is None: + xs = self.resblocks[i * self.num_kernels + j](x) + else: + xs += self.resblocks[i * self.num_kernels + j](x) + x = xs / self.num_kernels + x = F.leaky_relu(x) + x = self.conv_post(x) + x = torch.tanh(x) + + return x + + def remove_weight_norm(self): + print("Removing weight norm...") + for l in self.ups: + remove_weight_norm(l) + for l in self.resblocks: + l.remove_weight_norm() + remove_weight_norm(self.conv_pre) + remove_weight_norm(self.conv_post) diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__init__.py b/qa_mdt/audioldm_train/modules/latent_diffusion/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48bde932889f0e62a098a9461acf6f7e5bbfdc3b Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/__init__.cpython-38.pyc b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f66a885cec9b09d45aa109ffde5e5201dd72b743 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/__init__.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddim.cpython-310.pyc b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddim.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d17d44f0e6647c135f0c1a8248ddba0fa56615df Binary files /dev/null and 
b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddim.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddim.cpython-38.pyc b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddim.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d4bc01e9576273cc4bbf810bb7eef2b875ac6c1 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddim.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddpm.cpython-310.pyc b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddpm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e43d5bf13ac9105f20999148b37d71d10e0e7601 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddpm.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddpm.cpython-38.pyc b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddpm.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..165a21df381d705f04779d78f5f3fff30886f784 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/ddpm.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/plms.cpython-310.pyc b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/plms.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2831483b0bca206eb231860bd6802aaf088fbf02 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/plms.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/plms.cpython-38.pyc b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/plms.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..67acec26ab6bdc21e323faad54f738edd19f393e Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_diffusion/__pycache__/plms.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/ddim.py b/qa_mdt/audioldm_train/modules/latent_diffusion/ddim.py new file mode 100644 index 0000000000000000000000000000000000000000..f44d39bfe8b1d5d957f77119085ad7eed30cddee --- /dev/null +++ b/qa_mdt/audioldm_train/modules/latent_diffusion/ddim.py @@ -0,0 +1,504 @@ +"""SAMPLING ONLY.""" + +import torch +import numpy as np +from tqdm import tqdm +from functools import partial + +from qa_mdt.audioldm_train.utilities.diffusion_util import ( + make_ddim_sampling_parameters, + make_ddim_timesteps, + noise_like, + extract_into_tensor, +) + + +class DDIMSampler(object): + def __init__(self, model, schedule="linear", device=torch.device("cuda"), **kwargs): + super().__init__() + self.model = model + self.ddpm_num_timesteps = model.num_timesteps + self.schedule = schedule + self.device = device + + def register_buffer(self, name, attr): + if type(attr) == torch.Tensor: + if attr.device != self.device: + attr = attr.to(self.device) + setattr(self, name, attr) + + def make_schedule( + self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0.0, verbose=True + ): + self.ddim_timesteps = make_ddim_timesteps( + ddim_discr_method=ddim_discretize, + num_ddim_timesteps=ddim_num_steps, + num_ddpm_timesteps=self.ddpm_num_timesteps, + verbose=verbose, + ) + alphas_cumprod = self.model.alphas_cumprod + assert ( + alphas_cumprod.shape[0] == self.ddpm_num_timesteps + ), "alphas have to be defined for each 
timestep" + to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device) + + self.register_buffer("betas", to_torch(self.model.betas)) + self.register_buffer("alphas_cumprod", to_torch(alphas_cumprod)) + self.register_buffer( + "alphas_cumprod_prev", to_torch(self.model.alphas_cumprod_prev) + ) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer( + "sqrt_alphas_cumprod", to_torch(np.sqrt(alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_one_minus_alphas_cumprod", + to_torch(np.sqrt(1.0 - alphas_cumprod.cpu())), + ) + self.register_buffer( + "log_one_minus_alphas_cumprod", to_torch(np.log(1.0 - alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_recip_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_recipm1_alphas_cumprod", + to_torch(np.sqrt(1.0 / alphas_cumprod.cpu() - 1)), + ) + + # ddim sampling parameters + ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters( + alphacums=alphas_cumprod.cpu(), + ddim_timesteps=self.ddim_timesteps, + eta=ddim_eta, + verbose=verbose, + ) + self.register_buffer("ddim_sigmas", ddim_sigmas) + self.register_buffer("ddim_alphas", ddim_alphas) + self.register_buffer("ddim_alphas_prev", ddim_alphas_prev) + self.register_buffer("ddim_sqrt_one_minus_alphas", np.sqrt(1.0 - ddim_alphas)) + sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt( + (1 - self.alphas_cumprod_prev) + / (1 - self.alphas_cumprod) + * (1 - self.alphas_cumprod / self.alphas_cumprod_prev) + ) + self.register_buffer( + "ddim_sigmas_for_original_num_steps", sigmas_for_original_sampling_steps + ) + + @torch.no_grad() + def sample( + self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0.0, + mask=None, + x0=None, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + verbose=True, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ... 
+ dynamic_threshold=None, + ucg_schedule=None, + **kwargs, + ): + + # if conditioning is not None: + # if isinstance(conditioning, dict): + # ctmp = conditioning[list(conditioning.keys())[0]] + # while isinstance(ctmp, list): ctmp = ctmp[0] + # cbs = ctmp.shape[0] + # if cbs != batch_size: + # print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}") + + # elif isinstance(conditioning, list): + # for ctmp in conditioning: + # if ctmp.shape[0] != batch_size: + # print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}") + + # else: + # if conditioning.shape[0] != batch_size: + # print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}") + + self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose) + # sampling + C, H, W = shape + size = (batch_size, C, H, W) + print(f"Data shape for DDIM sampling is {size}, eta {eta}") + # import pdb + # pdb.set_trace() + samples, intermediates = self.ddim_sampling( + conditioning, + size, + callback=callback, + img_callback=img_callback, + quantize_denoised=quantize_x0, + mask=mask, + x0=x0, + ddim_use_original_steps=False, + noise_dropout=noise_dropout, + temperature=temperature, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + x_T=x_T, + log_every_t=log_every_t, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + dynamic_threshold=dynamic_threshold, + ucg_schedule=ucg_schedule, + ) + return samples, intermediates + + @torch.no_grad() + def ddim_sampling( + self, + cond, + shape, + x_T=None, + ddim_use_original_steps=False, + callback=None, + timesteps=None, + quantize_denoised=False, + mask=None, + x0=None, + img_callback=None, + log_every_t=100, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + dynamic_threshold=None, + ucg_schedule=None, + ): + device = self.model.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + + if timesteps is None: + timesteps = ( + self.ddpm_num_timesteps + if ddim_use_original_steps + else self.ddim_timesteps + ) + elif timesteps is not None and not ddim_use_original_steps: + subset_end = ( + int( + min(timesteps / self.ddim_timesteps.shape[0], 1) + * self.ddim_timesteps.shape[0] + ) + - 1 + ) + timesteps = self.ddim_timesteps[:subset_end] + + intermediates = {"x_inter": [img], "pred_x0": [img]} + time_range = ( + reversed(range(0, timesteps)) + if ddim_use_original_steps + else np.flip(timesteps) + ) + total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0] + print(f"Running DDIM Sampling with {total_steps} timesteps") + + iterator = tqdm(time_range, desc="DDIM Sampler", total=total_steps) + + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((b,), step, device=device, dtype=torch.long) + + if mask is not None: + assert x0 is not None + img_orig = self.model.q_sample( + x0, ts + ) # TODO: deterministic forward pass? 
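+                # inpainting-style sampling: the known region (from x0, re-noised to timestep t)
+                # is kept where mask == 1 and blended with the freshly sampled region elsewhere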
+ img = img_orig * mask + (1.0 - mask) * img + + if ucg_schedule is not None: + assert len(ucg_schedule) == len(time_range) + unconditional_guidance_scale = ucg_schedule[i] + # import pdb; pdb.set_trace() + outs = self.p_sample_ddim( + img, + cond, + ts, + index=index, + use_original_steps=ddim_use_original_steps, + quantize_denoised=quantize_denoised, + temperature=temperature, + noise_dropout=noise_dropout, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + dynamic_threshold=dynamic_threshold, + ) + img, pred_x0 = outs + if callback: + callback(i) + if img_callback: + img_callback(pred_x0, i) + + if index % log_every_t == 0 or index == total_steps - 1: + intermediates["x_inter"].append(img) + intermediates["pred_x0"].append(pred_x0) + + return img, intermediates + + @torch.no_grad() + def p_sample_ddim( + self, + x, + c, + t, + index, + repeat_noise=False, + use_original_steps=False, + quantize_denoised=False, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + dynamic_threshold=None, + ): + b, *_, device = *x.shape, x.device + + if unconditional_conditioning is None or unconditional_guidance_scale == 1.0: + model_output = self.model.apply_model(x, t, c) + else: + x_in = x + t_in = t + + assert isinstance(c, dict) + assert isinstance(unconditional_conditioning, dict) + # import pdb; pdb.set_trace() + unconditional_conditioning['mos'] = torch.ones(x_in.shape[0], 1) * 3 + model_uncond = self.model.apply_model( + x_in, t_in, unconditional_conditioning + ) + + model_t = self.model.apply_model(x_in, t_in, c) + + model_output = model_uncond + unconditional_guidance_scale * ( + model_t - model_uncond + ) + + if self.model.parameterization == "v": + e_t = self.model.predict_eps_from_z_and_v(x, t, model_output) + else: + e_t = model_output + + if score_corrector is not None: + assert self.model.parameterization == "eps", "not implemented" + e_t = score_corrector.modify_score( + self.model, e_t, x, t, c, **corrector_kwargs + ) + + alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas + alphas_prev = ( + self.model.alphas_cumprod_prev + if use_original_steps + else self.ddim_alphas_prev + ) + sqrt_one_minus_alphas = ( + self.model.sqrt_one_minus_alphas_cumprod + if use_original_steps + else self.ddim_sqrt_one_minus_alphas + ) + sigmas = ( + self.model.ddim_sigmas_for_original_num_steps + if use_original_steps + else self.ddim_sigmas + ) + # select parameters corresponding to the currently considered timestep + a_t = torch.full((b, 1, 1, 1), alphas[index], device=device) + a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device) + sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device) + sqrt_one_minus_at = torch.full( + (b, 1, 1, 1), sqrt_one_minus_alphas[index], device=device + ) + + # print(unconditional_conditioning, unconditional_guidance_scale) + # print(model_uncond.shape, model_t.shape, model_output.shape) + # print(x.shape, self.model.parameterization) + # print(sqrt_one_minus_at.shape, e_t.shape, a_t.shape) + # import pdb + # pdb.set_trace() + + # current prediction for x_0 + if self.model.parameterization != "v": + # import pdb + # pdb.set_trace() + pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt() + else: + pred_x0 = self.model.predict_start_from_z_and_v(x, t, model_output) + + if 
quantize_denoised: + pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0) + + if dynamic_threshold is not None: + raise NotImplementedError() + + # direction pointing to x_t + dir_xt = (1.0 - a_prev - sigma_t**2).sqrt() * e_t + noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.0: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise + return x_prev, pred_x0 + + @torch.no_grad() + def encode( + self, + x0, + c, + t_enc, + use_original_steps=False, + return_intermediates=None, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + callback=None, + ): + num_reference_steps = ( + self.ddpm_num_timesteps + if use_original_steps + else self.ddim_timesteps.shape[0] + ) + + assert t_enc <= num_reference_steps + num_steps = t_enc + + if use_original_steps: + alphas_next = self.alphas_cumprod[:num_steps] + alphas = self.alphas_cumprod_prev[:num_steps] + else: + alphas_next = self.ddim_alphas[:num_steps] + alphas = torch.tensor(self.ddim_alphas_prev[:num_steps]) + + x_next = x0 + intermediates = [] + inter_steps = [] + for i in tqdm(range(num_steps), desc="Encoding Image"): + t = torch.full( + (x0.shape[0],), i, device=self.model.device, dtype=torch.long + ) + if unconditional_guidance_scale == 1.0: + noise_pred = self.model.apply_model(x_next, t, c) + else: + assert unconditional_conditioning is not None + e_t_uncond, noise_pred = torch.chunk( + self.model.apply_model( + torch.cat((x_next, x_next)), + torch.cat((t, t)), + torch.cat((unconditional_conditioning, c)), + ), + 2, + ) + noise_pred = e_t_uncond + unconditional_guidance_scale * ( + noise_pred - e_t_uncond + ) + + xt_weighted = (alphas_next[i] / alphas[i]).sqrt() * x_next + weighted_noise_pred = ( + alphas_next[i].sqrt() + * ((1 / alphas_next[i] - 1).sqrt() - (1 / alphas[i] - 1).sqrt()) + * noise_pred + ) + x_next = xt_weighted + weighted_noise_pred + if ( + return_intermediates + and i % (num_steps // return_intermediates) == 0 + and i < num_steps - 1 + ): + intermediates.append(x_next) + inter_steps.append(i) + elif return_intermediates and i >= num_steps - 2: + intermediates.append(x_next) + inter_steps.append(i) + if callback: + callback(i) + + + out = {"x_encoded": x_next, "intermediate_steps": inter_steps} + if return_intermediates: + out.update({"intermediates": intermediates}) + return x_next, out + + @torch.no_grad() + def stochastic_encode(self, x0, t, use_original_steps=False, noise=None): + # fast, but does not allow for exact reconstruction + # t serves as an index to gather the correct alphas + if use_original_steps: + sqrt_alphas_cumprod = self.sqrt_alphas_cumprod + sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod + else: + sqrt_alphas_cumprod = torch.sqrt(self.ddim_alphas) + sqrt_one_minus_alphas_cumprod = self.ddim_sqrt_one_minus_alphas + + if noise is None: + noise = torch.randn_like(x0) + return ( + extract_into_tensor(sqrt_alphas_cumprod, t, x0.shape) * x0 + + extract_into_tensor(sqrt_one_minus_alphas_cumprod, t, x0.shape) * noise + ) + + @torch.no_grad() + def decode( + self, + x_latent, + cond, + t_start, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + use_original_steps=False, + callback=None, + ): + + timesteps = ( + np.arange(self.ddpm_num_timesteps) + if use_original_steps + else self.ddim_timesteps + ) + timesteps = timesteps[:t_start] + + time_range = np.flip(timesteps) + total_steps = timesteps.shape[0] + print(f"Running DDIM Sampling 
with {total_steps} timesteps") + + iterator = tqdm(time_range, desc="Decoding image", total=total_steps) + x_dec = x_latent + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full( + (x_latent.shape[0],), step, device=x_latent.device, dtype=torch.long + ) + x_dec, _ = self.p_sample_ddim( + x_dec, + cond, + ts, + index=index, + use_original_steps=use_original_steps, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + ) + if callback: + callback(i) + return x_dec + diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/ddpm.py b/qa_mdt/audioldm_train/modules/latent_diffusion/ddpm.py new file mode 100644 index 0000000000000000000000000000000000000000..3b1e36c442259ac0ecb05fd550e2031a445f6ac3 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/latent_diffusion/ddpm.py @@ -0,0 +1,2454 @@ +from multiprocessing.sharedctypes import Value +import statistics +import sys +import os +# from tkinter import Ec +# sys.path.append('/home/changli/Adan') + +import torch +import torch.nn as nn +import numpy as np +import pytorch_lightning as pl +from torch.optim.lr_scheduler import LambdaLR +# from adan import Adan +from einops import rearrange, repeat +from contextlib import contextmanager +from functools import partial +from tqdm import tqdm +from torchvision.utils import make_grid +from pytorch_lightning.utilities.rank_zero import rank_zero_only +from qa_mdt.audioldm_train.conditional_models import * +import datetime + +from qa_mdt.audioldm_train.utilities.model_util import ( + exists, + default, + mean_flat, + count_params, + instantiate_from_config, +) + +from qa_mdt.audioldm_train.utilities.diffusion_util import ( + make_beta_schedule, + extract_into_tensor, + noise_like, +) + +from qa_mdt.audioldm_train.modules.diffusionmodules.ema import LitEma +from qa_mdt.audioldm_train.modules.diffusionmodules.distributions import ( + normal_kl, + DiagonalGaussianDistribution, +) + +# from audioldm_train.modules.diffusionmodules.transport import + +from qa_mdt.audioldm_train.modules.latent_diffusion.ddim import DDIMSampler +from qa_mdt.audioldm_train.modules.latent_diffusion.plms import PLMSSampler +import soundfile as sf +import os + +__conditioning_keys__ = {"concat": "c_concat", "crossattn": "c_crossattn", "adm": "y"} + +import json +with open('./qa_mdt/offset_pretrained_checkpoints.json', 'r') as config_file: + config_data = json.load(config_file) + + +def disabled_train(self, mode=True): + """Overwrite model.train with this function to make sure train/eval mode + does not change anymore.""" + return self + + +def uniform_on_device(r1, r2, shape, device): + return (r1 - r2) * torch.rand(*shape, device=device) + r2 + + +class DDPM(pl.LightningModule): + # classic DDPM with Gaussian diffusion, in image space + def __init__( + self, + unet_config, + sampling_rate=None, + timesteps=1000, + beta_schedule="linear", + loss_type="l2", + ckpt_path=None, + ignore_keys=[], + load_only_unet=False, + monitor="val/loss", + use_ema=True, + first_stage_key="image", + latent_t_size=256, + latent_f_size=16, + channels=3, + log_every_t=100, + clip_denoised=True, + linear_start=1e-4, + linear_end=2e-2, + cosine_s=8e-3, + given_betas=None, + original_elbo_weight=0.0, + v_posterior=0.0, # weight for choosing posterior variance as sigma = (1-v) * beta_tilde + v * beta + l_simple_weight=1.0, + conditioning_key=None, + parameterization="eps", # all assuming fixed variance schedules + scheduler_config=None, + 
use_positional_encodings=False, + learn_logvar=False, + logvar_init=0.0, + evaluator=None, + ): + super().__init__() + assert parameterization in [ + "eps", + "x0", + "v", + ], 'currently only supporting "eps" and "x0" and "v"' + self.parameterization = parameterization + self.state = None + print( + f"{self.__class__.__name__}: Running in {self.parameterization}-prediction mode" + ) + assert sampling_rate is not None + self.validation_folder_name = "temp_name" + self.clip_denoised = clip_denoised + self.log_every_t = log_every_t + self.first_stage_key = first_stage_key + self.sampling_rate = sampling_rate + self.clap = CLAPAudioEmbeddingClassifierFreev2( + pretrained_path=config_data["clap_music"], + sampling_rate=self.sampling_rate, + embed_mode="audio", + amodel="HTSAT-base", + ) + + if self.global_rank == 0: + self.evaluator = evaluator + + self.initialize_param_check_toolkit() + + self.latent_t_size = latent_t_size + self.latent_f_size = latent_f_size + + self.channels = channels + self.use_positional_encodings = use_positional_encodings + self.model = DiffusionWrapper(unet_config, conditioning_key) + count_params(self.model, verbose=True) + self.use_ema = use_ema + if self.use_ema: + self.model_ema = LitEma(self.model) + print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.") + + self.use_scheduler = scheduler_config is not None + if self.use_scheduler: + self.scheduler_config = scheduler_config + + self.v_posterior = v_posterior + self.original_elbo_weight = original_elbo_weight + self.l_simple_weight = l_simple_weight + + if monitor is not None: + self.monitor = monitor + if ckpt_path is not None: + self.init_from_ckpt( + ckpt_path, ignore_keys=ignore_keys, only_model=load_only_unet + ) + + self.register_schedule( + given_betas=given_betas, + beta_schedule=beta_schedule, + timesteps=timesteps, + linear_start=linear_start, + linear_end=linear_end, + cosine_s=cosine_s, + ) + + self.loss_type = loss_type + + self.learn_logvar = learn_logvar + self.logvar = torch.full(fill_value=logvar_init, size=(self.num_timesteps,)) + if self.learn_logvar: + self.logvar = nn.Parameter(self.logvar, requires_grad=True) + else: + self.logvar = nn.Parameter(self.logvar, requires_grad=False) + + self.logger_save_dir = None + self.logger_exp_name = None + self.logger_exp_group_name = None + self.logger_version = None + + self.label_indices_total = None + # To avoid the system cannot find metric value for checkpoint + self.metrics_buffer = { + "val/kullback_leibler_divergence_sigmoid": 15.0, + "val/kullback_leibler_divergence_softmax": 10.0, + "val/psnr": 0.0, + "val/ssim": 0.0, + "val/inception_score_mean": 1.0, + "val/inception_score_std": 0.0, + "val/kernel_inception_distance_mean": 0.0, + "val/kernel_inception_distance_std": 0.0, + "val/frechet_inception_distance": 133.0, + "val/frechet_audio_distance": 32.0, + } + self.initial_learning_rate = None + self.test_data_subset_path = None + + def get_log_dir(self): + return os.path.join( + self.logger_save_dir, self.logger_exp_group_name, self.logger_exp_name + ) + + def set_log_dir(self, save_dir, exp_group_name, exp_name): + self.logger_save_dir = save_dir + self.logger_exp_group_name = exp_group_name + self.logger_exp_name = exp_name + + def register_schedule( + self, + given_betas=None, + beta_schedule="linear", + timesteps=1000, + linear_start=1e-4, + linear_end=2e-2, + cosine_s=8e-3, + ): + if exists(given_betas): + betas = given_betas + else: + betas = make_beta_schedule( + beta_schedule, + timesteps, + linear_start=linear_start, + 
linear_end=linear_end, + cosine_s=cosine_s, + ) + alphas = 1.0 - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1.0, alphas_cumprod[:-1]) + + (timesteps,) = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + assert ( + alphas_cumprod.shape[0] == self.num_timesteps + ), "alphas have to be defined for each timestep" + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer("betas", to_torch(betas)) + self.register_buffer("alphas_cumprod", to_torch(alphas_cumprod)) + self.register_buffer("alphas_cumprod_prev", to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer("sqrt_alphas_cumprod", to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer( + "sqrt_one_minus_alphas_cumprod", to_torch(np.sqrt(1.0 - alphas_cumprod)) + ) + self.register_buffer( + "log_one_minus_alphas_cumprod", to_torch(np.log(1.0 - alphas_cumprod)) + ) + self.register_buffer( + "sqrt_recip_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod)) + ) + self.register_buffer( + "sqrt_recipm1_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod - 1)) + ) + + # calculations for posterior q(x_{t-1} | x_t, x_0) + posterior_variance = (1 - self.v_posterior) * betas * ( + 1.0 - alphas_cumprod_prev + ) / (1.0 - alphas_cumprod) + self.v_posterior * betas + # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t) + self.register_buffer("posterior_variance", to_torch(posterior_variance)) + # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain + self.register_buffer( + "posterior_log_variance_clipped", + to_torch(np.log(np.maximum(posterior_variance, 1e-20))), + ) + self.register_buffer( + "posterior_mean_coef1", + to_torch(betas * np.sqrt(alphas_cumprod_prev) / (1.0 - alphas_cumprod)), + ) + self.register_buffer( + "posterior_mean_coef2", + to_torch( + (1.0 - alphas_cumprod_prev) * np.sqrt(alphas) / (1.0 - alphas_cumprod) + ), + ) + + if self.parameterization == "eps": + lvlb_weights = self.betas**2 / ( + 2 + * self.posterior_variance + * to_torch(alphas) + * (1 - self.alphas_cumprod) + ) + elif self.parameterization == "x0": + lvlb_weights = ( + 0.5 + * np.sqrt(torch.Tensor(alphas_cumprod)) + / (2.0 * 1 - torch.Tensor(alphas_cumprod)) + ) + elif self.parameterization == "v": + lvlb_weights = torch.ones_like( + self.betas**2 + / ( + 2 + * self.posterior_variance + * to_torch(alphas) + * (1 - self.alphas_cumprod) + ) + ) + else: + raise NotImplementedError("mu not supported") + # TODO how to choose this term + lvlb_weights[0] = lvlb_weights[1] + self.register_buffer("lvlb_weights", lvlb_weights, persistent=False) + assert not torch.isnan(self.lvlb_weights).all() + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.model.parameters()) + self.model_ema.copy_to(self.model) + if context is not None: + print(f"{context}: Switched to EMA weights") + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.model.parameters()) + if context is not None: + print(f"{context}: Restored training weights") + + def init_from_ckpt(self, path, ignore_keys=list(), only_model=False): + sd = torch.load(path, map_location="cpu") + if "state_dict" in list(sd.keys()): + sd = sd["state_dict"] + keys = list(sd.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from 
state_dict.".format(k)) + del sd[k] + missing, unexpected = ( + self.load_state_dict(sd, strict=False) + if not only_model + else self.model.load_state_dict(sd, strict=False) + ) + print( + f"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys" + ) + if len(missing) > 0: + print(f"Missing Keys: {missing}") + if len(unexpected) > 0: + print(f"Unexpected Keys: {unexpected}") + + def q_mean_variance(self, x_start, t): + """ + Get the distribution q(x_t | x_0). + :param x_start: the [N x C x ...] tensor of noiseless inputs. + :param t: the number of diffusion steps (minus 1). Here, 0 means one step. + :return: A tuple (mean, variance, log_variance), all of x_start's shape. + """ + mean = extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape) + log_variance = extract_into_tensor( + self.log_one_minus_alphas_cumprod, t, x_start.shape + ) + return mean, variance, log_variance + + def predict_start_from_noise(self, x_t, t, noise): + return ( + extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t + - extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) + * noise + ) + + def q_posterior(self, x_start, x_t, t): + posterior_mean = ( + extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start + + extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t + ) + posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape) + posterior_log_variance_clipped = extract_into_tensor( + self.posterior_log_variance_clipped, t, x_t.shape + ) + return posterior_mean, posterior_variance, posterior_log_variance_clipped + + def p_mean_variance(self, x, t, clip_denoised: bool): + model_out = self.model(x, t) + if self.parameterization == "eps": + x_recon = self.predict_start_from_noise(x, t=t, noise=model_out) + elif self.parameterization == "x0": + x_recon = model_out + if clip_denoised: + x_recon.clamp_(-1.0, 1.0) + + model_mean, posterior_variance, posterior_log_variance = self.q_posterior( + x_start=x_recon, x_t=x, t=t + ) + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample(self, x, t, clip_denoised=True, repeat_noise=False): + b, *_, device = *x.shape, x.device + model_mean, _, model_log_variance = self.p_mean_variance( + x=x, t=t, clip_denoised=clip_denoised + ) + noise = noise_like(x.shape, device, repeat_noise) + # no noise when t == 0 + nonzero_mask = ( + (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))).contiguous() + ) + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + @torch.no_grad() + def p_sample_loop(self, shape, return_intermediates=False): + device = self.betas.device + b = shape[0] + img = torch.randn(shape, device=device) + intermediates = [img] + for i in tqdm( + reversed(range(0, self.num_timesteps)), + desc="Sampling t", + total=self.num_timesteps, + ): + img = self.p_sample( + img, + torch.full((b,), i, device=device, dtype=torch.long), + clip_denoised=self.clip_denoised, + ) + if i % self.log_every_t == 0 or i == self.num_timesteps - 1: + intermediates.append(img) + if return_intermediates: + return img, intermediates + return img + + @torch.no_grad() + def sample(self, batch_size=16, return_intermediates=False): + shape = (batch_size, channels, self.latent_t_size, self.latent_f_size) + channels = self.channels + return self.p_sample_loop(shape, return_intermediates=return_intermediates) + + def 
q_sample(self, x_start, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) + * noise + ) + + def get_loss(self, pred, target, mean=True): + if self.loss_type == "l1": + loss = (target - pred).abs() + if mean: + loss = loss.mean() + elif self.loss_type == "l2": + if mean: + loss = torch.nn.functional.mse_loss(target, pred) + else: + loss = torch.nn.functional.mse_loss(target, pred, reduction="none") + else: + raise NotImplementedError("unknown loss type '{loss_type}'") + + return loss + + def predict_start_from_z_and_v(self, x_t, t, v): + # self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) + # self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * x_t + - extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) * v + ) + + def predict_eps_from_z_and_v(self, x_t, t, v): + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x_t.shape) * v + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_t.shape) + * x_t + ) + + def get_v(self, x, noise, t): + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x.shape) * noise + - extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x.shape) * x + ) + + def p_losses(self, x_start, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + model_out = self.model(x_noisy, t) + + mse_loss_weight = None + alpha = extract_into_tensor(self.sqrt_alphas_cumprod, t, t.shape) + sigma = extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, t.shape) + snr = (alpha / sigma) ** 2 + + # velocity = (alpha[:, None, None, None] * x_noisy - x_start) / sigma[:, None, None, None] + + # get loss weight + if self.parameterization != "x0": + mse_loss_weight = torch.ones_like(t) + k = 5.0 + # min{snr, k} + mse_loss_weight = torch.stack([snr, k * torch.ones_like(t)], dim=1).min(dim=1)[0] / snr + else: + k = 5.0 + # min{snr, k} + mse_loss_weight = torch.stack([snr, k * torch.ones_like(t)], dim=1).min(dim=1)[0] + loss_dict = {} + if self.parameterization == "eps": + target = noise + elif self.parameterization == "x0": + target = x_start + elif self.parameterization == "v": + target = self.get_v(x_start, noise, t) + else: + raise NotImplementedError( + f"Paramterization {self.parameterization} not yet supported" + ) + + loss = self.get_loss(model_out, target, mean=False).mean(dim=[1, 2, 3]) + loss = mse_loss_weight * loss + + log_prefix = "train" if self.training else "val" + + loss_dict.update({f"{log_prefix}/loss_simple": loss.mean()}) + loss_simple = loss.mean() * self.l_simple_weight + + loss_vlb = (self.lvlb_weights[t] * loss).mean() + loss_dict.update({f"{log_prefix}/loss_vlb": loss_vlb}) + + loss = loss_simple + self.original_elbo_weight * loss_vlb + + loss_dict.update({f"{log_prefix}/loss": loss}) + + return loss, loss_dict + + def forward(self, x, *args, **kwargs): + # b, c, h, w, device, img_size, = *x.shape, x.device, self.image_size + # assert h == img_size and w == img_size, f'height and width of image must be {img_size}' + t = torch.randint( + 0, self.num_timesteps, (x.shape[0],), device=self.device + ).long() + return self.p_losses(x, t, *args, **kwargs) + + def get_input(self, batch, k): + # fbank, 
log_magnitudes_stft, label_indices, fname, waveform, clip_label, text = batch + # fbank, stft, label_indices, fname, waveform, text = batch + # a = 1/0 + fname, text, label_indices, waveform, stft, fbank, mos = ( + batch["fname"], + batch["text"], + batch["label_vector"], + batch["waveform"], + batch["stft"], + batch["log_mel_spec"], + batch["mos"], + # batch + ) + # for i in range(fbank.size(0)): + # fb = fbank[i].numpy() + # seg_lb = seg_label[i].numpy() + # logits = np.mean(seg_lb, axis=0) + # index = np.argsort(logits)[::-1][:5] + # plt.imshow(seg_lb[:,index], aspect="auto") + # plt.title(index) + # plt.savefig("%s_label.png" % i) + # plt.close() + # plt.imshow(fb, aspect="auto") + # plt.savefig("%s_fb.png" % i) + # plt.close() + ret = {} + + ret["fbank"] = ( + fbank.unsqueeze(1).to(memory_format=torch.contiguous_format).float() + ) + ret["stft"] = stft.to(memory_format=torch.contiguous_format).float() + # ret["clip_label"] = clip_label.to(memory_format=torch.contiguous_format).float() + ret["waveform"] = waveform.to(memory_format=torch.contiguous_format).float() + ret["text"] = list(text) + ret["fname"] = fname + ret["mos"] = list(mos) + + for key in batch.keys(): + if key not in ret.keys(): + ret[key] = batch[key] + + return ret[k] + + def shared_step(self, batch): + x = self.get_input(batch, self.first_stage_key) + loss, loss_dict = self(x) + return loss, loss_dict + + def warmup_step(self): + if self.initial_learning_rate is None: + self.initial_learning_rate = self.learning_rate + + # Only the first parameter group + if self.global_step <= self.warmup_steps: + if self.global_step == 0: + print( + "Warming up learning rate start with %s" + % self.initial_learning_rate + ) + self.trainer.optimizers[0].param_groups[0]["lr"] = ( + self.global_step / self.warmup_steps + ) * self.initial_learning_rate + else: + # TODO set learning rate here + self.trainer.optimizers[0].param_groups[0][ + "lr" + ] = self.initial_learning_rate + + def training_step(self, batch, batch_idx): + # You instantiate a optimizer for the scheduler + # But later you overwrite the optimizer by reloading its states from a checkpoint + # So you need to replace the optimizer with the checkpoint one + # if(self.lr_schedulers().optimizer.param_groups[0]['lr'] != self.trainer.optimizers[0].param_groups[0]['lr']): + # self.lr_schedulers().optimizer = self.trainer.optimizers[0] + + # if(self.ckpt is not None): + # self.reload_everything() + # self.ckpt = None + + self.random_clap_condition() + self.warmup_step() + + # if ( + # self.state is None + # and len(self.trainer.optimizers[0].state_dict()["state"].keys()) > 0 + # ): + # self.state = ( + # self.trainer.optimizers[0].state_dict()["state"][0]["exp_avg"].clone() + # ) + # elif self.state is not None and batch_idx % 1000 == 0: + # assert ( + # torch.sum( + # torch.abs( + # self.state + # - self.trainer.optimizers[0].state_dict()["state"][0]["exp_avg"] + # ) + # ) + # > 1e-7 + # ), "Optimizer is not working" + + if len(self.metrics_buffer.keys()) > 0: + for k in self.metrics_buffer.keys(): + self.log( + k, + self.metrics_buffer[k], + prog_bar=False, + logger=True, + on_step=True, + on_epoch=False, + ) + # print(k, self.metrics_buffer[k]) + self.metrics_buffer = {} + loss, loss_dict = self.shared_step(batch) + + self.log_dict( + {k: float(v) for k, v in loss_dict.items()}, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=True, + ) + + self.log( + "global_step", + float(self.global_step), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + + lr 
= self.trainer.optimizers[0].param_groups[0]["lr"] + self.log( + "lr_abs", + float(lr), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + + return loss + + def random_clap_condition(self): + # This function is only used during training, let the CLAP model to use both text and audio as condition + assert self.training == True + + for key in self.cond_stage_model_metadata.keys(): + metadata = self.cond_stage_model_metadata[key] + model_idx, cond_stage_key, conditioning_key = ( + metadata["model_idx"], + metadata["cond_stage_key"], + metadata["conditioning_key"], + ) + + # If we use CLAP as condition, we might use audio for training, but we also must use text for evaluation + if isinstance( + self.cond_stage_models[model_idx], CLAPAudioEmbeddingClassifierFreev2 + ): + self.cond_stage_model_metadata[key][ + "cond_stage_key_orig" + ] = self.cond_stage_model_metadata[key]["cond_stage_key"] + self.cond_stage_model_metadata[key][ + "embed_mode_orig" + ] = self.cond_stage_models[model_idx].embed_mode + if torch.randn(1).item() < 0.5: + self.cond_stage_model_metadata[key]["cond_stage_key"] = "text" + self.cond_stage_models[model_idx].embed_mode = "text" + else: + self.cond_stage_model_metadata[key]["cond_stage_key"] = "waveform" + self.cond_stage_models[model_idx].embed_mode = "audio" + + def on_validation_epoch_start(self) -> None: + # Use text as condition during validation + for key in self.cond_stage_model_metadata.keys(): + metadata = self.cond_stage_model_metadata[key] + model_idx, cond_stage_key, conditioning_key = ( + metadata["model_idx"], + metadata["cond_stage_key"], + metadata["conditioning_key"], + ) + + # If we use CLAP as condition, we might use audio for training, but we also must use text for evaluation + if isinstance( + self.cond_stage_models[model_idx], CLAPAudioEmbeddingClassifierFreev2 + ): + self.cond_stage_model_metadata[key][ + "cond_stage_key_orig" + ] = self.cond_stage_model_metadata[key]["cond_stage_key"] + self.cond_stage_model_metadata[key][ + "embed_mode_orig" + ] = self.cond_stage_models[model_idx].embed_mode + print( + "Change the model original cond_keyand embed_mode %s, %s to text during evaluation" + % ( + self.cond_stage_model_metadata[key]["cond_stage_key_orig"], + self.cond_stage_model_metadata[key]["embed_mode_orig"], + ) + ) + self.cond_stage_model_metadata[key]["cond_stage_key"] = "text" + self.cond_stage_models[model_idx].embed_mode = "text" + + if isinstance( + self.cond_stage_models[model_idx], CLAPGenAudioMAECond + ) or isinstance(self.cond_stage_models[model_idx], SequenceGenAudioMAECond): + self.cond_stage_model_metadata[key][ + "use_gt_mae_output_orig" + ] = self.cond_stage_models[model_idx].use_gt_mae_output + self.cond_stage_model_metadata[key][ + "use_gt_mae_prob_orig" + ] = self.cond_stage_models[model_idx].use_gt_mae_prob + print("Change the model condition to the predicted AudioMAE tokens") + self.cond_stage_models[model_idx].use_gt_mae_output = False + self.cond_stage_models[model_idx].use_gt_mae_prob = 0.0 + self.validation_folder_name = self.get_validation_folder_name() + return super().on_validation_epoch_start() + + @torch.no_grad() + def validation_step(self, batch, batch_idx): + self.generate_sample( + [batch], + name=self.validation_folder_name, + unconditional_guidance_scale=self.evaluation_params[ + "unconditional_guidance_scale" + ], + ddim_steps=self.evaluation_params["ddim_sampling_steps"], + n_gen=self.evaluation_params["n_candidates_per_samples"], + ) + + def get_validation_folder_name(self): + now = 
datetime.datetime.now() + timestamp = now.strftime("%m-%d-%H:%M") + return "val_%s_%s_cfg_scale_%s_ddim_%s_n_cand_%s" % ( + self.global_step, + timestamp, + self.evaluation_params["unconditional_guidance_scale"], + self.evaluation_params["ddim_sampling_steps"], + self.evaluation_params["n_candidates_per_samples"], + ) + + def on_validation_epoch_end(self) -> None: + if self.global_rank == 0 and self.evaluator is not None: + assert ( + self.test_data_subset_path is not None + ), "Please set test_data_subset_path before validation so that model have a target folder" + try: + + name = self.validation_folder_name + # import pdb + # pdb.set_trace() + waveform_save_path = os.path.join(self.get_log_dir(), name) + if ( + os.path.exists(waveform_save_path) + and len(os.listdir(waveform_save_path)) > 0 + ): + + metrics = self.evaluator.main( + waveform_save_path, + self.test_data_subset_path, + ) + + self.metrics_buffer = { + ("val/" + k): float(v) for k, v in metrics.items() + } + else: + print( + "The target folder for evaluation does not exist: %s" + % waveform_save_path + ) + except Exception as e: + print("Error encountered during evaluation: ", e) + + # Very important or the program may fail + torch.cuda.synchronize() + + for key in self.cond_stage_model_metadata.keys(): + metadata = self.cond_stage_model_metadata[key] + model_idx, cond_stage_key, conditioning_key = ( + metadata["model_idx"], + metadata["cond_stage_key"], + metadata["conditioning_key"], + ) + + if isinstance( + self.cond_stage_models[model_idx], CLAPAudioEmbeddingClassifierFreev2 + ): + self.cond_stage_model_metadata[key][ + "cond_stage_key" + ] = self.cond_stage_model_metadata[key]["cond_stage_key_orig"] + self.cond_stage_models[ + model_idx + ].embed_mode = self.cond_stage_model_metadata[key]["embed_mode_orig"] + print( + "Change back the embedding mode to %s %s" + % ( + self.cond_stage_model_metadata[key]["cond_stage_key"], + self.cond_stage_models[model_idx].embed_mode, + ) + ) + + if isinstance( + self.cond_stage_models[model_idx], CLAPGenAudioMAECond + ) or isinstance(self.cond_stage_models[model_idx], SequenceGenAudioMAECond): + self.cond_stage_models[ + model_idx + ].use_gt_mae_output = self.cond_stage_model_metadata[key][ + "use_gt_mae_output_orig" + ] + self.cond_stage_models[ + model_idx + ].use_gt_mae_prob = self.cond_stage_model_metadata[key][ + "use_gt_mae_prob_orig" + ] + print( + "Change the AudioMAE condition setting to %s (Use gt) %s (gt prob)" + % ( + self.cond_stage_models[model_idx].use_gt_mae_output, + self.cond_stage_models[model_idx].use_gt_mae_prob, + ) + ) + + return super().on_validation_epoch_end() + + def on_train_epoch_start(self, *args, **kwargs): + print("Log directory: ", self.get_log_dir()) + + def on_train_batch_end(self, *args, **kwargs): + # Does this affect speed? 
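+        # update the exponential-moving-average copy of the diffusion weights after every training batch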
+ if self.use_ema: + self.model_ema(self.model) + + def _get_rows_from_list(self, samples): + n_imgs_per_row = len(samples) + denoise_grid = rearrange(samples, "n b c h w -> b n c h w") + denoise_grid = rearrange(denoise_grid, "b n c h w -> (b n) c h w") + denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row) + return denoise_grid + + @torch.no_grad() + def log_images(self, batch, N=8, n_row=2, sample=True, return_keys=None, **kwargs): + log = dict() + x = self.get_input(batch, self.first_stage_key) + N = min(x.shape[0], N) + n_row = min(x.shape[0], n_row) + x = x.to(self.device)[:N] + log["inputs"] = x + + # get diffusion row + diffusion_row = list() + x_start = x[:n_row] + + for t in range(self.num_timesteps): + if t % self.log_every_t == 0 or t == self.num_timesteps - 1: + t = repeat(torch.tensor([t]), "1 -> b", b=n_row) + t = t.to(self.device).long() + noise = torch.randn_like(x_start) + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + diffusion_row.append(x_noisy) + + log["diffusion_row"] = self._get_rows_from_list(diffusion_row) + + if sample: + # get denoise row + with self.ema_scope("Plotting"): + samples, denoise_row = self.sample( + batch_size=N, return_intermediates=True + ) + + log["samples"] = samples + log["denoise_row"] = self._get_rows_from_list(denoise_row) + + if return_keys: + if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0: + return log + else: + return {key: log[key] for key in return_keys} + return log + + def configure_optimizers(self): + lr = self.learning_rate + params = list(self.model.parameters()) + if self.learn_logvar: + params = params + [self.logvar] + opt = torch.optim.AdamW(params, lr=lr) + # opt = Adan(params, lr=lr, max_grad_norm=1, fused=True) + return opt + + def initialize_param_check_toolkit(self): + self.tracked_steps = 0 + self.param_dict = {} + + def statistic_require_grad_tensor_number(self, module, name=None): + requires_grad_num = 0 + total_num = 0 + require_grad_tensor = None + for p in module.parameters(): + if p.requires_grad: + requires_grad_num += 1 + if require_grad_tensor is None: + require_grad_tensor = p + total_num += 1 + print( + "Module: [%s] have %s trainable parameters out of %s total parameters (%.2f)" + % (name, requires_grad_num, total_num, requires_grad_num / total_num) + ) + return require_grad_tensor + + def check_module_param_update(self): + if self.tracked_steps == 0: + for name, module in self.named_children(): + try: + require_grad_tensor = self.statistic_require_grad_tensor_number( + module, name=name + ) + if require_grad_tensor is not None: + self.param_dict[name] = require_grad_tensor.clone() + else: + print("==> %s does not requires grad" % name) + except Exception as e: + print("%s does not have trainable parameters: %s" % (name, e)) + continue + + if self.tracked_steps % 5000 == 0: + for name, module in self.named_children(): + try: + require_grad_tensor = self.statistic_require_grad_tensor_number( + module, name=name + ) + + if require_grad_tensor is not None: + print( + "===> Param diff %s: %s; Size: %s" + % ( + name, + torch.sum( + torch.abs( + self.param_dict[name] - require_grad_tensor + ) + ), + require_grad_tensor.size(), + ) + ) + else: + print("%s does not requires grad" % name) + except Exception as e: + print("%s does not have trainable parameters: %s" % (name, e)) + continue + + self.tracked_steps += 1 + + +class LatentDiffusion(DDPM): + """main class""" + + def __init__( + self, + first_stage_config, + cond_stage_config=None, + num_timesteps_cond=None, + 
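+        # cond_stage_key selects which entry of the input batch is fed to the conditioning model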
cond_stage_key="image", + optimize_ddpm_parameter=True, + unconditional_prob_cfg=0.1, + warmup_steps=10000, + cond_stage_trainable=False, + concat_mode=True, + cond_stage_forward=None, + conditioning_key=None, + scale_factor=1.0, + batchsize=None, + evaluation_params={}, + scale_by_std=False, + base_learning_rate=None, + *args, + **kwargs, + ): + self.learning_rate = base_learning_rate + self.num_timesteps_cond = default(num_timesteps_cond, 1) + self.scale_by_std = scale_by_std + self.warmup_steps = warmup_steps + + + if optimize_ddpm_parameter: + if unconditional_prob_cfg == 0.0: + "You choose to optimize DDPM. The classifier free guidance scale should be 0.1" + unconditional_prob_cfg = 0.1 + else: + if unconditional_prob_cfg == 0.1: + "You choose not to optimize DDPM. The classifier free guidance scale should be 0.0" + unconditional_prob_cfg = 0.0 + + self.evaluation_params = evaluation_params + assert self.num_timesteps_cond <= kwargs["timesteps"] + + # for backwards compatibility after implementation of DiffusionWrapper + # if conditioning_key is None: + # conditioning_key = "concat" if concat_mode else "crossattn" + # if cond_stage_config == "__is_unconditional__": + # conditioning_key = None + + conditioning_key = list(cond_stage_config.keys()) + + self.conditioning_key = conditioning_key + + ckpt_path = kwargs.pop("ckpt_path", None) + ignore_keys = kwargs.pop("ignore_keys", []) + super().__init__(conditioning_key=conditioning_key, *args, **kwargs) + + self.optimize_ddpm_parameter = optimize_ddpm_parameter + # if(not optimize_ddpm_parameter): + # print("Warning: Close the optimization of the latent diffusion model") + # for p in self.model.parameters(): + # p.requires_grad=False + + self.concat_mode = concat_mode + self.cond_stage_key = cond_stage_key + self.cond_stage_key_orig = cond_stage_key + try: + self.num_downs = len(first_stage_config.params.ddconfig.ch_mult) - 1 + except: + self.num_downs = 0 + if not scale_by_std: + self.scale_factor = scale_factor + else: + self.register_buffer("scale_factor", torch.tensor(scale_factor)) + self.instantiate_first_stage(first_stage_config) + self.unconditional_prob_cfg = unconditional_prob_cfg + self.cond_stage_models = nn.ModuleList([]) + self.instantiate_cond_stage(cond_stage_config) + self.cond_stage_forward = cond_stage_forward + self.clip_denoised = False + self.bbox_tokenizer = None + self.conditional_dry_run_finished = False + self.restarted_from_ckpt = False + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys) + self.restarted_from_ckpt = True + + def configure_optimizers(self): + lr = self.learning_rate + params = list(self.model.parameters()) + + for each in self.cond_stage_models: + params = params + list( + each.parameters() + ) # Add the parameter from the conditional stage + + if self.learn_logvar: + print("Diffusion model optimizing logvar") + params.append(self.logvar) + # opt = Adan(params, lr=lr, max_grad_norm=1, fused=True) + opt = torch.optim.AdamW(params, lr=lr) + # if self.use_scheduler: + # assert "target" in self.scheduler_config + # scheduler = instantiate_from_config(self.scheduler_config) + + # print("Setting up LambdaLR scheduler...") + # scheduler = [ + # { + # "scheduler": LambdaLR(opt, lr_lambda=scheduler.schedule), + # "interval": "step", + # "frequency": 1, + # } + # ] + # return [opt], scheduler + return opt + + def make_cond_schedule( + self, + ): + self.cond_ids = torch.full( + size=(self.num_timesteps,), + fill_value=self.num_timesteps - 1, + dtype=torch.long, + ) + ids = torch.round( 
+ torch.linspace(0, self.num_timesteps - 1, self.num_timesteps_cond) + ).long() + self.cond_ids[: self.num_timesteps_cond] = ids + + @rank_zero_only + @torch.no_grad() + def on_train_batch_start(self, batch, batch_idx): + + # only for very first batch + if ( + self.scale_factor == 1 + and self.scale_by_std + and self.current_epoch == 0 + and self.global_step == 0 + and batch_idx == 0 + and not self.restarted_from_ckpt + ): + # assert self.scale_factor == 1., 'rather not use custom rescaling and std-rescaling simultaneously' + # set rescale weight to 1./std of encodings + print("### USING STD-RESCALING ###") + x = super().get_input(batch, self.first_stage_key) + x = x.to(self.device) + encoder_posterior = self.encode_first_stage(x) + z = self.get_first_stage_encoding(encoder_posterior).detach() + del self.scale_factor + self.register_buffer("scale_factor", 1.0 / z.flatten().std()) + print(f"setting self.scale_factor to {self.scale_factor}") + print("### USING STD-RESCALING ###") + + def register_schedule( + self, + given_betas=None, + beta_schedule="linear", + timesteps=1000, + linear_start=1e-4, + linear_end=2e-2, + cosine_s=8e-3, + ): + super().register_schedule( + given_betas, beta_schedule, timesteps, linear_start, linear_end, cosine_s + ) + + self.shorten_cond_schedule = self.num_timesteps_cond > 1 + if self.shorten_cond_schedule: + self.make_cond_schedule() + + def instantiate_first_stage(self, config): + model = instantiate_from_config(config) + self.first_stage_model = model.eval() + self.first_stage_model.train = disabled_train + for param in self.first_stage_model.parameters(): + param.requires_grad = False + + def make_decision(self, probability): + if float(torch.rand(1)) < probability: + return True + else: + return False + + def instantiate_cond_stage(self, config): + self.cond_stage_model_metadata = {} + for i, cond_model_key in enumerate(config.keys()): + model = instantiate_from_config(config[cond_model_key]) + self.cond_stage_models.append(model) + self.cond_stage_model_metadata[cond_model_key] = { + "model_idx": i, + "cond_stage_key": config[cond_model_key]["cond_stage_key"], + "conditioning_key": config[cond_model_key]["conditioning_key"], + } + + def get_first_stage_encoding(self, encoder_posterior): + if isinstance(encoder_posterior, DiagonalGaussianDistribution): + z = encoder_posterior.sample() + elif isinstance(encoder_posterior, torch.Tensor): + z = encoder_posterior + else: + raise NotImplementedError( + f"encoder_posterior of type '{type(encoder_posterior)}' not yet implemented" + ) + return self.scale_factor * z + + def get_learned_conditioning(self, c, key, unconditional_cfg): + assert key in self.cond_stage_model_metadata.keys() + + # Classifier-free guidance + if not unconditional_cfg: + c = self.cond_stage_models[ + self.cond_stage_model_metadata[key]["model_idx"] + ](c) + else: + # when the cond_stage_key is "all", pick one random element out + if isinstance(c, dict): + c = c[list(c.keys())[0]] + + if isinstance(c, torch.Tensor): + batchsize = c.size(0) + elif isinstance(c, list): + batchsize = len(c) + else: + raise NotImplementedError() + + c = self.cond_stage_models[ + self.cond_stage_model_metadata[key]["model_idx"] + ].get_unconditional_condition(batchsize) + + return c + + def get_input( + self, + batch, + k, + return_first_stage_encode=True, + return_decoding_output=False, + return_encoder_input=False, + return_encoder_output=False, + unconditional_prob_cfg=0.1, + ): + # print(self.cond_stage_model_metadata.keys()) + x = super().get_input(batch, k) + 
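A minimal sketch of the std-rescaling performed in `on_train_batch_start`: the scale factor is set to the reciprocal of the first batch's latent standard deviation so the diffusion model sees roughly unit-variance latents. The latent tensor here is a random stand-in.

```python
import torch

z = torch.randn(4, 8, 256, 16) * 3.7          # stand-in for first-stage latents
scale_factor = 1.0 / z.flatten().std()

z_scaled = scale_factor * z                    # what the diffusion model is trained on
z_restored = (1.0 / scale_factor) * z_scaled   # undo the scaling before decoding
print(float(z_scaled.flatten().std()))         # ~1.0
```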
+ x = x.to(self.device) + + if return_first_stage_encode: + encoder_posterior = self.encode_first_stage(x) + z = self.get_first_stage_encoding(encoder_posterior).detach() + else: + z = None + cond_dict = {} + if len(self.cond_stage_model_metadata.keys()) > 0: + unconditional_cfg = False + if self.conditional_dry_run_finished and self.make_decision( + unconditional_prob_cfg + ): + unconditional_cfg = True + for cond_model_key in self.cond_stage_model_metadata.keys(): + cond_stage_key = self.cond_stage_model_metadata[cond_model_key][ + "cond_stage_key" + ] + + if cond_model_key in cond_dict.keys(): + continue + + if not self.training: + if isinstance( + self.cond_stage_models[ + self.cond_stage_model_metadata[cond_model_key]["model_idx"] + ], + CLAPAudioEmbeddingClassifierFreev2, + ): + print( + "Warning: CLAP model normally should use text for evaluation" + ) + + # The original data for conditioning + # If cond_model_key is "all", that means the conditional model need all the information from a batch + + if cond_stage_key != "all": + xc = super().get_input(batch, cond_stage_key) + if type(xc) == torch.Tensor: + xc = xc.to(self.device) + else: + xc = batch + + # batch inference BUG + #if cond_stage_key == 'text': + # xc = xc[0] + + + # if cond_stage_key is "all", xc will be a dictionary containing all keys + # Otherwise xc will be an entry of the dictionary + c = self.get_learned_conditioning( + xc, key=cond_model_key, unconditional_cfg=unconditional_cfg + ) + + # cond_dict will be used to condition the diffusion model + # If one conditional model return multiple conditioning signal + if isinstance(c, dict): + for k in c.keys(): + cond_dict[k] = c[k] + else: + cond_dict[cond_model_key] = c + + # If the key is accidently added to the dictionary and not in the condition list, remove the condition + # for k in list(cond_dict.keys()): + # if(k not in self.cond_stage_model_metadata.keys()): + # del cond_dict[k] + + cond_dict['mos'] = batch['mos'] + out = [z, cond_dict] + + if return_decoding_output: + xrec = self.decode_first_stage(z) + out += [xrec] + + if return_encoder_input: + out += [x] + + if return_encoder_output: + out += [encoder_posterior] + + if not self.conditional_dry_run_finished: + self.conditional_dry_run_finished = True + + # Output is a dictionary, where the value could only be tensor or tuple + return out + + def decode_first_stage(self, z): + with torch.no_grad(): + z = 1.0 / self.scale_factor * z + decoding = self.first_stage_model.decode(z) + return decoding + + def mel_spectrogram_to_waveform( + self, mel, savepath=".", bs=None, name="outwav", save=True, n_gen=1 + ): + # Mel: [bs, 1, t-steps, fbins] + if len(mel.size()) == 4: + mel = mel.squeeze(1) + mel = mel.permute(0, 2, 1) + waveform = self.first_stage_model.vocoder(mel) + waveform = waveform.cpu().detach().numpy() + if save: + self.save_waveform(waveform, savepath="./") + return waveform + + def encode_first_stage(self, x): + with torch.no_grad(): + return self.first_stage_model.encode(x) + + def extract_possible_loss_in_cond_dict(self, cond_dict): + # This function enable the conditional module to return loss function that can optimize them + + assert isinstance(cond_dict, dict) + losses = {} + + for cond_key in cond_dict.keys(): + if "loss" in cond_key and "noncond" in cond_key: + assert cond_key not in losses.keys() + losses[cond_key] = cond_dict[cond_key] + + return losses + + def filter_useful_cond_dict(self, cond_dict): + new_cond_dict = {} + for key in cond_dict.keys(): + if key in 
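A minimal sketch of the tensor reshaping in `mel_spectrogram_to_waveform`: a decoded [batch, 1, t_steps, f_bins] mel spectrogram is squeezed and permuted to [batch, f_bins, t_steps] before being handed to the vocoder. Shapes below are illustrative.

```python
import torch

mel = torch.randn(2, 1, 1024, 64)    # decoded mel: [batch, 1, t_steps, f_bins]
if mel.dim() == 4:
    mel = mel.squeeze(1)             # -> [2, 1024, 64]
mel = mel.permute(0, 2, 1)           # -> [2, 64, 1024], the layout the vocoder expects
print(mel.shape)
```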
self.cond_stage_model_metadata.keys(): + new_cond_dict[key] = cond_dict[key] + + # All the conditional key in the metadata should be used + for key in self.cond_stage_model_metadata.keys(): + assert key in new_cond_dict.keys(), "%s, %s" % ( + key, + str(new_cond_dict.keys()), + ) + try: + new_cond_dict['mos'] = cond_dict['mos'] + except: + pass + + return new_cond_dict + + def shared_step(self, batch, **kwargs): + # self.check_module_param_update() + if self.training: + # Classifier-free guidance + unconditional_prob_cfg = self.unconditional_prob_cfg + else: + unconditional_prob_cfg = 0.0 # TODO possible bug here + + x, c = self.get_input( + batch, self.first_stage_key, unconditional_prob_cfg=unconditional_prob_cfg + ) + + if self.optimize_ddpm_parameter: + loss, loss_dict = self(x, self.filter_useful_cond_dict(c)) + else: + loss_dict = {} + loss = None + + additional_loss_for_cond_modules = self.extract_possible_loss_in_cond_dict(c) + assert isinstance(additional_loss_for_cond_modules, dict) + + loss_dict.update(additional_loss_for_cond_modules) + + if len(additional_loss_for_cond_modules.keys()) > 0: + for k in additional_loss_for_cond_modules.keys(): + if loss is None: + loss = additional_loss_for_cond_modules[k] + else: + loss = loss + additional_loss_for_cond_modules[k] + + # for k,v in additional_loss_for_cond_modules.items(): + # self.log( + # "cond_stage/"+k, + # float(v), + # prog_bar=True, + # logger=True, + # on_step=True, + # on_epoch=True, + # ) + if self.training: + assert loss is not None + + return loss, loss_dict + + def forward(self, x, c, *args, **kwargs): + t = torch.randint( + 0, self.num_timesteps, (x.shape[0],), device=self.device + ).long() + + # assert c is not None + # c = self.get_learned_conditioning(c) + + loss, loss_dict = self.p_losses(x, c, t, *args, **kwargs) + return loss, loss_dict + + def reorder_cond_dict(self, cond_dict): + # To make sure the order is correct + new_cond_dict = {} + for key in self.conditioning_key: + new_cond_dict[key] = cond_dict[key] + new_cond_dict['mos'] = cond_dict['mos'] + return new_cond_dict + + def apply_model(self, x_noisy, t, cond, return_ids=False): + cond = self.reorder_cond_dict(cond) + # import pdb; pdb.set_trace() + x_recon = self.model(x_noisy, t, cond_dict=cond) + + if isinstance(x_recon, tuple) and not return_ids: + return x_recon[0] + else: + return x_recon + + def p_losses(self, x_start, cond, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + model_output = self.apply_model(x_noisy, t, cond) + + loss_dict = {} + prefix = "train" if self.training else "val" + + if self.parameterization == "x0": + target = x_start + elif self.parameterization == "eps": + target = noise + elif self.parameterization == "v": + target = self.get_v(x_start, noise, t) + else: + raise NotImplementedError() + # print(model_output.size(), target.size()) + mse_loss_weight = None + alpha = extract_into_tensor(self.sqrt_alphas_cumprod, t, t.shape) + sigma = extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, t.shape) + snr = (alpha / sigma) ** 2 + + # velocity = (alpha[:, None, None, None] * x_t - x_start) / sigma[:, None, None, None] + + # get loss weight + if self.parameterization != "x0": + mse_loss_weight = torch.ones_like(t) + k = 5.0 + # min{snr, k} + mse_loss_weight = torch.stack([snr, k * torch.ones_like(t)], dim=1).min(dim=1)[0] / snr + else: + k = 5.0 + # min{snr, k} + mse_loss_weight = torch.stack([snr, k * torch.ones_like(t)], 
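A minimal sketch of the min-SNR-style loss weighting computed in `p_losses` (with k = 5): for the noise-prediction parameterization the per-sample MSE is weighted by min(SNR, k) / SNR, while for x0-prediction the weight is min(SNR, k). The linear beta schedule below is a toy example, not the project's configuration.

```python
import torch

def min_snr_weights(alphas_cumprod, t, k=5.0, parameterization="eps"):
    alpha = alphas_cumprod.sqrt()[t]
    sigma = (1.0 - alphas_cumprod).sqrt()[t]
    snr = (alpha / sigma) ** 2
    clipped = torch.minimum(snr, torch.full_like(snr, k))   # min{SNR, k}
    return clipped / snr if parameterization != "x0" else clipped

betas = torch.linspace(1e-4, 2e-2, 1000)
alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)
t = torch.tensor([10, 500, 990])
print(min_snr_weights(alphas_cumprod, t))                        # eps-prediction weights
print(min_snr_weights(alphas_cumprod, t, parameterization="x0"))  # x0-prediction weights
```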
dim=1).min(dim=1)[0] + loss_simple = self.get_loss(model_output, target, mean=False).mean([1, 2, 3]) + loss_simple = loss_simple * mse_loss_weight + # import pdb + # pdb.set_trace() + loss_dict.update({f"{prefix}/loss_simple": loss_simple.mean()}) + + logvar_t = self.logvar[t].to(self.device) + loss = loss_simple / torch.exp(logvar_t) + logvar_t + # loss = loss_simple / torch.exp(self.logvar) + self.logvar + if self.learn_logvar: + loss_dict.update({f"{prefix}/loss_gamma": loss.mean()}) + loss_dict.update({"logvar": self.logvar.data.mean()}) + + loss = self.l_simple_weight * loss.mean() + + loss_vlb = self.get_loss(model_output, target, mean=False).mean(dim=(1, 2, 3)) + loss_vlb = (self.lvlb_weights[t] * loss_vlb).mean() + loss_dict.update({f"{prefix}/loss_vlb": loss_vlb}) + loss += self.original_elbo_weight * loss_vlb + loss_dict.update({f"{prefix}/loss": loss}) + + return loss, loss_dict + + def p_mean_variance( + self, + x, + c, + t, + clip_denoised: bool, + return_codebook_ids=False, + quantize_denoised=False, + return_x0=False, + score_corrector=None, + corrector_kwargs=None, + ): + t_in = t + model_out = self.apply_model(x, t_in, c, return_ids=return_codebook_ids) + + if score_corrector is not None: + assert self.parameterization == "eps" + model_out = score_corrector.modify_score( + self, model_out, x, t, c, **corrector_kwargs + ) + + if return_codebook_ids: + model_out, logits = model_out + + if self.parameterization == "eps": + x_recon = self.predict_start_from_noise(x, t=t, noise=model_out) + elif self.parameterization == "x0": + x_recon = model_out + else: + raise NotImplementedError() + + if clip_denoised: + x_recon.clamp_(-1.0, 1.0) + if quantize_denoised: + x_recon, _, [_, _, indices] = self.first_stage_model.quantize(x_recon) + model_mean, posterior_variance, posterior_log_variance = self.q_posterior( + x_start=x_recon, x_t=x, t=t + ) + if return_codebook_ids: + return model_mean, posterior_variance, posterior_log_variance, logits + elif return_x0: + return model_mean, posterior_variance, posterior_log_variance, x_recon + else: + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample( + self, + x, + c, + t, + clip_denoised=False, + repeat_noise=False, + return_codebook_ids=False, + quantize_denoised=False, + return_x0=False, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + ): + b, *_, device = *x.shape, x.device + outputs = self.p_mean_variance( + x=x, + c=c, + t=t, + clip_denoised=clip_denoised, + return_codebook_ids=return_codebook_ids, + quantize_denoised=quantize_denoised, + return_x0=return_x0, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + ) + if return_codebook_ids: + raise DeprecationWarning("Support dropped.") + model_mean, _, model_log_variance, logits = outputs + elif return_x0: + model_mean, _, model_log_variance, x0 = outputs + else: + model_mean, _, model_log_variance = outputs + + noise = noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.0: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + # no noise when t == 0 + nonzero_mask = ( + (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))).contiguous() + ) + + # if return_codebook_ids: + # return model_mean + nonzero_mask * ( + # 0.5 * model_log_variance + # ).exp() * noise, logits.argmax(dim=1) + if return_x0: + return ( + model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, + x0, + ) + else: + return model_mean + nonzero_mask * (0.5 
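A minimal sketch of the stochastic step at the end of `p_sample`: Gaussian noise scaled by the posterior standard deviation is added to the predicted mean, except at t = 0 where the mask zeroes the noise. The tensors are random stand-ins.

```python
import torch

def ancestral_step(model_mean, model_log_variance, t):
    b = model_mean.shape[0]
    noise = torch.randn_like(model_mean)
    # no noise for samples whose timestep is 0
    nonzero_mask = (1 - (t == 0).float()).reshape(b, *([1] * (model_mean.dim() - 1)))
    return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise

mean = torch.zeros(2, 8, 256, 16)
log_var = torch.full_like(mean, -4.0)
t = torch.tensor([0, 250])
x_prev = ancestral_step(mean, log_var, t)
print(float(x_prev[0].abs().max()), float(x_prev[1].abs().max()))  # 0.0 vs > 0
```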
* model_log_variance).exp() * noise + + @torch.no_grad() + def progressive_denoising( + self, + cond, + shape, + verbose=True, + callback=None, + quantize_denoised=False, + img_callback=None, + mask=None, + x0=None, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + batch_size=None, + x_T=None, + start_T=None, + log_every_t=None, + ): + if not log_every_t: + log_every_t = self.log_every_t + timesteps = self.num_timesteps + if batch_size is not None: + b = batch_size if batch_size is not None else shape[0] + shape = [batch_size] + list(shape) + else: + b = batch_size = shape[0] + if x_T is None: + img = torch.randn(shape, device=self.device) + else: + img = x_T + intermediates = [] + if cond is not None: + if isinstance(cond, dict): + cond = { + key: cond[key][:batch_size] + if not isinstance(cond[key], list) + else list(map(lambda x: x[:batch_size], cond[key])) + for key in cond + } + else: + cond = ( + [c[:batch_size] for c in cond] + if isinstance(cond, list) + else cond[:batch_size] + ) + + if start_T is not None: + timesteps = min(timesteps, start_T) + iterator = ( + tqdm( + reversed(range(0, timesteps)), + desc="Progressive Generation", + total=timesteps, + ) + if verbose + else reversed(range(0, timesteps)) + ) + if type(temperature) == float: + temperature = [temperature] * timesteps + + for i in iterator: + ts = torch.full((b,), i, device=self.device, dtype=torch.long) + if self.shorten_cond_schedule: + assert self.model.conditioning_key != "hybrid" + tc = self.cond_ids[ts].to(cond.device) + cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond)) + + img, x0_partial = self.p_sample( + img, + cond, + ts, + clip_denoised=self.clip_denoised, + quantize_denoised=quantize_denoised, + return_x0=True, + temperature=temperature[i], + noise_dropout=noise_dropout, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + ) + if mask is not None: + assert x0 is not None + img_orig = self.q_sample(x0, ts) + img = img_orig * mask + (1.0 - mask) * img + + if i % log_every_t == 0 or i == timesteps - 1: + intermediates.append(x0_partial) + if callback: + callback(i) + if img_callback: + img_callback(img, i) + return img, intermediates + + @torch.no_grad() + def p_sample_loop( + self, + cond, + shape, + return_intermediates=False, + x_T=None, + verbose=True, + callback=None, + timesteps=None, + quantize_denoised=False, + mask=None, + x0=None, + img_callback=None, + start_T=None, + log_every_t=None, + ): + if not log_every_t: + log_every_t = self.log_every_t + device = self.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + + intermediates = [img] + if timesteps is None: + timesteps = self.num_timesteps + + if start_T is not None: + timesteps = min(timesteps, start_T) + iterator = ( + tqdm(reversed(range(0, timesteps)), desc="Sampling t", total=timesteps) + if verbose + else reversed(range(0, timesteps)) + ) + + if mask is not None: + assert x0 is not None + assert x0.shape[2:3] == mask.shape[2:3] # spatial size has to match + + for i in iterator: + ts = torch.full((b,), i, device=device, dtype=torch.long) + + if self.shorten_cond_schedule: + assert self.model.conditioning_key != "hybrid" + tc = self.cond_ids[ts].to(cond.device) + cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond)) + + # import pdb + # pdb.set_trace() + img = self.p_sample( + img, + cond, + ts, + clip_denoised=self.clip_denoised, + quantize_denoised=quantize_denoised, + ) + + if mask is 
not None: + img_orig = self.q_sample(x0, ts) + img = img_orig * mask + (1.0 - mask) * img + + if i % log_every_t == 0 or i == timesteps - 1: + intermediates.append(img) + if callback: + callback(i) + if img_callback: + img_callback(img, i) + + if return_intermediates: + return img, intermediates + return img + + @torch.no_grad() + def sample( + self, + cond, + batch_size=16, + return_intermediates=False, + x_T=None, + verbose=True, + timesteps=None, + quantize_denoised=False, + mask=None, + x0=None, + shape=None, + **kwargs, + ): + if shape is None: + shape = (batch_size, self.channels, self.latent_t_size, self.latent_f_size) + if cond is not None: + if isinstance(cond, dict): + cond = { + key: cond[key][:batch_size] + if not isinstance(cond[key], list) + else list(map(lambda x: x[:batch_size], cond[key])) + for key in cond + } + else: + cond = ( + [c[:batch_size] for c in cond] + if isinstance(cond, list) + else cond[:batch_size] + ) + return self.p_sample_loop( + cond, + shape, + return_intermediates=return_intermediates, + x_T=x_T, + verbose=verbose, + timesteps=timesteps, + quantize_denoised=quantize_denoised, + mask=mask, + x0=x0, + **kwargs, + ) + + def save_waveform(self, waveform, savepath="./", name="awesome.wav", n_gen=1): + print(f'debug_name : {name}') + + # If `name` is a list, join the elements into a string or select the first element + if isinstance(name, list): + name = "_".join(name) # Joins the list elements with an underscore + name += ".wav" # Ensures the file has a `.wav` extension + elif not isinstance(name, str): + raise TypeError("Name must be a string or list") + + # Normalize the energy of the waveform + todo_waveform = waveform[0, 0] # Assuming you are only saving the first waveform + todo_waveform = (todo_waveform / np.max(np.abs(todo_waveform))) * 0.8 + + # Define the path where to save the file + path = os.path.join(savepath, name) + + try: + # Save the waveform to the specified path + sf.write(path, todo_waveform, samplerate=self.sampling_rate) + print(f'Waveform saved at -> {path}') + except Exception as e: + print(f'Error saving waveform: {e}') + + + + @torch.no_grad() + def sample_log( + self, + cond, + batch_size, + ddim, + ddim_steps, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + use_plms=False, + mask=None, + **kwargs, + ): + if mask is not None: + shape = (self.channels, mask.size()[-2], mask.size()[-1]) + else: + shape = (self.channels, self.latent_t_size, self.latent_f_size) + + intermediate = None + if ddim and not use_plms: + print("Use ddim sampler") + + ddim_sampler = DDIMSampler(self) + samples, intermediates = ddim_sampler.sample( + ddim_steps, + batch_size, + shape, + cond, + verbose=False, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + mask=mask, + **kwargs, + ) + elif use_plms: + print("Use plms sampler") + plms_sampler = PLMSSampler(self) + samples, intermediates = plms_sampler.sample( + ddim_steps, + batch_size, + shape, + cond, + verbose=False, + unconditional_guidance_scale=unconditional_guidance_scale, + mask=mask, + unconditional_conditioning=unconditional_conditioning, + **kwargs, + ) + + else: + print("Use DDPM sampler") + samples, intermediates = self.sample( + cond=cond, + batch_size=batch_size, + return_intermediates=True, + unconditional_guidance_scale=unconditional_guidance_scale, + mask=mask, + unconditional_conditioning=unconditional_conditioning, + **kwargs, + ) + + return samples, intermediate + + @torch.no_grad() + def 
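A minimal sketch of the peak normalization done in `save_waveform` before writing audio to disk: the waveform is scaled to a maximum absolute amplitude of 0.8 and saved with `soundfile`. The 16 kHz sampling rate and the output file name are assumptions for the example only.

```python
import numpy as np
import soundfile as sf

def save_normalized(waveform: np.ndarray, path: str, sampling_rate: int = 16000):
    peak = np.max(np.abs(waveform))
    if peak > 0:
        waveform = (waveform / peak) * 0.8   # leave headroom below full scale
    sf.write(path, waveform, samplerate=sampling_rate)

save_normalized(np.random.randn(16000).astype(np.float32), "example.wav")
```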
generate_sample( + self, + batchs, + ddim_steps=200, + ddim_eta=1.0, + x_T=None, + n_gen=1, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + name=None, + use_plms=False, + limit_num=None, + **kwargs, + ): + # Generate n_gen times and select the best + # Batch: audio, text, fnames + # import pdb + # pdb.set_trace() + assert x_T is None + try: + batchs = iter(batchs) + except TypeError: + raise ValueError("The first input argument should be an iterable object") + + if use_plms: + assert ddim_steps is not None + + use_ddim = ddim_steps is not None + if name is None: + name = self.get_validation_folder_name() + + waveform_save_path = os.path.join(self.get_log_dir(), name) + os.makedirs(waveform_save_path, exist_ok=True) + print("Waveform save path: ", waveform_save_path) + + # if ( + # "audiocaps" in waveform_save_path + # and len(os.listdir(waveform_save_path)) >= 964 + # ): + # print("The evaluation has already been done at %s" % waveform_save_path) + # return waveform_save_path + + with self.ema_scope("Plotting"): + for i, batch in enumerate(batchs): + #print(batch) + z, c = self.get_input( + batch, + self.first_stage_key, + unconditional_prob_cfg=0.0, # Do not output unconditional information in the c + ) + + # import pdb; pdb.set_trace() + if limit_num is not None and i * z.size(0) > limit_num: + break + + c = self.filter_useful_cond_dict(c) + + text = super().get_input(batch, "text") + mos = super().get_input(batch, "mos") + # for cond_key in c.keys(): + # c[cond_key] = self.cond_stage_models[self.cond_stage_model_metadata[cond_key]["model_idx"]](text[0]) + + # Generate multiple samples + batch_size = z.shape[0] * n_gen + + # Generate multiple samples at a time and filter out the best + # The condition to the diffusion wrapper can have many format + # import pdb + # pdb.set_trace() + for cond_key in c.keys(): + if isinstance(c[cond_key], list): + + for i in range(len(c[cond_key])): + c[cond_key][i] = torch.cat([c[cond_key][i]] * n_gen, dim=0) + + elif isinstance(c[cond_key], dict): + for k in c[cond_key].keys(): + c[cond_key][k] = torch.cat([c[cond_key][k]] * n_gen, dim=0) + else: + c[cond_key] = torch.cat([c[cond_key]] * n_gen, dim=0) + + + text = text * n_gen + mos = mos * n_gen + c['mos'] = torch.stack(mos).unsqueeze(1) + + if unconditional_guidance_scale != 1.0: + unconditional_conditioning = {} + for key in self.cond_stage_model_metadata: + model_idx = self.cond_stage_model_metadata[key]["model_idx"] + unconditional_conditioning[key] = self.cond_stage_models[ + model_idx + ].get_unconditional_condition(batch_size) + + fnames = list(super().get_input(batch, "fname")) + # import pdb; pdb.set_trace() + samples, _ = self.sample_log( + cond=c, + batch_size=batch_size, + x_T=x_T, + ddim=use_ddim, + ddim_steps=ddim_steps, + eta=ddim_eta, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + use_plms=use_plms, + ) + + mel = self.decode_first_stage(samples) + + # mel = super().get_input(batch, "log_mel_spec") + + waveform = self.mel_spectrogram_to_waveform( + mel, savepath=waveform_save_path, bs=None, name=fnames, save=False, n_gen=n_gen + ) + + if n_gen > 1: + try: + best_index = [] + similarity = self.clap.cos_similarity( + torch.FloatTensor(waveform).squeeze(1), text + ) + for i in range(z.shape[0]): + candidates = similarity[i :: z.shape[0]] + max_index = torch.argmax(candidates).item() + best_index.append(i + max_index * z.shape[0]) + + waveform = waveform[best_index] + + print("Similarity between 
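A minimal sketch of the best-of-n selection used in `generate_sample`: the n_gen candidates for each prompt are interleaved with a stride equal to the original batch size, so candidate j for prompt i sits at index i + j * batch_size, and the candidate with the highest audio-text similarity is kept. Random scores stand in for the CLAP similarities.

```python
import torch

batch_size, n_gen = 3, 4
similarity = torch.rand(batch_size * n_gen)     # stand-in for CLAP audio-text scores

best_index = []
for i in range(batch_size):
    candidates = similarity[i::batch_size]      # the n_gen scores belonging to prompt i
    best_index.append(i + torch.argmax(candidates).item() * batch_size)

print(best_index)   # one selected waveform index per prompt
```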
generated audio and text", similarity) + print("Choose the following indexes:", best_index) + except Exception as e: + print("Warning: while calculating CLAP score (not fatal), ", e) + self.save_waveform(waveform, savepath="./") + return waveform_save_path + + +class DiffusionWrapper(pl.LightningModule): + def __init__(self, diff_model_config, conditioning_key): + super().__init__() + self.diffusion_model = instantiate_from_config(diff_model_config) + + self.conditioning_key = conditioning_key + + for key in self.conditioning_key: + if ( + "concat" in key + or "crossattn" in key + or "hybrid" in key + or "film" in key + or "noncond" in key + ): + continue + else: + raise Value("The conditioning key %s is illegal" % key) + + self.being_verbosed_once = False + + def forward(self, x, t, cond_dict: dict = {}): + x = x.contiguous() + t = t.contiguous() + + # import pdb + # pdb.set_trace() + # x with condition (or maybe not) + xc = x + + y = None + context_list, attn_mask_list = [], [] + conditional_keys = cond_dict.keys() + + for key in conditional_keys: + if "concat" in key: + xc = torch.cat([x, cond_dict[key].unsqueeze(1)], dim=1) + elif "film" in key: + if y is None: + y = cond_dict[key].squeeze(1) + else: + y = torch.cat([y, cond_dict[key].squeeze(1)], dim=-1) + elif "crossattn" in key: + # assert context is None, "You can only have one context matrix, got %s" % (cond_dict.keys()) + if isinstance(cond_dict[key], dict): + for k in cond_dict[key].keys(): + if "crossattn" in k: + context, attn_mask = cond_dict[key][ + k + ] # crossattn_audiomae_pooled: torch.Size([12, 128, 768]) + else: + assert len(cond_dict[key]) == 2, ( + "The context condition for %s you returned should have two element, one context one mask" + % (key) + ) + context, attn_mask = cond_dict[key] + + # The input to the UNet model is a list of context matrix + context_list.append(context) + attn_mask_list.append(attn_mask) + + elif ( + "noncond" in key + ): # If you use loss function in the conditional module, include the keyword "noncond" in the return dictionary + continue + elif "mos" in key: + mos = cond_dict['mos'] + else: + raise NotImplementedError() + + if not self.being_verbosed_once: + print("The input shape to the diffusion model is as follows:") + print("xc", xc.size()) + print("t", t.size()) + for i in range(len(context_list)): + print( + "context_%s" % i, context_list[i].size(), attn_mask_list[i].size() + ) + if y is not None: + print("y", y.size()) + self.being_verbosed_once = True + # try: + # out = self.diffusion_model.forward_with_dpmsolver( + # xc, timestep=t, y=context_list, mask=attn_mask_list + # ) + # except: + out = self.diffusion_model.forward( + xc, timestep=t, context_list=context_list, context_mask_list=attn_mask_list, mos=mos + ) + + return out + + @torch.no_grad() + def forward_with_cfg(self, x, t, cond_dict: dict = {}, cfg_scale=4.0, **model_kwargs): + + x = x.contiguous() + t = t.contiguous() + + # x with condition (or maybe not) + xc = x + + y = None + context_list, attn_mask_list = [], [] + conditional_keys = cond_dict.keys() + + for key in conditional_keys: + if "concat" in key: + xc = torch.cat([x, cond_dict[key].unsqueeze(1)], dim=1) + elif "film" in key: + if y is None: + y = cond_dict[key].squeeze(1) + else: + y = torch.cat([y, cond_dict[key].squeeze(1)], dim=-1) + elif "crossattn" in key: + # assert context is None, "You can only have one context matrix, got %s" % (cond_dict.keys()) + if isinstance(cond_dict[key], dict): + for k in cond_dict[key].keys(): + if "crossattn" in k: + context, 
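A minimal sketch of how `DiffusionWrapper.forward` routes the conditioning dictionary by key name: keys containing "concat" are channel-concatenated to the input, "film" keys become a global embedding, and "crossattn" keys contribute (context, attention-mask) pairs. The key names and shapes below are purely illustrative, not the project's actual configuration.

```python
import torch

x = torch.randn(2, 8, 256, 16)
cond_dict = {
    "film_text_embedding": torch.randn(2, 1, 512),                             # illustrative key
    "crossattn_text_encoder": [torch.randn(2, 77, 1024), torch.ones(2, 77)],   # illustrative key
}

xc, y, context_list, attn_mask_list = x, None, [], []
for key, value in cond_dict.items():
    if "concat" in key:
        xc = torch.cat([x, value.unsqueeze(1)], dim=1)
    elif "film" in key:
        y = value.squeeze(1) if y is None else torch.cat([y, value.squeeze(1)], dim=-1)
    elif "crossattn" in key:
        context, attn_mask = value
        context_list.append(context)
        attn_mask_list.append(attn_mask)

print(xc.shape, y.shape, [c.shape for c in context_list])
```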
attn_mask = cond_dict[key][ + k + ] # crossattn_audiomae_pooled: torch.Size([12, 128, 768]) + else: + assert len(cond_dict[key]) == 2, ( + "The context condition for %s you returned should have two element, one context one mask" + % (key) + ) + context, attn_mask = cond_dict[key] + + # The input to the UNet model is a list of context matrix + context_list.append(context) + attn_mask_list.append(attn_mask) + + elif ( + "noncond" in key + ): # If you use loss function in the conditional module, include the keyword "noncond" in the return dictionary + continue + else: + raise NotImplementedError() + + if not self.being_verbosed_once: + print("The input shape to the diffusion model is as follows:") + print("xc", xc.size()) + print("t", t.size()) + for i in range(len(context_list)): + print( + "context_%s" % i, context_list[i].size(), attn_mask_list[i].size() + ) + if y is not None: + print("y", y.size()) + self.being_verbosed_once = True + # try: + # out = self.diffusion_model.forward_with_dpmsolver( + # xc, timestep=t, y=context_list, mask=attn_mask_list + # ) + # except: + out = self.diffusion_model.forward_with_cfg( + xc, timestep=t, context_list=context_list, context_mask_list=attn_mask_list, cfg_scale=cfg_scale, **model_kwargs + ) + # import pdb + # pdb.set_trace() + + return out + + +class LatentDiffusionSpeedTest(pl.LightningModule): + """main class""" + + def __init__( + self, + first_stage_config, + cond_stage_config=None, + num_timesteps_cond=None, + cond_stage_key="image", + cond_stage_trainable=False, + concat_mode=True, + cond_stage_forward=None, + conditioning_key=None, + scale_factor=1.0, + batchsize=None, + evaluation_params={}, + scale_by_std=False, + base_learning_rate=None, + *args, + **kwargs, + ): + super().__init__() + self.l1 = nn.Linear(1, 1) + self.logger_save_dir = None + self.logger_exp_group_name = None + self.logger_exp_name = None + self.test_data_subset_path = None + + def set_log_dir(self, save_dir, exp_group_name, exp_name): + self.logger_save_dir = save_dir + self.logger_exp_group_name = exp_group_name + self.logger_exp_name = exp_name + + def forward(self, x): + return self.l1(x.permute(0, 2, 1)).permute(0, 2, 1) + + def training_step(self, batch, batch_idx): + x = batch["waveform"] + loss = self(x) + return torch.mean(loss) + + def configure_optimizers(self): + return torch.optim.Adam(self.parameters(), lr=0.02) + + +class LatentDiffusionVAELearnable(LatentDiffusion): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.automatic_optimization = False + + def configure_optimizers(self): + lr = self.learning_rate + params = list(self.model.parameters()) + + for each in self.cond_stage_models: + params = params + list( + each.parameters() + ) # Add the parameter from the conditional stage + + if self.learn_logvar: + print("Diffusion model optimizing logvar") + params.append(self.logvar) + ldm_opt = torch.optim.AdamW(params, lr=lr) + + opt_autoencoder, opt_scheduler = self.first_stage_model.configure_optimizers() + opt_ae, opt_disc = opt_autoencoder + + return [ldm_opt, opt_ae, opt_disc], [] + + def encode_first_stage(self, x): + # with torch.no_grad(): + encoding = self.first_stage_model.encode(x) + return encoding + + def decode_first_stage(self, z): + # with torch.no_grad(): + z = 1.0 / self.scale_factor * z + decoding = self.first_stage_model.decode(z) + return decoding + + def instantiate_first_stage(self, config): + model = instantiate_from_config(config) + self.first_stage_model = model.train() + # self.first_stage_model.train 
= disabled_train + # for param in self.first_stage_model.parameters(): + # param.requires_grad = False + + def shared_step(self, batch, **kwargs): + ldm_opt, g_opt, d_opt = self.optimizers() + + if self.training: + # Classifier-free guidance + unconditional_prob_cfg = self.unconditional_prob_cfg + else: + unconditional_prob_cfg = 0.0 + + x, c, decoder_xrec, encoder_x, encoder_posterior = self.get_input( + batch, + self.first_stage_key, + unconditional_prob_cfg=unconditional_prob_cfg, + return_decoding_output=True, + return_encoder_input=True, + return_encoder_output=True, + ) + + loss, loss_dict = self(x, self.filter_useful_cond_dict(c)) + + additional_loss_for_cond_modules = self.extract_possible_loss_in_cond_dict(c) + + assert isinstance(additional_loss_for_cond_modules, dict) + + loss_dict.update(additional_loss_for_cond_modules) + + if len(additional_loss_for_cond_modules.keys()) > 0: + for k in additional_loss_for_cond_modules.keys(): + loss = loss + additional_loss_for_cond_modules[k] + + for k, v in additional_loss_for_cond_modules.items(): + self.log( + "cond_stage/" + k, + float(v), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=True, + ) + + aeloss, log_dict_ae = self.first_stage_model.loss( + encoder_x, + decoder_xrec, + encoder_posterior, + optimizer_idx=0, + global_step=self.first_stage_model.global_step, + last_layer=self.first_stage_model.get_last_layer(), + split="train", + ) + + self.manual_backward(loss + aeloss) + + ldm_opt.step() + ldm_opt.zero_grad() + + g_opt.step() + g_opt.zero_grad() + + discloss, log_dict_disc = self.first_stage_model.loss( + encoder_x, + decoder_xrec, + encoder_posterior, + optimizer_idx=1, + global_step=self.first_stage_model.global_step, + last_layer=self.first_stage_model.get_last_layer(), + split="train", + ) + + self.manual_backward(discloss) + d_opt.step() + d_opt.zero_grad() + + self.log( + "aeloss", + aeloss, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + self.log( + "posterior_std", + torch.mean(encoder_posterior.var), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + loss_dict.update(log_dict_disc) + loss_dict.update(log_dict_ae) + + return None, loss_dict + + def training_step(self, batch, batch_idx): + self.warmup_step() + self.check_module_param_update() + + if ( + self.state is None + and len(self.trainer.optimizers[0].state_dict()["state"].keys()) > 0 + ): + self.state = ( + self.trainer.optimizers[0].state_dict()["state"][0]["exp_avg"].clone() + ) + elif self.state is not None and batch_idx % 1000 == 0: + assert ( + torch.sum( + torch.abs( + self.state + - self.trainer.optimizers[0].state_dict()["state"][0]["exp_avg"] + ) + ) + > 1e-7 + ), "Optimizer is not working" + + if len(self.metrics_buffer.keys()) > 0: + for k in self.metrics_buffer.keys(): + self.log( + k, + self.metrics_buffer[k], + prog_bar=False, + logger=True, + on_step=True, + on_epoch=False, + ) + print(k, self.metrics_buffer[k]) + self.metrics_buffer = {} + + loss, loss_dict = self.shared_step(batch) + + self.log_dict( + {k: float(v) for k, v in loss_dict.items()}, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=True, + ) + + self.log( + "global_step", + float(self.global_step), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + + lr = self.trainer.optimizers[0].param_groups[0]["lr"] + self.log( + "lr_abs", + float(lr), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/__init__.py 
b/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f56611cb5fb3682486f83329da3583a95800ca20 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/__init__.py @@ -0,0 +1 @@ +from .sampler import DPMSolverSampler diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/dpm_solver.py b/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/dpm_solver.py new file mode 100644 index 0000000000000000000000000000000000000000..4d55615bc96cdf07a4828b04692c6f73da0122bc --- /dev/null +++ b/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/dpm_solver.py @@ -0,0 +1,1511 @@ +import torch +import torch.nn.functional as F +import math + + +class NoiseScheduleVP: + def __init__( + self, + schedule="discrete", + betas=None, + alphas_cumprod=None, + continuous_beta_0=0.1, + continuous_beta_1=20.0, + ): + """Create a wrapper class for the forward SDE (VP type). + + *** + Update: We support discrete-time diffusion models by implementing a picewise linear interpolation for log_alpha_t. + We recommend to use schedule='discrete' for the discrete-time diffusion models, especially for high-resolution images. + *** + + The forward SDE ensures that the condition distribution q_{t|0}(x_t | x_0) = N ( alpha_t * x_0, sigma_t^2 * I ). + We further define lambda_t = log(alpha_t) - log(sigma_t), which is the half-logSNR (described in the DPM-Solver paper). + Therefore, we implement the functions for computing alpha_t, sigma_t and lambda_t. For t in [0, T], we have: + + log_alpha_t = self.marginal_log_mean_coeff(t) + sigma_t = self.marginal_std(t) + lambda_t = self.marginal_lambda(t) + + Moreover, as lambda(t) is an invertible function, we also support its inverse function: + + t = self.inverse_lambda(lambda_t) + + =============================================================== + + We support both discrete-time DPMs (trained on n = 0, 1, ..., N-1) and continuous-time DPMs (trained on t in [t_0, T]). + + 1. For discrete-time DPMs: + + For discrete-time DPMs trained on n = 0, 1, ..., N-1, we convert the discrete steps to continuous time steps by: + t_i = (i + 1) / N + e.g. for N = 1000, we have t_0 = 1e-3 and T = t_{N-1} = 1. + We solve the corresponding diffusion ODE from time T = 1 to time t_0 = 1e-3. + + Args: + betas: A `torch.Tensor`. The beta array for the discrete-time DPM. (See the original DDPM paper for details) + alphas_cumprod: A `torch.Tensor`. The cumprod alphas for the discrete-time DPM. (See the original DDPM paper for details) + + Note that we always have alphas_cumprod = cumprod(betas). Therefore, we only need to set one of `betas` and `alphas_cumprod`. + + **Important**: Please pay special attention for the args for `alphas_cumprod`: + The `alphas_cumprod` is the \hat{alpha_n} arrays in the notations of DDPM. Specifically, DDPMs assume that + q_{t_n | 0}(x_{t_n} | x_0) = N ( \sqrt{\hat{alpha_n}} * x_0, (1 - \hat{alpha_n}) * I ). + Therefore, the notation \hat{alpha_n} is different from the notation alpha_t in DPM-Solver. In fact, we have + alpha_{t_n} = \sqrt{\hat{alpha_n}}, + and + log(alpha_{t_n}) = 0.5 * log(\hat{alpha_n}). + + + 2. For continuous-time DPMs: + + We support two types of VPSDEs: linear (DDPM) and cosine (improved-DDPM). The hyperparameters for the noise + schedule are the default settings in DDPM and improved-DDPM: + + Args: + beta_min: A `float` number. The smallest beta for the linear schedule. + beta_max: A `float` number. 
The largest beta for the linear schedule. + cosine_s: A `float` number. The hyperparameter in the cosine schedule. + cosine_beta_max: A `float` number. The hyperparameter in the cosine schedule. + T: A `float` number. The ending time of the forward process. + + =============================================================== + + Args: + schedule: A `str`. The noise schedule of the forward SDE. 'discrete' for discrete-time DPMs, + 'linear' or 'cosine' for continuous-time DPMs. + Returns: + A wrapper object of the forward SDE (VP type). + + =============================================================== + + Example: + + # For discrete-time DPMs, given betas (the beta array for n = 0, 1, ..., N - 1): + >>> ns = NoiseScheduleVP('discrete', betas=betas) + + # For discrete-time DPMs, given alphas_cumprod (the \hat{alpha_n} array for n = 0, 1, ..., N - 1): + >>> ns = NoiseScheduleVP('discrete', alphas_cumprod=alphas_cumprod) + + # For continuous-time DPMs (VPSDE), linear schedule: + >>> ns = NoiseScheduleVP('linear', continuous_beta_0=0.1, continuous_beta_1=20.) + + """ + + if schedule not in ["discrete", "linear", "cosine"]: + raise ValueError( + "Unsupported noise schedule {}. The schedule needs to be 'discrete' or 'linear' or 'cosine'".format( + schedule + ) + ) + + self.schedule = schedule + if schedule == "discrete": + if betas is not None: + log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0) + else: + assert alphas_cumprod is not None + log_alphas = 0.5 * torch.log(alphas_cumprod) + self.total_N = len(log_alphas) + self.T = 1.0 + self.t_array = torch.linspace(0.0, 1.0, self.total_N + 1)[1:].reshape( + (1, -1) + ) + self.log_alpha_array = log_alphas.reshape( + ( + 1, + -1, + ) + ) + else: + self.total_N = 1000 + self.beta_0 = continuous_beta_0 + self.beta_1 = continuous_beta_1 + self.cosine_s = 0.008 + self.cosine_beta_max = 999.0 + self.cosine_t_max = ( + math.atan(self.cosine_beta_max * (1.0 + self.cosine_s) / math.pi) + * 2.0 + * (1.0 + self.cosine_s) + / math.pi + - self.cosine_s + ) + self.cosine_log_alpha_0 = math.log( + math.cos(self.cosine_s / (1.0 + self.cosine_s) * math.pi / 2.0) + ) + self.schedule = schedule + if schedule == "cosine": + # For the cosine schedule, T = 1 will have numerical issues. So we manually set the ending time T. + # Note that T = 0.9946 may be not the optimal setting. However, we find it works well. + self.T = 0.9946 + else: + self.T = 1.0 + + def marginal_log_mean_coeff(self, t): + """ + Compute log(alpha_t) of a given continuous-time label t in [0, T]. + """ + if self.schedule == "discrete": + return interpolate_fn( + t.reshape((-1, 1)), + self.t_array.to(t.device), + self.log_alpha_array.to(t.device), + ).reshape((-1)) + elif self.schedule == "linear": + return -0.25 * t**2 * (self.beta_1 - self.beta_0) - 0.5 * t * self.beta_0 + elif self.schedule == "cosine": + log_alpha_fn = lambda s: torch.log( + torch.cos((s + self.cosine_s) / (1.0 + self.cosine_s) * math.pi / 2.0) + ) + log_alpha_t = log_alpha_fn(t) - self.cosine_log_alpha_0 + return log_alpha_t + + def marginal_alpha(self, t): + """ + Compute alpha_t of a given continuous-time label t in [0, T]. + """ + return torch.exp(self.marginal_log_mean_coeff(t)) + + def marginal_std(self, t): + """ + Compute sigma_t of a given continuous-time label t in [0, T]. + """ + return torch.sqrt(1.0 - torch.exp(2.0 * self.marginal_log_mean_coeff(t))) + + def marginal_lambda(self, t): + """ + Compute lambda_t = log(alpha_t) - log(sigma_t) of a given continuous-time label t in [0, T]. 
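A minimal sketch of the discrete VP-schedule quantities that `NoiseScheduleVP` exposes: log(alpha_t) is the cumulative sum of 0.5·log(1 − beta), sigma_t = sqrt(1 − alpha_t²), and the half-logSNR is lambda_t = log(alpha_t) − log(sigma_t). The linear beta schedule here is only an example.

```python
import torch

betas = torch.linspace(1e-4, 2e-2, 1000)                 # example linear schedule
log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0)    # log(alpha_{t_n})

alpha = torch.exp(log_alphas)
sigma = torch.sqrt(1.0 - torch.exp(2.0 * log_alphas))
lam = log_alphas - torch.log(sigma)                      # half-logSNR, decreasing in t

print(float(lam[0]), float(lam[-1]))
```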
+ """ + log_mean_coeff = self.marginal_log_mean_coeff(t) + log_std = 0.5 * torch.log(1.0 - torch.exp(2.0 * log_mean_coeff)) + return log_mean_coeff - log_std + + def inverse_lambda(self, lamb): + """ + Compute the continuous-time label t in [0, T] of a given half-logSNR lambda_t. + """ + if self.schedule == "linear": + tmp = ( + 2.0 + * (self.beta_1 - self.beta_0) + * torch.logaddexp(-2.0 * lamb, torch.zeros((1,)).to(lamb)) + ) + Delta = self.beta_0**2 + tmp + return tmp / (torch.sqrt(Delta) + self.beta_0) / (self.beta_1 - self.beta_0) + elif self.schedule == "discrete": + log_alpha = -0.5 * torch.logaddexp( + torch.zeros((1,)).to(lamb.device), -2.0 * lamb + ) + t = interpolate_fn( + log_alpha.reshape((-1, 1)), + torch.flip(self.log_alpha_array.to(lamb.device), [1]), + torch.flip(self.t_array.to(lamb.device), [1]), + ) + return t.reshape((-1,)) + else: + log_alpha = -0.5 * torch.logaddexp(-2.0 * lamb, torch.zeros((1,)).to(lamb)) + t_fn = ( + lambda log_alpha_t: torch.arccos( + torch.exp(log_alpha_t + self.cosine_log_alpha_0) + ) + * 2.0 + * (1.0 + self.cosine_s) + / math.pi + - self.cosine_s + ) + t = t_fn(log_alpha) + return t + + +def model_wrapper( + model, + noise_schedule, + model_type="noise", + model_kwargs={}, + guidance_type="uncond", + condition=None, + unconditional_condition=None, + guidance_scale=1.0, + classifier_fn=None, + classifier_kwargs={}, +): + """Create a wrapper function for the noise prediction model. + + DPM-Solver needs to solve the continuous-time diffusion ODEs. For DPMs trained on discrete-time labels, we need to + firstly wrap the model function to a noise prediction model that accepts the continuous time as the input. + + We support four types of the diffusion model by setting `model_type`: + + 1. "noise": noise prediction model. (Trained by predicting noise). + + 2. "x_start": data prediction model. (Trained by predicting the data x_0 at time 0). + + 3. "v": velocity prediction model. (Trained by predicting the velocity). + The "v" prediction is derivation detailed in Appendix D of [1], and is used in Imagen-Video [2]. + + [1] Salimans, Tim, and Jonathan Ho. "Progressive distillation for fast sampling of diffusion models." + arXiv preprint arXiv:2202.00512 (2022). + [2] Ho, Jonathan, et al. "Imagen Video: High Definition Video Generation with Diffusion Models." + arXiv preprint arXiv:2210.02303 (2022). + + 4. "score": marginal score function. (Trained by denoising score matching). + Note that the score function and the noise prediction model follows a simple relationship: + ``` + noise(x_t, t) = -sigma_t * score(x_t, t) + ``` + + We support three types of guided sampling by DPMs by setting `guidance_type`: + 1. "uncond": unconditional sampling by DPMs. + The input `model` has the following format: + `` + model(x, t_input, **model_kwargs) -> noise | x_start | v | score + `` + + 2. "classifier": classifier guidance sampling [3] by DPMs and another classifier. + The input `model` has the following format: + `` + model(x, t_input, **model_kwargs) -> noise | x_start | v | score + `` + + The input `classifier_fn` has the following format: + `` + classifier_fn(x, t_input, cond, **classifier_kwargs) -> logits(x, t_input, cond) + `` + + [3] P. Dhariwal and A. Q. Nichol, "Diffusion models beat GANs on image synthesis," + in Advances in Neural Information Processing Systems, vol. 34, 2021, pp. 8780-8794. + + 3. "classifier-free": classifier-free guidance sampling by conditional DPMs. 
+ The input `model` has the following format: + `` + model(x, t_input, cond, **model_kwargs) -> noise | x_start | v | score + `` + And if cond == `unconditional_condition`, the model output is the unconditional DPM output. + + [4] Ho, Jonathan, and Tim Salimans. "Classifier-free diffusion guidance." + arXiv preprint arXiv:2207.12598 (2022). + + + The `t_input` is the time label of the model, which may be discrete-time labels (i.e. 0 to 999) + or continuous-time labels (i.e. epsilon to T). + + We wrap the model function to accept only `x` and `t_continuous` as inputs, and outputs the predicted noise: + `` + def model_fn(x, t_continuous) -> noise: + t_input = get_model_input_time(t_continuous) + return noise_pred(model, x, t_input, **model_kwargs) + `` + where `t_continuous` is the continuous time labels (i.e. epsilon to T). And we use `model_fn` for DPM-Solver. + + =============================================================== + + Args: + model: A diffusion model with the corresponding format described above. + noise_schedule: A noise schedule object, such as NoiseScheduleVP. + model_type: A `str`. The parameterization type of the diffusion model. + "noise" or "x_start" or "v" or "score". + model_kwargs: A `dict`. A dict for the other inputs of the model function. + guidance_type: A `str`. The type of the guidance for sampling. + "uncond" or "classifier" or "classifier-free". + condition: A pytorch tensor. The condition for the guided sampling. + Only used for "classifier" or "classifier-free" guidance type. + unconditional_condition: A pytorch tensor. The condition for the unconditional sampling. + Only used for "classifier-free" guidance type. + guidance_scale: A `float`. The scale for the guided sampling. + classifier_fn: A classifier function. Only used for the classifier guidance. + classifier_kwargs: A `dict`. A dict for the other inputs of the classifier function. + Returns: + A noise prediction model that accepts the noised data and the continuous time as the inputs. + """ + + def get_model_input_time(t_continuous): + """ + Convert the continuous-time `t_continuous` (in [epsilon, T]) to the model input time. + For discrete-time DPMs, we convert `t_continuous` in [1 / N, 1] to `t_input` in [0, 1000 * (N - 1) / N]. + For continuous-time DPMs, we just use `t_continuous`. + """ + if noise_schedule.schedule == "discrete": + return (t_continuous - 1.0 / noise_schedule.total_N) * 1000.0 + else: + return t_continuous + + def noise_pred_fn(x, t_continuous, cond=None): + if t_continuous.reshape((-1,)).shape[0] == 1: + t_continuous = t_continuous.expand((x.shape[0])) + t_input = get_model_input_time(t_continuous) + if cond is None: + output = model(x, t_input, **model_kwargs) + else: + output = model(x, t_input, cond, **model_kwargs) + if model_type == "noise": + return output + elif model_type == "x_start": + alpha_t, sigma_t = noise_schedule.marginal_alpha( + t_continuous + ), noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return (x - expand_dims(alpha_t, dims) * output) / expand_dims( + sigma_t, dims + ) + elif model_type == "v": + alpha_t, sigma_t = noise_schedule.marginal_alpha( + t_continuous + ), noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return expand_dims(alpha_t, dims) * output + expand_dims(sigma_t, dims) * x + elif model_type == "score": + sigma_t = noise_schedule.marginal_std(t_continuous) + dims = x.dim() + return -expand_dims(sigma_t, dims) * output + + def cond_grad_fn(x, t_input): + """ + Compute the gradient of the classifier, i.e. 
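A minimal sketch of the conversions in `noise_pred_fn` that turn an x0-, v-, or score-parameterized model output into a noise prediction, using alpha_t and sigma_t from the schedule. All values below are random stand-ins chosen only to exercise the formulas.

```python
import torch

x = torch.randn(2, 8, 256, 16)
alpha_t = torch.tensor([0.7, 0.7]).reshape(2, 1, 1, 1)
sigma_t = torch.sqrt(1 - alpha_t ** 2)

x0_pred = torch.randn_like(x)
v_pred = torch.randn_like(x)
score_pred = torch.randn_like(x)

noise_from_x0 = (x - alpha_t * x0_pred) / sigma_t      # "x_start" models
noise_from_v = alpha_t * v_pred + sigma_t * x          # "v" models
noise_from_score = -sigma_t * score_pred               # "score" models
print(noise_from_x0.shape, noise_from_v.shape, noise_from_score.shape)
```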
nabla_{x} log p_t(cond | x_t). + """ + with torch.enable_grad(): + x_in = x.detach().requires_grad_(True) + log_prob = classifier_fn(x_in, t_input, condition, **classifier_kwargs) + return torch.autograd.grad(log_prob.sum(), x_in)[0] + + def model_fn(x, t_continuous): + """ + The noise predicition model function that is used for DPM-Solver. + """ + if t_continuous.reshape((-1,)).shape[0] == 1: + t_continuous = t_continuous.expand((x.shape[0])) + if guidance_type == "uncond": + return noise_pred_fn(x, t_continuous) + elif guidance_type == "classifier": + assert classifier_fn is not None + t_input = get_model_input_time(t_continuous) + cond_grad = cond_grad_fn(x, t_input) + sigma_t = noise_schedule.marginal_std(t_continuous) + noise = noise_pred_fn(x, t_continuous) + return ( + noise + - guidance_scale + * expand_dims(sigma_t, dims=cond_grad.dim()) + * cond_grad + ) + elif guidance_type == "classifier-free": + if guidance_scale == 1.0 or unconditional_condition is None: + return noise_pred_fn(x, t_continuous, cond=condition) + else: + x_in = torch.cat([x] * 2) + t_in = torch.cat([t_continuous] * 2) + c_in = torch.cat([unconditional_condition, condition]) + noise_uncond, noise = noise_pred_fn(x_in, t_in, cond=c_in).chunk(2) + return noise_uncond + guidance_scale * (noise - noise_uncond) + + assert model_type in ["noise", "x_start", "v"] + assert guidance_type in ["uncond", "classifier", "classifier-free"] + return model_fn + + +class DPM_Solver: + def __init__( + self, + model_fn, + noise_schedule, + predict_x0=False, + thresholding=False, + max_val=1.0, + ): + """Construct a DPM-Solver. + + We support both the noise prediction model ("predicting epsilon") and the data prediction model ("predicting x0"). + If `predict_x0` is False, we use the solver for the noise prediction model (DPM-Solver). + If `predict_x0` is True, we use the solver for the data prediction model (DPM-Solver++). + In such case, we further support the "dynamic thresholding" in [1] when `thresholding` is True. + The "dynamic thresholding" can greatly improve the sample quality for pixel-space DPMs with large guidance scales. + + Args: + model_fn: A noise prediction model function which accepts the continuous-time input (t in [epsilon, T]): + `` + def model_fn(x, t_continuous): + return noise + `` + noise_schedule: A noise schedule object, such as NoiseScheduleVP. + predict_x0: A `bool`. If true, use the data prediction model; else, use the noise prediction model. + thresholding: A `bool`. Valid when `predict_x0` is True. Whether to use the "dynamic thresholding" in [1]. + max_val: A `float`. Valid when both `predict_x0` and `thresholding` are True. The max value for thresholding. + + [1] Chitwan Saharia, William Chan, Saurabh Saxena, Lala Li, Jay Whang, Emily Denton, Seyed Kamyar Seyed Ghasemipour, Burcu Karagol Ayan, S Sara Mahdavi, Rapha Gontijo Lopes, et al. Photorealistic text-to-image diffusion models with deep language understanding. arXiv preprint arXiv:2205.11487, 2022b. + """ + self.model = model_fn + self.noise_schedule = noise_schedule + self.predict_x0 = predict_x0 + self.thresholding = thresholding + self.max_val = max_val + + def noise_prediction_fn(self, x, t): + """ + Return the noise prediction model. + """ + return self.model(x, t) + + def data_prediction_fn(self, x, t): + """ + Return the data prediction model (with thresholding). 
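A minimal sketch of the classifier-free-guidance trick used in `model_fn`: the conditional and unconditional inputs are run as one doubled batch, split with `chunk(2)`, and recombined as noise_uncond + s · (noise_cond − noise_uncond). A trivial stand-in replaces the real denoiser, and all shapes are illustrative.

```python
import torch

def fake_denoiser(x, t, cond):
    # stand-in for the real noise-prediction network
    return 0.1 * x + cond.mean(dim=(1, 2)).reshape(-1, 1, 1, 1)

x = torch.randn(2, 8, 256, 16)
t = torch.full((2,), 0.5)
condition = torch.randn(2, 77, 1024)
unconditional_condition = torch.zeros_like(condition)
guidance_scale = 3.5

x_in = torch.cat([x] * 2)
t_in = torch.cat([t] * 2)
c_in = torch.cat([unconditional_condition, condition])
noise_uncond, noise_cond = fake_denoiser(x_in, t_in, c_in).chunk(2)
guided = noise_uncond + guidance_scale * (noise_cond - noise_uncond)
print(guided.shape)
```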
+ """ + noise = self.noise_prediction_fn(x, t) + dims = x.dim() + alpha_t, sigma_t = self.noise_schedule.marginal_alpha( + t + ), self.noise_schedule.marginal_std(t) + x0 = (x - expand_dims(sigma_t, dims) * noise) / expand_dims(alpha_t, dims) + if self.thresholding: + p = 0.995 # A hyperparameter in the paper of "Imagen" [1]. + s = torch.quantile(torch.abs(x0).reshape((x0.shape[0], -1)), p, dim=1) + s = expand_dims( + torch.maximum(s, self.max_val * torch.ones_like(s).to(s.device)), dims + ) + x0 = torch.clamp(x0, -s, s) / s + return x0 + + def model_fn(self, x, t): + """ + Convert the model to the noise prediction model or the data prediction model. + """ + if self.predict_x0: + return self.data_prediction_fn(x, t) + else: + return self.noise_prediction_fn(x, t) + + def get_time_steps(self, skip_type, t_T, t_0, N, device): + """Compute the intermediate time steps for sampling. + + Args: + skip_type: A `str`. The type for the spacing of the time steps. We support three types: + - 'logSNR': uniform logSNR for the time steps. + - 'time_uniform': uniform time for the time steps. (**Recommended for high-resolutional data**.) + - 'time_quadratic': quadratic time for the time steps. (Used in DDIM for low-resolutional data.) + t_T: A `float`. The starting time of the sampling (default is T). + t_0: A `float`. The ending time of the sampling (default is epsilon). + N: A `int`. The total number of the spacing of the time steps. + device: A torch device. + Returns: + A pytorch tensor of the time steps, with the shape (N + 1,). + """ + if skip_type == "logSNR": + lambda_T = self.noise_schedule.marginal_lambda(torch.tensor(t_T).to(device)) + lambda_0 = self.noise_schedule.marginal_lambda(torch.tensor(t_0).to(device)) + logSNR_steps = torch.linspace( + lambda_T.cpu().item(), lambda_0.cpu().item(), N + 1 + ).to(device) + return self.noise_schedule.inverse_lambda(logSNR_steps) + elif skip_type == "time_uniform": + return torch.linspace(t_T, t_0, N + 1).to(device) + elif skip_type == "time_quadratic": + t_order = 2 + t = ( + torch.linspace(t_T ** (1.0 / t_order), t_0 ** (1.0 / t_order), N + 1) + .pow(t_order) + .to(device) + ) + return t + else: + raise ValueError( + "Unsupported skip_type {}, need to be 'logSNR' or 'time_uniform' or 'time_quadratic'".format( + skip_type + ) + ) + + def get_orders_and_timesteps_for_singlestep_solver( + self, steps, order, skip_type, t_T, t_0, device + ): + """ + Get the order of each step for sampling by the singlestep DPM-Solver. + + We combine both DPM-Solver-1,2,3 to use all the function evaluations, which is named as "DPM-Solver-fast". + Given a fixed number of function evaluations by `steps`, the sampling procedure by DPM-Solver-fast is: + - If order == 1: + We take `steps` of DPM-Solver-1 (i.e. DDIM). + - If order == 2: + - Denote K = (steps // 2). We take K or (K + 1) intermediate time steps for sampling. + - If steps % 2 == 0, we use K steps of DPM-Solver-2. + - If steps % 2 == 1, we use K steps of DPM-Solver-2 and 1 step of DPM-Solver-1. + - If order == 3: + - Denote K = (steps // 3 + 1). We take K intermediate time steps for sampling. + - If steps % 3 == 0, we use (K - 2) steps of DPM-Solver-3, and 1 step of DPM-Solver-2 and 1 step of DPM-Solver-1. + - If steps % 3 == 1, we use (K - 1) steps of DPM-Solver-3 and 1 step of DPM-Solver-1. + - If steps % 3 == 2, we use (K - 1) steps of DPM-Solver-3 and 1 step of DPM-Solver-2. + + ============================================ + Args: + order: A `int`. The max order for the solver (2 or 3). + steps: A `int`. 
The total number of function evaluations (NFE). + skip_type: A `str`. The type for the spacing of the time steps. We support three types: + - 'logSNR': uniform logSNR for the time steps. + - 'time_uniform': uniform time for the time steps. (**Recommended for high-resolutional data**.) + - 'time_quadratic': quadratic time for the time steps. (Used in DDIM for low-resolutional data.) + t_T: A `float`. The starting time of the sampling (default is T). + t_0: A `float`. The ending time of the sampling (default is epsilon). + device: A torch device. + Returns: + orders: A list of the solver order of each step. + """ + if order == 3: + K = steps // 3 + 1 + if steps % 3 == 0: + orders = [3,] * ( + K - 2 + ) + [2, 1] + elif steps % 3 == 1: + orders = [3,] * ( + K - 1 + ) + [1] + else: + orders = [3,] * ( + K - 1 + ) + [2] + elif order == 2: + if steps % 2 == 0: + K = steps // 2 + orders = [ + 2, + ] * K + else: + K = steps // 2 + 1 + orders = [2,] * ( + K - 1 + ) + [1] + elif order == 1: + K = 1 + orders = [ + 1, + ] * steps + else: + raise ValueError("'order' must be '1' or '2' or '3'.") + if skip_type == "logSNR": + # To reproduce the results in DPM-Solver paper + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, K, device) + else: + timesteps_outer = self.get_time_steps(skip_type, t_T, t_0, steps, device)[ + torch.cumsum( + torch.tensor( + [ + 0, + ] + + orders + ) + ).to(device) + ] + return timesteps_outer, orders + + def denoise_to_zero_fn(self, x, s): + """ + Denoise at the final step, which is equivalent to solve the ODE from lambda_s to infty by first-order discretization. + """ + return self.data_prediction_fn(x, s) + + def dpm_solver_first_update(self, x, s, t, model_s=None, return_intermediate=False): + """ + DPM-Solver-1 (equivalent to DDIM) from time `s` to time `t`. + + Args: + x: A pytorch tensor. The initial value at time `s`. + s: A pytorch tensor. The starting time, with the shape (x.shape[0],). + t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + model_s: A pytorch tensor. The model function evaluated at time `s`. + If `model_s` is None, we evaluate the model by `x` and `s`; otherwise we directly use it. + return_intermediate: A `bool`. If true, also return the model value at time `s`. + Returns: + x_t: A pytorch tensor. The approximated solution at time `t`. + """ + ns = self.noise_schedule + dims = x.dim() + lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t) + h = lambda_t - lambda_s + log_alpha_s, log_alpha_t = ns.marginal_log_mean_coeff( + s + ), ns.marginal_log_mean_coeff(t) + sigma_s, sigma_t = ns.marginal_std(s), ns.marginal_std(t) + alpha_t = torch.exp(log_alpha_t) + + if self.predict_x0: + phi_1 = torch.expm1(-h) + if model_s is None: + model_s = self.model_fn(x, s) + x_t = ( + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + ) + if return_intermediate: + return x_t, {"model_s": model_s} + else: + return x_t + else: + phi_1 = torch.expm1(h) + if model_s is None: + model_s = self.model_fn(x, s) + x_t = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + ) + if return_intermediate: + return x_t, {"model_s": model_s} + else: + return x_t + + def singlestep_dpm_solver_second_update( + self, + x, + s, + t, + r1=0.5, + model_s=None, + return_intermediate=False, + solver_type="dpm_solver", + ): + """ + Singlestep solver DPM-Solver-2 from time `s` to time `t`. + + Args: + x: A pytorch tensor. The initial value at time `s`. 
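A minimal sketch of the first-order (DDIM-equivalent) update in `dpm_solver_first_update` for data-prediction mode: x_t = (sigma_t / sigma_s) · x_s − alpha_t · expm1(−h) · x0_pred with h = lambda_t − lambda_s. The discrete schedule, step indices, and model output below are stand-ins chosen only to demonstrate the formula.

```python
import torch

betas = torch.linspace(1e-4, 2e-2, 1000)
log_alphas = 0.5 * torch.log(1 - betas).cumsum(dim=0)
alphas = log_alphas.exp()
sigmas = (1 - alphas ** 2).sqrt()
lambdas = log_alphas - sigmas.log()

s_idx, t_idx = 800, 600                        # step from a noisier time s to a cleaner time t
x_s = torch.randn(1, 8, 256, 16)
x0_pred = torch.randn_like(x_s)                # stand-in for the data-prediction model output

h = lambdas[t_idx] - lambdas[s_idx]
x_t = (sigmas[t_idx] / sigmas[s_idx]) * x_s - alphas[t_idx] * torch.expm1(-h) * x0_pred
print(x_t.shape)
```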
+ s: A pytorch tensor. The starting time, with the shape (x.shape[0],). + t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + r1: A `float`. The hyperparameter of the second-order solver. + model_s: A pytorch tensor. The model function evaluated at time `s`. + If `model_s` is None, we evaluate the model by `x` and `s`; otherwise we directly use it. + return_intermediate: A `bool`. If true, also return the model value at time `s` and `s1` (the intermediate time). + solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + Returns: + x_t: A pytorch tensor. The approximated solution at time `t`. + """ + if solver_type not in ["dpm_solver", "taylor"]: + raise ValueError( + "'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format( + solver_type + ) + ) + if r1 is None: + r1 = 0.5 + ns = self.noise_schedule + dims = x.dim() + lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t) + h = lambda_t - lambda_s + lambda_s1 = lambda_s + r1 * h + s1 = ns.inverse_lambda(lambda_s1) + log_alpha_s, log_alpha_s1, log_alpha_t = ( + ns.marginal_log_mean_coeff(s), + ns.marginal_log_mean_coeff(s1), + ns.marginal_log_mean_coeff(t), + ) + sigma_s, sigma_s1, sigma_t = ( + ns.marginal_std(s), + ns.marginal_std(s1), + ns.marginal_std(t), + ) + alpha_s1, alpha_t = torch.exp(log_alpha_s1), torch.exp(log_alpha_t) + + if self.predict_x0: + phi_11 = torch.expm1(-r1 * h) + phi_1 = torch.expm1(-h) + + if model_s is None: + model_s = self.model_fn(x, s) + x_s1 = ( + expand_dims(sigma_s1 / sigma_s, dims) * x + - expand_dims(alpha_s1 * phi_11, dims) * model_s + ) + model_s1 = self.model_fn(x_s1, s1) + if solver_type == "dpm_solver": + x_t = ( + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + - (0.5 / r1) + * expand_dims(alpha_t * phi_1, dims) + * (model_s1 - model_s) + ) + elif solver_type == "taylor": + x_t = ( + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + + (1.0 / r1) + * expand_dims(alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0), dims) + * (model_s1 - model_s) + ) + else: + phi_11 = torch.expm1(r1 * h) + phi_1 = torch.expm1(h) + + if model_s is None: + model_s = self.model_fn(x, s) + x_s1 = ( + expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x + - expand_dims(sigma_s1 * phi_11, dims) * model_s + ) + model_s1 = self.model_fn(x_s1, s1) + if solver_type == "dpm_solver": + x_t = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + - (0.5 / r1) + * expand_dims(sigma_t * phi_1, dims) + * (model_s1 - model_s) + ) + elif solver_type == "taylor": + x_t = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + - (1.0 / r1) + * expand_dims(sigma_t * ((torch.exp(h) - 1.0) / h - 1.0), dims) + * (model_s1 - model_s) + ) + if return_intermediate: + return x_t, {"model_s": model_s, "model_s1": model_s1} + else: + return x_t + + def singlestep_dpm_solver_third_update( + self, + x, + s, + t, + r1=1.0 / 3.0, + r2=2.0 / 3.0, + model_s=None, + model_s1=None, + return_intermediate=False, + solver_type="dpm_solver", + ): + """ + Singlestep solver DPM-Solver-3 from time `s` to time `t`. + + Args: + x: A pytorch tensor. The initial value at time `s`. + s: A pytorch tensor. The starting time, with the shape (x.shape[0],). + t: A pytorch tensor. 
The ending time, with the shape (x.shape[0],). + r1: A `float`. The hyperparameter of the third-order solver. + r2: A `float`. The hyperparameter of the third-order solver. + model_s: A pytorch tensor. The model function evaluated at time `s`. + If `model_s` is None, we evaluate the model by `x` and `s`; otherwise we directly use it. + model_s1: A pytorch tensor. The model function evaluated at time `s1` (the intermediate time given by `r1`). + If `model_s1` is None, we evaluate the model at `s1`; otherwise we directly use it. + return_intermediate: A `bool`. If true, also return the model value at time `s`, `s1` and `s2` (the intermediate times). + solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + Returns: + x_t: A pytorch tensor. The approximated solution at time `t`. + """ + if solver_type not in ["dpm_solver", "taylor"]: + raise ValueError( + "'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format( + solver_type + ) + ) + if r1 is None: + r1 = 1.0 / 3.0 + if r2 is None: + r2 = 2.0 / 3.0 + ns = self.noise_schedule + dims = x.dim() + lambda_s, lambda_t = ns.marginal_lambda(s), ns.marginal_lambda(t) + h = lambda_t - lambda_s + lambda_s1 = lambda_s + r1 * h + lambda_s2 = lambda_s + r2 * h + s1 = ns.inverse_lambda(lambda_s1) + s2 = ns.inverse_lambda(lambda_s2) + log_alpha_s, log_alpha_s1, log_alpha_s2, log_alpha_t = ( + ns.marginal_log_mean_coeff(s), + ns.marginal_log_mean_coeff(s1), + ns.marginal_log_mean_coeff(s2), + ns.marginal_log_mean_coeff(t), + ) + sigma_s, sigma_s1, sigma_s2, sigma_t = ( + ns.marginal_std(s), + ns.marginal_std(s1), + ns.marginal_std(s2), + ns.marginal_std(t), + ) + alpha_s1, alpha_s2, alpha_t = ( + torch.exp(log_alpha_s1), + torch.exp(log_alpha_s2), + torch.exp(log_alpha_t), + ) + + if self.predict_x0: + phi_11 = torch.expm1(-r1 * h) + phi_12 = torch.expm1(-r2 * h) + phi_1 = torch.expm1(-h) + phi_22 = torch.expm1(-r2 * h) / (r2 * h) + 1.0 + phi_2 = phi_1 / h + 1.0 + phi_3 = phi_2 / h - 0.5 + + if model_s is None: + model_s = self.model_fn(x, s) + if model_s1 is None: + x_s1 = ( + expand_dims(sigma_s1 / sigma_s, dims) * x + - expand_dims(alpha_s1 * phi_11, dims) * model_s + ) + model_s1 = self.model_fn(x_s1, s1) + x_s2 = ( + expand_dims(sigma_s2 / sigma_s, dims) * x + - expand_dims(alpha_s2 * phi_12, dims) * model_s + + r2 / r1 * expand_dims(alpha_s2 * phi_22, dims) * (model_s1 - model_s) + ) + model_s2 = self.model_fn(x_s2, s2) + if solver_type == "dpm_solver": + x_t = ( + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + + (1.0 / r2) + * expand_dims(alpha_t * phi_2, dims) + * (model_s2 - model_s) + ) + elif solver_type == "taylor": + D1_0 = (1.0 / r1) * (model_s1 - model_s) + D1_1 = (1.0 / r2) * (model_s2 - model_s) + D1 = (r2 * D1_0 - r1 * D1_1) / (r2 - r1) + D2 = 2.0 * (D1_1 - D1_0) / (r2 - r1) + x_t = ( + expand_dims(sigma_t / sigma_s, dims) * x + - expand_dims(alpha_t * phi_1, dims) * model_s + + expand_dims(alpha_t * phi_2, dims) * D1 + - expand_dims(alpha_t * phi_3, dims) * D2 + ) + else: + phi_11 = torch.expm1(r1 * h) + phi_12 = torch.expm1(r2 * h) + phi_1 = torch.expm1(h) + phi_22 = torch.expm1(r2 * h) / (r2 * h) - 1.0 + phi_2 = phi_1 / h - 1.0 + phi_3 = phi_2 / h - 0.5 + + if model_s is None: + model_s = self.model_fn(x, s) + if model_s1 is None: + x_s1 = ( + expand_dims(torch.exp(log_alpha_s1 - log_alpha_s), dims) * x + - expand_dims(sigma_s1 * phi_11, dims) * model_s + ) + model_s1 
= self.model_fn(x_s1, s1) + x_s2 = ( + expand_dims(torch.exp(log_alpha_s2 - log_alpha_s), dims) * x + - expand_dims(sigma_s2 * phi_12, dims) * model_s + - r2 / r1 * expand_dims(sigma_s2 * phi_22, dims) * (model_s1 - model_s) + ) + model_s2 = self.model_fn(x_s2, s2) + if solver_type == "dpm_solver": + x_t = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + - (1.0 / r2) + * expand_dims(sigma_t * phi_2, dims) + * (model_s2 - model_s) + ) + elif solver_type == "taylor": + D1_0 = (1.0 / r1) * (model_s1 - model_s) + D1_1 = (1.0 / r2) * (model_s2 - model_s) + D1 = (r2 * D1_0 - r1 * D1_1) / (r2 - r1) + D2 = 2.0 * (D1_1 - D1_0) / (r2 - r1) + x_t = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_s), dims) * x + - expand_dims(sigma_t * phi_1, dims) * model_s + - expand_dims(sigma_t * phi_2, dims) * D1 + - expand_dims(sigma_t * phi_3, dims) * D2 + ) + + if return_intermediate: + return x_t, {"model_s": model_s, "model_s1": model_s1, "model_s2": model_s2} + else: + return x_t + + def multistep_dpm_solver_second_update( + self, x, model_prev_list, t_prev_list, t, solver_type="dpm_solver" + ): + """ + Multistep solver DPM-Solver-2 from time `t_prev_list[-1]` to time `t`. + + Args: + x: A pytorch tensor. The initial value at time `s`. + model_prev_list: A list of pytorch tensor. The previous computed model values. + t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],) + t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + Returns: + x_t: A pytorch tensor. The approximated solution at time `t`. 
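+
+        In the data-prediction case (`predict_x0 == True`) with the "dpm_solver" type, the
+        update implemented below reduces to
+            x_t = (sigma_t / sigma_{prev_0}) * x - alpha_t * (exp(-h) - 1) * (model_prev_0 + 0.5 * D1_0),
+        where h = lambda_t - lambda_{prev_0} and D1_0 = (h / h_0) * (model_prev_0 - model_prev_1).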
+ """ + if solver_type not in ["dpm_solver", "taylor"]: + raise ValueError( + "'solver_type' must be either 'dpm_solver' or 'taylor', got {}".format( + solver_type + ) + ) + ns = self.noise_schedule + dims = x.dim() + model_prev_1, model_prev_0 = model_prev_list + t_prev_1, t_prev_0 = t_prev_list + lambda_prev_1, lambda_prev_0, lambda_t = ( + ns.marginal_lambda(t_prev_1), + ns.marginal_lambda(t_prev_0), + ns.marginal_lambda(t), + ) + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff( + t_prev_0 + ), ns.marginal_log_mean_coeff(t) + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + alpha_t = torch.exp(log_alpha_t) + + h_0 = lambda_prev_0 - lambda_prev_1 + h = lambda_t - lambda_prev_0 + r0 = h_0 / h + D1_0 = expand_dims(1.0 / r0, dims) * (model_prev_0 - model_prev_1) + if self.predict_x0: + if solver_type == "dpm_solver": + x_t = ( + expand_dims(sigma_t / sigma_prev_0, dims) * x + - expand_dims(alpha_t * (torch.exp(-h) - 1.0), dims) * model_prev_0 + - 0.5 * expand_dims(alpha_t * (torch.exp(-h) - 1.0), dims) * D1_0 + ) + elif solver_type == "taylor": + x_t = ( + expand_dims(sigma_t / sigma_prev_0, dims) * x + - expand_dims(alpha_t * (torch.exp(-h) - 1.0), dims) * model_prev_0 + + expand_dims(alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0), dims) + * D1_0 + ) + else: + if solver_type == "dpm_solver": + x_t = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x + - expand_dims(sigma_t * (torch.exp(h) - 1.0), dims) * model_prev_0 + - 0.5 * expand_dims(sigma_t * (torch.exp(h) - 1.0), dims) * D1_0 + ) + elif solver_type == "taylor": + x_t = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x + - expand_dims(sigma_t * (torch.exp(h) - 1.0), dims) * model_prev_0 + - expand_dims(sigma_t * ((torch.exp(h) - 1.0) / h - 1.0), dims) + * D1_0 + ) + return x_t + + def multistep_dpm_solver_third_update( + self, x, model_prev_list, t_prev_list, t, solver_type="dpm_solver" + ): + """ + Multistep solver DPM-Solver-3 from time `t_prev_list[-1]` to time `t`. + + Args: + x: A pytorch tensor. The initial value at time `s`. + model_prev_list: A list of pytorch tensor. The previous computed model values. + t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],) + t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + Returns: + x_t: A pytorch tensor. The approximated solution at time `t`. 
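+
+        The update below is built from the first-order differences of the cached model outputs,
+            D1_0 = (model_prev_0 - model_prev_1) / r0,  D1_1 = (model_prev_1 - model_prev_2) / r1,
+        which are combined into the first- and second-order terms D1 and D2 of the
+        third-order correction.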
+ """ + ns = self.noise_schedule + dims = x.dim() + model_prev_2, model_prev_1, model_prev_0 = model_prev_list + t_prev_2, t_prev_1, t_prev_0 = t_prev_list + lambda_prev_2, lambda_prev_1, lambda_prev_0, lambda_t = ( + ns.marginal_lambda(t_prev_2), + ns.marginal_lambda(t_prev_1), + ns.marginal_lambda(t_prev_0), + ns.marginal_lambda(t), + ) + log_alpha_prev_0, log_alpha_t = ns.marginal_log_mean_coeff( + t_prev_0 + ), ns.marginal_log_mean_coeff(t) + sigma_prev_0, sigma_t = ns.marginal_std(t_prev_0), ns.marginal_std(t) + alpha_t = torch.exp(log_alpha_t) + + h_1 = lambda_prev_1 - lambda_prev_2 + h_0 = lambda_prev_0 - lambda_prev_1 + h = lambda_t - lambda_prev_0 + r0, r1 = h_0 / h, h_1 / h + D1_0 = expand_dims(1.0 / r0, dims) * (model_prev_0 - model_prev_1) + D1_1 = expand_dims(1.0 / r1, dims) * (model_prev_1 - model_prev_2) + D1 = D1_0 + expand_dims(r0 / (r0 + r1), dims) * (D1_0 - D1_1) + D2 = expand_dims(1.0 / (r0 + r1), dims) * (D1_0 - D1_1) + if self.predict_x0: + x_t = ( + expand_dims(sigma_t / sigma_prev_0, dims) * x + - expand_dims(alpha_t * (torch.exp(-h) - 1.0), dims) * model_prev_0 + + expand_dims(alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0), dims) * D1 + - expand_dims( + alpha_t * ((torch.exp(-h) - 1.0 + h) / h**2 - 0.5), dims + ) + * D2 + ) + else: + x_t = ( + expand_dims(torch.exp(log_alpha_t - log_alpha_prev_0), dims) * x + - expand_dims(sigma_t * (torch.exp(h) - 1.0), dims) * model_prev_0 + - expand_dims(sigma_t * ((torch.exp(h) - 1.0) / h - 1.0), dims) * D1 + - expand_dims(sigma_t * ((torch.exp(h) - 1.0 - h) / h**2 - 0.5), dims) + * D2 + ) + return x_t + + def singlestep_dpm_solver_update( + self, + x, + s, + t, + order, + return_intermediate=False, + solver_type="dpm_solver", + r1=None, + r2=None, + ): + """ + Singlestep DPM-Solver with the order `order` from time `s` to time `t`. + + Args: + x: A pytorch tensor. The initial value at time `s`. + s: A pytorch tensor. The starting time, with the shape (x.shape[0],). + t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + order: A `int`. The order of DPM-Solver. We only support order == 1 or 2 or 3. + return_intermediate: A `bool`. If true, also return the model value at time `s`, `s1` and `s2` (the intermediate times). + solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + r1: A `float`. The hyperparameter of the second-order or third-order solver. + r2: A `float`. The hyperparameter of the third-order solver. + Returns: + x_t: A pytorch tensor. The approximated solution at time `t`. + """ + if order == 1: + return self.dpm_solver_first_update( + x, s, t, return_intermediate=return_intermediate + ) + elif order == 2: + return self.singlestep_dpm_solver_second_update( + x, + s, + t, + return_intermediate=return_intermediate, + solver_type=solver_type, + r1=r1, + ) + elif order == 3: + return self.singlestep_dpm_solver_third_update( + x, + s, + t, + return_intermediate=return_intermediate, + solver_type=solver_type, + r1=r1, + r2=r2, + ) + else: + raise ValueError("Solver order must be 1 or 2 or 3, got {}".format(order)) + + def multistep_dpm_solver_update( + self, x, model_prev_list, t_prev_list, t, order, solver_type="dpm_solver" + ): + """ + Multistep DPM-Solver with the order `order` from time `t_prev_list[-1]` to time `t`. + + Args: + x: A pytorch tensor. The initial value at time `s`. + model_prev_list: A list of pytorch tensor. The previous computed model values. 
+ t_prev_list: A list of pytorch tensor. The previous times, each time has the shape (x.shape[0],) + t: A pytorch tensor. The ending time, with the shape (x.shape[0],). + order: A `int`. The order of DPM-Solver. We only support order == 1 or 2 or 3. + solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + Returns: + x_t: A pytorch tensor. The approximated solution at time `t`. + """ + if order == 1: + return self.dpm_solver_first_update( + x, t_prev_list[-1], t, model_s=model_prev_list[-1] + ) + elif order == 2: + return self.multistep_dpm_solver_second_update( + x, model_prev_list, t_prev_list, t, solver_type=solver_type + ) + elif order == 3: + return self.multistep_dpm_solver_third_update( + x, model_prev_list, t_prev_list, t, solver_type=solver_type + ) + else: + raise ValueError("Solver order must be 1 or 2 or 3, got {}".format(order)) + + def dpm_solver_adaptive( + self, + x, + order, + t_T, + t_0, + h_init=0.05, + atol=0.0078, + rtol=0.05, + theta=0.9, + t_err=1e-5, + solver_type="dpm_solver", + ): + """ + The adaptive step size solver based on singlestep DPM-Solver. + + Args: + x: A pytorch tensor. The initial value at time `t_T`. + order: A `int`. The (higher) order of the solver. We only support order == 2 or 3. + t_T: A `float`. The starting time of the sampling (default is T). + t_0: A `float`. The ending time of the sampling (default is epsilon). + h_init: A `float`. The initial step size (for logSNR). + atol: A `float`. The absolute tolerance of the solver. For image data, the default setting is 0.0078, followed [1]. + rtol: A `float`. The relative tolerance of the solver. The default setting is 0.05. + theta: A `float`. The safety hyperparameter for adapting the step size. The default setting is 0.9, followed [1]. + t_err: A `float`. The tolerance for the time. We solve the diffusion ODE until the absolute error between the + current time and `t_0` is less than `t_err`. The default setting is 1e-5. + solver_type: either 'dpm_solver' or 'taylor'. The type for the high-order solvers. + The type slightly impacts the performance. We recommend to use 'dpm_solver' type. + Returns: + x_0: A pytorch tensor. The approximated solution at time `t_0`. + + [1] A. Jolicoeur-Martineau, K. Li, R. Piché-Taillefer, T. Kachman, and I. Mitliagkas, "Gotta go fast when generating data with score-based models," arXiv preprint arXiv:2105.14080, 2021. 
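+
+        Illustrative call (a sketch; assumes `dpm_solver` is an already constructed `DPM_Solver`
+        and `x_T` is the initial noise at time `t_T`):
+            >>> x_0 = dpm_solver.dpm_solver_adaptive(x_T, order=3, t_T=1.0, t_0=1e-3)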
+ """ + ns = self.noise_schedule + s = t_T * torch.ones((x.shape[0],)).to(x) + lambda_s = ns.marginal_lambda(s) + lambda_0 = ns.marginal_lambda(t_0 * torch.ones_like(s).to(x)) + h = h_init * torch.ones_like(s).to(x) + x_prev = x + nfe = 0 + if order == 2: + r1 = 0.5 + lower_update = lambda x, s, t: self.dpm_solver_first_update( + x, s, t, return_intermediate=True + ) + higher_update = ( + lambda x, s, t, **kwargs: self.singlestep_dpm_solver_second_update( + x, s, t, r1=r1, solver_type=solver_type, **kwargs + ) + ) + elif order == 3: + r1, r2 = 1.0 / 3.0, 2.0 / 3.0 + lower_update = lambda x, s, t: self.singlestep_dpm_solver_second_update( + x, s, t, r1=r1, return_intermediate=True, solver_type=solver_type + ) + higher_update = ( + lambda x, s, t, **kwargs: self.singlestep_dpm_solver_third_update( + x, s, t, r1=r1, r2=r2, solver_type=solver_type, **kwargs + ) + ) + else: + raise ValueError( + "For adaptive step size solver, order must be 2 or 3, got {}".format( + order + ) + ) + while torch.abs((s - t_0)).mean() > t_err: + t = ns.inverse_lambda(lambda_s + h) + x_lower, lower_noise_kwargs = lower_update(x, s, t) + x_higher = higher_update(x, s, t, **lower_noise_kwargs) + delta = torch.max( + torch.ones_like(x).to(x) * atol, + rtol * torch.max(torch.abs(x_lower), torch.abs(x_prev)), + ) + norm_fn = lambda v: torch.sqrt( + torch.square(v.reshape((v.shape[0], -1))).mean(dim=-1, keepdim=True) + ) + E = norm_fn((x_higher - x_lower) / delta).max() + if torch.all(E <= 1.0): + x = x_higher + s = t + x_prev = x_lower + lambda_s = ns.marginal_lambda(s) + h = torch.min( + theta * h * torch.float_power(E, -1.0 / order).float(), + lambda_0 - lambda_s, + ) + nfe += order + print("adaptive solver nfe", nfe) + return x + + def sample( + self, + x, + steps=20, + t_start=None, + t_end=None, + order=3, + skip_type="time_uniform", + method="singlestep", + lower_order_final=True, + denoise_to_zero=False, + solver_type="dpm_solver", + atol=0.0078, + rtol=0.05, + ): + """ + Compute the sample at time `t_end` by DPM-Solver, given the initial `x` at time `t_start`. + + ===================================================== + + We support the following algorithms for both noise prediction model and data prediction model: + - 'singlestep': + Singlestep DPM-Solver (i.e. "DPM-Solver-fast" in the paper), which combines different orders of singlestep DPM-Solver. + We combine all the singlestep solvers with order <= `order` to use up all the function evaluations (steps). + The total number of function evaluations (NFE) == `steps`. + Given a fixed NFE == `steps`, the sampling procedure is: + - If `order` == 1: + - Denote K = steps. We use K steps of DPM-Solver-1 (i.e. DDIM). + - If `order` == 2: + - Denote K = (steps // 2) + (steps % 2). We take K intermediate time steps for sampling. + - If steps % 2 == 0, we use K steps of singlestep DPM-Solver-2. + - If steps % 2 == 1, we use (K - 1) steps of singlestep DPM-Solver-2 and 1 step of DPM-Solver-1. + - If `order` == 3: + - Denote K = (steps // 3 + 1). We take K intermediate time steps for sampling. + - If steps % 3 == 0, we use (K - 2) steps of singlestep DPM-Solver-3, and 1 step of singlestep DPM-Solver-2 and 1 step of DPM-Solver-1. + - If steps % 3 == 1, we use (K - 1) steps of singlestep DPM-Solver-3 and 1 step of DPM-Solver-1. + - If steps % 3 == 2, we use (K - 1) steps of singlestep DPM-Solver-3 and 1 step of singlestep DPM-Solver-2. + - 'multistep': + Multistep DPM-Solver with the order of `order`. The total number of function evaluations (NFE) == `steps`. 
+                We initialize the first `order` values by lower order multistep solvers.
+                Given a fixed NFE == `steps`, the sampling procedure is:
+                    Denote K = steps.
+                    - If `order` == 1:
+                        - We use K steps of DPM-Solver-1 (i.e. DDIM).
+                    - If `order` == 2:
+                        - We first use 1 step of DPM-Solver-1, then (K - 1) steps of multistep DPM-Solver-2.
+                    - If `order` == 3:
+                        - We first use 1 step of DPM-Solver-1, then 1 step of multistep DPM-Solver-2, then (K - 2) steps of multistep DPM-Solver-3.
+            - 'singlestep_fixed':
+                Fixed order singlestep DPM-Solver (i.e. DPM-Solver-1 or singlestep DPM-Solver-2 or singlestep DPM-Solver-3).
+                We use singlestep DPM-Solver-`order` for `order`=1 or 2 or 3, with total [`steps` // `order`] * `order` NFE.
+            - 'adaptive':
+                Adaptive step size DPM-Solver (i.e. "DPM-Solver-12" and "DPM-Solver-23" in the paper).
+                We ignore `steps` and use adaptive step size DPM-Solver with a higher order of `order`.
+                You can adjust the absolute tolerance `atol` and the relative tolerance `rtol` to balance the computation costs
+                (NFE) and the sample quality.
+                - If `order` == 2, we use DPM-Solver-12 which combines DPM-Solver-1 and singlestep DPM-Solver-2.
+                - If `order` == 3, we use DPM-Solver-23 which combines singlestep DPM-Solver-2 and singlestep DPM-Solver-3.
+
+        =====================================================
+
+        Some advice for choosing the algorithm:
+            - For **unconditional sampling** or **guided sampling with small guidance scale** by DPMs:
+                Use singlestep DPM-Solver ("DPM-Solver-fast" in the paper) with `order = 3`.
+                e.g.
+                    >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, predict_x0=False)
+                    >>> x_sample = dpm_solver.sample(x, steps=steps, t_start=t_start, t_end=t_end, order=3,
+                            skip_type='time_uniform', method='singlestep')
+            - For **guided sampling with large guidance scale** by DPMs:
+                Use multistep DPM-Solver with `predict_x0 = True` and `order = 2`.
+                e.g.
+                    >>> dpm_solver = DPM_Solver(model_fn, noise_schedule, predict_x0=True)
+                    >>> x_sample = dpm_solver.sample(x, steps=steps, t_start=t_start, t_end=t_end, order=2,
+                            skip_type='time_uniform', method='multistep')
+
+        We support three types of `skip_type`:
+            - 'logSNR': uniform logSNR for the time steps. **Recommended for low-resolutional images**.
+            - 'time_uniform': uniform time for the time steps. **Recommended for high-resolutional images**.
+            - 'time_quadratic': quadratic time for the time steps.
+
+        =====================================================
+        Args:
+            x: A pytorch tensor. The initial value at time `t_start`
+                e.g. if `t_start` == T, then `x` is a sample from the standard normal distribution.
+            steps: A `int`. The total number of function evaluations (NFE).
+            t_start: A `float`. The starting time of the sampling.
+                If `t_start` is None, we use self.noise_schedule.T (default is 1.0).
+            t_end: A `float`. The ending time of the sampling.
+                If `t_end` is None, we use 1. / self.noise_schedule.total_N.
+                e.g. if total_N == 1000, we have `t_end` == 1e-3.
+                For discrete-time DPMs:
+                    - We recommend `t_end` == 1. / self.noise_schedule.total_N.
+                For continuous-time DPMs:
+                    - We recommend `t_end` == 1e-3 when `steps` <= 15; and `t_end` == 1e-4 when `steps` > 15.
+            order: A `int`. The order of DPM-Solver.
+            skip_type: A `str`. The type for the spacing of the time steps. 'time_uniform' or 'logSNR' or 'time_quadratic'.
+            method: A `str`. The method for sampling. 'singlestep' or 'multistep' or 'singlestep_fixed' or 'adaptive'.
+            denoise_to_zero: A `bool`. Whether to denoise to time 0 at the final step.
+ Default is `False`. If `denoise_to_zero` is `True`, the total NFE is (`steps` + 1). + + This trick is firstly proposed by DDPM (https://arxiv.org/abs/2006.11239) and + score_sde (https://arxiv.org/abs/2011.13456). Such trick can improve the FID + for diffusion models sampling by diffusion SDEs for low-resolutional images + (such as CIFAR-10). However, we observed that such trick does not matter for + high-resolutional images. As it needs an additional NFE, we do not recommend + it for high-resolutional images. + lower_order_final: A `bool`. Whether to use lower order solvers at the final steps. + Only valid for `method=multistep` and `steps < 15`. We empirically find that + this trick is a key to stabilizing the sampling by DPM-Solver with very few steps + (especially for steps <= 10). So we recommend to set it to be `True`. + solver_type: A `str`. The taylor expansion type for the solver. `dpm_solver` or `taylor`. We recommend `dpm_solver`. + atol: A `float`. The absolute tolerance of the adaptive step size solver. Valid when `method` == 'adaptive'. + rtol: A `float`. The relative tolerance of the adaptive step size solver. Valid when `method` == 'adaptive'. + Returns: + x_end: A pytorch tensor. The approximated solution at time `t_end`. + + """ + t_0 = 1.0 / self.noise_schedule.total_N if t_end is None else t_end + t_T = self.noise_schedule.T if t_start is None else t_start + device = x.device + if method == "adaptive": + with torch.no_grad(): + x = self.dpm_solver_adaptive( + x, + order=order, + t_T=t_T, + t_0=t_0, + atol=atol, + rtol=rtol, + solver_type=solver_type, + ) + elif method == "multistep": + assert steps >= order + timesteps = self.get_time_steps( + skip_type=skip_type, t_T=t_T, t_0=t_0, N=steps, device=device + ) + assert timesteps.shape[0] - 1 == steps + with torch.no_grad(): + vec_t = timesteps[0].expand((x.shape[0])) + model_prev_list = [self.model_fn(x, vec_t)] + t_prev_list = [vec_t] + # Init the first `order` values by lower order multistep DPM-Solver. + for init_order in range(1, order): + vec_t = timesteps[init_order].expand(x.shape[0]) + x = self.multistep_dpm_solver_update( + x, + model_prev_list, + t_prev_list, + vec_t, + init_order, + solver_type=solver_type, + ) + model_prev_list.append(self.model_fn(x, vec_t)) + t_prev_list.append(vec_t) + # Compute the remaining values by `order`-th order multistep DPM-Solver. + for step in range(order, steps + 1): + vec_t = timesteps[step].expand(x.shape[0]) + if lower_order_final and steps < 15: + step_order = min(order, steps + 1 - step) + else: + step_order = order + x = self.multistep_dpm_solver_update( + x, + model_prev_list, + t_prev_list, + vec_t, + step_order, + solver_type=solver_type, + ) + for i in range(order - 1): + t_prev_list[i] = t_prev_list[i + 1] + model_prev_list[i] = model_prev_list[i + 1] + t_prev_list[-1] = vec_t + # We do not need to evaluate the final model value. 
+ if step < steps: + model_prev_list[-1] = self.model_fn(x, vec_t) + elif method in ["singlestep", "singlestep_fixed"]: + if method == "singlestep": + ( + timesteps_outer, + orders, + ) = self.get_orders_and_timesteps_for_singlestep_solver( + steps=steps, + order=order, + skip_type=skip_type, + t_T=t_T, + t_0=t_0, + device=device, + ) + elif method == "singlestep_fixed": + K = steps // order + orders = [ + order, + ] * K + timesteps_outer = self.get_time_steps( + skip_type=skip_type, t_T=t_T, t_0=t_0, N=K, device=device + ) + for i, order in enumerate(orders): + t_T_inner, t_0_inner = timesteps_outer[i], timesteps_outer[i + 1] + timesteps_inner = self.get_time_steps( + skip_type=skip_type, + t_T=t_T_inner.item(), + t_0=t_0_inner.item(), + N=order, + device=device, + ) + lambda_inner = self.noise_schedule.marginal_lambda(timesteps_inner) + vec_s, vec_t = t_T_inner.tile(x.shape[0]), t_0_inner.tile(x.shape[0]) + h = lambda_inner[-1] - lambda_inner[0] + r1 = None if order <= 1 else (lambda_inner[1] - lambda_inner[0]) / h + r2 = None if order <= 2 else (lambda_inner[2] - lambda_inner[0]) / h + x = self.singlestep_dpm_solver_update( + x, vec_s, vec_t, order, solver_type=solver_type, r1=r1, r2=r2 + ) + if denoise_to_zero: + x = self.denoise_to_zero_fn(x, torch.ones((x.shape[0],)).to(device) * t_0) + return x + + +############################################################# +# other utility functions +############################################################# + + +def interpolate_fn(x, xp, yp): + """ + A piecewise linear function y = f(x), using xp and yp as keypoints. + We implement f(x) in a differentiable way (i.e. applicable for autograd). + The function f(x) is well-defined for all x-axis. (For x beyond the bounds of xp, we use the outmost points of xp to define the linear function.) + + Args: + x: PyTorch tensor with shape [N, C], where N is the batch size, C is the number of channels (we use C = 1 for DPM-Solver). + xp: PyTorch tensor with shape [C, K], where K is the number of keypoints. + yp: PyTorch tensor with shape [C, K]. + Returns: + The function values f(x), with shape [N, C]. + """ + N, K = x.shape[0], xp.shape[1] + all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2) + sorted_all_x, x_indices = torch.sort(all_x, dim=2) + x_idx = torch.argmin(x_indices, dim=2) + cand_start_idx = x_idx - 1 + start_idx = torch.where( + torch.eq(x_idx, 0), + torch.tensor(1, device=x.device), + torch.where( + torch.eq(x_idx, K), + torch.tensor(K - 2, device=x.device), + cand_start_idx, + ), + ) + end_idx = torch.where( + torch.eq(start_idx, cand_start_idx), start_idx + 2, start_idx + 1 + ) + start_x = torch.gather(sorted_all_x, dim=2, index=start_idx.unsqueeze(2)).squeeze(2) + end_x = torch.gather(sorted_all_x, dim=2, index=end_idx.unsqueeze(2)).squeeze(2) + start_idx2 = torch.where( + torch.eq(x_idx, 0), + torch.tensor(0, device=x.device), + torch.where( + torch.eq(x_idx, K), + torch.tensor(K - 2, device=x.device), + cand_start_idx, + ), + ) + y_positions_expanded = yp.unsqueeze(0).expand(N, -1, -1) + start_y = torch.gather( + y_positions_expanded, dim=2, index=start_idx2.unsqueeze(2) + ).squeeze(2) + end_y = torch.gather( + y_positions_expanded, dim=2, index=(start_idx2 + 1).unsqueeze(2) + ).squeeze(2) + cand = start_y + (x - start_x) * (end_y - start_y) / (end_x - start_x) + return cand + + +def expand_dims(v, dims): + """ + Expand the tensor `v` to the dim `dims`. + + Args: + `v`: a PyTorch tensor with shape [N]. + `dim`: a `int`. 
+ Returns: + a PyTorch tensor with shape [N, 1, 1, ..., 1] and the total dimension is `dims`. + """ + return v[(...,) + (None,) * (dims - 1)] diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/sampler.py b/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..7deed187bff09c51de9464a80b665515156fa433 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/latent_diffusion/dpm_solver/sampler.py @@ -0,0 +1,94 @@ +"""SAMPLING ONLY.""" + +import torch + +from .dpm_solver import NoiseScheduleVP, model_wrapper, DPM_Solver + + +class DPMSolverSampler(object): + def __init__(self, model, **kwargs): + super().__init__() + self.model = model + to_torch = lambda x: x.clone().detach().to(torch.float32).to(model.device) + self.register_buffer("alphas_cumprod", to_torch(model.alphas_cumprod)) + + def register_buffer(self, name, attr): + if type(attr) == torch.Tensor: + if attr.device != torch.device("cuda"): + attr = attr.to(torch.device("cuda")) + setattr(self, name, attr) + + @torch.no_grad() + def sample( + self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0.0, + mask=None, + x0=None, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + verbose=True, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ... + **kwargs, + ): + if conditioning is not None: + if isinstance(conditioning, dict): + cbs = conditioning[list(conditioning.keys())[0]].shape[0] + if cbs != batch_size: + print( + f"Warning: Got {cbs} conditionings but batch-size is {batch_size}" + ) + else: + if conditioning.shape[0] != batch_size: + print( + f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}" + ) + + # sampling + C, H, W = shape + size = (batch_size, C, H, W) + + # print(f'Data shape for DPM-Solver sampling is {size}, sampling steps {S}') + + device = self.model.betas.device + if x_T is None: + img = torch.randn(size, device=device) + else: + img = x_T + + ns = NoiseScheduleVP("discrete", alphas_cumprod=self.alphas_cumprod) + + model_fn = model_wrapper( + lambda x, t, c: self.model.apply_model(x, t, c), + ns, + model_type="noise", + guidance_type="classifier-free", + condition=conditioning, + unconditional_condition=unconditional_conditioning, + guidance_scale=unconditional_guidance_scale, + ) + + dpm_solver = DPM_Solver(model_fn, ns, predict_x0=True, thresholding=False) + x = dpm_solver.sample( + img, + steps=S, + skip_type="time_uniform", + method="multistep", + order=2, + lower_order_final=True, + ) + + return x.to(device), None diff --git a/qa_mdt/audioldm_train/modules/latent_diffusion/plms.py b/qa_mdt/audioldm_train/modules/latent_diffusion/plms.py new file mode 100644 index 0000000000000000000000000000000000000000..25399a1603c8ad3f68101e3edf4dd1d4cb7d5730 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/latent_diffusion/plms.py @@ -0,0 +1,361 @@ +"""SAMPLING ONLY.""" + +import torch +import numpy as np +from tqdm import tqdm +from functools import partial + +from qa_mdt.audioldm_train.utilities.diffusion_util import ( + make_ddim_sampling_parameters, + make_ddim_timesteps, + noise_like, +) + + +class PLMSSampler(object): + def __init__(self, model, schedule="linear", **kwargs): + super().__init__() + self.model = model + 
self.ddpm_num_timesteps = model.num_timesteps + self.schedule = schedule + + def register_buffer(self, name, attr): + if type(attr) == torch.Tensor: + if attr.device != torch.device("cuda"): + attr = attr.to(torch.device("cuda")) + setattr(self, name, attr) + + def make_schedule( + self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0.0, verbose=True + ): + if ddim_eta != 0: + ddim_eta = 0 + # raise ValueError('ddim_eta must be 0 for PLMS') + + self.ddim_timesteps = make_ddim_timesteps( + ddim_discr_method=ddim_discretize, + num_ddim_timesteps=ddim_num_steps, + num_ddpm_timesteps=self.ddpm_num_timesteps, + verbose=verbose, + ) + alphas_cumprod = self.model.alphas_cumprod + assert ( + alphas_cumprod.shape[0] == self.ddpm_num_timesteps + ), "alphas have to be defined for each timestep" + to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device) + + self.register_buffer("betas", to_torch(self.model.betas)) + self.register_buffer("alphas_cumprod", to_torch(alphas_cumprod)) + self.register_buffer( + "alphas_cumprod_prev", to_torch(self.model.alphas_cumprod_prev) + ) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer( + "sqrt_alphas_cumprod", to_torch(np.sqrt(alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_one_minus_alphas_cumprod", + to_torch(np.sqrt(1.0 - alphas_cumprod.cpu())), + ) + self.register_buffer( + "log_one_minus_alphas_cumprod", to_torch(np.log(1.0 - alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_recip_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_recipm1_alphas_cumprod", + to_torch(np.sqrt(1.0 / alphas_cumprod.cpu() - 1)), + ) + + # ddim sampling parameters + ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters( + alphacums=alphas_cumprod.cpu(), + ddim_timesteps=self.ddim_timesteps, + eta=ddim_eta, + verbose=verbose, + ) + self.register_buffer("ddim_sigmas", ddim_sigmas) + self.register_buffer("ddim_alphas", ddim_alphas) + self.register_buffer("ddim_alphas_prev", ddim_alphas_prev) + self.register_buffer("ddim_sqrt_one_minus_alphas", np.sqrt(1.0 - ddim_alphas)) + sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt( + (1 - self.alphas_cumprod_prev) + / (1 - self.alphas_cumprod) + * (1 - self.alphas_cumprod / self.alphas_cumprod_prev) + ) + self.register_buffer( + "ddim_sigmas_for_original_num_steps", sigmas_for_original_sampling_steps + ) + + @torch.no_grad() + def sample( + self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0.0, + mask=None, + x0=None, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + verbose=True, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ... 
+ **kwargs, + ): + if conditioning is not None: + if isinstance(conditioning, dict): + cbs = conditioning[list(conditioning.keys())[0]].shape[0] + if cbs != batch_size: + print( + f"Warning: Got {cbs} conditionings but batch-size is {batch_size}" + ) + else: + if conditioning.shape[0] != batch_size: + print( + f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}" + ) + + self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose) + # sampling + C, H, W = shape + size = (batch_size, C, H, W) + print(f"Data shape for PLMS sampling is {size}") + + samples, intermediates = self.plms_sampling( + conditioning, + size, + callback=callback, + img_callback=img_callback, + quantize_denoised=quantize_x0, + mask=mask, + x0=x0, + ddim_use_original_steps=False, + noise_dropout=noise_dropout, + temperature=temperature, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + x_T=x_T, + log_every_t=log_every_t, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + ) + return samples, intermediates + + @torch.no_grad() + def plms_sampling( + self, + cond, + shape, + x_T=None, + ddim_use_original_steps=False, + callback=None, + timesteps=None, + quantize_denoised=False, + mask=None, + x0=None, + img_callback=None, + log_every_t=100, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + ): + device = self.model.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + + if timesteps is None: + timesteps = ( + self.ddpm_num_timesteps + if ddim_use_original_steps + else self.ddim_timesteps + ) + elif timesteps is not None and not ddim_use_original_steps: + subset_end = ( + int( + min(timesteps / self.ddim_timesteps.shape[0], 1) + * self.ddim_timesteps.shape[0] + ) + - 1 + ) + timesteps = self.ddim_timesteps[:subset_end] + + intermediates = {"x_inter": [img], "pred_x0": [img]} + time_range = ( + list(reversed(range(0, timesteps))) + if ddim_use_original_steps + else np.flip(timesteps) + ) + total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0] + print(f"Running PLMS Sampling with {total_steps} timesteps") + + iterator = tqdm(time_range, desc="PLMS Sampler", total=total_steps) + old_eps = [] + + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((b,), step, device=device, dtype=torch.long) + ts_next = torch.full( + (b,), + time_range[min(i + 1, len(time_range) - 1)], + device=device, + dtype=torch.long, + ) + + if mask is not None: + assert x0 is not None + img_orig = self.model.q_sample( + x0, ts + ) # TODO: deterministic forward pass? 
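+                # Inpainting-style conditioning: where mask == 1 keep the re-noised ground
+                # truth `img_orig`; elsewhere keep the current sample `img`.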
+ img = img_orig * mask + (1.0 - mask) * img + + outs = self.p_sample_plms( + img, + cond, + ts, + index=index, + use_original_steps=ddim_use_original_steps, + quantize_denoised=quantize_denoised, + temperature=temperature, + noise_dropout=noise_dropout, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + old_eps=old_eps, + t_next=ts_next, + ) + img, pred_x0, e_t = outs + old_eps.append(e_t) + if len(old_eps) >= 4: + old_eps.pop(0) + if callback: + callback(i) + if img_callback: + img_callback(pred_x0, i) + + if index % log_every_t == 0 or index == total_steps - 1: + intermediates["x_inter"].append(img) + intermediates["pred_x0"].append(pred_x0) + + return img, intermediates + + @torch.no_grad() + def p_sample_plms( + self, + x, + c, + t, + index, + repeat_noise=False, + use_original_steps=False, + quantize_denoised=False, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + old_eps=None, + t_next=None, + ): + b, *_, device = *x.shape, x.device + + def get_model_output(x, t): + if ( + unconditional_conditioning is None + or unconditional_guidance_scale == 1.0 + ): + e_t = self.model.apply_model(x, t, c) + else: + x_in = torch.cat([x] * 2) + t_in = torch.cat([t] * 2) + c_in = torch.cat([unconditional_conditioning, c]) + e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2) + e_t = e_t_uncond + unconditional_guidance_scale * (e_t - e_t_uncond) + + if score_corrector is not None: + assert self.model.parameterization == "eps" + e_t = score_corrector.modify_score( + self.model, e_t, x, t, c, **corrector_kwargs + ) + + return e_t + + alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas + alphas_prev = ( + self.model.alphas_cumprod_prev + if use_original_steps + else self.ddim_alphas_prev + ) + sqrt_one_minus_alphas = ( + self.model.sqrt_one_minus_alphas_cumprod + if use_original_steps + else self.ddim_sqrt_one_minus_alphas + ) + sigmas = ( + self.model.ddim_sigmas_for_original_num_steps + if use_original_steps + else self.ddim_sigmas + ) + + def get_x_prev_and_pred_x0(e_t, index): + # select parameters corresponding to the currently considered timestep + a_t = torch.full((b, 1, 1, 1), alphas[index], device=device) + a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device) + sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device) + sqrt_one_minus_at = torch.full( + (b, 1, 1, 1), sqrt_one_minus_alphas[index], device=device + ) + + # current prediction for x_0 + pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt() + if quantize_denoised: + pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0) + # direction pointing to x_t + dir_xt = (1.0 - a_prev - sigma_t**2).sqrt() * e_t + noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.0: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise + return x_prev, pred_x0 + + e_t = get_model_output(x, t) + if len(old_eps) == 0: + # Pseudo Improved Euler (2nd order) + x_prev, pred_x0 = get_x_prev_and_pred_x0(e_t, index) + e_t_next = get_model_output(x_prev, t_next) + e_t_prime = (e_t + e_t_next) / 2 + elif len(old_eps) == 1: + # 2nd order Pseudo Linear Multistep (Adams-Bashforth) + e_t_prime = (3 * e_t - old_eps[-1]) / 2 + elif len(old_eps) == 2: + 
# 3nd order Pseudo Linear Multistep (Adams-Bashforth) + e_t_prime = (23 * e_t - 16 * old_eps[-1] + 5 * old_eps[-2]) / 12 + elif len(old_eps) >= 3: + # 4nd order Pseudo Linear Multistep (Adams-Bashforth) + e_t_prime = ( + 55 * e_t - 59 * old_eps[-1] + 37 * old_eps[-2] - 9 * old_eps[-3] + ) / 24 + + x_prev, pred_x0 = get_x_prev_and_pred_x0(e_t_prime, index) + + return x_prev, pred_x0, e_t diff --git a/qa_mdt/audioldm_train/modules/latent_encoder/__init__.py b/qa_mdt/audioldm_train/modules/latent_encoder/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/audioldm_train/modules/latent_encoder/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/latent_encoder/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1aee62a07126d07ae85185ea2408543b0fb9bbfb Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_encoder/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_encoder/__pycache__/autoencoder.cpython-310.pyc b/qa_mdt/audioldm_train/modules/latent_encoder/__pycache__/autoencoder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c7b983ab6106b8157251358255a73146d9bf2521 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/latent_encoder/__pycache__/autoencoder.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/latent_encoder/autoencoder.py b/qa_mdt/audioldm_train/modules/latent_encoder/autoencoder.py new file mode 100644 index 0000000000000000000000000000000000000000..d8a891e88431c24583e848c39f3fe84b863c8c66 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/latent_encoder/autoencoder.py @@ -0,0 +1,614 @@ +from email.policy import strict +import torch +import os + +import pytorch_lightning as pl +import torch.nn.functional as F +from contextlib import contextmanager +import numpy as np +from qa_mdt.audioldm_train.modules.diffusionmodules.ema import * + +from torch.optim.lr_scheduler import LambdaLR +from qa_mdt.audioldm_train.modules.diffusionmodules.model import Encoder, Decoder +from qa_mdt.audioldm_train.modules.diffusionmodules.distributions import ( + DiagonalGaussianDistribution, +) + +import wandb +from qa_mdt.audioldm_train.utilities.model_util import instantiate_from_config +import soundfile as sf + +from qa_mdt.audioldm_train.utilities.model_util import get_vocoder +from qa_mdt.audioldm_train.utilities.tools import synth_one_sample +import itertools + + +class AutoencoderKL(pl.LightningModule): + def __init__( + self, + ddconfig=None, + lossconfig=None, + batchsize=None, + embed_dim=None, + time_shuffle=1, + subband=1, + sampling_rate=16000, + ckpt_path=None, + reload_from_ckpt=None, + ignore_keys=[], + image_key="fbank", + colorize_nlabels=None, + monitor=None, + base_learning_rate=1e-5, + ): + super().__init__() + self.automatic_optimization = False + assert ( + "mel_bins" in ddconfig.keys() + ), "mel_bins is not specified in the Autoencoder config" + num_mel = ddconfig["mel_bins"] + self.image_key = image_key + self.sampling_rate = sampling_rate + self.encoder = Encoder(**ddconfig) + self.decoder = Decoder(**ddconfig) + + self.loss = instantiate_from_config(lossconfig) + self.subband = int(subband) + + if self.subband > 1: + print("Use subband decomposition %s" % self.subband) + + assert ddconfig["double_z"] + self.quant_conv = torch.nn.Conv2d(2 * ddconfig["z_channels"], 2 * embed_dim, 1) + self.post_quant_conv = 
torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1) + + if self.image_key == "fbank": + self.vocoder = get_vocoder(None, "cpu", num_mel) + self.embed_dim = embed_dim + if colorize_nlabels is not None: + assert type(colorize_nlabels) == int + self.register_buffer("colorize", torch.randn(3, colorize_nlabels, 1, 1)) + if monitor is not None: + self.monitor = monitor + if ckpt_path is not None: + self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys) + self.learning_rate = float(base_learning_rate) + print("Initial learning rate %s" % self.learning_rate) + + self.time_shuffle = time_shuffle + self.reload_from_ckpt = reload_from_ckpt + self.reloaded = False + self.mean, self.std = None, None + + self.feature_cache = None + self.flag_first_run = True + self.train_step = 0 + + self.logger_save_dir = None + self.logger_exp_name = None + self.logger_exp_group_name = None + + if not self.reloaded and self.reload_from_ckpt is not None: + # import pdb + # pdb.set_trace() + print("--> Reload weight of autoencoder from %s" % self.reload_from_ckpt) + checkpoint = torch.load(self.reload_from_ckpt) + + load_todo_keys = {} + pretrained_state_dict = checkpoint["state_dict"] + current_state_dict = self.state_dict() + for key in current_state_dict: + if ( + key in pretrained_state_dict.keys() + and pretrained_state_dict[key].size() + == current_state_dict[key].size() + ): + load_todo_keys[key] = pretrained_state_dict[key] + else: + print("Key %s mismatch during loading, seems fine" % key) + + self.load_state_dict(load_todo_keys, strict=False) + self.reloaded = True + else: + print("Train from scratch") + + def get_log_dir(self): + return os.path.join( + self.logger_save_dir, self.logger_exp_group_name, self.logger_exp_name + ) + + def set_log_dir(self, save_dir, exp_group_name, exp_name): + self.logger_save_dir = save_dir + self.logger_exp_name = exp_name + self.logger_exp_group_name = exp_group_name + + def init_from_ckpt(self, path, ignore_keys=list()): + sd = torch.load(path, map_location="cpu")["state_dict"] + keys = list(sd.keys()) + for k in keys: + for ik in ignore_keys: + if k.startswith(ik): + print("Deleting key {} from state_dict.".format(k)) + del sd[k] + self.load_state_dict(sd, strict=False) + print(f"Restored from {path}") + + def encode(self, x): + # x = self.time_shuffle_operation(x) + x = self.freq_split_subband(x) + h = self.encoder(x) + moments = self.quant_conv(h) + posterior = DiagonalGaussianDistribution(moments) + return posterior + + def decode(self, z): + z = self.post_quant_conv(z) + dec = self.decoder(z) + # bs, ch, shuffled_timesteps, fbins = dec.size() + # dec = self.time_unshuffle_operation(dec, bs, int(ch*shuffled_timesteps), fbins) + dec = self.freq_merge_subband(dec) + return dec + + def decode_to_waveform(self, dec): + from qa_mdt.audioldm_train.utilities.model_util import vocoder_infer + + if self.image_key == "fbank": + dec = dec.squeeze(1).permute(0, 2, 1) + wav_reconstruction = vocoder_infer(dec, self.vocoder) + elif self.image_key == "stft": + dec = dec.squeeze(1).permute(0, 2, 1) + wav_reconstruction = self.wave_decoder(dec) + return wav_reconstruction + + def visualize_latent(self, input): + import matplotlib.pyplot as plt + + # for i in range(10): + # zero_input = torch.zeros_like(input) - 11.59 + # zero_input[:,:,i * 16: i * 16 + 16,:16] += 13.59 + + # posterior = self.encode(zero_input) + # latent = posterior.sample() + # avg_latent = torch.mean(latent, dim=1)[0] + # plt.imshow(avg_latent.cpu().detach().numpy().T) + # plt.savefig("%s.png" % i) + # plt.close() + + 
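+        # Latent-space probe (for visualization/debugging): save the raw input, then mute
+        # bands of the spectrogram by filling them with the -11.59 floor value and compare
+        # the channel-averaged latents of the masked inputs.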
np.save("input.npy", input.cpu().detach().numpy()) + # zero_input = torch.zeros_like(input) - 11.59 + time_input = input.clone() + time_input[:, :, :, :32] *= 0 + time_input[:, :, :, :32] -= 11.59 + + np.save("time_input.npy", time_input.cpu().detach().numpy()) + + posterior = self.encode(time_input) + latent = posterior.sample() + np.save("time_latent.npy", latent.cpu().detach().numpy()) + avg_latent = torch.mean(latent, dim=1) + for i in range(avg_latent.size(0)): + plt.imshow(avg_latent[i].cpu().detach().numpy().T) + plt.savefig("freq_%s.png" % i) + plt.close() + + freq_input = input.clone() + freq_input[:, :, :512, :] *= 0 + freq_input[:, :, :512, :] -= 11.59 + + np.save("freq_input.npy", freq_input.cpu().detach().numpy()) + + posterior = self.encode(freq_input) + latent = posterior.sample() + np.save("freq_latent.npy", latent.cpu().detach().numpy()) + avg_latent = torch.mean(latent, dim=1) + for i in range(avg_latent.size(0)): + plt.imshow(avg_latent[i].cpu().detach().numpy().T) + plt.savefig("time_%s.png" % i) + plt.close() + + def forward(self, input, sample_posterior=True): + posterior = self.encode(input) + if sample_posterior: + z = posterior.sample() + else: + z = posterior.mode() + + if self.flag_first_run: + print("Latent size: ", z.size()) + self.flag_first_run = False + + dec = self.decode(z) + + return dec, posterior + + def get_input(self, batch): + fname, text, label_indices, waveform, stft, fbank = ( + batch["fname"], + batch["text"], + batch["label_vector"], + batch["waveform"], + batch["stft"], + batch["log_mel_spec"], + ) + # if(self.time_shuffle != 1): + # if(fbank.size(1) % self.time_shuffle != 0): + # pad_len = self.time_shuffle - (fbank.size(1) % self.time_shuffle) + # fbank = torch.nn.functional.pad(fbank, (0,0,0,pad_len)) + + ret = {} + + ret["fbank"], ret["stft"], ret["fname"], ret["waveform"] = ( + fbank.unsqueeze(1), + stft.unsqueeze(1), + fname, + waveform.unsqueeze(1), + ) + + return ret + + # def time_shuffle_operation(self, fbank): + # if(self.time_shuffle == 1): + # return fbank + + # shuffled_fbank = [] + # for i in range(self.time_shuffle): + # shuffled_fbank.append(fbank[:,:, i::self.time_shuffle,:]) + # return torch.cat(shuffled_fbank, dim=1) + + # def time_unshuffle_operation(self, shuffled_fbank, bs, timesteps, fbins): + # if(self.time_shuffle == 1): + # return shuffled_fbank + + # buffer = torch.zeros((bs, 1, timesteps, fbins)).to(shuffled_fbank.device) + # for i in range(self.time_shuffle): + # buffer[:,0,i::self.time_shuffle,:] = shuffled_fbank[:,i,:,:] + # return buffer + + def freq_split_subband(self, fbank): + if self.subband == 1 or self.image_key != "stft": + return fbank + + bs, ch, tstep, fbins = fbank.size() + + assert fbank.size(-1) % self.subband == 0 + assert ch == 1 + + return ( + fbank.squeeze(1) + .reshape(bs, tstep, self.subband, fbins // self.subband) + .permute(0, 2, 1, 3) + ) + + def freq_merge_subband(self, subband_fbank): + if self.subband == 1 or self.image_key != "stft": + return subband_fbank + assert subband_fbank.size(1) == self.subband # Channel dimension + bs, sub_ch, tstep, fbins = subband_fbank.size() + return subband_fbank.permute(0, 2, 1, 3).reshape(bs, tstep, -1).unsqueeze(1) + + def training_step(self, batch, batch_idx): + g_opt, d_opt = self.optimizers() + inputs_dict = self.get_input(batch) + inputs = inputs_dict[self.image_key] + waveform = inputs_dict["waveform"] + + if batch_idx % 5000 == 0 and self.local_rank == 0: + print("Log train image") + self.log_images(inputs, waveform=waveform) + + reconstructions, 
posterior = self(inputs) + + if self.image_key == "stft": + rec_waveform = self.decode_to_waveform(reconstructions) + else: + rec_waveform = None + + # train the discriminator + # If working on waveform, inputs is STFT, reconstructions are the waveform + # If working on the melspec, inputs is melspec, reconstruction are also mel spec + discloss, log_dict_disc = self.loss( + inputs=inputs, + reconstructions=reconstructions, + posteriors=posterior, + waveform=waveform, + rec_waveform=rec_waveform, + optimizer_idx=1, + global_step=self.global_step, + last_layer=self.get_last_layer(), + split="train", + ) + + self.log( + "discloss", + discloss, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=True, + ) + self.log_dict( + log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=False + ) + d_opt.zero_grad() + self.manual_backward(discloss) + d_opt.step() + + self.log( + "train_step", + self.train_step, + prog_bar=False, + logger=False, + on_step=True, + on_epoch=False, + ) + + self.log( + "global_step", + float(self.global_step), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + + aeloss, log_dict_ae = self.loss( + inputs=inputs, + reconstructions=reconstructions, + posteriors=posterior, + waveform=waveform, + rec_waveform=rec_waveform, + optimizer_idx=0, + global_step=self.global_step, + last_layer=self.get_last_layer(), + split="train", + ) + self.log( + "aeloss", + aeloss, + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + self.log( + "posterior_std", + torch.mean(posterior.var), + prog_bar=True, + logger=True, + on_step=True, + on_epoch=False, + ) + self.log_dict( + log_dict_ae, prog_bar=True, logger=True, on_step=True, on_epoch=False + ) + + self.train_step += 1 + g_opt.zero_grad() + self.manual_backward(aeloss) + g_opt.step() + + def validation_step(self, batch, batch_idx): + inputs_dict = self.get_input(batch) + inputs = inputs_dict[self.image_key] + waveform = inputs_dict["waveform"] + + if batch_idx <= 3: + print("Log val image") + self.log_images(inputs, train=False, waveform=waveform) + + reconstructions, posterior = self(inputs) + + if self.image_key == "stft": + rec_waveform = self.decode_to_waveform(reconstructions) + else: + rec_waveform = None + + aeloss, log_dict_ae = self.loss( + inputs=inputs, + reconstructions=reconstructions, + posteriors=posterior, + waveform=waveform, + rec_waveform=rec_waveform, + optimizer_idx=0, + global_step=self.global_step, + last_layer=self.get_last_layer(), + split="val", + ) + + discloss, log_dict_disc = self.loss( + inputs=inputs, + reconstructions=reconstructions, + posteriors=posterior, + waveform=waveform, + rec_waveform=rec_waveform, + optimizer_idx=1, + global_step=self.global_step, + last_layer=self.get_last_layer(), + split="val", + ) + + self.log_dict(log_dict_ae) + self.log_dict(log_dict_disc) + return self.log_dict + + def test_step(self, batch, batch_idx): + inputs_dict = self.get_input(batch) + inputs = inputs_dict[self.image_key] + waveform = inputs_dict["waveform"] + fnames = inputs_dict["fname"] + + reconstructions, posterior = self(inputs) + save_path = os.path.join( + self.get_log_dir(), "autoencoder_result_audiocaps", str(self.global_step) + ) + + if self.image_key == "stft": + wav_prediction = self.decode_to_waveform(reconstructions) + wav_original = waveform + self.save_wave( + wav_prediction, fnames, os.path.join(save_path, "stft_wav_prediction") + ) + else: + wav_vocoder_gt, wav_prediction = synth_one_sample( + inputs.squeeze(1), + reconstructions.squeeze(1), + 
labels="validation", + vocoder=self.vocoder, + ) + self.save_wave( + wav_vocoder_gt, fnames, os.path.join(save_path, "fbank_vocoder_gt_wave") + ) + self.save_wave( + wav_prediction, fnames, os.path.join(save_path, "fbank_wav_prediction") + ) + + def save_wave(self, batch_wav, fname, save_dir): + os.makedirs(save_dir, exist_ok=True) + + for wav, name in zip(batch_wav, fname): + name = os.path.basename(name) + + sf.write(os.path.join(save_dir, name), wav, samplerate=self.sampling_rate) + + def configure_optimizers(self): + lr = self.learning_rate + params = ( + list(self.encoder.parameters()) + + list(self.decoder.parameters()) + + list(self.quant_conv.parameters()) + + list(self.post_quant_conv.parameters()) + ) + + if self.image_key == "stft": + params += list(self.wave_decoder.parameters()) + + opt_ae = torch.optim.Adam(params, lr=lr, betas=(0.5, 0.9)) + + if self.image_key == "fbank": + disc_params = self.loss.discriminator.parameters() + elif self.image_key == "stft": + disc_params = itertools.chain( + self.loss.msd.parameters(), self.loss.mpd.parameters() + ) + + opt_disc = torch.optim.Adam(disc_params, lr=lr, betas=(0.5, 0.9)) + return [opt_ae, opt_disc], [] + + def get_last_layer(self): + return self.decoder.conv_out.weight + + @torch.no_grad() + def log_images(self, batch, train=True, only_inputs=False, waveform=None, **kwargs): + log = dict() + x = batch.to(self.device) + if not only_inputs: + xrec, posterior = self(x) + log["samples"] = self.decode(posterior.sample()) + log["reconstructions"] = xrec + + log["inputs"] = x + wavs = self._log_img(log, train=train, index=0, waveform=waveform) + return wavs + + def _log_img(self, log, train=True, index=0, waveform=None): + images_input = self.tensor2numpy(log["inputs"][index, 0]).T + images_reconstruct = self.tensor2numpy(log["reconstructions"][index, 0]).T + images_samples = self.tensor2numpy(log["samples"][index, 0]).T + + if train: + name = "train" + else: + name = "val" + + if self.logger is not None: + self.logger.log_image( + "img_%s" % name, + [images_input, images_reconstruct, images_samples], + caption=["input", "reconstruct", "samples"], + ) + + inputs, reconstructions, samples = ( + log["inputs"], + log["reconstructions"], + log["samples"], + ) + + if self.image_key == "fbank": + wav_original, wav_prediction = synth_one_sample( + inputs[index], + reconstructions[index], + labels="validation", + vocoder=self.vocoder, + ) + wav_original, wav_samples = synth_one_sample( + inputs[index], samples[index], labels="validation", vocoder=self.vocoder + ) + wav_original, wav_samples, wav_prediction = ( + wav_original[0], + wav_samples[0], + wav_prediction[0], + ) + elif self.image_key == "stft": + wav_prediction = ( + self.decode_to_waveform(reconstructions)[index, 0] + .cpu() + .detach() + .numpy() + ) + wav_samples = ( + self.decode_to_waveform(samples)[index, 0].cpu().detach().numpy() + ) + wav_original = waveform[index, 0].cpu().detach().numpy() + + if self.logger is not None: + self.logger.experiment.log( + { + "original_%s" + % name: wandb.Audio( + wav_original, caption="original", sample_rate=self.sampling_rate + ), + "reconstruct_%s" + % name: wandb.Audio( + wav_prediction, + caption="reconstruct", + sample_rate=self.sampling_rate, + ), + "samples_%s" + % name: wandb.Audio( + wav_samples, caption="samples", sample_rate=self.sampling_rate + ), + } + ) + + return wav_original, wav_prediction, wav_samples + + def tensor2numpy(self, tensor): + return tensor.cpu().detach().numpy() + + def to_rgb(self, x): + assert self.image_key == 
"segmentation" + if not hasattr(self, "colorize"): + self.register_buffer("colorize", torch.randn(3, x.shape[1], 1, 1).to(x)) + x = F.conv2d(x, weight=self.colorize) + x = 2.0 * (x - x.min()) / (x.max() - x.min()) - 1.0 + return x + + +class IdentityFirstStage(torch.nn.Module): + def __init__(self, *args, vq_interface=False, **kwargs): + self.vq_interface = vq_interface # TODO: Should be true by default but check to not break older stuff + super().__init__() + + def encode(self, x, *args, **kwargs): + return x + + def decode(self, x, *args, **kwargs): + return x + + def quantize(self, x, *args, **kwargs): + if self.vq_interface: + return x, None, [None, None, None] + return x + + def forward(self, x, *args, **kwargs): + return x diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__init__.py b/qa_mdt/audioldm_train/modules/phoneme_encoder/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..244cdedc3e3f1a652631b0561118ccdeabcca109 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/__init__.cpython-38.pyc b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..639a0b9996f711f3aa7b8a09ce9175843dcd5cce Binary files /dev/null and b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/__init__.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/attentions.cpython-310.pyc b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/attentions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2bcf522a98278855471e3b058f19b470d1892f39 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/attentions.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/attentions.cpython-38.pyc b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/attentions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ede6eab6cbda61e95028d8f5ef0b84f8af054f83 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/attentions.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/commons.cpython-310.pyc b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/commons.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..359028b5c3c61d5a51af286b971c854648432f44 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/commons.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/commons.cpython-38.pyc b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/commons.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8125c48ea6a4c2425653584534a32f971a9d941e Binary files /dev/null and b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/commons.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/encoder.cpython-310.pyc 
b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/encoder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8baf833e872f99cef613d65009e98dccefc51f73 Binary files /dev/null and b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/encoder.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/encoder.cpython-38.pyc b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/encoder.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3967f61f441a4f3fb003627b282cd0cc7aaf520e Binary files /dev/null and b/qa_mdt/audioldm_train/modules/phoneme_encoder/__pycache__/encoder.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/attentions.py b/qa_mdt/audioldm_train/modules/phoneme_encoder/attentions.py new file mode 100644 index 0000000000000000000000000000000000000000..43e9bc9c1a78a8a9d6d32015ca376bed667017d5 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/phoneme_encoder/attentions.py @@ -0,0 +1,432 @@ +import copy +import math +import numpy as np +import torch +from torch import nn +from torch.nn import functional as F + +import qa_mdt.audioldm_train.modules.phoneme_encoder.commons as commons + +LRELU_SLOPE = 0.1 + + +class LayerNorm(nn.Module): + def __init__(self, channels, eps=1e-5): + super().__init__() + self.channels = channels + self.eps = eps + + self.gamma = nn.Parameter(torch.ones(channels)) + self.beta = nn.Parameter(torch.zeros(channels)) + + def forward(self, x): + x = x.transpose(1, -1) + x = F.layer_norm(x, (self.channels,), self.gamma, self.beta, self.eps) + return x.transpose(1, -1) + + +class Encoder(nn.Module): + def __init__( + self, + hidden_channels, + filter_channels, + n_heads, + n_layers, + kernel_size=1, + p_dropout=0.0, + window_size=4, + **kwargs + ): + super().__init__() + self.hidden_channels = hidden_channels + self.filter_channels = filter_channels + self.n_heads = n_heads + self.n_layers = n_layers + self.kernel_size = kernel_size + self.p_dropout = p_dropout + self.window_size = window_size + + self.drop = nn.Dropout(p_dropout) + self.attn_layers = nn.ModuleList() + self.norm_layers_1 = nn.ModuleList() + self.ffn_layers = nn.ModuleList() + self.norm_layers_2 = nn.ModuleList() + for i in range(self.n_layers): + self.attn_layers.append( + MultiHeadAttention( + hidden_channels, + hidden_channels, + n_heads, + p_dropout=p_dropout, + window_size=window_size, + ) + ) + self.norm_layers_1.append(LayerNorm(hidden_channels)) + self.ffn_layers.append( + FFN( + hidden_channels, + hidden_channels, + filter_channels, + kernel_size, + p_dropout=p_dropout, + ) + ) + self.norm_layers_2.append(LayerNorm(hidden_channels)) + + def forward(self, x, x_mask): + attn_mask = x_mask.unsqueeze(2) * x_mask.unsqueeze(-1) + x = x * x_mask + for i in range(self.n_layers): + y = self.attn_layers[i](x, x, attn_mask) + y = self.drop(y) + x = self.norm_layers_1[i](x + y) + + y = self.ffn_layers[i](x, x_mask) + y = self.drop(y) + x = self.norm_layers_2[i](x + y) + x = x * x_mask + return x + + +class Decoder(nn.Module): + def __init__( + self, + hidden_channels, + filter_channels, + n_heads, + n_layers, + kernel_size=1, + p_dropout=0.0, + proximal_bias=False, + proximal_init=True, + **kwargs + ): + super().__init__() + self.hidden_channels = hidden_channels + self.filter_channels = filter_channels + self.n_heads = n_heads + self.n_layers = n_layers + self.kernel_size = kernel_size + self.p_dropout = p_dropout + self.proximal_bias = proximal_bias + 
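# NOTE: proximal_init copies the query projection weights into the key projection at init (see MultiHeadAttention below), and proximal_bias adds a log-distance penalty to the self-attention scores. +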
self.proximal_init = proximal_init + + self.drop = nn.Dropout(p_dropout) + self.self_attn_layers = nn.ModuleList() + self.norm_layers_0 = nn.ModuleList() + self.encdec_attn_layers = nn.ModuleList() + self.norm_layers_1 = nn.ModuleList() + self.ffn_layers = nn.ModuleList() + self.norm_layers_2 = nn.ModuleList() + for i in range(self.n_layers): + self.self_attn_layers.append( + MultiHeadAttention( + hidden_channels, + hidden_channels, + n_heads, + p_dropout=p_dropout, + proximal_bias=proximal_bias, + proximal_init=proximal_init, + ) + ) + self.norm_layers_0.append(LayerNorm(hidden_channels)) + self.encdec_attn_layers.append( + MultiHeadAttention( + hidden_channels, hidden_channels, n_heads, p_dropout=p_dropout + ) + ) + self.norm_layers_1.append(LayerNorm(hidden_channels)) + self.ffn_layers.append( + FFN( + hidden_channels, + hidden_channels, + filter_channels, + kernel_size, + p_dropout=p_dropout, + causal=True, + ) + ) + self.norm_layers_2.append(LayerNorm(hidden_channels)) + + def forward(self, x, x_mask, h, h_mask): + """ + x: decoder input + h: encoder output + """ + self_attn_mask = commons.subsequent_mask(x_mask.size(2)).to( + device=x.device, dtype=x.dtype + ) + encdec_attn_mask = h_mask.unsqueeze(2) * x_mask.unsqueeze(-1) + x = x * x_mask + for i in range(self.n_layers): + y = self.self_attn_layers[i](x, x, self_attn_mask) + y = self.drop(y) + x = self.norm_layers_0[i](x + y) + + y = self.encdec_attn_layers[i](x, h, encdec_attn_mask) + y = self.drop(y) + x = self.norm_layers_1[i](x + y) + + y = self.ffn_layers[i](x, x_mask) + y = self.drop(y) + x = self.norm_layers_2[i](x + y) + x = x * x_mask + return x + + +class MultiHeadAttention(nn.Module): + def __init__( + self, + channels, + out_channels, + n_heads, + p_dropout=0.0, + window_size=None, + heads_share=True, + block_length=None, + proximal_bias=False, + proximal_init=False, + ): + super().__init__() + assert channels % n_heads == 0 + + self.channels = channels + self.out_channels = out_channels + self.n_heads = n_heads + self.p_dropout = p_dropout + self.window_size = window_size + self.heads_share = heads_share + self.block_length = block_length + self.proximal_bias = proximal_bias + self.proximal_init = proximal_init + self.attn = None + + self.k_channels = channels // n_heads + self.conv_q = nn.Conv1d(channels, channels, 1) + self.conv_k = nn.Conv1d(channels, channels, 1) + self.conv_v = nn.Conv1d(channels, channels, 1) + self.conv_o = nn.Conv1d(channels, out_channels, 1) + self.drop = nn.Dropout(p_dropout) + + if window_size is not None: + n_heads_rel = 1 if heads_share else n_heads + rel_stddev = self.k_channels**-0.5 + self.emb_rel_k = nn.Parameter( + torch.randn(n_heads_rel, window_size * 2 + 1, self.k_channels) + * rel_stddev + ) + self.emb_rel_v = nn.Parameter( + torch.randn(n_heads_rel, window_size * 2 + 1, self.k_channels) + * rel_stddev + ) + + nn.init.xavier_uniform_(self.conv_q.weight) + nn.init.xavier_uniform_(self.conv_k.weight) + nn.init.xavier_uniform_(self.conv_v.weight) + if proximal_init: + with torch.no_grad(): + self.conv_k.weight.copy_(self.conv_q.weight) + self.conv_k.bias.copy_(self.conv_q.bias) + + def forward(self, x, c, attn_mask=None): + q = self.conv_q(x) + k = self.conv_k(c) + v = self.conv_v(c) + + x, self.attn = self.attention(q, k, v, mask=attn_mask) + + x = self.conv_o(x) + return x + + def attention(self, query, key, value, mask=None): + # reshape [b, d, t] -> [b, n_h, t, d_k] + b, d, t_s, t_t = (*key.size(), query.size(2)) + query = query.view(b, self.n_heads, self.k_channels, 
t_t).transpose(2, 3) + key = key.view(b, self.n_heads, self.k_channels, t_s).transpose(2, 3) + value = value.view(b, self.n_heads, self.k_channels, t_s).transpose(2, 3) + + scores = torch.matmul(query / math.sqrt(self.k_channels), key.transpose(-2, -1)) + if self.window_size is not None: + assert ( + t_s == t_t + ), "Relative attention is only available for self-attention." + key_relative_embeddings = self._get_relative_embeddings(self.emb_rel_k, t_s) + rel_logits = self._matmul_with_relative_keys( + query / math.sqrt(self.k_channels), key_relative_embeddings + ) + scores_local = self._relative_position_to_absolute_position(rel_logits) + scores = scores + scores_local + if self.proximal_bias: + assert t_s == t_t, "Proximal bias is only available for self-attention." + scores = scores + self._attention_bias_proximal(t_s).to( + device=scores.device, dtype=scores.dtype + ) + if mask is not None: + scores = scores.masked_fill(mask == 0, -1e4) + if self.block_length is not None: + assert ( + t_s == t_t + ), "Local attention is only available for self-attention." + block_mask = ( + torch.ones_like(scores) + .triu(-self.block_length) + .tril(self.block_length) + ) + scores = scores.masked_fill(block_mask == 0, -1e4) + p_attn = F.softmax(scores, dim=-1) # [b, n_h, t_t, t_s] + p_attn = self.drop(p_attn) + output = torch.matmul(p_attn, value) + if self.window_size is not None: + relative_weights = self._absolute_position_to_relative_position(p_attn) + value_relative_embeddings = self._get_relative_embeddings( + self.emb_rel_v, t_s + ) + output = output + self._matmul_with_relative_values( + relative_weights, value_relative_embeddings + ) + output = ( + output.transpose(2, 3).contiguous().view(b, d, t_t) + ) # [b, n_h, t_t, d_k] -> [b, d, t_t] + return output, p_attn + + def _matmul_with_relative_values(self, x, y): + """ + x: [b, h, l, m] + y: [h or 1, m, d] + ret: [b, h, l, d] + """ + ret = torch.matmul(x, y.unsqueeze(0)) + return ret + + def _matmul_with_relative_keys(self, x, y): + """ + x: [b, h, l, d] + y: [h or 1, m, d] + ret: [b, h, l, m] + """ + ret = torch.matmul(x, y.unsqueeze(0).transpose(-2, -1)) + return ret + + def _get_relative_embeddings(self, relative_embeddings, length): + max_relative_position = 2 * self.window_size + 1 + # Pad first before slice to avoid using cond ops. + pad_length = max(length - (self.window_size + 1), 0) + slice_start_position = max((self.window_size + 1) - length, 0) + slice_end_position = slice_start_position + 2 * length - 1 + if pad_length > 0: + padded_relative_embeddings = F.pad( + relative_embeddings, + commons.convert_pad_shape([[0, 0], [pad_length, pad_length], [0, 0]]), + ) + else: + padded_relative_embeddings = relative_embeddings + used_relative_embeddings = padded_relative_embeddings[ + :, slice_start_position:slice_end_position + ] + return used_relative_embeddings + + def _relative_position_to_absolute_position(self, x): + """ + x: [b, h, l, 2*l-1] + ret: [b, h, l, l] + """ + batch, heads, length, _ = x.size() + # Concat columns of pad to shift from relative to absolute indexing. + x = F.pad(x, commons.convert_pad_shape([[0, 0], [0, 0], [0, 0], [0, 1]])) + + # Concat extra elements so to add up to shape (len+1, 2*len-1). + x_flat = x.view([batch, heads, length * 2 * length]) + x_flat = F.pad( + x_flat, commons.convert_pad_shape([[0, 0], [0, 0], [0, length - 1]]) + ) + + # Reshape and slice out the padded elements. 
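+ # View as [b, h, l+1, 2*l-1]; keeping the first l rows and columns l-1 onward recovers absolute positions [b, h, l, l].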
+ x_final = x_flat.view([batch, heads, length + 1, 2 * length - 1])[ + :, :, :length, length - 1 : + ] + return x_final + + def _absolute_position_to_relative_position(self, x): + """ + x: [b, h, l, l] + ret: [b, h, l, 2*l-1] + """ + batch, heads, length, _ = x.size() + # padd along column + x = F.pad( + x, commons.convert_pad_shape([[0, 0], [0, 0], [0, 0], [0, length - 1]]) + ) + x_flat = x.view([batch, heads, length**2 + length * (length - 1)]) + # add 0's in the beginning that will skew the elements after reshape + x_flat = F.pad(x_flat, commons.convert_pad_shape([[0, 0], [0, 0], [length, 0]])) + x_final = x_flat.view([batch, heads, length, 2 * length])[:, :, :, 1:] + return x_final + + def _attention_bias_proximal(self, length): + """Bias for self-attention to encourage attention to close positions. + Args: + length: an integer scalar. + Returns: + a Tensor with shape [1, 1, length, length] + """ + r = torch.arange(length, dtype=torch.float32) + diff = torch.unsqueeze(r, 0) - torch.unsqueeze(r, 1) + return torch.unsqueeze(torch.unsqueeze(-torch.log1p(torch.abs(diff)), 0), 0) + + +class FFN(nn.Module): + def __init__( + self, + in_channels, + out_channels, + filter_channels, + kernel_size, + p_dropout=0.0, + activation=None, + causal=False, + ): + super().__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.filter_channels = filter_channels + self.kernel_size = kernel_size + self.p_dropout = p_dropout + self.activation = activation + self.causal = causal + + if causal: + self.padding = self._causal_padding + else: + self.padding = self._same_padding + + self.conv_1 = nn.Conv1d(in_channels, filter_channels, kernel_size) + self.conv_2 = nn.Conv1d(filter_channels, out_channels, kernel_size) + self.drop = nn.Dropout(p_dropout) + + def forward(self, x, x_mask): + x = self.conv_1(self.padding(x * x_mask)) + if self.activation == "gelu": + x = x * torch.sigmoid(1.702 * x) + else: + x = torch.relu(x) + x = self.drop(x) + x = self.conv_2(self.padding(x * x_mask)) + return x * x_mask + + def _causal_padding(self, x): + if self.kernel_size == 1: + return x + pad_l = self.kernel_size - 1 + pad_r = 0 + padding = [[0, 0], [0, 0], [pad_l, pad_r]] + x = F.pad(x, commons.convert_pad_shape(padding)) + return x + + def _same_padding(self, x): + if self.kernel_size == 1: + return x + pad_l = (self.kernel_size - 1) // 2 + pad_r = self.kernel_size // 2 + padding = [[0, 0], [0, 0], [pad_l, pad_r]] + x = F.pad(x, commons.convert_pad_shape(padding)) + return x diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/commons.py b/qa_mdt/audioldm_train/modules/phoneme_encoder/commons.py new file mode 100644 index 0000000000000000000000000000000000000000..21b446b6bd4dee16cbfbd26fb97d69110b410350 --- /dev/null +++ b/qa_mdt/audioldm_train/modules/phoneme_encoder/commons.py @@ -0,0 +1,163 @@ +import math +import numpy as np +import torch +from torch import nn +from torch.nn import functional as F + + +def init_weights(m, mean=0.0, std=0.01): + classname = m.__class__.__name__ + if classname.find("Conv") != -1: + m.weight.data.normal_(mean, std) + + +def get_padding(kernel_size, dilation=1): + return int((kernel_size * dilation - dilation) / 2) + + +def convert_pad_shape(pad_shape): + l = pad_shape[::-1] + pad_shape = [item for sublist in l for item in sublist] + return pad_shape + + +def intersperse(lst, item): + result = [item] * (len(lst) * 2 + 1) + result[1::2] = lst + return result + + +def kl_divergence(m_p, logs_p, m_q, logs_q): + """KL(P||Q)""" + kl = (logs_q - logs_p) - 0.5 
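+ # Closed-form KL between diagonal Gaussians: logs_q - logs_p - 0.5 + 0.5 * (exp(2*logs_p) + (m_p - m_q)^2) * exp(-2*logs_q)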
+ kl += ( + 0.5 * (torch.exp(2.0 * logs_p) + ((m_p - m_q) ** 2)) * torch.exp(-2.0 * logs_q) + ) + return kl + + +def rand_gumbel(shape): + """Sample from the Gumbel distribution, protect from overflows.""" + uniform_samples = torch.rand(shape) * 0.99998 + 0.00001 + return -torch.log(-torch.log(uniform_samples)) + + +def rand_gumbel_like(x): + g = rand_gumbel(x.size()).to(dtype=x.dtype, device=x.device) + return g + + +def slice_segments(x, ids_str, segment_size=4): + ret = torch.zeros_like(x[:, :, :segment_size]) + for i in range(x.size(0)): + idx_str = ids_str[i] + idx_end = idx_str + segment_size + ret[i] = x[i, :, idx_str:idx_end] + return ret + + +def rand_slice_segments(x, x_lengths=None, segment_size=4): + b, d, t = x.size() + if x_lengths is None: + x_lengths = t + ids_str_max = x_lengths - segment_size + 1 + ids_str = (torch.rand([b]).to(device=x.device) * ids_str_max).to(dtype=torch.long) + ret = slice_segments(x, ids_str, segment_size) + return ret, ids_str + + +def get_timing_signal_1d(length, channels, min_timescale=1.0, max_timescale=1.0e4): + position = torch.arange(length, dtype=torch.float) + num_timescales = channels // 2 + log_timescale_increment = math.log(float(max_timescale) / float(min_timescale)) / ( + num_timescales - 1 + ) + inv_timescales = min_timescale * torch.exp( + torch.arange(num_timescales, dtype=torch.float) * -log_timescale_increment + ) + scaled_time = position.unsqueeze(0) * inv_timescales.unsqueeze(1) + signal = torch.cat([torch.sin(scaled_time), torch.cos(scaled_time)], 0) + signal = F.pad(signal, [0, 0, 0, channels % 2]) + signal = signal.view(1, channels, length) + return signal + + +def add_timing_signal_1d(x, min_timescale=1.0, max_timescale=1.0e4): + b, channels, length = x.size() + signal = get_timing_signal_1d(length, channels, min_timescale, max_timescale) + return x + signal.to(dtype=x.dtype, device=x.device) + + +def cat_timing_signal_1d(x, min_timescale=1.0, max_timescale=1.0e4, axis=1): + b, channels, length = x.size() + signal = get_timing_signal_1d(length, channels, min_timescale, max_timescale) + return torch.cat([x, signal.to(dtype=x.dtype, device=x.device)], axis) + + +def subsequent_mask(length): + mask = torch.tril(torch.ones(length, length)).unsqueeze(0).unsqueeze(0) + return mask + + +@torch.jit.script +def fused_add_tanh_sigmoid_multiply(input_a, input_b, n_channels): + n_channels_int = n_channels[0] + in_act = input_a + input_b + t_act = torch.tanh(in_act[:, :n_channels_int, :]) + s_act = torch.sigmoid(in_act[:, n_channels_int:, :]) + acts = t_act * s_act + return acts + + +def convert_pad_shape(pad_shape): + l = pad_shape[::-1] + pad_shape = [item for sublist in l for item in sublist] + return pad_shape + + +def shift_1d(x): + x = F.pad(x, convert_pad_shape([[0, 0], [0, 0], [1, 0]]))[:, :, :-1] + return x + + +def sequence_mask(length, max_length=None): + if max_length is None: + max_length = length.max() + x = torch.arange(max_length, dtype=length.dtype, device=length.device) + return x.unsqueeze(0) < length.unsqueeze(1) + + +def generate_path(duration, mask): + """ + duration: [b, 1, t_x] + mask: [b, 1, t_y, t_x] + """ + device = duration.device + + b, _, t_y, t_x = mask.shape + cum_duration = torch.cumsum(duration, -1) + + cum_duration_flat = cum_duration.view(b * t_x) + path = sequence_mask(cum_duration_flat, t_y).to(mask.dtype) + path = path.view(b, t_x, t_y) + path = path - F.pad(path, convert_pad_shape([[0, 0], [1, 0], [0, 0]]))[:, :-1] + path = path.unsqueeze(1).transpose(2, 3) * mask + return path + + +def 
clip_grad_value_(parameters, clip_value, norm_type=2): + if isinstance(parameters, torch.Tensor): + parameters = [parameters] + parameters = list(filter(lambda p: p.grad is not None, parameters)) + norm_type = float(norm_type) + if clip_value is not None: + clip_value = float(clip_value) + + total_norm = 0 + for p in parameters: + param_norm = p.grad.data.norm(norm_type) + total_norm += param_norm.item() ** norm_type + if clip_value is not None: + p.grad.data.clamp_(min=-clip_value, max=clip_value) + total_norm = total_norm ** (1.0 / norm_type) + return total_norm diff --git a/qa_mdt/audioldm_train/modules/phoneme_encoder/encoder.py b/qa_mdt/audioldm_train/modules/phoneme_encoder/encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..989e839cea4a79aea7703dc78a52f3b0d3e0135e --- /dev/null +++ b/qa_mdt/audioldm_train/modules/phoneme_encoder/encoder.py @@ -0,0 +1,52 @@ +import copy +import math +import torch +from torch import nn +from torch.nn import functional as F + +import qa_mdt.audioldm_train.modules.phoneme_encoder.commons as commons +import qa_mdt.audioldm_train.modules.phoneme_encoder.attentions as attentions + + +class TextEncoder(nn.Module): + def __init__( + self, + n_vocab, + out_channels=192, + hidden_channels=192, + filter_channels=768, + n_heads=2, + n_layers=6, + kernel_size=3, + p_dropout=0.1, + ): + super().__init__() + self.n_vocab = n_vocab + self.out_channels = out_channels + self.hidden_channels = hidden_channels + self.filter_channels = filter_channels + self.n_heads = n_heads + self.n_layers = n_layers + self.kernel_size = kernel_size + self.p_dropout = p_dropout + + self.emb = nn.Embedding(n_vocab, hidden_channels) + nn.init.normal_(self.emb.weight, 0.0, hidden_channels**-0.5) + + self.encoder = attentions.Encoder( + hidden_channels, filter_channels, n_heads, n_layers, kernel_size, p_dropout + ) + self.proj = nn.Conv1d(hidden_channels, out_channels * 2, 1) + + def forward(self, x, x_lengths): + x = self.emb(x) * math.sqrt(self.hidden_channels) # [b, t, h] + x = torch.transpose(x, 1, -1) # [b, h, t] + x_mask = torch.unsqueeze(commons.sequence_mask(x_lengths, x.size(2)), 1).to( + x.dtype + ) + + x = self.encoder(x * x_mask, x_mask) + stats = self.proj(x) * x_mask + + m, logs = torch.split(stats, self.out_channels, dim=1) + return x, m, logs, x_mask diff --git a/qa_mdt/audioldm_train/train/autoencoder.py b/qa_mdt/audioldm_train/train/autoencoder.py new file mode 100644 index 0000000000000000000000000000000000000000..19f867178643a44fc81c7ce9a8ced9fea3d98cce --- /dev/null +++ b/qa_mdt/audioldm_train/train/autoencoder.py @@ -0,0 +1,154 @@ +# Author: Haohe Liu +# Email: haoheliu@gmail.com +# Date: 11 Feb 2023 + +import sys + +sys.path.append("src") + +import os +import wandb + +import argparse +import yaml +import torch +from pytorch_lightning.strategies.ddp import DDPStrategy +from qa_mdt.audioldm_train.utilities.data.dataset import AudioDataset +from torch.utils.data import DataLoader +from pytorch_lightning.loggers import WandbLogger +from pytorch_lightning import Trainer +from qa_mdt.audioldm_train.modules.latent_encoder.autoencoder import AutoencoderKL +from pytorch_lightning.callbacks import ModelCheckpoint +from qa_mdt.audioldm_train.utilities.tools import get_restore_step + + +def listdir_nohidden(path): + for f in os.listdir(path): + if not f.startswith("."): + yield f + + +def main(configs, exp_group_name, exp_name): + if "precision" in configs.keys(): + torch.set_float32_matmul_precision(configs["precision"]) + batch_size = 
config_yaml["model"]["params"]["batchsize"] + log_path = config_yaml["log_directory"] + + if "dataloader_add_ons" in configs["data"].keys(): + dataloader_add_ons = configs["data"]["dataloader_add_ons"] + else: + dataloader_add_ons = [] + + dataset = AudioDataset(config_yaml, split="train", add_ons=dataloader_add_ons) + + loader = DataLoader( + dataset, batch_size=batch_size, num_workers=8, pin_memory=True, shuffle=True + ) + + print( + "The length of the dataset is %s, the length of the dataloader is %s, the batchsize is %s" + % (len(dataset), len(loader), batch_size) + ) + + val_dataset = AudioDataset(config_yaml, split="val", add_ons=dataloader_add_ons) + + val_loader = DataLoader( + val_dataset, + batch_size=batch_size, + num_workers=8, + shuffle=True, + ) + + model = AutoencoderKL( + ddconfig=config_yaml["model"]["params"]["ddconfig"], + lossconfig=config_yaml["model"]["params"]["lossconfig"], + embed_dim=config_yaml["model"]["params"]["embed_dim"], + image_key=config_yaml["model"]["params"]["image_key"], + base_learning_rate=config_yaml["model"]["base_learning_rate"], + subband=config_yaml["model"]["params"]["subband"], + sampling_rate=config_yaml["preprocessing"]["audio"]["sampling_rate"], + ) + + try: + config_reload_from_ckpt = configs["reload_from_ckpt"] + except: + config_reload_from_ckpt = None + + checkpoint_path = os.path.join(log_path, exp_group_name, exp_name, "checkpoints") + + checkpoint_callback = ModelCheckpoint( + dirpath=checkpoint_path, + monitor="global_step", + mode="max", + filename="checkpoint-{global_step:.0f}", + every_n_train_steps=5000, + save_top_k=config_yaml["step"]["save_top_k"], + auto_insert_metric_name=False, + save_last=True, + ) + + wandb_path = os.path.join(log_path, exp_group_name, exp_name) + + model.set_log_dir(log_path, exp_group_name, exp_name) + + os.makedirs(checkpoint_path, exist_ok=True) + + if len(os.listdir(checkpoint_path)) > 0: + print("Load checkpoint from path: %s" % checkpoint_path) + restore_step, n_step = get_restore_step(checkpoint_path) + resume_from_checkpoint = os.path.join(checkpoint_path, restore_step) + print("Resume from checkpoint", resume_from_checkpoint) + elif config_reload_from_ckpt is not None: + resume_from_checkpoint = config_reload_from_ckpt + print("Reload ckpt specified in the config file %s" % resume_from_checkpoint) + else: + print("Train from scratch") + resume_from_checkpoint = None + + devices = torch.cuda.device_count() + + wandb_logger = WandbLogger( + save_dir=wandb_path, + project=config_yaml["project"], + config=config_yaml, + name="%s/%s" % (exp_group_name, exp_name), + ) + + trainer = Trainer( + accelerator="gpu", + devices=devices, + logger=wandb_logger, + limit_val_batches=100, + callbacks=[checkpoint_callback], + strategy=DDPStrategy(find_unused_parameters=True), + val_check_interval=2000, + ) + + # TRAINING + trainer.fit(model, loader, val_loader, ckpt_path=resume_from_checkpoint) + + # EVALUTION + # trainer.test(model, test_loader, ckpt_path=resume_from_checkpoint) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + "-c", + "--autoencoder_config", + type=str, + required=True, + help="path to autoencoder config .yam", + ) + + args = parser.parse_args() + + config_yaml = args.autoencoder_config + exp_name = os.path.basename(config_yaml.split(".")[0]) + exp_group_name = os.path.basename(os.path.dirname(config_yaml)) + + config_yaml = os.path.join(config_yaml) + + config_yaml = yaml.load(open(config_yaml, "r"), Loader=yaml.FullLoader) + + main(config_yaml, 
exp_group_name, exp_name) diff --git a/qa_mdt/audioldm_train/train/latent_diffusion.py b/qa_mdt/audioldm_train/train/latent_diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..9f88d5915b33fc23a5ed055bdf5d9270dd962391 --- /dev/null +++ b/qa_mdt/audioldm_train/train/latent_diffusion.py @@ -0,0 +1,257 @@ +import sys + +# sys.path.append("src") +import shutil +import os + +os.environ["TOKENIZERS_PARALLELISM"] = "true" + +import argparse +import yaml +import torch + + +from tqdm import tqdm +from pytorch_lightning.strategies.ddp import DDPStrategy + + +from qa_mdt.audioldm_train.modules.latent_diffusion.ddpm import LatentDiffusion + + +from torch.utils.data import WeightedRandomSampler +from torch.utils.data import DataLoader +from pytorch_lightning import Trainer, seed_everything +from pytorch_lightning.callbacks import ModelCheckpoint +from pytorch_lightning.loggers import WandbLogger + + +from qa_mdt.audioldm_train.utilities.tools import ( + listdir_nohidden, + get_restore_step, + copy_test_subset_data, +) +import wandb +from qa_mdt.audioldm_train.utilities.model_util import instantiate_from_config +import logging + +logging.basicConfig(level=logging.WARNING) + + + +def convert_path(path): + parts = path.decode().split("/")[-4:] + base = "" + result = "/".join(parts) + +def print_on_rank0(msg): + if torch.distributed.get_rank() == 0: + print(msg) + + +def main(configs, config_yaml_path, exp_group_name, exp_name, perform_validation): + print("MAIN START") + # cpth = "/train20/intern/permanent/changli7/dataset_ptm/test_dataset/dataset/audioset/zip_audios/unbalanced_train_segments/unbalanced_train_segments_part9/Y7fmOlUlwoNg.wav" + # convert_path(cpth) + if "seed" in configs.keys(): + seed_everything(configs["seed"]) + else: + print("SEED EVERYTHING TO 0") + seed_everything(1234) + + if "precision" in configs.keys(): + torch.set_float32_matmul_precision( + configs["precision"] + ) # highest, high, medium + + log_path = configs["log_directory"] + batch_size = configs["model"]["params"]["batchsize"] + + train_lmdb_path = configs["train_path"]["train_lmdb_path"] + train_key_path = [_ + '/data_key.key' for _ in train_lmdb_path] + + val_lmdb_path = configs["val_path"]["val_lmdb_path"] + val_key_path = configs["val_path"]["val_key_path"] + + + #try: + mos_path = configs["mos_path"] + from qa_mdt.audioldm_train.utilities.data.hhhh import AudioDataset + dataset = AudioDataset(config=configs, lmdb_path=train_lmdb_path, key_path=train_key_path, mos_path=mos_path) + + + loader = DataLoader( + dataset, + batch_size=batch_size, + num_workers=8, + pin_memory=True, + shuffle=True, + ) + + + + print( + "The length of the dataset is %s, the length of the dataloader is %s, the batchsize is %s" + % (len(dataset), len(loader), batch_size) + ) + try: + val_dataset = AudioDataset(config=configs, lmdb_path=val_lmdb_path, key_path=val_key_path, mos_path=mos_path) + except: + val_dataset = AudioDataset(config=configs, lmdb_path=val_lmdb_path, key_path=val_key_path) + + val_loader = DataLoader( + val_dataset, + batch_size=8, + ) + + # Copy test data + import os + test_data_subset_folder = os.path.join( + os.path.dirname(configs["log_directory"]), + "testset_data", + "tmp", + ) + os.makedirs(test_data_subset_folder, exist_ok=True) + # copy to test: + # import pdb + # pdb.set_trace() + # for i in range(len(val_dataset.keys)): + # key_tmp = val_dataset.keys[i].decode() + # cmd = "cp {} {}".format(key_tmp, os.path.join(test_data_subset_folder)) + # os.system(cmd) + + try: + 
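+ # "reload_from_ckpt" is optional in the config; fall back to None when it is absent.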
config_reload_from_ckpt = configs["reload_from_ckpt"] + except: + config_reload_from_ckpt = None + + try: + limit_val_batches = configs["step"]["limit_val_batches"] + except: + limit_val_batches = None + + + validation_every_n_epochs = configs["step"]["validation_every_n_epochs"] + save_checkpoint_every_n_steps = configs["step"]["save_checkpoint_every_n_steps"] + max_steps = configs["step"]["max_steps"] + save_top_k = configs["step"]["save_top_k"] + + checkpoint_path = os.path.join(log_path, exp_group_name, exp_name, "checkpoints") + + wandb_path = os.path.join(log_path, exp_group_name, exp_name) + + checkpoint_callback = ModelCheckpoint( + dirpath=checkpoint_path, + monitor="global_step", + mode="max", + filename="checkpoint-fad-{val/frechet_inception_distance:.2f}-global_step={global_step:.0f}", + every_n_train_steps=save_checkpoint_every_n_steps, + save_top_k=save_top_k, + auto_insert_metric_name=False, + save_last=False, + ) + + os.makedirs(checkpoint_path, exist_ok=True) + # shutil.copy(config_yaml_path, wandb_path) + + if len(os.listdir(checkpoint_path)) > 0: + print("Load checkpoint from path: %s" % checkpoint_path) + restore_step, n_step = get_restore_step(checkpoint_path) + resume_from_checkpoint = os.path.join(checkpoint_path, restore_step) + print("Resume from checkpoint", resume_from_checkpoint) + elif config_reload_from_ckpt is not None: + resume_from_checkpoint = config_reload_from_ckpt + print("Reload ckpt specified in the config file %s" % resume_from_checkpoint) + else: + print("Train from scratch") + resume_from_checkpoint = None + + devices = torch.cuda.device_count() + latent_diffusion = instantiate_from_config(configs["model"]) + latent_diffusion.set_log_dir(log_path, exp_group_name, exp_name) + + wandb_logger = WandbLogger( + save_dir=wandb_path, + project=configs["project"], + config=configs, + name="%s/%s" % (exp_group_name, exp_name), + ) + + latent_diffusion.test_data_subset_path = test_data_subset_folder + + print("==> Save checkpoint every %s steps" % save_checkpoint_every_n_steps) + print("==> Perform validation every %s epochs" % validation_every_n_epochs) + + trainer = Trainer( + accelerator="auto", + devices="auto", + logger=wandb_logger, + max_steps=max_steps, + num_sanity_val_steps=1, + limit_val_batches=limit_val_batches, + check_val_every_n_epoch=validation_every_n_epochs, + strategy=DDPStrategy(find_unused_parameters=True), + gradient_clip_val=2.0,callbacks=[checkpoint_callback],num_nodes=1, + ) + + trainer.fit(latent_diffusion, loader, val_loader, ckpt_path=resume_from_checkpoint) + + ################################################################################################################ + # if(resume_from_checkpoint is not None): + # ckpt = torch.load(resume_from_checkpoint)["state_dict"] + + # key_not_in_model_state_dict = [] + # size_mismatch_keys = [] + # state_dict = latent_diffusion.state_dict() + # print("Filtering key for reloading:", resume_from_checkpoint) + # print("State dict key size:", len(list(state_dict.keys())), len(list(ckpt.keys()))) + # for key in tqdm(list(ckpt.keys())): + # if(key not in state_dict.keys()): + # key_not_in_model_state_dict.append(key) + # del ckpt[key] + # continue + # if(state_dict[key].size() != ckpt[key].size()): + # del ckpt[key] + # size_mismatch_keys.append(key) + + # if(len(key_not_in_model_state_dict) != 0 or len(size_mismatch_keys) != 0): + # print("⛳", end=" ") + + # print("==> Warning: The following key in the checkpoint is not presented in the model:", key_not_in_model_state_dict) + # print("==> 
Warning: These keys have different size between checkpoint and current model: ", size_mismatch_keys) + + # latent_diffusion.load_state_dict(ckpt, strict=False) + + # if(perform_validation): + # trainer.validate(latent_diffusion, val_loader) + + # trainer.fit(latent_diffusion, loader, val_loader) + ################################################################################################################ + + +if __name__ == "__main__": + print("ok") + parser = argparse.ArgumentParser() + parser.add_argument( + "-c", + "--config_yaml", + type=str, + required=False, + help="path to config .yaml file", + ) + parser.add_argument("--val", action="store_true") + args = parser.parse_args() + perform_validation = args.val + assert torch.cuda.is_available(), "CUDA is not available" + config_yaml = args.config_yaml + exp_name = os.path.basename(config_yaml.split(".")[0]) + exp_group_name = os.path.basename(os.path.dirname(config_yaml)) + config_yaml_path = os.path.join(config_yaml) + config_yaml = yaml.load(open(config_yaml_path, "r"), Loader=yaml.FullLoader) + + if perform_validation: + config_yaml["model"]["params"]["cond_stage_config"][ + "crossattn_audiomae_generated" + ]["params"]["use_gt_mae_output"] = False + config_yaml["step"]["limit_val_batches"] = None + + main(config_yaml, config_yaml_path, exp_group_name, exp_name, perform_validation) diff --git a/qa_mdt/audioldm_train/utilities/.DS_Store b/qa_mdt/audioldm_train/utilities/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..f9935eea7ec75b43fc30129007ca2d90b4560aac Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/.DS_Store differ diff --git a/qa_mdt/audioldm_train/utilities/__init__.py b/qa_mdt/audioldm_train/utilities/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..de9326c827b417c031a626f963f83ac5f97339e4 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/__init__.py @@ -0,0 +1,3 @@ +from .tools import * +from .data import * +from .model_util import * diff --git a/qa_mdt/audioldm_train/utilities/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b037b40dfa0c2baa738d45ccb42fd2c90a353ff Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/__pycache__/diffusion_util.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/__pycache__/diffusion_util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6fd1a80b208236e631c32ae6372ed1b8f7737933 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/__pycache__/diffusion_util.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/__pycache__/model_util.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/__pycache__/model_util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7e3ba29df1fb0ec21b7acf0d1ef03240cc35 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/__pycache__/model_util.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/__pycache__/tools.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/__pycache__/tools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..293c4fad07c180ac71ab50dee2a0fec354894c68 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/__pycache__/tools.cpython-310.pyc differ diff --git 
a/qa_mdt/audioldm_train/utilities/audio/__init__.py b/qa_mdt/audioldm_train/utilities/audio/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c39f9243d2d7b4fc5dea18f56b153b0f5c5bbd4c --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/audio/__init__.py @@ -0,0 +1,3 @@ +from .audio_processing import * +from .stft import * +from .tools import * diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f1cbdeae80632982d98b13d7d5d79cdfaeee09f6 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-38.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d6ad1d9614ba1934d1c3c19f4e84c1ef8ba7826 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-39.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ecf228a11be8669193a53d09989f79615f94f89a Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/__init__.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f61af9afd7fbc21fe548ce0f9a6d69a1537f9125 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-38.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ee73090a118ad05dd5b1631d583c5b7c6db9e60 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-39.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..04139d3e8c908cccdb6fdbaa2a511faa1d447271 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/audio_processing.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed709bd478dd32a9c78eb81c3f59e34d72db857d Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-38.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ff9b5036fea4947cbe747832b6896d5e41c73016 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-38.pyc differ diff --git 
a/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-39.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c4a50ef562d1b731bbb60a23db12148e9b0813b6 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/stft.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..75107b27d52b64a5ad65db40c89021d3e012ddb5 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-38.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7248eea60a12ca53bcb82c479563bdec2df811eb Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-39.pyc b/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc972b918aeedde0874f31302ae5d5d5b142a545 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/audio/__pycache__/tools.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/audio/audio_processing.py b/qa_mdt/audioldm_train/utilities/audio/audio_processing.py new file mode 100644 index 0000000000000000000000000000000000000000..77a4057aa82f226f68474f4c2a19eba84510d663 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/audio/audio_processing.py @@ -0,0 +1,100 @@ +import torch +import numpy as np +import librosa.util as librosa_util +from scipy.signal import get_window + + +def window_sumsquare( + window, + n_frames, + hop_length, + win_length, + n_fft, + dtype=np.float32, + norm=None, +): + """ + # from librosa 0.6 + Compute the sum-square envelope of a window function at a given hop length. + + This is used to estimate modulation effects induced by windowing + observations in short-time fourier transforms. + + Parameters + ---------- + window : string, tuple, number, callable, or list-like + Window specification, as in `get_window` + + n_frames : int > 0 + The number of analysis frames + + hop_length : int > 0 + The number of samples to advance between frames + + win_length : [optional] + The length of the window function. By default, this matches `n_fft`. + + n_fft : int > 0 + The length of each analysis frame. 
+ + dtype : np.dtype + The data type of the output + + Returns + ------- + wss : np.ndarray, shape=`(n_fft + hop_length * (n_frames - 1))` + The sum-squared envelope of the window function + """ + if win_length is None: + win_length = n_fft + + n = n_fft + hop_length * (n_frames - 1) + x = np.zeros(n, dtype=dtype) + + # Compute the squared window at the desired length + win_sq = get_window(window, win_length, fftbins=True) + win_sq = librosa_util.normalize(win_sq, norm=norm) ** 2 + win_sq = librosa_util.pad_center(win_sq, n_fft) + + # Fill the envelope + for i in range(n_frames): + sample = i * hop_length + x[sample : min(n, sample + n_fft)] += win_sq[: max(0, min(n_fft, n - sample))] + return x + + +def griffin_lim(magnitudes, stft_fn, n_iters=30): + """ + PARAMS + ------ + magnitudes: spectrogram magnitudes + stft_fn: STFT class with transform (STFT) and inverse (ISTFT) methods + """ + + angles = np.angle(np.exp(2j * np.pi * np.random.rand(*magnitudes.size()))) + angles = angles.astype(np.float32) + angles = torch.autograd.Variable(torch.from_numpy(angles)) + signal = stft_fn.inverse(magnitudes, angles).squeeze(1) + + for i in range(n_iters): + _, angles = stft_fn.transform(signal) + signal = stft_fn.inverse(magnitudes, angles).squeeze(1) + return signal + + +def dynamic_range_compression(x, normalize_fun=torch.log, C=1, clip_val=1e-5): + """ + PARAMS + ------ + C: compression factor + """ + return normalize_fun(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression(x, C=1): + """ + PARAMS + ------ + C: compression factor used to compress + """ + return torch.exp(x) / C diff --git a/qa_mdt/audioldm_train/utilities/audio/stft.py b/qa_mdt/audioldm_train/utilities/audio/stft.py new file mode 100644 index 0000000000000000000000000000000000000000..eb42f20979013d6110376d758cfadd129bd4b88a --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/audio/stft.py @@ -0,0 +1,178 @@ +import torch +import torch.nn.functional as F +import numpy as np +from scipy.signal import get_window +from librosa.util import pad_center, tiny +from librosa.filters import mel as librosa_mel_fn + +from qa_mdt.audioldm_train.utilities.audio.audio_processing import ( + dynamic_range_compression, + dynamic_range_decompression, + window_sumsquare, +) + + +class STFT(torch.nn.Module): + """adapted from Prem Seetharaman's https://github.com/pseeth/pytorch-stft""" + + def __init__(self, filter_length, hop_length, win_length, window="hann"): + super(STFT, self).__init__() + self.filter_length = filter_length + self.hop_length = hop_length + self.win_length = win_length + self.window = window + self.forward_transform = None + scale = self.filter_length / self.hop_length + fourier_basis = np.fft.fft(np.eye(self.filter_length)) + + cutoff = int((self.filter_length / 2 + 1)) + fourier_basis = np.vstack( + [np.real(fourier_basis[:cutoff, :]), np.imag(fourier_basis[:cutoff, :])] + ) + + forward_basis = torch.FloatTensor(fourier_basis[:, None, :]) + inverse_basis = torch.FloatTensor( + np.linalg.pinv(scale * fourier_basis).T[:, None, :] + ) + + if window is not None: + assert filter_length >= win_length + # get window and zero center pad it to filter_length + fft_window = get_window(window, win_length, fftbins=True) + fft_window = pad_center(data=fft_window, size=filter_length) + fft_window = torch.from_numpy(fft_window).float() + + # window the bases + forward_basis *= fft_window + inverse_basis *= fft_window + + self.register_buffer("forward_basis", forward_basis.float()) + self.register_buffer("inverse_basis", 
inverse_basis.float()) + + def transform(self, input_data): + num_batches = input_data.size(0) + num_samples = input_data.size(1) + + self.num_samples = num_samples + + # similar to librosa, reflect-pad the input + input_data = input_data.view(num_batches, 1, num_samples) + input_data = F.pad( + input_data.unsqueeze(1), + (int(self.filter_length / 2), int(self.filter_length / 2), 0, 0), + mode="reflect", + ) + input_data = input_data.squeeze(1) + + forward_transform = F.conv1d( + input_data, + torch.autograd.Variable(self.forward_basis, requires_grad=False), + stride=self.hop_length, + padding=0, + ).cpu() + + cutoff = int((self.filter_length / 2) + 1) + real_part = forward_transform[:, :cutoff, :] + imag_part = forward_transform[:, cutoff:, :] + + magnitude = torch.sqrt(real_part**2 + imag_part**2) + phase = torch.autograd.Variable(torch.atan2(imag_part.data, real_part.data)) + + return magnitude, phase + + def inverse(self, magnitude, phase): + recombine_magnitude_phase = torch.cat( + [magnitude * torch.cos(phase), magnitude * torch.sin(phase)], dim=1 + ) + + inverse_transform = F.conv_transpose1d( + recombine_magnitude_phase, + torch.autograd.Variable(self.inverse_basis, requires_grad=False), + stride=self.hop_length, + padding=0, + ) + + if self.window is not None: + window_sum = window_sumsquare( + self.window, + magnitude.size(-1), + hop_length=self.hop_length, + win_length=self.win_length, + n_fft=self.filter_length, + dtype=np.float32, + ) + # remove modulation effects + approx_nonzero_indices = torch.from_numpy( + np.where(window_sum > tiny(window_sum))[0] + ) + window_sum = torch.autograd.Variable( + torch.from_numpy(window_sum), requires_grad=False + ) + window_sum = window_sum + inverse_transform[:, :, approx_nonzero_indices] /= window_sum[ + approx_nonzero_indices + ] + + # scale by hop ratio + inverse_transform *= float(self.filter_length) / self.hop_length + + inverse_transform = inverse_transform[:, :, int(self.filter_length / 2) :] + inverse_transform = inverse_transform[:, :, : -int(self.filter_length / 2) :] + + return inverse_transform + + def forward(self, input_data): + self.magnitude, self.phase = self.transform(input_data) + reconstruction = self.inverse(self.magnitude, self.phase) + return reconstruction + + +class TacotronSTFT(torch.nn.Module): + def __init__( + self, + filter_length, + hop_length, + win_length, + n_mel_channels, + sampling_rate, + mel_fmin, + mel_fmax, + ): + super(TacotronSTFT, self).__init__() + self.n_mel_channels = n_mel_channels + self.sampling_rate = sampling_rate + self.stft_fn = STFT(filter_length, hop_length, win_length) + mel_basis = librosa_mel_fn( + sr=sampling_rate, n_fft=filter_length, n_mels=n_mel_channels, fmin=mel_fmin, fmax=mel_fmax + ) + mel_basis = torch.from_numpy(mel_basis).float() + self.register_buffer("mel_basis", mel_basis) + + def spectral_normalize(self, magnitudes, normalize_fun): + output = dynamic_range_compression(magnitudes, normalize_fun) + return output + + def spectral_de_normalize(self, magnitudes): + output = dynamic_range_decompression(magnitudes) + return output + + def mel_spectrogram(self, y, normalize_fun=torch.log): + """Computes mel-spectrograms from a batch of waves + PARAMS + ------ + y: Variable(torch.FloatTensor) with shape (B, T) in range [-1, 1] + + RETURNS + ------- + mel_output: torch.FloatTensor of shape (B, n_mel_channels, T) + """ + assert torch.min(y.data) >= -1, torch.min(y.data) + assert torch.max(y.data) <= 1, torch.max(y.data) + + magnitudes, phases = self.stft_fn.transform(y) + 
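# mel_basis [n_mels, n_fft//2+1] is matmul'd with magnitudes [B, n_fft//2+1, T] to give mel_output [B, n_mels, T] before log compression. +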
magnitudes = magnitudes.data + mel_output = torch.matmul(self.mel_basis, magnitudes) + mel_output = self.spectral_normalize(mel_output, normalize_fun) + energy = torch.norm(magnitudes, dim=1) + + return mel_output, magnitudes, phases, energy diff --git a/qa_mdt/audioldm_train/utilities/audio/tools.py b/qa_mdt/audioldm_train/utilities/audio/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..f6abe702ca4f9a79f6472f9b79e99b51806ceadb --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/audio/tools.py @@ -0,0 +1,35 @@ +import torch +import numpy as np +from scipy.io.wavfile import write +import torchaudio + +from qa_mdt.audioldm_train.utilities.audio.audio_processing import griffin_lim + + +def get_mel_from_wav(audio, _stft): + audio = torch.clip(torch.FloatTensor(audio).unsqueeze(0), -1, 1) + audio = torch.autograd.Variable(audio, requires_grad=False) + melspec, magnitudes, phases, energy = _stft.mel_spectrogram(audio) + melspec = torch.squeeze(melspec, 0).numpy().astype(np.float32) + magnitudes = torch.squeeze(magnitudes, 0).numpy().astype(np.float32) + energy = torch.squeeze(energy, 0).numpy().astype(np.float32) + return melspec, magnitudes, energy + + +def inv_mel_spec(mel, out_filename, _stft, griffin_iters=60): + mel = torch.stack([mel]) + mel_decompress = _stft.spectral_de_normalize(mel) + mel_decompress = mel_decompress.transpose(1, 2).data.cpu() + spec_from_mel_scaling = 1000 + spec_from_mel = torch.mm(mel_decompress[0], _stft.mel_basis) + spec_from_mel = spec_from_mel.transpose(0, 1).unsqueeze(0) + spec_from_mel = spec_from_mel * spec_from_mel_scaling + + audio = griffin_lim( + torch.autograd.Variable(spec_from_mel[:, :, :-1]), _stft._stft_fn, griffin_iters + ) + + audio = audio.squeeze() + audio = audio.cpu().numpy() + audio_path = out_filename + write(audio_path, _stft.sampling_rate, audio) diff --git a/qa_mdt/audioldm_train/utilities/data/__init__.py b/qa_mdt/audioldm_train/utilities/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..13a9804e72b88e3b9078940aee87db73788c1fb5 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/__init__.py @@ -0,0 +1 @@ +from .dataset import Dataset diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f239c760bb9c80e40b84532f1bc55eda931150e3 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-38.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7398ae5f032a68830753e744175a01ede237ad9c Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-39.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..77b37eaacaad96a492ec0ea1640bc72d03c123e9 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/__init__.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..c1f78f9757f6cce6ca5386e9f0bc0d8e992dd51b Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-38.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed90fc9c7c05067643935b556e3118aa5623ebe6 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-39.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1e2360e567eeb2ad893a6fa43a961e828138033d Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e153a25d18f740bf7c67fc386a26b08594b7df9 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d429b73b7476121ec3a0cc54ef7813f0524f9f52 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos1.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos1.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a585e7ea278d1f885bfc8230e58642b7706ea09a Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos1.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos2.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..82722449033bf83e49a713a529552b7e26e18b3f Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos2.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos3.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos3.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..91f16006c7f4bff79dbf4e2e186166876068a2ef Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos3.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos4.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos4.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..df0c22fc6d5251e2de8c57fe0d59931af148c7fe Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos4.cpython-310.pyc differ diff --git 
a/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos5.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos5.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7e0a233cf8c50fa4746f259de49891ec0ef19e0 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/dataset_original_mos5.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_all_pb2.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_all_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0236471ec0f85c60070b98df3b850cdf235b0f8d Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_all_pb2.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_mos_pb2.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_mos_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b3ac321f59b33d3d95d066c4c6bd1e00127b4af Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_mos_pb2.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_wav_pb2.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_wav_pb2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d5062ffa9f7ec819c037066b1e04d489f1e0a8aa Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_wav_pb2.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_wav_pb2.cpython-38.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_wav_pb2.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..733d2bdbf40ad950b3d6bbe77d9b7fd16d87aac0 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/datum_wav_pb2.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e196e158f9d7d1be8f0fa326dd84c84854aa1d4 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-38.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..124f4a7cef6b0af96c3823fd4f80354c417d5256 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-38.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-39.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c4632e93240e5e44610abe020f79ac3811add2f5 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh.cpython-39.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh_ifly.cpython-310.pyc b/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh_ifly.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..543685dc28b8322a51a34b1419fac18c51251a53 Binary files /dev/null and b/qa_mdt/audioldm_train/utilities/data/__pycache__/hhhh_ifly.cpython-310.pyc differ diff --git a/qa_mdt/audioldm_train/utilities/data/dataset.py 
b/qa_mdt/audioldm_train/utilities/data/dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..658b67290d0f1b21d9c1ca15763eb5a499087075 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/dataset.py @@ -0,0 +1,634 @@ +import sys + +sys.path.append("src") +import os +import pandas as pd +import yaml +import qa_mdt.audioldm_train.utilities.audio as Audio +from qa_mdt.audioldm_train.utilities.tools import load_json +from qa_mdt.audioldm_train.dataset_plugin import * +from librosa.filters import mel as librosa_mel_fn + +import random +from torch.utils.data import Dataset +import torch.nn.functional +import torch +import numpy as np +import torchaudio +import json + + +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config=None, + split="train", + waveform_only=False, + add_ons=[], + dataset_json=None, + ): + """ + Dataset that manages audio recordings + :param audio_conf: Dictionary containing the audio loading and preprocessing settings + :param dataset_json_file + """ + self.config = config + self.split = split + self.pad_wav_start_sample = 0 # If none, random choose + self.trim_wav = False + self.waveform_only = waveform_only + self.add_ons = [eval(x) for x in add_ons] + print("Add-ons:", self.add_ons) + + self.build_setting_parameters() + + # For an external dataset + if dataset_json is not None: + self.data = dataset_json["data"] + self.id2label, self.index_dict, self.num2label = {}, {}, {} + else: + self.metadata_root = load_json(self.config["metadata_root"]) + self.dataset_name = self.config["data"][self.split] + assert split in self.config["data"].keys(), ( + "The dataset split %s you specified is not present in the config. 
You can choose from %s" + % (split, self.config["data"].keys()) + ) + self.build_dataset() + self.build_id_to_label() + + self.build_dsp() + self.label_num = len(self.index_dict) + print("Dataset initialize finished") + + def __getitem__(self, index): + ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, # the one-hot representation of the audio class + # the metadata of the sampled audio file and the mixup audio file (if exist) + (datum, mix_datum), + random_start, + ) = self.feature_extraction(index) + text = self.get_sample_text_caption(datum, mix_datum, label_vector) + + data = { + "text": text, # list + "fname": self.text_to_filename(text) if (not fname) else fname, # list + # tensor, [batchsize, class_num] + "label_vector": "" if (label_vector is None) else label_vector.float(), + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + } + + for add_on in self.add_ons: + data.update(add_on(self.config, data, self.data[index])) + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def text_to_filename(self, text): + return text.replace(" ", "_").replace("'", "_").replace('"', "_") + + def get_dataset_root_path(self, dataset): + assert dataset in self.metadata_root.keys() + return self.metadata_root[dataset] + + def get_dataset_metadata_path(self, dataset, key): + # key: train, test, val, class_label_indices + try: + if dataset in self.metadata_root["metadata"]["path"].keys(): + return self.metadata_root["metadata"]["path"][dataset][key] + except: + raise ValueError( + 'Dataset %s does not metadata "%s" specified' % (dataset, key) + ) + + def __len__(self): + return len(self.data) + + def feature_extraction(self, index): + if index > len(self.data) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.data) - 1) + + # Read wave file and extract feature + while True: + try: + label_indices = np.zeros(self.label_num, dtype=np.float32) + datum = self.data[index] + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(datum["wav"]) + mix_datum = None + if self.label_num > 0 and "labels" in datum.keys(): + for label_str in datum["labels"].split(","): + label_indices[int(self.index_dict[label_str])] = 1.0 + + # If the key "label" is not in the metadata, return all zero vector + label_indices = torch.FloatTensor(label_indices) + break + except Exception as e: + index = (index + 1) % len(self.data) + print( + "Error encounter during audio feature extraction: ", e, datum["wav"] + ) + continue + + # The filename of the wav file + fname = datum["wav"] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + + return ( + fname, + waveform, + stft, + log_mel_spec, + label_indices, + (datum, mix_datum), + random_start, + ) + + # def augmentation(self, log_mel_spec): + # assert torch.min(log_mel_spec) < 0 + # log_mel_spec = log_mel_spec.exp() + + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # # this is just to satisfy new 
torchaudio version. + # log_mel_spec = log_mel_spec.unsqueeze(0) + # if self.freqm != 0: + # log_mel_spec = self.frequency_masking(log_mel_spec, self.freqm) + # if self.timem != 0: + # log_mel_spec = self.time_masking( + # log_mel_spec, self.timem) # self.timem=0 + + # log_mel_spec = (log_mel_spec + 1e-7).log() + # # squeeze back + # log_mel_spec = log_mel_spec.squeeze(0) + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # return log_mel_spec + + def build_setting_parameters(self): + # Read from the json config + self.melbins = self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + if "train" not in self.split: + self.mixup = 0.0 + # self.freqm = 0 + # self.timem = 0 + + def _relative_path_to_absolute_path(self, metadata, dataset_name): + root_path = self.get_dataset_root_path(dataset_name) + for i in range(len(metadata["data"])): + assert "wav" in metadata["data"][i].keys(), metadata["data"][i] + assert metadata["data"][i]["wav"][0] != "/", ( + "The dataset metadata should only contain relative path to the audio file: " + + str(metadata["data"][i]["wav"]) + ) + metadata["data"][i]["wav"] = os.path.join( + root_path, metadata["data"][i]["wav"] + ) + return metadata + + def build_dataset(self): + self.data = [] + print("Build dataset split %s from %s" % (self.split, self.dataset_name)) + if type(self.dataset_name) is str: + data_json = load_json( + self.get_dataset_metadata_path(self.dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, self.dataset_name + ) + self.data = data_json["data"] + elif type(self.dataset_name) is list: + for dataset_name in self.dataset_name: + data_json = load_json( + self.get_dataset_metadata_path(dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, dataset_name + ) + self.data += data_json["data"] + else: + raise Exception("Invalid data format") + print("Data size: {}".format(len(self.data))) + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + 
self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + # self.stft_transform = torchaudio.transforms.Spectrogram( + # n_fft=1024, hop_length=160 + # ) + # self.melscale_transform = torchaudio.transforms.MelScale( + # sample_rate=16000, n_stft=1024 // 2 + 1, n_mels=64 + # ) + + def build_id_to_label(self): + id2label = {} + id2num = {} + num2label = {} + class_label_indices_path = self.get_dataset_metadata_path( + dataset=self.config["data"]["class_label_indices"], + key="class_label_indices", + ) + if class_label_indices_path is not None: + df = pd.read_csv(class_label_indices_path) + for _, row in df.iterrows(): + index, mid, display_name = row["index"], row["mid"], row["display_name"] + id2label[mid] = display_name + id2num[mid] = index + num2label[index] = display_name + self.id2label, self.index_dict, self.num2label = id2label, id2num, num2label + else: + self.id2label, self.index_dict, self.num2label = {}, {}, {} + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + return waveform + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, filename): + # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + waveform, sr = torchaudio.load(filename) + + waveform, random_start = self.random_segment_wav( + 
waveform, target_length=int(sr * self.duration) + ) + + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def read_audio_file(self, filename, filename2=None): + if os.path.exists(filename): + waveform, random_start = self.read_wav_file(filename) + else: + print( + 'Non-fatal Warning [dataset.py]: The wav path "', + filename, + '" is not find in the metadata. Use empty waveform instead. This is normal in the inference process.', + ) + target_length = int(self.sampling_rate * self.duration) + waveform = torch.zeros((1, target_length)) + random_start = 0 + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + if not self.waveform_only: + log_mel_spec, stft = self.wav_feature_extraction(waveform) + else: + # Load waveform data only + # Use zero array to keep the format unified + log_mel_spec, stft = None, None + + return log_mel_spec, stft, waveform, random_start + + def get_sample_text_caption(self, datum, mix_datum, label_indices): + text = self.label_indices_to_text(datum, label_indices) + if mix_datum is not None: + text += " " + self.label_indices_to_text(mix_datum, label_indices) + return text + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + + if self.mel_fmax not in self.mel_basis: + mel = librosa_mel_fn( + sr = self.sampling_rate, + n_fft = self.filter_length, + n_mels = self.n_mel, + fmin = self.mel_fmin, + fmax = self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker > 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + # @profile + # def wav_feature_extraction_torchaudio(self, waveform): + # waveform = waveform[0, ...] 
+ # waveform = torch.FloatTensor(waveform) + + # stft = self.stft_transform(waveform) + # mel_spec = self.melscale_transform(stft) + # log_mel_spec = torch.log(mel_spec + 1e-7) + + # log_mel_spec = torch.FloatTensor(log_mel_spec.T) + # stft = torch.FloatTensor(stft.T) + + # log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + # return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + import ipdb + + ipdb.set_trace() + # pass \ No newline at end of 
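The `AudioDataset` above is driven entirely by a config dictionary (normally parsed from the project's YAML files) plus either the metadata registry or an explicit `dataset_json`. A self-contained sketch with illustrative preprocessing values follows; with `duration = 10.24` s, `sampling_rate = 16000` and `hop_length = 160`, the padded spectrogram length is `target_length = 10.24 * 16000 / 160 = 1024` frames:

```python
from torch.utils.data import DataLoader

from qa_mdt.audioldm_train.utilities.data.dataset import AudioDataset

# Illustrative values only; the real settings come from the training YAML config.
config = {
    "preprocessing": {
        "audio": {"sampling_rate": 16000, "duration": 10.24},
        "stft": {"filter_length": 1024, "hop_length": 160, "win_length": 1024},
        "mel": {"n_mel_channels": 64, "mel_fmin": 0, "mel_fmax": 8000},
    },
    "augmentation": {"mixup": 0.0},
}

# Passing dataset_json bypasses the metadata_root registry entirely; "example.wav"
# is a placeholder (a missing file falls back to an all-zero waveform with a warning).
dataset_json = {"data": [{"wav": "example.wav", "caption": "a calm piano melody"}]}

dataset = AudioDataset(config=config, split="train", add_ons=[], dataset_json=dataset_json)
loader = DataLoader(dataset, batch_size=1, shuffle=False)

batch = next(iter(loader))
print(batch["text"])                # ["a calm piano melody"]
print(batch["log_mel_spec"].shape)  # torch.Size([1, 1024, 64]) -> (batch, frames, mel bins)
```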
file diff --git a/qa_mdt/audioldm_train/utilities/data/dataset_original.py b/qa_mdt/audioldm_train/utilities/data/dataset_original.py new file mode 100644 index 0000000000000000000000000000000000000000..f87aaca15872e0a929fa8d8e0076dc96750e70b2 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/dataset_original.py @@ -0,0 +1,634 @@ +import sys + +sys.path.append("src") +import os +import pandas as pd +import yaml +import qa_mdt.audioldm_train.utilities.audio as Audio +from qa_mdt.audioldm_train.utilities.tools import load_json +from qa_mdt.audioldm_train.dataset_plugin import * +from librosa.filters import mel as librosa_mel_fn + +import random +from torch.utils.data import Dataset +import torch.nn.functional +import torch +import numpy as np +import torchaudio +import json + + +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config=None, + split="train", + waveform_only=False, + add_ons=[], + dataset_json=None, + ): + """ + Dataset that manages audio recordings + :param audio_conf: Dictionary containing the audio loading and preprocessing settings + :param dataset_json_file + """ + self.config = config + self.split = split + self.pad_wav_start_sample = 0 # If none, random choose + self.trim_wav = False + self.waveform_only = waveform_only + self.add_ons = [eval(x) for x in add_ons] + print("Add-ons:", self.add_ons) + + self.build_setting_parameters() + + # For an external dataset + if dataset_json is not None: + self.data = dataset_json["data"] + self.id2label, self.index_dict, self.num2label = {}, {}, {} + else: + self.metadata_root = load_json(self.config["metadata_root"]) + self.dataset_name = self.config["data"][self.split] + assert split in self.config["data"].keys(), ( + "The dataset split %s you specified is not present in the config. 
You can choose from %s" + % (split, self.config["data"].keys()) + ) + self.build_dataset() + self.build_id_to_label() + + self.build_dsp() + self.label_num = len(self.index_dict) + print("Dataset initialize finished") + + def __getitem__(self, index): + ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, # the one-hot representation of the audio class + # the metadata of the sampled audio file and the mixup audio file (if exist) + (datum, mix_datum), + random_start, + ) = self.feature_extraction(index) + text = self.get_sample_text_caption(datum, mix_datum, label_vector) + + data = { + "text": text, # list + "fname": self.text_to_filename(text) if (not fname) else fname, # list + # tensor, [batchsize, class_num] + "label_vector": "" if (label_vector is None) else label_vector.float(), + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + } + + for add_on in self.add_ons: + data.update(add_on(self.config, data, self.data[index])) + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def text_to_filename(self, text): + return text.replace(" ", "_").replace("'", "_").replace('"', "_") + + def get_dataset_root_path(self, dataset): + assert dataset in self.metadata_root.keys() + return self.metadata_root[dataset] + + def get_dataset_metadata_path(self, dataset, key): + # key: train, test, val, class_label_indices + try: + if dataset in self.metadata_root["metadata"]["path"].keys(): + return self.metadata_root["metadata"]["path"][dataset][key] + except: + raise ValueError( + 'Dataset %s does not metadata "%s" specified' % (dataset, key) + ) + + def __len__(self): + return len(self.data) + + def feature_extraction(self, index): + if index > len(self.data) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.data) - 1) + + # Read wave file and extract feature + while True: + try: + label_indices = np.zeros(self.label_num, dtype=np.float32) + datum = self.data[index] + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(datum["wav"]) + mix_datum = None + if self.label_num > 0 and "labels" in datum.keys(): + for label_str in datum["labels"].split(","): + label_indices[int(self.index_dict[label_str])] = 1.0 + + # If the key "label" is not in the metadata, return all zero vector + label_indices = torch.FloatTensor(label_indices) + break + except Exception as e: + index = (index + 1) % len(self.data) + print( + "Error encounter during audio feature extraction: ", e, datum["wav"] + ) + continue + + # The filename of the wav file + fname = datum["wav"] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + + return ( + fname, + waveform, + stft, + log_mel_spec, + label_indices, + (datum, mix_datum), + random_start, + ) + + # def augmentation(self, log_mel_spec): + # assert torch.min(log_mel_spec) < 0 + # log_mel_spec = log_mel_spec.exp() + + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # # this is just to satisfy new 
torchaudio version. + # log_mel_spec = log_mel_spec.unsqueeze(0) + # if self.freqm != 0: + # log_mel_spec = self.frequency_masking(log_mel_spec, self.freqm) + # if self.timem != 0: + # log_mel_spec = self.time_masking( + # log_mel_spec, self.timem) # self.timem=0 + + # log_mel_spec = (log_mel_spec + 1e-7).log() + # # squeeze back + # log_mel_spec = log_mel_spec.squeeze(0) + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # return log_mel_spec + + def build_setting_parameters(self): + # Read from the json config + self.melbins = self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + if "train" not in self.split: + self.mixup = 0.0 + # self.freqm = 0 + # self.timem = 0 + + def _relative_path_to_absolute_path(self, metadata, dataset_name): + root_path = self.get_dataset_root_path(dataset_name) + for i in range(len(metadata["data"])): + assert "wav" in metadata["data"][i].keys(), metadata["data"][i] + assert metadata["data"][i]["wav"][0] != "/", ( + "The dataset metadata should only contain relative path to the audio file: " + + str(metadata["data"][i]["wav"]) + ) + metadata["data"][i]["wav"] = os.path.join( + root_path, metadata["data"][i]["wav"] + ) + return metadata + + def build_dataset(self): + self.data = [] + print("Build dataset split %s from %s" % (self.split, self.dataset_name)) + if type(self.dataset_name) is str: + data_json = load_json( + self.get_dataset_metadata_path(self.dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, self.dataset_name + ) + self.data = data_json["data"] + elif type(self.dataset_name) is list: + for dataset_name in self.dataset_name: + data_json = load_json( + self.get_dataset_metadata_path(dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, dataset_name + ) + self.data += data_json["data"] + else: + raise Exception("Invalid data format") + print("Data size: {}".format(len(self.data))) + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + 
self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + # self.stft_transform = torchaudio.transforms.Spectrogram( + # n_fft=1024, hop_length=160 + # ) + # self.melscale_transform = torchaudio.transforms.MelScale( + # sample_rate=16000, n_stft=1024 // 2 + 1, n_mels=64 + # ) + + def build_id_to_label(self): + id2label = {} + id2num = {} + num2label = {} + class_label_indices_path = self.get_dataset_metadata_path( + dataset=self.config["data"]["class_label_indices"], + key="class_label_indices", + ) + if class_label_indices_path is not None: + df = pd.read_csv(class_label_indices_path) + for _, row in df.iterrows(): + index, mid, display_name = row["index"], row["mid"], row["display_name"] + id2label[mid] = display_name + id2num[mid] = index + num2label[index] = display_name + self.id2label, self.index_dict, self.num2label = id2label, id2num, num2label + else: + self.id2label, self.index_dict, self.num2label = {}, {}, {} + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + return waveform + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, filename): + # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + waveform, sr = torchaudio.load(filename) + + waveform, random_start = self.random_segment_wav( + 
waveform, target_length=int(sr * self.duration) + ) + + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def read_audio_file(self, filename, filename2=None): + if os.path.exists(filename): + waveform, random_start = self.read_wav_file(filename) + else: + print( + 'Non-fatal Warning [dataset.py]: The wav path "', + filename, + '" is not find in the metadata. Use empty waveform instead. This is normal in the inference process.', + ) + target_length = int(self.sampling_rate * self.duration) + waveform = torch.zeros((1, target_length)) + random_start = 0 + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + if not self.waveform_only: + log_mel_spec, stft = self.wav_feature_extraction(waveform) + else: + # Load waveform data only + # Use zero array to keep the format unified + log_mel_spec, stft = None, None + + return log_mel_spec, stft, waveform, random_start + + def get_sample_text_caption(self, datum, mix_datum, label_indices): + text = self.label_indices_to_text(datum, label_indices) + if mix_datum is not None: + text += " " + self.label_indices_to_text(mix_datum, label_indices) + return text + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + + if self.mel_fmax not in self.mel_basis: + mel = librosa_mel_fn( + self.sampling_rate, + self.filter_length, + self.n_mel, + self.mel_fmin, + self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker > 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + # @profile + # def wav_feature_extraction_torchaudio(self, waveform): + # waveform = waveform[0, ...] 
+ # waveform = torch.FloatTensor(waveform) + + # stft = self.stft_transform(waveform) + # mel_spec = self.melscale_transform(stft) + # log_mel_spec = torch.log(mel_spec + 1e-7) + + # log_mel_spec = torch.FloatTensor(log_mel_spec.T) + # stft = torch.FloatTensor(stft.T) + + # log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + # return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + import ipdb + + ipdb.set_trace() + # pass diff --git 
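When the metadata registry is used instead of `dataset_json`, `config["metadata_root"]` is the path to a JSON file that maps each dataset name to its audio root directory and to its per-split metadata files, and `config["data"][split]` names the dataset (or a list of datasets) to load. A sketch of that layout, with hypothetical dataset names and paths, is shown below:

```python
import json

# Hypothetical names and paths for illustration only.
metadata_root = {
    "mydataset": "/data/mydataset",  # dataset name -> audio root directory
    "metadata": {
        "path": {
            "mydataset": {
                "train": "/data/metadata/mydataset/train.json",
                "val": "/data/metadata/mydataset/val.json",
                "test": "/data/metadata/mydataset/test.json",
                # Optional CSV with columns index, mid, display_name for labelled datasets.
                "class_label_indices": None,
            }
        }
    },
}

# One per-split file; "wav" must be relative to the root above (asserted in
# _relative_path_to_absolute_path). Any key containing "caption" is picked up as text,
# and an optional "labels" key holds comma-separated label ids.
train_split = {
    "data": [
        {"wav": "clips/0001.wav", "caption": "a calm piano melody"},
        {"wav": "clips/0002.wav", "caption": "an upbeat electronic track"},
    ]
}

# In practice these land at the paths the registry declares; written locally here.
with open("metadata_root.json", "w") as f:  # config["metadata_root"] points to this file
    json.dump(metadata_root, f, indent=2)
with open("train.json", "w") as f:
    json.dump(train_split, f, indent=2)
```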
a/qa_mdt/audioldm_train/utilities/data/dataset_original_mos.py b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos.py new file mode 100644 index 0000000000000000000000000000000000000000..3280309bc592fab3f99a24234f4605f03ca92090 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos.py @@ -0,0 +1,635 @@ +import sys + +sys.path.append("src") +import os +import pandas as pd +import yaml +import audioldm_train.utilities.audio as Audio +from audioldm_train.utilities.tools import load_json +from audioldm_train.dataset_plugin import * +from librosa.filters import mel as librosa_mel_fn + +import random +from torch.utils.data import Dataset +import torch.nn.functional +import torch +import numpy as np +import torchaudio +import json + + +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config=None, + split="train", + waveform_only=False, + add_ons=[], + dataset_json=None, + ): + """ + Dataset that manages audio recordings + :param audio_conf: Dictionary containing the audio loading and preprocessing settings + :param dataset_json_file + """ + self.config = config + self.split = split + self.pad_wav_start_sample = 0 # If none, random choose + self.trim_wav = False + self.waveform_only = waveform_only + self.add_ons = [eval(x) for x in add_ons] + print("Add-ons:", self.add_ons) + + self.build_setting_parameters() + + # For an external dataset + if dataset_json is not None: + self.data = dataset_json["data"] + self.id2label, self.index_dict, self.num2label = {}, {}, {} + else: + self.metadata_root = load_json(self.config["metadata_root"]) + self.dataset_name = self.config["data"][self.split] + assert split in self.config["data"].keys(), ( + "The dataset split %s you specified is not present in the config. 
You can choose from %s" + % (split, self.config["data"].keys()) + ) + self.build_dataset() + self.build_id_to_label() + + self.build_dsp() + self.label_num = len(self.index_dict) + print("Dataset initialize finished") + + def __getitem__(self, index): + ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, # the one-hot representation of the audio class + # the metadata of the sampled audio file and the mixup audio file (if exist) + (datum, mix_datum), + random_start, + ) = self.feature_extraction(index) + text = self.get_sample_text_caption(datum, mix_datum, label_vector) + + data = { + "text": text, # list + "fname": self.text_to_filename(text) if (not fname) else fname, # list + # tensor, [batchsize, class_num] + "label_vector": "" if (label_vector is None) else label_vector.float(), + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + "mos": 5 + } + + for add_on in self.add_ons: + data.update(add_on(self.config, data, self.data[index])) + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def text_to_filename(self, text): + return text.replace(" ", "_").replace("'", "_").replace('"', "_") + + def get_dataset_root_path(self, dataset): + assert dataset in self.metadata_root.keys() + return self.metadata_root[dataset] + + def get_dataset_metadata_path(self, dataset, key): + # key: train, test, val, class_label_indices + try: + if dataset in self.metadata_root["metadata"]["path"].keys(): + return self.metadata_root["metadata"]["path"][dataset][key] + except: + raise ValueError( + 'Dataset %s does not metadata "%s" specified' % (dataset, key) + ) + + def __len__(self): + return len(self.data) + + def feature_extraction(self, index): + if index > len(self.data) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.data) - 1) + + # Read wave file and extract feature + while True: + try: + label_indices = np.zeros(self.label_num, dtype=np.float32) + datum = self.data[index] + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(datum["wav"]) + mix_datum = None + if self.label_num > 0 and "labels" in datum.keys(): + for label_str in datum["labels"].split(","): + label_indices[int(self.index_dict[label_str])] = 1.0 + + # If the key "label" is not in the metadata, return all zero vector + label_indices = torch.FloatTensor(label_indices) + break + except Exception as e: + index = (index + 1) % len(self.data) + print( + "Error encounter during audio feature extraction: ", e, datum["wav"] + ) + continue + + # The filename of the wav file + fname = datum["wav"] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + + return ( + fname, + waveform, + stft, + log_mel_spec, + label_indices, + (datum, mix_datum), + random_start, + ) + + # def augmentation(self, log_mel_spec): + # assert torch.min(log_mel_spec) < 0 + # log_mel_spec = log_mel_spec.exp() + + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # # this is just to 
satisfy new torchaudio version. + # log_mel_spec = log_mel_spec.unsqueeze(0) + # if self.freqm != 0: + # log_mel_spec = self.frequency_masking(log_mel_spec, self.freqm) + # if self.timem != 0: + # log_mel_spec = self.time_masking( + # log_mel_spec, self.timem) # self.timem=0 + + # log_mel_spec = (log_mel_spec + 1e-7).log() + # # squeeze back + # log_mel_spec = log_mel_spec.squeeze(0) + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # return log_mel_spec + + def build_setting_parameters(self): + # Read from the json config + self.melbins = self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + if "train" not in self.split: + self.mixup = 0.0 + # self.freqm = 0 + # self.timem = 0 + + def _relative_path_to_absolute_path(self, metadata, dataset_name): + root_path = self.get_dataset_root_path(dataset_name) + for i in range(len(metadata["data"])): + assert "wav" in metadata["data"][i].keys(), metadata["data"][i] + assert metadata["data"][i]["wav"][0] != "/", ( + "The dataset metadata should only contain relative path to the audio file: " + + str(metadata["data"][i]["wav"]) + ) + metadata["data"][i]["wav"] = os.path.join( + root_path, metadata["data"][i]["wav"] + ) + return metadata + + def build_dataset(self): + self.data = [] + print("Build dataset split %s from %s" % (self.split, self.dataset_name)) + if type(self.dataset_name) is str: + data_json = load_json( + self.get_dataset_metadata_path(self.dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, self.dataset_name + ) + self.data = data_json["data"] + elif type(self.dataset_name) is list: + for dataset_name in self.dataset_name: + data_json = load_json( + self.get_dataset_metadata_path(dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, dataset_name + ) + self.data += data_json["data"] + else: + raise Exception("Invalid data format") + print("Data size: {}".format(len(self.data))) + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + 
self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + # self.stft_transform = torchaudio.transforms.Spectrogram( + # n_fft=1024, hop_length=160 + # ) + # self.melscale_transform = torchaudio.transforms.MelScale( + # sample_rate=16000, n_stft=1024 // 2 + 1, n_mels=64 + # ) + + def build_id_to_label(self): + id2label = {} + id2num = {} + num2label = {} + class_label_indices_path = self.get_dataset_metadata_path( + dataset=self.config["data"]["class_label_indices"], + key="class_label_indices", + ) + if class_label_indices_path is not None: + df = pd.read_csv(class_label_indices_path) + for _, row in df.iterrows(): + index, mid, display_name = row["index"], row["mid"], row["display_name"] + id2label[mid] = display_name + id2num[mid] = index + num2label[index] = display_name + self.id2label, self.index_dict, self.num2label = id2label, id2num, num2label + else: + self.id2label, self.index_dict, self.num2label = {}, {}, {} + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + return waveform + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, filename): + # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + waveform, sr = torchaudio.load(filename) + + waveform, random_start = self.random_segment_wav( + 
waveform, target_length=int(sr * self.duration) + ) + + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def read_audio_file(self, filename, filename2=None): + if os.path.exists(filename): + waveform, random_start = self.read_wav_file(filename) + else: + print( + 'Non-fatal Warning [dataset.py]: The wav path "', + filename, + '" is not find in the metadata. Use empty waveform instead. This is normal in the inference process.', + ) + target_length = int(self.sampling_rate * self.duration) + waveform = torch.zeros((1, target_length)) + random_start = 0 + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + if not self.waveform_only: + log_mel_spec, stft = self.wav_feature_extraction(waveform) + else: + # Load waveform data only + # Use zero array to keep the format unified + log_mel_spec, stft = None, None + + return log_mel_spec, stft, waveform, random_start + + def get_sample_text_caption(self, datum, mix_datum, label_indices): + text = self.label_indices_to_text(datum, label_indices) + if mix_datum is not None: + text += " " + self.label_indices_to_text(mix_datum, label_indices) + return text + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + + if self.mel_fmax not in self.mel_basis: + mel = librosa_mel_fn( + self.sampling_rate, + self.filter_length, + self.n_mel, + self.mel_fmin, + self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker > 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + # @profile + # def wav_feature_extraction_torchaudio(self, waveform): + # waveform = waveform[0, ...] 
+ # waveform = torch.FloatTensor(waveform) + + # stft = self.stft_transform(waveform) + # mel_spec = self.melscale_transform(stft) + # log_mel_spec = torch.log(mel_spec + 1e-7) + + # log_mel_spec = torch.FloatTensor(log_mel_spec.T) + # stft = torch.FloatTensor(stft.T) + + # log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + # return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + import ipdb + + ipdb.set_trace() + # pass diff --git 
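The `mel_spectrogram_train()` / `wav_feature_extraction()` path above follows the HiFi-GAN recipe: reflect-pad the waveform, take a Hann-window STFT with `center=False`, project the magnitudes onto a librosa mel filterbank, and apply log dynamic-range compression. A minimal standalone sketch follows; the hyperparameters (16 kHz, 1024-point FFT, hop 160, 64 mel bins) are illustrative defaults, not values read from the repository config.

```python
import torch
import torch.nn.functional as F
from librosa.filters import mel as librosa_mel_fn

def log_mel_spectrogram(y, sr=16000, n_fft=1024, hop=160, win=1024,
                        n_mels=64, fmin=0, fmax=8000, clip_val=1e-5):
    """HiFi-GAN-style log-mel features, mirroring mel_spectrogram_train() above.

    y: float32 waveform of shape [B, samples], roughly in [-1, 1].
    """
    mel_basis = torch.from_numpy(
        librosa_mel_fn(sr=sr, n_fft=n_fft, n_mels=n_mels, fmin=fmin, fmax=fmax)
    ).float().to(y.device)                                   # [n_mels, n_fft // 2 + 1]
    window = torch.hann_window(win, device=y.device)

    pad = (n_fft - hop) // 2                                  # reflect-pad so frames stay aligned
    y = F.pad(y.unsqueeze(1), (pad, pad), mode="reflect").squeeze(1)

    spec = torch.stft(y, n_fft, hop_length=hop, win_length=win, window=window,
                      center=False, pad_mode="reflect", normalized=False,
                      onesided=True, return_complex=True).abs()   # [B, n_fft//2 + 1, T]

    mel = torch.matmul(mel_basis, spec)                       # [B, n_mels, T]
    return torch.log(torch.clamp(mel, min=clip_val))          # dynamic_range_compression_torch
```

The dataset then transposes the result to [T, n_mels] and pads or crops it to `target_length` frames in `pad_spec()`.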
a/qa_mdt/audioldm_train/utilities/data/dataset_original_mos1.py b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos1.py new file mode 100644 index 0000000000000000000000000000000000000000..8c04e35adc7fa0f7b9ed12221dc2746e89110b9b --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos1.py @@ -0,0 +1,635 @@ +import sys + +sys.path.append("src") +import os +import pandas as pd +import yaml +import audioldm_train.utilities.audio as Audio +from audioldm_train.utilities.tools import load_json +from audioldm_train.dataset_plugin import * +from librosa.filters import mel as librosa_mel_fn + +import random +from torch.utils.data import Dataset +import torch.nn.functional +import torch +import numpy as np +import torchaudio +import json + + +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config=None, + split="train", + waveform_only=False, + add_ons=[], + dataset_json=None, + ): + """ + Dataset that manages audio recordings + :param audio_conf: Dictionary containing the audio loading and preprocessing settings + :param dataset_json_file + """ + self.config = config + self.split = split + self.pad_wav_start_sample = 0 # If none, random choose + self.trim_wav = False + self.waveform_only = waveform_only + self.add_ons = [eval(x) for x in add_ons] + print("Add-ons:", self.add_ons) + + self.build_setting_parameters() + + # For an external dataset + if dataset_json is not None: + self.data = dataset_json["data"] + self.id2label, self.index_dict, self.num2label = {}, {}, {} + else: + self.metadata_root = load_json(self.config["metadata_root"]) + self.dataset_name = self.config["data"][self.split] + assert split in self.config["data"].keys(), ( + "The dataset split %s you specified is not present in the config. 
You can choose from %s" + % (split, self.config["data"].keys()) + ) + self.build_dataset() + self.build_id_to_label() + + self.build_dsp() + self.label_num = len(self.index_dict) + print("Dataset initialize finished") + + def __getitem__(self, index): + ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, # the one-hot representation of the audio class + # the metadata of the sampled audio file and the mixup audio file (if exist) + (datum, mix_datum), + random_start, + ) = self.feature_extraction(index) + text = self.get_sample_text_caption(datum, mix_datum, label_vector) + + data = { + "text": text, # list + "fname": self.text_to_filename(text) if (not fname) else fname, # list + # tensor, [batchsize, class_num] + "label_vector": "" if (label_vector is None) else label_vector.float(), + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + "mos": 1 + } + + for add_on in self.add_ons: + data.update(add_on(self.config, data, self.data[index])) + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def text_to_filename(self, text): + return text.replace(" ", "_").replace("'", "_").replace('"', "_") + + def get_dataset_root_path(self, dataset): + assert dataset in self.metadata_root.keys() + return self.metadata_root[dataset] + + def get_dataset_metadata_path(self, dataset, key): + # key: train, test, val, class_label_indices + try: + if dataset in self.metadata_root["metadata"]["path"].keys(): + return self.metadata_root["metadata"]["path"][dataset][key] + except: + raise ValueError( + 'Dataset %s does not metadata "%s" specified' % (dataset, key) + ) + + def __len__(self): + return len(self.data) + + def feature_extraction(self, index): + if index > len(self.data) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.data) - 1) + + # Read wave file and extract feature + while True: + try: + label_indices = np.zeros(self.label_num, dtype=np.float32) + datum = self.data[index] + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(datum["wav"]) + mix_datum = None + if self.label_num > 0 and "labels" in datum.keys(): + for label_str in datum["labels"].split(","): + label_indices[int(self.index_dict[label_str])] = 1.0 + + # If the key "label" is not in the metadata, return all zero vector + label_indices = torch.FloatTensor(label_indices) + break + except Exception as e: + index = (index + 1) % len(self.data) + print( + "Error encounter during audio feature extraction: ", e, datum["wav"] + ) + continue + + # The filename of the wav file + fname = datum["wav"] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + + return ( + fname, + waveform, + stft, + log_mel_spec, + label_indices, + (datum, mix_datum), + random_start, + ) + + # def augmentation(self, log_mel_spec): + # assert torch.min(log_mel_spec) < 0 + # log_mel_spec = log_mel_spec.exp() + + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # # this is just to 
satisfy new torchaudio version. + # log_mel_spec = log_mel_spec.unsqueeze(0) + # if self.freqm != 0: + # log_mel_spec = self.frequency_masking(log_mel_spec, self.freqm) + # if self.timem != 0: + # log_mel_spec = self.time_masking( + # log_mel_spec, self.timem) # self.timem=0 + + # log_mel_spec = (log_mel_spec + 1e-7).log() + # # squeeze back + # log_mel_spec = log_mel_spec.squeeze(0) + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # return log_mel_spec + + def build_setting_parameters(self): + # Read from the json config + self.melbins = self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + if "train" not in self.split: + self.mixup = 0.0 + # self.freqm = 0 + # self.timem = 0 + + def _relative_path_to_absolute_path(self, metadata, dataset_name): + root_path = self.get_dataset_root_path(dataset_name) + for i in range(len(metadata["data"])): + assert "wav" in metadata["data"][i].keys(), metadata["data"][i] + assert metadata["data"][i]["wav"][0] != "/", ( + "The dataset metadata should only contain relative path to the audio file: " + + str(metadata["data"][i]["wav"]) + ) + metadata["data"][i]["wav"] = os.path.join( + root_path, metadata["data"][i]["wav"] + ) + return metadata + + def build_dataset(self): + self.data = [] + print("Build dataset split %s from %s" % (self.split, self.dataset_name)) + if type(self.dataset_name) is str: + data_json = load_json( + self.get_dataset_metadata_path(self.dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, self.dataset_name + ) + self.data = data_json["data"] + elif type(self.dataset_name) is list: + for dataset_name in self.dataset_name: + data_json = load_json( + self.get_dataset_metadata_path(dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, dataset_name + ) + self.data += data_json["data"] + else: + raise Exception("Invalid data format") + print("Data size: {}".format(len(self.data))) + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + 
self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + # self.stft_transform = torchaudio.transforms.Spectrogram( + # n_fft=1024, hop_length=160 + # ) + # self.melscale_transform = torchaudio.transforms.MelScale( + # sample_rate=16000, n_stft=1024 // 2 + 1, n_mels=64 + # ) + + def build_id_to_label(self): + id2label = {} + id2num = {} + num2label = {} + class_label_indices_path = self.get_dataset_metadata_path( + dataset=self.config["data"]["class_label_indices"], + key="class_label_indices", + ) + if class_label_indices_path is not None: + df = pd.read_csv(class_label_indices_path) + for _, row in df.iterrows(): + index, mid, display_name = row["index"], row["mid"], row["display_name"] + id2label[mid] = display_name + id2num[mid] = index + num2label[index] = display_name + self.id2label, self.index_dict, self.num2label = id2label, id2num, num2label + else: + self.id2label, self.index_dict, self.num2label = {}, {}, {} + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + return waveform + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, filename): + # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + waveform, sr = torchaudio.load(filename) + + waveform, random_start = self.random_segment_wav( + 
waveform, target_length=int(sr * self.duration) + ) + + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def read_audio_file(self, filename, filename2=None): + if os.path.exists(filename): + waveform, random_start = self.read_wav_file(filename) + else: + print( + 'Non-fatal Warning [dataset.py]: The wav path "', + filename, + '" is not find in the metadata. Use empty waveform instead. This is normal in the inference process.', + ) + target_length = int(self.sampling_rate * self.duration) + waveform = torch.zeros((1, target_length)) + random_start = 0 + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + if not self.waveform_only: + log_mel_spec, stft = self.wav_feature_extraction(waveform) + else: + # Load waveform data only + # Use zero array to keep the format unified + log_mel_spec, stft = None, None + + return log_mel_spec, stft, waveform, random_start + + def get_sample_text_caption(self, datum, mix_datum, label_indices): + text = self.label_indices_to_text(datum, label_indices) + if mix_datum is not None: + text += " " + self.label_indices_to_text(mix_datum, label_indices) + return text + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + + if self.mel_fmax not in self.mel_basis: + mel = librosa_mel_fn( + self.sampling_rate, + self.filter_length, + self.n_mel, + self.mel_fmin, + self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker > 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + # @profile + # def wav_feature_extraction_torchaudio(self, waveform): + # waveform = waveform[0, ...] 
+ # waveform = torch.FloatTensor(waveform) + + # stft = self.stft_transform(waveform) + # mel_spec = self.melscale_transform(stft) + # log_mel_spec = torch.log(mel_spec + 1e-7) + + # log_mel_spec = torch.FloatTensor(log_mel_spec.T) + # stft = torch.FloatTensor(stft.T) + + # log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + # return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + import ipdb + + ipdb.set_trace() + # pass diff --git 
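The modules `dataset_original_mos1.py`, `dataset_original_mos2.py`, and `dataset_original_mos3.py` are identical apart from the constant written to `data["mos"]` (1, 2, or 3), which appears to tag every sample from that loader with a fixed quality (MOS) bucket for quality-aware training. A hedged refactoring sketch, not part of the repository, shows how one parameterized subclass could stand in for the copies; the import path is hypothetical.

```python
# Hypothetical import path; assumes the shared AudioDataset is importable from a sibling module.
from qa_mdt.audioldm_train.utilities.data.dataset import AudioDataset

class MOSAudioDataset(AudioDataset):
    """Single class covering dataset_original_mos1/2/3: only the MOS constant differs."""

    def __init__(self, *args, mos_score=1, **kwargs):
        super().__init__(*args, **kwargs)
        self.mos_score = mos_score          # 1, 2, or 3 in the copies above

    def __getitem__(self, index):
        data = super().__getitem__(index)
        data["mos"] = self.mos_score        # replaces the hard-coded "mos" value
        return data
```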
a/qa_mdt/audioldm_train/utilities/data/dataset_original_mos2.py b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos2.py new file mode 100644 index 0000000000000000000000000000000000000000..8378663bc5879e42be42a7b185393da3e90bd1d6 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos2.py @@ -0,0 +1,635 @@ +import sys + +sys.path.append("src") +import os +import pandas as pd +import yaml +import audioldm_train.utilities.audio as Audio +from audioldm_train.utilities.tools import load_json +from audioldm_train.dataset_plugin import * +from librosa.filters import mel as librosa_mel_fn + +import random +from torch.utils.data import Dataset +import torch.nn.functional +import torch +import numpy as np +import torchaudio +import json + + +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config=None, + split="train", + waveform_only=False, + add_ons=[], + dataset_json=None, + ): + """ + Dataset that manages audio recordings + :param audio_conf: Dictionary containing the audio loading and preprocessing settings + :param dataset_json_file + """ + self.config = config + self.split = split + self.pad_wav_start_sample = 0 # If none, random choose + self.trim_wav = False + self.waveform_only = waveform_only + self.add_ons = [eval(x) for x in add_ons] + print("Add-ons:", self.add_ons) + + self.build_setting_parameters() + + # For an external dataset + if dataset_json is not None: + self.data = dataset_json["data"] + self.id2label, self.index_dict, self.num2label = {}, {}, {} + else: + self.metadata_root = load_json(self.config["metadata_root"]) + self.dataset_name = self.config["data"][self.split] + assert split in self.config["data"].keys(), ( + "The dataset split %s you specified is not present in the config. 
You can choose from %s" + % (split, self.config["data"].keys()) + ) + self.build_dataset() + self.build_id_to_label() + + self.build_dsp() + self.label_num = len(self.index_dict) + print("Dataset initialize finished") + + def __getitem__(self, index): + ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, # the one-hot representation of the audio class + # the metadata of the sampled audio file and the mixup audio file (if exist) + (datum, mix_datum), + random_start, + ) = self.feature_extraction(index) + text = self.get_sample_text_caption(datum, mix_datum, label_vector) + + data = { + "text": text, # list + "fname": self.text_to_filename(text) if (not fname) else fname, # list + # tensor, [batchsize, class_num] + "label_vector": "" if (label_vector is None) else label_vector.float(), + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + "mos": 2 + } + + for add_on in self.add_ons: + data.update(add_on(self.config, data, self.data[index])) + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def text_to_filename(self, text): + return text.replace(" ", "_").replace("'", "_").replace('"', "_") + + def get_dataset_root_path(self, dataset): + assert dataset in self.metadata_root.keys() + return self.metadata_root[dataset] + + def get_dataset_metadata_path(self, dataset, key): + # key: train, test, val, class_label_indices + try: + if dataset in self.metadata_root["metadata"]["path"].keys(): + return self.metadata_root["metadata"]["path"][dataset][key] + except: + raise ValueError( + 'Dataset %s does not metadata "%s" specified' % (dataset, key) + ) + + def __len__(self): + return len(self.data) + + def feature_extraction(self, index): + if index > len(self.data) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.data) - 1) + + # Read wave file and extract feature + while True: + try: + label_indices = np.zeros(self.label_num, dtype=np.float32) + datum = self.data[index] + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(datum["wav"]) + mix_datum = None + if self.label_num > 0 and "labels" in datum.keys(): + for label_str in datum["labels"].split(","): + label_indices[int(self.index_dict[label_str])] = 1.0 + + # If the key "label" is not in the metadata, return all zero vector + label_indices = torch.FloatTensor(label_indices) + break + except Exception as e: + index = (index + 1) % len(self.data) + print( + "Error encounter during audio feature extraction: ", e, datum["wav"] + ) + continue + + # The filename of the wav file + fname = datum["wav"] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + + return ( + fname, + waveform, + stft, + log_mel_spec, + label_indices, + (datum, mix_datum), + random_start, + ) + + # def augmentation(self, log_mel_spec): + # assert torch.min(log_mel_spec) < 0 + # log_mel_spec = log_mel_spec.exp() + + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # # this is just to 
satisfy new torchaudio version. + # log_mel_spec = log_mel_spec.unsqueeze(0) + # if self.freqm != 0: + # log_mel_spec = self.frequency_masking(log_mel_spec, self.freqm) + # if self.timem != 0: + # log_mel_spec = self.time_masking( + # log_mel_spec, self.timem) # self.timem=0 + + # log_mel_spec = (log_mel_spec + 1e-7).log() + # # squeeze back + # log_mel_spec = log_mel_spec.squeeze(0) + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # return log_mel_spec + + def build_setting_parameters(self): + # Read from the json config + self.melbins = self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + if "train" not in self.split: + self.mixup = 0.0 + # self.freqm = 0 + # self.timem = 0 + + def _relative_path_to_absolute_path(self, metadata, dataset_name): + root_path = self.get_dataset_root_path(dataset_name) + for i in range(len(metadata["data"])): + assert "wav" in metadata["data"][i].keys(), metadata["data"][i] + assert metadata["data"][i]["wav"][0] != "/", ( + "The dataset metadata should only contain relative path to the audio file: " + + str(metadata["data"][i]["wav"]) + ) + metadata["data"][i]["wav"] = os.path.join( + root_path, metadata["data"][i]["wav"] + ) + return metadata + + def build_dataset(self): + self.data = [] + print("Build dataset split %s from %s" % (self.split, self.dataset_name)) + if type(self.dataset_name) is str: + data_json = load_json( + self.get_dataset_metadata_path(self.dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, self.dataset_name + ) + self.data = data_json["data"] + elif type(self.dataset_name) is list: + for dataset_name in self.dataset_name: + data_json = load_json( + self.get_dataset_metadata_path(dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, dataset_name + ) + self.data += data_json["data"] + else: + raise Exception("Invalid data format") + print("Data size: {}".format(len(self.data))) + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + 
self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + # self.stft_transform = torchaudio.transforms.Spectrogram( + # n_fft=1024, hop_length=160 + # ) + # self.melscale_transform = torchaudio.transforms.MelScale( + # sample_rate=16000, n_stft=1024 // 2 + 1, n_mels=64 + # ) + + def build_id_to_label(self): + id2label = {} + id2num = {} + num2label = {} + class_label_indices_path = self.get_dataset_metadata_path( + dataset=self.config["data"]["class_label_indices"], + key="class_label_indices", + ) + if class_label_indices_path is not None: + df = pd.read_csv(class_label_indices_path) + for _, row in df.iterrows(): + index, mid, display_name = row["index"], row["mid"], row["display_name"] + id2label[mid] = display_name + id2num[mid] = index + num2label[index] = display_name + self.id2label, self.index_dict, self.num2label = id2label, id2num, num2label + else: + self.id2label, self.index_dict, self.num2label = {}, {}, {} + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + return waveform + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, filename): + # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + waveform, sr = torchaudio.load(filename) + + waveform, random_start = self.random_segment_wav( + 
waveform, target_length=int(sr * self.duration) + ) + + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def read_audio_file(self, filename, filename2=None): + if os.path.exists(filename): + waveform, random_start = self.read_wav_file(filename) + else: + print( + 'Non-fatal Warning [dataset.py]: The wav path "', + filename, + '" is not find in the metadata. Use empty waveform instead. This is normal in the inference process.', + ) + target_length = int(self.sampling_rate * self.duration) + waveform = torch.zeros((1, target_length)) + random_start = 0 + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + if not self.waveform_only: + log_mel_spec, stft = self.wav_feature_extraction(waveform) + else: + # Load waveform data only + # Use zero array to keep the format unified + log_mel_spec, stft = None, None + + return log_mel_spec, stft, waveform, random_start + + def get_sample_text_caption(self, datum, mix_datum, label_indices): + text = self.label_indices_to_text(datum, label_indices) + if mix_datum is not None: + text += " " + self.label_indices_to_text(mix_datum, label_indices) + return text + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + + if self.mel_fmax not in self.mel_basis: + mel = librosa_mel_fn( + self.sampling_rate, + self.filter_length, + self.n_mel, + self.mel_fmin, + self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker > 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + # @profile + # def wav_feature_extraction_torchaudio(self, waveform): + # waveform = waveform[0, ...] 
+ # waveform = torch.FloatTensor(waveform) + + # stft = self.stft_transform(waveform) + # mel_spec = self.melscale_transform(stft) + # log_mel_spec = torch.log(mel_spec + 1e-7) + + # log_mel_spec = torch.FloatTensor(log_mel_spec.T) + # stft = torch.FloatTensor(stft.T) + + # log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + # return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + import ipdb + + ipdb.set_trace() + # pass diff --git 
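`read_wav_file()` above implements the waveform side of the loader: take a random segment at the original sample rate, resample, peak-normalize to 0.5, optionally trim silence, and zero-pad to the configured duration. A condensed sketch of that chain, with illustrative defaults (16 kHz, 10.24 s) standing in for the config values:

```python
import torch
import torchaudio

def load_fixed_length(path, target_sr=16000, duration=10.24):
    """Roughly the read_wav_file() chain: random segment, resample, normalize, pad."""
    wav, sr = torchaudio.load(path)                          # [channels, samples]
    seg_len = int(sr * duration)
    if wav.shape[-1] > seg_len:                              # random crop of the raw audio
        start = torch.randint(0, wav.shape[-1] - seg_len, (1,)).item()
        wav = wav[:, start:start + seg_len]
    wav = torchaudio.functional.resample(wav, sr, target_sr)

    wav = wav[0]                                             # keep the first channel
    wav = wav - wav.mean()
    wav = 0.5 * wav / (wav.abs().max() + 1e-8)               # peak-normalize to 0.5

    out = torch.zeros(int(target_sr * duration))             # zero-pad (or crop) to length
    n = min(out.shape[-1], wav.shape[-1])
    out[:n] = wav[:n]
    return out.unsqueeze(0)                                  # [1, samples], like pad_wav()
```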
a/qa_mdt/audioldm_train/utilities/data/dataset_original_mos3.py b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos3.py new file mode 100644 index 0000000000000000000000000000000000000000..cf91dad9b937927792557dae817ad51bf8d0cda1 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos3.py @@ -0,0 +1,635 @@ +import sys + +sys.path.append("src") +import os +import pandas as pd +import yaml +import audioldm_train.utilities.audio as Audio +from audioldm_train.utilities.tools import load_json +from audioldm_train.dataset_plugin import * +from librosa.filters import mel as librosa_mel_fn + +import random +from torch.utils.data import Dataset +import torch.nn.functional +import torch +import numpy as np +import torchaudio +import json + + +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config=None, + split="train", + waveform_only=False, + add_ons=[], + dataset_json=None, + ): + """ + Dataset that manages audio recordings + :param audio_conf: Dictionary containing the audio loading and preprocessing settings + :param dataset_json_file + """ + self.config = config + self.split = split + self.pad_wav_start_sample = 0 # If none, random choose + self.trim_wav = False + self.waveform_only = waveform_only + self.add_ons = [eval(x) for x in add_ons] + print("Add-ons:", self.add_ons) + + self.build_setting_parameters() + + # For an external dataset + if dataset_json is not None: + self.data = dataset_json["data"] + self.id2label, self.index_dict, self.num2label = {}, {}, {} + else: + self.metadata_root = load_json(self.config["metadata_root"]) + self.dataset_name = self.config["data"][self.split] + assert split in self.config["data"].keys(), ( + "The dataset split %s you specified is not present in the config. 
You can choose from %s" + % (split, self.config["data"].keys()) + ) + self.build_dataset() + self.build_id_to_label() + + self.build_dsp() + self.label_num = len(self.index_dict) + print("Dataset initialize finished") + + def __getitem__(self, index): + ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, # the one-hot representation of the audio class + # the metadata of the sampled audio file and the mixup audio file (if exist) + (datum, mix_datum), + random_start, + ) = self.feature_extraction(index) + text = self.get_sample_text_caption(datum, mix_datum, label_vector) + + data = { + "text": text, # list + "fname": self.text_to_filename(text) if (not fname) else fname, # list + # tensor, [batchsize, class_num] + "label_vector": "" if (label_vector is None) else label_vector.float(), + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + "mos": 3 + } + + for add_on in self.add_ons: + data.update(add_on(self.config, data, self.data[index])) + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def text_to_filename(self, text): + return text.replace(" ", "_").replace("'", "_").replace('"', "_") + + def get_dataset_root_path(self, dataset): + assert dataset in self.metadata_root.keys() + return self.metadata_root[dataset] + + def get_dataset_metadata_path(self, dataset, key): + # key: train, test, val, class_label_indices + try: + if dataset in self.metadata_root["metadata"]["path"].keys(): + return self.metadata_root["metadata"]["path"][dataset][key] + except: + raise ValueError( + 'Dataset %s does not metadata "%s" specified' % (dataset, key) + ) + + def __len__(self): + return len(self.data) + + def feature_extraction(self, index): + if index > len(self.data) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.data) - 1) + + # Read wave file and extract feature + while True: + try: + label_indices = np.zeros(self.label_num, dtype=np.float32) + datum = self.data[index] + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(datum["wav"]) + mix_datum = None + if self.label_num > 0 and "labels" in datum.keys(): + for label_str in datum["labels"].split(","): + label_indices[int(self.index_dict[label_str])] = 1.0 + + # If the key "label" is not in the metadata, return all zero vector + label_indices = torch.FloatTensor(label_indices) + break + except Exception as e: + index = (index + 1) % len(self.data) + print( + "Error encounter during audio feature extraction: ", e, datum["wav"] + ) + continue + + # The filename of the wav file + fname = datum["wav"] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + + return ( + fname, + waveform, + stft, + log_mel_spec, + label_indices, + (datum, mix_datum), + random_start, + ) + + # def augmentation(self, log_mel_spec): + # assert torch.min(log_mel_spec) < 0 + # log_mel_spec = log_mel_spec.exp() + + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # # this is just to 
satisfy new torchaudio version. + # log_mel_spec = log_mel_spec.unsqueeze(0) + # if self.freqm != 0: + # log_mel_spec = self.frequency_masking(log_mel_spec, self.freqm) + # if self.timem != 0: + # log_mel_spec = self.time_masking( + # log_mel_spec, self.timem) # self.timem=0 + + # log_mel_spec = (log_mel_spec + 1e-7).log() + # # squeeze back + # log_mel_spec = log_mel_spec.squeeze(0) + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # return log_mel_spec + + def build_setting_parameters(self): + # Read from the json config + self.melbins = self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + if "train" not in self.split: + self.mixup = 0.0 + # self.freqm = 0 + # self.timem = 0 + + def _relative_path_to_absolute_path(self, metadata, dataset_name): + root_path = self.get_dataset_root_path(dataset_name) + for i in range(len(metadata["data"])): + assert "wav" in metadata["data"][i].keys(), metadata["data"][i] + assert metadata["data"][i]["wav"][0] != "/", ( + "The dataset metadata should only contain relative path to the audio file: " + + str(metadata["data"][i]["wav"]) + ) + metadata["data"][i]["wav"] = os.path.join( + root_path, metadata["data"][i]["wav"] + ) + return metadata + + def build_dataset(self): + self.data = [] + print("Build dataset split %s from %s" % (self.split, self.dataset_name)) + if type(self.dataset_name) is str: + data_json = load_json( + self.get_dataset_metadata_path(self.dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, self.dataset_name + ) + self.data = data_json["data"] + elif type(self.dataset_name) is list: + for dataset_name in self.dataset_name: + data_json = load_json( + self.get_dataset_metadata_path(dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, dataset_name + ) + self.data += data_json["data"] + else: + raise Exception("Invalid data format") + print("Data size: {}".format(len(self.data))) + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + 
self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + # self.stft_transform = torchaudio.transforms.Spectrogram( + # n_fft=1024, hop_length=160 + # ) + # self.melscale_transform = torchaudio.transforms.MelScale( + # sample_rate=16000, n_stft=1024 // 2 + 1, n_mels=64 + # ) + + def build_id_to_label(self): + id2label = {} + id2num = {} + num2label = {} + class_label_indices_path = self.get_dataset_metadata_path( + dataset=self.config["data"]["class_label_indices"], + key="class_label_indices", + ) + if class_label_indices_path is not None: + df = pd.read_csv(class_label_indices_path) + for _, row in df.iterrows(): + index, mid, display_name = row["index"], row["mid"], row["display_name"] + id2label[mid] = display_name + id2num[mid] = index + num2label[index] = display_name + self.id2label, self.index_dict, self.num2label = id2label, id2num, num2label + else: + self.id2label, self.index_dict, self.num2label = {}, {}, {} + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + return waveform + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, filename): + # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + waveform, sr = torchaudio.load(filename) + + waveform, random_start = self.random_segment_wav( + 
waveform, target_length=int(sr * self.duration) + ) + + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def read_audio_file(self, filename, filename2=None): + if os.path.exists(filename): + waveform, random_start = self.read_wav_file(filename) + else: + print( + 'Non-fatal Warning [dataset.py]: The wav path "', + filename, + '" is not find in the metadata. Use empty waveform instead. This is normal in the inference process.', + ) + target_length = int(self.sampling_rate * self.duration) + waveform = torch.zeros((1, target_length)) + random_start = 0 + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + if not self.waveform_only: + log_mel_spec, stft = self.wav_feature_extraction(waveform) + else: + # Load waveform data only + # Use zero array to keep the format unified + log_mel_spec, stft = None, None + + return log_mel_spec, stft, waveform, random_start + + def get_sample_text_caption(self, datum, mix_datum, label_indices): + text = self.label_indices_to_text(datum, label_indices) + if mix_datum is not None: + text += " " + self.label_indices_to_text(mix_datum, label_indices) + return text + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + + if self.mel_fmax not in self.mel_basis: + mel = librosa_mel_fn( + self.sampling_rate, + self.filter_length, + self.n_mel, + self.mel_fmin, + self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker > 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + # @profile + # def wav_feature_extraction_torchaudio(self, waveform): + # waveform = waveform[0, ...] 
+ # waveform = torch.FloatTensor(waveform) + + # stft = self.stft_transform(waveform) + # mel_spec = self.melscale_transform(stft) + # log_mel_spec = torch.log(mel_spec + 1e-7) + + # log_mel_spec = torch.FloatTensor(log_mel_spec.T) + # stft = torch.FloatTensor(stft.T) + + # log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + # return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + import ipdb + + ipdb.set_trace() + # pass diff --git 
a/qa_mdt/audioldm_train/utilities/data/dataset_original_mos4.py b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos4.py new file mode 100644 index 0000000000000000000000000000000000000000..adb991cf784f02c8bd4350f9dc0fd3963409632e --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos4.py @@ -0,0 +1,635 @@ +import sys + +sys.path.append("src") +import os +import pandas as pd +import yaml +import audioldm_train.utilities.audio as Audio +from audioldm_train.utilities.tools import load_json +from audioldm_train.dataset_plugin import * +from librosa.filters import mel as librosa_mel_fn + +import random +from torch.utils.data import Dataset +import torch.nn.functional +import torch +import numpy as np +import torchaudio +import json + + +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config=None, + split="train", + waveform_only=False, + add_ons=[], + dataset_json=None, + ): + """ + Dataset that manages audio recordings + :param audio_conf: Dictionary containing the audio loading and preprocessing settings + :param dataset_json_file + """ + self.config = config + self.split = split + self.pad_wav_start_sample = 0 # If none, random choose + self.trim_wav = False + self.waveform_only = waveform_only + self.add_ons = [eval(x) for x in add_ons] + print("Add-ons:", self.add_ons) + + self.build_setting_parameters() + + # For an external dataset + if dataset_json is not None: + self.data = dataset_json["data"] + self.id2label, self.index_dict, self.num2label = {}, {}, {} + else: + self.metadata_root = load_json(self.config["metadata_root"]) + self.dataset_name = self.config["data"][self.split] + assert split in self.config["data"].keys(), ( + "The dataset split %s you specified is not present in the config. 
You can choose from %s" + % (split, self.config["data"].keys()) + ) + self.build_dataset() + self.build_id_to_label() + + self.build_dsp() + self.label_num = len(self.index_dict) + print("Dataset initialize finished") + + def __getitem__(self, index): + ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, # the one-hot representation of the audio class + # the metadata of the sampled audio file and the mixup audio file (if exist) + (datum, mix_datum), + random_start, + ) = self.feature_extraction(index) + text = self.get_sample_text_caption(datum, mix_datum, label_vector) + + data = { + "text": text, # list + "fname": self.text_to_filename(text) if (not fname) else fname, # list + # tensor, [batchsize, class_num] + "label_vector": "" if (label_vector is None) else label_vector.float(), + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + "mos": 4 + } + + for add_on in self.add_ons: + data.update(add_on(self.config, data, self.data[index])) + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def text_to_filename(self, text): + return text.replace(" ", "_").replace("'", "_").replace('"', "_") + + def get_dataset_root_path(self, dataset): + assert dataset in self.metadata_root.keys() + return self.metadata_root[dataset] + + def get_dataset_metadata_path(self, dataset, key): + # key: train, test, val, class_label_indices + try: + if dataset in self.metadata_root["metadata"]["path"].keys(): + return self.metadata_root["metadata"]["path"][dataset][key] + except: + raise ValueError( + 'Dataset %s does not metadata "%s" specified' % (dataset, key) + ) + + def __len__(self): + return len(self.data) + + def feature_extraction(self, index): + if index > len(self.data) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.data) - 1) + + # Read wave file and extract feature + while True: + try: + label_indices = np.zeros(self.label_num, dtype=np.float32) + datum = self.data[index] + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(datum["wav"]) + mix_datum = None + if self.label_num > 0 and "labels" in datum.keys(): + for label_str in datum["labels"].split(","): + label_indices[int(self.index_dict[label_str])] = 1.0 + + # If the key "label" is not in the metadata, return all zero vector + label_indices = torch.FloatTensor(label_indices) + break + except Exception as e: + index = (index + 1) % len(self.data) + print( + "Error encounter during audio feature extraction: ", e, datum["wav"] + ) + continue + + # The filename of the wav file + fname = datum["wav"] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + + return ( + fname, + waveform, + stft, + log_mel_spec, + label_indices, + (datum, mix_datum), + random_start, + ) + + # def augmentation(self, log_mel_spec): + # assert torch.min(log_mel_spec) < 0 + # log_mel_spec = log_mel_spec.exp() + + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # # this is just to 
satisfy new torchaudio version. + # log_mel_spec = log_mel_spec.unsqueeze(0) + # if self.freqm != 0: + # log_mel_spec = self.frequency_masking(log_mel_spec, self.freqm) + # if self.timem != 0: + # log_mel_spec = self.time_masking( + # log_mel_spec, self.timem) # self.timem=0 + + # log_mel_spec = (log_mel_spec + 1e-7).log() + # # squeeze back + # log_mel_spec = log_mel_spec.squeeze(0) + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # return log_mel_spec + + def build_setting_parameters(self): + # Read from the json config + self.melbins = self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + if "train" not in self.split: + self.mixup = 0.0 + # self.freqm = 0 + # self.timem = 0 + + def _relative_path_to_absolute_path(self, metadata, dataset_name): + root_path = self.get_dataset_root_path(dataset_name) + for i in range(len(metadata["data"])): + assert "wav" in metadata["data"][i].keys(), metadata["data"][i] + assert metadata["data"][i]["wav"][0] != "/", ( + "The dataset metadata should only contain relative path to the audio file: " + + str(metadata["data"][i]["wav"]) + ) + metadata["data"][i]["wav"] = os.path.join( + root_path, metadata["data"][i]["wav"] + ) + return metadata + + def build_dataset(self): + self.data = [] + print("Build dataset split %s from %s" % (self.split, self.dataset_name)) + if type(self.dataset_name) is str: + data_json = load_json( + self.get_dataset_metadata_path(self.dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, self.dataset_name + ) + self.data = data_json["data"] + elif type(self.dataset_name) is list: + for dataset_name in self.dataset_name: + data_json = load_json( + self.get_dataset_metadata_path(dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, dataset_name + ) + self.data += data_json["data"] + else: + raise Exception("Invalid data format") + print("Data size: {}".format(len(self.data))) + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + 
self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + # self.stft_transform = torchaudio.transforms.Spectrogram( + # n_fft=1024, hop_length=160 + # ) + # self.melscale_transform = torchaudio.transforms.MelScale( + # sample_rate=16000, n_stft=1024 // 2 + 1, n_mels=64 + # ) + + def build_id_to_label(self): + id2label = {} + id2num = {} + num2label = {} + class_label_indices_path = self.get_dataset_metadata_path( + dataset=self.config["data"]["class_label_indices"], + key="class_label_indices", + ) + if class_label_indices_path is not None: + df = pd.read_csv(class_label_indices_path) + for _, row in df.iterrows(): + index, mid, display_name = row["index"], row["mid"], row["display_name"] + id2label[mid] = display_name + id2num[mid] = index + num2label[index] = display_name + self.id2label, self.index_dict, self.num2label = id2label, id2num, num2label + else: + self.id2label, self.index_dict, self.num2label = {}, {}, {} + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + return waveform + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, filename): + # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + waveform, sr = torchaudio.load(filename) + + waveform, random_start = self.random_segment_wav( + 
waveform, target_length=int(sr * self.duration) + ) + + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def read_audio_file(self, filename, filename2=None): + if os.path.exists(filename): + waveform, random_start = self.read_wav_file(filename) + else: + print( + 'Non-fatal Warning [dataset.py]: The wav path "', + filename, + '" is not find in the metadata. Use empty waveform instead. This is normal in the inference process.', + ) + target_length = int(self.sampling_rate * self.duration) + waveform = torch.zeros((1, target_length)) + random_start = 0 + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + if not self.waveform_only: + log_mel_spec, stft = self.wav_feature_extraction(waveform) + else: + # Load waveform data only + # Use zero array to keep the format unified + log_mel_spec, stft = None, None + + return log_mel_spec, stft, waveform, random_start + + def get_sample_text_caption(self, datum, mix_datum, label_indices): + text = self.label_indices_to_text(datum, label_indices) + if mix_datum is not None: + text += " " + self.label_indices_to_text(mix_datum, label_indices) + return text + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + + if self.mel_fmax not in self.mel_basis: + mel = librosa_mel_fn( + self.sampling_rate, + self.filter_length, + self.n_mel, + self.mel_fmin, + self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker > 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + # @profile + # def wav_feature_extraction_torchaudio(self, waveform): + # waveform = waveform[0, ...] 
+ # waveform = torch.FloatTensor(waveform) + + # stft = self.stft_transform(waveform) + # mel_spec = self.melscale_transform(stft) + # log_mel_spec = torch.log(mel_spec + 1e-7) + + # log_mel_spec = torch.FloatTensor(log_mel_spec.T) + # stft = torch.FloatTensor(stft.T) + + # log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + # return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + import ipdb + + ipdb.set_trace() + # pass diff --git 
a/qa_mdt/audioldm_train/utilities/data/dataset_original_mos5.py b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos5.py new file mode 100644 index 0000000000000000000000000000000000000000..dbb7f11675f4e9147a2b04a19e7f17079676f179 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/dataset_original_mos5.py @@ -0,0 +1,635 @@ +import sys + +sys.path.append("src") +import os +import pandas as pd +import yaml +import qa_mdt.audioldm_train.utilities.audio as Audio +from qa_mdt.audioldm_train.utilities.tools import load_json +from qa_mdt.audioldm_train.dataset_plugin import * +from librosa.filters import mel as librosa_mel_fn + +import random +from torch.utils.data import Dataset +import torch.nn.functional +import torch +import numpy as np +import torchaudio +import json + + +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config=None, + split="train", + waveform_only=False, + add_ons=[], + dataset_json=None, + ): + """ + Dataset that manages audio recordings + :param audio_conf: Dictionary containing the audio loading and preprocessing settings + :param dataset_json_file + """ + self.config = config + self.split = split + self.pad_wav_start_sample = 0 # If none, random choose + self.trim_wav = False + self.waveform_only = waveform_only + self.add_ons = [eval(x) for x in add_ons] + print("Add-ons:", self.add_ons) + + self.build_setting_parameters() + + # For an external dataset + if dataset_json is not None: + self.data = dataset_json["data"] + self.id2label, self.index_dict, self.num2label = {}, {}, {} + else: + self.metadata_root = load_json(self.config["metadata_root"]) + self.dataset_name = self.config["data"][self.split] + assert split in self.config["data"].keys(), ( + "The dataset split %s you specified is not present in the config. 
You can choose from %s" + % (split, self.config["data"].keys()) + ) + self.build_dataset() + self.build_id_to_label() + + self.build_dsp() + self.label_num = len(self.index_dict) + print("Dataset initialize finished") + + def __getitem__(self, index): + ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, # the one-hot representation of the audio class + # the metadata of the sampled audio file and the mixup audio file (if exist) + (datum, mix_datum), + random_start, + ) = self.feature_extraction(index) + text = self.get_sample_text_caption(datum, mix_datum, label_vector) + + data = { + "text": text, # list + "fname": self.text_to_filename(text) if (not fname) else fname, # list + # tensor, [batchsize, class_num] + "label_vector": "" if (label_vector is None) else label_vector.float(), + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + "mos": 5 + } + + for add_on in self.add_ons: + data.update(add_on(self.config, data, self.data[index])) + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def text_to_filename(self, text): + return text.replace(" ", "_").replace("'", "_").replace('"', "_") + + def get_dataset_root_path(self, dataset): + assert dataset in self.metadata_root.keys() + return self.metadata_root[dataset] + + def get_dataset_metadata_path(self, dataset, key): + # key: train, test, val, class_label_indices + try: + if dataset in self.metadata_root["metadata"]["path"].keys(): + return self.metadata_root["metadata"]["path"][dataset][key] + except: + raise ValueError( + 'Dataset %s does not metadata "%s" specified' % (dataset, key) + ) + + def __len__(self): + return len(self.data) + + def feature_extraction(self, index): + if index > len(self.data) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.data) - 1) + + # Read wave file and extract feature + while True: + try: + label_indices = np.zeros(self.label_num, dtype=np.float32) + datum = self.data[index] + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(datum["wav"]) + mix_datum = None + if self.label_num > 0 and "labels" in datum.keys(): + for label_str in datum["labels"].split(","): + label_indices[int(self.index_dict[label_str])] = 1.0 + + # If the key "label" is not in the metadata, return all zero vector + label_indices = torch.FloatTensor(label_indices) + break + except Exception as e: + index = (index + 1) % len(self.data) + print( + "Error encounter during audio feature extraction: ", e, datum["wav"] + ) + continue + + # The filename of the wav file + fname = datum["wav"] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + + return ( + fname, + waveform, + stft, + log_mel_spec, + label_indices, + (datum, mix_datum), + random_start, + ) + + # def augmentation(self, log_mel_spec): + # assert torch.min(log_mel_spec) < 0 + # log_mel_spec = log_mel_spec.exp() + + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # # this is just to 
satisfy new torchaudio version. + # log_mel_spec = log_mel_spec.unsqueeze(0) + # if self.freqm != 0: + # log_mel_spec = self.frequency_masking(log_mel_spec, self.freqm) + # if self.timem != 0: + # log_mel_spec = self.time_masking( + # log_mel_spec, self.timem) # self.timem=0 + + # log_mel_spec = (log_mel_spec + 1e-7).log() + # # squeeze back + # log_mel_spec = log_mel_spec.squeeze(0) + # log_mel_spec = torch.transpose(log_mel_spec, 0, 1) + # return log_mel_spec + + def build_setting_parameters(self): + # Read from the json config + self.melbins = self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + if "train" not in self.split: + self.mixup = 0.0 + # self.freqm = 0 + # self.timem = 0 + + def _relative_path_to_absolute_path(self, metadata, dataset_name): + root_path = self.get_dataset_root_path(dataset_name) + for i in range(len(metadata["data"])): + assert "wav" in metadata["data"][i].keys(), metadata["data"][i] + assert metadata["data"][i]["wav"][0] != "/", ( + "The dataset metadata should only contain relative path to the audio file: " + + str(metadata["data"][i]["wav"]) + ) + metadata["data"][i]["wav"] = os.path.join( + root_path, metadata["data"][i]["wav"] + ) + return metadata + + def build_dataset(self): + self.data = [] + print("Build dataset split %s from %s" % (self.split, self.dataset_name)) + if type(self.dataset_name) is str: + data_json = load_json( + self.get_dataset_metadata_path(self.dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, self.dataset_name + ) + self.data = data_json["data"] + elif type(self.dataset_name) is list: + for dataset_name in self.dataset_name: + data_json = load_json( + self.get_dataset_metadata_path(dataset_name, key=self.split) + ) + data_json = self._relative_path_to_absolute_path( + data_json, dataset_name + ) + self.data += data_json["data"] + else: + raise Exception("Invalid data format") + print("Data size: {}".format(len(self.data))) + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + 
self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + # self.stft_transform = torchaudio.transforms.Spectrogram( + # n_fft=1024, hop_length=160 + # ) + # self.melscale_transform = torchaudio.transforms.MelScale( + # sample_rate=16000, n_stft=1024 // 2 + 1, n_mels=64 + # ) + + def build_id_to_label(self): + id2label = {} + id2num = {} + num2label = {} + class_label_indices_path = self.get_dataset_metadata_path( + dataset=self.config["data"]["class_label_indices"], + key="class_label_indices", + ) + if class_label_indices_path is not None: + df = pd.read_csv(class_label_indices_path) + for _, row in df.iterrows(): + index, mid, display_name = row["index"], row["mid"], row["display_name"] + id2label[mid] = display_name + id2num[mid] = index + num2label[index] = display_name + self.id2label, self.index_dict, self.num2label = id2label, id2num, num2label + else: + self.id2label, self.index_dict, self.num2label = {}, {}, {} + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + return waveform + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + waveform_length = waveform.shape[-1] + assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, filename): + # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + waveform, sr = torchaudio.load(filename) + + waveform, random_start = self.random_segment_wav( + 
waveform, target_length=int(sr * self.duration) + ) + + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def read_audio_file(self, filename, filename2=None): + if os.path.exists(filename): + waveform, random_start = self.read_wav_file(filename) + else: + print( + 'Non-fatal Warning [dataset.py]: The wav path "', + filename, + '" is not find in the metadata. Use empty waveform instead. This is normal in the inference process.', + ) + target_length = int(self.sampling_rate * self.duration) + waveform = torch.zeros((1, target_length)) + random_start = 0 + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + if not self.waveform_only: + log_mel_spec, stft = self.wav_feature_extraction(waveform) + else: + # Load waveform data only + # Use zero array to keep the format unified + log_mel_spec, stft = None, None + + return log_mel_spec, stft, waveform, random_start + + def get_sample_text_caption(self, datum, mix_datum, label_indices): + text = self.label_indices_to_text(datum, label_indices) + if mix_datum is not None: + text += " " + self.label_indices_to_text(mix_datum, label_indices) + return text + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + + if self.mel_fmax not in self.mel_basis: + mel = librosa_mel_fn( + self.sampling_rate, + self.filter_length, + self.n_mel, + self.mel_fmin, + self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker > 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + # @profile + # def wav_feature_extraction_torchaudio(self, waveform): + # waveform = waveform[0, ...] 
+ # waveform = torch.FloatTensor(waveform) + + # stft = self.stft_transform(waveform) + # mel_spec = self.melscale_transform(stft) + # log_mel_spec = torch.log(mel_spec + 1e-7) + + # log_mel_spec = torch.FloatTensor(log_mel_spec.T) + # stft = torch.FloatTensor(stft.T) + + # log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + # return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + import ipdb + + ipdb.set_trace() + # pass diff --git 
a/qa_mdt/audioldm_train/utilities/data/datum_all_pb2.py b/qa_mdt/audioldm_train/utilities/data/datum_all_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..4c13ca0322e978be2450d92fb588e4e799f34a06 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/datum_all_pb2.py @@ -0,0 +1,90 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: datum_all.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='datum_all.proto', + package='', + syntax='proto2', + serialized_pb=_b('\n\x0f\x64\x61tum_all.proto\"_\n\tDatum_all\x12\x10\n\x08wav_file\x18\x01 \x02(\x0c\x12\x18\n\x10\x63\x61ption_original\x18\x02 \x02(\t\x12\x19\n\x11\x63\x61ption_generated\x18\x03 \x03(\t\x12\x0b\n\x03mos\x18\x04 \x02(\x02') +) + + + + +_DATUM_ALL = _descriptor.Descriptor( + name='Datum_all', + full_name='Datum_all', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='wav_file', full_name='Datum_all.wav_file', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='caption_original', full_name='Datum_all.caption_original', index=1, + number=2, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='caption_generated', full_name='Datum_all.caption_generated', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mos', full_name='Datum_all.mos', index=3, + number=4, type=2, cpp_type=6, label=2, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=19, + serialized_end=114, +) + +DESCRIPTOR.message_types_by_name['Datum_all'] = _DATUM_ALL +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Datum_all = _reflection.GeneratedProtocolMessageType('Datum_all', (_message.Message,), dict( + DESCRIPTOR = _DATUM_ALL, + __module__ = 'datum_all_pb2' + # @@protoc_insertion_point(class_scope:Datum_all) + )) +_sym_db.RegisterMessage(Datum_all) + + +# @@protoc_insertion_point(module_scope) diff --git a/qa_mdt/audioldm_train/utilities/data/datum_mos_pb2.py b/qa_mdt/audioldm_train/utilities/data/datum_mos_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..96cf00121b459855951b2acb0f3157c545ee8a12 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/datum_mos_pb2.py @@ -0,0 +1,69 @@ +# Generated by the 
protocol buffer compiler. DO NOT EDIT! +# source: datum_mos.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='datum_mos.proto', + package='', + syntax='proto2', + serialized_pb=_b('\n\x0f\x64\x61tum_mos.proto\"\x18\n\tDatum_mos\x12\x0b\n\x03mos\x18\x04 \x02(\x02') +) + + + + +_DATUM_MOS = _descriptor.Descriptor( + name='Datum_mos', + full_name='Datum_mos', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='mos', full_name='Datum_mos.mos', index=0, + number=4, type=2, cpp_type=6, label=2, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=19, + serialized_end=43, +) + +DESCRIPTOR.message_types_by_name['Datum_mos'] = _DATUM_MOS +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Datum_mos = _reflection.GeneratedProtocolMessageType('Datum_mos', (_message.Message,), dict( + DESCRIPTOR = _DATUM_MOS, + __module__ = 'datum_mos_pb2' + # @@protoc_insertion_point(class_scope:Datum_mos) + )) +_sym_db.RegisterMessage(Datum_mos) + + +# @@protoc_insertion_point(module_scope) diff --git a/qa_mdt/audioldm_train/utilities/data/datum_wav_pb2.py b/qa_mdt/audioldm_train/utilities/data/datum_wav_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..132dc960dfc92c9be523db2011ad19b3d5fb8479 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/datum_wav_pb2.py @@ -0,0 +1,125 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: datum_wav.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='datum_wav.proto', + package='', + syntax='proto2', + serialized_pb=_b('\n\x0f\x64\x61tum_wav.proto\"\xcc\x01\n\tDatum_wav\x12\x10\n\x08wav_file\x18\x01 \x02(\x0c\x12\x0c\n\x04tags\x18\x02 \x01(\x0c\x12\x17\n\x0f\x63\x61ption_writing\x18\x03 \x01(\x0c\x12\x17\n\x0f\x63\x61ption_summary\x18\x04 \x01(\x0c\x12\x1a\n\x12\x63\x61ption_paraphrase\x18\x05 \x01(\x0c\x12$\n\x1c\x63\x61ption_attribute_prediction\x18\x06 \x01(\x0c\x12\r\n\x05title\x18\x07 \x01(\x0c\x12\x0e\n\x06\x61uthor\x18\x08 \x01(\x0c\x12\x0c\n\x04year\x18\t \x01(\x05') +) + + + + +_DATUM_WAV = _descriptor.Descriptor( + name='Datum_wav', + full_name='Datum_wav', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='wav_file', full_name='Datum_wav.wav_file', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tags', full_name='Datum_wav.tags', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='caption_writing', full_name='Datum_wav.caption_writing', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='caption_summary', full_name='Datum_wav.caption_summary', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='caption_paraphrase', full_name='Datum_wav.caption_paraphrase', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='caption_attribute_prediction', full_name='Datum_wav.caption_attribute_prediction', index=5, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='title', full_name='Datum_wav.title', index=6, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='author', full_name='Datum_wav.author', index=7, + number=8, type=12, cpp_type=9, label=1, + 
has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='year', full_name='Datum_wav.year', index=8, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=20, + serialized_end=224, +) + +DESCRIPTOR.message_types_by_name['Datum_wav'] = _DATUM_WAV +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Datum_wav = _reflection.GeneratedProtocolMessageType('Datum_wav', (_message.Message,), dict( + DESCRIPTOR = _DATUM_WAV, + __module__ = 'datum_wav_pb2' + # @@protoc_insertion_point(class_scope:Datum_wav) + )) +_sym_db.RegisterMessage(Datum_wav) + + +# @@protoc_insertion_point(module_scope) diff --git a/qa_mdt/audioldm_train/utilities/data/hhhh.py b/qa_mdt/audioldm_train/utilities/data/hhhh.py new file mode 100644 index 0000000000000000000000000000000000000000..bacfa59c93d27e9b37bb77d8b934fad70f169c0f --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/data/hhhh.py @@ -0,0 +1,741 @@ +import sys + +sys.path.append("src") +import os +import math +import pandas as pd +import zlib +import yaml +import qa_mdt.audioldm_train.utilities.audio as Audio +from qa_mdt.audioldm_train.utilities.tools import load_json +from qa_mdt.audioldm_train.dataset_plugin import * +import librosa +from librosa.filters import mel as librosa_mel_fn +import threading + +import random +import lmdb +from torch.utils.data import Dataset +import torch.nn.functional +import torch +from pydub import AudioSegment +import numpy as np +import torchaudio +import io +import json +from .datum_all_pb2 import Datum_all as Datum_lmdb +from .datum_mos_pb2 import Datum_mos as Datum_lmdb_mos +def dynamic_range_compression_torch(x, C=1, clip_val=1e-5): + return torch.log(torch.clamp(x, min=clip_val) * C) + +def dynamic_range_decompression_torch(x, C=1): + return torch.exp(x) / C + + +def spectral_normalize_torch(magnitudes): + output = dynamic_range_compression_torch(magnitudes) + return output + + +def spectral_de_normalize_torch(magnitudes): + output = dynamic_range_decompression_torch(magnitudes) + return output + + +class AudioDataset(Dataset): + def __init__( + self, + config, + lmdb_path, + key_path, + mos_path, + lock=True + ): + self.config = config + # self.lock = threading.Lock() + """ + Dataset that manages audio recordings + """ + self.pad_wav_start_sample = 0 + self.trim_wav = False + self.build_setting_parameters() + self.build_dsp() + + self.lmdb_path = [_.encode("utf-8") for _ in lmdb_path] + self.lmdb_env = [lmdb.open(_, readonly=True, lock=False) for _ in self.lmdb_path] + self.mos_txn_env = lmdb.open(mos_path, readonly=True, lock=False) + self.key_path = [_.encode("utf-8") for id, _ in enumerate(key_path)] + self.keys = [] + for _ in range(len(key_path)): + with open(self.key_path[_]) as f: + for line in f: + key = line.strip() + self.keys.append((_, key.split()[0].encode('utf-8'))) + # only for test !!! + # if _ > 20: + # break + # self.keys : [(id, key), ..., ...] 
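+                # Each entry of self.keys is an (lmdb_index, key) pair: lmdb_index selects the shard in self.lmdb_env and key addresses the datum inside that shard, so one dataset can span several LMDB files.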
+ + # self.lmdb_env = lmdb.open(self.lmdb_path, readonly=True, lock=False) + # self.txn = self.lmdb_env.begin() + print(f"Dataset initialize finished, dataset_length : {len(self.keys)}") + print(f"Initialize of filter start: ") + with open('filter_all.lst', 'r') as f: + self.filter = {} + for _ in f.readlines(): + self.filter[_.strip()] = 1 + print(f"Initialize of filter finished") + #print(f"Initialize of fusion start: ") + #with open('new_file.txt', 'r') as f: + # self.refined_caption = {} + # for _ in f.readlines(): + # try: + # a, b = _.strip().split("@") + # b = b.strip('"\n') + # b = b.replace('\n', ',') + # self.refined_caption[a] = b + # except: + # pass + #print(f"Initialize of fusion finished") + + def __getitem__(self, index): + ( + + # name of file, while we use dir of fine here + fname, + + # wav of sr = 16000 + waveform, + + # mel + stft, + + # log mel + log_mel_spec, + + label_vector, + + # donot start at the begining + random_start, + + # dict or single string which describes the wav file + caption, + + # mos score for single music clip + mos + + ) = self.feature_extraction(index) + + data = { + "text": [caption], # list ... dict ? + "fname": [fname], # list + # tensor, [batchsize, 1, samples_num] + "waveform": "" if (waveform is None) else waveform.float(), + # tensor, [batchsize, t-steps, f-bins] + "stft": "" if (stft is None) else stft.float(), + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "" if (log_mel_spec is None) else log_mel_spec.float(), + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": random_start, + "label_vector": label_vector, + "mos":mos + } + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def __len__(self): + return len(self.keys) + + def feature_extraction(self, index): + if index > len(self.keys) - 1: + print( + "The index of the dataloader is out of range: %s/%s" + % (index, len(self.data)) + ) + index = random.randint(0, len(self.keys) - 1) + waveform = np.array([]) + tyu = 0 + flag = 0 + last_index = index + while(flag == 0): + id_, k = self.keys[index] + try: + if self.filter[k.decode()] == 1: + index = random.randint(0, len(self.keys) - 1) + else: + flag = 1 + except: + flag = 1 + index = last_index + while len(waveform) < 1000: + + id_, k = self.keys[index] + with self.lmdb_env[id_].begin(write=False) as txn: + cursor = txn.cursor() + try: + cursor.set_key(k) + + datum_tmp = Datum_lmdb() + datum_tmp.ParseFromString(cursor.value()) + zobj = zlib.decompressobj() # obj for decompressing data streams that won’t fit into memory at once. 
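+                    # datum_tmp.wav_file stores zlib-compressed float32 PCM; decompress it
+                    # before reinterpreting the bytes with np.frombuffer below.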
+ decompressed_bytes = zobj.decompress(datum_tmp.wav_file) + + # decompressed_bytes = zlib.decompress(file) + waveform = np.frombuffer(decompressed_bytes, dtype=np.float32) + except: + tyu += 1 + pass + tyu += 1 + last_index = index + index = random.randint(0, len(self.keys) - 1) + if tyu > 1: + print('error') + index = last_index + flag = 0 + val = 623787092.84794 + while (flag == 0): + id_, k = self.keys[index] + with self.mos_txn_env.begin(write=False) as txn: + cursor = txn.cursor() + try: + if cursor.set_key(k): + datum_mos = Datum_lmdb_mos() + datum_mos.ParseFromString(cursor.value()) + mos = datum_mos.mos + else: + mos = -1.0 + except : + mos = -1.0 + if 'pixa_' in k.decode() or 'ifly_' in k.decode(): + mos = 5.0 + if np.random.rand() < math.exp(5.0 * mos) / val: + flag = 1 + last_index = index + index = random.randint(0, len(self.keys) - 1) + index = last_index + caption_original = datum_tmp.caption_original + try: + caption_generated = datum_tmp.caption_generated[0] + except: + caption_generated = 'None' + assert len(caption_generated) > 1 + caption_original = caption_original.lower() + caption_generated = caption_generated.lower() + caption = 'music' + if ("msd_" in k.decode()): + caption = caption_generated if caption_original == "none" else caption_original + elif ("audioset_" in k.decode()): + caption = caption_generated if caption_generated != "none" else caption_original + elif ("mtt_" in k.decode()): + caption = caption_generated if caption_original == "none" else caption_original + elif ("fma_" in k.decode()): + caption = caption_generated if caption_generated != "none" else caption_original + elif ("pixa_" in k.decode() or "ifly_" in k.decode()): + caption = caption_generated if caption_generated != "none" else caption_original + else: + caption = caption_original + prefix = 'medium quality' + if ("pixa_" in k.decode() or "ifly_" in k.decode()): + if caption == 'none': + prefix = 'high quality' + caption = '' + else: + prefix = 'high quality' + mos = 5.00 + else: + mos = float(mos) + if mos > 3.55 and mos < 4.05: + prefix = "medium quality" + elif mos >= 4.05: + prefix = "high quality" + elif mos <= 3.55: + prefix = "low quality" + else: + print(f'mos score for key : {k.decode()} miss, please check') + #if 'low quality' or 'quality is low' in caption: + # prefix = 'low quality' + caption = prefix + ', ' + caption + miu = 3.80 + sigma = 0.20 + if miu - 2 * sigma <= mos < miu - sigma: + vq_mos = 2 + elif miu - sigma <= mos < miu + sigma: + vq_mos = 3 + elif miu + sigma <= mos < miu + 2 * sigma: + vq_mos = 4 + elif mos >= miu + 2 * sigma: + vq_mos = 5 + else: + vq_mos = 1 + """ + tags = datum_tmp.tags.decode() + caption_writing = datum_tmp.caption_writing.decode() + caption_paraphrase = datum_tmp.caption_paraphrase.decode() + caption_attribute_prediction = datum_tmp.caption_attribute_prediction.decode() + caption_summary = datum_tmp.caption_summary.decode() + """ + ( + log_mel_spec, + stft, + waveform, + random_start, + ) = self.read_audio_file(waveform, k.decode()) + fname = self.keys[index] + # t_step = log_mel_spec.size(0) + # waveform = torch.FloatTensor(waveform[..., : int(self.hopsize * t_step)]) + waveform = torch.FloatTensor(waveform) + label_vector = torch.FloatTensor(np.zeros(0, dtype=np.float32)) + # finally: + # self.lock.release() + # import pdb + # pdb.set_trace() + return ( + fname, + waveform, + stft, + log_mel_spec, + label_vector, + random_start, + caption, + vq_mos + ) + + def build_setting_parameters(self): + # Read from the json config + self.melbins = 
self.config["preprocessing"]["mel"]["n_mel_channels"] + # self.freqm = self.config["preprocessing"]["mel"]["freqm"] + # self.timem = self.config["preprocessing"]["mel"]["timem"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.hopsize = self.config["preprocessing"]["stft"]["hop_length"] + self.duration = self.config["preprocessing"]["audio"]["duration"] + self.target_length = int(self.duration * self.sampling_rate / self.hopsize) + + self.mixup = self.config["augmentation"]["mixup"] + + # Calculate parameter derivations + # self.waveform_sample_length = int(self.target_length * self.hopsize) + + # if (self.config["balance_sampling_weight"]): + # self.samples_weight = np.loadtxt( + # self.config["balance_sampling_weight"], delimiter="," + # ) + + # if "train" not in self.split: + # self.mixup = 0.0 + # # self.freqm = 0 + # # self.timem = 0 + + def build_dsp(self): + self.mel_basis = {} + self.hann_window = {} + + self.filter_length = self.config["preprocessing"]["stft"]["filter_length"] + self.hop_length = self.config["preprocessing"]["stft"]["hop_length"] + self.win_length = self.config["preprocessing"]["stft"]["win_length"] + self.n_mel = self.config["preprocessing"]["mel"]["n_mel_channels"] + self.sampling_rate = self.config["preprocessing"]["audio"]["sampling_rate"] + self.mel_fmin = self.config["preprocessing"]["mel"]["mel_fmin"] + self.mel_fmax = self.config["preprocessing"]["mel"]["mel_fmax"] + + self.STFT = Audio.stft.TacotronSTFT( + self.config["preprocessing"]["stft"]["filter_length"], + self.config["preprocessing"]["stft"]["hop_length"], + self.config["preprocessing"]["stft"]["win_length"], + self.config["preprocessing"]["mel"]["n_mel_channels"], + self.config["preprocessing"]["audio"]["sampling_rate"], + self.config["preprocessing"]["mel"]["mel_fmin"], + self.config["preprocessing"]["mel"]["mel_fmax"], + ) + + def resample(self, waveform, sr): + waveform = torchaudio.functional.resample(waveform, sr, self.sampling_rate) + # waveform = librosa.resample(waveform, sr, self.sampling_rate) + return waveform + + # if sr == 16000: + # return waveform + # if sr == 32000 and self.sampling_rate == 16000: + # waveform = waveform[::2] + # return waveform + # if sr == 48000 and self.sampling_rate == 16000: + # waveform = waveform[::3] + # return waveform + # else: + # raise ValueError( + # "We currently only support 16k audio generation. 
You need to resample you audio file to 16k, 32k, or 48k: %s, %s" + # % (sr, self.sampling_rate) + # ) + + def normalize_wav(self, waveform): + waveform = waveform - np.mean(waveform) + waveform = waveform / (np.max(np.abs(waveform)) + 1e-8) + return waveform * 0.5 # Manually limit the maximum amplitude into 0.5 + + def random_segment_wav(self, waveform, target_length): + waveform = torch.tensor(waveform) + waveform = waveform.unsqueeze(0) + waveform_length = waveform.shape[-1] + # assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + if waveform_length < 100: + waveform = torch.nn.functional.pad(waveform, (0, target_length - waveform_length)) + + # Too short + if (waveform_length - target_length) <= 0: + return waveform, 0 + + for i in range(10): + random_start = int(self.random_uniform(0, waveform_length - target_length)) + if torch.max( + torch.abs(waveform[:, random_start : random_start + target_length]) + > 1e-4 + ): + break + + return waveform[:, random_start : random_start + target_length], random_start + + def pad_wav(self, waveform, target_length): + # print(waveform) + # import pdb + # pdb.set_trace() + waveform_length = waveform.shape[-1] + # assert waveform_length > 100, "Waveform is too short, %s" % waveform_length + if waveform_length < 100: + waveform = torch.nn.functional.pad(waveform, (0, target_length - waveform_length)) + + if waveform_length == target_length: + return waveform + + # Pad + temp_wav = np.zeros((1, target_length), dtype=np.float32) + if self.pad_wav_start_sample is None: + rand_start = int(self.random_uniform(0, target_length - waveform_length)) + else: + rand_start = 0 + + temp_wav[:, rand_start : rand_start + waveform_length] = waveform + return temp_wav + + def trim_wav(self, waveform): + if np.max(np.abs(waveform)) < 0.0001: + return waveform + + def detect_leading_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = 0 + while start + chunk_size < waveform_length: + if np.max(np.abs(waveform[start : start + chunk_size])) < threshold: + start += chunk_size + else: + break + return start + + def detect_ending_silence(waveform, threshold=0.0001): + chunk_size = 1000 + waveform_length = waveform.shape[0] + start = waveform_length + while start - chunk_size > 0: + if np.max(np.abs(waveform[start - chunk_size : start])) < threshold: + start -= chunk_size + else: + break + if start == waveform_length: + return start + else: + return start + chunk_size + + start = detect_leading_silence(waveform) + end = detect_ending_silence(waveform) + + return waveform[start:end] + + def read_wav_file(self, file, k): + #zobj = zlib.decompressobj() # obj for decompressing data streams that won’t fit into memory at once. 
+ #decompressed_bytes = zobj.decompress(file) + + # decompressed_bytes = zlib.decompress(file) + #waveform = np.frombuffer(decompressed_bytes, dtype=np.float32) + waveform = file + # # waveform, sr = librosa.load(filename, sr=None, mono=True) # 4 times slower + # if "msd" in k or "fma" in k: + # try: + # waveform = torch.tensor([(np.array(file.get_array_of_samples(array_type_override='i')) / 2147483648)], dtype=torch.float32) + # except: + # waveform = torch.tensor([(np.array(file.get_array_of_samples(array_type_override='h')) / 32768)], dtype=torch.float32) + # else: + # waveform = torch.tensor([(np.array(file.get_array_of_samples(array_type_override='h')) / 32768)], dtype=torch.float32) + # # else: + # # raise AttributeError + + # waveform = torch.tensor([(np.array(file.get_array_of_samples(array_type_override='h')) / 32768)], dtype=torch.float32) + # import pdb + # pdb.set_trace() + sr = 16000 + waveform, random_start = self.random_segment_wav( + waveform, target_length=int(sr * self.duration) + ) + waveform = self.resample(waveform, sr) + # random_start = int(random_start * (self.sampling_rate / sr)) + + waveform = waveform.numpy()[0, ...] + + waveform = self.normalize_wav(waveform) + + if self.trim_wav: + waveform = self.trim_wav(waveform) + + waveform = waveform[None, ...] + waveform = self.pad_wav( + waveform, target_length=int(self.sampling_rate * self.duration) + ) + return waveform, random_start + + def mix_two_waveforms(self, waveform1, waveform2): + mix_lambda = np.random.beta(5, 5) + mix_waveform = mix_lambda * waveform1 + (1 - mix_lambda) * waveform2 + return self.normalize_wav(mix_waveform), mix_lambda + + def read_audio_file(self, file, k): + # target_length = int(self.sampling_rate * self.duration) + # import pdb + # pdb.set_trace() + # print(type(file)) + waveform, random_start = self.read_wav_file(file, k) + + # log_mel_spec, stft = self.wav_feature_extraction_torchaudio(waveform) # this line is faster, but this implementation is not aligned with HiFi-GAN + log_mel_spec, stft = self.wav_feature_extraction(waveform) + + return log_mel_spec, stft, waveform, random_start + + def mel_spectrogram_train(self, y): + if torch.min(y) < -1.0: + print("train min value is ", torch.min(y)) + if torch.max(y) > 1.0: + print("train max value is ", torch.max(y)) + # import pdb + # pdb.set_trace() + if self.mel_fmax not in self.mel_basis: + # import pdb + # pdb.set_trace() + mel = librosa_mel_fn( + sr=self.sampling_rate, + n_fft=self.filter_length, + n_mels=self.n_mel, + fmin=self.mel_fmin, + fmax=self.mel_fmax, + ) + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)] = ( + torch.from_numpy(mel).float().to(y.device) + ) + self.hann_window[str(y.device)] = torch.hann_window(self.win_length).to( + y.device + ) + + y = torch.nn.functional.pad( + y.unsqueeze(1), + ( + int((self.filter_length - self.hop_length) / 2), + int((self.filter_length - self.hop_length) / 2), + ), + mode="reflect", + ) + + y = y.squeeze(1) + # import pdb + # pdb.set_trace() + stft_spec = torch.stft( + y, + self.filter_length, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.hann_window[str(y.device)], + center=False, + pad_mode="reflect", + normalized=False, + onesided=True, + return_complex=True, + ) + + stft_spec = torch.abs(stft_spec) + + mel = spectral_normalize_torch( + torch.matmul( + self.mel_basis[str(self.mel_fmax) + "_" + str(y.device)], stft_spec + ) + ) + + return mel[0], stft_spec[0] + + # This one is significantly slower than "wav_feature_extraction_torchaudio" if num_worker 
> 1 + def wav_feature_extraction(self, waveform): + waveform = waveform[0, ...] + waveform = torch.FloatTensor(waveform) + + # log_mel_spec, stft, energy = Audio.tools.get_mel_from_wav(waveform, self.STFT)[0] + log_mel_spec, stft = self.mel_spectrogram_train(waveform.unsqueeze(0)) + + log_mel_spec = torch.FloatTensor(log_mel_spec.T) + stft = torch.FloatTensor(stft.T) + + log_mel_spec, stft = self.pad_spec(log_mel_spec), self.pad_spec(stft) + return log_mel_spec, stft + + def pad_spec(self, log_mel_spec): + n_frames = log_mel_spec.shape[0] + p = self.target_length - n_frames + # cut and pad + if p > 0: + m = torch.nn.ZeroPad2d((0, 0, 0, p)) + log_mel_spec = m(log_mel_spec) + elif p < 0: + log_mel_spec = log_mel_spec[0 : self.target_length, :] + + if log_mel_spec.size(-1) % 2 != 0: + log_mel_spec = log_mel_spec[..., :-1] + + return log_mel_spec + + def _read_datum_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + random_index = torch.randint(0, len(caption_keys), (1,))[0].item() + return datum[caption_keys[random_index]] + + def _is_contain_caption(self, datum): + caption_keys = [x for x in datum.keys() if ("caption" in x)] + return len(caption_keys) > 0 + + def label_indices_to_text(self, datum, label_indices): + if self._is_contain_caption(datum): + return self._read_datum_caption(datum) + elif "label" in datum.keys(): + name_indices = torch.where(label_indices > 0.1)[0] + # description_header = "This audio contains the sound of " + description_header = "" + labels = "" + for id, each in enumerate(name_indices): + if id == len(name_indices) - 1: + labels += "%s." % self.num2label[int(each)] + else: + labels += "%s, " % self.num2label[int(each)] + return description_header + labels + else: + return "" # TODO, if both label and caption are not provided, return empty string + + def random_uniform(self, start, end): + val = torch.rand(1).item() + return start + (end - start) * val + + def frequency_masking(self, log_mel_spec, freqm): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(freqm // 8, freqm)) + mask_start = int(self.random_uniform(start=0, end=freq - mask_len)) + log_mel_spec[:, mask_start : mask_start + mask_len, :] *= 0.0 + return log_mel_spec + + def time_masking(self, log_mel_spec, timem): + bs, freq, tsteps = log_mel_spec.size() + mask_len = int(self.random_uniform(timem // 8, timem)) + mask_start = int(self.random_uniform(start=0, end=tsteps - mask_len)) + log_mel_spec[:, :, mask_start : mask_start + mask_len] *= 0.0 + return log_mel_spec + + +class AudioDataset_infer(Dataset): + def __init__( + self, + config, + caption_list, + lock=True + ): + self.config = config + # self.lock = threading.Lock() + """ + Dataset that manage caption writings + """ + self.captions = [] + with open(caption_list, 'r') as f: + for _ ,line in enumerate(f): + key = line.strip() + self.captions.append(key.split()[0]) + self.duration = self.duration = self.config["preprocessing"]["audio"]["duration"] + self.sampling_rate = self.config["variables"]["sampling_rate"] + self.target_length = int(self.sampling_rate * self.duration) + self.waveform = torch.zeros((1, self.target_length)) + + def __getitem__(self, index): + + fname = [f"sample_{index}"] + data = { + "text": [self.captions[index]], # list ... dict ? 
+ "fname": fname, # list + # tensor, [batchsize, 1, samples_num] + "waveform": "", + # tensor, [batchsize, t-steps, f-bins] + "stft": "", + # tensor, [batchsize, t-steps, mel-bins] + "log_mel_spec": "", + "duration": self.duration, + "sampling_rate": self.sampling_rate, + "random_start_sample_in_original_audio_file": 0, + "label_vector": torch.FloatTensor(np.zeros(0, dtype=np.float32)), + "mos":mos + } + + if data["text"] is None: + print("Warning: The model return None on key text", fname) + data["text"] = "" + + return data + + def __len__(self): + return len(self.captions) + +if __name__ == "__main__": + import torch + from tqdm import tqdm + from pytorch_lightning import seed_everything + from torch.utils.data import DataLoader + + seed_everything(0) + + def write_json(my_dict, fname): + # print("Save json file at "+fname) + json_str = json.dumps(my_dict) + with open(fname, "w") as json_file: + json_file.write(json_str) + + def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + config = yaml.load( + open( + "/mnt/bn/lqhaoheliu/project/audio_generation_diffusion/config/vae_48k_256/ds_8_kl_1.0_ch_16.yaml", + "r", + ), + Loader=yaml.FullLoader, + ) + + add_ons = config["data"]["dataloader_add_ons"] + + # load_json(data) + dataset = AudioDataset( + config=config, split="train", waveform_only=False, add_ons=add_ons + ) + + loader = DataLoader(dataset, batch_size=1, num_workers=0, shuffle=True) + + # for cnt, each in tqdm(enumerate(loader)): + # print(each["waveform"].size(), each["log_mel_spec"].size()) + # print(each['freq_energy_percentile']) + # import ipdb + + # ipdb.set_trace() + # pass + diff --git a/qa_mdt/audioldm_train/utilities/diffusion_util.py b/qa_mdt/audioldm_train/utilities/diffusion_util.py new file mode 100644 index 0000000000000000000000000000000000000000..d95f241ba5c40d3b8bb81556ee14b86079548cb7 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/diffusion_util.py @@ -0,0 +1,295 @@ +# adopted from +# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py +# and +# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +# and +# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py +# +# thanks! 
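+#
+# This module collects schedule and network helpers used by the diffusion code:
+# beta schedules, DDIM timestep/sigma selection, sinusoidal timestep embeddings,
+# gradient checkpointing, and small nn wrappers (SiLU, GroupNorm32, conv_nd,
+# avg_pool_nd).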
+ + +import os +import math +import torch +import torch.nn as nn +import numpy as np +from einops import repeat + +from qa_mdt.audioldm_train.utilities.model_util import instantiate_from_config + + +def make_beta_schedule( + schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3 +): + if schedule == "linear": + betas = ( + torch.linspace( + linear_start**0.5, linear_end**0.5, n_timestep, dtype=torch.float64 + ) + ** 2 + ) + + elif schedule == "cosine": + timesteps = ( + torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s + ) + alphas = timesteps / (1 + cosine_s) * np.pi / 2 + alphas = torch.cos(alphas).pow(2) + alphas = alphas / alphas[0] + betas = 1 - alphas[1:] / alphas[:-1] + betas = np.clip(betas, a_min=0, a_max=0.999) + + elif schedule == "sqrt_linear": + betas = torch.linspace( + linear_start, linear_end, n_timestep, dtype=torch.float64 + ) + elif schedule == "sqrt": + betas = ( + torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) + ** 0.5 + ) + else: + raise ValueError(f"schedule '{schedule}' unknown.") + return betas.numpy() + + +def make_ddim_timesteps( + ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True +): + if ddim_discr_method == "uniform": + c = num_ddpm_timesteps // num_ddim_timesteps + ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c))) + elif ddim_discr_method == "quad": + ddim_timesteps = ( + (np.linspace(0, np.sqrt(num_ddpm_timesteps * 0.8), num_ddim_timesteps)) ** 2 + ).astype(int) + else: + raise NotImplementedError( + f'There is no ddim discretization method called "{ddim_discr_method}"' + ) + + # assert ddim_timesteps.shape[0] == num_ddim_timesteps + # add one to get the final alpha values right (the ones from first scale to data during sampling) + steps_out = ddim_timesteps + 1 + if verbose: + print(f"Selected timesteps for ddim sampler: {steps_out}") + return steps_out + + +def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True): + # select alphas for computing the variance schedule + alphas = alphacums[ddim_timesteps] + alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist()) + + # according the the formula provided in https://arxiv.org/abs/2010.02502 + sigmas = eta * np.sqrt( + (1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev) + ) + if verbose: + print( + f"Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}" + ) + print( + f"For the chosen value of eta, which is {eta}, " + f"this results in the following sigma_t schedule for ddim sampler {sigmas}" + ) + return sigmas, alphas, alphas_prev + + +def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): + """ + Create a beta schedule that discretizes the given alpha_t_bar function, + which defines the cumulative product of (1-beta) over time from t = [0,1]. + :param num_diffusion_timesteps: the number of betas to produce. + :param alpha_bar: a lambda that takes an argument t from 0 to 1 and + produces the cumulative product of (1-beta) up to that + part of the diffusion process. + :param max_beta: the maximum beta to use; use values lower than 1 to + prevent singularities. 
+ """ + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return np.array(betas) + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t).contiguous() + return out.reshape(b, *((1,) * (len(x_shape) - 1))).contiguous() + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with torch.enable_grad(): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) + * torch.arange(start=0, end=half, dtype=torch.float32) + / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat( + [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 + ) + else: + embedding = repeat(timesteps, "b -> b d", d=dim) + return embedding + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def normalization(channels): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. 
+ """ + return GroupNorm32(32, channels) + + +# PyTorch 1.7 has SiLU, but we support PyTorch 1.5. +class SiLU(nn.Module): + def forward(self, x): + return x * torch.sigmoid(x) + + +class GroupNorm32(nn.GroupNorm): + def forward(self, x): + return super().forward(x.float()).type(x.dtype) + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. + """ + return nn.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +class HybridConditioner(nn.Module): + def __init__(self, c_concat_config, c_crossattn_config): + super().__init__() + self.concat_conditioner = instantiate_from_config(c_concat_config) + self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) + + def forward(self, c_concat, c_crossattn): + c_concat = self.concat_conditioner(c_concat) + c_crossattn = self.crossattn_conditioner(c_crossattn) + return {"c_concat": [c_concat], "c_crossattn": [c_crossattn]} + + +def noise_like(shape, device, repeat=False): + repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat( + shape[0], *((1,) * (len(shape) - 1)) + ) + noise = lambda: torch.randn(shape, device=device) + return repeat_noise() if repeat else noise() diff --git a/qa_mdt/audioldm_train/utilities/model_util.py b/qa_mdt/audioldm_train/utilities/model_util.py new file mode 100644 index 0000000000000000000000000000000000000000..702a5db1ab9d75306345b9305775462933c8bdf7 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/model_util.py @@ -0,0 +1,304 @@ +import os +import json + +import torch +import numpy as np + +import qa_mdt.audioldm_train.modules.hifigan as hifigan + +import importlib + +import torch +import numpy as np +from collections import abc + +import multiprocessing as mp +from threading import Thread +from queue import Queue + +from inspect import isfunction +from PIL import Image, ImageDraw, ImageFont +import json +with open('./qa_mdt/offset_pretrained_checkpoints.json', 'r') as config_file: + config_data = json.load(config_file) + +def log_txt_as_img(wh, xc, size=10): + # wh a tuple of (width, height) + # xc a list of captions to plot + b = len(xc) + txts = list() + for bi in range(b): + txt = Image.new("RGB", wh, color="white") + draw = ImageDraw.Draw(txt) + font = ImageFont.truetype("data/DejaVuSans.ttf", size=size) + nc = int(40 * (wh[0] / 256)) + lines = "\n".join( + xc[bi][start : start + nc] for start in range(0, len(xc[bi]), nc) + ) + + try: + draw.text((0, 0), lines, fill="black", font=font) + except UnicodeEncodeError: + print("Cant encode string for logging. 
Skipping.") + + txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0 + txts.append(txt) + txts = np.stack(txts) + txts = torch.tensor(txts) + return txts + + +def ismap(x): + if not isinstance(x, torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] > 3) + + +def isimage(x): + if not isinstance(x, torch.Tensor): + return False + return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1) + + +def int16_to_float32(x): + return (x / 32767.0).astype(np.float32) + + +def float32_to_int16(x): + x = np.clip(x, a_min=-1.0, a_max=1.0) + return (x * 32767.0).astype(np.int16) + + +def exists(x): + return x is not None + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def mean_flat(tensor): + """ + https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86 + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def count_params(model, verbose=False): + total_params = sum(p.numel() for p in model.parameters()) + if verbose: + print(f"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params.") + return total_params + + +def instantiate_from_config(config): + if not "target" in config: + if config == "__is_first_stage__": + return None + elif config == "__is_unconditional__": + return None + raise KeyError("Expected key `target` to instantiate.") + return get_obj_from_str(config["target"])(**config.get("params", dict())) + + +def get_obj_from_str(string, reload=False): + module, cls = string.rsplit(".", 1) + if reload: + module_imp = importlib.import_module(module) + importlib.reload(module_imp) + return getattr(importlib.import_module(module, package=None), cls) + + +def _do_parallel_data_prefetch(func, Q, data, idx, idx_to_fn=False): + # create dummy dataset instance + + # run prefetching + if idx_to_fn: + res = func(data, worker_id=idx) + else: + res = func(data) + Q.put([idx, res]) + Q.put("Done") + + +def parallel_data_prefetch( + func: callable, + data, + n_proc, + target_data_type="ndarray", + cpu_intensive=True, + use_worker_id=False, +): + # if target_data_type not in ["ndarray", "list"]: + # raise ValueError( + # "Data, which is passed to parallel_data_prefetch has to be either of type list or ndarray." + # ) + if isinstance(data, np.ndarray) and target_data_type == "list": + raise ValueError("list expected but function got ndarray.") + elif isinstance(data, abc.Iterable): + if isinstance(data, dict): + print( + f'WARNING:"data" argument passed to parallel_data_prefetch is a dict: Using only its values and disregarding keys.' + ) + data = list(data.values()) + if target_data_type == "ndarray": + data = np.asarray(data) + else: + data = list(data) + else: + raise TypeError( + f"The data, that shall be processed parallel has to be either an np.ndarray or an Iterable, but is actually {type(data)}." 
+ ) + + if cpu_intensive: + Q = mp.Queue(1000) + proc = mp.Process + else: + Q = Queue(1000) + proc = Thread + # spawn processes + if target_data_type == "ndarray": + arguments = [ + [func, Q, part, i, use_worker_id] + for i, part in enumerate(np.array_split(data, n_proc)) + ] + else: + step = ( + int(len(data) / n_proc + 1) + if len(data) % n_proc != 0 + else int(len(data) / n_proc) + ) + arguments = [ + [func, Q, part, i, use_worker_id] + for i, part in enumerate( + [data[i : i + step] for i in range(0, len(data), step)] + ) + ] + processes = [] + for i in range(n_proc): + p = proc(target=_do_parallel_data_prefetch, args=arguments[i]) + processes += [p] + + # start processes + print(f"Start prefetching...") + import time + + start = time.time() + gather_res = [[] for _ in range(n_proc)] + try: + for p in processes: + p.start() + + k = 0 + while k < n_proc: + # get result + res = Q.get() + if res == "Done": + k += 1 + else: + gather_res[res[0]] = res[1] + + except Exception as e: + print("Exception: ", e) + for p in processes: + p.terminate() + + raise e + finally: + for p in processes: + p.join() + print(f"Prefetching complete. [{time.time() - start} sec.]") + + if target_data_type == "ndarray": + if not isinstance(gather_res[0], np.ndarray): + return np.concatenate([np.asarray(r) for r in gather_res], axis=0) + + # order outputs + return np.concatenate(gather_res, axis=0) + elif target_data_type == "list": + out = [] + for r in gather_res: + out.extend(r) + return out + else: + return gather_res + + +def get_available_checkpoint_keys(model, ckpt): + print("==> Attemp to reload from %s" % ckpt) + state_dict = torch.load(ckpt)["state_dict"] + current_state_dict = model.state_dict() + new_state_dict = {} + for k in state_dict.keys(): + if ( + k in current_state_dict.keys() + and current_state_dict[k].size() == state_dict[k].size() + ): + new_state_dict[k] = state_dict[k] + else: + print("==> WARNING: Skipping %s" % k) + print( + "%s out of %s keys are matched" + % (len(new_state_dict.keys()), len(state_dict.keys())) + ) + return new_state_dict + + +def get_param_num(model): + num_param = sum(param.numel() for param in model.parameters()) + return num_param + + +def torch_version_orig_mod_remove(state_dict): + new_state_dict = {} + new_state_dict["generator"] = {} + for key in state_dict["generator"].keys(): + if "_orig_mod." 
in key: + new_state_dict["generator"][key.replace("_orig_mod.", "")] = state_dict[ + "generator" + ][key] + else: + new_state_dict["generator"][key] = state_dict["generator"][key] + return new_state_dict + + +def get_vocoder(config, device, mel_bins): + ROOT = config_data["hifi-gan"] + + if mel_bins == 64: + # import pdb + # pdb.set_trace() + model_path = os.path.join(ROOT, "hifigan_16k_64bins") + with open(model_path + ".json", "r") as f: + config = json.load(f) + config = hifigan.AttrDict(config) + vocoder = hifigan.Generator(config) + elif mel_bins == 256: + model_path = os.path.join(ROOT, "hifigan_48k_256bins") + with open(model_path + ".json", "r") as f: + config = json.load(f) + config = hifigan.AttrDict(config) + vocoder = hifigan.Generator_HiFiRes(config) + + ckpt = torch.load(model_path + ".ckpt") + ckpt = torch_version_orig_mod_remove(ckpt) + vocoder.load_state_dict(ckpt["generator"]) + vocoder.eval() + vocoder.remove_weight_norm() + vocoder.to(device) + return vocoder + + +def vocoder_infer(mels, vocoder, lengths=None): + with torch.no_grad(): + wavs = vocoder(mels).squeeze(1) + + wavs = (wavs.cpu().numpy() * 32768).astype("int16") + + if lengths is not None: + wavs = wavs[:, :lengths] + + return wavs diff --git a/qa_mdt/audioldm_train/utilities/sampler_util.py b/qa_mdt/audioldm_train/utilities/sampler_util.py new file mode 100644 index 0000000000000000000000000000000000000000..cdaf4882715f53f39ead8bf71fb3dccc29cd8b94 --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/sampler_util.py @@ -0,0 +1,588 @@ +from typing import Iterator, List, Optional, Union +from collections import Counter +import logging +from operator import itemgetter +import random + +import numpy as np + +from torch.utils.data import DistributedSampler +from torch.utils.data.sampler import Sampler + +LOGGER = logging.getLogger(__name__) + +from torch.utils.data import Dataset, Sampler + + +class DatasetFromSampler(Dataset): + """Dataset to create indexes from `Sampler`. + Args: + sampler: PyTorch sampler + """ + + def __init__(self, sampler: Sampler): + """Initialisation for DatasetFromSampler.""" + self.sampler = sampler + self.sampler_list = None + + def __getitem__(self, index: int): + """Gets element of the dataset. + Args: + index: index of the element in the dataset + Returns: + Single element by index + """ + if self.sampler_list is None: + self.sampler_list = list(self.sampler) + return self.sampler_list[index] + + def __len__(self) -> int: + """ + Returns: + int: length of the dataset + """ + return len(self.sampler) + + +class BalanceClassSampler(Sampler): + """Allows you to create stratified sample on unbalanced classes. + + Args: + labels: list of class label for each elem in the dataset + mode: Strategy to balance classes. + Must be one of [downsampling, upsampling] + + Python API examples: + + .. 
code-block:: python + + import os + from torch import nn, optim + from torch.utils.data import DataLoader + from catalyst import dl + from catalyst.data import ToTensor, BalanceClassSampler + from catalyst.contrib.datasets import MNIST + + train_data = MNIST(os.getcwd(), train=True, download=True, transform=ToTensor()) + train_labels = train_data.targets.cpu().numpy().tolist() + train_sampler = BalanceClassSampler(train_labels, mode=5000) + valid_data = MNIST(os.getcwd(), train=False) + + loaders = { + "train": DataLoader(train_data, sampler=train_sampler, batch_size=32), + "valid": DataLoader(valid_data, batch_size=32), + } + + model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10)) + criterion = nn.CrossEntropyLoss() + optimizer = optim.Adam(model.parameters(), lr=0.02) + + runner = dl.SupervisedRunner() + # model training + runner.train( + model=model, + criterion=criterion, + optimizer=optimizer, + loaders=loaders, + num_epochs=1, + logdir="./logs", + valid_loader="valid", + valid_metric="loss", + minimize_valid_metric=True, + verbose=True, + ) + """ + + def __init__(self, labels: List[int], mode: Union[str, int] = "downsampling"): + """Sampler initialisation.""" + super().__init__(labels) + + labels = np.array(labels) + samples_per_class = {label: (labels == label).sum() for label in set(labels)} + + self.lbl2idx = { + label: np.arange(len(labels))[labels == label].tolist() + for label in set(labels) + } + + if isinstance(mode, str): + assert mode in ["downsampling", "upsampling"] + + if isinstance(mode, int) or mode == "upsampling": + samples_per_class = ( + mode if isinstance(mode, int) else max(samples_per_class.values()) + ) + else: + samples_per_class = min(samples_per_class.values()) + + self.labels = labels + self.samples_per_class = samples_per_class + self.length = self.samples_per_class * len(set(labels)) + + def __iter__(self) -> Iterator[int]: + """ + Returns: + iterator of indices of stratified sample + """ + indices = [] + for key in sorted(self.lbl2idx): + replace_flag = self.samples_per_class > len(self.lbl2idx[key]) + indices += np.random.choice( + self.lbl2idx[key], self.samples_per_class, replace=replace_flag + ).tolist() + assert len(indices) == self.length + np.random.shuffle(indices) + + return iter(indices) + + def __len__(self) -> int: + """ + Returns: + length of result sample + """ + return self.length + + +class BatchBalanceClassSampler(Sampler): + """ + This kind of sampler can be used for both metric learning and classification task. + + BatchSampler with the given strategy for the C unique classes dataset: + - Selection `num_classes` of C classes for each batch + - Selection `num_samples` instances for each class in the batch + The epoch ends after `num_batches`. + So, the batch sise is `num_classes` * `num_samples`. + + One of the purposes of this sampler is to be used for + forming triplets and pos/neg pairs inside the batch. + To guarante existance of these pairs in the batch, + `num_classes` and `num_samples` should be > 1. (1) + + This type of sampling can be found in the classical paper of Person Re-Id, + where P (`num_classes`) equals 32 and K (`num_samples`) equals 4: + `In Defense of the Triplet Loss for Person Re-Identification`_. + + Args: + labels: list of classes labeles for each elem in the dataset + num_classes: number of classes in a batch, should be > 1 + num_samples: number of instances of each class in a batch, should be > 1 + num_batches: number of batches in epoch + (default = len(labels) // (num_classes * num_samples)) + + .. 
_In Defense of the Triplet Loss for Person Re-Identification: + https://arxiv.org/abs/1703.07737 + + Python API examples: + + .. code-block:: python + + import os + from torch import nn, optim + from torch.utils.data import DataLoader + from catalyst import dl + from catalyst.data import ToTensor, BatchBalanceClassSampler + from catalyst.contrib.datasets import MNIST + + train_data = MNIST(os.getcwd(), train=True, download=True) + train_labels = train_data.targets.cpu().numpy().tolist() + train_sampler = BatchBalanceClassSampler( + train_labels, num_classes=10, num_samples=4) + valid_data = MNIST(os.getcwd(), train=False) + + loaders = { + "train": DataLoader(train_data, batch_sampler=train_sampler), + "valid": DataLoader(valid_data, batch_size=32), + } + + model = nn.Sequential(nn.Flatten(), nn.Linear(28 * 28, 10)) + criterion = nn.CrossEntropyLoss() + optimizer = optim.Adam(model.parameters(), lr=0.02) + + runner = dl.SupervisedRunner() + # model training + runner.train( + model=model, + criterion=criterion, + optimizer=optimizer, + loaders=loaders, + num_epochs=1, + logdir="./logs", + valid_loader="valid", + valid_metric="loss", + minimize_valid_metric=True, + verbose=True, + ) + """ + + def __init__( + self, + labels: Union[List[int], np.ndarray], + num_classes: int, + num_samples: int, + num_batches: int = None, + ): + """Sampler initialisation.""" + super().__init__(labels) + classes = set(labels) + + assert isinstance(num_classes, int) and isinstance(num_samples, int) + assert (1 < num_classes <= len(classes)) and (1 < num_samples) + assert all( + n > 1 for n in Counter(labels).values() + ), "Each class shoud contain at least 2 instances to fit (1)" + + labels = np.array(labels) + self._labels = list(set(labels.tolist())) + self._num_classes = num_classes + self._num_samples = num_samples + self._batch_size = self._num_classes * self._num_samples + self._num_batches = num_batches or len(labels) // self._batch_size + self.lbl2idx = { + label: np.arange(len(labels))[labels == label].tolist() + for label in set(labels) + } + + @property + def batch_size(self) -> int: + """ + Returns: + this value should be used in DataLoader as batch size + """ + return self._batch_size + + @property + def batches_in_epoch(self) -> int: + """ + Returns: + number of batches in an epoch + """ + return self._num_batches + + def __len__(self) -> int: + """ + Returns: + number of samples in an epoch + """ + return self._num_batches # * self._batch_size + + def __iter__(self) -> Iterator[int]: + """ + Returns: + indeces for sampling dataset elems during an epoch + """ + indices = [] + for _ in range(self._num_batches): + batch_indices = [] + classes_for_batch = random.sample(self._labels, self._num_classes) + while self._num_classes != len(set(classes_for_batch)): + classes_for_batch = random.sample(self._labels, self._num_classes) + for cls_id in classes_for_batch: + replace_flag = self._num_samples > len(self.lbl2idx[cls_id]) + batch_indices += np.random.choice( + self.lbl2idx[cls_id], self._num_samples, replace=replace_flag + ).tolist() + indices.append(batch_indices) + return iter(indices) + + +class DynamicBalanceClassSampler(Sampler): + """ + This kind of sampler can be used for classification tasks with significant + class imbalance. + + The idea of this sampler that we start with the original class distribution + and gradually move to uniform class distribution like with downsampling. 
+ + Let's define :math: D_i = #C_i/ #C_min where :math: #C_i is a size of class + i and :math: #C_min is a size of the rarest class, so :math: D_i define + class distribution. Also define :math: g(n_epoch) is a exponential + scheduler. On each epoch current :math: D_i calculated as + :math: current D_i = D_i ^ g(n_epoch), + after this data samples according this distribution. + + Notes: + In the end of the training, epochs will contain only + min_size_class * n_classes examples. So, possible it will not + necessary to do validation on each epoch. For this reason use + ControlFlowCallback. + + Examples: + + >>> import torch + >>> import numpy as np + + >>> from catalyst.data import DynamicBalanceClassSampler + >>> from torch.utils import data + + >>> features = torch.Tensor(np.random.random((200, 100))) + >>> labels = np.random.randint(0, 4, size=(200,)) + >>> sampler = DynamicBalanceClassSampler(labels) + >>> labels = torch.LongTensor(labels) + >>> dataset = data.TensorDataset(features, labels) + >>> loader = data.dataloader.DataLoader(dataset, batch_size=8) + + >>> for batch in loader: + >>> b_features, b_labels = batch + + Sampler was inspired by https://arxiv.org/abs/1901.06783 + """ + + def __init__( + self, + labels: List[Union[int, str]], + exp_lambda: float = 0.9, + start_epoch: int = 0, + max_d: Optional[int] = None, + mode: Union[str, int] = "downsampling", + ignore_warning: bool = False, + ): + """ + Args: + labels: list of labels for each elem in the dataset + exp_lambda: exponent figure for schedule + start_epoch: start epoch number, can be useful for multi-stage + experiments + max_d: if not None, limit on the difference between the most + frequent and the rarest classes, heuristic + mode: number of samples per class in the end of training. Must be + "downsampling" or number. Before change it, make sure that you + understand how does it work + ignore_warning: ignore warning about min class size + """ + assert isinstance(start_epoch, int) + assert 0 < exp_lambda < 1, "exp_lambda must be in (0, 1)" + super().__init__(labels) + self.exp_lambda = exp_lambda + if max_d is None: + max_d = np.inf + self.max_d = max_d + self.epoch = start_epoch + labels = np.array(labels) + samples_per_class = Counter(labels) + self.min_class_size = min(samples_per_class.values()) + + if self.min_class_size < 100 and not ignore_warning: + LOGGER.warning( + f"the smallest class contains only" + f" {self.min_class_size} examples. 
At the end of" + f" training, epochs will contain only" + f" {self.min_class_size * len(samples_per_class)}" + f" examples" + ) + + self.original_d = { + key: value / self.min_class_size for key, value in samples_per_class.items() + } + self.label2idxes = { + label: np.arange(len(labels))[labels == label].tolist() + for label in set(labels) + } + + if isinstance(mode, int): + self.min_class_size = mode + else: + assert mode == "downsampling" + + self.labels = labels + self._update() + + def _update(self) -> None: + """Update d coefficients.""" + current_d = { + key: min(value ** self._exp_scheduler(), self.max_d) + for key, value in self.original_d.items() + } + samples_per_classes = { + key: int(value * self.min_class_size) for key, value in current_d.items() + } + self.samples_per_classes = samples_per_classes + self.length = np.sum(list(samples_per_classes.values())) + self.epoch += 1 + + def _exp_scheduler(self) -> float: + return self.exp_lambda**self.epoch + + def __iter__(self) -> Iterator[int]: + """ + Returns: + iterator of indices of stratified sample + """ + indices = [] + for key in sorted(self.label2idxes): + samples_per_class = self.samples_per_classes[key] + replace_flag = samples_per_class > len(self.label2idxes[key]) + indices += np.random.choice( + self.label2idxes[key], samples_per_class, replace=replace_flag + ).tolist() + assert len(indices) == self.length + np.random.shuffle(indices) + self._update() + return iter(indices) + + def __len__(self) -> int: + """ + Returns: + length of result sample + """ + return self.length + + +class MiniEpochSampler(Sampler): + """ + Sampler iterates mini epochs from the dataset used by ``mini_epoch_len``. + + Args: + data_len: Size of the dataset + mini_epoch_len: Num samples from the dataset used in one + mini epoch. + drop_last: If ``True``, sampler will drop the last batches + if its size would be less than ``batches_per_epoch`` + shuffle: one of ``"always"``, ``"real_epoch"``, or `None``. + The sampler will shuffle indices + > "per_mini_epoch" - every mini epoch (every ``__iter__`` call) + > "per_epoch" -- every real epoch + > None -- don't shuffle + + Example: + >>> MiniEpochSampler(len(dataset), mini_epoch_len=100) + >>> MiniEpochSampler(len(dataset), mini_epoch_len=100, drop_last=True) + >>> MiniEpochSampler(len(dataset), mini_epoch_len=100, + >>> shuffle="per_epoch") + """ + + def __init__( + self, + data_len: int, + mini_epoch_len: int, + drop_last: bool = False, + shuffle: str = None, + ): + """Sampler initialisation.""" + super().__init__(None) + + self.data_len = int(data_len) + self.mini_epoch_len = int(mini_epoch_len) + + self.steps = int(data_len / self.mini_epoch_len) + self.state_i = 0 + + has_reminder = data_len - self.steps * mini_epoch_len > 0 + if self.steps == 0: + self.divider = 1 + elif has_reminder and not drop_last: + self.divider = self.steps + 1 + else: + self.divider = self.steps + + self._indices = np.arange(self.data_len) + self.indices = self._indices + self.end_pointer = max(self.data_len, self.mini_epoch_len) + + if not (shuffle is None or shuffle in ["per_mini_epoch", "per_epoch"]): + raise ValueError( + "Shuffle must be one of ['per_mini_epoch', 'per_epoch']. 
" + + f"Got {shuffle}" + ) + self.shuffle_type = shuffle + + def shuffle(self) -> None: + """Shuffle sampler indices.""" + if self.shuffle_type == "per_mini_epoch" or ( + self.shuffle_type == "per_epoch" and self.state_i == 0 + ): + if self.data_len >= self.mini_epoch_len: + self.indices = self._indices + np.random.shuffle(self.indices) + else: + self.indices = np.random.choice( + self._indices, self.mini_epoch_len, replace=True + ) + + def __iter__(self) -> Iterator[int]: + """Iterate over sampler. + + Returns: + python iterator + """ + self.state_i = self.state_i % self.divider + self.shuffle() + + start = self.state_i * self.mini_epoch_len + stop = ( + self.end_pointer + if (self.state_i == self.steps) + else (self.state_i + 1) * self.mini_epoch_len + ) + indices = self.indices[start:stop].tolist() + + self.state_i += 1 + return iter(indices) + + def __len__(self) -> int: + """ + Returns: + int: length of the mini-epoch + """ + return self.mini_epoch_len + + +class DistributedSamplerWrapper(DistributedSampler): + """ + Wrapper over `Sampler` for distributed training. + Allows you to use any sampler in distributed mode. + + It is especially useful in conjunction with + `torch.nn.parallel.DistributedDataParallel`. In such case, each + process can pass a DistributedSamplerWrapper instance as a DataLoader + sampler, and load a subset of subsampled data of the original dataset + that is exclusive to it. + + .. note:: + Sampler is assumed to be of constant size. + """ + + def __init__( + self, + sampler, + num_replicas: Optional[int] = None, + rank: Optional[int] = None, + shuffle: bool = True, + ): + """ + + Args: + sampler: Sampler used for subsampling + num_replicas (int, optional): Number of processes participating in + distributed training + rank (int, optional): Rank of the current process + within ``num_replicas`` + shuffle (bool, optional): If true (default), + sampler will shuffle the indices + """ + super(DistributedSamplerWrapper, self).__init__( + DatasetFromSampler(sampler), + num_replicas=num_replicas, + rank=rank, + shuffle=shuffle, + ) + self.sampler = sampler + + def __iter__(self) -> Iterator[int]: + """Iterate over sampler. 
+ + Returns: + python iterator + """ + self.dataset = DatasetFromSampler(self.sampler) + indexes_of_indexes = super().__iter__() + subsampler_indexes = self.dataset + return iter(itemgetter(*indexes_of_indexes)(subsampler_indexes)) + + +__all__ = [ + "BalanceClassSampler", + "BatchBalanceClassSampler", + "DistributedSamplerWrapper", + "DynamicBalanceClassSampler", + "MiniEpochSampler", +] diff --git a/qa_mdt/audioldm_train/utilities/tools.py b/qa_mdt/audioldm_train/utilities/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..c52609de6f77f9550219d74263b9bc347ffa7f6e --- /dev/null +++ b/qa_mdt/audioldm_train/utilities/tools.py @@ -0,0 +1,566 @@ +# Author: Haohe Liu +# Email: haoheliu@gmail.com +# Date: 11 Feb 2023 + +import os +import json + +import torch +import torch.nn.functional as F +import numpy as np +import matplotlib +from scipy.io import wavfile +from matplotlib import pyplot as plt + +matplotlib.use("Agg") + +import hashlib +import os + +import requests +from tqdm import tqdm + +URL_MAP = { + "vggishish_lpaps": "https://a3s.fi/swift/v1/AUTH_a235c0f452d648828f745589cde1219a/specvqgan_public/vggishish16.pt", + "vggishish_mean_std_melspec_10s_22050hz": "https://a3s.fi/swift/v1/AUTH_a235c0f452d648828f745589cde1219a/specvqgan_public/train_means_stds_melspec_10s_22050hz.txt", + "melception": "https://a3s.fi/swift/v1/AUTH_a235c0f452d648828f745589cde1219a/specvqgan_public/melception-21-05-10T09-28-40.pt", +} + +CKPT_MAP = { + "vggishish_lpaps": "vggishish16.pt", + "vggishish_mean_std_melspec_10s_22050hz": "train_means_stds_melspec_10s_22050hz.txt", + "melception": "melception-21-05-10T09-28-40.pt", +} + +MD5_MAP = { + "vggishish_lpaps": "197040c524a07ccacf7715d7080a80bd", + "vggishish_mean_std_melspec_10s_22050hz": "f449c6fd0e248936c16f6d22492bb625", + "melception": "a71a41041e945b457c7d3d814bbcf72d", +} + +device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + + +def read_list(fname): + result = [] + with open(fname, "r") as f: + for each in f.readlines(): + each = each.strip("\n") + result.append(each) + return result + + +def build_dataset_json_from_list(list_path): + data = [] + for each in read_list(list_path): + if "|" in each: + wav, caption = each.split("|") + else: + caption = each + wav = "" + data.append( + { + "wav": wav, + "caption": caption, + } + ) + return {"data": data} + + +def load_json(fname): + with open(fname, "r") as f: + data = json.load(f) + return data + + +def read_json(dataset_json_file): + with open(dataset_json_file, "r") as fp: + data_json = json.load(fp) + return data_json["data"] + + +def copy_test_subset_data(metadata, testset_copy_target_path): + # metadata = read_json(testset_metadata) + os.makedirs(testset_copy_target_path, exist_ok=True) + if len(os.listdir(testset_copy_target_path)) == len(metadata): + return + else: + # delete files in folder testset_copy_target_path + for file in os.listdir(testset_copy_target_path): + try: + os.remove(os.path.join(testset_copy_target_path, file)) + except Exception as e: + print(e) + + print("Copying test subset data to {}".format(testset_copy_target_path)) + for each in tqdm(metadata): + cmd = "cp {} {}".format(each["wav"], os.path.join(testset_copy_target_path)) + os.system(cmd) + + +def listdir_nohidden(path): + for f in os.listdir(path): + if not f.startswith("."): + yield f + + +def get_restore_step(path): + checkpoints = os.listdir(path) + if os.path.exists(os.path.join(path, "final.ckpt")): + return "final.ckpt", 0 + elif not os.path.exists(os.path.join(path, 
"last.ckpt")): + steps = [int(x.split(".ckpt")[0].split("step=")[1]) for x in checkpoints] + return checkpoints[np.argmax(steps)], np.max(steps) + else: + steps = [] + for x in checkpoints: + if "last" in x: + if "-v" not in x: + fname = "last.ckpt" + else: + this_version = int(x.split(".ckpt")[0].split("-v")[1]) + steps.append(this_version) + if len(steps) == 0 or this_version > np.max(steps): + fname = "last-v%s.ckpt" % this_version + return fname, 0 + + +def download(url, local_path, chunk_size=1024): + os.makedirs(os.path.split(local_path)[0], exist_ok=True) + with requests.get(url, stream=True) as r: + total_size = int(r.headers.get("content-length", 0)) + with tqdm(total=total_size, unit="B", unit_scale=True) as pbar: + with open(local_path, "wb") as f: + for data in r.iter_content(chunk_size=chunk_size): + if data: + f.write(data) + pbar.update(chunk_size) + + +def md5_hash(path): + with open(path, "rb") as f: + content = f.read() + return hashlib.md5(content).hexdigest() + + +def get_ckpt_path(name, root, check=False): + assert name in URL_MAP + path = os.path.join(root, CKPT_MAP[name]) + if not os.path.exists(path) or (check and not md5_hash(path) == MD5_MAP[name]): + print("Downloading {} model from {} to {}".format(name, URL_MAP[name], path)) + download(URL_MAP[name], path) + md5 = md5_hash(path) + assert md5 == MD5_MAP[name], md5 + return path + + +class KeyNotFoundError(Exception): + def __init__(self, cause, keys=None, visited=None): + self.cause = cause + self.keys = keys + self.visited = visited + messages = list() + if keys is not None: + messages.append("Key not found: {}".format(keys)) + if visited is not None: + messages.append("Visited: {}".format(visited)) + messages.append("Cause:\n{}".format(cause)) + message = "\n".join(messages) + super().__init__(message) + + +def retrieve( + list_or_dict, key, splitval="/", default=None, expand=True, pass_success=False +): + """Given a nested list or dict return the desired value at key expanding + callable nodes if necessary and :attr:`expand` is ``True``. The expansion + is done in-place. + + Parameters + ---------- + list_or_dict : list or dict + Possibly nested list or dictionary. + key : str + key/to/value, path like string describing all keys necessary to + consider to get to the desired value. List indices can also be + passed here. + splitval : str + String that defines the delimiter between keys of the + different depth levels in `key`. + default : obj + Value returned if :attr:`key` is not found. + expand : bool + Whether to expand callable nodes on the path or not. + + Returns + ------- + The desired value or if :attr:`default` is not ``None`` and the + :attr:`key` is not found returns ``default``. + + Raises + ------ + Exception if ``key`` not in ``list_or_dict`` and :attr:`default` is + ``None``. + """ + + keys = key.split(splitval) + + success = True + try: + visited = [] + parent = None + last_key = None + for key in keys: + if callable(list_or_dict): + if not expand: + raise KeyNotFoundError( + ValueError( + "Trying to get past callable node with expand=False." 
+ ), + keys=keys, + visited=visited, + ) + list_or_dict = list_or_dict() + parent[last_key] = list_or_dict + + last_key = key + parent = list_or_dict + + try: + if isinstance(list_or_dict, dict): + list_or_dict = list_or_dict[key] + else: + list_or_dict = list_or_dict[int(key)] + except (KeyError, IndexError, ValueError) as e: + raise KeyNotFoundError(e, keys=keys, visited=visited) + + visited += [key] + # final expansion of retrieved value + if expand and callable(list_or_dict): + list_or_dict = list_or_dict() + parent[last_key] = list_or_dict + except KeyNotFoundError as e: + if default is None: + raise e + else: + list_or_dict = default + success = False + + if not pass_success: + return list_or_dict + else: + return list_or_dict, success + + +def to_device(data, device): + if len(data) == 12: + ( + ids, + raw_texts, + speakers, + texts, + src_lens, + max_src_len, + mels, + mel_lens, + max_mel_len, + pitches, + energies, + durations, + ) = data + + speakers = torch.from_numpy(speakers).long().to(device) + texts = torch.from_numpy(texts).long().to(device) + src_lens = torch.from_numpy(src_lens).to(device) + mels = torch.from_numpy(mels).float().to(device) + mel_lens = torch.from_numpy(mel_lens).to(device) + pitches = torch.from_numpy(pitches).float().to(device) + energies = torch.from_numpy(energies).to(device) + durations = torch.from_numpy(durations).long().to(device) + + return ( + ids, + raw_texts, + speakers, + texts, + src_lens, + max_src_len, + mels, + mel_lens, + max_mel_len, + pitches, + energies, + durations, + ) + + if len(data) == 6: + (ids, raw_texts, speakers, texts, src_lens, max_src_len) = data + + speakers = torch.from_numpy(speakers).long().to(device) + texts = torch.from_numpy(texts).long().to(device) + src_lens = torch.from_numpy(src_lens).to(device) + + return (ids, raw_texts, speakers, texts, src_lens, max_src_len) + + +def log(logger, step=None, fig=None, audio=None, sampling_rate=22050, tag=""): + # if losses is not None: + # logger.add_scalar("Loss/total_loss", losses[0], step) + # logger.add_scalar("Loss/mel_loss", losses[1], step) + # logger.add_scalar("Loss/mel_postnet_loss", losses[2], step) + # logger.add_scalar("Loss/pitch_loss", losses[3], step) + # logger.add_scalar("Loss/energy_loss", losses[4], step) + # logger.add_scalar("Loss/duration_loss", losses[5], step) + # if(len(losses) > 6): + # logger.add_scalar("Loss/disc_loss", losses[6], step) + # logger.add_scalar("Loss/fmap_loss", losses[7], step) + # logger.add_scalar("Loss/r_loss", losses[8], step) + # logger.add_scalar("Loss/g_loss", losses[9], step) + # logger.add_scalar("Loss/gen_loss", losses[10], step) + # logger.add_scalar("Loss/diff_loss", losses[11], step) + + if fig is not None: + logger.add_figure(tag, fig) + + if audio is not None: + audio = audio / (max(abs(audio)) * 1.1) + logger.add_audio( + tag, + audio, + sample_rate=sampling_rate, + ) + + +def get_mask_from_lengths(lengths, max_len=None): + batch_size = lengths.shape[0] + if max_len is None: + max_len = torch.max(lengths).item() + + ids = torch.arange(0, max_len).unsqueeze(0).expand(batch_size, -1).to(device) + mask = ids >= lengths.unsqueeze(1).expand(-1, max_len) + + return mask + + +def expand(values, durations): + out = list() + for value, d in zip(values, durations): + out += [value] * max(0, int(d)) + return np.array(out) + + +def synth_one_sample_val( + targets, predictions, vocoder, model_config, preprocess_config +): + index = np.random.choice(list(np.arange(targets[6].size(0)))) + + basename = targets[0][index] + src_len = 
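# ---------------------------------------------------------------------------
# Illustrative sketch only: a small worked example of retrieve() above, which
# walks a nested dict/list with a "/"-delimited key, expands callable nodes in
# place, and can fall back to a default instead of raising KeyNotFoundError.
def _example_retrieve():
    config = {
        "model": {"params": [{"lr": 1e-4}, {"lr": 1e-5}]},
        "lazy": lambda: {"value": 42},  # callable node, expanded on first access
    }
    assert retrieve(config, "model/params/1/lr") == 1e-5      # list index via "1"
    assert retrieve(config, "lazy/value") == 42                # callable expanded in place
    assert retrieve(config, "model/missing", default="n/a") == "n/a"
    return config
# ---------------------------------------------------------------------------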
predictions[8][index].item() + mel_len = predictions[9][index].item() + mel_target = targets[6][index, :mel_len].detach().transpose(0, 1) + + mel_prediction = predictions[0][index, :mel_len].detach().transpose(0, 1) + postnet_mel_prediction = predictions[1][index, :mel_len].detach().transpose(0, 1) + duration = targets[11][index, :src_len].detach().cpu().numpy() + + if preprocess_config["preprocessing"]["pitch"]["feature"] == "phoneme_level": + pitch = predictions[2][index, :src_len].detach().cpu().numpy() + pitch = expand(pitch, duration) + else: + pitch = predictions[2][index, :mel_len].detach().cpu().numpy() + + if preprocess_config["preprocessing"]["energy"]["feature"] == "phoneme_level": + energy = predictions[3][index, :src_len].detach().cpu().numpy() + energy = expand(energy, duration) + else: + energy = predictions[3][index, :mel_len].detach().cpu().numpy() + + with open( + os.path.join(preprocess_config["path"]["preprocessed_path"], "stats.json") + ) as f: + stats = json.load(f) + stats = stats["pitch"] + stats["energy"][:2] + + # from datetime import datetime + # now = datetime.now() + # current_time = now.strftime("%D:%H:%M:%S") + # np.save(("mel_pred_%s.npy" % current_time).replace("/","-"), mel_prediction.cpu().numpy()) + # np.save(("postnet_mel_prediction_%s.npy" % current_time).replace("/","-"), postnet_mel_prediction.cpu().numpy()) + # np.save(("mel_target_%s.npy" % current_time).replace("/","-"), mel_target.cpu().numpy()) + + fig = plot_mel( + [ + (mel_prediction.cpu().numpy(), pitch, energy), + (postnet_mel_prediction.cpu().numpy(), pitch, energy), + (mel_target.cpu().numpy(), pitch, energy), + ], + stats, + [ + "Raw mel spectrogram prediction", + "Postnet mel prediction", + "Ground-Truth Spectrogram", + ], + ) + + if vocoder is not None: + from .model_util import vocoder_infer + + wav_reconstruction = vocoder_infer( + mel_target.unsqueeze(0), + vocoder, + model_config, + preprocess_config, + )[0] + wav_prediction = vocoder_infer( + postnet_mel_prediction.unsqueeze(0), + vocoder, + model_config, + preprocess_config, + )[0] + else: + wav_reconstruction = wav_prediction = None + + return fig, wav_reconstruction, wav_prediction, basename + + +def synth_one_sample(mel_input, mel_prediction, labels, vocoder): + if vocoder is not None: + from .model_util import vocoder_infer + + wav_reconstruction = vocoder_infer( + mel_input.permute(0, 2, 1), + vocoder, + ) + wav_prediction = vocoder_infer( + mel_prediction.permute(0, 2, 1), + vocoder, + ) + else: + wav_reconstruction = wav_prediction = None + + return wav_reconstruction, wav_prediction + + +def synth_samples(targets, predictions, vocoder, model_config, preprocess_config, path): + # (diff_output, diff_loss, latent_loss) = diffusion + + basenames = targets[0] + + for i in range(len(predictions[1])): + basename = basenames[i] + src_len = predictions[8][i].item() + mel_len = predictions[9][i].item() + mel_prediction = predictions[1][i, :mel_len].detach().transpose(0, 1) + # diff_output = diff_output[i, :mel_len].detach().transpose(0, 1) + # duration = predictions[5][i, :src_len].detach().cpu().numpy() + if preprocess_config["preprocessing"]["pitch"]["feature"] == "phoneme_level": + pitch = predictions[2][i, :src_len].detach().cpu().numpy() + # pitch = expand(pitch, duration) + else: + pitch = predictions[2][i, :mel_len].detach().cpu().numpy() + if preprocess_config["preprocessing"]["energy"]["feature"] == "phoneme_level": + energy = predictions[3][i, :src_len].detach().cpu().numpy() + # energy = expand(energy, duration) + else: + 
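# ---------------------------------------------------------------------------
# Illustrative sketch only (values are made up): how the phoneme-level branches
# above use expand() to repeat each phoneme-level value by its duration in
# frames, and how get_mask_from_lengths() builds a padding mask. `device` is
# the module-level torch.device defined near the top of this file.
import numpy as np
import torch

def _example_expand_and_mask():
    pitch = np.array([1.0, 2.0, 3.0])        # one value per phoneme
    durations = np.array([2, 1, 3])          # frames per phoneme
    frame_pitch = expand(pitch, durations)   # -> [1, 1, 2, 3, 3, 3]
    assert frame_pitch.tolist() == [1.0, 1.0, 2.0, 3.0, 3.0, 3.0]

    lengths = torch.tensor([4, 6], device=device)
    mask = get_mask_from_lengths(lengths)    # True marks padded positions
    assert mask.tolist() == [[False, False, False, False, True, True],
                             [False, False, False, False, False, False]]
    return frame_pitch, mask
# ---------------------------------------------------------------------------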
energy = predictions[3][i, :mel_len].detach().cpu().numpy() + # import ipdb; ipdb.set_trace() + with open( + os.path.join(preprocess_config["path"]["preprocessed_path"], "stats.json") + ) as f: + stats = json.load(f) + stats = stats["pitch"] + stats["energy"][:2] + + fig = plot_mel( + [ + (mel_prediction.cpu().numpy(), pitch, energy), + ], + stats, + ["Synthetized Spectrogram by PostNet"], + ) + # np.save("{}_postnet.npy".format(basename), mel_prediction.cpu().numpy()) + plt.savefig(os.path.join(path, "{}_postnet_2.png".format(basename))) + plt.close() + + from .model_util import vocoder_infer + + mel_predictions = predictions[1].transpose(1, 2) + lengths = predictions[9] * preprocess_config["preprocessing"]["stft"]["hop_length"] + wav_predictions = vocoder_infer( + mel_predictions, vocoder, model_config, preprocess_config, lengths=lengths + ) + + sampling_rate = preprocess_config["preprocessing"]["audio"]["sampling_rate"] + for wav, basename in zip(wav_predictions, basenames): + wavfile.write(os.path.join(path, "{}.wav".format(basename)), sampling_rate, wav) + + +def plot_mel(data, titles=None): + fig, axes = plt.subplots(len(data), 1, squeeze=False) + if titles is None: + titles = [None for i in range(len(data))] + + for i in range(len(data)): + mel = data[i] + axes[i][0].imshow(mel, origin="lower", aspect="auto") + axes[i][0].set_aspect(2.5, adjustable="box") + axes[i][0].set_ylim(0, mel.shape[0]) + axes[i][0].set_title(titles[i], fontsize="medium") + axes[i][0].tick_params(labelsize="x-small", left=False, labelleft=False) + axes[i][0].set_anchor("W") + + return fig + + +def pad_1D(inputs, PAD=0): + def pad_data(x, length, PAD): + x_padded = np.pad( + x, (0, length - x.shape[0]), mode="constant", constant_values=PAD + ) + return x_padded + + max_len = max((len(x) for x in inputs)) + padded = np.stack([pad_data(x, max_len, PAD) for x in inputs]) + + return padded + + +def pad_2D(inputs, maxlen=None): + def pad(x, max_len): + PAD = 0 + if np.shape(x)[0] > max_len: + raise ValueError("not max_len") + + s = np.shape(x)[1] + x_padded = np.pad( + x, (0, max_len - np.shape(x)[0]), mode="constant", constant_values=PAD + ) + return x_padded[:, :s] + + if maxlen: + output = np.stack([pad(x, maxlen) for x in inputs]) + else: + max_len = max(np.shape(x)[0] for x in inputs) + output = np.stack([pad(x, max_len) for x in inputs]) + + return output + + +def pad(input_ele, mel_max_length=None): + if mel_max_length: + max_len = mel_max_length + else: + max_len = max([input_ele[i].size(0) for i in range(len(input_ele))]) + + out_list = list() + for i, batch in enumerate(input_ele): + if len(batch.shape) == 1: + one_batch_padded = F.pad( + batch, (0, max_len - batch.size(0)), "constant", 0.0 + ) + elif len(batch.shape) == 2: + one_batch_padded = F.pad( + batch, (0, 0, 0, max_len - batch.size(0)), "constant", 0.0 + ) + out_list.append(one_batch_padded) + out_padded = torch.stack(out_list) + return out_padded diff --git a/qa_mdt/checkpoint_389999.ckpt b/qa_mdt/checkpoint_389999.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..9a82fd5711a035b36f5d9cfdad0ec0bcf872f42e --- /dev/null +++ b/qa_mdt/checkpoint_389999.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d38d080fd906a0ed8dfd18401fa4c6608ae63b3556defcf50aa318dfd7284553 +size 13504339928 diff --git a/qa_mdt/checkpoints/clap_music/music_speech_audioset_epoch_15_esc_89.98.pt b/qa_mdt/checkpoints/clap_music/music_speech_audioset_epoch_15_esc_89.98.pt new file mode 100644 index 
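# ---------------------------------------------------------------------------
# Illustrative sketch only: pad_1D / pad_2D above right-pad variable-length
# numpy sequences with zeros so they can be stacked into a single batch array.
import numpy as np

def _example_padding():
    durations = [np.array([2, 1, 3]), np.array([5])]
    batch_1d = pad_1D(durations)               # shape (2, 3), short rows padded with 0
    assert batch_1d.shape == (2, 3)
    assert batch_1d[1].tolist() == [5, 0, 0]

    mels = [np.zeros((4, 80)), np.zeros((7, 80))]
    batch_2d = pad_2D(mels)                     # padded along the frame axis -> (2, 7, 80)
    assert batch_2d.shape == (2, 7, 80)
    return batch_1d, batch_2d
# ---------------------------------------------------------------------------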
0000000000000000000000000000000000000000..026b327c66328dcdec4ff32c5d58fe26f8551e19 --- /dev/null +++ b/qa_mdt/checkpoints/clap_music/music_speech_audioset_epoch_15_esc_89.98.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51c68f12f9d7ea25fdaaccf741ec7f81e93ee594455410f3bca4f47f88d8e006 +size 2352471003 diff --git a/qa_mdt/checkpoints/flant5/config.json b/qa_mdt/checkpoints/flant5/config.json new file mode 100644 index 0000000000000000000000000000000000000000..860552be4951ded0093cee6c5d6c6502bf1f5f73 --- /dev/null +++ b/qa_mdt/checkpoints/flant5/config.json @@ -0,0 +1,28 @@ +{ + "architectures": [ + "T5ForConditionalGeneration" + ], + "d_ff": 2816, + "d_kv": 64, + "d_model": 1024, + "decoder_start_token_id": 0, + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "gated-gelu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "n_positions": 512, + "num_decoder_layers": 24, + "num_heads": 16, + "num_layers": 24, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "tie_word_embeddings": false, + "transformers_version": "4.23.1", + "use_cache": true, + "vocab_size": 32128 +} \ No newline at end of file diff --git a/qa_mdt/checkpoints/flant5/pytorch_model.bin b/qa_mdt/checkpoints/flant5/pytorch_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..052d15561dc74846d50d29942b7ad730b5763d40 --- /dev/null +++ b/qa_mdt/checkpoints/flant5/pytorch_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93249e5f2b058ccddd0f4f5ef82aad80e262133eb103f1b42dc2da4b9bc97b36 +size 3132781861 diff --git a/qa_mdt/checkpoints/flant5/tokenizer.json b/qa_mdt/checkpoints/flant5/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..fc669fbad1a6a82119ca3e1fa75db33ee22ca47d --- /dev/null +++ b/qa_mdt/checkpoints/flant5/tokenizer.json @@ -0,0 +1,129420 @@ +{ + "version": "1.0", + "truncation": null, + "padding": null, + "added_tokens": [ + { + "id": 0, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 1, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 2, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32000, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32001, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32002, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32003, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32004, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32005, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32006, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32007, + 
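# ---------------------------------------------------------------------------
# Illustrative sketch only: qa_mdt/checkpoints/flant5 holds a T5-style text
# encoder (24 layers, d_model=1024, per the config.json above) together with
# its tokenizer files. How the pipeline wires this encoder in internally is not
# shown here; loading the folder directly with transformers, as below, is an
# assumption for standalone experimentation.
import torch
from transformers import AutoTokenizer, T5EncoderModel

def _example_encode_prompt(prompt="a gentle lo-fi beat with warm pads"):
    folder = "qa_mdt/checkpoints/flant5"  # path inside the cloned repository
    tokenizer = AutoTokenizer.from_pretrained(folder)
    encoder = T5EncoderModel.from_pretrained(folder)
    batch = tokenizer([prompt], return_tensors="pt", padding=True)
    with torch.no_grad():
        hidden = encoder(**batch).last_hidden_state  # (1, seq_len, 1024)
    return hidden
# ---------------------------------------------------------------------------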
"content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32008, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32009, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32010, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32011, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32012, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32013, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32014, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32015, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32016, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32017, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32018, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32019, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32020, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32021, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32022, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32023, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32024, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32025, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32026, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32027, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32028, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32029, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32030, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32031, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": 
false, + "special": true + }, + { + "id": 32032, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32033, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32034, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32035, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32036, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32037, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32038, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32039, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32040, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32041, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32042, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32043, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32044, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32045, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32046, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32047, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32048, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32049, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32050, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32051, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32052, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32053, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32054, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32055, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32056, + "content": "", + "single_word": false, + 
"lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32057, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32058, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32059, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32060, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32061, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32062, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32063, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32064, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32065, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32066, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32067, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32068, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32069, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32070, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32071, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32072, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32073, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32074, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32075, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32076, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32077, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32078, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32079, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32080, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 
32081, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32082, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32083, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32084, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32085, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32086, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32087, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32088, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32089, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32090, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32091, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32092, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32093, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32094, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32095, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32096, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32097, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32098, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 32099, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + } + ], + "normalizer": { + "type": "Sequence", + "normalizers": [ + { + "type": "Precompiled", + "precompiled_charsmap": 
"ALQCAACEAAAAAACAAQAAgMz8AgC4BQAAhyIAgMzkAgC4PQAAeyIAgMzsAgC4BQAAiyIAgMw8AADNvAAAmwkAgJ4JAIChCQCAgx0AAIAZAACBGQAAPR0AgDUdAIBNHQCARR0AgIAxAACBMQAApAkAgIkxAAA9WAMAPEgDAEAKAIA+aAMAAYUAAIQBAQADjQAAAokAAAWVAAAEkQAAB50AAAaZAAAJqQAACKEAAAutAAAKpQAADbkAAAy9AAAPvQAADrkAABHFAAAQwQAAE80AABLJAAAV1QAAFNEAABfdAAAW2QAAGeUAABjhAAAb7QAAGukAAB31AAAc8QAAH/0AAB75AABhOAkAZR0AgGNADgBi8AgAZSgPAGSADgBn2A8AZvAPAGlwDABoMAwAa/AMAGrYDABtSA0AbBwNAG8QEgBubA0ARgoAgHAMEwBzqBMAcuwTAHUoEAB0TBAAd9ARAHYUEAB50BYAePQQAF0dAIB69BYAdR0AgG0dAIB/fQEAhgwAgEGAAgDeCwCAQxgAAELAAABFSAAARGAAAEeQBgBGhAEASSgGAEhsAQBLOAcASvAHAE1wBwBMRAcAT/AEAE7MBACnCQCAUCwFAFOgCgBSEAUAVQAKAFRQCgBX0AgAVhALAFlICABYuAgAhBEAAFo8CACA9QAAgZ0AANgLAIAtHQCAg2kCAIJFAgCBNQIAgDUCAIdtAwCGVQMAgTkAAIRlAgAXDACAigEEAInVAwCI7QMAjwkAAKgLAIApDACAjAkAAC8MAICJMQMAkQkAAMzYAABVHQCAfR0AgL0aAIBMCgCAgGUDAIENAwCGPQAAgx0DAMwQAgDNhAEAgikAAMx0AwCjgQYAxRoAgICxAgCBsQIAzRoAgIEpAAClwQAA1RoAgMzoAwDNYAIAUgoAgKjxAABYCgCAXgoAgGQKAIDdGgCAgWkAAMzcBACCEQEA5RoAgGoKAIDtGgCA/RoAgAUbAID1GgCAswkAgMygBADN3AQAzAgBALYJAIClHQCAhhEBAOEAKwDgfCcA44hIAuIMOAKdHQCAh5EBALUdAICtHQCAgNkBAIE1AADMxAIA6kRkApUdAIANGwCA72hkAoERBwCC8QEA8NCLAolVAACB5QEAFRsAgIfhAQCAbQAAgQ0AAIN5AAB2CgCAgXkAAICVAQDMOAEAzRQBAIzBAQB8CgCAvAkAgKMVAQDDlBcAwpwUAMWEFwDEUBcAx+wXAMaAEgCNHQCAiAoAgMvQFgDK4BYAzRQWADUMAIDPvCAAzpwZANHMJADQ2CUA0+gkALFRAQA7DACAp90HAL0dAIDWvCQA2cgnANjUIgDb+CcALRsAgIftBwCCCgCAzPgEAB0bAIAlHQCAh8kGALAJAICR3QcAuQkAgCUbAIBwCgCANRsAgIUdAICMDACAjPkGAAsMAICA1QYAgcEGAMzEAgDNBAUAglEAAIN1BwCArQYAgbkGAIY1BwCHKQcAhEEAAI4KAICn7QAAPRsAgIjpBwCJzQcAlAoAgI/BBwCM3QcAmgoAgOoLAICnXQYAsJ0AAKAKAICmCgCAo0EGAEUbAIBVGwCAfQwAgE0bAIBdGwCArXEGAGUbAIC/CQCAzPgDAM0sAwDCCQCAo+UAAMUJAICMTQAAsgoAgKfxAAC4CgCAsT0GAIedAACGlQAAqB0HAISJAAC+CgCAgqkAAIHVAACtAQcAygoAgJE9AACCmQEAyAkAgM0MBQDMCAUAgT0AAIeFAQCIvQEAdRsAgMUdAICuCwCAjJEBAEEMAIBHDACAzR0AgID1AQCBhQEAgoEBAIOdAQCEiQEAxAoAgIapAQCHXQAAiG0AAIlNAABtGwCAzBACAIxdAACCDQAA0AoAgI9JAACw6QAAfRsAgPALAICjKQEAgCUBAIFVAQCFGwCApzUBAMykAQDNEAIA1goAgI0bAICBNQAA3AoAgK4JAQDoCgCAzOgBAM0oAgCVGwCAo/EAAIQFAACdGwCA4goAgK0bAICotQAApRsAgIFdAAC1GwCAzPwBAM3AAQC9GwCAxRsAgIGFAwARDACAgeUDAO4KAICH6QMAywkAgIylAwDNGwCA+goAgKoJAIDVGwCAgZkDAIHdAwCMvQMAzSQBAMwgAQDMEAIAzTACAIH5AACHUQAAgFUAAIFZAAD0CgCAg0kAAIxBAADlGwCA3RsAgM4JAICBfQAAgHEAAMwgAwDNsAMAo30DANEJAICjEQMA7R0AgIEtAQCx/QAApzEDAK1BAwDlHQCAo20DAP0dAID1HQCA7RsAgKdtAwCANQAAgR0AALFtAwCILQAAmAwAgKeVAACBcQAAgFkAAINxAACj9QAAgVEAAK2BAAD1GwCAsQkDAIldAACEPQAAzDgBAISdAQCBGQAAgAkAAIRlAAD9GwCAzNAHAMzwBwAFHACAkYkAAMxMBgDNBAYAzHAGAM10BgDMQAcAmy0PAMyoBwDNrAcAhg0AAIdVDwCEQQ8ACQsAgIIBDACDVQ8AgDUBAIHZAQCkDACAj+kAAIztAACSDACA3R0AgIv1AACIbQ8AiQ0AAA8LAIC0CwCAgiUAAE0MAICBQQAAUwwAgBUeAIANHgCAJR4AgB0eAIAtHgCABR4AgIApAACBKQAA/AsAgA0cAICEeQAAFRwAgIFNAQCAoQEAGAsAgKP9DwDMOAIAzUgDAB0cAICBWQAAzXwCAMykDQAkCwCAWQwAgKjJDwCHOQAA1wkAgImhDwADCwCAkREAAJ4MAIDaCQCAmQsAgF8MAICAuQ8AgbkPANUdAICDjQ8A9gsAgCUcAICEBQAALRwAgB4LAIA1HACAKgsAgIGdDwCHIQAAh7UPAMyoAgDN6AIAzLQMAM3cDACmzQAAp8UAAE0cAICPgQ8AjIkPAKPlAAAwCwCAPRwAgDwLAICxyQAAhwUAAFUcAIBFHACAhz0AAF0cAIBxDACANgsAgKMFDwCB+QAAzKgDAGUcAIBICwCAjEkAAKPxAABtHACAdwwAgEILAICnlQAAfRwAgHUcAIDMrAMAzcgAAN0JAICHaQAA4AkAgIG9AACCeQAA4wkAgIe5AQBOCwCAkaUAAIEdAACdHACAVAsAgIgFAAClHACAm5EAAFoLAIDmCQCAjJEBANILAIDGCwCAwAsAgMwLAICDRQAAgrkBAIG5AQCApQEAPR4AgIZxAABgCwCAhEkAAIsVAACKPQAAiTkAAIhFAACP+QAAZgsAgLoLAICMBQAAp1EBAKZJAQBlDACAsHkAAKNZAQCMqQAAgKkAAIGpAACBlQAAgJUAAK1xAQBrDACAogsAgISNAABNHgCARR4AgKMhAABdHgCAVR4AgGUeAICBbQAAgG0AALEFAQCkOQAANR4AgIUcAIBsCwCAqAUAAJUcAICNHACArQkAAMywAQCBvQMAgL0DAIPNAwCtHACAtRwAgL0cAIDMvAEAzYQBAInpAwDMHAEAgdkCAIDFAgDNOAEAzDwBAMxoAgDNRAIAg00AAMUcAICH2QAAhy0AAIBFAACBEQAAggUAAHILAIDVHACAzR
wAgN0cAIDMOAIAiBUAAIjhAACAbQAAgTkAAMyEAgDNUAEAo0UDAIQ5AQDlHACA7RwAgMzcAwDNSAIAbR4AgOkJAIB4CwCAhR4AgKoMAICBbQAA9RwAgH4LAICj0QAAfR4AgHUeAIDMiAQAgXUAAIB1AACBCwCAo7UAAMwABADNVAIA/RwAgIcLAICETQEAjQsAgAUdAIANHQCAzNAOAMwsAQDMAAUAzVwFAOwJAIDvCQCAzJgOAIHBAADMzA8AzDwOAMwIAQDNnA4AzNQPAM14DwDMPA4AzTgOAIHlAQCA5QEAg+UBAILlAQDUCQCAhOUBAIfhAQBBHQCAiaUBAIjZAQCByQcAOR0AgFEdAIBJHQCAzDQBAPUJAICA3QAAgekAAEMKAICD/QAAgM0AAIH5AACBEQcAaR0AgGEdAICJ0QAAzCgBAHkdAIBxHQCA4QsAgMw0AQDbCwCAgF0AAIFlAACjAQEAg2EAAIFxAACASQAAMR0AgBoMAICrCwCAiVUAACwMAIAyDACAWR0AgIEdAIDBGgCATwoAgIIdAACDeQcAgBkHAIEZBwCGIQAAhykAAISRBwDyCQCAimkAALHZBgCIaQAAifUHAEkKAICP3QcAjNkHAIkMAID4CQCAKR0AgPsJAICRoQcAgEEHAIFBBwCHBQAAyRoAgIKRBwDRGgCA2RoAgKOVBgCGhQcAp+0AAMyQAgDN4AUAsekAAKPBAABVCgCAWwoAgGEKAIBnCgCA/gkAgKVlBwDhGgCAzLgDAKhVBwDpGgCAbQoAgPEaAIABGwCACRsAgPkaAIABCgCAo60AAAQKAICMJQYABwoAgIxNAACpHQCAgm0AAIE9BgCCAQYAgWUAAKEdAICHZQAAuR0AgIcRBgCHrQEAsR0AgMxQAgDNxAIAgeEBAIDJAQCD4QEAkYkAAID9AQCB1QEAmR0AgIydAQCJNQAAcwoAgIB1AACBXQAAhi0AAIc1AACEfQAAERsAgIKFAQCDfQAAgJ0BAIGRAQAZGwCAj+kAAIzhAAB5CgCAfwoAgAoKAICIDQAAifkAAKc5AQCRHQCAiwoAgDgMAICjJQEAPgwAgLBZAACJHQCAggUAAMEdAICtFQEAjwwAgDEbAICGBQAAhQoAgCEbAIApGwCAp2kAAIANAQCBAQEAhzEAAKNJAACxGQEAzBACADkbAIAODACAkQoAgK1RAADM1AEAzfgBAKhBAABBGwCAzTgBAMw8AQCB7QMAlwoAgJ0KAICMDQAA7QsAgKMKAICBxQMAzGgCAKkKAICCxQMASRsAgITJAwCHKQAAhjEAAFkbAICCbQAAgAwAgFEbAICHYQAAYRsAgGkbAIAVHQCAzKgDAM2sAgCB+QAAiC0AAA0KAIAQCgCAEwoAgIw1AAC1CgCAuwoAgLHVAADBCgCAeRsAgMkdAICxCwCAzDABAEQMAIBKDACA0R0AgMwEAQDHCgCAcRsAgKelAADTCgCAo40AAMwUAgCAuQAAgbkAAKeFAAAIDACAgmUAAIEbAICMNQAA8wsAgMzsHADN/AMAiRsAgK6tAADZCgCAkRsAgMzABgDN0AYAsL0BAMyQBwDfCgCAgckBAMwYHQDNIAIAhBEAAOsKAIDNuAYAzKwGAKEbAIDlCgCAgSkAALEbAICpGwCAo+0BAMxAHQDNEAIAuRsAgMEbAICBCQAAyRsAgMxAHQDN0AIAqNkBABQMAIDMkAcAzBwBAMxgBgDNZAYA8QoAgBwKAIDRGwCAkSkBAP0KAICBzR8A2RsAgPcKAIDpGwCA4RsAgMzEBgDNwAYAgTEAAIDZAAAfCgCAIgoAgIK5AQCDRQEAgLkBAIG5AQCGXQEA8R0AgIRdAQDpHQCAzcAAAMzwAACIARwAiXkBAAEeAICPVQEAjGEBAPkdAICB3R4AgRUfAJkbAICBXR8AjIEfAIdBHwDMGAMAzWgDAIBNHwCBpR8AJQoAgIOpHwCMFR8AjNEeACgKAICHtR8AgJUfAIGZHwCBEQAAg70fAICFHwCBiR8A8RsAgIQ9AACbDACAiZkfAPkbAICIBQAABgsAgAEcAICADQAAgf0AAAkcAICj2R8Ao3keAKOFAAAMCwCArTUfAKdhHgCnqR8AoQwAgIQNAACnDACAozUfACsKAICtiR8AhHEAAKchHwCxPR4AsYUfAJUMAIDhHQCAEgsAgLcLAIDMtBwAzbAcAFAMAICxQR8AVgwAgJwLAIAZHgCAER4AgCkeAIAhHgCAgLkeAIG5HgCCIQEAgzUBAIRhAQAxHgCAhokBAIe9AQCIkQEAiekBANkdAICL/QEAjOUBAIINAAAJHgCAj90BAIO5AQCRrQEAgb0BAIC9AQCAoQEAgaEBAPkLAID/CwCAhD0AABEcAICJlQEAm4EBAIHNHgCAzR4AzPwCAM3wAgCB5QAAGRwAgIHtAACjpQAAzJABAM1cAgCHHQAAGwsAgKj5AAAhHACAJwsAgFwMAIBiDACAKRwAgIQFAAAxHACAo9UAACELAIA5HACAgVEAAMz0AQDN0AEALQsAgIc9AABRHACAMwsAgEEcAIA/CwCAhwUAAFkcAIBJHACAh/EDAIHZAwCBmQMAgZEAAGEcAIB0DACAjPkDAMwkAQCHuQMAgfkDADkLAIDMZAIAgskDAIyZAwBpHACAh9EDAI+RAwCB3QYAkfUDAMwABADN7AMAh2UAABkdAIBLCwCAcRwAgHoMAIBFCwCAzBgBAIg5AACBHACAeRwAgMxcAwCMJQAALgoAgMwsAQCx/QAAozkDADEKAIA0CgCAoRwAgKdZAwDMdAMAiAkAAKNRAwCpHACAXQsAgINtDQCnnQAApq0AAKOdAACxDQMAzCgBANULAICntQAAprUAAMkLAIDMMAEAgdUHAMMLAIDMKAEAzwsAgEEeAIBjCwCArYkAAGkLAICAzQEAgd0BAMxEAQDNnB4AhPUBAL0LAIDMWAEAzUwBAIDtAQCB/QEAg7UAAGgMAICM3QEAbgwAgMwIHgCM8QYAzDgBAM08AQBRHgCAiREAAIEFBgBJHgCAYR4AgFkeAIBpHgCAgz0AAIAhAACBOQAAgDkAAIEhAAA5HgCAiRwAgMwoAQCB2QYAbwsAgIH9BgDMJAEAmRwAgJEcAICxHACAgCEBAIE1AQCjBQAAuRwAgMEcAIDJHACAzIwFAM1AAgC3HAMAdQsAgIfNBwDZHACA0RwAgB0dAIDNiAAAzJAAAIzdBQCjhQAAFgoAgMzgAgDhHACAiNUHAIFNAACATQAAUQsAgOkcAIBXCwCAkTkHADcKAICIxQcApQsAgIrJBwDxHACAmz0AAIflBwBxHgCAgYUHAICFBwA6CgCAgvkHAILVBgCDRQAAgMkGAIHdBgCG4QYAewsAgIRRAACJHgCAipUGAIuZBgCIeQAAiZ0GAK0MAICPWQcAjG0HAPkcAIDMgAMAzSQCALARBwA9CgCAgR4AgCEdAIB5HgCAhAsAgICNAACBnQAAzOwDAM3oBAABHQCAigsAgKNJBwCQCwCACR0AgKO9BwARHQCAGwAAg
OcHAIALAACApKUHAOsEAICKBQCAAwAAgKhhBwDZDQCAZQAAgMgDAIAbCQCArWkHAIAtAQCBPQEAgl0BAINRAQCEYQEAuAQAgKwEAICHYQEAiK0BAIm1AQCKvQEAjykVALwFAIAdDACAzHgCAM3YBQCB3QEAgXEAAOQLAICC/QEAhBkAACMMAICH7QEAIAwAgMw0BADNMAQA5wsAgJ9pFQAmDACAjMkBAM34BADM8AIAsUkBACEHAICB1QAAoxUBAKCZFQBzCACARgcAgIT1AADMKAQAzSwEAMMIAICveQEAqH0BADENAICqaQEAUgkAgLQlAQC1KQEAowkBAAIMAIDqBgCA7gYAgLIFAQCzPQEAvPUAAL39AAC+2QAAOAgAgLgBAQC5AQEAugEBADwHAIBDBwCAhgwAALOdAwCyiQMAswgAgIC9AwBpBwCAbAcAgBIJAIDkBgCA5wYAgDUIAICJhQMAzOQHAL+hAwAFDACA1wwAgIxlAADN5AwAzCQMAIlBAACIVQAAi0UAAIpFAACFtQMAhLUDAIeVAwCGgQMAAQ0AgAQNAIAHDQCAmCwAABMAAICmyAAAzYwGAMyoBgCFaQAAFwAAgDEAAIBpAACAzPADAAcAAIA1AACA0QwAgLGVAAAlDQCAs5UAALKVAAA1DQCAOA0AgEANAIA7DQCALg0AgHUAAICmBgCAJQAAgJgJAIAdIQCAv1UDAEMNAIAZIQCAFSEAgGEgAIC4bAAAlGUNAJIAAgCcrQEAnaUBAJqJAQCbiQEAmJkBAJmJAQDMIAYAzQQGAMxABgDNXAYAzDwHAM04BwDMvAcAhXUAAIABDwCBDQ8AaSAAgLqZAQCFBQAAcSAAgFkgAIC+hQEAgSkPAIAlDwBlIACAgiEPAIUpAAC0pQEAhREAAG0gAICziQ8AsoUPALHJAQCwAQwAt4EPALbtAQC17QEAtO0BAIFlAQCAZQEAg2EBALi1DwDMPAsAhHkBAIDhDwCB3Q8AdSAAgF0gAIDMyAQAzbgEAIWtAACFFQAAISEAgDkhAIDM6BkAzbQZAKRdAQBGDQCAok0CAKPxDwCgVQEAod0PAH8IAIBuCQCAOwkAgO0eAIBsCQCA9R4AgHcJAIDxHgCAsQgAgJMNAACtHgCA+R4AgITVDACF6Q4AlGkAAIfdDgC1HgCAmbQCAL0eAIDFHgCAsR4AgD0hAIC5HgCAn3QBAMEeAICRGA0AgI0OAIGBDgCGhQ4AlYwDAISJDgCXRAIAghEAAKm4AACA0QAAge0AAMkeAIBJDQCA5R4AgIVZDwCDiQAAoTQNAIFFDgCASQ4A6R4AgKU0AQCFYQ8AzPAUAB0fAIC5xAUAzMgDAM3cAwCA3QAAgcEAACUfAIC/kAUAhREAALHsBwCA9QAAgcEAAKEgAIC1jAYALR8AgLdABgCA3Q4AgekOAMwoAgDNtAIAgM0OAIH5DgCFKQAAg4UBAIB1AQCBsQEAgPEBAIHVAQCpIACANR8AgIUFAACxIACAgJkBAIG9AQCCfQAAk9UBAJThAQCFDQAAmSAAgCEfAICACQAAgRkAACkfAICTrQEAlC0AAKUgAICFDQAAMR8AgIUFAACtIACAOR8AgIUpAACCGQAAhTUAAIDxAACB4QAAtSAAgJ0gAIBBIQCAhQUAAGEhAICDdQEAgO0BAIEpAQDM8AEAzbABAEwNAIBdIQCAWSEAgKMNAIBdHwCAZR8AgIA9AACBDQAAbR8AgHUfAICALQAAgR0AAIIVAABhHwCAzSwBAGkfAIBxHwCAeR8AgIjFAwClIQCAzJACAM28AgCE7QMATw0AgIb5AwCdHwCAgIEDAIH9AwCAPQAAgTUAAIFJAACAQQAAzdwBAIJBAAClHwCAoR8AgKkfAIDNMAEAlJ0DAI0hAIDN8AEAzAwBAIG5AwCAxQMAg6EDAJOlAwCArQAAgdUAAICdAACBqQAAiSEAgFINAICBwQAAgMkAAIC1AACBgQAAhSEAgINpBADMcAMAzbQDAIEhAIDNPAEApg0AgJMBBADNjAIAzPQCAIANAACBNQAAlNkGANEfAIDVHwCA2R8AgMwIAQDNHAEAgREAAIApAACpIQCAghkAAICRAQCBkQEAzWgFAMyUAgDMEAkAzSgWAMxYDgDNeA4AzBQNAM3YCgDMKAwAzYwNAMzgFwDM4AoAzDgLAM30CACFEQAAVQ0AgIBRBwCBUQcA4SAAgM2QDgCFBQAA6SAAgMzYDgDN7AEA8SAAgM0ADgCFGQAAzfAPAM08DgDNVA4AzGgBAM1sAQDZIACAYQgAgJSZBwDMwDsAgGEBAIHZAACFKQAAzWQOAMx4AQDNfAEAga0HAICtBwCFZQAAgp0HAIBRAQCBUQEAlOEHAM3AAACEeQEAk8UHAIZhAQDlIACAiCEBAIUNAADtIACAzRgBAMzYAADNtAAAgN0HAIHNBwCZHwCAhQkAAM0fAID1IACA/R8AgN0gAIAFIACADSAAgBUgAIAJIACAASAAgK0hAIARIACAGSAAgMy4AgDNHAMAgGUAAIF1AACCfQAAHSAAgIUJAACFQQAAASEAgKkNAICAmQYAgSEHAIUZAACDfQAACSEAgIVZAAD9IACA+SAAgIDNAACB2QAAjR4AgIURAACE6QAAlR4AgIblAABBIACAgDUAAIENAACdHgCAhR0AAEkgAIClHgCAhQUAAFEgAICAVQAAgW0AAIJ9AACTRQAAlA0AAIUNAAA5IACAkR4AgIAJAACBEQAAmR4AgIUdAABFIACAoR4AgIUFAABNIACAgOkBAIHxAQCCBQAAqR4AgIUJAACFCQAAVSAAgD0gAICAbQEAgXkBAIIZAACDpQEADSEAgIV1AACFBQAAESEAgAUhAIAhIACAzMgCAM3cAgCsDQCAzR4AgIA5AACBOQAA1R4AgN0eAIDRHgCA2R4AgIAdAACBDQAA4R4AgCUgAICAxQAAgdUAAM3AAADMJAIAgNUAAIHFAACFOQAAg8kAACUhAICvDQCAgNUAAIEJAACFBQAALSEAgP0eAICBIACAgAkAAIERAAAFHwCAk5kAAJS5AAANHwCAhWUAAIU9AACJIACAk10AABUfAICFEQAAzXAFAMx0BQCUATwAkSAAgHkgAIDNKAEAhSAAgI0gAICFGQAAlSAAgH0gAIA1IQCAKSEAgCkgAICFJQAAhTkAAMz4AgDNxAMAzTwBALINAICBlQMAgI0DAM3EAQCCpQMAhVEAAIVJAADMKAEAzSwBAM04AQDMPAEAgGk+AIFpPgBJIQCARSEAgM04PADMVDwAgdE8AJOdPgDMSAEAzcgCAM00AQBNIQCAlLk+AFgNAICAoT4AgaE+AIKhPgCIjTwAVSEAgIWtAACALQAAgSEAAIXVPwCVHwCAgO0AAIHxAACGpQAARR8AgISpAADNJAEAzSgBAE0fAICI+T4AhfE/AFUfAIBJHwCAhcU/AM0wAQDNEAEAzfQGAIDdAQCB6QEAzbwGAM1wBgDM4AYAzVwBAMxoBgDNkAYAzWQGAM14BgDMrAcAzagHAMzo
BwDNyAcAgk0/AIP9AgCANQIAgekCAFEfAIBZHwCAgAU9AIV9AQBRIQCALSAAgM0UAQApDgCAge0BAIDhAQDNPAEAgs0BAM0sAQCCdQEAgW0BAIBZAQCAZQEAgcUAAIUfAIDNJAEAzTgBAILxAACB+QAAgFkBAIApAACBcQAAzBgBAM18AQDNLAEAjR8AgIEdAACAHQAAiR8AgJEfAIBxIQCAzSQBAMzkPQDNXA8AzegAAMwMAQCA1QEAgckBAIKZAACD5T8ACR8AgBEfAIAZHwCAMSEAgCMOAIB1IQCAPR8AgDEgAIBBHwCALA4AgIBNPwCBQT8AfR8AgGkhAICBHwCAZSEAgIAlPwCBKT8Ak5E/AIN9AAAmDgCAlEEAAMzYAgDNrAIAbSEAgJNVAACACQAAgR0AALUNAIB9IQCAlEEAAK0fAICAnQAAgaEAAIAdAACBEQAAhKUAALUfAICGpQAAvR8AgIjxAACC0QAAgdkAAIDNAACAJQAAgSkAAIIFAADFHwCAsR8AgLkfAIDBHwCAk7EAAJQRAADJHwCAgB0AAIEVAACAJQAAgS0AAII9AAB5IQCAgO0AAIHRAACCFQAAg4EAAIHQPQA1IACAzCACAM3cAQCFeAIAkSEAgC8OAICZIQCAiRgDAN0fAICALQAAgTUAAIAJAACBbQAA5R8AgMEgAICRsQAAkKkAAJPdOwCSAQQAlaUAAJSVOwDtHwCAlqEAAIUJAACTQQAAySAAgPUfAICFBQAA0SAAgJT1AAC5IACAgLkAAIHdAACC5QAA4R8AgOkfAICF6QAAgAkAAIE1AACFBQAAxSAAgPEfAICFHQAAzSAAgPkfAICFBQAA1SAAgLHBBQCwxQMAvSAAgLLFAwC12QUAtM0DAJ0hAICFOQAAuf0DAKEhAICVIQCAuw0AgM0NAIAXDgCAAR8AgAUOAIDTDQCAzIgCAAsOAIDN4D4AzZABAMwkAQBwDQCAjg0AgEEOAIB9DgCAgLEAAM3UPgDN5D4Agw4AgMy8PgDNuD4AgNEDAIHtAwCC/QMAhmkAAD4OAICFnQMAzTwBADgOAIDM6AIAzTw/AIjlAADNGAEAiQ4AgIhBAAA7DgCAdw4AgM0sAQCVDgCAgNUAAJsOAICG4QAAhukAAEcOAIDNJAEAoQ4AgM0QAQCI0QAAiCkAAMz4AgBNDgCAzfgCAMwkAQCnDgCAhS0DAMygPgDNbD4AgNUDAIHNAwCCAQMAg/kDAMxkAwDNzAIARA4AgM0kAQDMDAIAzQgCAIERAADMnAMAzLA+AM20PgDMxD4AzcA+AMyAPgDNuD4ArQ4AgMyEAgDMmD8AzVA+AMwgPgDNoD4AzQw/AM0wPwDNeD8AzQQ/AIhZAAC/DgCAzfgBAMzEAQBKDgCAxQ4AgMsOAIDMFAIAzAgBAM3IAQCIBQAA0Q4AgNcOAIDMKAIAuQ4AgIgNAACG0QAAgB0BAITNAACI9QAAzDwCAIQ1AQDMRAIAhikBAIAOAICIZQEAhg4AgKdEBQBiDgCAi+0AAIjtAACBDQAAiCUAAIZlAADMcAIAzXQCAMwwAgDN2AUAXA4AgIwOAICAOQAAXw4AgMzgBQB6DgCAzCgBAM0UAQCGJQAAiFUAAAgOAICGhDAAxA0AgIDVBwCG/QcAmA4AgMwkAgCIPQAAng4AgGsOAICIPQAApA4AgMxIAgDNeAIAUA4AgKoOAICXwAUAlnAFAJUYBQCAaQAAk1gFAIE5AACIZQAAkPg8AIZZAACeqAUAhEUAAGgOAIDM1AIAmrQFAIBdAACYrAUAp+wEAIgRAADM2AIAzdwCAKO8BACwDgCAzGACAMIOAIBuDgCAyA4AgK0IBADODgCAq/QEAMwsAgCIBQAA1A4AgLfoAwC2HAQAtSgEAMwAAgCzKAQAi3kAAIh9AACwdAQAhkEAAL6kAwCEdQAAiB0AANoOAIC6TAMAzNwDALj8AwCDqAIAiA0AALwOAICIFQAAh5QCAMw4AgBlDgCAzAQCAIvcAgCPDQAAcQ4AgI8ZAADMIAIAdA4AgI3wAgCIdQAAmCADAJksAwCPDgCAlA0AgMxMAgCWcAMAzCQCAIg9AACSDgCAzCwCAIgFAACzDgCAzCQCAIgNAAC2DgCAh/UAAKjUAwCpxAMA3Q4AgNlgAgDSDwCA1Q8AgNsPAICUNQAAkzEAANloAgDYDwCA2UwCAJQFAADeDwCAlSEAAJQpAABQEACAdBYAgEMXAIDSFgCA2WACADcXAIC12AMAtPADAJQ1AADZWAIAWhcAgJQFAADZVAIAlA0AADEXAIDgdAEAisgAALwVAACIyAAA4IACAIcXAICBoAAApOwCAKTIAgCoXAAAvA0AAJkXAIDghAIAvAUAAJ0XAICk+AIA4PQCALDMAwCV0AAAXRcAgLPgAwCmyAIAp2ACAJLYAABkFwCAvsEAAGsXAICXwQAAchcAgHkXAICAFwCAzXg/AMy8PwC+gA0AixcAgLx4DAC9gA0AuvQMALtUDAC49AwAkhcAgLYXAIC3uAwAuhcAgLWMDACyoAMAs6AMAKEXAICxQAMArnACAK9kAwC4BQMArUgDAKgXAICvFwCAqEQDAKnYAwDaFwCAp9gDAKRoAgCliAMAtjUDALc9AwCSyAIAtT0DAJldAQCYTQEAm2UBAJppAQCdZQEAnGUBAJ+FAQCemQEAh5wCAL6tAACWpQAAl70AAMw0BQDNjDcAzLg4AM2sOACflQEAth0AAJ2ZAQCc9QEAs7EBAK54AgDhFwCAvhcAgJk9AADFFwCAmxkAAJoJAADMFwCA0xcAgOBIAgCeCQAArFwCAK30AgD6FwCA9hcAgP4XAIDoFwCAh2ADAO8XAICvVAIAvhEAAJcFAAACGACA4KwCAAYYAICG+AMAh+wDAOC0AgAOGACAr0gCAK6QAgDgPAIAvg0AAAoYAICXGQAA4NgCAIaEAwCWEQAAvwAMAJ1tAACcYQAAEhgAgLFMAgCzUAIAlQ0AABYYAICGnAMA4MgCALMEAgCCBQAAIhgAgLNQAgCVDQAAJhgAgBoYAIAeGACA4LQCAIaMAwCH3AMAvg0AAJVpAACWeQAAKhgAgLToAgC1UAIAlwUAADIYAIDg1AIAtPQCAL4ZAADgoAIALhgAgODUAgCZjAMAt9QCAIoFAAA2GACAOhgAgIoVAAC3NAIAjx0AAD4YAIBCGACAswUAAEYYAICzBQAAWxgAgJwJAACdCQAATRgAgFQYAICMBQAAYhgAgG0YAIB0GACAexgAgJ9JAACCGACAiRgAgGYYAICQGACAlxgAgNkYAIDPGACA6hgAgOAYAICeGACAg8kBAIH5AQCsGACAsxgAgLoYAIDBGACAyBgAgKUYAICAtAIApYgDAOEIAgCuHQAA8RgAgLwJAACN9QEA9RgAgOEAAgCSlQEA45QQAJNFAACXiQEAhRQAAId4AQCGAAQARjoAgEo6AIBOOgCAUjoAgFY6AICdeQAA74xoAJyhAQBaOgCAXjoAgKKZAABiOgCAZjoAgGo6AIBuOgCAp4kAAHI6AIB
2OgCAqUkBAHo6AICsqQAAfjoAgII6AICGOgCAsyUBAIo6AICOOgCAkjoAgLchAQC2OQEAtTEBAJY6AICaOgCAufkAALkRAQC4GQEAnjoAgKI6AICmOgCAqjoAgICwAQCEiAIArjoAgIPIAQCEVAMAhFwEALI6AICEXAUAgN0DAIEtAACCMQAAvjwCALo6AIC+OgCAh4gDAIacBACzLQMAwjoAgMY6AIC+AAQAvhwFALbRAwC12QMAyjoAgLv5AwC68QMAmljTAYTgBwC/xQMAvtkDAL3dAwC83QMAvgAYAKUFAwCmDQMAzjoAgIQcGADSOgCA1joAgKPxAwCsAQMArQEDAK4FAwCvGQMArKQbAq3cGgKqLQMAqyUDAL5MGQC+SBoA2joAgL6AGwC04BoCtdQdArYwHgLvCAIA3joAgOGgAQC6OBoC4/gCALoAAAC9ZBwCvvQcAr8AEAKRBNMBkOT2AeBEAQCSCD4C4joAgOY6AIDqOgCA7joAgL6sHADyOgCA9joAgPo6AID+OgCAAjsAgAY7AIAKOwCAgbBtAICAAQCDHFIAgth3AIUgmgCEkL4AhwjPAIaM5gCJbDcBiOAsAYsYfgGK2BMBjeClAYzwWgGP/OsBjliPAbDVFwCxAWgAso1rALOdawC0SWsAtZVvAA47AIDgcAEAEjsAgBY7AIAaOwCAHjsAgIAZAACBGQAAggUAACI7AIAqOwCAoaUCAKJJBwCjQQcApEEGAKXVGwCm3RsAp8EaAKgBHACp4R8AqkkfAKsBEACs9RMAra0TAK4BFACv+RcAqDEGAKkxBgCqTQYAq0UGAKxNBgCtmQYAro0GAK+FBgCGgAMAhxgDAC47AIAyOwCANjsAgDo7AIA+OwCAQjsAgLhtBwC5dQcAun0HALt1BwC8bQcAvc0HAL75BwC/+QcAsKkGALGFBgCyeQcAs3kHALRpBwC1aQcAtl0HALdVBwC2OgCAs8EGAEY7AIAmOwCAth0GAEo7AIBOOwCAtcEGALppBgC7RQYAUjsAgFY7AIC+qQcAv6kHALypBwC9qQcAo4UGAFo7AIBeOwCAYjsAgGY7AICmWQYApYUGAGo7AICrAQYAqi0GAG47AIByOwCAr+0HAK7tBwCt7QcArO0HAKjBBgCpLQEAqiUBAKs9AQCsJQEArS0BAK4lAQCvlQEAdjsAgHo7AIB+OwCAgjsAgIY7AICCvQAAgb0AAIC9AAC4nQEAua0BALqlAQC7bQAAvHUAAL19AAC+dQAAv20AALD1AQCx/QEAssEBALPBAQC0tQEAtb0BALa1AQC3rQEAijsAgI47AICSOwCAs6EBAJY7AIC1oQEAtqEBAJo7AICGgAEAh8QBALo9AQC7NQEAvBkBAL0ZAQC+fQEAv3UBAKPtAQCeOwCAojsAgKY7AICqOwCApu0BAKXtAQCuOwCAq3kBAKpxAQCyOwCAtjsAgK85AQCuMQEArVUBAKxVAQC6OwCAvjsAgMI7AIDGOwCAyjsAgOGsAQDOOwCA42AGANI7AIDWOwCA2jsAgO9UBgDeOwCA4jsAgL60GgDmOwCA6jsAgO47AICGaBwAh4wDAPI7AID2OwCA+jsAgP47AICAOQAAgTkAAIIFAAACPACACjwAgA48AIASPACAFjwAgKgdAwCpQQMAqkEDAKtBAwCsQQMArUkDAK5xAwCvcQMAhCAdABo8AIAePACAIjwAgCY8AIAqPACALjwAgDI8AIC46QAAufUAALr9AAC78QAAvJEAAL2RAAC+iQAAv4kAALDhAACx4QAAsuEAALPhAAC04QAAte0AALbZAAC32QAA4wwHAOEgBwDhMAEA4wgHADY8AIA6PACAPjwAgEI8AIBGPACASjwAgE48AIBSPACA75gHAFY8AIBaPACA74gHALOJAgBePACAYjwAgL6AGgBmPACAtokCALWJAgBqPACAu2UBALplAQBuPACAcjwAgL9pAQC+ZQEAvXUBALx1AQC3PQYAtj0GALU9BgC0IQYAszUGALI1BgCxAQYAsAkGAL9ZBgC+UQYAvVkGALxNBgC7bQYAunkGALlxBgC4eQYAgJ0AAIGtAACCpQAAejwAgH48AICCPACAhjwAgIo8AICvcQYArmkGAK1tBgCsbQYAq4EGAKqZBgCpkQYAqJkGAAY8AIB2PACAjjwAgKPFHQCSPACApcUdAKbFHQCWPACAhgADAIdkAwCqKR4AqykeAKw5HgCtOR4ArikeAK8lHgCzOR4AmjwAgJ48AICiPACApjwAgLb9HgC1/R4AqjwAgLvZHgC60R4ArjwAgLI8AIC/aR8AvmEfAL1pHwC8wR4AqPEeAKnxHgCq8R4Aq/EeAKw1HgCtPR4ArjUeAK8tHgC2PACAujwAgL48AIDCPACAxjwAgMo8AIDOPACA0jwAgLjlHwC57R8AuuUfALv5HwC86R8AvZEfAL6RHwC/jR8AsFUeALFdHgCyVR4As/0fALTlHwC17R8AtuUfALfdHwCjeR8A1jwAgNo8AIDePACA4jwAgKa9HwClvR8A5jwAgKuZHwCqkR8AhogAAIdMAQCvKR4AriEeAK0pHgCsgR8AgEkAAIFJAACCWQAAs5keAOo8AIC1iR4AtlEBAO48AIDyPACA9jwAgLotAQC7JQEAvD0BAL0lAQC+JQEAvxUBAKhNHgCpVR4Aql0eAKtVHgCsTR4ArZ0BAK6JAQCvgQEAhKwBAPo8AID+PACAAj0AgAY9AIAKPQCADj0AgBI9AIC4ZQEAuW0BALplAQC7fQEAvGUBAL1tAQC+ZQEAv9kAALClAQCxrQEAsqUBALO9AQC0rQEAtZ0BALaVAQC3XQEAo9UdABY9AIAaPQCAHj0AgCI9AICmHQIApcUdACY9AICraQIAqmECACo9AIAuPQCAr1kCAK5pAgCtaQIArHECADI9AIA2PQCAOj0AgD49AIBCPQCARj0AgEo9AIBOPQCAgDkAAIE5AACCBQAAUj0AgFo9AIBePQCAh0ADAIZcBACETAQAYj0AgGY9AICEBAUA4yABAGo9AIDhqAEAbj0AgO+UGgByPQCAdj0AgHo9AIB+PQCAgj0AgIY9AICKPQCAs6EDAI49AICSPQCAlj0AgJo9AIC2fQMAtX0DAJ49AIC7WQMAulEDAKI9AICmPQCAv/0AAL79AAC9/QAAvEEDAKhRAgCpWQIAqmkCAKtpAgCstQIArb0CAK61AgCvrQIAhKgHAKo9AICuPQCAsj0AgIKpAAC2PQCAgKkAAIGpAAC4aQEAuWkBALoJAQC7CQEAvBkBAL0ZAQC+CQEAvwkBALDVAgCx3QIAstUCALNpAQC0eQEAtXkBALZpAQC3YQEA4bgBAOHUHwDjOB8A4wwbALo9AIC+PQCAwj0AgMo9AIDOPQCA0j0AgNY9AIDaPQCAvjwJAN49AIDvhBsA74QbAKOhAgDiPQCAhugEAIe8BQDmPQCApn0CAKV9AgDqPQCAq1kCAKpRAgDuPQCA8j0AgK/9AQCu/QEArf0BAKxBAgCzhQYAxj0AgPY9AID6PQ
CA/j0AgLaJBgC1jQYAAj4AgLuRBgC6iQYABj4AgAo+AIC/9QYAvokGAL2BBgC8iQYADj4AgBI+AIAWPgCAGj4AgB4+AIAiPgCAJj4AgO+EHQAqPgCA4QAEAC4+AIDj/AQAgBEAAIEdAACCBQAAMj4AgKjxBgCp8QYAqg0GAKsFBgCsBQYArQkGAK49BgCvNQYANj4AgDo+AICGiAAAhxADAD4+AIBCPgCARj4AgEo+AIC4EQYAuRkGALohBgC7IQYAvPUHAL39BwC+9QcAv+kHALBNBgCxVQYAsl0GALNVBgC0TQYAtTEGALYxBgC3MQYAo4UHAE4+AIBSPgCAVj4AgFo+AICmiQcApY0HAF4+AICrkQcAqokHAGI+AIBmPgCAr/UHAK6JBwCtgQcArIkHAGo+AICz4QYAbj4AgHI+AIC25QYAdj4AgHo+AIC18QYAur0GALuNBgB+PgCAgj4AgL59AQC/ZQEAvJUGAL11AQCoHQYAqSUGAKotBgCrJQYArD0GAK0hBgCuXQYAr00GAIY+AICKPgCAjj4AgJI+AICWPgCAgrkDAIGxAwCAuQMAuO0BALmFAQC6jQEAu4UBALydAQC9hQEAvo0BAL+FAQCwPQYAsQ0GALIFBgCz5QEAtP0BALXlAQC25QEAt9UBAKOlBQCaPgCAnj4AgKI+AICqPgCApqEFAKW1BQCuPgCAq8kFAKr5BQCGCAwAhxwDAK8hAgCuOQIArTECAKzRBQCyPgCAs/ECALY+AIC6PgCAtlUDAL4+AIDCPgCAteECALpxAwC7eQMAxj4AgMo+AIC+MQMAvz0DALxRAwC9UQMAqCUCAKk1AgCqPQIAqzUCAKwtAgCtkQMArpEDAK+RAwDOPgCA0j4AgNY+AIDaPgCArAAAAN4+AIDiPgCA5j4AgLiZAwC5rQMAuqUDALttAwC8dQMAvX0DAL51AwC/bQMAsPEDALH5AwCywQMAs8EDALSxAwC1vQMAtrUDALepAwDqPgCA7j4AgPI+AID2PgCA+j4AgP4+AIACPwCA76gaAL5oDADhlAEABj8AgOMcBgCADQAAgXEAAIJxAAAKPwCAo/UDAA4/AIASPwCAhEwCABo/AICmUQIApeUDAB4/AICrfQIAqnUCAIbIDACHLA0ArzkCAK41AgCtVQIArFUCAOFQBgAiPwCA4xQHAITADAAmPwCAKj8AgC4/AIAyPwCANj8AgDo/AIA+PwCAQj8AgEY/AIBKPwCA73gbAL74DwBOPwCAUj8AgFY/AICzjQEAWj8AgLWZAQC2jQEAXj8AgFY9AIBiPwCAuoUBALtNAQC8VQEAvV0BAL5VAQC/SQEAo0EOABY/AIBmPwCAaj8AgG4/AICmQQ4ApVUOAHI/AICrgQ4AqkkOAHY/AIB6PwCAr4UOAK6ZDgCtkQ4ArJkOAIBtAACBCQAAgh0AAH4/AIDvGAkAgj8AgIY/AICKPwCA4zwNAI4/AIDhWAwAkj8AgIbQAACHvAMAlj8AgJo/AICokQ4AqZkOAKrJDgCrxQ4ArN0OAK3BDgCuwQ4Ar/UOAIToAACePwCAoj8AgKY/AICqPwCArj8AgLI/AIC2PwCAuMEPALnBDwC6wQ8Au8EPALzBDwC9wQ8AvsEPAL/1DwCwjQ4AsUUOALJNDgCzRQ4AtF0OALVBDgC2QQ4At0EOAKhRDgCpWQ4Aqo0OAKudDgCshQ4ArY0OAK6FDgCvvQ4Auj8AgL4/AIDCPwCAxj8AgMo/AIDOPwCA0j8AgNY/AIC4kQ4AuZkOALqtDgC7RQEAvF0BAL1FAQC+RQEAv3UBALDFDgCxzQ4AssUOALPdDgC0xQ4AtbUOALa9DgC3tQ4AswUOANo/AIDePwCA4j8AgOY/AIC2DQ4AtQ0OAOo/AIC7CQ4AugEOAO4/AIDyPwCAv3EOAL4BDgC9CQ4AvBEOAIJtAACjQQ4AgFUAAIFlAACmSQ4A+j8AgP4/AIClSQ4AqkUOAKtNDgCGSAAAh3gAAK5FDgCvNQ4ArFUOAK1NDgCoXQIAqWECAKplAgCrdQIArG0CAK2xAgCusQIAr7ECAITsBAACQACABkAAgApAAIAOQACAEkAAgBZAAIAaQACAuHEDALlxAwC6cQMAu3EDALzVAwC93QMAvtUDAL/NAwCw0QIAsdECALLRAgCz0QIAtFEDALVRAwC2UQMAt1EDAB5AAICz6QIAIkAAgL6ABAC2NQIAJkAAgCpAAIC14QIAuhECALsRAgAuQACAMkAAgL6RAwC/kQMAvAECAL0BAgA2QACAOkAAgKOlAgA+QACApa0CAEJAAIBGQACApnkCAEpAAIBOQACAq10CAKpdAgCtTQIArE0CAK/dAwCu3QMAqNUCAKndAgCqLQEAqyUBAKw9AQCtJQEAri0BAK8lAQBSQACAVkAAgFpAAIBeQACAYkAAgGpAAIBuQACAckAAgLiFAQC5iQEAup0BALuVAQC8sQEAvbEBAL55AAC/eQAAsF0BALHlAQCy4QEAs/kBALTpAQC13QEAttUBALe9AQDh8A4AdkAAgOMUDgB6QACAgb0AAIC9AAB+QACAgq0AAIYABACH7AUAgkAAgIZAAICKQACAjkAAgO9gDgCSQACAlkAAgJpAAICFXH0AnkAAgKJAAIDjZAEApkAAgOG0AQCqQACA76AOAK5AAICmPgCAhPgFALJAAIC2QACAukAAgLMlBgBmQACAvkAAgMJAAIDGQACAtiUGALU1BgDKQACAu6EGALoZBgDOQACA0kAAgL+ZBgC+rQYAva0GALy1BgCCbQAA7zAEAIBVAACBZQAAvlwDANZAAICG+AAAh2wDANpAAIDeQACA4kAAgOZAAIDqQACA40QEAO5AAIDhjAcAo6UGAPJAAID2QACA+kAAgP5AAICmpQYApbUGAAJBAICrIQYAqpkGAAZBAIAKQQCArxkGAK4tBgCtLQYArDUGAA5BAICz+QcAEkEAgBZBAIC2SQcAGkEAgB5BAIC1UQcAulEHALtRBwAiQQCAJkEAgL41BwC/OQcAvEUHAL09BwCoNQYAqT0GAKo1BgCriQYArJ0GAK2NBgCusQYAr7EGACpBAIAuQQCAMkEAgDZBAICADQAAgbEAAIKxAAA6QQCAuKEGALmtBgC6vQYAu7UGALytBgC9XQEAvlUBAL9NAQCw0QYAsdEGALLVBgCzrQYAtLUGALW5BgC2qQYAt6UGAKO9BgA+QQCAQkEAgISEAgC+kAEApg0GAKUVBgBKQQCAqxUGAKoVBgCGCAAAh3wBAK99BgCucQYArXkGAKwBBgBOQQCAs60BAFJBAIBWQQCAtqkBAFpBAIBeQQCAta0BALptAQC7dQEAYkEAgGZBAIC+XQEAvzUBALxlAQC9VQEAqGECAKlhAgCqYQIAq2ECAKxhAgCtbQIArp0CAK+VAgBqQQCAbkEAgHJBAIB2QQCAekEAgH5BAICCQQCAhkEAgLiVAgC5nQIAuqECALuhAgC8cQMAvXEDAL5xAwC/cQMAsO0CALH1AgCy9QIAs8UCALTdAgC1tQIAt
rECALexAgCKQQCAjkEAgJJBAICj5QIAlkEAgKXlAgCm4QIAmkEAgJ5BAICiQQCAqiUCAKs9AgCsLQIArR0CAK4VAgCvfQIApkEAgKpBAICuQQCAhEB8AIAVAACBHQAAggUAALJBAIC+7HwAukEAgIZIfQCHCAMAvkEAgMJBAIDGQQCAykEAgKidAgCpxQIAqsECAKvBAgCsxQIArc0CAK7xAgCv8QIAzkEAgNJBAIDWQQCA2kEAgMkAAADeQQCA4kEAgOZBAIC4wQEAucEBALrBAQC73QEAvM0BAL31AQC+/QEAv50BALBBAQCxQQEAskEBALNBAQC0QQEAtUEBALZBAQC3QQEA4TgGAOpBAIDjaAYA7kEAgPJBAID2QQCA+kEAgISUfQC+rHwA/kEAgAJCAIAGQgCAvrh/AApCAIDvEAEADkIAgBJCAIAWQgCAGkIAgB5CAIDhkAEAIkIAgONEAAAqQgCAgS0AAIAtAADvgAAAgjkAAC5CAIAyQgCA9j8AgDZCAIDhsH8AtkEAgOPUfAA6QgCAJkIAgD5CAICGuAAAh9QCAEJCAIBGQgCASkIAgE5CAIBSQgCAVkIAgO8gfABaQgCAs4l9AF5CAIBiQgCAZkIAgGpCAIC2jX0AtY19AG5CAIC7RX4AukV+AHJCAIB2QgCAv0V+AL5FfgC9VX4AvFV+AKNJfQB6QgCAfkIAgIJCAICGQgCApk19AKVNfQCKQgCAq4V+AKqFfgCOQgCAkkIAgK+FfgCuhX4ArZV+AKyVfgCCbQAAszF+AIBVAACBZQAAtvF/AITcAwCWQgCAtSF+ALrNfwC70X8AhgAEAIfUAAC+dX8Av3l/ALzBfwC9wX8AqOV/AKn1fwCq/X8Aq/V/AKztfwCtNX4Arj1+AK81fgCaQgCAnkIAgKJCAICmQgCAqkIAgK5CAICyQgCAtkIAgLjZfgC54X4AuuF+ALvhfgC85X4Avel+AL6ZfgC/mX4AsE1+ALFRfgCyUX4As1F+ALT1fgC1+X4Atul+ALfpfgCjdX8AukIAgL5CAIDCQgCAxkIAgKa1fgClZX8AykIAgKuVfgCqiX4AzkIAgNJCAICvPX4ArjF+AK2FfgCshX4A1kIAgLMxfgDaQgCA3kIAgLbFAQDiQgCA5kIAgLXRAQC6yQEAu8kBAOpCAIDuQgCAvs0BAL+xAQC8yQEAvckBAKjdfQCp9X0Aqv19AKvxfQCsHQIArQECAK45AgCvOQIA8kIAgPZCAID6QgCA/kIAgIIFAAACQwCAgBEAAIERAAC4EQIAuRkCALohAgC7IQIAvNUCAL3dAgC+1QIAv80CALBJAgCxSQIAslkCALNZAgC0TQIAtTECALYxAgC3MQIAvgADAKNxfQCEiAIAvoAEAKaFAgAKQwCADkMAgKWRAgCqiQIAq4kCAIYoBACHDAMAro0CAK/xAgCsiQIArYkCABJDAICEyAMAhcwFALPlAwAWQwCAteUDALbtAwAaQwCAHkMAgCJDAIC6bQMAu2UDALx9AwC9ZQMAvmUDAL9VAwAmQwCAKkMAgL8ABACjJQIALkMAgKUlAgCmLQIAMkMAgDZDAIA6QwCAqq0CAKulAgCsvQIAraUCAK6lAgCvlQIAPkMAgEJDAIBGQwCASkMAgE5DAIDjzAMAUkMAgOGsAQBWQwCA7xwDAFpDAIBeQwCAYkMAgGZDAIBqQwCAbkMAgOFwfwBGQQCA4wR+AHJDAIB6QwCA4ZQBAH5DAIDjWAEAgNkAAIHZAACCJQAA7+R+AIJDAICGQwCA7+B+AIpDAICzAQEAjkMAgIboBwCHLAQAkkMAgLY1AQC1BQEAlkMAgLvxAAC64QAAmkMAgJ5DAIC/sQAAvtEAAL3ZAAC84QAABkMAgHZDAICiQwCApkMAgKEBBACgEQQAoxkAAKLFBACotQYAqb0GAKrpBgCr/QYArO0GAK3VBgCu3QYArz0HALBFBwCxVQcAslUHALNtBwC0dQcAtRUHALYdBwC3FQcAuC0HALk1BwC6MQcAuw0HALwZBwC9GQcAvgkHAL8JBwCjQQYAqkMAgK5DAICyQwCAtkMAgKZ1BgClRQYAukMAgKuxBwCqoQcAj8ltAL5DAICv8QcArpEHAK2ZBwCsoQcAld11AJTBdACXzXAAli1zAJFdaACQVWgAk9l0AJJNaQCd5XgAnB17AJ9tBwCeuXgAmR1/AJhVcACboXwAmvl8AIJhbACDhWkAwkMAgMZDAICGEXUAhxF1AISVaQCFjWgAij10AIvFcgDKQwCAzkMAgI7dfgCPMX0AjD1xAI2dcQCSGX0Ak716ANJDAIDvkAkAltUGAJdRBQCUXXkAlQl5AJpxBQCbvQUA1kMAgNpDAIDeQwCA4agFAJx5AQDjuAgAoYUBAOJDAICjqQ0AogEMAKUBCACkOQ0Ap6kJAKa9CQCppRUAqAEUAKsBFACq/RUArbkRAKyxEQCvARwArqEQALH9HACw5R0As+kZALIBGAC1ASQAtH0ZAIQUAAC+FAAAgI0AAIGVAACCbQAA6kMAgIZQDwCHZAAA7kMAgPJDAIC61QcAu90HALjBBwC5wQcAvjEEAL8xBAC88QcAvfEHALKtBwCztQcAsK0HALGlBwC2nQcAt/UHALSlBwC1lQcAqmkHAKtpBwCoaQcAqWkHAK5pBwCvaQcArGkHAK1pBwD2QwCA+kMAgP5DAIACRACABkQAgApEAIAORACAEkQAgKgRBQCpHQUAqjkFAKs5BQCsLQUArVEFAK5JBQCvQQUAFkQAgBpEAIAeRACAIkQAgCZEAIAqRACALkQAgDJEAIC4XQIAuWkCALrBAwC7wQMAvPkDAL35AwC+kQMAv7UDALAJBQCxCQUAsuECALPhAgC0dQIAtX0CALZ1AgC3bQIAs7EEAIQAAgC+BA0ANkQAgDpEAIC20QQAtaUEAD5EAIC7zQQAus0EAEJEAIBGRACAv7kDAL6xAwC9NQMAvDUDAEpEAICj9QQATkQAgFJEAICmlQQAWkQAgF5EAICl4QQAqokEAKuJBACHqA0AhswMAK71AwCv/QMArHEDAK1xAwDhUAYA4TQHAONAAADjWAcAgNEAAIHdAACC1QAAYkQAgGZEAIBqRACAbkQAgHJEAIB2RACAekQAgO+cAADvyAcAfkQAgIJEAICzNQIAhkQAgLW1AQCKRACAjkQAgLa1AQC+7AwAkkQAgLuRAQC6mQEAvVEBALyJAQC/UQEAvlkBAKjtDQCp/Q0AqvUNAKttDgCsdQ4ArX0OAK51DgCvbQ4AVkQAgJZEAICaRACAnkQAgKJEAICmRACAqkQAgK5EAIC49Q4Auf0OALr1DgC7QQ8AvEEPAL1JDwC+cQ8Av3EPALAVDgCxHQ4AshUOALPNDgC01Q4Atd0OALbVDgC3zQ4Ao30NALJEAIC2RACAukQAgL5EAICm/Q4Apf0OAMJEAICr2Q4AqtEOAISoAgDGRACArxkOAK4RDgCtGQ4ArMEOAIBNAACBVQAAglUAALNRDwDKRACAtXEPALZxDwDORACAhuAA
AIcEAwC6XQ8Auy0PALw1DwC9OQ8Avi0PAL8lDwCoVQ4AqV0OAKqVDgCrrQ4ArLUOAK29DgCutQ4Ar60OANJEAIDWRACA2kQAgN5EAIDiRACA5kQAgOpEAIDuRACAuGkBALlpAQC6eQEAu3kBALxpAQC9aQEAvt0BAL/VAQCw1Q4AsaUOALKtDgCzoQ4AtKUOALWtDgC2nQ4At1kBAKMdDgDyRACA9kQAgOZDAID6RACApj0OAKU9DgD+RACAq2EOAKoRDgACRQCABkUAgK9pDgCuYQ4ArXUOAKx5DgAKRQCADkUAgBJFAIAWRQCAGkUAgB5FAIAiRQCAJkUAgIANAACBFQAAgh0AACpFAIAuRQCAMkUAgIR4AQC+FAAA4xQPADpFAIDh4A0AhAADAIawBACHFAMAPkUAgEJFAIBGRQCASkUAgE5FAIBSRQCA78APAFZFAIBaRQCAXkUAgGJFAIBmRQCAakUAgLNtAwBuRQCAtX0DALZ1AwByRQCAdkUAgHpFAIC6UQMAu1EDALz1AwC9/QMAvukDAL/hAwB+RQCAgkUAgIZFAICKRQCAjkUAgJJFAICWRQCAmkUAgKhxAgCpeQIAqokDAKuJAwCsmQMArZkDAK6JAwCviQMAsPkDALH5AwCyTQMAs0UDALRBAwC1SQMAtnEDALdxAwC4IQMAuSEDALohAwC7IQMAvCEDAL0hAwC+IQMAvyEDAICdAQCBEQAAghEAAIQEBQDvFAAAnkUAgKJFAIC+EAUA48gAAKpFAIDh0AEArkUAgLJFAIC2RQCAukUAgL5FAICqeQIAq3kCAIboBACHYAUArsECAK/JAgCs3QIArdUCAMJFAICjRQIAxkUAgMpFAICmXQIAzkUAgNJFAIClVQIA1kUAgNpFAIDeRQCA4kUAgOZFAIDqRQCA7kUAgO+EDgC+rAQA4dAOAPJFAIDjFAEA9kUAgPpFAID+RQCAAkYAgLPdAQAGRgCACkYAgA5GAIASRgCAtv0BALX9AQAaRgCAu90BALrdAQCE4AQAHkYAgL+hAQC+vQEAvb0BALy9AQCoBQYAqR0GAKoVBgCrLQYArDUGAK09BgCuNQYArykGAKZFAICC9QcAgeUHAIDlBwAWRgCAIkYAgIYcAACHsAMAuCUGALnFBgC6zQYAu8UGALzdBgC9xQYAvs0GAL/FBgCwWQYAsVkGALIpBgCzKQYAtDkGALUlBgC2JQYAtx0GAKOdBgAmRgCAKkYAgC5GAIAyRgCApr0GAKW9BgA2RgCAq50GAKqdBgA6RgCAPkYAgK/hBgCu/QYArf0GAKz9BgBCRgCAs/UHAEZGAIBKRgCAtu0HAE5GAIBSRgCAteUHALqNBwC7kQcAVkYAgFpGAIC+dQcAv30HALyBBwC9fQcAqCUGAKkpBgCqOQYAqzkGAKwpBgCtKQYArnkGAK91BgBeRgCAYkYAgGZGAIBqRgCAbkYAgHJGAIB2RgCAekYAgLjVBgC53QYAuuEGALv9BgC85QYAve0GAL7lBgC/mQYAsA0GALERBgCyEQYAs+0GALT1BgC1/QYAtvUGALftBgCjsQYAgi0AAIEVAACAsQAANkUAgKapBgCloQYAfkYAgKvVBgCqyQYAgkYAgL5oAQCvOQYArjEGAK05BgCsxQYAikYAgLPxAQCGaAAAh3wBALZdAQCORgCAkkYAgLVVAQC6SQEAu0kBAJZGAICaRgCAvj0BAL8hAQC8OQEAvTUBAJ5GAICiRgCAhAQDAL6AHACmRgCA4RwGAKpGAIDjAAYAvwguAK5GAICyRgCA78gHALZGAIC6RgCAvkYAgMJGAIDGRgCAykYAgKN9AgDORgCApdkCANJGAIDWRgCAptECANpGAIDeRgCAq8UCAKrFAgCtuQIArLUCAK+tAgCusQIAqW0FAKhZBQCrDQIAqrkCAK0dAgCsHQIArwUCAK4NAgC+aB0A4kYAgOZGAIDqRgCAgB0AAIEJAACCmQEA7kYAgLnhAwC4KQIAu+EDALrpAwC94QMAvPkDAL/hAwC+6QMAsU0CALBNAgCzIQIAsi0CALUlAgC0OQIAtxECALYlAgCowQIAqdECAKrRAgCr5QIArP0CAK0VAQCuHQEArw0BAPJGAID6RgCA/kYAgAJHAIAGRwCACkcAgA5HAIASRwCAuAUBALkJAQC6HQEAuxUBALwxAQC9MQEAvv0BAL/1AQCweQEAsUEBALJBAQCzXQEAtEUBALVNAQC2RQEAtz0BAIagHQCHxB0AFkcAgO/YAAAaRwCAHkcAgCJHAIDvxAYAhGwcAOH0BgAmRwCA47AGACpHAIDhlAEALkcAgONEBgCzGQIAMkcAgDZHAIA6RwCAhewsALbVAQC1NQIAPkcAgLvFAQC6/QEAQkcAgEZHAIC/yQEAvsEBAL3JAQC81QEAo9kdAPZGAIBKRwCATkcAgFJHAICmFR4ApfUdAFZHAICrBR4Aqj0eAFpHAIBeRwCArwkeAK4BHgCtCR4ArBUeAIBpAACBaQAAggUAAGJHAIBmRwCAakcAgIcQAwCGfAMAbkcAgHJHAIB2RwCAekcAgH5HAICCRwCAhkcAgIpHAICopR8Aqa0fAKqlHwCrvR8ArKUfAK2tHwCupR8ArxUfAI5HAICSRwCAlkcAgJpHAICeRwCAokcAgKZHAICqRwCAuA0fALkZHwC6IR8AuyEfALzZAAC92QAAvskAAL/BAACwcR8AsXEfALJxHwCzRR8AtEEfALVNHwC2PR8AtzUfALMtHgCuRwCAskcAgLZHAIC6RwCAti0eALUtHgC+RwCAu7UeALq1HgDCRwCAxkcAgL+JHgC+hR4AvZEeALylHgCCKQAAo2keAIAdAACBFQAApmkeAMpHAIDORwCApWkeAKrxHgCr8R4A0kcAgITgAQCuwR4Ar80eAKzhHgCt1R4AqNUBAKnlAQCq7QEAq+UBAKz9AQCt5QEAru0BAK/lAQC+oAEAhkYAgNZHAIDaRwCAhhAAAId0AQDeRwCA4kcAgLh9AQC5wQAAusEAALvBAAC8wQAAvckAAL7xAAC/8QAAsJ0BALFFAQCyTQEAs0UBALRdAQC1RQEAtk0BALdFAQDmRwCA6kcAgO5HAIDyRwCA9kcAgO80AgDv7B4A+kcAgOHwHQDj4AIA4zAeAOGEAQD+RwCAAkgAgAZIAIAKSACAsyUCAJQAAAAOSACAEkgAgBZIAIC2JQIAtTUCABpIAIC7wQIAuhkCAB5IAIAiSACAv8ECAL7ZAgC90QIAvNkCACZIAIAqSACALkgAgKPpAgAySACApfkCAKbpAgA2SACAOkgAgD5IAICq1QIAqw0CAKwVAgCtHQIArhUCAK8NAgCAYQAAgWEAAIIFAABCSACASkgAgIQABAC+FAQATkgAgIbABACHUAMAUkgAgFZIAIBaSACAXkgAgGJIAIBmSACAqK0CAKm9AgCqtQIAqw0BAKwVAQCtHQEArhUBAK8NAQCE7AQAakgAgG5IAIBySACAdkgAgHpIAIB+SACAgkgAgLgdAQC5LQEAuiUBALv
NAQC81QEAvd0BAL7JAQC/wQEAsH0BALFVAQCyXQEAs1UBALRNAQC1PQEAtjUBALctAQDhGB4AhkgAgOM4HgCKSACAjkgAgJJIAICWSACAmkgAgJ5IAICiSACAvmAEAKZIAICBdQAAgHUAAO/gHwCCbQAAqkgAgK5IAICG6AQAh3wFALJIAIDhkAEAukgAgOOgAAC+SACAwkgAgMZIAIDvtAAAykgAgM5IAIDSSACA1kgAgLUFBgBGSACAtkgAgLYFBgDaSACA3kgAgLOlBQDiSACAvRkGALwRBgC/YQYAvhEGAOZIAIDqSACAuwkGALohBgCj/QUA7kgAgPJIAID2SACA+kgAgKZdBgClXQYA/kgAgKtRBgCqeQYAAkkAgAZJAICvOQYArkkGAK1BBgCsSQYAqFEGAKlZBgCqYQYAq2EGAKxhBgCtYQYArmEGAK9hBgAKSQCADkkAgBJJAIAWSQCAgA0AAIGxAQCCsQEAGkkAgLhNBwC5VQcAul0HALtVBwC8TQcAvXUHAL59BwC/cQcAsMUHALHNBwCyxQcAs90HALTFBwC1zQcAtsUHALd5BwCz6QcAHkkAgCJJAICEwAEAvtgBALbhBwC16QcAJkkAgLsJBgC6AQYAhogAAIesAQC/CQYAvgEGAL0JBgC8EQYAKkkAgKOtBwAuSQCAMkkAgKalBwA2SQCAOkkAgKWtBwCqRQYAq00GAD5JAIBCSQCArkUGAK9NBgCsVQYArU0GAKhZBgCpZQYAqm0GAKtlBgCsYQYArWEGAK5hBgCvYQYAhKwBAEZJAIBKSQCATkkAgFJJAIBWSQCAWkkAgF5JAIC4kQEAuZkBALqhAQC7oQEAvHEBAL1xAQC+cQEAv3EBALDxAQCx8QEAsvUBALPdAQC0xQEAtbEBALaxAQC3sQEAs+UFAGJJAIBmSQCAakkAgG5JAIC24QUAtekFAHJJAIC7NQIAujUCAHZJAIB6SQCAv3UCAL4BAgC9CQIAvCECAH5JAICjoQUAgkkAgIZJAICmpQUAikkAgI5JAIClrQUAqnECAKtxAgCSSQCAvigDAK5FAgCvMQIArGUCAK1NAgCA1QAAgd0AAILhAACaSQCA4yABAJ5JAIDhqAEAokkAgO80AgCmSQCAhggMAIdoAwCsAAAAqkkAgK5JAICySQCAs40DALZJAIC6SQCAhIAMAL5JAIC2vQMAtYEDAMJJAIC7TQMAuk0DAMZJAIDKSQCAv00DAL5NAwC9TQMAvE0DAKhBAgCpTQIAqkUCAKtZAgCsSQIArX0CAK51AgCvuQIAvmgNAM5JAIDSSQCA1kkAgIRsDADaSQCA3kkAgOJJAIC4TQEAuVUBALpVAQC7ZQEAvH0BAL0VAQC+EQEAvxEBALDJAgCxyQIAstkCALPZAgC0yQIAtckCALZ9AQC3dQEA4XgHAOOYAADjuAYA4VwGAOZJAIDqSQCA7kkAgPJJAID2SQCA+kkAgP5JAIACSgCA7AAAAO9cAADv6AYACkoAgIFpAACAYQAAo4UCAIJhAACliQIADkoAgBJKAICmtQIAhkAMAIfEDACrRQIAqkUCAK1FAgCsRQIAr0UCAK5FAgCojQ4AqZEOAKqVDgCrqQ4ArKUOAK2tDgCupQ4Ar9kOAAZKAIAWSgCAGkoAgB5KAIAiSgCAJkoAgCpKAIAuSgCAuHUPALl9DwC6dQ8Au90PALzFDwC9zQ8AvsUPAL/9DwCwqQ4AsbUOALK1DgCzhQ4AtJ0OALVRDwC2UQ8At1EPALMdDgAySgCANkoAgDpKAIA+SgCAti0OALUtDgBCSgCAu3EOALptDgBGSgCASkoAgL+VDwC+WQ4AvVEOALxhDgBOSgCAo1kOAFJKAIBWSgCApmkOAFpKAIBeSgCApWkOAKopDgCrNQ4AYkoAgGZKAICuHQ4Ar9EPAKwlDgCtFQ4AqL0OAKnRDgCq0Q4AqykBAKw5AQCtOQEArikBAK8pAQCADQAAgRUAAIIdAABqSgCAbkoAgHJKAIC+dAIAdkoAgLjtAQC5hQEAuoEBALuBAQC8hQEAvY0BAL6xAQC/sQEAsFkBALFZAQCy7QEAs+UBALT9AQC15QEAtuUBALfVAQB6SgCAtqkBALWhAQB+SgCAs0kOAIJKAICGOAAAh9wBAL8xAQC+KQEAvSEBALwpAQC7jQEAuo0BAJZJAICGSgCAoxkOAIpKAICOSgCAkkoAgJZKAICm+QEApfEBAJpKAICr3QEAqt0BAJ5KAICiSgCAr2EBAK55AQCtcQEArHkBAKZKAIDv3A8AqkoAgK5KAICySgCAtkoAgLpKAIC+SgCAwkoAgMZKAIDKSgCAzkoAgNJKAIDj6A4A1koAgOGMDgCAEQAAgREAAIIRAACEQAIA2koAgN5KAIDiSgCAvhADAIbABACHRAMA6koAgO5KAIDySgCA9koAgPpKAID+SgCA7yQCAAJLAIAGSwCACksAgA5LAIASSwCAFksAgBpLAICE7AQAHksAgCJLAIAmSwCA4+wCACpLAIDhOAEALksAgLNVAwAySwCANksAgDpLAIA+SwCAth0DALUdAwBCSwCAuwkDALo5AwBGSwCASksAgL/9AAC+/QAAvfkAALwRAwCogQIAqYkCAKqdAgCrsQIArNUCAK3dAgCu1QIAr80CAIDNAQCBCQAAghkAAE5LAIBSSwCAWksAgL5wBQBeSwCAuFkBALlZAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9lAQCwvQIAsY0CALKFAgCzbQEAtHkBALV5AQC2aQEAt2kBAIYgBACHCAUAYksAgGZLAIBqSwCAbksAgHJLAIDvXAAAhOwEAOFcDgB2SwCA44wOAHpLAIB+SwCAgksAgIZLAICjVQIAiksAgI5LAICSSwCAlksAgKYdAgClHQIAmksAgKsJAgCqOQIAnksAgKJLAICv/QEArv0BAK35AQCsEQIAqGkGAKlpBgCqeQYAq3kGAKxpBgCtaQYArp0GAK+VBgBWSwCApksAgKpLAICuSwCAsksAgLZLAIC6SwCAvksAgLj1BgC5+QYAuo0GALuFBgC8nQYAvYUGAL6FBgC/tQYAsO0GALH1BgCy/QYAs/UGALTtBgC10QYAttEGALfRBgCz8QYAghUAAIG1AACAtQAAwksAgLbpBgC14QYAvtQDALsxBgC6KQYAxksAgMpLAIC/FQYAvikGAL0hBgC8KQYAzksAgKO1BgCGyAAAh8gAAKatBgDSSwCA1ksAgKWlBgCqbQYAq3UGANpLAIDeSwCArm0GAK9RBgCsbQYArWUGAKg1BgCpOQYAqoEGAKuBBgCsgQYArYEGAK6BBgCvtQYA4ksAgOZLAIDqSwCA7ksAgPJLAID2SwCA+ksAgP5LAIC4nQYAua0GALqlBgC7aQEAvHkBAL15AQC+aQEAv2kBALDRBgCx0QYAstEGALPRBgC0tQYAtb0GALa1BgC3rQYAswkGAAJMAIAGTACACkwAgA5MAIC2AQYAtQkGABJMAIC7FQYAuhUGABZMAIAaTACAv3kGAL5xBg
C9BQYAvAUGAB5MAICjTQYAIkwAgOZKAICmRQYAJkwAgCpMAIClTQYAqlEGAKtRBgAuTACAMkwAgK41BgCvPQYArEEGAK1BBgCB6QMAgN0DAISIAwCC4QMAhrA8AIeIAgC+VAMAOkwAgD5MAIBCTACARkwAgEpMAIBOTACAUkwAgFZMAIBaTACA4/AGAF5MAIDhMAYAhAA8AGJMAIBmTACAakwAgG5MAIByTACAhTQ9AHZMAIB6TACA77AHAH5MAICCTACAhkwAgIpMAICOTACAkkwAgL7EPACWTACAgp0BAIGdAQCAnQEAqA0CAKllAgCqfQIAq3UCAKxZAgCtWQIArpkDAK+ZAwCw6QMAsekDALL5AwCz+QMAtOkDALXpAwC2XQMAt1UDALhtAwC5dQMAunUDALtFAwC8XQMAvTUDAL4xAwC/KQMAmkwAgJ5MAICiTACAqkwAgOFgAwDv9AMA40QCAK5MAICyTACA4zwDAO/0NwDh/AEAtkwAgLpMAIC+TACAwkwAgIZkPwCHaD0AhTQhALOZAwDGTACAtb0DALa1AwDKTACAzkwAgNJMAIC6QQIAu0ECALxBAgC9QQIAvkECAL9BAgDWTACA2kwAgN5MAIDiTACA5kwAgOpMAIDuTACA7/gBAIRoPADhPAYA8kwAgOMcBgD2TACA+kwAgP5MAIACTQCAoxUDAAZNAIAKTQCADk0AgBJNAICmOQMApTEDABpNAICrzQIAqs0CAL5kPgAeTQCAr80CAK7NAgCtzQIArM0CAKgdPgCpJT4Aqi0+AKslPgCsPT4ArSU+AK4tPgCvJT4ApkwAgIL1PwCB5T8AgOU/ABZNAIAiTQCAhgAEAIecAwC4LT4AuTE+ALoxPgC7MT4AvNE+AL3RPgC+0T4Av80+ALBdPgCxIT4Asjk+ALM5PgC0KT4AtSk+ALYZPgC3FT4As6U+ACZNAIAqTQCALk0AgDJNAIC2pT4AtbU+ADZNAIC75T4Aupk+ADpNAIA+TQCAv+0+AL7tPgC97T4AvO0+AEJNAICj4T4ARk0AgEpNAICm4T4ATk0AgFJNAICl8T4Aqt0+AKuhPgBWTQCAWk0AgK6pPgCvqT4ArKk+AK2pPgCPBSUAsyU+AF5NAIBiTQCAtik+AGZNAIBqTQCAtSk+ALp9PgC7RT4Abk0AgHJNAIC+tT4Av70+ALxdPgC9vT4An304AJ5lOQCd8TgAnFE0AJtZNQCaUTUAmfEwAJgNMQCXZTEAlsEwAJVZLQCUTS0Ak+EsAJLZKQCRWSkAkPEoALSlGQC13RgAdk0AgIQIAACwkRUAsQEVALIBGACzvRkAgA0AAIGtAwCCpQMAek0AgKNhAACiHT0AoZk9AKBxPACkxQUApUEEAKYBCACn4QkANkwAgKH1AQCi6QEAo90FAKwBEACtxREArtkRAK85EACoZQgAqQEMAKrZDQCrCQ0AijEuAIuhMwB+TQCAgk0AgI65MwCPETYAjB0yAI1NMgCCJSYAg6krAL5kAwCEYAQAhqEvAIcVLgCEGSoAhZEqAJphPgCb7T4AhsgEAIfcAwCKTQCA4Vw+AJyJAwDjAD4Akmk2AJN5NwCOTQCA7xg+AJZNOwCXuT8AlME7AJVdOgCpnT0AqIk9AKu5PQCqrT0Arak9AKyhPQCvyT0ArqE9AL7oBACSTQCAlk0AgJpNAICeTQCAok0AgKZNAICqTQCAuVk9ALhRPQC7eT0AumU9AL1pPQC8YT0Avx09AL5hPQCxgT0AsLk9ALNpPQCyiT0AtXk9ALRxPQC3aT0AtnE9AKMhPACuTQCAsk0AgLZNAIC6TQCApi08AKUtPAC+TQCAq0E8AKp5PADCTQCAxk0AgK+5PACusTwArbk8AKxZPADKTQCAzk0AgLN9AwDSTQCAtdkDANZNAIDaTQCAttEDAN5NAIDiTQCAu8UDALrFAwC9uQMAvLUDAL+tAwC+sQMA5k0AgOpNAIDuTQCA71wDAIAVAACBHQAAgjEAAO+MPgCE7AQA4fw+APJNAIDjHD4A+k0AgOGUAQD+TQCA4yAAAKP1AwACTgCAh+gEAIZsBAAGTgCAplkDAKVRAwAKTgCAq00DAKpNAwAOTgCAEk4AgK8lAwCuOQMArTEDAKw9AwCGTQCA9k0AgBZOAIAaTgCAHk4AgCJOAIAmTgCAKk4AgKhxBgCpTQYAqo0GAKuFBgCsnQYArYUGAK6NBgCvhQYAsP0GALFBBwCyQQcAs0EHALRBBwC1SQcAtnEHALdxBwC4IQcAuSEHALolBwC7OQcAvCkHAL0VBwC+HQcAv/0HALMlBgAuTgCAMk4AgDZOAIA6TgCAtiUGALU1BgA+TgCAu6UHALoZBgBCTgCARk4AgL+tBwC+pQcAvbUHALy1BwBKTgCAo2EGAE5OAIBSTgCApmEGAFZOAIBaTgCApXEGAKpdBgCr4QcAXk4AgGJOAICu4QcAr+kHAKzxBwCt8QcAqLEGAKm9BgCqzQYAq90GAKzNBgCt/QYArvUGAK8VAQCA+QEAgc0BAILFAQC+ZAIAhpAAAIcAAQBqTgCAbk4AgLjRAQC52QEAuuEBALvhAQC8kQEAvZ0BAL6VAQC/iQEAsG0BALF1AQCyfQEAs3UBALRtAQC18QEAtvEBALfxAQCzRQYAZk4AgHJOAIB2TgCAek4AgLZ9BgC1RQYAfk4AgLuxAQC6qQEAgk4AgIZOAIC/NQEAvqkBAL2hAQC8qQEAik4AgKMBBgCOTgCAkk4AgKY5BgCWTgCAmk4AgKUBBgCq7QEAq/UBAJ5OAICiTgCAru0BAK9xAQCs7QEAreUBAOEoAQCmTgCA41ACAKpOAICuTgCAsk4AgLZOAIC6TgCAvk4AgMJOAIDGTgCAyk4AgIFxAACAGQAA75wCAIJ5AADOTgCA0k4AgITIAgCzxQMA2k4AgLXFAwC2xQMAvhADAIbADACHRAwAuqkDALulAwC8vQMAvaEDAL6hAwC/lQMArhEGAK8ZBgCsAQYArQEGAKqlBgCrEQYAqEU5AKlxOQDeTgCA4k4AgOZOAIDqTgCA7k4AgPJOAID2TgCA+k4AgL7tBwC/TQcAvNEHAL3lBwC63QcAu8EHALg1BgC51QcAtjkGALcNBgC0JQYAtTkGALIxBgCzPQYAsFEGALFRBgCoOQIAqTkCAKqBAgCrgQIArIECAK2JAgCusQIAr7ECAIRsDQD+TgCAvmANAAJPAIAGTwCACk8AgA5PAIASTwCAuE0BALlVAQC6XQEAu1UBALxNAQC9dQEAvn0BAL91AQCwoQIAsa0CALKlAgCzuQIAtKkCALWdAgC2lQIAt3kBAOFUBgDh1AcA4zgGAOOwBwAWTwCAGk8AgB5PAIAiTwCAhOQMACZPAIAqTwCALk8AgDJPAIA2TwCA72wAAO/kBwCjSQIAOk8AgD5PAIBCTwCASk8AgKZJAgClSQIATk8AgKspAgCqJQIAhkgMAIfcDACvGQIAri0CAK0tAgCsMQIAqFEOAKmlDgCqrQ4Aq6UOAKy9DgCtpQ4Arq0OAK+lDgCA5
Q8Age0PAILlDwBGTwCAUk8AgFZPAIBaTwCAXk8AgLjVDwC53Q8AutUPALvpDwC8+Q8AvfkPAL7pDwC/6Q8AsN0OALFBDwCyRQ8As10PALRFDwC1TQ8AtkUPALftDwCzJQ4AYk8AgGZPAIBqTwCAbk8AgLYlDgC1NQ4Ack8AgLuFDwC6GQ4Adk8AgHpPAIC/iQ8AvoEPAL2JDwC8kQ8Afk8AgKNhDgCCTwCAhk8AgKZhDgCKTwCAjk8AgKVxDgCqXQ4Aq8EPAJJPAICWTwCArsUPAK/NDwCs1Q8Arc0PAKjRDgCp2Q4AqjkBAKs5AQCsKQEArSkBAK6dAQCvlQEAmk8AgJ5PAICiTwCApk8AgIANAACBtQAAgr0AAKpPAIC4lQEAuZ0BALqhAQC7oQEAvHEAAL1xAAC+cQAAv3EAALDtAQCx9QEAsvUBALPFAQC03QEAtbUBALaxAQC3sQEArk8AgLJPAICzuQEAvsACALWpAQC2TwCAuk8AgLahAQCGgAEAh8QBALs5AQC6IQEAvRkBALwpAQC/eQEAvhEBAKPxAQC+TwCA1k4AgMJPAIDGTwCApukBAKXhAQDKTwCAq3EBAKppAQDOTwCA0k8AgK8xAQCuWQEArVEBAKxhAQDWTwCA2k8AgN5PAIDiTwCA4agBAOZPAIDjQAIA6k8AgL8oFQDuTwCA73QCAPJPAID2TwCA+k8AgP5PAIACUACABlAAgON0DwCEiAMA4TQOAApQAIAOUACAElAAgBZQAICADQAAgRUAAIIRAAAaUACAHlAAgO+kDwAiUACAKlAAgKgZAwCpQQMAqkUDAKtdAwCsTQMArX0DAK51AwCvnQAAhaQVAL58AwCGCAQAhxwDAC5QAIAyUACANlAAgDpQAIC49QAAuf0AALr1AAC7jQAAvIEAAL2BAAC+gQAAv4EAALDlAACx7QAAsuUAALP5AAC07QAAtdEAALbVAAC3zQAAPlAAgEJQAIBGUACAs8ECAEpQAIC1yQIAtvECAE5QAIBSUACAVlAAgLotAQC7JQEAvD0BAL0hAQC+JQEAvxkBAKapAgCESAIAWlAAgKWRAgBeUACAo5kCAGJQAIBmUACArn0BAK9BAQCsZQEArXkBAKp1AQCrfQEAalAAgG5QAIByUACAdlAAgHpQAIB+UACA7+QAAIJQAICGUACAilAAgOMQDgCOUACA4VgOAJJQAICALQAAgREAAIIVAAC+sAUAs3UBAJpQAICHFAUAhmwEAJ5QAIC21QAAtWUBAKJQAIC7/QAAuvUAAKZQAICqUACAv6EAAL69AAC93QAAvN0AAKh9BgCptQYAqr0GAKu1BgCsrQYArRUHAK4dBwCvFQcAllAAgK5QAICyUACAtlAAgLpQAIC+UACAwlAAgMZQAIC4OQcAuTkHALrJBwC7yQcAvNkHAL3ZBwC+zQcAv8UHALBxBwCxeQcAskkHALNJBwC0OQcAtSUHALYhBwC3IQcAozUGAMpQAIDOUACA0lAAgNZQAICmlQcApSUGANpQAICrvQcAqrUHAN5QAIDiUACAr+EHAK79BwCtnQcArJ0HAOZQAIDqUACA7lAAgPJQAID2UACAgj0AAIE9AACAPQAA+lAAgP5QAIACUQCAhKADAL6kAwAGUQCAhvgAAIfgAACoxQYAqdUGAKrVBgCr5QYArP0GAK0xAQCuMQEArzEBAApRAIAOUQCAElEAgBZRAIAaUQCAHlEAgCJRAIAmUQCAuN0BALntAQC65QEAu40BALyVAQC9nQEAvpUBAL+NAQCwUQEAsVEBALJRAQCzUQEAtPUBALX9AQC29QEAt+0BALNdBgAqUQCALlEAgDJRAIA2UQCAtrEBALV1BgA6UQCAu5UBALqVAQA+UQCAQlEAgL85AQC+MQEAvYUBALyFAQClLQYARlEAgEpRAICm6QEATlEAgFJRAICjBQYAVlEAgK3dAQCs3QEAr2EBAK5pAQBaUQCAJlAAgKvNAQCqzQEAXlEAgGJRAICExAMAvwD0AGZRAICCPQAAgT0AAIA9AABqUQCAblEAgHJRAIC+YAMAelEAgH5RAICCUQCAhlEAgIbgHACHAAMA7wwHAIpRAICOUQCAklEAgJZRAICaUQCAnlEAgKJRAICmUQCAqlEAgOHABgCuUQCA4ywHALJRAIC2UQCAulEAgL5RAIDCUQCAxlEAgMpRAIDOUQCA0lEAgKiBAwCpgQMAqoEDAKuBAwCsgQMArYEDAK6BAwCvgQMAsEUDALFNAwCyRQMAs10DALRNAwC1fQMAtnUDALcZAwC4KQMAuTUDALo9AwC7MQMAvAEDAL31AAC+/QAAv+0AALMpAgDWUQCA2lEAgN5RAIDiUQCAtiECALUpAgCEUB0Au6kCALqhAgDqUQCA7lEAgL+ZAgC+qQIAvakCALyxAgCBTQAAgE0AAO+cAwCCXQAAhvAcAId4HQC+EB0A8lEAgPZRAID6UQCA/lEAgAJSAIDhkAEABlIAgONgAwAKUgCADlIAgBJSAIAWUgCAGlIAgB5SAIAiUgCAJlIAgO+UAQCE7BwA4XAGACpSAIDjUAEALlIAgDJSAIA2UgCAOlIAgKPpAgA+UgCAQlIAgEZSAIBKUgCApuECAKXpAgBOUgCAq2kCAKphAgBSUgCAvqgcAK9ZAgCuaQIArWkCAKxxAgCoMR4AqTEeAKoxHgCrMR4ArF0eAK1FHgCuTR4Ar0UeAOZRAICCzR8AgfUfAID9HwBWUgCAWlIAgIYcAACH+AMAuMUeALnNHgC6xR4Au90eALzFHgC9zR4AvsUeAL9ZHwCwPR4AsQUeALINHgCzBR4AtB0eALUBHgC2BR4At/0eALO5HgBeUgCAYlIAgGZSAIBqUgCAtsUeALXVHgBuUgCAu8EeALr5HgByUgCAdlIAgL/FHgC+2R4AvdEeALzZHgB6UgCAo/0eAH5SAICCUgCApoEeAIZSAICKUgCApZEeAKq9HgCrhR4AjlIAgJJSAICunR4Ar4EeAKydHgCtlR4AqCkeAKkpHgCqVR4Aq20eAKx1HgCtfR4ArnUeAK9pHgCWUgCAmlIAgJ5SAICiUgCAplIAgKpSAICuUgCAslIAgLjpHgC59R4Auv0eALv1HgC87R4AvZEeAL6RHgC/kR4AsB0eALHlHgCy7R4As+UeALT9HgC15R4Atu0eALflHgCz3R4AtlIAgLpSAIC+UgCAwlIAgLb9HgC1/R4AhFgBALshHgC62R4AvigAAMpSAIC/IR4AvjkeAL0xHgC8OR4AgU0AAIBNAACjlR4Agl0AAKW1HgDGUgCAzlIAgKa1HgB2UQCA0lIAgKtpHgCqkR4ArXkeAKxxHgCvaR4ArnEeAIYABACHRAMAs4ECANZSAIC1gQIA2lIAgN5SAIC2gQIAiAAAAOJSAIC74QIAuu0CAL3lAgC8+QIAv9ECAL7lAgDmUgCA6lIAgIREAwC+jAMA4UgCAO5SAIDjAAIA7/wfAPJSAIDhPB4A79wCAONgHwD2UgCA+lIAgP5SAIACUwCAqQUCAKixAgCrBQIA
qgUCAK0NAgCsBQIArzUCAK41AgCEbAUABlMAgApTAIAOUwCAElMAgBZTAIAaUwCAHlMAgLnpAwC44QMAu/kDALrhAwC96QMAvOEDAL9dAwC+4QMAsSkCALAlAgCzPQIAsiECALUZAgC0LQIAt9kDALYRAgAiUwCAJlMAgCpTAICjhQMALlMAgKWFAwCmhQMAMlMAgDpTAIA+UwCAqukDAKvlAwCs/QMAreEDAK7hAwCv1QMAgEkAAIFVAACCVQAAo6kCAL6YBAClQQEApkEBAEJTAICG4AUAh+AFAKotAQCrOQEArBEBAK0FAQCuDQEArwUBAEZTAIBKUwCATlMAgO/cAABSUwCAVlMAgFpTAIDviB4AhCwHAOHsHgBeUwCA4xweAGJTAIDhlAEAZlMAgOMwAACzJQIAhWDmAGpTAIBuUwCAclMAgLbNAQC1zQEAdlMAgLu1AQC6oQEAelMAgH5TAIC/iQEAvoEBAL2JAQC8nQEANlMAgIJTAICGUwCAilMAgI5TAICSUwCAllMAgJpTAICoAQcAqQEHAKp1BwCrrQcArLUHAK29BwCuqQcAr6kHALDZBwCx7QcAsvkHALP1BwC0mQcAtZkHALaJBwC3gQcAuIkHALmJBwC6bQAAu2UAALx9AAC9ZQAAvm0AAL9lAACBCQAAgJkAAJ5TAICCHQAAolMAgKZTAICqUwCArlMAgKgNBQCpfQUAqk0FAKuhBgCspQYAra0GAK6dBgCv/QYAsIUGALGRBgCyqQYAs70GALSlBgC1rQYAtqUGALd5BgC4SQYAuUkGALpZBgC7WQYAvEkGAL1JBgC++QcAv/kHALNdBgCyUwCAhigCAIcsAQC2UwCAtp0GALWdBgC6UwCAu4kGALq9BgC+UwCAwlMAgL/9BgC+/QYAvYEGALyNBgDGUwCAoxkGAMpTAIDOUwCAptkGANJTAIDWUwCApdkGAKr5BgCrzQYA2lMAgN5TAICuuQYAr7kGAKzJBgCtxQYAqBkBAKkZAQCqjQAAq50AAKyNAACtvQAArrUAAK/dAADiUwCA5lMAgOpTAIDuUwCA8lMAgPZTAID6UwCA/lMAgLhpAAC5aQAAunkAALt5AAC8aQAAvWkAAL7dAwC/1QMAsKkAALGpAACyvQAAs7UAALSZAAC1mQAAtlkAALdZAAC+LAIAAlQAgAZUAIAKVACADlQAgBJUAIAaVACAHlQAgIAtAACBNQAAgj0AACJUAICGkAwAh+gCACZUAIAqVACAs0UDAC5UAIAyVACANlQAgDpUAIC2fQMAtUUDAD5UAIC7LQMAui0DAEJUAIBGVACAvx0DAL4dAwC9IQMAvCkDAKvNAwCqzQMASlQAgE5UAICv/QMArv0DAK3BAwCsyQMAo6UDAFJUAIBWVACAWlQAgF5UAICmnQMApaUDAGJUAIBmVACAalQAgG5UAIByVACAdlQAgII9AACBPQAAgD0AAHpUAIB+VACAglQAgIRgAwCG0AwAhzADAIpUAICOVACAvkQCAJJUAICWVACAmlQAgOEAAACeVACA46gGAKJUAICE7AwAplQAgO/QAwCqVACArlQAgLJUAIC2VACAulQAgLNtAQC+VACAwlQAgMZUAIDKVACAthEBALVlAQDOVACAuz0BALo1AQDSVACA1lQAgL/9AQC+/QEAvRUBALwVAQDaVACA4fwGAN5UAIDjPAcA4lQAgOZUAIDqVACA7lQAgPJUAIC+bAwA+lQAgP5UAIACVQCABlUAgApVAIDvFAYAgV0AAIBdAACj5QEAgm0AAKXtAQAOVQCAElUAgKaZAQCHqAwAhuQMAKu1AQCqvQEArZ0BAKydAQCvdQEArnUBAKgZDgCpGQ4AqiUOAKs1DgCsLQ4ArVEOAK5RDgCvUQ4AhlQAgPZUAIAWVQCAGlUAgB5VAIAiVQCAJlUAgCpVAIC47Q4AufUOALr1DgC7jQ4AvJUOAL2dDgC+lQ4Av40OALAxDgCxOQ4AsgEOALMBDgC0+Q4AtfkOALbdDgC31Q4AqHkOAKl5DgCqjQ8Aq4UPAKydDwCtgQ8AroUPAK+5DwAuVQCAMlUAgDZVAIA6VQCAPlUAgEJVAIBGVQCASlUAgLiRDwC5mQ8AuqEPALuhDwC8UQ8AvV0PAL5JDwC/SQ8AsM0PALHVDwCy3Q8As9UPALTNDwC1sQ8AtrEPALexDwCzBQ4ATlUAgFJVAIBWVQCAWlUAgLYBDgC1FQ4AXlUAgLsRDgC6CQ4AYlUAgISgAQC/dQ4AvgkOAL0BDgC8CQ4AgmkAAKNBDgCAWQAAgVEAAKZFDgC+WAEAZlUAgKVRDgCqTQ4Aq1UOAIbIAACHrAEArk0OAK8xDgCsTQ4ArUUOAGpVAIBuVQCAclUAgHZVAIB6VQCAflUAgBZUAICCVQCAqAkOAKkJDgCqGQ4AqxkOAKwJDgCtYQ4ArmEOAK+VAQCw7QEAsfUBALL9AQCz9QEAtO0BALV1AQC2fQEAt3UBALhNAQC5VQEAul0BALtVAQC8TQEAvfEAAL7xAAC/8QAAhlUAgIpVAICOVQCAklUAgJZVAIDj6A4AmlUAgOE0DgC+AAQA79wPAJ5VAICiVQCAplUAgKpVAICuVQCAslUAgLPxDQC2VQCAulUAgL5VAIDCVQCAtoENALXhDQDGVQCAu1ECALpJAgDKVQCAzlUAgL/RAgC+SQIAvUECALxJAgCjMQ0A0lUAgISIAwDaVQCA3lUAgKZBDQClIQ0A4lUAgKuRAgCqiQIA5lUAgOpVAICvEQIArokCAK2BAgCsiQIAgKkAAIGpAACCTQAA7lUAgOFkEgDjTAIA4wgLAOGsAQDyVQCA7zwCAO8YFgD2VQCAhlAGAIdIAwD6VQCA/lUAgKiBAgCpgQIAqoECAKuBAgCsgQIArYECAK6FAgCvHQEAAlYAgAZWAIAKVgCADlYAgBJWAIAWVgCAGlYAgIS4BQC4dQEAuX0BALp1AQC7CQEAvBkBAL0ZAQC+CQEAvwEBALBlAQCxbQEAsmUBALN9AQC0aQEAtV0BALZVAQC3TQEAHlYAgCJWAIAmVgCAKlYAgC5WAIAyVgCA7zQAAO/ADgDhXA4A4UwPAOOUAADjnA4ANlYAgIJlAACBfQAAgH0AADpWAIA+VgCAvsQHALNFAgBCVgCAtUUCALZNAgBKVgCAhkAGAIeQBAC67QEAu+UBALz9AQC95QEAvuEBAL/VAQCflQgAngUIAJ3dDQCcPQwAmzEMAJr1DQCZ7RAAmD0QAJfVEQCWsRUAlQUUAJTlFQCTtRkAkjEYAJE5GACQDRwAj2EcANZVAICz1QYATlYAgLX9BgBGVgCAUlYAgLaRBgBWVgCAWlYAgLuVBgC6lQYAvVUHALxVBwC/VQcAvlUHAF5WAIBiVgCAqo0GAKuFBgCsnQYArYUGAK6BBgCvtQYAhKgAAGZWAIBqVgCAoyUFAG5WAIClJQUApi0FAHJWAIB2VgCAelYAgH5WAICCVgCAhlYAgIpWAICOVgCAklYAgJZWAICaVgCAnlYAgKJWAICjqQUAotE
EAKHZBACgZQUAgiEdAIM1HQCmVgCAqlYAgIaVGACH3RQAhBkZAIUZGQCKDRUAi7EUAK5WAICyVgCAjsURAI/VDACMzRAAjR0RAJJhDQCTdQ0AvkwAALpWAICWxQkAl80EAJSNDACVXQkAmkEFAJtBBQCGyP8Ah0wAAIFZAACAeQAAnCEEAIJRAAChxQEAvlYAgKMB/ACi2QEApRX9AKS1/QCnufkApgH4AKkJ+AColfkAqwX1AKqt9QCtsfEArAHwAK8d8ACurfEAseHtALAB7ACzAegAsv3sALVd6QC09ekAwlYAgMZWAIDKVgCAzlYAgNJWAIDWVgCA2lYAgN5WAIDiVgCA5lYAgKiNBACplQQAqpUEAKulBACsvQQArdkEAK75BACv8QQAhGz8AOpWAIDuVgCA8lYAgPZWAID6VgCA/lYAgAJXAIC4eQUAucUFALrNBQC7xQUAvN0FAL3FBQC+zQUAv+0FALCZBACxmQQAskkFALNJBQC0WQUAtVkFALZJBQC3SQUAox0EAL7M/AAGVwCAClcAgA5XAICmWQQApTUEABJXAICrXQQAql0EABZXAIAaVwCAr50FAK6dBQCtnQUArJ0FAB5XAICznQIAIlcAgCpXAIC2UQIALlcAgDJXAIC1uQIAukkCALtVAgCGSP0Ah8D8AL41AgC/PQIAvEUCAL09AgCo3QQAqUkDAKpRAwCrbQMArHUDAK2VAwCunQMAr7kDAICNAQCB5QEAguEBADZXAIA6VwCAPlcAgEJXAIBGVwCAuJUDALmdAwC6lQMAu60DALy1AwC9vQMAvrUDAL9VAgCwyQMAsdUDALLVAwCzrQMAtLUDALW9AwC2tQMAt60DAEpXAIBOVwCAo9EDAFJXAICl9QMAVlcAgFpXAICmHQMAXlcAgGJXAICrGQMAqgUDAK1xAwCsCQMAr3EDAK55AwDhKAcAZlcAgOPkBgBqVwCA4SgGAG5XAIDjaAEAclcAgHZXAIB6VwCA71gAAH5XAICCVwCAhlcAgO/IBgCKVwCAqE39AKmB/QCq0f0Aq9H9AKzx/QCt8f0ArvH9AK/x/QAmVwCAghEAAIEZAACA0f8AjlcAgJJXAICEdAMAvnQDALh1/gC5ff4AunX+ALvF/gC83f4AvcX+AL7F/gC/9f4AsJH9ALGR/QCykf0As5H9ALRV/gC1Xf4AtlX+ALdN/gCzWf0AllcAgIasAACHRAMAmlcAgLZx/QC1ef0AnlcAgLtV/QC6Vf0AolcAgKZXAIC/mf4AvpH+AL1F/QC8Rf0AqlcAgKMd/QCuVwCAslcAgKY1/QC2VwCAulcAgKU9/QCqEf0AqxH9AL5XAIDCVwCArtX+AK/d/gCsAf0ArQH9AKjN/wCp0f8AqtH/AKsh/gCsIf4ArSH+AK4h/gCvIf4AxlcAgMpXAIDOVwCA0lcAgNZXAIDaVwCA3lcAgOJXAIC4jf4AuZH+ALqV/gC7rf4AvLX+AL25/gC+qf4Av6n+ALDh/gCx4f4AsuX+ALP5/gC06f4AtdX+ALbd/gC3uf4As1n/AOZXAIC2VgCA6lcAgO5XAIC2of4Atan+APJXAIC7Jf4AuiX+APZXAID6VwCAvxH+AL4t/gC9Lf4AvDH+AIIZAACjHf8AgGUAAIEZAACm5f4A/lcAgAJYAICl7f4AqmH+AKth/gCEZAEAviAAAK5p/gCvVf4ArHX+AK1p/gAKWACA4zT+AA5YAIDhfP0AhrAEAIcIAwASWACAFlgAgBpYAIAeWACAhCQDAIQkBAAiWACA70j+ACZYAIAqWACAs+kCAC5YAIC+RAQAvkAFADJYAIC2nQIAtZkCADZYAIC7iQIAur0CADpYAIA+WACAv1kDAL5RAwC9WQMAvJECAKkdAgCoFQIAqyUCAKolAgCtWQIArFUCAK9NAgCuUQIAvmQGAEJYAIBGWACASlgAgE5YAIBSWACAVlgAgFpYAIC5+QMAuPEDALtNAwC68QMAvUEDALxZAwC/cQMAvkEDALEJAgCwPQIAs8kDALIBAgC12QMAtNEDALfJAwC20QMA4ZABAF5YAIDj8AAAYlgAgGZYAICCPQAAgT0AAIA9AABqWACAblgAgHJYAIB6WACAflgAgIJYAIDvLAAAhlgAgKPpAwCKWACAhugEAIdgBQCOWACApp0DAKWZAwCSWACAq4kDAKq9AwCWWACAmlgAgK9ZAgCuUQIArVkCAKyRAwCeWACAolgAgKZYAICqWACArlgAgLJYAIC2WACA71gBAISgBADhVP8AulgAgOOEAQC+WACAwlgAgMZYAIDKWACAs9kBAM5YAICFzBkA0lgAgNZYAIC28QEAtfkBANpYAIC7pQEAutkBAN5YAIDiWACAv50BAL6dAQC9pQEAvK0BAKgBBgCpDQYAqhEGAKsRBgCsMQYArTEGAK4pBgCvJQYAdlgAgILJBwCBwQcAgPEHAOZYAIDqWACAhhwAAIf8AwC47QYAufUGALr9BgC79QYAvO0GAL1RBwC+VQcAv00HALBdBgCxIQYAsjkGALMxBgC0GQYAtRkGALbdBgC31QYAo5kGAO5YAIDyWACA9lgAgPpYAICmsQYApbkGAP5YAICr5QYAqpkGAAJZAIAGWQCAr90GAK7dBgCt5QYArO0GAApZAICz8QcADlkAgBJZAIC2gQcAFlkAgBpZAIC1mQcAuo0HALtlBwAeWQCAIlkAgL59BwC/ZQcAvH0HAL11BwCoLQYAqTUGAKo9BgCrMQYArFUGAK1FBgCuRQYAr3UGACZZAIAqWQCALlkAgDJZAIA2WQCAOlkAgD5ZAIBCWQCAuOkGALn1BgC6/QYAu/UGALztBgC9kQYAvpUGAL+NBgCwDQYAseUGALLtBgCz5QYAtP0GALXlBgC27QYAt+UGAKO1BgBGWQCASlkAgE5ZAIBSWQCApsUGAKXdBgAGWACAqyEGAKrJBgBWWQCAWlkAgK8hBgCuOQYArTEGAKw5BgCASQAAgUkAAIJZAACzRQEAXlkAgLVFAQC2RQEAYlkAgIZAAACHZAAAuikBALslAQC8PQEAvSEBAL4hAQC/FQEAZlkAgGpZAICEBAMAvgAMAOMoBgDv4AIA4RAGAG5ZAIDvkAYA4zwCAHJZAIDh1AEAdlkAgHpZAIB+WQCAglkAgIZZAICKWQCAo8ECAI5ZAIClwQIAklkAgJZZAICmwQIAmlkAgJ5ZAICroQIAqq0CAK2lAgCsuQIAr5ECAK6lAgCpBQIAqLECAKsFAgCqBQIArQ0CAKwFAgCvNQIArjUCAISoDACiWQCAplkAgKpZAICuWQCAslkAgLZZAIC6WQCAuekDALjhAwC7+QMAuuEDAL3pAwC84QMAv10DAL7hAwCxKQIAsCUCALM9AgCyIQIAtRkCALQtAgC32QMAthECAKitAgCp1QIAqtUCAKsNAQCsFQEArQkBAK4xAQCvLQEAvlkAgMJZAIDKWQCAzlkAgNJZAIDWWQCA2lkAgN5ZAIC4IQEAuSEBALrtAQC75QEAvP0BAL3lAQC+7QEAv+UBAL
BVAQCxXQEAslUBALMtAQC0NQEAtTkBALYtAQC3JQEAgD0BAIGlAACCrQAA79QHAOJZAIDmWQCA6lkAgO8oBwC+LAwA4fQGAO5ZAIDjkAcA8lkAgOGUAQD2WQCA4wwGALMdAgD6WQCAh0QNAIZMDQD+WQCAtskBALXdAQACWgCAu9kBALrRAQAGWgCACloAgL+9AQC+sQEAvbkBALzBAQDGWQCADloAgBJaAIAWWgCAGloAgB5aAIAiWgCAJloAgKgJDwCpCQ8AqhkPAKsZDwCsCQ8ArQkPAK6pDwCvqQ8AsNkPALHtDwCy+Q8As/UPALSVDwC1hQ8AtoUPALe1DwC4jQ8AuWEAALphAAC7YQAAvGEAAL1hAAC+YQAAv2EAAKNdDQCCLQAAgRUAAIAdAAAqWgCApokOAKWdDgAuWgCAq5kOAKqRDgAyWgCANloAgK/9DgCu8Q4ArfkOAKyBDgA6WgCAs/UPAIboAwCHvAMAtu0PAD5aAIBCWgCAteUPALp5DwC7TQ8ARloAgEpaAIC+NQ8AvyUPALxJDwC9RQ8AozEOAE5aAIBSWgCAVloAgFpaAICmKQ4ApSEOAF5aAICriQ4Aqr0OAGJaAIBmWgCAr+EOAK7xDgCtgQ4ArI0OAGpaAIBuWgCAcloAgHZaAIB6WgCAfloAgIJaAICGWgCAiloAgI5aAICSWgCAlloAgIANAACB1QAAgt0AAJpaAICoQQEAqVEBAKpRAQCrZQEArH0BAK2RAACukQAAr5EAAJ5aAICiWgCAhGQBAL5kAQCGkAEAh4QAAKpaAICuWgCAuJEAALmRAAC6kQAAu5EAALyxAAC9sQAAvrEAAL+xAACw8QAAsfkAALLBAACzwQAAtLEAALWxAAC2sQAAt7EAALPZAgCyWgCAvnADAL5EBAC2WgCAthEDALX1AgC6WgCAuz0DALo1AwC+WgCAwloAgL91AwC+dQMAvRUDALwVAwDGWgCAo50CAMpaAIDOWgCAplUDANJaAIDWWgCApbECAKpxAwCreQMA2loAgN5aAICuMQMArzEDAKxRAwCtUQMAqDkDAKk5AwCqjQAAq50AAKyNAACtvQAArrUAAK/dAADiWgCA5loAgOpaAIDuWgCA8loAgPZaAID6WgCA/loAgLhpAAC5aQAAunkAALt5AAC8aQAAvWkAAL7ZAQC/2QEAsKkAALGpAACyvQAAs7UAALSZAAC1mQAAtlkAALdZAAACWwCABlsAgApbAIAOWwCA70QAABJbAICGmAUAh+QCAOOYAACEqAIA4fgBABpbAICAOQAAgTkAAIItAAAeWwCAs0UBACJbAIAmWwCAKlsAgC5bAIC2fQEAtUUBADJbAIC7LQEAui0BADZbAIA6WwCAvx0BAL4dAQC9IQEAvCkBAD5bAIDhUA4AQlsAgOM8DwBGWwCASlsAgE5bAIBSWwCAVlsAgFpbAIDjAAAAXlsAgGJbAIBmWwCAhPQFAO/kDgCuqQEAr6kBAKydAQCtlQEAqpkBAKuZAQBqWwCAblsAgKbJAQByWwCAdlsAgKXxAQCC/QcAo/EBAID9BwCB9QcAFlsAgHpbAIB+WwCAglsAgIZbAICKWwCAhrgDAIeQAwCoDQcAqRkHAKptBwCrZQcArH0HAK1lBwCuZQcAr1UHALAtBwCxxQcAssEHALPdBwC0xQcAtc0HALbFBwC3/QcAuMUHALnJBwC62QcAu9kHALypBwC9qQcAvp0HAL+VBwCzxQcAjlsAgJJbAICWWwCAmlsAgLbFBwC11QcAnlsAgLshBwC6yQcAolsAgKZbAIC/KQcAviEHAL0pBwC8NQcAqlsAgKOBBwCuWwCAslsAgKaBBwC2WwCAulsAgKWRBwCqjQcAq2UHAL5bAIDCWwCArmUHAK9tBwCscQcArW0HAKgVAQCpgQEAqoEBAKuBAQCsgQEArYkBAK6xAQCvsQEAxlsAgMpbAIDOWwCA0lsAgNZbAIDaWwCA3lsAgOJbAIC4ZQAAuW0AALplAAC7fQAAvGUAAL1tAAC+ZQAAv90AALChAQCxrQEAsqUBALO5AQC0qQEAtZ0BALaVAQC3XQAA5lsAgIIdAACBHQAAgB0AAOpbAIDuWwCA8lsAgL5YAQCErAIA9lsAgIcIAQCGjAEA+lsAgKZaAID+WwCAAlwAgLNJAQAGXACAClwAgA5cAIASXACAtkkBALVJAQAWXACAuykBALolAQAaXACAHlwAgL8ZAQC+LQEAvS0BALwxAQC+2AMAIlwAgO/4BgAmXACAKlwAgC5cAIDv4AIAMlwAgOGUAQA2XACA43QCADpcAIDhmAUAPlwAgOMMBwBCXACARlwAgEpcAICjwQIAhIwDAKXBAgBOXACAUlwAgKbBAgBWXACAWlwAgKuhAgCqrQIAraUCAKy5AgCvkQIArqUCAKgxAwCpPQMAqjUDAKtJAwCsWQMArVkDAK5JAwCvQQMAgMUAAIEJAACCGQAAXlwAgGJcAIBqXACAh2wDAIYcHAC47QAAufEAALr1AAC7jQAAvJUAAL2BAAC+gQAAv70AALAJAwCxCQMAsu0AALPhAAC04QAAteEAALblAAC32QAAblwAgHJcAIB2XACAs7ECAHpcAIC13QIAttUCAH5cAICCXACAhlwAgLrBAgC7wQIAvDUBAL05AQC+KQEAvykBAKaNAgCKXACAjlwAgKWFAgCSXACAo+kCAJZcAICaXACArnEBAK9xAQCsbQEArWEBAKqZAgCrmQIAnlwAgKJcAICmXACA4YQGAKpcAIDjJAYArlwAgOGUAQCyXACA4ywAAL7oHQC2XACAulwAgO/IAACE/B0AvvAcAL5cAIDvSAcAwlwAgMZcAIDKXACAzlwAgIEdAACAHQAA0lwAgIIFAACGQBwAh8QcANpcAIDeXACA4lwAgOZcAIDqXACA7lwAgKi1HgCpBR8Aqg0fAKsFHwCsAR8ArQkfAK45HwCvOR8A1lwAgPJcAID2XACA+lwAgP5cAIACXQCABl0AgApdAIC4yR8AudUfALrRHwC76R8AvPkfAL3tHwC+mR8Av5kfALAlHwCxLR8AsjkfALM1HwC0LR8AtQ0fALYFHwC3/R8As4UfAA5dAIASXQCAFl0AgBpdAIC2iR8AtYkfAB5dAIC76R8AuuEfACJdAIAmXQCAv8kfAL7pHwC94R8AvO0fACpdAICjwR8ALl0AgDJdAICmzR8ANl0AgDpdAIClzR8AqqUfAKutHwA+XQCAQl0AgK6tHwCvjR8ArKkfAK2lHwCo6R4AqekeAKr5HgCr+R4ArOkeAK3pHgCuPQEArzUBAID5AQCBzQEAgsUBAIRgAgBGXQCASl0AgIdoAQCGnAAAuNEBALnZAQC64QEAu+EBALyRAQC9nQEAvpUBAL+JAQCwTQEAsVUBALJdAQCzVQEAtE0BALXxAQC28QEAt/EBALNxHgBOXQCAUl0AgFZdAIBaXQCAtmkeALVhHgBeXQCAu5EBALqJAQBiXQCAZl0AgL81AQC+iQEAvYEBALyJAQBqXQCAZlwAgKM5H
gBuXQCApSkeAHJdAIB2XQCApiEeAHpdAIB+XQCAq9kBAKrBAQCtyQEArMEBAK99AQCuwQEAgl0AgIZdAICKXQCAjl0AgJJdAICWXQCAml0AgJ5dAICiXQCApl0AgKpdAICuXQCAsl0AgLpdAIC+XQCAvnADAOHkHgCESAIA4+gfAIQABACAeQAAgXkAAIJpAADCXQCAhsAEAIdEAwDGXQCAyl0AgM5dAIDSXQCA7yAfANZdAIDaXQCA3l0AgOJdAIDvSAIA5l0AgOpdAIDuXQCA8l0AgL7oBAD2XQCA+l0AgP5dAIACXgCA4ZABAAZeAIDj6AIAs0kDAApeAIAOXgCAEl4AgBZeAIC2SQMAtUkDABpeAIC7LQMAuiUDAB5eAIAiXgCAvxUDAL4VAwC9IQMAvCkDAKg1AgCpgQIAqoECAKuBAgCsgQIArYkCAK6xAgCvsQIAgP0BAIHNAQCCxQEAKl4AgIaQBACHBAUALl4AgIRwBAC4SQEAuUkBALpZAQC7WQEAvEkBAL1JAQC+eQEAv3kBALChAgCxqQIAsr0CALO1AgC0kQIAtZECALZ5AQC3eQEAMl4AgDZeAIA6XgCAPl4AgEJeAIBGXgCASl4AgO/QHgC+6AQA4VweAE5eAIDjkAAAUl4AgFZeAIBaXgCAXl4AgKNJAgBiXgCAZl4AgGpeAIBuXgCApkkCAKVJAgByXgCAqy0CAKolAgB2XgCAel4AgK8VAgCuFQIArSECAKwpAgCoNQYAqT0GAKpVBgCrZQYArH0GAK1lBgCubQYAr2EGACZeAIB+XgCAgl4AgIZeAICADQAAgbEAAIKxAACKXgCAuOkGALnpBgC6+QYAu/UGALyVBgC9nQYAvpUGAL+NBgCw4QYAseEGALLhBgCz/QYAtOUGALXtBgC25QYAt9kGALPdBgCOXgCAkl4AgJZeAICaXgCAtuUGALX1BgCeXgCAuyUGALolBgCGmAAAh6wAAL8pBgC+IQYAvSkGALw1BgCiXgCAo5kGAKZeAICqXgCApqEGAK5eAICyXgCApbEGAKphBgCrYQYAtl4AgLpeAICuZQYAr20GAKxxBgCtbQYAqC0GAKk9BgCqiQYAq4kGAKyZBgCtmQYArokGAK+JBgC+XgCAwl4AgMZeAIDKXgCAzl4AgNJeAIDWXgCA2l4AgLiNBgC5lQYAupUGALulBgC8vQYAvXEBAL5xAQC/cQEAsPkGALHNBgCy2QYAs9kGALTJBgC1yQYAtr0GALe1BgCzAQYA3l4AgOJeAIDmXgCA6l4AgLYZBgC1EQYA7l4AgLsJBgC6PQYA8l4AgPZeAIC/DQYAvg0GAL0NBgC8DQYA+l4AgKNFBgC2XQCA/l4AgKZdBgACXwCAhFgAAKVVBgCqeQYAq00GAL5oAQAGXwCArkkGAK9JBgCsSQYArUkGAIDBAwCByQMAgt0DAKPNAgAKXwCApdkCAKbNAgAOXwCAhoANAIeUAwCqxQIAqw0DAKwVAwCtHQMArhUDAK8NAwDhnBcA4xgGAOMUAwDhNAYA7xgCABJfAIAWXwCAGl8AgOPQAgAeXwCA4VACACJfAIAmXwCA7ywGAO/kJQAqXwCArE0CAK1RAgCuUQIAr2UCAKgBAgCpCQIAqlkCAKtVAgCE7A0ALl8AgDJfAIA2XwCAvvgNADpfAIA+XwCAQl8AgLxRAwC9WQMAvmEDAL9hAwC47QMAuVEDALpRAwC7UQMAtM0DALXVAwC23QMAt9UDALAdAgCx1QMAst0DALPVAwDjyAAARl8AgOG4AQBKXwCAhFQPAE5fAIBSXwCAVl8AgKHpAgCgFQYAo6UDAKINAwDvIAAAWl8AgF5fAIBiXwCAZl8AgGpfAICFNCYAs40DAG5fAIC1mQMAto0DAHJfAICGwA8Ah5QNALqFAwC7TQIAvFUCAL1dAgC+VQIAv00CAHpfAIB+XwCAgl8AgIZfAICKXwCAjl8AgI/d6wDvxAYAvuAPAOGMBgCSXwCA44AGAID1AACB5QAAguUAAJZfAICZbR8AmMUfAJvJGwCaeRoAnXUaAJzFGwCf+QcAnhkGAJFpFgCQsesAk20XAJLNFwCV0RMAlGkSAJdREgCWzRMAg1XkAIJB5AB2XwCAml8AgIeNHQCGkRgAhTkYAISVGQCLERwAigUcAJ5fAICiXwCAj4UVAI6ZEACNORAAjJUdAJNRFACSRRQApl8AgKpfAICXYQkAlnUIAJWdCQCU+RUAm0EMAJqtDQCuXwCAsl8AgLZfAIC6XwCAvl8AgJzxDAChbQ0Awl8AgKMBBACihQAApZkEAKSRBACnGTgApsUFAKkJOACoKTgAq4k8AKoBPACtATAArB08AK8pMACunTAAseE0ALABNACzASgAsv00ALXZKAC00SgAxl8AgMpfAIDOXwCA0l8AgNZfAIDaXwCAgB0AAIEJAACC2QEA3l8AgKgRDwCpGQ8Aql0PAKtVDwCsTQ8ArXEPAK51DwCvbQ8A4l8AgOpfAICGiAAAhxABAO5fAIDyXwCA9l8AgPpfAIC4TQ4AuVEOALpRDgC7UQ4AvGUOAL1tDgC+ZQ4Avx0OALAdDwCxwQ8AssEPALPBDwC0xQ8Atc0PALbFDwC3eQ4As9UPAP5fAIACYACABmAAgApgAIC28Q8AtcUPAA5gAIC7BQ8AutkPABJgAIAWYACAvwkPAL4BDwC9FQ8AvBUPABpgAICjkQ8AHmAAgCJgAICmtQ8AJmAAgCpgAIClgQ8Aqp0PAKtBDwAuYACAMmAAgK5FDwCvTQ8ArFEPAK1RDwCogQ0AqYENAKqBDQCrgQ0ArIENAK2BDQCusQ0Ar6ENADZgAIA6YACAPmAAgEJgAIBGYACAgrkAAIG9AACAvQAAuDUCALk9AgC6zQIAu5UCALyNAgC9tQIAvr0CAL+1AgCwbQIAsU0CALJFAgCzJQIAtD0CALUdAgC2FQIAtw0CAEpgAIBOYACAswENAFJgAIC1AQ0AWmAAgISUAwC2CQ0AviwEAF5gAIC7gQIAuqECAL35AgC8mQIAv9ECAL7xAgBiYACAZmAAgGpgAICjRQ0AbmAAgKVFDQCmTQ0AcmAAgIbgBACHpAQAquUCAKvFAgCs3QIArb0CAK61AgCvlQIAqCUCAKk1AgCqPQIAqzUCAKwtAgCtkQIArpECAK+RAgB2YACAemAAgH5gAICCYACAzAAAAIZgAICKYACAjmAAgLiZAgC5rQIAuqUCALttAQC8dQEAvX0BAL51AQC/bQEAsPECALH5AgCywQIAs8ECALSxAgC1vQIAtrUCALepAgCSYACA44QOAJZgAIDh9A4AmmAAgJ5gAICiYACApmAAgIQgBQCqYACArmAAgLJgAIC2YACA7+wOALpgAIC+YACAs/UCAMJgAICG6AQAh4wEAL5cBAC2UQIAteUCAMpgAIC7fQIAunUCAM5gAIDSYACAvzkCAL41AgC9VQIAvFUCAKM1BQBWYACAxmAAgNZgAIDaYACAppEFAKUlBQDeYACAq70FAKq1BQDiYACA5mAAgK/5BQCu
9QUArZUFAKyVBQCA+QcAgfkHAIKNBwCzjQYA6mAAgLWdBgC2iQYA7mAAgPJgAID2YACAuk0HALtFBwC8XQcAvUEHAL5BBwC/QQcA+mAAgP5gAIDmXwCAAmEAgAZhAIAKYQCADmEAgBJhAICoNQYAqQEGAKppBgCraQYArHkGAK1lBgCuZQYAr50HALDlBwCx7QcAsuUHALP5BwC06QcAtekHALZZBwC3VQcAuHEHALlxBwC6cQcAu3EHALxVBwC9XQcAvlUHAL9NBwCjwQcAFmEAgBphAIAeYQCAImEAgKbFBwCl0QcAJmEAgKsJBgCqAQYAKmEAgC5hAICvDQYArg0GAK0NBgCsEQYAgGkAAIFpAACCBQAAMmEAgL6YAQCEmAEANmEAgDphAICGADwAh8QBAD5hAIBCYQCARmEAgEphAIBOYQCAUmEAgKhdBgCpbQYAqmUGAKuBAQCsgQEArYkBAK6xAQCvsQEAVmEAgFphAIBeYQCAYmEAgGZhAIBqYQCAbmEAgHJhAIC4VQEAuV0BALpVAQC7yQAAvNkAAL3ZAAC+yQAAv8EAALCxAQCxuQEAsokBALOJAQC0cQEAtXEBALZ1AQC3bQEAs+0FAHZhAIB6YQCAfmEAgIJhAIC2CQIAtQkCAIZhAIC7fQIAunUCAIphAICOYQCAv7UCAL61AgC9XQIAvF0CAL5gAgCjqQUAkmEAgJZhAICmTQIAmmEAgJ5hAIClTQIAqjECAKs5AgCiYQCAhOADAK7xAgCv8QIArBkCAK0ZAgC+iDwAqmEAgKotAwCrJQMArD0DAK0lAwCuLQMAryUDAID1AACB/QAAgsEAAKPBAwCuYQCApcEDAKbBAwCyYQCAhmA8AIdUAwC2YQCAumEAgL5hAIDjqAIAwmEAgOGkAQDGYQCA71wCAMphAIDOYQCA0mEAgNZhAIDaYQCA3mEAgOJhAIDjjAcA5mEAgOE8BADqYQCA7mEAgPJhAID2YQCAhCACAPphAID+YQCAAmIAgAZiAIDvbAcACmIAgA5iAICzLQIAhEQ9ABJiAIAaYgCAHmIAgLYtAgC1LQIAImIAgLvJAgC6wQIAJmIAgCpiAIC/yQIAvsECAL3JAgC80QIA4XgHAOPAAADjOAYA4VwGAICpAACBqQAAgtEAAC5iAIAyYgCANmIAgL6kPAA6YgCAPmIAgO8cAADvkAYAQmIAgIZgPACHBD0ARmIAgLNxAQBKYgCAtRkBALYJAQBOYgCAUmIAgFZiAIC6AQEAuwEBALwBAQC9AQEAvgEBAL8BAQCohT4AqbU+AKq1PgCrxT4ArN0+AK3FPgCuwT4Ar/0+AFpiAIBeYgCAYmIAgGZiAIBqYgCAbmIAgHJiAIB2YgCAuFE/ALlRPwC6UT8Au1E/ALx1PwC9fT8AvnU/AL9tPwCwiT4AsYk+ALKZPgCzmT4AtIk+ALWJPgC2eT8At3U/AKZhAICjOT4AemIAgBZiAICmQT4AfmIAgIJiAIClUT4Aqkk+AKtJPgCGYgCAimIAgK5JPgCvST4ArEk+AK1JPgCASQAAgVEAAIJRAACzkT8AjmIAgLW5PwC2RT8AkmIAgIZAAACHBAMAukU/ALtdPwC8TT8AvT0/AL4pPwC/IT8AqE0+AKlVPgCqVT4Aq2U+AKx9PgCtiT4Arrk+AK+5PgCWYgCAmmIAgJ5iAICiYgCApmIAgKpiAICuYgCAsmIAgLhhAQC5YQEAumEBALthAQC8YQEAvWEBAL5hAQC/YQEAsM0+ALHVPgCy1T4As6U+ALShPgC1qT4Atpk+ALeZPgCj3T4AtmIAgLpiAIC+YgCAwmIAgKYJPgCl9T4AxmIAgKsRPgCqCT4AymIAgM5iAICvbT4ArmU+AK1xPgCsAT4A0mIAgNZiAIDaYgCA3mIAgOJiAIDmYgCA6mIAgO5iAICAOQAAgTkAAIIFAADyYgCAvrgBAIS4AQD6YgCA/mIAgKitAgCp1QIAqtUCAKstAwCsNQMArT0DAK41AwCvLQMAAmMAgAZjAIAKYwCADmMAgBJjAIAWYwCAGmMAgB5jAIC46QMAuekDALqJAwC7iQMAvJkDAL2ZAwC+iQMAv4kDALBVAwCxXQMAslUDALPpAwC0+QMAtfkDALbpAwC34QMAs10CACJjAICGKAQAh8wDACZjAIC2vQMAtb0DACpjAIC7mQMAupEDAC5jAIAyYwCAvz0DAL49AwC9PQMAvIEDAIUAFACjGQIANmMAgDpjAICm+QMAPmMAgEJjAICl+QMAqtUDAKvdAwBGYwCASmMAgK55AwCveQMArMUDAK15AwDjVD4A4dw/AOHQPgDjPD4ATmMAgO8cAABSYwCAVmMAgFpjAIDjwAAAXmMAgOHUAQDvYD4AYmMAgGpjAIDvRD8AgGEAAIFtAACCfQAAhAAFAIbwBACHnAUAvhAFAG5jAIByYwCAdmMAgHpjAIB+YwCAgmMAgIZjAICKYwCAjmMAgLiJPQC5iT0Aupk9ALuRPQC8uT0Avbk9AL7RPQC/0T0AsAU+ALENPgCyBT4Asx0+ALQFPgC1DT4AtgU+ALe5PQConT4Aqa0+AKqlPgCrvT4ArKU+AK2tPgCupT4Ar30+AISsBAC+rAQAkmMAgJZjAICaYwCAnmMAgKJjAICmYwCAqPkFAKn5BQCqKQYAqykGAKw5BgCtOQYArikGAK8pBgBmYwCAqmMAgK5jAICyYwCAtmMAgLpjAIC+YwCAwmMAgLiNBgC5kQYAupEGALulBgC8vQYAvUUHAL5BBwC/QQcAsFkGALFZBgCy7QYAs/0GALTtBgC13QYAttUGALe1BgCzoQYAxmMAgMpjAIDOYwCA0mMAgLa5BgC1sQYA2mMAgLudBgC6nQYA1mMAgPZiAIC/GQYAvikGAL0pBgC8OQYAglEAAKPlBgCAQQAAgUEAAKb9BgDeYwCA4mMAgKX1BgCq2QYAq9kGAIZIAACHbAAArm0GAK9dBgCsfQYArW0GAKg5BgCpWQYAqmkGAKtpBgCseQYArXkGAK5pBgCvaQYA5mMAgOpjAIDuYwCA8mMAgPZjAID6YwCA/mMAgAJkAIC4ZQEAuW0BALplAQC7fQEAvGUBAL1tAQC+ZQEAv9kBALAZBgCxGQYAsoEGALOBBgC0gQYAtYEGALaBBgC3gQYAs+EGAAZkAIAKZACADmQAgBJkAIC2+QYAtfEGABZkAIC73QYAut0GABpkAIAeZACAv0UGAL5FBgC9VQYAvFUGACJkAICjpQYAJmQAgCpkAICmvQYALmQAgDJkAICltQYAqpkGAKuZBgA2ZACAOmQAgK4BBgCvAQYArBEGAK0RBgConQIAqdECAKrRAgCrLQMArDUDAK09AwCuNQMAry0DAD5kAIBCZACAvmQCAEpkAIBOZACAUmQAgFZkAIBaZACAuOkDALnpAwC6iQMAu4UDALydAwC9gQMAvoEDAL+1AwCwVQMAsV0DALJVAwCz6QMAtPkDALX5AwC26QMAt+EDAIBtAwCBpQAAgq0AALNVAgBeZACAtbEDALaxAwBiZAC
AhOACAGZkAIC6nQMAu5UDALyNAwC9MQMAvjEDAL8xAwCjGQIAamQAgIVwaQBuZACAcmQAgKb9AwCl/QMAdmQAgKvZAwCq0QMAhkgMAIe8AwCvfQMArn0DAK19AwCswQMAemQAgH5kAICCZACAhmQAgO+wBgDvxAMAimQAgI5kAIDjfAYA45QDAOG4BwDh3AEAkmQAgJZkAICaZACAnmQAgKJkAICmZACAhEQCAL5YDQCADQAAgTUAAII9AACqZACArmQAgLJkAICGyAwAh1wNALpkAIC+ZACAwmQAgMZkAIDKZACAzmQAgNJkAIDWZACA2mQAgN5kAIDiZACA74AGAISsDQDh7AYA5mQAgONcBgDqZACA7mQAgPJkAID2ZACAs/UBAPpkAID+ZACAAmUAgAZlAIC2RQEAteUBAAplAIC7LQEAuiEBAA5lAIASZQCAv/UAAL71AAC9JQEAvC0BAKgtDgCpNQ4Aqj0OAKs1DgCsLQ4ArYUOAK6FDgCvuQ4AtmQAgBZlAIAaZQCAHmUAgIAZAACBGQAAggUAACJlAIC4WQ8AuVkPALp5DwC7eQ8AvGkPAL1pDwC+GQ8AvxkPALClDgCxqQ4AsrkOALOxDgC0cQ8AtXEPALZxDwC3cQ8Apb0OAL6IAwAqZQCAph0OACZlAIAuZQCAo60OADJlAICtfQ4ArHUOAK+tDwCurQ8ARmQAgDZlAICrdQ4AqnkOALO5DwA6ZQCAhmgAAIcMAwA+ZQCAtlEPALVZDwBCZQCAu3UPALp1DwBGZQCASmUAgL9FDwC+RQ8AvVEPALxlDwCocQ4AqXEOAKpxDgCrcQ4ArJEOAK2RDgCukQ4Ar5EOAE5lAIBSZQCAVmUAgFplAIBeZQCAYmUAgGZlAIBqZQCAuIUOALmNDgC6hQ4Au50OALyNDgC9vQ4AvrUOAL95AQCw8Q4AsfEOALLxDgCzxQ4AtMEOALXBDgC2wQ4At8EOAKP5DgBuZQCAcmUAgHZlAIB6ZQCAphEOAKUZDgB+ZQCAqzUOAKo1DgCCZQCAhmUAgK8FDgCuBQ4ArREOAKwlDgCADQAAgRUAAIIdAACKZQCAjmUAgJJlAICElAEAvpQBAIZABwCH5AAAmmUAgJ5lAICiZQCApmUAgKplAICuZQCAqIkCAKmRAgCqlQIAq7kCAKzVAgCtxQIArsUCAK/1AgCyZQCAtmUAgLplAIC+ZQCAvnwDAMJlAIDGZQCAymUAgLh9AwC5wQMAusEDALvBAwC8wQMAvckDAL7xAwC/8QMAsI0CALFFAwCyTQMAs0UDALRdAwC1RQMAtk0DALdFAwCzHQIAzmUAgNJlAIDWZQCA2mUAgLZFAgC1XQIA3mUAgLuBAwC6SQIA4mUAgOZlAIC/gQMAvpkDAL2RAwC8mQMA6mUAgKNZAgDuZQCA8mUAgKYBAgD2ZQCA+mUAgKUZAgCqDQIAq8UDAP5lAIACZgCArt0DAK/FAwCs3QMArdUDAIDZAQCB7QEAguUBAO+4DgAKZgCA4cQBAISYAgDj1AAADmYAgL7sBAASZgCA7wgAABZmAIDhxA8AGmYAgONkDgCGAAUAh2gFAB5mAICzvQIAImYAgLWtAgC2pQIAJmYAgCpmAIAuZgCAukEBALtBAQC8RQEAvU0BAL5FAQC/+QEAMmYAgDZmAIA6ZgCAPmYAgEJmAIBGZgCASmYAgO/gAQCEbAQA4dQOAE5mAIDjHA4AUmYAgFZmAIBaZgCAXmYAgKMxAgBiZgCAhCQHAGZmAIBqZgCApikCAKUhAgBuZgCAq80BAKrNAQByZgCAemYAgK91AQCuyQEArcEBAKzJAQCo6QUAqekFAKr5BQCr+QUArOkFAK3pBQCuOQYArzkGAAZmAICCzQcAgfUHAID9BwB2ZgCAfmYAgIYYAwCHkAMAuNEGALnZBgC64QYAu+EGALyRBgC9nQYAvpUGAL+JBgCwSQYAsUkGALJdBgCzVQYAtE0GALXxBgC28QYAt/EGALDhBwCx4QcAsgkHALMJBwC0GQcAtRkHALYJBwC3CQcAuDkHALkNBwC6GQcAuxkHALwJBwC9CQcAvn0HAL9xBwCCZgCAlmUAgIZmAICKZgCAjmYAgJJmAICWZgCAmmYAgKjxBwCpxQcAqsEHAKvdBwCsyQcArb0HAK6pBwCvoQcAsykGAJ5mAICiZgCApmYAgKpmAIC2XQYAtSEGAK5mAIC7RQYAukUGALJmAIC2ZgCAv70GAL69BgC9vQYAvL0GALpmAICjbQYAvmYAgMJmAICmGQYAxmYAgMpmAIClZQYAqgEGAKsBBgDOZgCA0mYAgK75BgCv+QYArPkGAK35BgCobQYAqbEBAKpJAQCrRQEArF0BAK1FAQCuTQEAr0UBANZmAICCHQAAgR0AAIAdAADaZgCA3mYAgOJmAIC+VAEAuIEAALmNAAC6hQAAu5kAALyJAAC9vQAAvrUAAL99AACwPQEAseEAALLhAACz4QAAtOEAALXpAAC20QAAt9EAALsFAwC62QIAhiwCAIcsAwC/DQMAvgUDAL0VAwC8FQMAs+ECAOpmAIDuZgCAhCwDAPJmAIC25QIAtfUCAPZmAICqnQIAq0EDAPpmAID+ZgCArkEDAK9JAwCsUQMArVEDAAJnAICjpQIABmcAgApnAICmoQIADmcAgBJnAIClsQIAqakAAKihAACrtQAAqr0AAK3dAACs3QAAr/EAAK79AAC+LBwAFmcAgBpnAIAeZwCAImcAgCZnAIAqZwCALmcAgLl9AAC4fQAAu80BALrNAQC93QEAvN0BAL/NAQC+zQEAsZUAALCJAACzTQAAspUAALVdAAC0XQAAt00AALZNAAAyZwCANmcAgDpnAIA+ZwCAQmcAgEZnAIBKZwCATmcAgIA5AACBOQAAggUAAFJnAIBaZwCAXmcAgIf4AgCGfB0A4bgEAL7IHADjQAYAYmcAgGZnAIBqZwCAbmcAgHJnAIB2ZwCAemcAgH5nAICCZwCAhmcAgIpnAIDvsAcAjmcAgJJnAICWZwCAmmcAgO/IAACeZwCAomcAgKZnAIDvQAYAqmcAgOH8BgCuZwCA4xwGALJnAIDhlAEAtmcAgONkBgCAEQAAgRkAAIIpAACz/QEAumcAgLWdAQC2lQEAvmcAgMJnAICEbB0AuoUBALuZAQC8iQEAvVEBAL5RAQC/UQEAozEeAFZnAIDGZwCAymcAgM5nAICmWR4ApVEeANJnAICrVR4AqkkeAIYIAwCHbAMAr50eAK6dHgCtnR4ArEUeANZnAICzCR8A2mcAgN5nAIC2CR8A4mcAgOZnAIC1CR8AugUfALsNHwDqZwCA7mcAgL4FHwC/CR8AvBUfAL0NHwCw5R8Ase0fALLlHwCz/R8AtOUfALXpHwC2GR8AtxkfALgpHwC5NR8Auj0fALs1HwC8ER8AvR0fAL4JHwC/BR8A8mcAgPZnAIDmZgCA+mcAgP5nAIACaACABmgAgApoAICo0R8AqdEfAKqlHwCrvR8ArKUfAK2tHwCupR8Ar50fAKNNHgAOaACAEm
gAgBZoAIAaaACApk0eAKVNHgAeaACAq0keAKpBHgAiaACAJmgAgK9NHgCuQR4ArUkeAKxRHgCADQAAgRUAAIIdAAAqaACALmgAgDJoAICEtAEAvrQBAL/oAQA6aACAhkgHAIc0AACEvAYAPmgAgEJoAIC+tAYAqI0BAKmVAQCqlQEAq80BAKzZAQCt2QEArs0BAK/FAQBGaACASmgAgE5oAIBSaACAVmgAgFpoAIBeaACAYmgAgLgdAQC5wQAAusEAALvBAAC8wQAAvckAAL7xAAC/8QAAsIkBALGJAQCyKQEAsykBALQ9AQC1JQEAti0BALclAQC7bQIAum0CAGZoAIBqaACAv8ECAL7ZAgC93QIAvN0CALM9AgBuaACAcmgAgHZoAICE/AYAtnkCALVxAgB6aACAqikCAKspAgB+aACAgmgAgK6dAgCvhQIArJkCAK2ZAgCGaACAo3kCAIpoAICOaACApj0CAJJoAICWaACApTUCAIJtJwCDjSoAhqgFAIdsAwCGmS4Ah80vAIQRLgCFmS4AiiESAIspEgCaaACAnmgAgI6RFgCPHRYAjBESAI0RFgCScRoAk+UaAKJoAIDvlHYAlvEeAJflHgCUSRoAlRkeAJopAgCb4QIAqmgAgK5oAICyaACA4SASAJzxAgDjIBYAnyEfAJ7BHwCdmRsAnC0bAJuhGwCavRcAmTkXAJixFwCXiRMAlqkTAJWpEwCUdS4AkzkvAJIxLwCRsS8AkDUrAI+tJgDjeB8A0gAAAOFcHwCCmQEAtmgAgIDxAQCB8QEAvqgHALpoAIC+aACAwmgAgIS8BgDvLB8AxmgAgMpoAIDhpB4A48wAAON8HgDhvAEAzmgAgNJoAIDWaACAhJwGANpoAIC+bAYA3mgAgOJoAIDmaACA7xAAAO8EHgDqaACA7mgAgPJoAID2aACA+mgAgP5oAIACaQCABmkAgAppAICAPQAAgQkAAILJBwAOaQCAo/kDAKLxAwChMQMAoM0fALBJcQCxAXwAsgl8ALMhfQC0AXgAtRV4ADZoAICmaACAEmkAgL4oDgCGDAAAh4wDABZpAIAaaQCAHmkAgCJpAIAmaQCAoV0AAKJVAACjfQAApAEMAKUVDACm9QwApwEIAKghCACpxQgAqgF0AKsJdACsAXQArR11AK55cACveXAAqOUFAKnxBQCq8QUAqy0FAKw1BQCtPQUArjUFAK8tBQAqaQCALmkAgDJpAIA2aQCAOmkAgD5pAIBCaQCARmkAgLj9BgC5jQYAuoUGALutBgC8uQYAvbkGAL6tBgC/pQYAsFUFALFdBQCyVQUAs+UGALT9BgC10QYAttEGALfRBgCzeQQASmkAgE5pAIBSaQCAVmkAgLa9BAC1vQQAWmkAgLuZBAC6kQQAXmkAgGJpAIC/FQcAvjkHAL0xBwC8gQQAZmkAgKM9BABqaQCAbmkAgKb5BAByaQCAdmkAgKX5BACq1QQAq90EAHppAIB+aQCArn0HAK9RBwCsxQQArXUHAKhpBwCpaQcAqnkHAKvZBgCs9QYArf0GAK71BgCv5QYAgMkAAIHJAACCBQAAgmkAgIZwDwCHNAAAimkAgI5pAIC4fQYAuQUGALoNBgC7BQYAvB0GAL0FBgC+DQYAvwUGALCdBgCxdQYAsn0GALN1BgC0UQYAtV0GALZVBgC3TQYAs/EEAJJpAICWaQCAmmkAgJ5pAIC2fQUAtX0FAKJpAIC7sQUAulkFAKZpAICqaQCAv5kFAL6VBQC9oQUAvKkFAK5pAICjtQQAsmkAgLZpAICmOQUAumkAgL5pAIClOQUAqh0FAKv1BQDCaQCAxmkAgK7RBQCv3QUArO0FAK3lBQCpuQIAqLECAKvJAgCqsQIArTUCAKw1AgCvNQIArjUCAMppAIDOaQCA0mkAgNZpAIDaaQCA3mkAgOJpAIDmaQCAuekDALjZAwC7iQMAuuEDAL2dAwC8nQMAv4EDAL6JAwCxVQIAsFUCALNVAgCyVQIAtfkDALTxAwC36QMAtvEDALM9AwDqaQCA7mkAgPJpAID6aQCAtrEDALW5AwD+aQCAu5UDALqVAwCGiAwAh6ANAL85AgC+MQIAvYUDALyFAwACagCAo3kDAAZqAIAKagCApvUDAA5qAIASagCApf0DAKrRAwCr0QMAFmoAgBpqAICudQIAr30CAKzBAwCtwQMAgIUAAIGNAACChQAA79AGAOOwBwDj9AQA4QgHAOHsBADvOAYA7yAEAL6kDAAeagCAImoAgOGEAQAmagCA49wGACpqAIAuagCAhMANALPJAQAyagCAtdkBALbJAQA2agCAOmoAgD5qAIC6xQEAu60BALy5AQC9uQEAvq0BAL+lAQCwLQ4AsUUOALJBDgCzQQ4AtEUOALVNDgC2cQ4At3EOALiBDgC5gQ4AuoEOALuBDgC8gQ4AvYEOAL6BDgC/gQ4A9mkAgEJqAIBGagCASmoAgIZpAIBOagCAUmoAgFZqAICo2Q0AqdkNAKptDgCrZQ4ArH0OAK1lDgCuZQ4Ar1UOAKOFDgCCLQAAgRUAAIAdAABaagCApoUOAKWVDgBeagCAq+EOAKqJDgBiagCAZmoAgK/pDgCu4Q4ArfUOAKz1DgBqagCAs4UPAIZoAACHHAMAtoUPAG5qAIByagCAtZEPALqNDwC7SQ8AdmoAgHpqAIC+MQ8AvzEPALxJDwC9RQ8AqBEOAKkZDgCqSQ4Aq0UOAKxdDgCtQQ4ArkEOAK91DgB+agCAgmoAgIZqAICKagCAjmoAgJJqAICWagCAmmoAgLihDgC5oQ4Aug0BALsFAQC8HQEAvQEBAL4BAQC/AQEAsA0OALHJDgCy2Q4As9UOALSxDgC1sQ4AtqkOALehDgCjwQ4AnmoAgKJqAICmagCAqmoAgKbBDgCl1Q4ArmoAgKsNDgCqyQ4AsmoAgLZqAICvdQ4ArnUOAK0BDgCsDQ4AumoAgL5qAIDCagCAxmoAgIANAACBNQAAgj0AAMpqAIDOagCA0moAgISEAQC+hAEAhjAHAIf4AADaagCA3moAgKjBAgCp0QIAqtECAKvlAgCs/QIArTUDAK49AwCvNQMA4moAgOZqAIDqagCA7moAgPJqAID2agCA+moAgP5qAIC40QMAudkDALrhAwC74QMAvJEDAL2RAwC+kQMAv5EDALBNAwCxVQMAsl0DALNVAwC0TQMAtfEDALbxAwC38QMAu7EDALqpAwACawCAvoQDAL8VAwC+qQMAvaEDALypAwCzeQIABmsAgAprAIAOawCAEmsAgLaVAwC1VQIAFmsAgKrtAwCr9QMAGmsAgB5rAICu7QMAr1EDAKztAwCt5QMAImsAgKM9AgAmawCAKmsAgKbRAwAuawCAMmsAgKURAgA2awCAgiEAAIEVAACAFQAA7wQAAISUAgA6awCAPmsAgOPYAABCawCA4fgBAEprAIBOawCAUmsAgFZrAIBaawCAhmAFAIcIBQBeawCAs20BAGJrAIC1fQEAtnUBAGZrAIBqawCAbmsAgLpRAQC7UQEAvPkBA
L3RAQC+0QEAv9EBAHJrAICjpQEAdmsAgHprAICmvQEAfmsAgIJrAICltQEAqpkBAKuZAQCGawCAimsAgK4ZAQCvGQEArDEBAK0ZAQCOawCA4fQOAJJrAIDjFA4A9AAAAOF8DACWawCA41AKAJprAICeawCAviAEAO8wDQCiawCApmsAgIQ0BADvrA4AsDkGALE5BgCygQYAs6kGALS5BgC1uQYAtqkGALehBgC46QYAuekGALrJBgC7xQYAvN0GAL3BBgC+wQYAvz0HAEZrAICCHQAAgR0AAIAdAACqawCArmsAgLJrAIDWagCAqJkFAKmZBQCqSQYAq0kGAKxZBgCtWQYArkkGAK9JBgCorQcAqbUHAKq9BwCrtQcArK0HAK3dBwCuyQcAr8EHALZrAIC6awCAhogDAIcQAwC+awCAwmsAgMZrAIDKawCAuG0HALkFBwC6AQcAuxUHALwxBwC9MQcAvikHAL8pBwCwgQcAsYEHALJpBwCzZQcAtH0HALVhBwC2YQcAt1UHALM1BgDOawCA0msAgNZrAIDaawCAtl0GALUlBgDeawCAu0UGALpFBgDiawCA5msAgL+lBgC+uQYAvbEGALy9BgDqawCAo3EGAO5rAIDyawCAphkGAPZrAID6awCApWEGAKoBBgCrAQYA/msAgAJsAICu/QYAr+EGAKz5BgCt9QYAqCUBAKk1AQCqPQEAqzUBAKwtAQCtkQAArpEAAK+RAAAGbACACmwAgA5sAIASbACAFmwAgIK9AwCBvQMAgL0DALiZAAC5rQAAuqUAALttAAC8dQAAvX0AAL51AAC/bQAAsPEAALH5AACywQAAs8EAALSxAAC1vQAAtrUAALepAAAabACAHmwAgCJsAICEgAIAvhwCACpsAICG+HwAh8wCAISsAwAubACAMmwAgDZsAIA6bACAPmwAgEJsAIBGbACAs/UCAEpsAIBObACAkgAAAFJsAIC2UQMAteUCAFZsAIC7fQMAunUDAFpsAIBebACAvzkDAL41AwC9VQMAvFUDAKM1AgBibACAZmwAgGpsAIBubACAppEDAKUlAgBybACAq70DAKq1AwB2bACAemwAgK/5AwCu9QMArZUDAKyVAwC+wAMAfmwAgIJsAICGbACAgA0AAIE1AACCPQAAimwAgI5sAICSbACAhsh8AIcAAwCabACAnmwAgKJsAICmbACAqmwAgK5sAICybACAtmwAgLpsAIC+bACAwmwAgO/0AwCE7HwA4ZQBAMZsAIDjMAMAymwAgM5sAIDSbACA1mwAgLNpAQDabACA3mwAgOJsAIDmbACAtmEBALVpAQDqbACAuykBALohAQDubACA8mwAgL8dAQC+HQEAvSUBALwtAQD2bACA+mwAgP5sAICjpQEAAm0AgKWlAQCmrQEAvlR8AIaAfACH7HwAqu0BAKvlAQCs4QEArekBAK7RAQCv0QEACm0AgOGcBgCEBH8A4yQGAOPUBgAObQCA4TAEABJtAIDvlAcAgnUAAIFhAACAaQAAFm0AgBptAIAebQCA7+wGALiNfgC5lX4AupV+ALulfgC8vX4AvdF+AL7RfgC/0X4AsGV+ALFtfgCyeX4As3F+ALRZfgC1WX4Atr1+ALe1fgCoVX4AqWF+AKphfgCrYX4ArGF+AK1hfgCuYX4Ar2F+ACJtAICWbACAJmwAgCZtAIAGbQCAKm0AgC5tAIAybQCAqHF+AKlxfgCqcX4Aq3F+AKyRfwCtkX8ArpF/AK+RfwA2bQCAOm0AgD5tAIBCbQCARm0AgEptAIBObQCAUm0AgLiFfwC5jX8AuoV/ALudfwC8jX8Avb1/AL61fwC/XX8AsPF/ALHxfwCy8X8As8V/ALTBfwC1wX8AtsF/ALfBfwCz+X8AVm0AgFptAIBebQCAYm0AgLYRfgC1GX4AZm0AgLs1fgC6NX4Aam0AgG5tAIC/BX4AvgV+AL0RfgC8JX4AghUAAKO9fwCAYQAAgWEAAKZVfgBybQCAvpABAKVdfgCqcX4Aq3F+AHZtAIB6bQCArkF+AK9BfgCsYX4ArVV+AKhBfgCpUX4AqlV+AKt9fgCsZX4ArW1+AK75AQCv8QEAhgAAAIc0AQB+bQCAgm0AgIZtAICKbQCAjm0AgJJtAIC4dQEAuX0BALp1AQC7yQAAvNkAAL3ZAAC+yQAAv8EAALCVAQCxnQEAspUBALNNAQC0VQEAtV0BALZVAQC3TQEAs919AJZtAICabQCAnm0AgKJtAIC27X0Ate19AKZtAIC7WQIAulECAKptAICubQCAv5kCAL6RAgC9mQIAvEECALJtAICjmX0Atm0AgLptAICmqX0Avm0AgMJtAIClqX0AqhUCAKsdAgDGbQCAym0AgK7VAgCv3QIArAUCAK3dAgDObQCA0m0AgNZtAIDabQCAgB0AAIEJAACCOQAA3m0AgOJtAIC+AAQA6m0AgO5tAIDybQCA9m0AgPptAID+bQCAhIwDAAJuAICHCAMAhuwEAAZuAIDviAIACm4AgA5uAICEbAQA4zQCABJuAIDhVAEAFm4AgBpuAIAebgCAIm4AgKhtAgCprQIAqqUCAKu9AgCspQIAra0CAK6lAgCvGQEAvqwEACZuAIAqbgCALm4AgDJuAIA2bgCAOm4AgD5uAIC4DQEAuREBALoRAQC7JQEAvD0BAL3VAQC+3QEAv9UBALBpAQCxaQEAsnkBALNxAQC0WQEAtVkBALY5AQC3NQEAsy0CAEJuAIBGbgCASm4AgE5uAIC2LQIAtS0CAFJuAIC7rQEAuq0BAFpuAIBebgCAv50BAL6dAQC9pQEAvK0BAIBNAACBVQAAglUAAO9sAABibgCA7+x/AO+8fgBmbgCA4RB/AOPUfwDj2H4A4ex/AGpuAIDhTH4Abm4AgOMkfgDmbQCAVm4AgKsFBgCqBQYArQ0GAKwFBgCvNQYArjUGAIYAAwCHKAMAo4UFAHJuAIClhQUAdm4AgHpuAICmhQUAs/EGAH5uAICCbgCAhm4AgIpuAIC26QYAteEGAI5uAIC7vQYAur0GAJJuAICWbgCAv4kGAL6BBgC9iQYAvJUGAKgpBgCpKQYAqjkGAKs5BgCsKQYArSkGAK5dBgCvTQYAmm4AgJ5uAICibgCApm4AgKpuAICubgCAsm4AgLZuAIC46QcAuekHALr5BwC7+QcAvOkHAL3pBwC+XQcAv1UHALA5BgCxOQYAsgEGALMdBgC0BQYAtQ0GALYFBgC32QcAo7EHAIItAACBFQAAgB0AALpuAICmqQcApaEHAL5uAICr/QcAqv0HAMJuAICEpAIAr8kHAK7BBwCtyQcArNUHAL7MAQCzlQYAxm4AgMpuAIC2qQYAzm4AgNJuAIC1rQYAulkBALshAQCGyAAAhwwBAL4hAQC/KQEAvDEBAL0xAQCoKQYAqSkGAKpZBgCrUQYArGEGAK1tBgCutQEAr6kBAITgAQDWbgCA2m4AgN5uAIDibgCA5m4AgOpuAIDubgCAuGEBALlhAQC6YQEAu2EBALxhAQC9YQEAvmEBAL9h
AQCw2QEAsaEBALKhAQCzoQEAtKEBALWpAQC2kQEAt5EBAKPRBQDybgCA9m4AgPpuAID+bgCApu0FAKXpBQACbwCAq2UCAKodAgAGbwCACm8AgK9tAgCuZQIArXUCAKx1AgAObwCAEm8AgBZvAIAabwCAHm8AgCJvAIAmbwCAKm8AgIA9AACBCQAAghkAAC5vAIAybwCAOm8AgL48AwA+bwCAhgAMAIcUAwBCbwCAs9UDAEZvAIC1PQMAtjUDAEpvAIBObwCAv4wKALoRAwC7EQMAvLUAAL29AAC+tQAAv60AAFJvAIDjdAEAVm8AgOG8AQBabwCAXm8AgGJvAIBmbwCAam8AgG5vAIBybwCAdm8AgHpvAIDvdAIAfm8AgIJvAICoTQIAqVECAKpRAgCrqQIArLkCAK25AgCuqQIAr6kCAIRsDQCGbwCAim8AgI5vAICSbwCAlm8AgJpvAIC+dA0AuG0BALkFAQC6DQEAuwUBALwdAQC9BQEAvg0BAL8FAQCw2QIAsdkCALJtAQCzZQEAtH0BALVlAQC2ZQEAt1UBAOG4AQDhUAcA47QAAON8BwCAqQAAgQkAAII5AACebwCAom8AgKpvAICubwCAsm8AgO4AAAC2bwCA7wAAAO9kBgCGYAwAh+QMAKORAgC6bwCApXkCAL5vAIDCbwCApnECAMZvAIDKbwCAq1UCAKpVAgCt+QEArPEBAK/pAQCu8QEApm8AgDZvAIDObwCA0m8AgNZvAIDabwCA3m8AgOJvAICoVQ4AqVkOAKqhDgCrvQ4ArK0OAK2VDgCu+Q4Ar/UOALCRDgCxkQ4AspEOALORDgC0sQ4AtbEOALaxDgC3sQ4AuJEOALmdDgC6lQ4Au0kPALxZDwC9WQ8AvkkPAL9JDwCzCQ4A5m8AgOpvAIDubwCA8m8AgLY1DgC1BQ4A9m8AgLt1DgC6dQ4A+m8AgP5vAIC/VQ4AvlUOAL1lDgC8ZQ4AAnAAgKNNDgAGcACACnAAgKZxDgAOcACAEnAAgKVBDgCqMQ4AqzEOAISkAwC+pAMArhEOAK8RDgCsIQ4ArSEOAKilDgCprQ4AqqUOAKu5DgCs3Q4ArcEOAK7BDgCv/Q4AgO0BAIHxAQCC8QEAFnAAgIaQAQCHtAEAGnAAgB5wAIC4yQEAuckBALrZAQC70QEAvPkBAL35AQC+mQEAv5UBALCFDgCxbQEAsmUBALN9AQC0ZQEAtW0BALZlAQC3+QEAsy0OACJwAIAmcACAKnAAgC5wAIC2QQ4AtVUOADJwAIC7qQEAukEOADZwAIA6cACAv6kBAL6hAQC9qQEAvLEBAD5wAICjaQ4AQnAAgEZwAICmBQ4ASnAAgE5wAIClEQ4AqgUOAKvtAQBScACAVnAAgK7lAQCv7QEArPUBAK3tAQCoOQMAqTkDAKqNAwCrhQMArJ0DAK2FAwCuhQMAr7UDAFpwAIBecACAYnAAgGZwAIBqcACAbnAAgHJwAIB2cACAuGEAALlhAAC6YQAAu2EAALxhAAC9YQAAvmEAAL9hAACwzQMAsaUDALKhAwCzoQMAtKUDALWtAwC2kQMAt5EDAIANAACBEQAAghEAAHpwAIDv9AIAfnAAgIJwAIC+HAMA4xQCAISIAgDhgAEAinAAgI5wAICScACAh8gDAIY8BAC7AQMAumkDAJZwAICacACAvwkDAL4BAwC9FQMAvBUDALNlAwCecACAonAAgKZwAICqcACAtmUDALV1AwCucACAsnAAgLZwAIC6cACAo4kCAL5wAIClmQIApokCAMJwAICELAIAxnAAgKqFAgCr7QIArPkCAK35AgCu7QIAr+UCAMpwAIDOcACAvkQFAIRMBQDScACA1nAAgNpwAIDecACA4nAAgOZwAIDqcACA7nAAgIAZAACBGQAAggUAAPJwAIDhGA8A4VwOAOO4DgDjdAEA+nAAgP5wAIACcQCABnEAgIYABACHZAUACnEAgA5xAIAScQCAFnEAgO98DgDvqAEAs3UBABpxAIAecQCAInEAgCZxAIC2MQEAtRUBACpxAIC7HQEAuhUBAC5xAIAycQCAv+EAAL79AAC9/QAAvP0AAPZwAIA2cQCAOnEAgD5xAICGcACAQnEAgEZxAIBKcQCAqI0GAKmVBgCqnQYAq+UGAKz9BgCt0QYArtEGAK/RBgCwsQYAsbkGALJJBwCzSQcAtFkHALVFBwC2RQcAt3kHALghBwC5IQcAujkHALs5BwC8KQcAvSkHAL4ZBwC/GQcAozUGAE5xAIBScQCAVnEAgFpxAICmcQYApVUGAF5xAICrXQYAqlUGAGJxAIC+oAMAr6EHAK69BwCtvQcArL0HAIBRAACBWQAAgmEAALNVBwCF9AAAtX0HALZ1BwBmcQCAhgAcAIfkAQC6LQcAuyUHALw9BwC9JQcAviUHAL8VBwCokQYAqZEGAKqRBgCrkQYArLkGAK25BgCuqQYAr6kGAGpxAIBucQCAcnEAgHZxAICiIQEAozUBAKA5BQChEQQAuEkBALlJAQC6XQEAu1UBALxNAQC90QEAvtEBAL/RAQCwpQYAsa0GALKlBgCzvQYAtK0GALWdBgC2lQYAt3kBAKMZBgCPnXkAenEAgH5xAICCcQCApjkGAKUxBgCGcQCAq2kGAKphBgCKcQCAjnEAgK9ZBgCuaQYArWkGAKxxBgCeiQgAn8EFAJzJCQCdyQkAmqENAJu9DACYsQ0AmbkNAJahcQCXRXEAlEV1AJWxcQCSoXUAk7V1AJDleQCRzXkAil1yAItFcgCScQCAvoAcAI51DgCPZQ4AjLlyAI11DgCCOXoAgzl6AJZxAICacQCAhnF2AIeZdgCECXoAhW12AJptBwCbVQIAnnEAgKJxAICmcQCA4ZAAAJxZAgDjCBoAkgkPAJNlCgCqcQCA7zgWAJZ1BgCXdQYAlH0KAJU1CwCpjRYAqIUWAKsBEACqMRYArXESAKy1EgCvuS4ArgEsAKF9AgCucQCAo6EeAKKpHgClsRoApPUfAKflGwCmsRoAhMwDAIRMHACycQCAtnEAgLpxAIC+cQCAwnEAgMZxAICxASgAsNkuALONKgCy6SoAtfUmALQBJACEcB0AynEAgID9AQCBFQAAgh0AAL6AHADOcQCA0nEAgIe4AgCGPB0A2nEAgN5xAIDicQCA5nEAgOpxAIDucQCA8nEAgPZxAID6cQCA/nEAgAJyAIAGcgCA44ADAApyAIDhoAEADnIAgO+UAwAScgCAFnIAgBpyAIAecgCAInIAgCZyAIAqcgCALnIAgOE8BgAycgCA49AGADZyAIDhMAcAOnIAgOOsBgCAOQAAgRUAAIIdAADvHAYAPnIAgEJyAIC+uB8A7+gBALPpAgBKcgCAh8QcAIbsHABOcgCAtlkCALVRAgBScgCAu00CALpNAgBWcgCAWnIAgL+5AQC+2QEAvdEBALz1AQCjKR0A1nEAgEZyAIBecgCAYnIAgKaZHQClkR0AZnIAgKuNHQCqjR0AanIAgG5yAICveR4ArhkeAK0RHgCsNR4AcnIAgLNtHwB
2cgCAenIAgLZlHwB+cgCAgnIAgLVtHwC6IR8AuyEfAIZyAICKcgCAviUfAL8pHwC8MR8AvTEfAKihHwCpoR8AqqEfAKuhHwCsoR8AraEfAK6hHwCvoR8AjnIAgJJyAICWcgCAmnIAgJ5yAICicgCApnIAgKpyAIC4rR8AubUfALq9HwC7tR8AvK0fAL1VHwC+UR8Av00fALChHwCxoR8AsqEfALOhHwC0pR8AtakfALadHwC3lR8AoykeAIIZAACBGQAAgLEBAK5yAICmIR4ApSkeALJyAICrZR4AqmUeAIaIAACH/AEAr20eAK5hHgCtdR4ArHUeALZyAICzmR4AunIAgL5yAIC2XQEAwnIAgMZyAIC1sR4AukkBALtJAQDKcgCAznIAgL49AQC/IQEAvDkBAL01AQCoRR4AqVUeAKpVHgCrZR4ArH0eAK2ZAQCuiQEAr4EBAISsAADScgCA1nIAgNpyAIDecgCA4nIAgOZyAIDqcgCAuK0BALllAQC6bQEAu2UBALx9AQC9ZQEAvm0BAL9lAQCwyQEAsckBALKpAQCzpQEAtL0BALWhAQC2oQEAt5UBALhpHAC5oRwAusEcALvBHAC8wRwAvcEcAL7BHAC/wRwAsIkfALGJHwCyIRwAswUcALQdHAC1fRwAtnUcALdtHACoYR8AqWEfAKphHwCrYR8ArNkfAK3ZHwCuyR8Ar8EfAO5yAIDycgCA9nIAgPpyAID+cgCAAnMAgAZzAIAKcwCADnMAgBJzAIC+AAQAo1EdABZzAICleR0AppUCABpzAIAecwCAInMAgKqBAgCrgQIArPECAK39AgCu9QIAr+kCACpzAIDh9AEALnMAgON8AQCATQAAgXUAAIJ9AAAycwCAhsAEAIekBAA2cwCAOnMAgD5zAIBCcwCARnMAgO+MAgCoSQIAqUkCAKpdAgCrVQIArHkCAK15AgCuvQIAr7UCAISgBQBKcwCATnMAgFJzAIC+vAQAVnMAgFpzAIBecwCAuC0BALk1AQC6PQEAuzUBALwtAQC91QEAvt0BAL/NAQCwzQIAsdUCALLdAgCz1QIAtM0CALUVAQC2HQEAtxUBAOGEHgDjbB8A41wfAOFYHgBicwCAZnMAgGpzAIBucwCAcnMAgHZzAIB6cwCAfnMAgOkAAADv9B4A70weAIJzAICzlQIAhnMAgIpzAICOcwCAknMAgLa5AgC1sQIAmnMAgLtRAgC6SQIAhsgEAIesBAC/kQEAvkkCAL1BAgC8SQIAJnMAgKNRBQCecwCAlnMAgKZ9BQCicwCApnMAgKV1BQCqjQUAq5UFAKpzAICucwCAro0FAK9VBgCsjQUArYUFAICJBwCBiQcAgpkHALORBgCycwCAtbkGALapBgC2cwCAunMAgL5zAIC6TQcAu0UHALxdBwC9QQcAvkEHAL9BBwCoQQYAqU0GAKpVBgCrZQYArH0GAK1lBgCubQYAr2UGAMJzAIDGcwCAynMAgM5zAIDScwCA1nMAgNpzAIDecwCAuFkHALlZBwC6aQcAu2kHALx5BwC9eQcAvmUHAL8ZBwCwxQcAsc0HALLFBwCz2QcAtMkHALXJBwC2aQcAt2kHAKPdBwDicwCA5nMAgOpzAIDucwCApuUHAKX1BwDycwCAqwkGAKoBBgD2cwCA+nMAgK8NBgCuDQYArQ0GAKwRBgCAbQAAgQkAAIIZAAD+cwCAAnQAgISYAQC+kAEABnQAgIbAAACH5AEACnQAgA50AIASdACAFnQAgBp0AIAedACAqF0GAKmNAQCqnQEAq5UBAKy5AQCtuQEArskBAK/BAQCEoAAAInQAgCZ0AIAqdACALnQAgDJ0AIA2dACAOnQAgLh5AQC5eQEAus0AALvFAAC83QAAvcUAAL7FAAC/9QAAsIEBALGBAQCySQEAs0kBALRZAQC1WQEAtkkBALdJAQCzFQIAPnQAgEJ0AIBGdACASnQAgLY5AgC1MQIATnQAgLtFAgC6RQIAUnQAgFZ0AIC/nQIAvp0CAL2dAgC8nQIAhXw+AKNRAgBadACAXnQAgKZ9AgBidACAZnQAgKV1AgCqAQIAqwECAGp0AIBudACArtkCAK/ZAgCs2QIArdkCAIDpAACB6QAAggUAAHJ0AIC+AAwAenQAgIeoAwCGvAwAfnQAgIJ0AICGdACAinQAgI50AICSdACAlnQAgJp0AICedACAonQAgKZ0AICqdACA42ABAK50AIDhoAEAsnQAgO+IAgC2dACAunQAgL50AIDCdACAxnQAgMp0AIDOdACAqGkCAKlpAgCqeQIAq3kCAKxpAgCtaQIArr0CAK+1AgC+rAwA0nQAgNZ0AIDadACAgB0AAIEJAACCqQAA3nQAgLhRAQC5WQEAumEBALthAQC8GQEAvRkBAL4NAQC/BQEAsM0CALHVAgCy3QIAs9UCALTNAgC1cQEAtnEBALdxAQDjxAAA4XwHAOF4BgDjvAYA4nQAgIQYDQCGuAwAhzwNAL4sDwDqdACA7nQAgPJ0AIDvEAAA9nQAgPp0AIDvdAYA/nQAgAJ1AIAGdQCAs70CAAp1AIC1rQIAtqUCAA51AIASdQCAFnUAgLpFAgC7XQIAvEUCAL1NAgC+RQIAv/kBAHZ0AIClfQ0ApnUNAOZ0AIAadQCAHnUAgCJ1AICjbQ0ArJUNAK2dDQCulQ0ArykOACZ1AIAqdQCAqpUNAKuNDQCz5Q4ALnUAgDJ1AIA2dQCAOnUAgLblDgC19Q4APnUAgLuhDgC62Q4AQnUAgEZ1AIC/pQ4AvrkOAL2xDgC8uQ4AqBUOAKklDgCqLQ4AqyUOAKw9DgCtJQ4Ari0OAK8lDgCADQAAgRUAAIIdAABKdQCATnUAgFJ1AICEMAMAVnUAgLgpDgC5KQ4AujkOALs5DgC8KQ4AvSkOAL79DwC/9Q8AsF0OALElDgCyLQ4AsyUOALQ9DgC1IQ4AtiUOALcZDgCjpQ8AWnUAgIYoAQCHTAEAXnUAgKalDwCltQ8AYnUAgKvhDwCqmQ8AZnUAgGp1AICv5Q8ArvkPAK3xDwCs+Q8AbnUAgLPpDgBydQCAdnUAgLaRDgB6dQCAfnUAgLXlDgC6sQ4Au7kOAIJ1AICGdQCAvmEBAL9hAQC8mQ4AvZkOAKglDgCpLQ4AqiUOAKs5DgCsKQ4ArVUOAK5dDgCvVQ4AinUAgI51AICSdQCAlnUAgJp1AICedQCAonUAgKZ1AIC49QEAuYEBALqBAQC7gQEAvIEBAL2JAQC+sQEAv7EBALAxDgCxOQ4AsgkOALMJDgC04QEAteEBALbhAQC3zQEAo60NAKp1AICudQCAsnUAgLZ1AICm1Q0ApaENALp1AICr/Q0AqvUNAL51AIDCdQCAryUCAK4lAgCt3Q0ArN0NAIBdAACBbQAAgmUAALNRAwC+nAMAtXkDALYZAwDKdQCAhOACAM51AIC6PQMAuzUDALwZAwC9GQMAvtkDAL/ZAwCohQMAqZUDAKqVAwCrpQMArL0DAK3VAwCu0QMAr9EDAIYABACHNAMAv6AzANJ1AIDWdQ
CA2nUAgN51AIDidQCAuHEDALlxAwC6cQMAu3EDALzVAAC93QAAvtUAAL/NAACwtQMAsb0DALKBAwCzgQMAtFEDALVRAwC2UQMAt1EDAO+oAwDmdQCA6nUAgO51AICEHAIA8nUAgPZ1AID6dQCAviwFAP51AIACdgCABnYAgONAAwAKdgCA4SgAAA52AICjXQIAEnYAgBZ2AIAadgCAHnYAgKYVAgCldQIAInYAgKs5AgCqMQIAJnYAgCp2AICv1QIArtUCAK0VAgCsFQIA4ygBAOEADwDhCA4A4wgOAID9AACBCQAAgjkAAC52AIAydgCAOnYAgD52AIBCdgCA7+gOAEZ2AIBKdgCA72QOALNtAQBOdgCAhugEAIcMBQBSdgCAtm0BALVtAQBWdgCAu+0AALrtAABadgCAXnYAgL/VAAC+6QAAveEAALzpAACoXQYAqWEGAKqlBgCrvQYArKUGAK2tBgCupQYArxkHADZ2AIBidgCAZnYAgGp2AIBudgCAcnYAgHZ2AIB6dgCAuHUHALl5BwC6DQcAuwUHALwdBwC9BQcAvgUHAL81BwCwaQcAsWkHALJ9BwCzdQcAtG0HALVRBwC2UQcAt1EHAKMtBgB+dgCAgnYAgIZ2AICKdgCApi0GAKUtBgCOdgCAq60HAKqtBwCSdgCAlnYAgK+VBwCuqQcAraEHAKypBwCADQAAgRUAAIIdAACadgCAnnYAgKJ2AICEVAMAvlwAAKZ2AICqdgCAhugAAIdMAwCudgCAsnYAgLZ2AIC6dgCAvnYAgOMEBADCdgCA4bQFAMZ2AIDKdgCAznYAgNJ2AIDWdgCA2nYAgN52AIDidgCA5nYAgO/sBADqdgCA7nYAgLPtBgDydgCA9nYAgPp2AID+dgCAtpEGALXhBgACdwCAu40GALqNBgAGdwCACncAgL9BAQC+WQEAvVEBALxZAQCoJQYAqS0GAKolBgCrOQYArCkGAK1RBgCuSQYAr0EGAIDNAACBCQAAghkAAA53AIASdwCAhCwBAL40AAAadwCAuP0BALlBAQC6QQEAu0EBALxBAQC9SQEAvnEBAL9xAQCwCQYAsQkGALLNAQCzxQEAtN0BALXFAQC2zQEAt8UBAIagPACHRAMAHncAgKOhBQAidwCApa0FAKbdBQAmdwCAKncAgL4oPACqwQUAq8EFAKwVAgCtHQIArhUCAK8NAgC2QQMALncAgDJ3AIC1sQIANncAgLOhAgA6dwCAPncAgL5FAwC/TQMAvHUDAL1NAwC6ZQMAu20DAEJ3AIBGdwCASncAgE53AIDGdQCAUncAgFZ3AIBadwCAXncAgGJ3AICoRQIAqVUCAKpdAgCrVQIArE0CAK21AwCusQMAr60DALDVAwCx3QMAstUDALPtAwC09QMAtf0DALb1AwC37QMAuNkDALnZAwC6rQMAu6UDALy9AwC9pQMAvqUDAL+VAwCj9QMAZncAgGp3AIBudwCAcncAgKYVAgCl5QMAdncAgKs5AgCqMQIAencAgH53AICvGQIArhECAK0ZAgCsIQIAgGkAAIFpAACCBQAAgncAgIp3AICOdwCAkncAgO8cAACEbAIA4ZQBAJZ3AIDjyAAAmncAgJ53AICGWDwAh1A9AKJ3AICmdwCAqncAgISEPQCudwCAsncAgLZ3AIDvuAEAvmw8AOF0BgC6dwCA42QBAL53AIDCdwCAxncAgMp3AICz0QEAzncAgNJ3AIDWdwCA2ncAgLaRAQC1+QEA3ncAgLu9AQC6vQEA4ncAgOZ3AIC/dQEAvnUBAL2FAQC8hQEAqL09AKkNPgCqGT4AqxE+AKwxPgCtUT4ArlE+AK9NPgCGdwCAgh0AAIEdAACAHQAA6ncAgO53AIDydwCA9ncAgLjVPgC53T4AutU+ALtJPwC8WT8AvVk/AL5JPwC/QT8AsDk+ALE5PgCyET4AsxE+ALTxPgC18T4AtvU+ALftPgCjkT4A+ncAgIYoAACHwAMA/ncAgKbRPgCluT4AAngAgKv9PgCq/T4ABngAgAp4AICvNT4ArjU+AK3FPgCsxT4ADngAgLOdPwASeACAFngAgLalPwAaeACAHngAgLWtPwC6aT8Au3U/ACJ4AIAmeACAvlk/AL9FPwC8bT8AvWU/ACp4AIAueACAMngAgDZ4AIDjYDwAOngAgOEAPQA+eACA7/w9AEJ4AIBGeACASngAgE54AIBSeACAVngAgFp4AICjGT4AghkAAIEZAACAcQAAXngAgKYhPgClKT4AYngAgKvxPgCq7T4AhCQBAL4kAQCvwT4Art0+AK3hPgCs6T4AqNE+AKnRPgCq0T4Aq+U+AKzhPgCt4T4Arhk+AK8ZPgCGAAAAh4QAAGp4AIBueACAcngAgHZ4AIB6eACAfngAgLh9PgC5AT4AugE+ALsBPgC8AT4AvQk+AL4xPgC/MT4AsGk+ALF1PgCyfT4As3U+ALRZPgC1RT4Atk0+ALdFPgCohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAIJ4AICGeACAingAgL8k5gGOeACAkngAgJZ4AICaeACAuFUDALlZAwC6bQMAu2UDALx9AwC9ZQMAvm0DAL9lAwCwtQIAsb0CALKBAgCzgQIAtHEDALVxAwC2cQMAt3EDALMdAgCeeACAongAgKZ4AICEiAMAtlUCALU1AgAWdwCAu3kCALpxAgCqeACArngAgL+1AwC+tQMAvVUCALxVAgCyeACAo1kCALZ4AIC6eACAphECAL54AIDCeACApXECAKo1AgCrPQIAxngAgMp4AICu8QMAr/EDAKwRAgCtEQIAqKkCAKmpAgCquQIAq7kCAKypAgCtqQIArjkBAK85AQCAzQEAgQkAAIIZAADOeACA0ngAgL64BQDaeACA3ngAgLjpAQC56QEAuokBALuFAQC8nQEAvYEBAL6BAQC/tQEAsEkBALFVAQCyXQEAs1UBALRNAQC18QEAtvEBALfxAQDvFAAA4ngAgIaoBQCH3AUA5ngAgIRYBADqeACA78Q+AO54AIDhxD4A8ngAgOMwPgDjyAAA9ngAgOEoAQD6eACAtn0CAP54AIACeQCAtXUCAAZ5AICzZQIACnkAgA55AIC+3QEAv2EBALzdAQC91QEAutkBALvFAQASeQCAFnkAgKOxBQDWeACAGnkAgB55AIAieQCApqkFAKWhBQAmeQCAqxEGAKoNBgAqeQCALnkAgK+1BgCuCQYArQEGAKwJBgAyeQCANnkAgDp5AIA+eQCAgBkAAIEZAACCBQAAQnkAgL5sAwBGeQCAhsgAAIccAwBKeQCATnkAgFJ5AIBWeQCAqLkHAKm5BwCqDQcAqx0HAKwJBwCtNQcArjEHAK8pBwCEqAMAWnkAgF55AIBieQCAZnkAgGp5AIBueQCAcnkAgLjJAAC5yQAAutkAALvRAAC8+QAAvfkAAL6ZAAC/mQAAsF0HALEhBwCyIQcAsz0HALQpBwC1KQcAtgEHALcBBwCzhQYAdnkAgHp5AIB+eQCAgnkAgLa1BgC1gQYAh
nkAgLvlBgC6mQYAinkAgI55AIC/7QYAvu0GAL3pBgC89QYAknkAgJZ5AICaeQCAnnkAgKJ5AICmeQCAqnkAgO+QBACueQCA4dwGALJ5AIDj7AUAgCkAAIEVAACCEQAAvnwBAKMFBgC6eQCAhigAAIdMAQC+eQCApjUGAKUBBgDCeQCAq2UGAKoZBgDGeQCAynkAgK9tBgCubQYArWkGAKx1BgDOeQCAs70BANJ5AIDWeQCAtnkBANp5AIDeeQCAtXkBALpVAQC7XQEA4nkAgOZ5AIC++QAAv/kAALxFAQC9+QAAqHECAKlxAgCqcQIAq3ECAKy1AgCtvQIArrUCAK+tAgCE7AwA6nkAgO55AIDyeQCA9nkAgPp5AID+eQCAAnoAgLhpAwC5aQMAugkDALsJAwC8GQMAvRkDAL4JAwC/CQMAsNUCALHdAgCy1QIAs2kDALR5AwC1eQMAtmkDALdhAwAGegCACnoAgA56AICj9QIAEnoAgKUxAgCmMQIAFnoAgBp6AIAeegCAqh0CAKsVAgCsDQIArbEDAK6xAwCvsQMAgGEAAIFhAACCBQAAInoAgIbwDACHYAMAvhAMACp6AIBmeACALnoAgDJ6AIA2egCAOnoAgD56AIBCegCARnoAgKiFAgCplQIAqpUCAKulAgCsvQIArdUCAK7RAgCv0QIASnoAgE56AIBSegCAVnoAgFp6AIBeegCAYnoAgGZ6AIC4dQEAuX0BALp1AQC7zQEAvNUBAL3dAQC+yQEAv8EBALC1AgCxvQIAsoECALOBAgC0VQEAtV0BALZVAQC3TQEA4RAGAIRIDADjDAYAanoAgISYDABuegCAcnoAgHZ6AIB6egCAfnoAgIJ6AICGegCAgXUAAIB1AADvIAEAgnUAAIp6AICOegCAknoAgL7ADACFtA4A4RACAO9cAADjABYA4ZABAJp6AIDjWAEA7zwHAJ56AICiegCAhgAIAIe4DACznQ0AJnoAgKZ6AICqegCArnoAgLbVDQC1tQ0AsnoAgLv5DQC68Q0AtnoAgLp6AIC/GQ4AvhEOAL3VDQC81Q0AvnoAgKPZDQDCegCAxnoAgKaRDQDKegCAznoAgKXxDQCqtQ0Aq70NANJ6AIDWegCArlUOAK9dDgCskQ0ArZENAKhdDgCpYQ4AqmEOAKthDgCsYQ4ArWEOAK5hDgCvYQ4A2noAgN56AIDiegCA5noAgOp6AIDuegCA8noAgPZ6AIC4TQ8AuVEPALpRDwC7UQ8AvHEPAL1xDwC+cQ8Av3EPALDBDwCxwQ8AssEPALPBDwC0wQ8AtcEPALbBDwC3wQ8As+kPAPp6AIC+gAEA/noAgJZ6AIC24Q8AtekPAAJ7AIC7BQ4AugUOAAp7AIAGewCAvwUOAL4FDgC9FQ4AvBUOAIFNAACAQQAA72gNAIJRAACG8AcAh9QBAA57AIASewCAFnsAgIRwAQAaewCAHnsAgOHgDgAiewCA40gNACZ7AICjaQ8AKnsAgC57AIAyewCANnsAgKZhDwClaQ8AOnsAgKuFDgCqhQ4APnsAgEJ7AICvhQ4AroUOAK2VDgCslQ4ARnsAgLMxDgBKewCATnsAgLbBAQBSewCAVnsAgLXRAQC6zQEAu6UBAFp7AIBeewCAvqUBAL+tAQC8sQEAvbEBAI/dJgCj8Q0AYnsAgGZ7AICmAQIAansAgG57AIClEQIAqg0CAKtlAgByewCAviAEAK5lAgCvbQIArHECAK1xAgCfoQwAnnkKAJ1pCgCc0QgAm7E2AJp1NgCZ0TQAmOEyAJdtMgCWZTIAlTU/AJRhPgCTcT4AkjU7AJFxOgCQeToAgJUAAIGdAACCoQAAensAgO9EAgDhdA8AfnsAgOMcDwDj1AEAgnsAgOHgAQDvXAEAo7UCAKJBAACh3Q4AoLkOALWpAwCGewCAhMAEALahAwCG8AUAh+QEALOFAwCKewCAvXEDALxpAwC/QQMAvnEDAI57AIC2eQCAu3EDALp5AwCC3ScAgwE7AL6EBwC+wAYAhhE/AIcZPwCEETsAhV06AIp9PgCLJTMAknsAgJZ7AICOuTUAjxU3AIw1MwCNgTMAkqE3AJPZCQC+xBkAmnsAgJaxDQCXUQ8AlHkLAJVhCwCaBQ8Am5EBAJ57AICiewCApnsAgN0AAACcfQMAqnsAgOFIDwCuewCA4xwOALJ7AIC2ewCAunsAgL57AIDCewCAsUEXALChFwCzqesBsgHoAbUB7AG0EesB74wOAMZ7AICpxR8AqAEcAKsBEACqkR8ArdkTAKzREwCv2RcArgUTAKHxAgDKewCAo8kHAKLBAgClARgApGUHAKehGwCm+RsAqCkFAKldBQCqVQUAq20FAKx5BQCteQUArm0FAK9hBQB2ewCAznsAgNJ7AIDWewCAgA0AAIGxAACCsQAA2nsAgLiJBQC5iQUAup0FALuVBQC8uQUAvbkFAL5RBgC/UQYAsOUFALHtBQCy5QUAs/0FALTtBQC13QUAttUFALe9BQCj3QUA3nsAgOJ7AICEDAAA5nsAgKb5BQCl8QUA6nsAgKspBQCqIQUAhpgAAIegAACvGQUArikFAK0pBQCsMQUA7nsAgLNhBgDyewCA9nsAgLYhBgD6ewCA/nsAgLUBBgC6rQcAu40HAAJ8AIAGfACAvo0HAL9xBwC8lQcAvY0HAL65BQC/uQUAvLkFAL25BQC6uQUAu7kFALi5BQC5uQUAtkkFALdJBQC0fQUAtXUFALJ5BQCzeQUAsBUFALF9BQCuXQUAr20FAKxFBQCtXQUAqqUKAKtdBQCovQoAqa0KAAp8AIAOfACAEnwAgBZ8AIAafACAHnwAgCJ8AIAmfACAqA0HAKkdBwCqLQcAq0kHAKxNBwCtZQcArrEGAK+xBgAqfACALnwAgDJ8AIA2fACAOnwAgD58AIBCfACARnwAgLhVBgC5XQYAulUGALtxBgC8NQYAvfEBAL7xAQC/8QEAsK0GALGNBgCyhQYAs50GALSNBgC1cQYAtnUGALdtBgCjpQQAgi0AAIEVAACAHQAASnwAgKblBAClxQQATnwAgKtJBQCqaQUAUnwAgFp8AICvtQUArkkFAK1JBQCsUQUAhmAcAIcIAwBefACAs4UCAGJ8AIC1gQIAtoECAGZ8AIBqfACAbnwAgLoJAwC7CQMAvBkDAL0ZAwC+CQMAvwkDAKxVAgCtXQIArmECAK9hAgCoDQIAqVUCAKpRAgCrUQIAhKwDAHJ8AIB2fACAenwAgIT8HQB+fACAgnwAgIZ8AIC8cQMAvXEDAL5xAwC/cQMAuHEDALlxAwC6cQMAu3EDALSRAwC1kQMAtpEDALeRAwCwkQMAsZEDALKRAwCzkQMAinwAgI58AICSfACAlnwAgJp8AIDhpAEAnnwAgOOAAQC+aBwAonwAgKZ8AIDv2AYAqnwAgK58AICyfACAtnwAgKOJAwCCLQAAgRUAAIAdAAC6fACApo0DAKWNAwC+fACAqwUCAKoFAgDCfACAynwAgK8FAgCuBQIArRUCAKwVAgCGIBwAh8Qd
CznQwAhIwFANC7AIDYuwCA3LsAgLalDAC1tQwA4LsAgLv5DAC68QwAhigFAIcgBQC/GQMAvhEDAL3dDAC83QwA5LsAgKPZDADouwCA7LsAgKbhDADwuwCA9LsAgKXxDACqtQwAq70MAPi7AID8uwCArlUDAK9dAwCsmQwArZkMAAC8AIAEvACACLwAgAy8AIAQvACAFLwAgBi8AIDvvAEAHLwAgOF8DgAgvACA41ABACS8AIAovACALLwAgDC8AICzlQIANLwAgDi8AIA8vACAQLwAgLa9AgC1uQIASLwAgLs5AgC6YQIAhsgEAIesBAC/GQIAvhECAL0ZAgC8IQIAo1UFAILVBwCBxQcAgMUHAEy8AICmfQUApXkFAFC8AICr+QUAqqEFAFS8AIBYvACAr9kFAK7RBQCt2QUArOEFAFy8AICzWQcAYLwAgGS8AIC2HQcAaLwAgGy8AIC1FQcAugkHALsJBwBwvACAdLwAgL75BwC/+QcAvPkHAL35BwDUuwCARLwAgHi8AIB8vACAgLwAgIS8AICIvACAjLwAgKitBwCptQcAqrUHAKvtBwCs+QcArfkHAK7tBwCv5QcAsKkHALGpBwCySQcAs0kHALRZBwC1WQcAtkkHALdJBwC4eQcAuUUHALpBBwC7XQcAvEUHAL1NBwC+RQcAvzkHAKMdBgCQvACAlLwAgJi8AICcvACAplkGAKVRBgCgvACAq00GAKpNBgCkvACAqLwAgK+9BgCuvQYArb0GAKy9BgCAbQAAgQkAAIIZAACsvACAsLwAgISYAQC+kAEAtLwAgIYAHACHxAEAuLwAgLy8AIDAvACAxLwAgMi8AIDMvACAqF0GAKmVAQCqlQEAq6UBAKy9AQCt1QEArtEBAK/RAQDQvACA1LwAgNi8AIDcvACA4LwAgOS8AIDovACA7LwAgLhZAQC5WQEAus0AALvFAAC83QAAvcUAAL7FAAC/9QAAsLUBALG9AQCygQEAs4EBALR5AQC1eQEAtmkBALdpAQCzHQIA8LwAgPS8AIC+gBwA+LwAgLZVAgC1NQIA/LwAgLt5AgC6cQIAAL0AgAS9AIC/vQIAvr0CAL1VAgC8VQIACL0AgKNZAgAMvQCAEL0AgKYRAgAUvQCAGL0AgKVxAgCqNQIAqz0CABy9AIAgvQCArvkCAK/5AgCsEQIArRECACi9AIAsvQCAvgQdAL4AHgAwvQCANL0AgDi9AIA8vQCAgPkAAIHNAACCxQAAhCADAIawHACHlAMAQL0AgES9AIBIvQCATL0AgFC9AIBUvQCA42wCAFi9AIDhoAEAXL0AgO8UAgBgvQCAZL0AgGi9AIBsvQCAcL0AgHS9AIB4vQCA4fAGAOE0BgDjTAAA4xgGAHy9AICAvQCAhL0AgIi9AICAPQAAgQkAAIIZAACMvQCAkL0AgIS8HQDvmAAA7zgHALMxAgDRAAAAh9gdAIZsHACYvQCAtikCALUhAgCcvQCAu80CALrNAgCgvQCApL0AgL/NAgC+zQIAvc0CALzNAgCyXQYAs2UGALANBgCxVQYAtn0GALedBQC0fQYAtXUGALqNBQC7zQUAuKUFALmFBQC+xQUAv8kFALzVBQC9zQUAqL0AgKy9AICwvQCAtL0AgLi9AIC8vQCAwL0AgMS9AICqtQYAq70GAKgBBwCpvQYAroEGAK+NBgCsmQYArZUGAKNxHQDIvQCAzL0AgNC9AIDUvQCApmkdAKVhHQDYvQCAq40dAKqNHQDcvQCA4L0AgK+NHQCujR0ArY0dAKyNHQDkvQCAs9UeAOi9AIDsvQCAts0eAPC9AID0vQCAtcUeALqhHgC7oR4A+L0AgPy9AIC+pR4Av6keALyxHgC9sR4AJL0AgJS9AIAAvgCAhAQDAID5AACB+QAAghEAAAS+AICoIR4AqSEeAKo5HgCrOR4ArCkeAK0pHgCuAR4ArwEeALABHgCxAR4AsgEeALMBHgC0BR4AtQkeALY9HgC3NR4AuA0eALkVHgC6HR4AuxUeALwNHgC95R8Avu0fAL/lHwCjkR8ACL4AgIYoAQCHSAEADL4AgKaJHwClgR8AEL4AgKvlHwCq5R8AFL4AgBi+AICv7R8AruEfAK31HwCs9R8AHL4AgLMtHgAgvgCAJL4AgLaVHgAovgCALL4AgLWdHgC6sR4Au7EeADC+AIA0vgCAvnUBAL99AQC8oR4AvaEeAKjRHgCp2R4AquEeAKvhHgCsUR4ArVEeAK5RHgCvUR4AOL4AgDy+AIBAvgCARL4AgEi+AIBMvgCAUL4AgFS+AIC43QEAue0BALrlAQC7jQEAvJkBAL2ZAQC+jQEAv4UBALAxHgCxMR4AsjEeALMxHgC09QEAtf0BALb1AQC37QEAo2kdAFi+AIBcvgCAYL4AgGS+AICm0R0ApdkdAGi+AICr9R0AqvUdAGy+AIBwvgCArzkCAK4xAgCt5R0ArOUdAIFpAACAWQAAvgAEAIJhAAB4vgCAfL4AgIC+AICEvgCAhOwDAIi+AICHiAMAhuwEAIy+AICQvgCAlL4AgJi+AICohQMAqZUDAKqVAwCrpQMArL0DAK3VAwCu0QMAr9EDAJy+AICgvgCApL4AgKi+AICsvgCAsL4AgLS+AIC4vgCAuHEDALlxAwC6cQMAu3EDALzVAAC93QAAvtUAAL/NAACwtQMAsb0DALKBAwCzgQMAtFEDALVRAwC2UQMAt1EDAOFUHgDhrB8A45QBAOMoHgDjYAMAvL4AgOEIAADAvgCA75ADAMS+AIDIvgCAzL4AgNC+AIDUvgCA70wfAO9MHwCzXQIA2L4AgNy+AIDgvgCA6L4AgLYVAgC1dQIA7L4AgLs5AgC6MQIAhCQFAL7gBAC/1QIAvtUCAL0VAgC8FQIAuJEdALmZHQC6oR0Au6EdALzRHQC93R0AvtUdAL/JHQCwCR4AsQkeALIZHgCzGR4AtAkeALUJHgC2vR0At7UdAKipHgCpqR4AqrkeAKu5HgCsqR4ArakeAK55HgCveR4AgKUAAIGtAACCpQAA8L4AgIbQBACH+AQA9L4AgPi+AIB0vgCA5L4AgPy+AIAAvwCABL8AgAi/AIAMvwCAEL8AgKhxBgCpcQYAqnEGAKtxBgCsVQYArUUGAK5NBgCvRQYAsD0GALHlBgCy7QYAs+UGALT9BgC15QYAtu0GALflBgC43QYAuXEHALp1BwC7SQcAvFkHAL1ZBwC+SQcAv0kHALPZBgAUvwCAGL8AgBy/AIAgvwCAtuUGALX9BgAkvwCAuwEGALrZBgAovwCALL8AgL8BBgC+GQYAvREGALwZBgAwvwCAo9kFADS/AIA4vwCAppEFADy/AIBAvwCApfEFAKq1BQCrvQUARL8AgEi/AICuUQUAr1EFAKyRBQCtkQUAo1kHAIIZAACBGQAAgOEBAEy/AICmZQcApX0HAFC/AICrgQcAqlkHAISgAgC+rAEAr4EHAK6ZBwCtkQcArJkHAFS/AICzqQYAhugAAIcsAQC2WQEAWL8AgFy/AIC1oQYAunUBALt9AQBgv
wCAZL8AgL75AQC/+QEAvGUBAL35AQCo0QYAqdkGAKplBgCrdQYArG0GAK2dAQCulQEAr40BAITsAQBovwCAbL8AgHC/AIB0vwCAeL8AgHy/AICAvwCAuGkBALlpAQC6CQEAuwUBALwdAQC9AQEAvgEBAL81AQCw9QEAsf0BALL1AQCzaQEAtHkBALV5AQC2aQEAt2EBAIS/AICIvwCAjL8AgKPhBQCQvwCApekFAKYRAgCUvwCAmL8AgJy/AICqPQIAqzUCAKwtAgCtsQIArrECAK+xAgCgvwCApL8AgL4EAwCEAAwAqL8AgKy/AICwvwCAtL8AgIANAACBFQAAgh0AALi/AIC8vwCAwL8AgIdEAwCG3AwAs+kDAMi/AIDMvwCA0L8AgNS/AIC2PQMAtT0DANi/AIC7GQMAuhEDANy/AIDgvwCAv7kAAL6xAAC9uQAAvAEDAOS/AIDhlAEA6L8AgON8AQDsvwCA8L8AgPS/AID4vwCA/L8AgADAAIAEwACACMAAgAzAAIAQwACAFMAAgO9MAgCoVQIAqV0CAKphAgCrYQIArLUCAK29AgCutQIAr60CAL5oDQAYwACAHMAAgCDAAIAkwACAgq0AAIGtAACArQAAuGEBALlhAQC6CQEAuwkBALwBAQC9AQEAvgEBAL8BAQCw1QIAsd0CALLVAgCzbQEAtHUBALV9AQC2aQEAt2EBAOFoBgDh8AcA47AAAOP0BgAowACALMAAgDDAAIA4wACAPMAAgEDAAIBEwACASMAAgL78DABMwACA72wAAO8oBgCjqQIAUMAAgIZoDACHBA0AVMAAgKZ9AgClfQIAWMAAgKtZAgCqUQIAXMAAgGDAAICv+QEArvEBAK35AQCsQQIAqIUOAKmNDgCqhQ4Aq50OAKyNDgCtvQ4ArrUOAK/dDgA0wACAZMAAgGjAAIBswACAcMAAgHTAAIB4wACAfMAAgLitDgC5tQ4Aur0OALu1DgC8dQ8AvX0PAL51DwC/bQ8AsKkOALG1DgCyvQ4As7UOALStDgC1lQ4Atp0OALeVDgCzDQ4AgMAAgITAAICIwACAjMAAgLY9DgC1BQ4AkMAAgLtxDgC6bQ4AlMAAgJjAAIC/UQ4AvmkOAL1hDgC8aQ4AghkAAKNJDgCAZQAAgRkAAKZ5DgCcwACAoMAAgKVBDgCqKQ4AqzUOAIS8AwCkwACAri0OAK8VDgCsLQ4ArSUOAKidDgCppQ4Aqq0OAKulDgCsvQ4AraEOAK7dDgCvzQ4AhiABAIdkAQCowACArMAAgLDAAIC0wACAuMAAgLzAAIC4eQEAuXkBALrNAQC7xQEAvN0BAL3FAQC+xQEAv/UBALC9DgCxjQ4AsoUOALNJAQC0WQEAtVkBALZJAQC3SQEAtS0OAMDAAIDEwACAtjkOAMjAAIDMwACAsz0OANDAAIC9hQEAvEkOAL+FAQC+hQEA1MAAgMS/AIC7UQ4AumEOAKNlDgDYwACA3MAAgODAAIDkwACApmEOAKV1DgDowACAqwkOAKo5DgDswACA8MAAgK/dAQCu3QEArd0BAKwRDgD0wACA+MAAgO/QDwD8wACAAMEAgATBAIAIwQCADMEAgBDBAIC+aAMAGMEAgBzBAIDhVA4AIMEAgONkDgAkwQCAgFkAAIFZAACCaQAAhIwDAIbwBACHFAMAKMEAgCzBAIAwwQCANMEAgDjBAIA8wQCAQMEAgETBAIBIwQCATMEAgFDBAIBUwQCAWMEAgFzBAIBgwQCAZMEAgGjBAIBswQCAqIkDAKmJAwCqmQMAq5kDAKyJAwCtiQMArj0DAK81AwCwUQMAsVEDALJVAwCzfQMAtBUDALUdAwC2FQMAtw0DALg9AwC5DQMAugUDALvtAAC89QAAvfkAAL7pAAC/6QAAcMEAgHTBAIB4wQCAsz0CAHzBAIC1LQIAtiUCAIDBAIC+aAUAiMEAgLq5AgC7uQIAvK0CAL2FAgC+/QIAv/UCAIBJAACBVQAAglUAAIQABQDvjAMAvhgEAId0BQCG/AQA4zwDAIzBAIDhUAAAkMEAgJTBAICYwQCAnMEAgKDBAICkwQCAqMEAgKzBAICwwQCAtMEAgLjBAIC8wQCA79QOAL4oBgDhdA4AwMEAgONUAQDEwQCAyMEAgMzBAIDQwQCAo/ECANTBAIDYwQCA3MEAgODBAICm6QIApeECAOTBAICrdQIAqnUCAOjBAIDswQCArzkCAK4xAgCtSQIArGECAKgpBgCpKQYAqj0GAKsxBgCsSQYArUkGAK55BgCveQYAhMEAgIIVAACBxQcAgMUHAPDBAICEaAMA9MEAgPjBAIC4yQYAuckGALrZBgC72QYAvMkGAL3JBgC+WQcAv1kHALAJBgCxCQYAshkGALMZBgC0CQYAtQkGALb5BgC3+QYAs7UGAPzBAICGrAAAh0ADAADCAIC2yQYAtcEGAATCAIC7zQYAus0GAAjCAIAMwgCAv80GAL7NBgC9zQYAvM0GABDCAICj8QYAFMIAgBjCAICmjQYAHMIAgCDCAIClhQYAqokGAKuJBgAkwgCAKMIAgK6JBgCviQYArIkGAK2JBgCoJQYAqWEGAKplBgCrfQYArGUGAK1tBgCuZQYAr50GACzCAIAwwgCANMIAgDjCAIA8wgCAQMIAgETCAIBIwgCAuPUGALn9BgC69QYAu4kGALyZBgC9mQYAvokGAL+BBgCw5QYAse0GALLlBgCz/QYAtOUGALXtBgC20QYAt80GAEzCAIC2/QYAtf0GAFDCAICz/QYAVMIAgFjCAIBcwgCAvzkGAL4xBgC9OQYAvCEGALs5BgC6MQYAFMEAgGDCAICjrQYAgnkAAIFVAACAVQAAhFwBAKatBgClrQYAaMIAgKtpBgCqYQYAhkh/AIfkAACvaQYArmEGAK1pBgCscQYAbMIAgO/cBwBwwgCAdMIAgHjCAIB8wgCAgMIAgITCAICIwgCAhKADAIzCAIC/JHkAkMIAgONoBwCUwgCA4XQGALPRAgCYwgCAvgQDAISAfQCcwgCAtvkCALXxAgCgwgCAu7UCALqpAgCkwgCAqMIAgL9RAwC+mQIAvZECALylAgCpBQIAqLkCAKsVAgCqHQIArT0CAKw9AgCvUQIArl0CAL5ofQCswgCAsMIAgLTCAIC4wgCAvMIAgMDCAIDEwgCAufEDALjpAwC78QMAuvkDAL1RAwC86QMAv00DAL5RAwCxNQIAsCkCALMBAgCyNQIAtdEDALQZAgC30QMAttkDAIIpAACjlQMAgB0AAIEVAACmvQMAyMIAgMzCAICltQMAqu0DAKvxAwDQwgCA2MIAgK7dAwCvFQIArOEDAK3VAwCGYH0Ah3h9ALNBAQCEAH8AtUEBANzCAIDgwgCAtkkBAOTCAIDowgCAu0EBALpNAQC9SQEAvEUBAL8pAQC+OQEA7MIAgO/cBgDwwgCA9MIAgPjCAID8wgCAAMMAgO8wBgCELH4A4eAGAATDAIDjiAEACMMAgON0AAAMwwCA4SwBAKPJAQAQwwCAFMMAgIVweQAYwwCA
psEBAKXJAQAcwwCAq8kBAKrFAQAgwwCAJMMAgK+hAQCusQEArcEBAKzNAQCo3X0AqQV+AKoBfgCrAX4ArAF+AK0BfgCuAX4ArwF+ANTCAIAowwCALMMAgDDDAIA0wwCAgp0AAIGdAACAnQAAuC1+ALnhfgC64X4Au+F+ALzhfgC94X4AvuF+AL/hfgCwQX4AsU1+ALJZfgCzVX4AtDV+ALUlfgC2JX4AtxV+AKitfwCp0X8AqtF/AKvtfwCs9X8ArRV/AK4RfwCvEX8AOMMAgDzDAIBAwwCARMMAgIbwAwCHuAAASMMAgEzDAIC4EX8AuRl/ALohfwC7IX8AvPUAAL39AAC+9QAAv+0AALBxfwCxcX8AsnF/ALNFfwC0QX8AtU1/ALY9fwC3NX8As1l+AFDDAIBUwwCAWMMAgFzDAIC2lX4AtX1+AGDDAIC7tX4AurV+AGTDAIBowwCAv4l+AL6FfgC9kX4AvKV+AGzDAICjHX4AcMMAgHTDAICm0X4AeMMAgHzDAIClOX4AqvF+AKvxfgCAwwCAhMMAgK7BfgCvzX4ArOF+AK3VfgCwrQAAscUAALLBAACzwQAAtMUAALXNAAC28QAAt/EAALhhAAC5YQAAumEAALt9AAC8ZQAAvW0AAL5lAAC/vQMAiMMAgIzDAICQwwCAZMIAgJTDAICYwwCAnMMAgKDDAICoWQEAqVkBAKrtAACr5QAArP0AAK3lAACu5QAAr9UAAKTDAICCHQAAgR0AAIAdAACowwCArMMAgLDDAIC+VAIAhoAEAIfsAgC4wwCAvMMAgMDDAIDEwwCAyMMAgL54AwDjdH4AzMMAgOG4fQDQwwCA1MMAgNjDAIDcwwCA4MMAgOTDAIDowwCA7MMAgPDDAIDvwH4A9MMAgPjDAID8wwCAs4UDAADEAIAExACACMQAgAzEAIC2hQMAtZUDABDEAIC74QMAuokDAL4kBgAUxACAv+kDAL7hAwC99QMAvPUDAIIpAACjwQMAgB0AAIEVAACmwQMAGMQAgBzEAICl0QMAqs0DAKulAwAgxACAheAFAK6lAwCvrQMArLEDAK2xAwDh+AMAKMQAgONcHwAsxACA7/QDADDEAICGPAcAh6wCAON8fgA0xACA4YABADjEAIA8xACAQMQAgO/kEwBExACAs3EBAEjEAIBMxACAUMQAgFTEAIC2EQEAtWEBAFjEAIC7OQEAujEBAFzEAIBgxACAvxkBAL4RAQC9GQEAvCEBAGTEAIBoxACAbMQAgHDEAIB0xACAeMQAgHzEAIDvxH8AgMQAgOH8fgCExACA4/B/AIANAACBdQAAgn0AAIjEAICMxACAkMQAgKP5AQC+AAgApekBAJjEAICcxACAppkBAISoBQCgxACAq7EBAKq5AQCtkQEArKkBAK+RAQCumQEAqCkGAKkpBgCqOQYAqzkGAKwpBgCtUQYArlUGAK9NBgAkxACAhCABAKTEAICUxACAo+EBAKKZBAChGQQAoPEFALg5BgC5OQYAus0GALvFBgC83QYAvcUGAL7FBgC/8QYAsDUGALE9BgCyNQYAsw0GALQVBgC1HQYAthUGALcJBgCPoWwAs5EHAIYoAQCHfAMAtqEHAKjEAICsxACAtbEHALrlBwC77QcAsMQAgLTEAIC+7QcAv90HALz1BwC97QcAn/l4AJ7leACdcXkAnCF8AJvxfACaYX0AmZlxAJjZcACX4XAAlnl0AJVtdACUbXQAk61pAJJxaACReWgAkB1uAIIhbQCD5W8AuMQAgLzEAICGTWgAh5V1AISZaQCFmWkAiqV1AIu5dQDAxACAxMQAgI5xcACPgXwAjDlxAI05cQCSYX0Ak6l9AMjEAIDMxACAlml5AJeZBACU4XgAlX15AJpBBQCbyQUA0MQAgNTEAIDYxACA3MQAgJypAADgxACAo4ENAKKpAQChqQEA5MQAgKexCQCmAQgApU0NAKSZDQCrkRUAqoUVAKkBFACocQkArx0QAK7pEQCtvREArAEQALMBGACy8RwAscEdALDJHQC0wwCA6MQAgLXhGAC0/RkA7MQAgPDEAID0xACA+MQAgIAdAACBCQAAgv0DAPzEAICjFQUAAMUAgIaIDACHPAMACMUAgKYlBQClNQUADMUAgKtpBQCqYQUAEMUAgBTFAICvWQUArmkFAK1pBQCscQUAGMUAgBzFAICEBAwAIMUAgCTFAIDhbAYAKMUAgOPsewAsxQCAMMUAgDTFAIDvqAYAOMUAgDzFAIBAxQCARMUAgKmNBQCogQUAq60FAKqZBQCtoQUArLkFAK+lBQCuqQUAhGgNAEjFAIBMxQCAUMUAgFTFAIBYxQCAXMUAgL70DAC5SQUAuEEFALtZBQC6QQUAvUkFALxBBQC/cQUAvn0FALGpBQCwoQUAs7kFALKhBQC1mQUAtKkFALd5BQC2kQUAqNUEAKndBACq7QQAqyUDAKyFAwCtjQMArrEDAK+xAwBgxQCAZMUAgGjFAIBsxQCAgBkAAIEZAACCBQAAcMUAgLgxAgC5MQIAujUCALvBAgC8hQIAvbUCAL69AgC/tQIAsGkCALFpAgCyQQIAs0ECALQ5AgC1OQIAthECALcRAgCGoAwAh0wNAHjFAIB8xQCA76QGAIDFAICExQCA78wHAOOUAQDhpAYA4TgBAONcBgCIxQCAjMUAgJDFAICUxQCAmMUAgJzFAICzLQQAoMUAgLVFAwCkxQCAqMUAgLZFAwCsxQCAsMUAgLvlAgC65QIAvd0CALzdAgC/tQIAvrUCAATFAIB0xQCAtMUAgLjFAIC8xQCAwMUAgMTFAIDIxQCAqDEOAKk5DgCqAQ4AqwEOAKxxDgCtcQ4ArnUOAK9tDgCwGQ4AsSUOALItDgCzJQ4AtCEOALUhDgC2IQ4AtyEOALjFDgC5zQ4AusUOALvdDgC8xQ4Avc0OAL5ZDwC/WQ8As6kOAMzFAIDQxQCA1MUAgNjFAIC20Q4AtdkOANzFAIC7wQ4Auv0OAODFAIC+LAAAv8UOAL7FDgC90Q4AvNkOAIJpAACj7Q4AgFkAAIFRAACmlQ4A5MUAgOjFAIClnQ4AqrkOAKuFDgCGyAAAh6wAAK6BDgCvgQ4ArJ0OAK2VDgDsxQCAs5EOAPDFAID0xQCAtqUOAPjFAID8xQCAta0OALrhDgC74Q4AAMYAgATGAIC+6Q4Av9UOALz1DgC96Q4Ao6UKAAjGAIAMxgCAEMYAgBTGAICmzQ0Apc0NABjGAICrbQwAqm0MABzGAIAgxgCArz0MAK49DACtVQwArFUMAKgJDgCpCQ4Aqh0OAKsVDgCsIQ4ArSEOAK4hDgCvIQ4AJMYAgCjGAIAsxgCAMMYAgDTGAIA4xgCAPMYAgEDGAIC4zQEAudUBALrdAQC71QEAvM0BAL1RAQC+UQEAv1EBALAhDgCxIQ4AsiUOALM5DgC0KQ4AtRUOALYdDgC39QEARMYAgEjGAIBMxgCAo5kNAFDGAIClpQ0Apq0NAL7cAgCE7AMAWMYAgKrpDQCr6Q0ArP0NAK3hDQCu4Q0Ar90
NAIBFAACBTQAAglkAAKNFAwBcxgCApUEDAKZBAwBgxgCAhsAEAIcAAwCqLQMAqyUDAKw9AwCtJQMAriUDAK8VAwCoWQIAqYUDAKqBAwCrgQMArIUDAK2NAwCusQMAr7EDAGTGAIBoxgCAbMYAgHDGAIB0xgCAeMYAgHzGAICAxgCAuGUDALltAwC6ZQMAu30DALxlAwC9bQMAvmUDAL/dAACwpQMAsa0DALKlAwCzvQMAtK0DALWdAwC2lQMAt10DALMJAgCExgCAiMYAgIzGAICQxgCAtg0CALUNAgCUxgCAu2kCALphAgCYxgCAnMYAgL9ZAgC+aQIAvWkCALxxAgCgxgCApMYAgKjGAICsxgCA4aABALDGAIDjaAMAtMYAgIEVAACAFQAA74wDAIIVAAC4xgCAvMYAgMDGAIC+cAUA4RgOAOGUDwDjOA8A49QPAISUAgDIxgCAzMYAgNDGAIDUxgCA2MYAgNzGAIDgxgCA5MYAgOjGAIDv7AEA7/gPAIZgBACHBAUAs5UBAITMBQC1dQEA7MYAgPDGAIC2dQEA9MYAgPjGAIC7UQEAulkBAL31AAC8SQEAv/UAAL71AACoJQYAqVUGAKpVBgCrrQYArLUGAK29BgCutQYAr60GAMTGAID8xgCAAMcAgATHAIAIxwCADMcAgBDHAIAUxwCAuGkHALlpBwC6CQcAuwkHALwZBwC9GQcAvg0HAL8BBwCw1QYAsd0GALLVBgCzaQcAtHkHALV5BwC2aQcAt2EHAKPdBgAYxwCAHMcAgCDHAIAkxwCApj0GAKU9BgAoxwCAqxkGAKoRBgAsxwCAMMcAgK+9BwCuvQcArb0HAKwBBgCAXQAAgW0AAIJlAACzUQcAvtgDALVxBwC2cQcANMcAgIbgAACHFAMAul0HALs5BwC8KQcAvRUHAL4dBwC/2QAAqJUGAKmdBgCqlQYAq60GAKy1BgCtvQYArrUGAK+tBgA4xwCAPMcAgEDHAIBExwCASMcAgEzHAIBQxwCAVMcAgLhxAQC5cQEAunEBALtxAQC81QEAvd0BAL7VAQC/zQEAsNUGALGxBgCysQYAs40GALSVBgC1UQEAtlEBALdRAQBYxwCAoxkGAFzHAIBgxwCApjkGAFTGAIBkxwCApTkGAKoVBgCrcQYAaMcAgGzHAICuVQYAr5EBAKxhBgCtXQYAcMcAgHTHAIB4xwCAfMcAgIDHAICExwCAiMcAgIzHAICQxwCAlMcAgJjHAICcxwCAgBkAAIEZAACCBQAAoMcAgISAAgC+gAMAhwwDAIasHADhaAYAqMcAgOOYBwCsxwCAsMcAgLTHAIDvrAcAuMcAgLzHAIDAxwCAxMcAgMjHAIDMxwCA0McAgNTHAICzZQMA2McAgLVlAwC2bQMA3McAgODHAIDkxwCAuukDALvlAwC8/QMAve0DAL7RAwC/0QMA6McAgOzHAIDwxwCA9McAgPjHAID8xwCAAMgAgATIAICogQMAqYEDAKqBAwCrgQMArIEDAK2BAwCugQMAr4EDALBBAwCxTQMAskUDALNVAwC0eQMAtXkDALYZAwC3GQMAuCkDALkpAwC6OQMAuzkDALwpAwC9KQMAvhkDAL8ZAwCBGQAAgBEAAKMhAgCCLQAApSECAAjIAIAMyACApikCABDIAIAYyACAq6ECAKqtAgCtqQIArLkCAK+VAgCulQIAhEwCAL5IHQCHZB0AhuwcAONAAwAcyACA4aABACDIAIDvnAMAJMgAgCjIAIAsyACAMMgAgDTIAIA4yACAPMgAgEDIAIBEyACASMgAgEzIAIBQyACAVMgAgFjIAIDvtAEAhKgdAOF8BgBcyACA43AGAGDIAIBkyACAaMgAgGzIAICz4QEAcMgAgHTIAIB4yACAfMgAgLblAQC19QEAgMgAgLuhAQC62QEAvuQcAIjIAIC/rQEAvqUBAL2xAQC8uQEAqBUeAKkZHgCqKR4AqykeAKw9HgCtJR4Ari0eAK8lHgAUyACAgvkfAIH5HwCA4R8AhMgAgIzIAICGHAAAh7ADALjBHgC5wR4AusEeALvBHgC8wR4AvcEeAL7BHgC/wR4AsF0eALElHgCyLR4AsyUeALQhHgC1KR4AthkeALcZHgCjoR4AkMgAgJTIAICYyACAnMgAgKalHgCltR4AoMgAgKvhHgCqmR4ApMgAgKjIAICv7R4AruUeAK3xHgCs+R4ArMgAgLOZHwCwyACAtMgAgLa9HwC4yACAvMgAgLW1HwC6mR8Au5kfAMDIAIDEyACAvnkfAL95HwC8eR8AvXkfAKglHgCpUR4AqlUeAKtpHgCseR4ArXkeAK5pHgCvaR4AyMgAgMzIAIDQyACA1MgAgNjIAIDcyACA4MgAgOTIAIC42R4Aue0eALr5HgC7+R4AvOkeAL3pHgC+nR4Av5UeALAZHgCxGR4AsukeALPpHgC0+R4AtfkeALbpHgC36R4Ao90eAIIpAACBFQAAgB0AAOjIAICm+R4ApfEeAOzIAICr3R4Aqt0eAKTHAIDwyACArz0eAK49HgCtPR4ArD0eAITIAgCzQQEAvgwBAPjIAIC2QQEA/MgAgADJAIC1UQEAuk0BALslAQCGSAAAh1ABAL4lAQC/LQEAvDEBAL0xAQAEyQCACMkAgIQEAwC+gAQADMkAgO+oHwAQyQCAFMkAgL8oMQDjdB8AGMkAgOE4HgAcyQCAIMkAgCTJAIAoyQCALMkAgDDJAICjzQIANMkAgKXdAgA4yQCAPMkAgKbNAgBAyQCARMkAgKupAgCqwQIArb0CAKy9AgCvoQIArqkCAKm1AgCoaR0AqwECAKoJAgCtAQIArBkCAK8xAgCuAQIAhGwFAEjJAIBMyQCAUMkAgFTJAICCnQEAgZ0BAICdAQC55QMAuOUDALvlAwC65QMAveUDALzlAwC/5QMAvuUDALEhAgCwSQIAsyUCALIlAgC1KQIAtCECALcVAgC2FQIAqM0CAKnRAgCq0QIAqw0BAKwVAQCtBQEArgEBAK8BAQBYyQCAXMkAgGDJAIBoyQCAvvgEAGzJAIBwyQCAdMkAgLgVAQC5HQEAuikBALspAQC89QEAvf0BAL71AQC/7QEAsEkBALFVAQCyXQEAs1UBALRNAQC1NQEAtj0BALcxAQCGoAUAh8gFAHjJAIDvvAAAfMkAgIDJAICEyQCA74weAIQsBwDh8B4AiMkAgOMcHgCMyQCA4ZQBAJDJAIDjbAAAsxkCAJTJAICYyQCAnMkAgIQACAC2xQEAtd0BAKDJAIC70QEAus0BAKTJAICoyQCAv7EBAL7JAQC9wQEAvMkBAKPZBQBkyQCArMkAgLDJAIC0yQCApgUGAKUdBgC4yQCAqxEGAKoNBgC8yQCAwMkAgK9xBgCuCQYArQEGAKwJBgDEyQCAgh0AAIEdAACAHQAAyMkAgMzJAIDQyQCA1MkAgIZAAwCHxAMA2MkAgNzJAIDgyQCA5MkAgOjJAIDsyQCAqK0HAKmxBwCqsQcAq7EHAKwZBwCtBQcArg0HAK8FBwDwyQCA9MkAgP
jJAID8yQCAAMoAgATKAIAIygCADMoAgLgtBwC5zQAAusUAALvdAAC8zQAAvf0AAL71AAC/nQAAsEkHALFVBwCyUQcAsykHALQ5BwC1OQcAtiUHALcVBwCzOQYAEMoAgBTKAIAYygCAHMoAgLaFBgC1kQYAIMoAgLuRBgC6jQYAJMoAgCjKAIC//QYAvv0GAL39BgC8hQYALMoAgKN9BgAwygCANMoAgKbBBgA4ygCAPMoAgKXVBgCqyQYAq9UGAEDKAIC+bAEArrkGAK+5BgCswQYArbkGAKjpAQCp6QEAqvkBAKv5AQCs6QEArekBAK45AQCvOQEAgPUAAIH9AACCwQAARMoAgIYQAACHdAEASMoAgPTIAIC4zQAAudUAALrVAAC75QAAvP0AAL2VAAC+kQAAv5EAALBJAQCxSQEAslkBALNZAQC0SQEAtUkBALb9AAC39QAA7/QGAEzKAIBQygCAVMoAgO8wAgBYygCAXMoAgGDKAIDj4AcAZMoAgOGAAQBoygCA4ygGAGzKAIDhyAUAcMoAgLMxAgB0ygCAeMoAgJYAAAB8ygCAtikCALUhAgCAygCAu80CALrNAgCEygCAiMoAgL/NAgC+zQIAvc0CALzNAgCMygCAkMoAgJTKAICj/QIAmMoAgKXtAgCm5QIAnMoAgKDKAICkygCAqgECAKsBAgCsAQIArQECAK4BAgCvAQIAgA0AAIEVAACCHQAAqMoAgKzKAICwygCAvlQMALjKAICGwAwAhyQDALzKAIDAygCAxMoAgMjKAIDMygCA0MoAgKi5AgCpAQEAqgEBAKsBAQCsBQEArQ0BAK4FAQCvOQEAhKgNANTKAIDYygCA3MoAgODKAIDkygCA6MoAgOzKAIC4LQEAucUBALrNAQC7xQEAvMEBAL3JAQC++QEAv/kBALBNAQCxUQEAslUBALMpAQC0OQEAtSUBALYlAQC3FQEA4RgGAPDKAIDjOAcA9MoAgPjKAIC+WAwA/MoAgADLAICEbA8ABMsAgL5gDwAIywCADMsAgBDLAIDvcAYAFMsAgIAVAACBGQAAgi0AAITMDwDjYAYAGMsAgOGgAQAcywCA73QAACDLAICGyAwAh/wMACjLAIAsywCAMMsAgDTLAICjCQ4AtMoAgCTLAIA4ywCAPMsAgKYNDgClDQ4AQMsAgKsVDgCqCQ4ARMsAgEjLAICvYQ4Arn0OAK19DgCsAQ4ATMsAgLOpDgBQywCAVMsAgLapDgBYywCAXMsAgLWpDgC6SQ8Au0kPAGDLAIBkywCAvkkPAL9JDwC8SQ8AvUkPAKhdDgCpbQ4AqmUOAKt9DgCsZQ4ArW0OAK5lDgCvuQ8AaMsAgGzLAIBwywCAdMsAgHjLAIB8ywCAgMsAgITLAIC4UQ8AuV0PALpVDwC7aQ8AvH0PAL1lDwC+bQ8Av2EPALDJDwCxyQ8AstkPALPZDwC0yQ8AtckPALZ9DwC3cQ8AiMsAgLURDwC2EQ8AjMsAgIARAACBGQAAgikAALMVDwC8HQ8AvWEPAL5hDwC/fQ8AkMsAgJTLAIC6FQ8AuwkPAKOtDwCYywCAhugAAIfIAQCcywCApq0PAKWtDwCgywCAq00OAKpNDgCkywCAqMsAgK9NDgCuTQ4ArU0OAKxNDgCocQ4AqXEOAKpxDgCrcQ4ArJ0BAK2FAQCuhQEAr7UBAL7sAACsywCAsMsAgLTLAIC4ywCAvMsAgMDLAIDEywCAuGEBALlhAQC6YQEAu2EBALxhAQC9YQEAvmEBAL9hAQCwzQEAsaUBALKhAQCzoQEAtKUBALWtAQC2kQEAt5EBALP5DQDIywCAzMsAgNDLAIDUywCAtgUCALUVAgDYywCAu2ECALoJAgDcywCA4MsAgL9pAgC+YQIAvXUCALx1AgDkywCAo70NAOjLAIDsywCApkECAPDLAID0ywCApVECAKpNAgCrJQIA+MsAgPzLAICuJQIAry0CAKwxAgCtMQIAge0AAIDtAADv0AEAgh0AAADMAIAIzACAhjgEAIdQAwAMzACAEMwAgBTMAIAYzACA4eABABzMAIDjZA8AIMwAgCTMAIAozACALMwAgLORAwAwzACAtbkDALZ9AwA0zACAOMwAgDzMAIC6WQMAu1kDALxJAwC9SQMAvv0AAL/1AACoRQIAqVUCAKpVAgCrZQIArH0CAK2xAgCusQIAr7ECAL5oBQBAzACARMwAgEjMAIBMzACAUMwAgFTMAIBYzACAuF0BALltAQC6ZQEAuw0BALwZAQC9GQEAvg0BAL8FAQCw0QIAsdECALLRAgCz0QIAtHUBALV9AQC2dQEAt20BAOF4DwDjNA4A47gOAOF8DgBczACAYMwAgGTMAIBozACAbMwAgHDMAIB4zACAfMwAgIDMAIDv5A4A79QOAITMAICjnQIAgmEAAIFpAACAUQAAhJwFAKZxAgCltQIAiMwAgKtVAgCqVQIAhkgEAIfMBACv+QEArvEBAK1FAgCsRQIAqJUGAKmlBgCqrQYAq6UGAKy9BgCtoQYArqUGAK/dBgB0zACAjMwAgJDMAICUzACAmMwAgJzMAICgzACApMwAgLhtBwC5dQcAun0HALt1BwC8bQcAvcUHAL7NBwC/xQcAsKUGALGtBgCyuQYAs7EGALSRBgC1kQYAtl0HALdVBwCzJQYAqMwAgKzMAICwzACAtMwAgLYhBgC1NQYAuMwAgLtpBgC6YQYAvMwAgMDMAIC/VQYAvlUGAL1lBgC8bQYAxMwAgKNhBgDIzACAzMwAgKZlBgDQzACA1MwAgKVxBgCqJQYAqy0GANjMAIDczACArhEGAK8RBgCsKQYArSEGAKipBgCpqQYAqrkGAKuxBgCszQYArTEBAK4xAQCvMQEAgMkBAIHJAQCCBQAA4MwAgL54AgCEeAIA5MwAgOjMAIC43QEAue0BALrlAQC7jQEAvJkBAL2ZAQC+jQEAv4UBALBRAQCxUQEAslEBALNRAQC09QEAtf0BALb1AQC37QEAszEGAOzMAICGKAAAh9wBAPDMAIC2sQEAtUUGAPTMAIC7lQEAupUBAPjMAID8zACAvzkBAL4xAQC9hQEAvIUBAATMAICjdQYAAM0AgATNAICm9QEACM0AgAzNAIClAQYAqtEBAKvRAQAQzQCAFM0AgK51AQCvfQEArMEBAK3BAQAYzQCAHM0AgCDNAIAkzQCAKM0AgCzNAIAwzQCANM0AgDjNAIA8zQCAQM0AgETNAIBIzQCATM0AgFDNAIC+cAMAhQA8AOHEBgCERAIA44wHAIBhAACBYQAAgmEAAO9oAwCFRDwA4RACAFjNAIDj2CsAhlA9AIf0AwBczQCA76QHAGDNAIDvQAIAZM0AgGjNAIBszQCAcM0AgHTNAIB4zQCAhDw8AHzNAICAzQCAhM0AgIjNAIDj7AIAjM0AgOEsAQCzUQMAkM0AgJTNAICYzQCAnM0AgLZ5AwC1cQMAoM0AgLs5AwC6MQMApM0AgKjNAIC/9QAAvvUAAL0VAwC8FQMAqD0CAKmBAgCqmQIAq5ECAKy5A
gCtuQIArtECAK/RAgCEqD8Avqg/AKzNAICwzQCAtM0AgLjNAIC8zQCAwM0AgLhRAQC5UQEAulEBALtRAQC8cQEAvXEBAL5xAQC/cQEAsLUCALG9AgCygQIAs4ECALRxAQC1cQEAtnEBALdxAQCAtQAAgb0AAIK1AADIzQCAhrA/AIfgPADMzQCA71QAAL4sPgDhVAYA0M0AgOOIAADUzQCA2M0AgNzNAIDgzQCAo1ECAOTNAIC/2CYA6M0AgOzNAICmeQIApXECAPDNAICrOQIAqjECAPTNAID4zQCAr/UBAK71AQCtFQIArBUCAJAtJACRBSgAkg0oAJPZKACUhS0AlTUsAJbFLACXtTEAmAEwAJkVMACalTUAmyk0AJxtNACdmTUAnj04AJ81OABUzQCAttU+ALXFPgDEzQCAs9E+APzNAIAAzgCABM4AgL/ZPgC+1T4AvcU+ALzFPgC71T4Auuk+AAjOAICPXSQAqeUJAKgVCACrBQwAqg0MAK0BEACsAQwAr0EQAK69EACh4QAADM4AgKMBBACi4QAApZ0EAKSVBACnuQgApgEIAKD1OQChBT0Aouk8AKP1PQAQzgCAFM4AgBjOAIAczgCAscEUALABFACzARgAsn0UALXVGAC01RgAIM4AgCTOAICCISUAgyklACjOAIAszgCAhsUpAIeBLACEGSkAhRkpAIoBLQCL+S0AMM4AgDjOAICOATEAj4k0AIyRMACNHTEAkkU1AJMZNQCG6AcAh+wBAJZZOQCXYTgAlPU0AJVZOQCaoTwAm0U9ADzOAIBAzgCAgX0AAIB9AACcQTwAglUAAKjpPwCp/T8Aqgk/AKsFPwCsHT8ArQU/AK4NPwCvBT8ARM4AgEjOAIBMzgCAUM4AgFTOAIBYzgCAXM4AgGDOAIC4DT8AuRU/ALoVPwC7JT8AvD0/AL39PgC+9T4Av+0+ALB9PwCxQT8AskE/ALNBPwC0QT8AtU0/ALY9PwC3NT8Ao4E8AGTOAIBozgCAbM4AgHDOAICmhTwApZU8AHTOAICrhTwAqrk8AHjOAIB8zgCAr4k8AK6FPACtlTwArJU8AITIAwCz7T0AgM4AgITOAIC26T0AiM4AgIzOAIC16T0Auq09ALu1PQCQzgCAlM4AgL6dPQC/IQIAvKU9AL2VPQCoDT0AqR09AKohPQCrPT0ArCU9AK0tPQCuJT0Ar1k9AIANAACBFQAAgh0AAJjOAICczgCAoM4AgKjOAIC+uAMAuLkCALlhAgC6GQIAuxkCALwJAgC9CQIAviECAL8hAgCwLT0AsTU9ALI1PQCzBT0AtB09ALWhAgC2oQIAt6ECAKOpPACszgCAhigFAIfsAgCwzgCApq08AKWtPAC0zgCAq/E8AKrpPAC4zgCAvM4AgK9lAwCu2TwArdE8AKzhPADAzgCAsykCAMTOAIDIzgCAtvkCAMzOAIDQzgCAtfkCALrVAgC73QIA1M4AgNjOAIC+eQEAv3kBALzFAgC9eQEA3M4AgODOAICj5QIA5M4AgKU1AgDozgCA7M4AgKY1AgDwzgCA9M4AgKsRAgCqGQIArbUBAKwJAgCvtQEArrUBAOPwPgDhrD8A4UA+AON8PwD4zgCA/M4AgADPAIAEzwCAgA0AAIERAACCEQAACM8AgO+oPgAMzwCAEM8AgO8gPgCoLQUAqW0FAKplBQCrrQUArLUFAK29BQCutQUAr60FAKTOAICE6AMAvuADABTPAICGEAMAh5gDABjPAIAczwCAuGkGALlpBgC6AQYAuwEGALwFBgC9DQYAvjEGAL8xBgCw1QUAsd0FALLVBQCzaQYAtHkGALV5BgC2aQYAt2EGAKg5BgCpgQcAqpkHAKuRBwCsuQcArbkHAK7ZBwCv1QcAIM8AgCTPAIA0zgCAKM8AgCzPAIAwzwCANM8AgDjPAIC4VQcAuV0HALppBwC7aQcAvAEHAL0BBwC+AQcAvwEHALCtBwCxsQcAsrEHALOFBwC0nQcAtXUHALZ9BwC3cQcAsxEGADzPAIBAzwCARM8AgEjPAIC2OQYAtTEGAEzPAIC7dQYAumkGAFDPAIBUzwCAv7EGAL5ZBgC9UQYAvGUGAFjPAICjVQYAXM8AgGDPAICmfQYAZM8AgGjPAICldQYAqi0GAKsxBgBszwCAcM8AgK4dBgCv9QYArCEGAK0VBgCouQEAqbkBAKopAQCrKQEArD0BAK0lAQCuLQEAryUBAHTPAICCHQAAgR0AAIAdAAB4zwCAfM8AgIDPAIC+cAEAuIEAALmNAAC6hQAAu5kAALyJAAC9vQAAvrUAAL99AACwXQEAseEAALLhAACz4QAAtOEAALXpAAC20QAAt9EAAITIAgCzpQIAhzgDAIYoAgC2oQIAiM8AgIzPAIC1sQIAup0CALshAwC+bAMAkM8AgL4hAwC/KQMAvDEDAL0xAwCj4QIAlM8AgJjPAICczwCAoM8AgKblAgCl9QIApM8AgKtlAwCq2QIAqM8AgKzPAICvbQMArmUDAK11AwCsdQMAqZkAAKiRAACrzQAAqqEAAK3dAACs3QAAr8UAAK7NAAC+LA0AsM8AgLTPAIC4zwCAvM8AgMDPAIDEzwCAyM8AgLnBAQC4eQAAu8EBALrJAQC9wQEAvNkBAL/FAQC+xQEAsY0AALCNAACzQQAAskkAALVBAAC0WQAAt0EAALZJAADMzwCA0M8AgNTPAIDYzwCA3M8AgO9QBwDgzwCA5M8AgL74DwDjdAcA6M8AgOF8BACAGQAAgQkAAIJ5AADszwCA8M8AgLNpAQD4zwCAhMQCALYdAQD8zwCAANAAgLUVAQC6CQEAuwkBAIboDQCH6A0Avt0BAL/FAQC83QEAvdUBAATQAIAI0ACADNAAgBDQAIDv1AAAFNAAgBjQAIDvTAEA47ADAOG0BgDhgAEA45gBABzQAIAg0ACAJNAAgCjQAIAs0ACAMNAAgKPlAQCEwA0ApZkBADTQAIA40ACAppEBADzQAIBA0ACAq4UBAKqFAQCtWQEArFEBAK9JAQCuUQEA9M8AgETQAIBI0ACATNAAgFDQAIBU0ACAWNAAgFzQAICoaQ8AqXEPAKpxDwCrrQ8ArLUPAK29DwCutQ8Ar6kPALDZDwCx9Q8Asv0PALP1DwC07Q8AtZUPALadDwC3iQ8AuLkPALmFDwC6jQ8Au2kAALx5AAC9eQAAvmkAAL9pAACBnQAAgJ0AAGDQAICCBQAAZNAAgGjQAIBs0ACAcNAAgIaAAwCH9AMAdNAAgHjQAIB80ACAgNAAgITQAICEzwCAs5kPAIjQAICM0ACAkNAAgJTQAIC2XQ8AtV0PAJjQAIC7UQ8Aun0PAJzQAICg0ACAvzEPAL5JDwC9QQ8AvEkPAKNZDgCk0ACAqNAAgKzQAICw0ACApp0OAKWdDgC00ACAq5EOAKq9DgC40ACAvNAAgK/xDgCuiQ4ArYEOAKyJDgDA0ACAxNAAgMjQAIDM0ACAgBkAAIEZAACCBQAA0NAAgISgAQDU0ACAh+gBAIYABADY0ACA3NAAgODQAIDk
0ACAqBUBAKkdAQCqFQEAqyUBAKw9AQCtJQEAri0BAK8lAQDo0ACA7NAAgPDQAID00ACA+NAAgPzQAIAA0QCABNEAgLjJAAC5yQAAutkAALvRAAC8+QAAvfkAAL6ZAAC/mQAAsCUBALEtAQCyJQEAsz0BALQtAQC1HQEAthUBALf5AAAI0QCADNEAgBDRAICzkQIAFNEAgLW5AgC2qQIAGNEAgBzRAIAg0QCAuu0CALvlAgC8/QIAveUCAL7lAgC/1QIApvECACTRAIAo0QCApeECACzRAICjyQIAMNEAgDTRAICuvQIAr40CAKylAgCtvQIAqrUCAKu9AgA40QCAPNEAgID5AACB+QAAggUAAEDRAIC+yAMAhBgDAEjRAIBM0QCAUNEAgFTRAIBY0QCAXNEAgGDRAIBk0QCAhhgEAIecAwBo0QCAbNEAgHDRAIB00QCAeNEAgHzRAIDvsAIAgNEAgOGUAQCE0QCA42wCAIjRAICM0QCAkNEAgJTRAICY0QCA79APAJzRAICg0QCApNEAgKjRAIDhrAEArNEAgONsAACAMQAAgT0AAIIdAADv9A4A42wOALDRAIDhLA8AvnAFALM5AgCEDAUAhugEAIdgBQDcAAAAtvECALX5AgC40QCAu9UCALrVAgC80QCAwNEAgL91AQC+dQEAvcUCALzFAgDE0QCA4fQOAMjRAIDjUA4AzNEAgNDRAIDU0QCA2NEAgNzRAIDg0QCA5NEAgOjRAIDs0QCA8NEAgPTRAIDv5A8ApmUCAPjRAID80QCApW0CAADSAICjrQIABNIAgAjSAICu4QEAr+EBAKxRAgCtUQIAqkECAKtBAgAM0gCAENIAgKiZBgCpmQYAqqkGAKupBgCsuQYArbkGAK6pBgCvqQYAFNIAgIIdAACBHQAAgB0AABjSAIAc0gCAINIAgL50AwC4rQYAubUGALq9BgC7tQYAvK0GAL1RBwC+UQcAv1EHALChBgCxoQYAsqEGALOhBgC0oQYAtaEGALalBgC3mQYARNEAgLMlBgCExAMAtNEAgLY9BgAk0gCAKNIAgLU1BgC6YQYAu2EGAIYIAACHiAAAvmEGAL9hBgC8cQYAvXEGAKNhBgAs0gCAMNIAgDTSAIA40gCApnkGAKVxBgA80gCAqyUGAKolBgBA0gCARNIAgK8lBgCuJQYArTUGAKw1BgCoXQYAqW0GAKplBgCrjQYArJkGAK2FBgCujQYAr4UGAEjSAIBM0gCAUNIAgFTSAIBY0gCAXNIAgGDSAIBk0gCAuIUGALmNBgC6mQYAu5UGALyNBgC9rQYAvqUGAL99AQCw/QYAscUGALLNBgCzxQYAtN0GALXFBgC2zQYAt8UGALPtBgBo0gCAbNIAgHDSAIB00gCAtgUGALURBgB40gCAuwEGALo5BgB80gCAgNIAgL8BBgC+GQYAvREGALwZBgCE0gCAo6kGAIjSAICM0gCApkEGAJDSAICElAEApVUGAKp9BgCrRQYAvqABAJjSAICuXQYAr0UGAKxdBgCtVQYAqJkCAKnBAgCqwQIAq8ECAKzBAgCtyQIArvECAK/xAgCB7QMAgO0DAJzSAICC+QMAhpAcAId0AwCg0gCApNIAgLjFAwC5zQMAusUDALvdAwC8zQMAvf0DAL71AwC/nQMAsEEDALFBAwCyQQMAs0EDALRBAwC1QQMAtkEDALdBAwCzSQIAqNIAgKzSAICw0gCAtNIAgLZJAgC1SQIAuNIAgLuFAwC6hQMAvNIAgMDSAIC/hQMAvoUDAL2VAwC8lQMAxNIAgKMNAgDI0gCAzNIAgKYNAgDQ0gCA1NIAgKUNAgCqwQMAq8EDANjSAIDc0gCArsEDAK/BAwCs0QMArdEDAOOYAQDhpAcA4VgGAONYBgDhoAEA4NIAgOPQAADk0gCA6NIAgOzSAIDvOAAA8NIAgO/0AQD00gCA+NIAgO/4BgCAeQAAgRUAAIIdAACEAB0A/NIAgADTAIC+EB0ACNMAgIbAHACHrB0ADNMAgBDTAIAU0wCAGNMAgBzTAIAg0wCAu8UFALqhBQC5qQUAuJEFAL/NBQC+zQUAvckFALzVBQCzHQYAsh0GALEdBgCwHQYAt6EFALa9BQC1vQUAtL0FAKu9BgCqvQYAqb0GAKi9BgCvfQYArn0GAK19BgCsfQYAJNMAgCjTAIAs0wCAMNMAgDTTAIA40wCAPNMAgEDTAICo7R0AqS0eAKoxHgCrMR4ArJUeAK2dHgCulR4Ar40eAATTAIBE0wCASNMAgEzTAIBQ0wCAVNMAgFjTAIBc0wCAuKkeALmpHgC6XR8Au1EfALxxHwC9cR8AvnUfAL9pHwCw/R4Asc0eALLFHgCzrR4AtLkeALW5HgC2rR4At6UeALO5HgBg0wCAZNMAgGjTAICU0gCAth0eALUdHgBs0wCAuwkeALo5HgBw0wCAhOADAL99HgC+fR4AvXkeALwRHgCCaQAAo/0eAIBFAACBUQAAplkeAL6cAwB00wCApVkeAKp9HgCrTR4AhkgAAIdsAACuOR4ArzkeAKxVHgCtPR4AqF0eAKltHgCqZR4Aq30eAKxlHgCtbR4ArmUeAK/9HgB40wCAfNMAgIDTAICE0wCAiNMAgIzTAICQ0wCAlNMAgLhpAQC5aQEAunkBALt5AQC8aQEAvWkBAL7dAQC/1QEAsIUeALGNHgCyhR4As50eALSFHgC1jR4AtoUeALdZAQCz7R4AmNMAgJzTAICg0wCApNMAgLbtHgC17R4AqNMAgLtJHgC6QR4ArNMAgLDTAIC/SR4AvkEeAL1JHgC8UR4AtNMAgKOpHgC40wCAvNMAgKapHgDA0wCAxNMAgKWpHgCqBR4Aqw0eAMjTAIDM0wCArgUeAK8NHgCsFR4ArQ0eAKghAwCpIQMAqiEDAKshAwCsIQMArSEDAK4hAwCvIQMA0NMAgNTTAIDY0wCAvmACANzTAIDg0wCA6NMAgOzTAIC4iQMAuYkDALqdAwC7lQMAvLkDAL25AwC+eQAAv3kAALDlAwCx7QMAsuUDALP9AwC07QMAtd0DALbVAwC3vQMAgKkAAIG1AACCvQAAs6UDAPDTAIC1pQMAtq0DAPTTAICE4AIA+NMAgLotAwC7JQMAvD0DAL0lAwC+JQMAvxUDAKPpAwD80wCAhmgEAIeAAwAA1ACApuEDAKXpAwAE1ACAq2kDAKphAwAI1ACADNQAgK9ZAwCuaQMArWkDAKxxAwAQ1ACAFNQAgBjUAIAc1ACAINQAgOE8HwAk1ACA40AeACjUAIAs1ACAMNQAgO+MHgA01ACAONQAgDzUAIBA1ACARNQAgIIlAACBEQAAgB0AAEjUAIDj5AMATNQAgOGsAQBQ1ACA77ADAIRkAgC+YAUAhtAEAIdEBQBY1ACAXNQAgGDUAIBk1ACAaNQAgGzUAIBw1ACAdNQAgHjUAIDvsAEAhKQFAOHcHgB81ACA4xABAIDUAICE1ACAiNQAgIzUAICzUQEAkNQAgJTUAICY1ACAnNQAgLYRAQC1fQEAoNQAgLsNAQC6DQE
ApNQAgKjUAIC//QAAvv0AAL39AAC8/QAAqDkGAKk5BgCqmQYAq5EGAKy1BgCt0QYArskGAK/BBgBU1ACArNQAgLDUAIC01ACAgA0AAIGxAACCsQAAuNQAgLhhBwC5YQcAumEHALt9BwC8ZQcAvW0HAL5lBwC/HQcAsIkGALGJBgCyaQcAs2kHALR5BwC1eQcAtmkHALdlBwCjEQYAvNQAgMDUAIC+gAMAxNQAgKZRBgClPQYAyNQAgKtNBgCqTQYAhggAAId8AwCvvQcArr0HAK29BwCsvQcAzNQAgNDUAICzSQcA1NQAgLVZBwDY1ACA3NQAgLZRBwDg1ACA5NMAgLtBBwC6dQcAvUUHALxFBwC/RQcAvkUHAKh5BgCpeQYAqokGAKuJBgCsmQYArZkGAK6JBgCviQYA5NQAgOjUAIDs1ACA8NQAgPTUAID41ACA/NQAgADVAIC4jQYAuZUGALqVBgC7pQYAvL0GAL1xAQC+cQEAv3EBALD5BgCxzQYAstkGALPZBgC0yQYAtckGALa9BgC3tQYAowEGAATVAIAI1QCADNUAgBDVAICmGQYApREGABTVAICrCQYAqj0GABjVAIAc1QCArw0GAK4NBgCtDQYArA0GACDVAIAk1QCAKNUAgCzVAICAGQAAgRkAAIIFAAAw1QCAhKwBAL6sAQCH6AAAhkwPADjVAIA81QCAQNUAgETVAIConQIAqcUCAKrNAgCrwQIArMUCAK3NAgCu+QIArz0DAEjVAIBM1QCAUNUAgFTVAIC+PAwAWNUAgFzVAIBg1QCAuMkDALnJAwC62QMAu9EDALz5AwC9+QMAvpkDAL+ZAwCwRQMAsU0DALJFAwCzXQMAtEUDALVNAwC2RQMAt/kDALNFAgBk1QCAaNUAgGzVAIBw1QCAtk0CALVNAgB01QCAu4kDALqBAwB41QCAfNUAgL+JAwC+gQMAvYkDALyRAwCA1QCAowECAITVAICI1QCApgkCAIzVAICQ1QCApQkCAKrFAwCrzQMAlNUAgJjVAICuxQMAr80DAKzVAwCtzQMAgO0BAIEVAACCEQAAhAACAJzVAIDhpAEAoNUAgOPsAACo1QCArNUAgLDVAIDvMAAAtNUAgLjVAIC81QCAwNUAgIbgDACH9AIAxNUAgMjVAIDM1QCA0NUAgO/MBgDU1QCA4bAHANjVAIDjEAYA3NUAgODVAIDk1QCA6NUAgOzVAIDw1QCA9NUAgPjVAID81QCAANYAgATWAIAI1gCA7+gBAIUYDwDhzAYADNYAgOMcBgCAKQAAgR0AAIIFAAAQ1gCAszkCAITMDQCGaA8Ah/wMAOHQ0gO28QEAtfkBABjWAIC72QEAutEBAL7kDAAc1gCAv30BAL59AQC9fQEAvMEBAKjxDQCp8Q0AqvENAKvxDQCsMQ4ArTEOAK4xDgCvMQ4ApNUAgBTWAIAg1gCAJNYAgCjWAIAs1gCAMNYAgDTWAIC46Q4AuekOALqJDgC7hQ4AvJ0OAL2BDgC+gQ4Av7UOALBVDgCxXQ4AslUOALPpDgC0+Q4AtfkOALbpDgC34Q4Ao3kNADjWAIA81gCAQNYAgETWAICmsQ4ApbkOAEjWAICrmQ4AqpEOAEzWAIBQ1gCArz0OAK49DgCtPQ4ArIEOAFTWAICz7Q8AWNYAgFzWAIC26Q8AYNYAgGTWAIC16Q8Auq0PALu1DwA01QCAaNYAgL6VDwC/mQ8AvK0PAL2hDwCoIQ4AqSEOAKohDgCrPQ4ArCUOAK0tDgCuJQ4Ar1UOAGzWAIBw1gCAdNYAgHjWAICAHQAAgQkAAIK9AAB81gCAuDkOALk5DgC6yQ4Au8kOALzZDgC92Q4AvskOAL/JDgCwLQ4AsTUOALI9DgCzMQ4AtBUOALUZDgC2CQ4AtwkOAKOpDgCA1gCAhIACAL6AAQCFAAQApq0OAKWtDgCI1gCAq/EOAKrpDgCGKAcAhxgAAK/dDgCu0Q4AreUOAKzpDgCM1gCAs+0BAJDWAICU1gCAtuUBAJjWAICc1gCAte0BALplAQC7bQEAoNYAgKTWAIC+bQEAv10BALx1AQC9bQEAqN0NAKnpDQCqIQIAqyECAKwhAgCtIQIAriECAK8hAgCo1gCArNYAgLDWAIC01gCAohECAKMRAgCgqQ4AodUCALiJAgC5iQIAup0CALuVAgC8vQIAvXUDAL59AwC/dQMAsOUCALHtAgCy5QIAs/0CALTtAgC13QIAttUCALe9AgCjqQIAj8UaALjWAIC81gCAwNYAgKahAgClqQIAxNYAgKspAgCqIQIAyNYAgMzWAICvGQIArikCAK0pAgCsMQIAniUOAJ/lDgCc6QoAnRUKAJpFFgCbRQoAmFkWAJlRFgCWcRIAl4ETAJRVEgCV7RIAktEeAJPZHgCQtRoAkVUeAISpHwCFJR8AhiUfAIexEwDQ1gCA1NYAgIJZGwCDURsAjEUSAI2lFwCOpRcAj7kXAIA5+wHY1gCAijkTAIutEwCUmQsAlaEPAJZpDwCX3Q8A3NYAgO+cDwCSyQsAk30LAJxFAwDjeA4A4NYAgOGYDADk1gCAhHgCAJqRAwCbXQMA4QQAAL6IBQDj3OoD6NYAgOzWAIDw1gCA7+wAAO+MDgDhcA4A4fwOAOMwAADjeA4AgSEAAIA5AADvtO0DgikAALMJAgD41gCAhmgEAIcsBQD81gCAtg0CALUNAgAA1wCAu8UBALrFAQAE1wCACNcAgL99AQC+fQEAvdUBALzVAQCE1gCA9NYAgAzXAIAQ1wCAFNcAgBjXAIAc1wCAINcAgKi9BQCp5QUAquEFAKvhBQCs5QUAre0FAK7RBQCv0QUAsGEGALFhBgCyYQYAs2EGALTZBgC12QYAtskGALfBBgC4yQYAuckGALp5BwC7eQcAvEUHAL0lBwC+EQcAvw0HAKNJBQAk1wCAKNcAgCzXAIAw1wCApk0FAKVNBQA01wCAq4UGAKqFBgA41wCAPNcAgK89BgCuPQYArZUGAKyVBgBA1wCARNcAgEjXAIBM1wCAUNcAgFTXAIBY1wCAXNcAgIA5AACBOQAAggUAAGDXAIC+uAMAhLgDAGjXAIBs1wCAqMUGAKnVBgCq1QYAq+UGAKz9BgCtHQEArhUBAK8NAQBk1wCAcNcAgIaIAQCHHAEAdNcAgHjXAIB81wCAgNcAgLjpAQC56QEAuokBALuJAQC8mQEAvZkBAL6JAQC/iQEAsHUBALF9AQCydQEAs+kBALT5AQC1+QEAtukBALfhAQCzXQYAhNcAgIjXAICM1wCAhLwBALadAQC1dQYAkNcAgLu5AQC6sQEAlNcAgJjXAIC/PQEAvj0BAL09AQC8oQEAnNcAgKMZBgCg1wCApNcAgKbZAQCo1wCArNcAgKUxBgCq9QEAq/0BALDXAIC01wCArnkBAK95AQCs5QEArXkBAKj5AgCp+QIAqi0DAKs9AwCsJQMArS0DAK4lAwCvmQMAuNcAgLzXAIDA1wCAxNcAgIANAACBsQAAgrEAAMjXAIC4lQMAuZ0DALqhAwC7oQMAvH
EAAL1xAAC+cQAAv3EAALDpAwCx6QMAsvUDALPFAwC03QMAtbUDALaxAwC3sQMAvswDAMzXAIDQ1wCA2NcAgNzXAIDg1wCA5NcAgO/kAgDo1wCA4ZQBAOzXAIDjLAEA8NcAgPTXAICHGAMAhhz8A7tNAwC6TQMA+NcAgPzXAIC/EQMAvnkDAL1xAwC8QQMAs8UDAITo/AMA2ACABNgAgAjYAIC2zQMAtc0DAAzYAICkAfwDpSX/A6bZ/wOnAfgDENgAgKEVAwCiHQMAoz0CAKwR9wOtAfADri3zA68B8wOoEfsDqZn7A6oB9AOrHfcDtAHoA7Vl6wO+xPwDhMT8A7AB7AOxVe8Dsk3vA7Nx7gMU2ACAGNgAgBzYAIAg2ACAJNgAgCjYAIAs2ACAMNgAgOFQBgDhNAQA42wBAOPoBgA02ACAONgAgDzYAIBA2ACAgDUAAIE9AACCNQAASNgAgEzYAIBQ2ACA77ABAO/ABgCj5QIAVNgAgIbo/AOHfP0DWNgAgKbtAgCl7QIAXNgAgKttAgCqbQIAYNgAgGTYAICvMQIArlkCAK1RAgCsYQIAqI3+A6mV/gOqnf4Dq5X+A6yx/gOtvf4Drqn+A6+p/gNE2ACAaNgAgGzYAIBw2ACAdNgAgHjYAIB82ACAgNgAgLgl/wO5Lf8DuiX/A7s9/wO8Jf8DvS3/A74l/wO/zf8DsKn+A7Gp/gOygf4Ds4H+A7SB/gO1if4Dtmn/A7cd/wOE2ACA4SD8A4jYAIDjePwDjNgAgJDYAICU2ACAmNgAgJzYAICg2ACApNgAgKjYAICAHQAAgXEAAIJxAADvDP0Ds1X+A6zYAICw2ACAvkAAALTYAIC2ff4DtXn+A7jYAIC7Lf4Dui3+A4boAACHrAAAvw3+A74F/gO9Ff4DvBX+A6OV/wO82ACAwNgAgMTYAIDI2ACApr3/A6W5/wPM2ACAq+3/A6rt/wPQ2ACA1NgAgK/N/wOuxf8DrdX/A6zV/wPY2ACAs/H+A9zYAIDg2ACAto3+A+TYAIDo2ACAtY3+A7pFAQC7TQEA7NgAgPDYAIC+RQEAv00BALxVAQC9TQEAqC3+A6k1/gOqPf4Dq0n+A6xB/gOtSf4DrnH+A69x/gP02ACA+NgAgPzYAIAA2QCABNkAgAjZAIAM2QCAENkAgLhJAQC5VQEAul0BALtVAQC8TQEAvXUBAL59AQC/dQEAsMUBALHNAQCyxQEAs90BALTFAQC1zQEAtsUBALd9AQCjtf0DFNkAgBjZAICExAMAHNkAgKbJ/QOlyf0DINkAgKsJAgCqAQIAKNkAgL7sAgCvCQIArgECAK0JAgCsEQIAgEkAAIFVAACCVQAAo0UDACzZAIClRQMApkUDADDZAICGwAQAhxQDAKopAwCrJQMArD0DAK0hAwCuIQMArxUDADTZAIA42QCAPNkAgEDZAIBE2QCASNkAgEzZAIBQ2QCAqH0CAKmhAwCqoQMAq6EDAKyhAwCtqQMArpEDAK+RAwCwgQMAsY0DALKFAwCzmQMAtIkDALW9AwC2tQMAt30DALhFAwC5TQMAukUDALtdAwC8RQMAvU0DAL5FAwC/+QAA1NcAgLMNAgBU2QCAWNkAgLYNAgBc2QCAYNkAgLUNAgC6YQIAu20CAGTZAIBo2QCAvmkCAL9dAgC8dQIAvWkCAGzZAIBw2QCAdNkAgHjZAIB82QCA4aQBAIDZAIDjQAMAhNkAgIjZAICM2QCA77gDAIAVAACBHQAAggUAAJDZAICEgAIAvsgFAIcYBQCGLAQAmNkAgJzZAICg2QCA76gBAKTZAIDhdP4DqNkAgOPw/gOs2QCAsNkAgLTZAIC42QCAvNkAgMDZAIDE2QCAs5EBAMjZAIC1UQEAtlEBAMzZAIDQ2QCA1NkAgLp9AQC7dQEAvG0BAL39AAC+9QAAv+kAAKgpBgCpVQYAqlUGAKuNBgCslQYArZ0GAK6VBgCvjQYAlNkAgNjZAIDc2QCA4NkAgOTZAIDo2QCA7NkAgPDZAIC4bQcAuQUHALoNBwC7BQcAvB0HAL0FBwC+AQcAvz0HALD1BgCx/QYAsvUGALNlBwC0fQcAtWEHALZhBwC3VQcA4xAFAPTZAIDh8AQA+NkAgIAdAACBCQAAgjkAAPzZAIAA2gCAhOgDAL7gAwAE2gCA78wFAAjaAICHOAAAhhgAAKOdBgAM2gCAENoAgBTaAIAY2gCApl0GAKVdBgAc2gCAq3kGAKpxBgAg2gCAJNoAgK/lBwCu+QcArfEHAKxhBgCokQYAqZEGAKqRBgCrrQYArLkGAK2lBgCurQYAr6UGACjaAIAs2gCAMNoAgDTaAIA42gCAPNoAgEDaAIBE2gCAuGUBALltAQC6ZQEAu30BALxlAQC9bQEAvmUBAL/ZAQCw3QYAsaUGALKtBgCzpQYAtKEGALWpBgC2mQYAt5kGALMZBgBI2gCATNoAgFDaAIBU2gCAtiUGALUxBgBY2gCAu2EGALoZBgBc2gCAYNoAgL9tBgC+ZQYAvXEGALx5BgBk2gCAo10GAGjaAIBs2gCApmEGAHDaAICEmAEApXUGAKpdBgCrJQYAvqQBAHjaAICuIQYArykGAKw9BgCtNQYAqcUCAKixAgCrxQIAqsUCAK3NAgCsxQIAr/UCAK71AgB82gCAgNoAgITaAICI2gCAjNoAgJDaAICU2gCAmNoAgLnJAwC4wQMAu9kDALrBAwC9+QMAvMkDAL+ZAwC+8QMAsUUDALBFAwCzRQMAskUDALVFAwC0RQMAt0UDALZFAwCASQMAgUkDAIJdAwCzRQIAvtwMALVFAgC2RQIAnNoAgIYADACH5AMAuokDALuJAwC8mQMAvZkDAL6JAwC/iQMAowkCAKDaAICk2gCAqNoAgKzaAICmCQIApQkCALDaAICrxQMAqsUDALTaAIC42gCAr8UDAK7FAwCt1QMArNUDALzaAIDA2gCAxNoAgCTZAIDvAAAAyNoAgMzaAIDQ2gCA4+gAANTaAIDhjAEA2NoAgNzaAIDg2gCA6NoAgOzaAICAbQAAgXUAAIJ9AACEQAIAhvAMAId4DQDw2gCA9NoAgPjaAID82gCAANsAgATbAIAI2wCADNsAgBDbAIAU2wCAGNsAgBzbAIAg2wCAJNsAgCjbAIAs2wCAMNsAgO/MAQCE7AwA4TAGADTbAIDjGAEAONsAgDzbAIBA2wCARNsAgLPlAQBI2wCAhIQPAEzbAIBQ2wCAtuUBALX1AQBY2wCAu30BALrZAQC+oAwAXNsAgL8hAQC+OQEAvTEBALw5AQCo7Q0AqSUOAKotDgCrJQ4ArD0OAK0lDgCuLQ4AryUOAOTaAICC9Q8AgeUPAIDpDwBU2wCAYNsAgIaYAACHDAMAuK0OALlFDwC6TQ8Au0UPALxFDwC9TQ8AvkUPAL95DwCwXQ4AsfkOALKtDgCzpQ4AtL0OALWlDgC2pQ4At5UOAGTbAIDv7AwAaNsAgGzbAIBw2wCAdNsAgHjbAIB82wCAvugAAIDbAICE2wCAiNsAgIzbAIDj6A0AkNsAg
OEEDACj5Q4AlNsAgJjbAICc2wCAoNsAgKblDgCl9Q4ApNsAgKt9DgCq2Q4AqNsAgKzbAICvIQ4ArjkOAK0xDgCsOQ4AqDkOAKk5DgCqUQ4Aq1EOAKxxDgCtcQ4ArnEOAK9xDgCw2wCAtNsAgLjbAIC82wCAgBkAAIEZAACCBQAAwNsAgLjRDgC50Q4AutEOALvlDgC84Q4AveEOAL7hDgC/4Q4AsBEOALERDgCyEQ4AsxEOALTxDgC18Q4AtvEOALfxDgCz2Q4AyNsAgIYoAACHuAAAzNsAgLbxDgC1+Q4A0NsAgLvVDgC61Q4A1NsAgNjbAIC/NQ4AvjUOAL3FDgC8xQ4A3NsAgKOdDgDg2wCA5NsAgKa1DgDo2wCA7NsAgKW9DgCqkQ4Aq5EOAPDbAID02wCArnEOAK9xDgCsgQ4ArYEOAKjdDQCp6Q0Aqj0CAKuNAgCsmQIArZkCAK6JAgCviQIAvqwEAPjbAID82wCAhCADAADcAIAE3ACACNwAgAzcAIC4iQIAuYkCALqZAgC7kQIAvLkCAL25AgC+eQMAv3kDALD5AgCx+QIAss0CALPFAgC03QIAtcUCALbBAgC3uQIAs7UCABDcAIAU3ACAGNwAgBzcAIC2GQIAtRECACDcAIC7PQIAuj0CACTcAIAo3ACAvwECAL4ZAgC9EQIAvBkCACzcAICj8QIAMNwAgDjcAICmXQIAPNwAgEDcAIClVQIAqnkCAKt5AgCGSAUAh6wEAK5dAgCvRQIArF0CAK1VAgCohQIAqZUCAKqVAgCrpQIArL0CAK3VAgCu0QIAr9ECAETcAIBI3ACATNwAgFDcAICB8QEAgJkBAHTaAICC9QEAuHkBALl5AQC6zQEAu8UBALzdAQC9xQEAvsUBAL/1AQCwtQIAsb0CALKBAgCzgQIAtFUBALVdAQC2SQEAt0kBAFTcAIBY3ACAXNwAgO/UAQCEEAUAYNwAgGTcAIDvjA4AvuwFAOHsDgBo3ACA4xwOAGzcAIDhlAEAcNwAgONkDgCzXQIAdNwAgHjcAIB83ACAgNwAgLYVAgC1dQIAhNwAgLs5AgC6MQIAiNwAgIzcAIC/2QEAvtEBAL0VAgC8FQIAo50FADTcAICQ3ACAlNwAgJjcAICm1QUApbUFAJzcAICr+QUAqvEFAKDcAICk3ACArxkGAK4RBgCt1QUArNUFAIBRAACBWQAAgmEAALOVBgCo3ACAtXEHALZxBwCs3ACAhkADAIdUAwC67QcAu+UHALzlBwC97QcAvtEHAL/NBwCw3ACAtNwAgLjcAIC83ACAwNwAgMTcAIDvQAQAyNwAgOEwBwDM3ACA45QEANDcAIDU3ACA2NwAgNzcAIDg3ACAoxkGAOTcAIDo3ACA7NwAgPDcAICm/QcApf0HAPTcAICraQcAqmEHAPjcAID83ACAr0EHAK5dBwCtYQcArGkHAKjNBwCp0QcAqtEHAKstBgCsNQYArT0GAK41BgCvnQYAAN0AgATdAIAI3QCADN0AgIAZAACBGQAAggUAABDdAIC4iQYAuYkGALqZBgC7kQYAvLkGAL25BgC+UQEAv1EBALDlBgCx7QYAsv0GALP1BgC02QYAtcUGALbBBgC3uQYAqNEBAKnZAQCqCQEAqwkBAKwZAQCtGQEArgkBAK8JAQCEYAEAvnwBAIeoAACGjAEAGN0AgBzdAIAg3QCAJN0AgLgJAQC5CQEAuhkBALsRAQC8OQEAvTkBAL75AAC/+QAAsH0BALFBAQCyRQEAs10BALRFAQC1TQEAtkUBALc5AQAo3QCALN0AgDDdAICzjQIANN0AgLWdAgC2lQIAON0AgDzdAIBA3QCAurUCALuJAgC8nQIAvYUCAL6NAgC/hQIAps0CAETdAIBI3QCApcUCAEzdAICj1QIAUN0AgFTdAICu1QIAr90CAKzFAgCt3QIAqu0CAKvRAgCE9AMAWN0AgKgxAwCpMQMAqjEDAKsxAwCskQAArZEAAK6RAACvjQAAXN0AgGDdAIBk3QCAaN0AgGzdAIBw3QCAdN0AgHjdAIC4vQAAuWUAALptAAC7ZQAAvH0AAL1lAAC+bQAAv2UAALD9AACxxQAAss0AALOpAAC0uQAAtaUAALahAAC3oQAAgL0BAIEJAACCGQAAfN0AgIDdAIC+WAIAhxQdAIacHQCEbB0AxNsAgIjdAICM3QCAvrwcAJDdAICU3QCAmN0AgLP5AgCc3QCAoN0AgKTdAICo3QCAtlEBALVZAQC+3B8Au0EBALp5AQCs3QCAsN0AgL8hAQC+PQEAvT0BALxZAQDhcAcAtN0AgOMIBgC43QCA78wAALzdAIDA3QCAxN0AgOMQAADI3QCA4dABAMzdAICGkBwAh/QcAO/gBgDQ3QCAo3kCANTdAIDY3QCA3N0AgODdAICm0QEApdkBAOTdAICrwQEAqvkBAOjdAIDs3QCAr6EBAK69AQCtvQEArNkBAITdAICCFQAAgeUfAIDlHwDw3QCA9N0AgPjdAID83QCAqAkfAKkJHwCqHR8AqxUfAKwNHwCtcR8ArnEfAK9xHwCwER8AsS0fALIlHwCzyR8AtN0fALXBHwC2wR8At8EfALjFHwC5yR8AutUfALupHwC8uR8AvbkfAL6pHwC/oR8As7UfAADeAIAE3gCACN4AgAzeAIC20R8AtaUfABDeAIC7yR8AuvUfABTeAIAY3gCAvyUfAL45HwC9PR8AvNEfABzeAIAg3gCAJN4AgCjeAIAs3gCA4WAfADDeAIDjtBwANN4AgDjeAIA83gCA7wAdAEDeAIBE3gCASN4AgEzeAICjNR4AUN4AgFTeAIBY3gCAXN4AgKZRHgClJR4AYN4AgKtJHgCqdR4AhKgCAGTeAICvpR4ArrkeAK29HgCsUR4AgE0AAIFVAACCVQAAs8kBAGjeAIC12QEAtskBAGzeAICGoAAAhwQBALrFAQC7rQEAvLUBAL29AQC+tQEAv60BAKiZAQCpmQEAqg0BAKsFAQCsHQEArQUBAK4FAQCvNQEAcN4AgHTeAIB43gCAfN4AgIDeAICE3gCAiN4AgIzeAIC4JQEAuS0BALo5AQC7OQEAvCkBAL0pAQC+3QAAv9UAALBNAQCxJQEAsi0BALMlAQC0PQEAtSUBALYhAQC3HQEAkN4AgJTeAICY3gCAo4kCAJzeAIClmQIApokCAKDeAICk3gCAqN4AgKqFAgCr7QIArPUCAK39AgCu9QIAr+0CAKzeAICw3gCAtN4AgIRAAgC43gCAvN4AgMDeAIDE3gCAgA0AAIEVAACCHQAAyN4AgMzeAIDQ3gCAh7QDAIbcBAC+zAMA2N4AgNzeAIDg3gCA7+gCAOTeAIDo3gCA7N4AgOP8AgDw3gCA4dABAPTeAID43gCA/N4AgADfAIAE3wCAs2EDAAjfAIAM3wCAEN8AgBTfAIC2eQMAtXEDABjfAIC7XQMAul0DABzfAIAg3wCAv+EAAL79AAC9/QAAvP0AALC5AgCxuQIAsgkBALMJAQC0GQEAtQUBALYFAQC3PQEAuAUBALll
AQC6bQEAu2UBALxhAQC9YQEAvmEBAL9hAQCFXAcAJN8AgCjfAIAs3wCAFN0AgDDfAIA03wCAON8AgKgxAgCpOQIAqskCAKvJAgCs2QIArdkCAK7JAgCvyQIAhMwFAOGAHgA83wCA47weAOE4HgBA3wCA46AAAL4QBABI3wCATN8AgO8MHgBQ3wCAVN8AgFjfAIBc3wCA73QeAKNhAgCCUQAAgUEAAICRAABg3wCApnkCAKVxAgBk3wCAq10CAKpdAgCGyAQAhzwFAK/hAQCu/QEArf0BAKz9AQCohQYAqY0GAKqFBgCrmQYArIkGAK2JBgCuvQYAr7EGAETfAIBo3wCAbN8AgHDfAIB03wCAeN8AgHzfAICA3wCAuJ0GALmtBgC6pQYAuwkHALwZBwC9GQcAvg0HAL8FBwCw0QYAsdEGALLRBgCz0QYAtLUGALW9BgC2tQYAt60GALMNBgCE3wCAiN8AgIzfAICQ3wCAtgkGALUBBgCU3wCAuxUGALoVBgCY3wCAnN8AgL95BgC+cQYAvQUGALwFBgCg3wCA4aAEAKTfAIDjXAUAgA0AAIE1AACCPQAAqN8AgKzfAICw3wCAhGADAL5sAAC/8AEAhZAAALTfAIDvmAUAo40HAIQIAACGAAwAh4wAALjfAICmiQcApYEHALzfAICrlQcAqpUHAMDfAIDE3wCAr/kHAK7xBwCthQcArIUHAMjfAICz6QYAzN8AgNDfAIC26QYA1N8AgNjfAIC16QYAukUBALtNAQDc3wCA4N8AgL5FAQC/TQEAvFUBAL1NAQCoIQYAqSEGAKolBgCrPQYArCUGAK0tBgCuSQYAr0EGAOTfAIDo3wCA7N8AgPDfAID03wCA+N8AgPzfAIAA4ACAuEkBALlJAQC6WQEAu1EBALx5AQC9eQEAvhkBAL8VAQCwxQEAsc0BALLFAQCz3QEAtMUBALXNAQC2xQEAt3kBAATgAIAI4ACADOAAgKOhBQAQ4ACApaEFAKahBQAU4ACAjyHqAxjgAICqDQIAqwUCAKwdAgCtBQIArg0CAK8FAgCX7RIAlmUSAJVFEQCUnRYAk3EWAJJVFQCReesDkFnqA59hBgCeNQUAnUUaAJxpGgCbVRkAmkUeAJlZHgCYRR0A4WAAABzgAIDjTD4AIOAAgKOxAgCi1QEAobUHAKCJBgCxATgAsAk+ALOVOgCyjToAtbUmALQBJADvaDoAvjAMAKnJNgCowTYAqwEwAKrhNwCtzTMArPUyAK/5PgCuATwAoRkCACjgAICjbQ4Aom0OAKX1CgCkAQgAp4ULAKaZCgCGAA0Ah0QNAIIJ6wODCesDhDHqA4UVFACGORcAh80XAISgDQAs4ACAiiUQAIsNEwCMnRMAjQ0cAI4ZHwCPDR8A1N4AgO8AAwCSbRgAk0kbAJR9GwCVBQQAllkHAJdJBwAw4ACANOAAgJpFBgCbLQAAnFEDAONgAAA44ACA4WwAAIClAQCBAQEAggUBAL4ADAA84ACAQOAAgETgAIDviAEASOAAgOFUBgBM4ACA41QBAFDgAIBU4ACAWOAAgFzgAICz6QIAYOAAgGTgAIBo4ACAbOAAgLadAgC1mQIAcOAAgLuJAgC6vQIAdOAAgHjgAIC/WQIAvlECAL1ZAgC8kQIAoykNAHzgAICA4ACAhOAAgIjgAICmXQ0ApVkNAIzgAICrSQ0Aqn0NAJDgAICY4ACAr5kNAK6RDQCtmQ0ArFENAIBRAACBWQAAgmEAALMtDwCc4ACAtS0PALbJDwCg4ACAhkADAIcIAwC6yQ8Au8UPALzBDwC9wQ8AvsEPAL/BDwAk4ACAlOAAgKTgAICo4ACArOAAgLDgAIC04ACAuOAAgKhFDgCpgQ8AqskPAKvJDwCsyQ8ArSUPAK4tDwCvJQ8AsGEPALFtDwCyeQ8As3kPALRpDwC1aQ8Ath0PALcVDwC4LQ8AuTUPALo1DwC7BQ8AvB0PAL3xAAC+8QAAv/EAAKNhDgC84ACAhMQBAMDgAIDE4ACApoUOAKVhDgDI4ACAq4kOAKqFDgDM4ACA0OAAgK+NDgCujQ4ArY0OAKyNDgDU4ACA2OAAgNzgAIDg4ACA5OAAgOjgAIDs4ACA8OAAgPTgAICCHQAAgR0AAIAdAAD44ACA/OAAgADhAIC+tAEAqK0BAKnVAQCq1QEAqwUBAKwdAQCtBQEArg0BAK8FAQCGgAEAhxgBAAjhAIAM4QCAEOEAgBThAIAY4QCAHOEAgLiFAAC5jQAAuoUAALudAAC8hQAAvY0AAL6FAAC/vQAAsH0BALHhAACy5QAAs/0AALTtAAC13QAAttUAALe9AACzXQIAIOEAgCThAIAo4QCALOEAgLaFAgC1lQIAMOEAgLslAwC6uQIANOEAgDjhAIC/GQMAvikDAL0pAwC8MQMAvswEAKMZAgA84QCAQOEAgKbBAgBE4QCASOEAgKXRAgCq/QIAq2EDAEzhAIBQ4QCArm0DAK9dAwCsdQMArW0DAKgpAwCpKQMAqjkDAKs5AwCsKQMArSkDAK6dAACvlQAAVOEAgFjhAIBc4QCAYOEAgGThAICCqQEAga0BAICtAQC4mQAAua0AALqlAAC7bQAAvHUAAL19AAC+dQAAv20AALDtAACx9QAAsvUAALPFAAC03QAAtb0AALa1AAC3qQAA4XgBAOEcDgDjEAAA4zwOAGjhAIBs4QCAvhQEAHDhAICErAIAeOEAgId4BQCGDAUAfOEAgIDhAIDvvAAA70gOALPxAgCE4QCAiOEAgIzhAICQ4QCAtukCALXhAgCU4QCAu3EBALppAQCY4QCAhKAEAL85AQC+WQEAvVEBALxhAQCc4QCAhIwEAKDhAICEADgApOEAgKjhAICs4QCAsOEAgKqJDgCriQ4AqLkOAKmxDgCu/Q4Ar+EOAKz5DgCt9Q4Asq0OALNlDgCwkQ4AsaUOALZ9DgC3ZQ4AtH0OALV1DgC6XQ4Au+UNALhdDgC5VQ4AvuENAL/pDQC8/Q0AvfUNAKOxBQB04QCAtOEAgLjhAIC84QCApqkFAKWhBQDA4QCAqzEGAKopBgDE4QCAyOEAgK95BgCuGQYArREGAKwhBgDM4QCA0OEAgNThAIDY4QCAgB0AAIEJAACCOQAA3OEAgODhAIDk4QCAhsgAAIcMAwDo4QCA7OEAgPDhAID04QCAqKUHAKm1BwCqvQcAq8kHAKzZBwCt2QcArskHAK/BBwC+oAAA+OEAgPzhAIAA4gCABOIAgAjiAIAM4gCAEOIAgLjNAAC51QAAutUAALvlAAC8/QAAvZUAAL6dAAC/lQAAsIkHALFlBwCyYQcAs30HALRlBwC1bQcAtmUHALf1AACzNQYAFOIAgBjiAIAc4gCAIOIAgLZZBgC1UQYAJOIAgLuhBgC6TQYAKOIAgCziAIC/qQYAvqEGAL2pBgC8tQYAMOIAgDTiAIDv8AUAOOIAgDziAIBA4gCAROIAgEjiAICAPQAAgQkAAIIdAABM4gCA4cgGAFDiAIDjSAQAVOIAgKO1BgBY4gCAhigAAIdAAQB
c4gCAptkGAKXRBgBg4gCAqyEGAKrNBgBk4gCAaOIAgK8pBgCuIQYArSkGAKw1BgBs4gCAs70BAHDiAIB04gCAtnkBAHjiAIB84gCAtXkBALpVAQC7XQEAgOIAgITiAIC++QAAv/kAALxFAQC9+QAAqHECAKlxAgCqcQIAq3ECAKy1AgCtvQIArrUCAK+tAgC+rDwAiOIAgIziAICQ4gCAlOIAgJjiAICc4gCAoOIAgLhpAwC5aQMAugkDALsJAwC8HQMAvQUDAL4NAwC/BQMAsNUCALHdAgCy1QIAs2kDALR5AwC1eQMAtmkDALdhAwCk4gCAqOIAgKziAICj9QIAsOIAgKUxAgCmMQIAtOIAgLjiAIC84gCAqh0CAKsVAgCsDQIArbEDAK6xAwCvsQMA7xgCAIIVAACBbQAAgG0AAMDiAIDI4gCAhvg8AIcYAwDM4gCA0OIAgNTiAIDY4gCA42wHAAThAIDhaAEA3OIAgKiFAgCplQIAqpUCAKulAgCsvQIArdUCAK7RAgCv0QIA4OIAgOTiAIDo4gCA7OIAgPDiAID04gCA+OIAgPziAIC4dQEAuX0BALp1AQC7zQEAvNUBAL3dAQC+yQEAv8EBALC1AgCxvQIAsoECALOBAgC0VQEAtV0BALZVAQC3TQEA4bQGAADjAIDj9AYABOMAgIQYPQAI4wCADOMAgBDjAIAU4wCAGOMAgBzjAIAg4wCAJOMAgCjjAIDvWAYALOMAgIF9AACAcQAAMOMAgIIFAAA44wCAPOMAgO+AAQC+VDwA4ZABAEDjAIDjfAYAROMAgEjjAIBM4wCAhtg8AIf0PACjnT0AxOIAgDTjAIBQ4wCAVOMAgKbVPQCltT0AWOMAgKv5PQCq8T0AXOMAgGDjAICvGT4ArhE+AK3VPQCs1T0AZOMAgLOhPgBo4wCAbOMAgLatPgBw4wCAdOMAgLWxPgC6ST8Au0k/AHjjAIB84wCAvkk/AL9JPwC8ST8AvUk/AKhVPgCpZT4Aqm0+AKtlPgCsfT4ArWk+AK65PwCvuT8AgOMAgITjAICI4wCAjOMAgJDjAICU4wCAmOMAgJzjAIC4VT8AuV0/ALpVPwC7bT8AvHU/AL19PwC+dT8Av20/ALDJPwCxyT8Astk/ALPZPwC0yT8Atck/ALZ9PwC3cT8AghUAAKPhPwCAsQEAgbEBAKbtPwCg4wCAvtABAKXxPwCqCT4Aqwk+AITkAQCk4wCArgk+AK8JPgCsCT4ArQk+ALPdPACo4wCAhugAAIfMAQCs4wCAtpU8ALX1PACw4wCAu7k8ALqxPAC04wCAuOMAgL9ZPwC+UT8AvZU8ALyVPACoUT4AqVE+AKptPgCrYT4ArGE+AK1hPgCulQEAr40BAISgAQC84wCAwOMAgMTjAIDI4wCAzOMAgNDjAIDU4wCAuKkBALmpAQC6aQEAu2kBALx5AQC9eQEAvmkBAL9pAQCw/QEAsc0BALLFAQCzrQEAtLkBALW5AQC2rQEAt6UBALPlPQDY4wCA3OMAgODjAIDk4wCAtuE9ALXpPQDo4wCAuwkCALo5AgDs4wCA8OMAgL99AgC+fQIAvXkCALwRAgD04wCAo6E9APjjAID84wCApqU9AADkAIAE5ACApa09AKp9AgCrTQIACOQAgAzkAICuOQIArzkCAKxVAgCtPQIAgOkAAIHpAACCHQAAvsADAO/kAgAQ5ACAh1QDAIY8BADjEAEAGOQAgOH4AQAc5ACAIOQAgCTkAIAo5ACALOQAgDDkAIA05ACAOOQAgLORAwA85ACAtbkDALZ9AwBA5ACAROQAgEjkAIC6WQMAu1kDALxJAwC9SQMAvv0AAL/1AACoRQIAqVUCAKpVAgCrZQIArH0CAK2xAgCusQIAr7ECAIRsBQBM5ACAUOQAgFTkAIBY5ACAXOQAgL5wBQBg5ACAuF0BALltAQC6ZQEAuw0BALwZAQC9GQEAvg0BAL8FAQCw0QIAsdECALLRAgCz0QIAtHUBALV9AQC2dQEAt20BAOFAPwDjvAAA4wg+AOFsPgBk5ACAaOQAgGzkAIBw5ACAdOQAgHjkAIB85ACAgOQAgL5sBwDvVAAA75w+AIjkAICjnQIAgmkAAIFhAACAaQAAjOQAgKZxAgCltQIAkOQAgKtVAgCqVQIAhsgEAIfsBACv+QEArvEBAK1FAgCsRQIAqKUGAKmpBgCquQYAq7kGAKypBgCtqQYArtkGAK/ZBgCE5ACAlOQAgJjkAICc5ACAoOQAgKTkAICo5ACArOQAgLhxBwC5cQcAunUHALvdBwC8xQcAvc0HAL7FBwC//QcAsKkGALG1BgCytQYAs40GALSVBgC1UQcAtlEHALdRBwCzMQYAsOQAgLTkAIC45ACAvOQAgLYpBgC1IQYAwOQAgLtxBgC6bQYAxOQAgMjkAIC/lQcAvlEGAL1ZBgC8YQYAzOQAgKN1BgDQ5ACA1OQAgKZtBgDY5ACA3OQAgKVlBgCqKQYAqzUGAODkAIDk5ACArhUGAK/RBwCsJQYArR0GAIANAACBFQAAgh0AAOjkAIDs5ACA8OQAgITcAQD05ACAhoAAAIcgAQD45ACA/OQAgADlAIAE5QCACOUAgAzlAIAQ5QCA43QEABTlAIDhyAUAGOUAgBzlAIAg5QCAJOUAgCjlAIAs5QCAMOUAgDTlAIA45QCA77QEADzlAIBA5QCAqD0GAKlVBgCqVQYAq6kBAKy5AQCtuQEArqkBAK+pAQCErAEAROUAgEjlAIBM5QCAUOUAgFTlAIBY5QCAXOUAgLhtAQC5BQEAugEBALsBAQC8BQEAvQ0BAL4xAQC/MQEAsNkBALHZAQCybQEAs2UBALR9AQC1ZQEAtmUBALdVAQCBvQMAgL0DALPVBQCCGQAAtTkCAGDlAIC+VAMAtjECAGjlAIBs5QCAuxUCALoVAgC9uQIAvLECAL+pAgC+sQIAcOUAgKZpAgClYQIAhAAMAKONBQB05QCAhvgMAId8AwCv8QIArukCAK3hAgCs6QIAq00CAKpNAgB45QCAfOUAgIDlAICE5QCAiOUAgIzlAIDjIAEAkOUAgOGgAQCU5QCA70ACAJjlAICc5QCAoOUAgKTlAICo5QCArOUAgLDlAICz8QMAtOUAgBTkAIC45QCAvOUAgLbpAwC14QMAwOUAgLu1AwC6tQMAxOUAgMjlAIC/lQMAvpUDAL2lAwC8pQMAqCkCAKkpAgCqOQIAqzkCAKwpAgCtKQIArlkCAK9VAgCAzQEAgQkAAIIZAADM5QCA0OUAgL58DQCHtA0AhhwMALgxAgC5PQIAujUCALvpAgC8+QIAvfkCAL7pAgC/6QIAsDECALExAgCyMQIAszECALQRAgC1EQIAthECALcRAgDY5QCA3OUAgODlAIDk5QCA6OUAgOzlAIDw5QCA79QGAPTlAIDhVAYA+OUAgOOkAACsDBUA/OUAgADmAIAE5gCAo/ECAAjmAIAM5gCAEOYAgBTmAICm6QIApeECABjmAICrtQIAqrUCABzmAIAg5gCAr5UCAK6VAgCtpQ
IArKUCAKghDgCpIQ4AqkkOAKtZDgCsaQ4ArWkOAK6ZDgCvmQ4A1OUAgCTmAIAo5gCALOYAgDDmAIA05gCAOOYAgDzmAIC49Q4Auf0OALr1DgC7iQ4AvJ0OAL2FDgC+hQ4Av7UOALDpDgCx6Q4Asv0OALPxDgC01Q4Atd0OALbVDgC3zQ4As8EOAIIVAACBtQAAgLUAAEDmAIC26Q4AteEOAL4QAAC7LQ4Aui0OAIRkAwBE5gCAvxkOAL4RDgC9JQ4AvCkOAEjmAICjhQ4AhogAAIdsAwCmrQ4ATOYAgFDmAIClpQ4AqmkOAKtpDgBU5gCAWOYAgK5VDgCvXQ4ArG0OAK1hDgCziQ4AXOYAgGDmAIBk5gCAaOYAgLaBDgC1iQ4AbOYAgLuVDgC6jQ4AcOYAgHTmAIC/+Q4AvvEOAL2FDgC8hQ4AeOYAgHzmAICA5gCAhOYAgOMMDQCI5gCA4RgNAIzmAIDvrAwAkOYAgJTmAICY5gCAnOYAgKDmAICk5gCAqOYAgKgBDgCpAQ4AqgEOAKsBDgCsAQ4ArQEOAK4BDgCvPQ4AgN0AAIEJAACCGQAArOYAgLDmAICEPAEAvnQAALjmAIC4HQ4AuS0OALolDgC76QEAvPkBAL35AQC+6QEAv+kBALBJDgCxUQ4AslEOALNRDgC0NQ4AtT0OALY1DgC3LQ4Ao4kNALzmAICGrAQAhzwDAMDmAICmgQ0ApYkNAMTmAICrlQ0Aqo0NAMjmAIDM5gCAr/kNAK7xDQCthQ0ArIUNANDmAICznQIAhEgDAL5ABAC2VQMA1OYAgNjmAIC1sQIAunEDALt5AwDc5gCA4OYAgL4xAwC/MQMAvFEDAL1RAwCwkQMAsZkDALKhAwCzoQMAtNEDALXRAwC20QMAt9EDALj1AwC5+QMAus0DALvFAwC83QMAvcUDAL7NAwC/xQMA5OYAgOjmAIDs5gCA8OYAgIV8GQD05gCA+OYAgGTlAICoIQIAqTECAKoxAgCrBQIArB0CAK3xAwCu8QMAr/EDAPzmAIAA5wCABOcAgAjnAIDvUAAADOcAgBDnAIAU5wCA44QAABjnAIDh+AEAHOcAgIAVAACBGQAAggUAACDnAICjmQMAKOcAgIZoBACHYAUALOcAgKZRAgCltQMAMOcAgKt9AgCqdQIANOcAgDjnAICvNQIArjUCAK1VAgCsVQIAPOcAgEDnAIBE5wCASOcAgEznAIBQ5wCAVOcAgO/4AQC+bAQA4YAOAFjnAIDjFAEAXOcAgGDnAIBk5wCAaOcAgGznAIBw5wCAdOcAgLPdAQB45wCAtf0BALb1AQB85wCAgOcAgITnAIC6sQEAu4UBALydAQC9NQEAvj0BAL81AQCpBQYAqLkFAKsVBgCqHQYArT0GAKw9BgCvTQYArl0GACTnAICCHQAAgR0AAIAdAACI5wCAjOcAgJDnAICU5wCAuUEHALidBgC7QQcAukkHAL1FBwC8WQcAv0UHAL5FBwCxCQYAsD0GALOpBgCyAQYAtbkGALSxBgC3rQYAtrEGAKORBgCEjAIAhigAAIfAAwCY5wCAprkGAKWxBgCc5wCAq8kGAKr9BgCg5wCApOcAgK95BgCucQYArXkGAKzRBgCo5wCAs5kHAKznAICw5wCAtlEHALTnAIC45wCAtbEHALptBwC7dQcAvOcAgMDnAIC+WQcAv0UHALxtBwC9ZQcAxOcAgMjnAIDM5wCA0OcAgNTnAIDY5wCA3OcAgO+oBQDg5wCA4TQFAOTnAIDjdAUA6OcAgOznAIDw5wCA9OcAgKMdBgCCLQAAgRUAAIAdAAD45wCAptUGAKU1BgD85wCAq/EGAKrpBgAA6ACAhCgBAK/BBgCu3QYAreEGAKzpBgCoxQYAqdUGAKrVBgCr5QYArP0GAK0VBgCuHQYArxUGAL7sAQAI6ACAhggAAIcgAAAM6ACAEOgAgBToAIAY6ACAuH0GALkFBgC6DQYAuwUGALwBBgC9CQYAvjkGAL85BgCwbQYAsXUGALJ9BgCzdQYAtFkGALVFBgC2TQYAt0UGAKiRAgCpmQIAqqECAKuhAgCs0QIArd0CAK7VAgCvyQIAHOgAgCDoAIAk6ACAvyweACjoAIAs6ACAMOgAgDToAIC4VQMAuV0DALppAwC7ZQMAvGEDAL1hAwC+YQMAv2EDALC5AgCxjQIAsoUCALNtAwC0dQMAtX0DALZ1AwC3bQMAOOgAgDzoAICzIQIAQOgAgLVRAgCEiAMAROgAgLZVAgC05gCAvigcALtBAgC6dQIAvbEDALxZAgC/sQMAvrkDAKNpAgBI6ACATOgAgFDoAIBU6ACAph0CAKUZAgBY6ACAqwkCAKo9AgBc6ACAYOgAgK/5AwCu8QMArfkDAKwRAgCopQIAqbUCAKq9AgCrtQIArK0CAK01AQCuPQEArzUBAL4sHABk6ACAaOgAgGzoAIBw6ACAeOgAgIdoHQCGHB0AuIUBALmNAQC6hQEAu50BALyNAQC9vQEAvrUBAL95AACwUQEAsVEBALJRAQCzUQEAtPEBALXxAQC29QEAt+UBAO/YAACCtQAAgaUAAIClAAB86ACAgOgAgIToAIDvxAYAiOgAgOH0BgCM6ACA4zgBAOPMAACQ6ACA4SgBAJToAICY6ACAtuUBALV1AgCEQBwAs2UCAJzoAICg6ACApOgAgL9lAQC+ZQEAvdUBALzVAQC7xQEAusUBAKjoAICs6ACAo7UdAHToAICw6ACAtOgAgLjoAICmNR4ApaUdALzoAICrFR4AqhUeAMDoAIDE6ACAr7UeAK61HgCtBR4ArAUeAMjoAIDM6ACA0OgAgNToAICADQAAgTUAAII9AADY6ACA3OgAgODoAIC1BQAAcRoAgOG0AgCs2AIAtQUAAHUaAICotR8AqRUfAKodHwCrFR8ArDEfAK09HwCuLR8AryEfAOG0AgCs2AIAtQUAAHkaAIDhtAIArNgCALUFAAB9GgCAuNEAALnZAAC64QAAu+EAALyRAAC9kQAAvpEAAL+RAACwIR8AsTEfALIxHwCzMR8AtAkfALUJHwC28QAAt/EAAOG0AgCs3AIA71QdALUdAACBGgCA4bwCAKzQAgC1KQAAoyUBAKKRAwChFR0AoA0dAOGAHgCFGgCA47wdAOHEAgCz1R4AtQkAAKzYAgCJGgCA4bwCALb9HgC1+R4ArOACALu1HgC6pR4AtQUAAI0aAIC/jR4Avo0eAL2lHgC8pR4AoxUeAOG8AgCs0AIAtREAAI9pJQCmPR4ApTkeAJEaAICrdR4AqmUeAOG0AgCseAEAr00eAK5NHgCtZR4ArGUeAJvdFACa5RUAmQEXAJjhEACfcR8AnnkZAJ35GQCcARsAk+UtAJIRLwCRbSkAkG0pAJf5EQCW8REAlYUsAJSZLQC1JQAA4ZQCAILxJgCDjSoAhJUqAIXhLACGHS4Ah3kuAKy0AgCVGgCAilUvAIspEgCMORIAjRkTAI7xFACPHRYAtQUAAJkaAICSVRcAk5EYAJRxGgCV+RoAlvkcAJd9HgCC4AMAk
wsAgJpVHgCb2QAAnHUCAIMMAICzDACAuIkKAKwBBACthQYAroEGAMwQAgDMfAMAtgwAgJ0aAIDCDACAxQwAgMgMAIAACwCAgaUyArwMAIAE6ACAmpUGAJtVIwK8kQYAvbEAAL6RBgC/rQYAuOkGALmVBgC6kQYAoRoAgLTBBgC1zQYAts0GALfdBgCw/QYAseUGALKdAACz5QYAhVTHA6UaAICH/AAAuAEKAK0aAIDpDACAsRoAgIyRcwCNpAEAzPACAL4NAIDBDQCAiRQAALgZCgCLDAAAGg4AgFMOAIC5DACAvwwAgBkKAICRwAEAywwAgLhtCgDODACA1AwAgNoMAIDdDACA4AwAgLUaAIAoDQCA5gwAgLkaAIDhpB4AKw0AgONUHgCvIXMAzCgCAO8MAIDsDACA8gwAgPUMAID4DACAzIACAJS4AwD7DACAkhQCAO9gHgCQAAIA/gwAgAoNAIC48QoADQ0AgJ8LAIAQDQCAiSkLABMNAICpGgCAvDABAL/EAQC+7AEAFg0AgMzsAgC4xQoAukQBAK0JAIAZDQCAygYAgN8GAIDyBgCAHA0AgPoGAIAfDQCACgcAgC0HAIAYBwCA9gcAgC8HAICpDQCAOgcAgK8NAIBKBwCAtXkAAGcHAIC3cSoCcgcAgLFhAAB0BwCAsw0pAo0HAIC96QAAoAcAgPoHAICtBwCAuRkrAsMHAIC7WRQCHwgAgFoJAIA8CACALw4AgFsIAIA5AACAgQgAgHEAAIDHCACAKwAAgCAJAIA9AACAXAkAgEMAAIBeCQCARQgAgGoIAIBJAACAAAgAgFMAAIB5CQCAWQAAgCINAIBfAACAuw0iAtANAIDMFDYCHwAAgL9lAAC+EQAAvW0AAOUHAICAaQEAgXUBAIJxAQCD3SEChGkHAIWBBwCGgQcAh3EBAIihAQCJrQEAirUHAIuNBwCMlQcAjaUBAE8AAICPpQEAkOEBAJHtBwCSsSECk/0HAJSNBwCVUQYAlvEBAJfZAQCY0QEAmXUGAJp9BgCb1QEAnGkGAJ2ZFAKeUQYAn1EGAKB1FAKhuQYAokkBAKOFLQKkIQEApS0BAKZ1FAKntQYAqKERAqlRFAKqlQYAsSEAgMy8NQLNPDUCbQAAgKoDAICsAwCArwMAgL0hAIDEIQCA2yEAgOIhAIDJAACADwAAgLihBgC6BgCAtwYAgMwAAIDOIQCAtQMAgN0FAIAYBgCAugUCALvVAgC46QUAuf0FAL7JAgC/5RcCvA0CAL0BAgCy4QUAs+EFALCNBQCxnQUAtuUFALfpBQC09QUAte0FAKo9BQCrwQUAqD0FAKk1BQCuzQUAr/UFAKzNBQCtxQUAoj0FAKMFBQCg1QIAoTkFAKYdBQCnBQUApB0FAKUVBQC/BgCAm8EFAD4GAIBVBgCAnt0FAJ8xBACcUQIAndUFAHIGAICJBgCApAMAgDAiAIDbAACAoAMAgI8HAIDuBwCA8gcAgJAJAIACCACABggAgJYLAICUCQCArwoAgG8HAICLBwCAlwcAgKIHAICqBwCAqgkAgPsOAIASDwCAHw8AgMwEMwLNsDACzCAzAs3gMALMEDACzGgwAsxYMALNjDACzGgxAs0UMQLM1DECzRQ2AsxwIALN0CcCzDA2AswkMQLMDDwCzWg/AswYPwLNND8CzBg9As3AMgLMRDwCzBg5Asw4MgLNqDICzIgyAs34MwLMfDMCzUAzAswoMwLNCDMCzMghAs0kJgLMrCYCzEA4AsyYJQLNyDoCzBwkAs0QJALMhDsCzag7AsysJQLNvDoCzKw4Asz4JwLM4DgCzXQ4AicPAID2BgCAYQ0AgIgNAIDNICoCzBwrAqoGAIAsIgCAzKQgAs2gJwLMOCYCygQAgMw4OgLNPDsCzBA5As1gPgLMoAMAvj0NAL3tLALWBACAu1UjAgQJAIC5PSICzwYAgNkHAIClBACAoA0AgLIEAIBvBQCA9AYAgL4EAIB1BQCAr70MAK6ZLgKtpQwAwgUAgKvFIgIDBgCAxAQAgCMGAIDQBACAyAUAgCkGAIBdBgCAowEYAqAEAIAaBwCAHQcAgJ9dDACeUQwAnUUMACcHAICbWSECrwcAgLEHAIC0BwCAuAcAgCoHAIDOBwCA0AcAgJMtJgLTBwCAbAgAgG8IAICPBQwAjnEMAI1lDAB5CACAi0UgAmAJAICJNS8CYwkAgGcJAIB8CACAcAkAgHMJAIC9AwCAACIAgIFdDACAYQwAgAABAIEYAACCAAQABCIAgIQQBwCFFAYAhuQIAIc8AgCILAUAiaQFAIoAeAAIIgCAjCQAAAwiAIAUIgCAECIAgLgRAACRxHsAkkh6AJNMeQAcIgCAzOgCAJbwCQC4OQAAkMAJACQiAICS8AkAzPgCAJS0CQC4DQAAKCIAgMwcAgC4BQAANCIAgMzkAgC4HQAAOCIAgDwiAIBDIgCAWiIAgKiMCACp5HsAYSIAgKvUBgDM5AIAuA0AAGsiAIDMlAIAbyIAgLGAewC4CQAAuBUAAMz8AgC15AgAcyIAgMzYAgB3IgCAuAUAALqcBQC7XAUAvAB8AL30fwC++H0Av/xyAIAJOgKBDToCggE6AoMFOgKEGToChR06AoYROgKHFToCiCk6AoktOgKKIToCiyU6Aow5OgKNPToCjjE6Ao81OgLM8AIAkekPAIMiAIDMzAIAuBkAAH8iAIDM3AIAl+UPALg1AAC4DQAAjyIAgMz8AgC4BQAAkyIAgMwwAgCXIgCAzNACAJsiAICfIgCAzIgCAKQtDwClVQ8Apl0PAMyUAgCoqToCqa06ArjVAACjIgCAuDUAAKciAIDMUAMAr7U6AswsAwCrIgCAzBgDALMFDwC0HQ8AzyIAgLYJDwC3CQ8Avmh9ALhtAAC4RQAAzDgDALwpDwDTIgCAviUPAMxYAwCH5Q4AzOg6Ari9AQC4yQEAzPA1As2kMwLMgCICzXwlAs2UNgLMBCkCzew7AsxkOgK45QEAuMEBAInVDgCI1Q4Al7EOALgNAACvIgCAsyIAgLciAIC4GQAAuyIAgNciAICfaTsC2yIAgL8iAIC4PQAAzMQCAMz4AgDDIgCAxyIAgLjZAADLIgCA3yIAgLjRAADjIgCAuPEAAMzMMwLnIgCAuMkAAMzoMwLrIgCAuNUAAKllAAC4yQAAzNgCAKq5BgC3TQ0Atk0NALU1DgC0NQ4AuFUAABUjAICxGQ8AsCkOAL/1AwC+UQ0AvVkNALw1DAC7XQ0Aul0NALldDQC4XQ0AgL0KAIHFCgCCFQQAg8kKAMx8BQCF3QoAhtUKAIfNCgDMVAUAifEKAIq5CACLDQgAjBEIAI0VCACOtScCj+UKAJBpCACRbQgAknEIAJNtJALMEAUAlR0IAJaFCgDMEAUAzDQFAJk9CACaiQoAmw0IAJwRCACdFQgAzEgFAMwQAgCgZQoAoW0KAKJlCgC4BQcApLEEAMzoAgCmsQQAuA0HAKiBBADM/AIAqpkIAKtdCgCsuQgArakEALglBwCvNQgAsNEIALHxBADMwAIAs40I
qAAOq6gQDquoIA6rqDAOq6hADquoUA6rqGAOq6hwDquogA6rqJAOq6igDquosA6rqMAOq6jQDquo4A6rqPAOq6kADqupEA6rqSAOq6kwDqupQA6rqVAOq6lgDqupcA6rqYAOq6mQDqupoA6rqbAOq6nADqup0A6rqeAOq6nwDquqAA6rqhAOq6ogDquqMA6rqkAOq6pQDquqYA6rqnAOq6qADquqkA6rqqAOq6qwDquqwA6rqtAOq6rgDquq8A6rqwAOq6sQDqurIA6rqzAOq6tADqurUA6rq2AOq6twDqurgA6rq5AOq6ugDqursA6rq8AOq6vQDqur4A6rq/AOq7gADqu4EA6ruCAOq7gwDqu4QA6ruFAOq7hgDqu4cA6ruIAOq7iQDqu4oA6ruLAOq7jADqu40A6ruOAOq7jwDqu5AA6ruRAOq7kgDqu5MA6ruUAOq7lQDqu5YA6ruXAOq7mADqu5kA6ruaAOq7mwDqu5wA6rudAOq7ngDqu58A6rugAOq7oQDqu6IA6rujAOq7pADqu6UA6rumAOq7pwDqu6gA6rupAOq7qgDqu6sA6rusAOq7rQDqu64A6ruvAOq7sADqu7EA6ruyAOq7swDqu7QA6ru1AOq7tgDqu7cA6ru4AOq7uQDqu7oA6ru7AOq7vADqu70A6ru+AOq7vwDqvIAA6ryBAOq8ggDqvIMA6ryEAOq8hQDqvIYA6ryHAOq8iADqvIkA6ryKAOq8iwDqvIwA6ryNAOq8jgDqvI8A6ryQAOq8kQDqvJIA6ryTAOq8lADqvJUA6ryWAOq8lwDqvJgA6ryZAOq8mgDqvJsA6rycAOq8nQDqvJ4A6ryfAOq8oADqvKEA6ryiAOq8owDqvKQA6rylAOq8pgDqvKcA6ryoAOq8qQDqvKoA6ryrAOq8rADqvK0A6ryuAOq8rwDqvLAA6ryxAOq8sgDqvLMA6ry0AOq8tQDqvLYA6ry3AOq8uADqvLkA6ry6AOq8uwDqvLwA6ry9AOq8vgDqvL8A6r2AAOq9gQDqvYIA6r2DAOq9hADqvYUA6r2GAOq9hwDqvYgA6r2JAOq9igDqvYsA6r2MAOq9jQDqvY4A6r2PAOq9kADqvZEA6r2SAOq9kwDqvZQA6r2VAOq9lgDqvZcA6r2YAOq9mQDqvZoA6r2bAOq9nADqvZ0A6r2eAOq9nwDqvaAA6r2hAOq9ogDqvaMA6r2kAOq9pQDqvaYA6r2nAOq9qADqvakA6r2qAOq9qwDqvawA6r2tAOq9rgDqva8A6r2wAOq9sQDqvbIA6r2zAOq9tADqvbUA6r22AOq9twDqvbgA6r25AOq9ugDqvbsA6r28AOq9vQDqvb4A6r2/AOq+gADqvoEA6r6CAOq+gwDqvoQA6r6FAOq+hgDqvocA6r6IAOq+iQDqvooA6r6LAOq+jADqvo0A6r6OAOq+jwDqvpAA6r6RAOq+kgDqvpMA6r6UAOq+lQDqvpYA6r6XAOq+mADqvpkA6r6aAOq+mwDqvpwA6r6dAOq+ngDqvp8A6r6gAOq+oQDqvqIA6r6jAOq+pADqvqUA6r6mAOq+pwDqvqgA6r6pAOq+qgDqvqsA6r6sAOq+rQDqvq4A6r6vAOq+sADqvrEA6r6yAOq+swDqvrQA6r61AOq+tgDqvrcA6r64AOq+uQDqvroA6r67AOq+vADqvr0A6r6+AOq+vwDqv4AA6r+BAOq/ggDqv4MA6r+EAOq/hQDqv4YA6r+HAOq/iADqv4kA6r+KAOq/iwDqv4wA6r+NAOq/jgDqv48A6r+QAOq/kQDqv5IA6r+TAOq/lADqv5UA6r+WAOq/lwDqv5gA6r+ZAOq/mgDqv5sA6r+cAOq/nQDqv54A6r+fAOq/oADqv6EA6r+iAOq/owDqv6QA6r+lAOq/pgDqv6cA6r+oAOq/qQDqv6oA6r+rAOq/rADqv60A6r+uAOq/rwDqv7AA6r+xAOq/sgDqv7MA6r+0AOq/tQDqv7YA6r+3AOq/uADqv7kA6r+6AOq/uwDqv7wA6r+9AOq/vgDqv78A64CAAOuAgQDrgIIA64CDAOuAhADrgIUA64CGAOuAhwDrgIgA64CJAOuAigDrgIsA64CMAOuAjQDrgI4A64CPAOuAkADrgJEA64CSAOuAkwDrgJQA64CVAOuAlgDrgJcA64CYAOuAmQDrgJoA64CbAOuAnADrgJ0A64CeAOuAnwDrgKAA64ChAOuAogDrgKMA64CkAOuApQDrgKYA64CnAOuAqADrgKkA64CqAOuAqwDrgKwA64CtAOuArgDrgK8A64CwAOuAsQDrgLIA64CzAOuAtADrgLUA64C2AOuAtwDrgLgA64C5AOuAugDrgLsA64C8AOuAvQDrgL4A64C/AOuBgADrgYEA64GCAOuBgwDrgYQA64GFAOuBhgDrgYcA64GIAOuBiQDrgYoA64GLAOuBjADrgY0A64GOAOuBjwDrgZAA64GRAOuBkgDrgZMA64GUAOuBlQDrgZYA64GXAOuBmADrgZkA64GaAOuBmwDrgZwA64GdAOuBngDrgZ8A64GgAOuBoQDrgaIA64GjAOuBpADrgaUA64GmAOuBpwDrgagA64GpAOuBqgDrgasA64GsAOuBrQDrga4A64GvAOuBsADrgbEA64GyAOuBswDrgbQA64G1AOuBtgDrgbcA64G4AOuBuQDrgboA64G7AOuBvADrgb0A64G+AOuBvwDrgoAA64KBAOuCggDrgoMA64KEAOuChQDrgoYA64KHAOuCiADrgokA64KKAOuCiwDrgowA64KNAOuCjgDrgo8A64KQAOuCkQDrgpIA64KTAOuClADrgpUA64KWAOuClwDrgpgA64KZAOuCmgDrgpsA64KcAOuCnQDrgp4A64KfAOuCoADrgqEA64KiAOuCowDrgqQA64KlAOuCpgDrgqcA64KoAOuCqQDrgqoA64KrAOuCrADrgq0A64KuAOuCrwDrgrAA64KxAOuCsgDrgrMA64K0AOuCtQDrgrYA64K3AOuCuADrgrkA64K6AOuCuwDrgrwA64K9AOuCvgDrgr8A64OAAOuDgQDrg4IA64ODAOuDhADrg4UA64OGAOuDhwDrg4gA64OJAOuDigDrg4sA64OMAOuDjQDrg44A64OPAOuDkADrg5EA64OSAOuDkwDrg5QA64OVAOuDlgDrg5cA64OYAOuDmQDrg5oA64ObAOuDnADrg50A64OeAOuDnwDrg6AA64OhAOuDogDrg6MA64OkAOuDpQDrg6YA64OnAOuDqADrg6kA64OqAOuDqwDrg6wA64OtAOuDrgDrg68A64OwAOuDsQDrg7IA64OzAOuDtADrg7UA64O2AOuDtwDrg7gA64O5AOuDugDrg7sA64O8AOuDvQDrg74A64O/AOuEgADrhIEA64SCAOuEgwDrhIQA64SFAOuEhgDrhIcA64SIAOuEiQDrhIoA64SLAOuEjADrhI0A64SOAOuEjwDrhJAA64SRAOuEkgDrhJMA64SUAOuElQDrhJYA64SXAOuEmADrhJkA64SaA
OuEmwDrhJwA64SdAOuEngDrhJ8A64SgAOuEoQDrhKIA64SjAOuEpADrhKUA64SmAOuEpwDrhKgA64SpAOuEqgDrhKsA64SsAOuErQDrhK4A64SvAOuEsADrhLEA64SyAOuEswDrhLQA64S1AOuEtgDrhLcA64S4AOuEuQDrhLoA64S7AOuEvADrhL0A64S+AOuEvwDrhYAA64WBAOuFggDrhYMA64WEAOuFhQDrhYYA64WHAOuFiADrhYkA64WKAOuFiwDrhYwA64WNAOuFjgDrhY8A64WQAOuFkQDrhZIA64WTAOuFlADrhZUA64WWAOuFlwDrhZgA64WZAOuFmgDrhZsA64WcAOuFnQDrhZ4A64WfAOuFoADrhaEA64WiAOuFowDrhaQA64WlAOuFpgDrhacA64WoAOuFqQDrhaoA64WrAOuFrADrha0A64WuAOuFrwDrhbAA64WxAOuFsgDrhbMA64W0AOuFtQDrhbYA64W3AOuFuADrhbkA64W6AOuFuwDrhbwA64W9AOuFvgDrhb8A64aAAOuGgQDrhoIA64aDAOuGhADrhoUA64aGAOuGhwDrhogA64aJAOuGigDrhosA64aMAOuGjQDrho4A64aPAOuGkADrhpEA64aSAOuGkwDrhpQA64aVAOuGlgDrhpcA64aYAOuGmQDrhpoA64abAOuGnADrhp0A64aeAOuGnwDrhqAA64ahAOuGogDrhqMA64akAOuGpQDrhqYA64anAOuGqADrhqkA64aqAOuGqwDrhqwA64atAOuGrgDrhq8A64awAOuGsQDrhrIA64azAOuGtADrhrUA64a2AOuGtwDrhrgA64a5AOuGugDrhrsA64a8AOuGvQDrhr4A64a/AOuHgADrh4EA64eCAOuHgwDrh4QA64eFAOuHhgDrh4cA64eIAOuHiQDrh4oA64eLAOuHjADrh40A64eOAOuHjwDrh5AA64eRAOuHkgDrh5MA64eUAOuHlQDrh5YA64eXAOuHmADrh5kA64eaAOuHmwDrh5wA64edAOuHngDrh58A64egAOuHoQDrh6IA64ejAOuHpADrh6UA64emAOuHpwDrh6gA64epAOuHqgDrh6sA64esAOuHrQDrh64A64evAOuHsADrh7EA64eyAOuHswDrh7QA64e1AOuHtgDrh7cA64e4AOuHuQDrh7oA64e7AOuHvADrh70A64e+AOuHvwDriIAA64iBAOuIggDriIMA64iEAOuIhQDriIYA64iHAOuIiADriIkA64iKAOuIiwDriIwA64iNAOuIjgDriI8A64iQAOuIkQDriJIA64iTAOuIlADriJUA64iWAOuIlwDriJgA64iZAOuImgDriJsA64icAOuInQDriJ4A64ifAOuIoADriKEA64iiAOuIowDriKQA64ilAOuIpgDriKcA64ioAOuIqQDriKoA64irAOuIrADriK0A64iuAOuIrwDriLAA64ixAOuIsgDriLMA64i0AOuItQDriLYA64i3AOuIuADriLkA64i6AOuIuwDriLwA64i9AOuIvgDriL8A64mAAOuJgQDriYIA64mDAOuJhADriYUA64mGAOuJhwDriYgA64mJAOuJigDriYsA64mMAOuJjQDriY4A64mPAOuJkADriZEA64mSAOuJkwDriZQA64mVAOuJlgDriZcA64mYAOuJmQDriZoA64mbAOuJnADriZ0A64meAOuJnwDriaAA64mhAOuJogDriaMA64mkAOuJpQDriaYA64mnAOuJqADriakA64mqAOuJqwDriawA64mtAOuJrgDria8A64mwAOuJsQDribIA64mzAOuJtADribUA64m2AOuJtwDribgA64m5AOuJugDribsA64m8AOuJvQDrib4A64m/AOuKgADrioEA64qCAOuKgwDrioQA64qFAOuKhgDriocA64qIAOuKiQDriooA64qLAOuKjADrio0A64qOAOuKjwDripAA64qRAOuKkgDripMA64qUAOuKlQDripYA64qXAOuKmADripkA64qaAOuKmwDripwA64qdAOuKngDrip8A64qgAOuKoQDriqIA64qjAOuKpADriqUA64qmAOuKpwDriqgA64qpAOuKqgDriqsA64qsAOuKrQDriq4A64qvAOuKsADrirEA64qyAOuKswDrirQA64q1AOuKtgDrircA64q4AOuKuQDriroA64q7AOuKvADrir0A64q+AOuKvwDri4AA64uBAOuLggDri4MA64uEAOuLhQDri4YA64uHAOuLiADri4kA64uKAOuLiwDri4wA64uNAOuLjgDri48A64uQAOuLkQDri5IA64uTAOuLlADri5UA64uWAOuLlwDri5gA64uZAOuLmgDri5sA64ucAOuLnQDri54A64ufAOuLoADri6EA64uiAOuLowDri6QA64ulAOuLpgDri6cA64uoAOuLqQDri6oA64urAOuLrADri60A64uuAOuLrwDri7AA64uxAOuLsgDri7MA64u0AOuLtQDri7YA64u3AOuLuADri7kA64u6AOuLuwDri7wA64u9AOuLvgDri78A64yAAOuMgQDrjIIA64yDAOuMhADrjIUA64yGAOuMhwDrjIgA64yJAOuMigDrjIsA64yMAOuMjQDrjI4A64yPAOuMkADrjJEA64ySAOuMkwDrjJQA64yVAOuMlgDrjJcA64yYAOuMmQDrjJoA64ybAOuMnADrjJ0A64yeAOuMnwDrjKAA64yhAOuMogDrjKMA64ykAOuMpQDrjKYA64ynAOuMqADrjKkA64yqAOuMqwDrjKwA64ytAOuMrgDrjK8A64ywAOuMsQDrjLIA64yzAOuMtADrjLUA64y2AOuMtwDrjLgA64y5AOuMugDrjLsA64y8AOuMvQDrjL4A64y/AOuNgADrjYEA642CAOuNgwDrjYQA642FAOuNhgDrjYcA642IAOuNiQDrjYoA642LAOuNjADrjY0A642OAOuNjwDrjZAA642RAOuNkgDrjZMA642UAOuNlQDrjZYA642XAOuNmADrjZkA642aAOuNmwDrjZwA642dAOuNngDrjZ8A642gAOuNoQDrjaIA642jAOuNpADrjaUA642mAOuNpwDrjagA642pAOuNqgDrjasA642sAOuNrQDrja4A642vAOuNsADrjbEA642yAOuNswDrjbQA6421AOuNtgDrjbcA6424AOuNuQDrjboA6427AOuNvADrjb0A642+AOuNvwDrjoAA646BAOuOggDrjoMA646EAOuOhQDrjoYA646HAOuOiADrjokA646KAOuOiwDrjowA646NAOuOjgDrjo8A646QAOuOkQDrjpIA646TAOuOlADrjpUA646WAOuOlwDrjpgA646ZAOuOmgDrjpsA646cAOuOnQDrjp4A646fAOuOoADrjqEA646iAOuOowDrjqQA646lAOuOpgDrjqcA646oAOuOqQDrjqoA646rAOuOrADrjq0A646uAOuOrwDrjrAA646xAOuOsgDrjrMA6460AOuO
tQDrjrYA6463AOuOuADrjrkA6466AOuOuwDrjrwA6469AOuOvgDrjr8A64+AAOuPgQDrj4IA64+DAOuPhADrj4UA64+GAOuPhwDrj4gA64+JAOuPigDrj4sA64+MAOuPjQDrj44A64+PAOuPkADrj5EA64+SAOuPkwDrj5QA64+VAOuPlgDrj5cA64+YAOuPmQDrj5oA64+bAOuPnADrj50A64+eAOuPnwDrj6AA64+hAOuPogDrj6MA64+kAOuPpQDrj6YA64+nAOuPqADrj6kA64+qAOuPqwDrj6wA64+tAOuPrgDrj68A64+wAOuPsQDrj7IA64+zAOuPtADrj7UA64+2AOuPtwDrj7gA64+5AOuPugDrj7sA64+8AOuPvQDrj74A64+/AOuQgADrkIEA65CCAOuQgwDrkIQA65CFAOuQhgDrkIcA65CIAOuQiQDrkIoA65CLAOuQjADrkI0A65COAOuQjwDrkJAA65CRAOuQkgDrkJMA65CUAOuQlQDrkJYA65CXAOuQmADrkJkA65CaAOuQmwDrkJwA65CdAOuQngDrkJ8A65CgAOuQoQDrkKIA65CjAOuQpADrkKUA65CmAOuQpwDrkKgA65CpAOuQqgDrkKsA65CsAOuQrQDrkK4A65CvAOuQsADrkLEA65CyAOuQswDrkLQA65C1AOuQtgDrkLcA65C4AOuQuQDrkLoA65C7AOuQvADrkL0A65C+AOuQvwDrkYAA65GBAOuRggDrkYMA65GEAOuRhQDrkYYA65GHAOuRiADrkYkA65GKAOuRiwDrkYwA65GNAOuRjgDrkY8A65GQAOuRkQDrkZIA65GTAOuRlADrkZUA65GWAOuRlwDrkZgA65GZAOuRmgDrkZsA65GcAOuRnQDrkZ4A65GfAOuRoADrkaEA65GiAOuRowDrkaQA65GlAOuRpgDrkacA65GoAOuRqQDrkaoA65GrAOuRrADrka0A65GuAOuRrwDrkbAA65GxAOuRsgDrkbMA65G0AOuRtQDrkbYA65G3AOuRuADrkbkA65G6AOuRuwDrkbwA65G9AOuRvgDrkb8A65KAAOuSgQDrkoIA65KDAOuShADrkoUA65KGAOuShwDrkogA65KJAOuSigDrkosA65KMAOuSjQDrko4A65KPAOuSkADrkpEA65KSAOuSkwDrkpQA65KVAOuSlgDrkpcA65KYAOuSmQDrkpoA65KbAOuSnADrkp0A65KeAOuSnwDrkqAA65KhAOuSogDrkqMA65KkAOuSpQDrkqYA65KnAOuSqADrkqkA65KqAOuSqwDrkqwA65KtAOuSrgDrkq8A65KwAOuSsQDrkrIA65KzAOuStADrkrUA65K2AOuStwDrkrgA65K5AOuSugDrkrsA65K8AOuSvQDrkr4A65K/AOuTgADrk4EA65OCAOuTgwDrk4QA65OFAOuThgDrk4cA65OIAOuTiQDrk4oA65OLAOuTjADrk40A65OOAOuTjwDrk5AA65ORAOuTkgDrk5MA65OUAOuTlQDrk5YA65OXAOuTmADrk5kA65OaAOuTmwDrk5wA65OdAOuTngDrk58A65OgAOuToQDrk6IA65OjAOuTpADrk6UA65OmAOuTpwDrk6gA65OpAOuTqgDrk6sA65OsAOuTrQDrk64A65OvAOuTsADrk7EA65OyAOuTswDrk7QA65O1AOuTtgDrk7cA65O4AOuTuQDrk7oA65O7AOuTvADrk70A65O+AOuTvwDrlIAA65SBAOuUggDrlIMA65SEAOuUhQDrlIYA65SHAOuUiADrlIkA65SKAOuUiwDrlIwA65SNAOuUjgDrlI8A65SQAOuUkQDrlJIA65STAOuUlADrlJUA65SWAOuUlwDrlJgA65SZAOuUmgDrlJsA65ScAOuUnQDrlJ4A65SfAOuUoADrlKEA65SiAOuUowDrlKQA65SlAOuUpgDrlKcA65SoAOuUqQDrlKoA65SrAOuUrADrlK0A65SuAOuUrwDrlLAA65SxAOuUsgDrlLMA65S0AOuUtQDrlLYA65S3AOuUuADrlLkA65S6AOuUuwDrlLwA65S9AOuUvgDrlL8A65WAAOuVgQDrlYIA65WDAOuVhADrlYUA65WGAOuVhwDrlYgA65WJAOuVigDrlYsA65WMAOuVjQDrlY4A65WPAOuVkADrlZEA65WSAOuVkwDrlZQA65WVAOuVlgDrlZcA65WYAOuVmQDrlZoA65WbAOuVnADrlZ0A65WeAOuVnwDrlaAA65WhAOuVogDrlaMA65WkAOuVpQDrlaYA65WnAOuVqADrlakA65WqAOuVqwDrlawA65WtAOuVrgDrla8A65WwAOuVsQDrlbIA65WzAOuVtADrlbUA65W2AOuVtwDrlbgA65W5AOuVugDrlbsA65W8AOuVvQDrlb4A65W/AOuWgADrloEA65aCAOuWgwDrloQA65aFAOuWhgDrlocA65aIAOuWiQDrlooA65aLAOuWjADrlo0A65aOAOuWjwDrlpAA65aRAOuWkgDrlpMA65aUAOuWlQDrlpYA65aXAOuWmADrlpkA65aaAOuWmwDrlpwA65adAOuWngDrlp8A65agAOuWoQDrlqIA65ajAOuWpADrlqUA65amAOuWpwDrlqgA65apAOuWqgDrlqsA65asAOuWrQDrlq4A65avAOuWsADrlrEA65ayAOuWswDrlrQA65a1AOuWtgDrlrcA65a4AOuWuQDrlroA65a7AOuWvADrlr0A65a+AOuWvwDrl4AA65eBAOuXggDrl4MA65eEAOuXhQDrl4YA65eHAOuXiADrl4kA65eKAOuXiwDrl4wA65eNAOuXjgDrl48A65eQAOuXkQDrl5IA65eTAOuXlADrl5UA65eWAOuXlwDrl5gA65eZAOuXmgDrl5sA65ecAOuXnQDrl54A65efAOuXoADrl6EA65eiAOuXowDrl6QA65elAOuXpgDrl6cA65eoAOuXqQDrl6oA65erAOuXrADrl60A65euAOuXrwDrl7AA65exAOuXsgDrl7MA65e0AOuXtQDrl7YA65e3AOuXuADrl7kA65e6AOuXuwDrl7wA65e9AOuXvgDrl78A65iAAOuYgQDrmIIA65iDAOuYhADrmIUA65iGAOuYhwDrmIgA65iJAOuYigDrmIsA65iMAOuYjQDrmI4A65iPAOuYkADrmJEA65iSAOuYkwDrmJQA65iVAOuYlgDrmJcA65iYAOuYmQDrmJoA65ibAOuYnADrmJ0A65ieAOuYnwDrmKAA65ihAOuYogDrmKMA65ikAOuYpQDrmKYA65inAOuYqADrmKkA65iqAOuYqwDrmKwA65itAOuYrgDrmK8A65iwAOuYsQDrmLIA65izAOuYtADrmLUA65i2AOuYtwDrmLgA65i5AOuYugDrmLsA65i8AOuYvQDrmL4A65i/AOuZgADrmYEA65mCAOuZgwDrmYQA65mFAOuZhgDrmYcA65mIAOuZiQDrmYoA65mLAOuZjADrmY0A65mOAOuZjwD
rmZAA65mRAOuZkgDrmZMA65mUAOuZlQDrmZYA65mXAOuZmADrmZkA65maAOuZmwDrmZwA65mdAOuZngDrmZ8A65mgAOuZoQDrmaIA65mjAOuZpADrmaUA65mmAOuZpwDrmagA65mpAOuZqgDrmasA65msAOuZrQDrma4A65mvAOuZsADrmbEA65myAOuZswDrmbQA65m1AOuZtgDrmbcA65m4AOuZuQDrmboA65m7AOuZvADrmb0A65m+AOuZvwDrmoAA65qBAOuaggDrmoMA65qEAOuahQDrmoYA65qHAOuaiADrmokA65qKAOuaiwDrmowA65qNAOuajgDrmo8A65qQAOuakQDrmpIA65qTAOualADrmpUA65qWAOualwDrmpgA65qZAOuamgDrmpsA65qcAOuanQDrmp4A65qfAOuaoADrmqEA65qiAOuaowDrmqQA65qlAOuapgDrmqcA65qoAOuaqQDrmqoA65qrAOuarADrmq0A65quAOuarwDrmrAA65qxAOuasgDrmrMA65q0AOuatQDrmrYA65q3AOuauADrmrkA65q6AOuauwDrmrwA65q9AOuavgDrmr8A65uAAOubgQDrm4IA65uDAOubhADrm4UA65uGAOubhwDrm4gA65uJAOubigDrm4sA65uMAOubjQDrm44A65uPAOubkADrm5EA65uSAOubkwDrm5QA65uVAOublgDrm5cA65uYAOubmQDrm5oA65ubAOubnADrm50A65ueAOubnwDrm6AA65uhAOubogDrm6MA65ukAOubpQDrm6YA65unAOubqADrm6kA65uqAOubqwDrm6wA65utAOubrgDrm68A65uwAOubsQDrm7IA65uzAOubtADrm7UA65u2AOubtwDrm7gA65u5AOubugDrm7sA65u8AOubvQDrm74A65u/AOucgADrnIEA65yCAOucgwDrnIQA65yFAOuchgDrnIcA65yIAOuciQDrnIoA65yLAOucjADrnI0A65yOAOucjwDrnJAA65yRAOuckgDrnJMA65yUAOuclQDrnJYA65yXAOucmADrnJkA65yaAOucmwDrnJwA65ydAOucngDrnJ8A65ygAOucoQDrnKIA65yjAOucpADrnKUA65ymAOucpwDrnKgA65ypAOucqgDrnKsA65ysAOucrQDrnK4A65yvAOucsADrnLEA65yyAOucswDrnLQA65y1AOuctgDrnLcA65y4AOucuQDrnLoA65y7AOucvADrnL0A65y+AOucvwDrnYAA652BAOudggDrnYMA652EAOudhQDrnYYA652HAOudiADrnYkA652KAOudiwDrnYwA652NAOudjgDrnY8A652QAOudkQDrnZIA652TAOudlADrnZUA652WAOudlwDrnZgA652ZAOudmgDrnZsA652cAOudnQDrnZ4A652fAOudoADrnaEA652iAOudowDrnaQA652lAOudpgDrnacA652oAOudqQDrnaoA652rAOudrADrna0A652uAOudrwDrnbAA652xAOudsgDrnbMA6520AOudtQDrnbYA6523AOuduADrnbkA6526AOuduwDrnbwA6529AOudvgDrnb8A656AAOuegQDrnoIA656DAOuehADrnoUA656GAOuehwDrnogA656JAOueigDrnosA656MAOuejQDrno4A656PAOuekADrnpEA656SAOuekwDrnpQA656VAOuelgDrnpcA656YAOuemQDrnpoA656bAOuenADrnp0A656eAOuenwDrnqAA656hAOueogDrnqMA656kAOuepQDrnqYA656nAOueqADrnqkA656qAOueqwDrnqwA656tAOuergDrnq8A656wAOuesQDrnrIA656zAOuetADrnrUA6562AOuetwDrnrgA6565AOueugDrnrsA6568AOuevQDrnr4A656/AOufgADrn4EA65+CAOufgwDrn4QA65+FAOufhgDrn4cA65+IAOufiQDrn4oA65+LAOufjADrn40A65+OAOufjwDrn5AA65+RAOufkgDrn5MA65+UAOuflQDrn5YA65+XAOufmADrn5kA65+aAOufmwDrn5wA65+dAOufngDrn58A65+gAOufoQDrn6IA65+jAOufpADrn6UA65+mAOufpwDrn6gA65+pAOufqgDrn6sA65+sAOufrQDrn64A65+vAOufsADrn7EA65+yAOufswDrn7QA65+1AOuftgDrn7cA65+4AOufuQDrn7oA65+7AOufvADrn70A65++AOufvwDroIAA66CBAOugggDroIMA66CEAOughQDroIYA66CHAOugiADroIkA66CKAOugiwDroIwA66CNAOugjgDroI8A66CQAOugkQDroJIA66CTAOuglADroJUA66CWAOuglwDroJgA66CZAOugmgDroJsA66CcAOugnQDroJ4A66CfAOugoADroKEA66CiAOugowDroKQA66ClAOugpgDroKcA66CoAOugqQDroKoA66CrAOugrADroK0A66CuAOugrwDroLAA66CxAOugsgDroLMA66C0AOugtQDroLYA66C3AOuguADroLkA66C6AOuguwDroLwA66C9AOugvgDroL8A66GAAOuhgQDroYIA66GDAOuhhADroYUA66GGAOuhhwDroYgA66GJAOuhigDroYsA66GMAOuhjQDroY4A66GPAOuhkADroZEA66GSAOuhkwDroZQA66GVAOuhlgDroZcA66GYAOuhmQDroZoA66GbAOuhnADroZ0A66GeAOuhnwDroaAA66GhAOuhogDroaMA66GkAOuhpQDroaYA66GnAOuhqADroakA66GqAOuhqwDroawA66GtAOuhrgDroa8A66GwAOuhsQDrobIA66GzAOuhtADrobUA66G2AOuhtwDrobgA66G5AOuhugDrobsA66G8AOuhvQDrob4A66G/AOuigADrooEA66KCAOuigwDrooQA66KFAOuihgDroocA66KIAOuiiQDroooA66KLAOuijADroo0A66KOAOuijwDropAA66KRAOuikgDropMA66KUAOuilQDropYA66KXAOuimADropkA66KaAOuimwDropwA66KdAOuingDrop8A66KgAOuioQDroqIA66KjAOuipADroqUA66KmAOuipwDroqgA66KpAOuiqgDroqsA66KsAOuirQDroq4A66KvAOuisADrorEA66KyAOuiswDrorQA66K1AOuitgDrorcA66K4AOuiuQDroroA66K7AOuivADror0A66K+AOuivwDro4AA66OBAOujggDro4MA66OEAOujhQDro4YA66OHAOujiADro4kA66OKAOujiwDro4wA66ONAOujjgDro48A66OQAOujkQDro5IA66OTAOujlADro5UA66OWAOujlwDro5gA66OZAOujmgDro5sA66OcAOujnQDro54A66OfAOujoADro6EA66OiAOujowDro6QA66OlAOujpgDro6cA66OoAOujqQDro6
oA66OrAOujrADro60A66OuAOujrwDro7AA66OxAOujsgDro7MA66O0AOujtQDro7YA66O3AOujuADro7kA66O6AOujuwDro7wA66O9AOujvgDro78A66SAAOukgQDrpIIA66SDAOukhADrpIUA66SGAOukhwDrpIgA66SJAOukigDrpIsA66SMAOukjQDrpI4A66SPAOukkADrpJEA66SSAOukkwDrpJQA66SVAOuklgDrpJcA66SYAOukmQDrpJoA66SbAOuknADrpJ0A66SeAOuknwDrpKAA66ShAOukogDrpKMA66SkAOukpQDrpKYA66SnAOukqADrpKkA66SqAOukqwDrpKwA66StAOukrgDrpK8A66SwAOuksQDrpLIA66SzAOuktADrpLUA66S2AOuktwDrpLgA66S5AOukugDrpLsA66S8AOukvQDrpL4A66S/AOulgADrpYEA66WCAOulgwDrpYQA66WFAOulhgDrpYcA66WIAOuliQDrpYoA66WLAOuljADrpY0A66WOAOuljwDrpZAA66WRAOulkgDrpZMA66WUAOullQDrpZYA66WXAOulmADrpZkA66WaAOulmwDrpZwA66WdAOulngDrpZ8A66WgAOuloQDrpaIA66WjAOulpADrpaUA66WmAOulpwDrpagA66WpAOulqgDrpasA66WsAOulrQDrpa4A66WvAOulsADrpbEA66WyAOulswDrpbQA66W1AOultgDrpbcA66W4AOuluQDrpboA66W7AOulvADrpb0A66W+AOulvwDrpoAA66aBAOumggDrpoMA66aEAOumhQDrpoYA66aHAOumiADrpokA66aKAOumiwDrpowA66aNAOumjgDrpo8A66aQAOumkQDrppIA66aTAOumlADrppUA66aWAOumlwDrppgA66aZAOummgDrppsA66acAOumnQDrpp4A66afAOumoADrpqEA66aiAOumowDrpqQA66alAOumpgDrpqcA66aoAOumqQDrpqoA66arAOumrADrpq0A66auAOumrwDrprAA66axAOumsgDrprMA66a0AOumtQDrprYA66a3AOumuADrprkA66a6AOumuwDrprwA66a9AOumvgDrpr8A66eAAOungQDrp4IA66eDAOunhADrp4UA66eGAOunhwDrp4gA66eJAOunigDrp4sA66eMAOunjQDrp44A66ePAOunkADrp5EA66eSAOunkwDrp5QA66eVAOunlgDrp5cA66eYAOunmQDrp5oA66ebAOunnADrp50A66eeAOunnwDrp6AA66ehAOunogDrp6MA66ekAOunpQDrp6YA66enAOunqADrp6kA66eqAOunqwDrp6wA66etAOunrgDrp68A66ewAOunsQDrp7IA66ezAOuntADrp7UA66e2AOuntwDrp7gA66e5AOunugDrp7sA66e8AOunvQDrp74A66e/AOuogADrqIEA66iCAOuogwDrqIQA66iFAOuohgDrqIcA66iIAOuoiQDrqIoA66iLAOuojADrqI0A66iOAOuojwDrqJAA66iRAOuokgDrqJMA66iUAOuolQDrqJYA66iXAOuomADrqJkA66iaAOuomwDrqJwA66idAOuongDrqJ8A66igAOuooQDrqKIA66ijAOuopADrqKUA66imAOuopwDrqKgA66ipAOuoqgDrqKsA66isAOuorQDrqK4A66ivAOuosADrqLEA66iyAOuoswDrqLQA66i1AOuotgDrqLcA66i4AOuouQDrqLoA66i7AOuovADrqL0A66i+AOuovwDrqYAA66mBAOupggDrqYMA66mEAOuphQDrqYYA66mHAOupiADrqYkA66mKAOupiwDrqYwA66mNAOupjgDrqY8A66mQAOupkQDrqZIA66mTAOuplADrqZUA66mWAOuplwDrqZgA66mZAOupmgDrqZsA66mcAOupnQDrqZ4A66mfAOupoADrqaEA66miAOupowDrqaQA66mlAOuppgDrqacA66moAOupqQDrqaoA66mrAOuprADrqa0A66muAOuprwDrqbAA66mxAOupsgDrqbMA66m0AOuptQDrqbYA66m3AOupuADrqbkA66m6AOupuwDrqbwA66m9AOupvgDrqb8A66qAAOuqgQDrqoIA66qDAOuqhADrqoUA66qGAOuqhwDrqogA66qJAOuqigDrqosA66qMAOuqjQDrqo4A66qPAOuqkADrqpEA66qSAOuqkwDrqpQA66qVAOuqlgDrqpcA66qYAOuqmQDrqpoA66qbAOuqnADrqp0A66qeAOuqnwDrqqAA66qhAOuqogDrqqMA66qkAOuqpQDrqqYA66qnAOuqqADrqqkA66qqAOuqqwDrqqwA66qtAOuqrgDrqq8A66qwAOuqsQDrqrIA66qzAOuqtADrqrUA66q2AOuqtwDrqrgA66q5AOuqugDrqrsA66q8AOuqvQDrqr4A66q/AOurgADrq4EA66uCAOurgwDrq4QA66uFAOurhgDrq4cA66uIAOuriQDrq4oA66uLAOurjADrq40A66uOAOurjwDrq5AA66uRAOurkgDrq5MA66uUAOurlQDrq5YA66uXAOurmADrq5kA66uaAOurmwDrq5wA66udAOurngDrq58A66ugAOuroQDrq6IA66ujAOurpADrq6UA66umAOurpwDrq6gA66upAOurqgDrq6sA66usAOurrQDrq64A66uvAOursADrq7EA66uyAOurswDrq7QA66u1AOurtgDrq7cA66u4AOuruQDrq7oA66u7AOurvADrq70A66u+AOurvwDrrIAA66yBAOusggDrrIMA66yEAOushQDrrIYA66yHAOusiADrrIkA66yKAOusiwDrrIwA66yNAOusjgDrrI8A66yQAOuskQDrrJIA66yTAOuslADrrJUA66yWAOuslwDrrJgA66yZAOusmgDrrJsA66ycAOusnQDrrJ4A66yfAOusoADrrKEA66yiAOusowDrrKQA66ylAOuspgDrrKcA66yoAOusqQDrrKoA66yrAOusrADrrK0A66yuAOusrwDrrLAA66yxAOussgDrrLMA66y0AOustQDrrLYA66y3AOusuADrrLkA66y6AOusuwDrrLwA66y9AOusvgDrrL8A662AAOutgQDrrYIA662DAOuthADrrYUA662GAOuthwDrrYgA662JAOutigDrrYsA662MAOutjQDrrY4A662PAOutkADrrZEA662SAOutkwDrrZQA662VAOutlgDrrZcA662YAOutmQDrrZoA662bAOutnADrrZ0A662eAOutnwDrraAA662hAOutogDrraMA662kAOutpQDrraYA662nAOutqADrrakA662qAOutqwDrrawA662tAOutrgDrra8A662wAOutsQDrrbIA662zAOuttADrrbUA6622AOuttwDrrbgA6625AOutugDrrbsA6628AOutvQDrrb4A662/AOuugADrroEA666CAOuugwDrroQA6
66FAOuuhgDrrocA666IAOuuiQDrrooA666LAOuujADrro0A666OAOuujwDrrpAA666RAOuukgDrrpMA666UAOuulQDrrpYA666XAOuumADrrpkA666aAOuumwDrrpwA666dAOuungDrrp8A666gAOuuoQDrrqIA666jAOuupADrrqUA666mAOuupwDrrqgA666pAOuuqgDrrqsA666sAOuurQDrrq4A666vAOuusADrrrEA666yAOuuswDrrrQA6661AOuutgDrrrcA6664AOuuuQDrrroA6667AOuuvADrrr0A666+AOuuvwDrr4AA66+BAOuvggDrr4MA66+EAOuvhQDrr4YA66+HAOuviADrr4kA66+KAOuviwDrr4wA66+NAOuvjgDrr48A66+QAOuvkQDrr5IA66+TAOuvlADrr5UA66+WAOuvlwDrr5gA66+ZAOuvmgDrr5sA66+cAOuvnQDrr54A66+fAOuvoADrr6EA66+iAOuvowDrr6QA66+lAOuvpgDrr6cA66+oAOuvqQDrr6oA66+rAOuvrADrr60A66+uAOuvrwDrr7AA66+xAOuvsgDrr7MA66+0AOuvtQDrr7YA66+3AOuvuADrr7kA66+6AOuvuwDrr7wA66+9AOuvvgDrr78A67CAAOuwgQDrsIIA67CDAOuwhADrsIUA67CGAOuwhwDrsIgA67CJAOuwigDrsIsA67CMAOuwjQDrsI4A67CPAOuwkADrsJEA67CSAOuwkwDrsJQA67CVAOuwlgDrsJcA67CYAOuwmQDrsJoA67CbAOuwnADrsJ0A67CeAOuwnwDrsKAA67ChAOuwogDrsKMA67CkAOuwpQDrsKYA67CnAOuwqADrsKkA67CqAOuwqwDrsKwA67CtAOuwrgDrsK8A67CwAOuwsQDrsLIA67CzAOuwtADrsLUA67C2AOuwtwDrsLgA67C5AOuwugDrsLsA67C8AOuwvQDrsL4A67C/AOuxgADrsYEA67GCAOuxgwDrsYQA67GFAOuxhgDrsYcA67GIAOuxiQDrsYoA67GLAOuxjADrsY0A67GOAOuxjwDrsZAA67GRAOuxkgDrsZMA67GUAOuxlQDrsZYA67GXAOuxmADrsZkA67GaAOuxmwDrsZwA67GdAOuxngDrsZ8A67GgAOuxoQDrsaIA67GjAOuxpADrsaUA67GmAOuxpwDrsagA67GpAOuxqgDrsasA67GsAOuxrQDrsa4A67GvAOuxsADrsbEA67GyAOuxswDrsbQA67G1AOuxtgDrsbcA67G4AOuxuQDrsboA67G7AOuxvADrsb0A67G+AOuxvwDrsoAA67KBAOuyggDrsoMA67KEAOuyhQDrsoYA67KHAOuyiADrsokA67KKAOuyiwDrsowA67KNAOuyjgDrso8A67KQAOuykQDrspIA67KTAOuylADrspUA67KWAOuylwDrspgA67KZAOuymgDrspsA67KcAOuynQDrsp4A67KfAOuyoADrsqEA67KiAOuyowDrsqQA67KlAOuypgDrsqcA67KoAOuyqQDrsqoA67KrAOuyrADrsq0A67KuAOuyrwDrsrAA67KxAOuysgDrsrMA67K0AOuytQDrsrYA67K3AOuyuADrsrkA67K6AOuyuwDrsrwA67K9AOuyvgDrsr8A67OAAOuzgQDrs4IA67ODAOuzhADrs4UA67OGAOuzhwDrs4gA67OJAOuzigDrs4sA67OMAOuzjQDrs44A67OPAOuzkADrs5EA67OSAOuzkwDrs5QA67OVAOuzlgDrs5cA67OYAOuzmQDrs5oA67ObAOuznADrs50A67OeAOuznwDrs6AA67OhAOuzogDrs6MA67OkAOuzpQDrs6YA67OnAOuzqADrs6kA67OqAOuzqwDrs6wA67OtAOuzrgDrs68A67OwAOuzsQDrs7IA67OzAOuztADrs7UA67O2AOuztwDrs7gA67O5AOuzugDrs7sA67O8AOuzvQDrs74A67O/AOu0gADrtIEA67SCAOu0gwDrtIQA67SFAOu0hgDrtIcA67SIAOu0iQDrtIoA67SLAOu0jADrtI0A67SOAOu0jwDrtJAA67SRAOu0kgDrtJMA67SUAOu0lQDrtJYA67SXAOu0mADrtJkA67SaAOu0mwDrtJwA67SdAOu0ngDrtJ8A67SgAOu0oQDrtKIA67SjAOu0pADrtKUA67SmAOu0pwDrtKgA67SpAOu0qgDrtKsA67SsAOu0rQDrtK4A67SvAOu0sADrtLEA67SyAOu0swDrtLQA67S1AOu0tgDrtLcA67S4AOu0uQDrtLoA67S7AOu0vADrtL0A67S+AOu0vwDrtYAA67WBAOu1ggDrtYMA67WEAOu1hQDrtYYA67WHAOu1iADrtYkA67WKAOu1iwDrtYwA67WNAOu1jgDrtY8A67WQAOu1kQDrtZIA67WTAOu1lADrtZUA67WWAOu1lwDrtZgA67WZAOu1mgDrtZsA67WcAOu1nQDrtZ4A67WfAOu1oADrtaEA67WiAOu1owDrtaQA67WlAOu1pgDrtacA67WoAOu1qQDrtaoA67WrAOu1rADrta0A67WuAOu1rwDrtbAA67WxAOu1sgDrtbMA67W0AOu1tQDrtbYA67W3AOu1uADrtbkA67W6AOu1uwDrtbwA67W9AOu1vgDrtb8A67aAAOu2gQDrtoIA67aDAOu2hADrtoUA67aGAOu2hwDrtogA67aJAOu2igDrtosA67aMAOu2jQDrto4A67aPAOu2kADrtpEA67aSAOu2kwDrtpQA67aVAOu2lgDrtpcA67aYAOu2mQDrtpoA67abAOu2nADrtp0A67aeAOu2nwDrtqAA67ahAOu2ogDrtqMA67akAOu2pQDrtqYA67anAOu2qADrtqkA67aqAOu2qwDrtqwA67atAOu2rgDrtq8A67awAOu2sQDrtrIA67azAOu2tADrtrUA67a2AOu2twDrtrgA67a5AOu2ugDrtrsA67a8AOu2vQDrtr4A67a/AOu3gADrt4EA67eCAOu3gwDrt4QA67eFAOu3hgDrt4cA67eIAOu3iQDrt4oA67eLAOu3jADrt40A67eOAOu3jwDrt5AA67eRAOu3kgDrt5MA67eUAOu3lQDrt5YA67eXAOu3mADrt5kA67eaAOu3mwDrt5wA67edAOu3ngDrt58A67egAOu3oQDrt6IA67ejAOu3pADrt6UA67emAOu3pwDrt6gA67epAOu3qgDrt6sA67esAOu3rQDrt64A67evAOu3sADrt7EA67eyAOu3swDrt7QA67e1AOu3tgDrt7cA67e4AOu3uQDrt7oA67e7AOu3vADrt70A67e+AOu3vwDruIAA67iBAOu4ggDruIMA67iEAOu4hQDruIYA67iHAOu4iADruIkA67iKAOu4iwDruIwA67iNAOu4jgDruI8A67iQAOu4kQDruJIA67iTAOu4lADruJUA67iWAOu4lwDruJgA67iZAOu4mgDruJsA67icAOu4nQDruJ4A67if
AOu4oADruKEA67iiAOu4owDruKQA67ilAOu4pgDruKcA67ioAOu4qQDruKoA67irAOu4rADruK0A67iuAOu4rwDruLAA67ixAOu4sgDruLMA67i0AOu4tQDruLYA67i3AOu4uADruLkA67i6AOu4uwDruLwA67i9AOu4vgDruL8A67mAAOu5gQDruYIA67mDAOu5hADruYUA67mGAOu5hwDruYgA67mJAOu5igDruYsA67mMAOu5jQDruY4A67mPAOu5kADruZEA67mSAOu5kwDruZQA67mVAOu5lgDruZcA67mYAOu5mQDruZoA67mbAOu5nADruZ0A67meAOu5nwDruaAA67mhAOu5ogDruaMA67mkAOu5pQDruaYA67mnAOu5qADruakA67mqAOu5qwDruawA67mtAOu5rgDrua8A67mwAOu5sQDrubIA67mzAOu5tADrubUA67m2AOu5twDrubgA67m5AOu5ugDrubsA67m8AOu5vQDrub4A67m/AOu6gADruoEA67qCAOu6gwDruoQA67qFAOu6hgDruocA67qIAOu6iQDruooA67qLAOu6jADruo0A67qOAOu6jwDrupAA67qRAOu6kgDrupMA67qUAOu6lQDrupYA67qXAOu6mADrupkA67qaAOu6mwDrupwA67qdAOu6ngDrup8A67qgAOu6oQDruqIA67qjAOu6pADruqUA67qmAOu6pwDruqgA67qpAOu6qgDruqsA67qsAOu6rQDruq4A67qvAOu6sADrurEA67qyAOu6swDrurQA67q1AOu6tgDrurcA67q4AOu6uQDruroA67q7AOu6vADrur0A67q+AOu6vwDru4AA67uBAOu7ggDru4MA67uEAOu7hQDru4YA67uHAOu7iADru4kA67uKAOu7iwDru4wA67uNAOu7jgDru48A67uQAOu7kQDru5IA67uTAOu7lADru5UA67uWAOu7lwDru5gA67uZAOu7mgDru5sA67ucAOu7nQDru54A67ufAOu7oADru6EA67uiAOu7owDru6QA67ulAOu7pgDru6cA67uoAOu7qQDru6oA67urAOu7rADru60A67uuAOu7rwDru7AA67uxAOu7sgDru7MA67u0AOu7tQDru7YA67u3AOu7uADru7kA67u6AOu7uwDru7wA67u9AOu7vgDru78A67yAAOu8gQDrvIIA67yDAOu8hADrvIUA67yGAOu8hwDrvIgA67yJAOu8igDrvIsA67yMAOu8jQDrvI4A67yPAOu8kADrvJEA67ySAOu8kwDrvJQA67yVAOu8lgDrvJcA67yYAOu8mQDrvJoA67ybAOu8nADrvJ0A67yeAOu8nwDrvKAA67yhAOu8ogDrvKMA67ykAOu8pQDrvKYA67ynAOu8qADrvKkA67yqAOu8qwDrvKwA67ytAOu8rgDrvK8A67ywAOu8sQDrvLIA67yzAOu8tADrvLUA67y2AOu8twDrvLgA67y5AOu8ugDrvLsA67y8AOu8vQDrvL4A67y/AOu9gADrvYEA672CAOu9gwDrvYQA672FAOu9hgDrvYcA672IAOu9iQDrvYoA672LAOu9jADrvY0A672OAOu9jwDrvZAA672RAOu9kgDrvZMA672UAOu9lQDrvZYA672XAOu9mADrvZkA672aAOu9mwDrvZwA672dAOu9ngDrvZ8A672gAOu9oQDrvaIA672jAOu9pADrvaUA672mAOu9pwDrvagA672pAOu9qgDrvasA672sAOu9rQDrva4A672vAOu9sADrvbEA672yAOu9swDrvbQA6721AOu9tgDrvbcA6724AOu9uQDrvboA6727AOu9vADrvb0A672+AOu9vwDrvoAA676BAOu+ggDrvoMA676EAOu+hQDrvoYA676HAOu+iADrvokA676KAOu+iwDrvowA676NAOu+jgDrvo8A676QAOu+kQDrvpIA676TAOu+lADrvpUA676WAOu+lwDrvpgA676ZAOu+mgDrvpsA676cAOu+nQDrvp4A676fAOu+oADrvqEA676iAOu+owDrvqQA676lAOu+pgDrvqcA676oAOu+qQDrvqoA676rAOu+rADrvq0A676uAOu+rwDrvrAA676xAOu+sgDrvrMA6760AOu+tQDrvrYA6763AOu+uADrvrkA6766AOu+uwDrvrwA6769AOu+vgDrvr8A67+AAOu/gQDrv4IA67+DAOu/hADrv4UA67+GAOu/hwDrv4gA67+JAOu/igDrv4sA67+MAOu/jQDrv44A67+PAOu/kADrv5EA67+SAOu/kwDrv5QA67+VAOu/lgDrv5cA67+YAOu/mQDrv5oA67+bAOu/nADrv50A67+eAOu/nwDrv6AA67+hAOu/ogDrv6MA67+kAOu/pQDrv6YA67+nAOu/qADrv6kA67+qAOu/qwDrv6wA67+tAOu/rgDrv68A67+wAOu/sQDrv7IA67+zAOu/tADrv7UA67+2AOu/twDrv7gA67+5AOu/ugDrv7sA67+8AOu/vQDrv74A67+/AOyAgADsgIEA7ICCAOyAgwDsgIQA7ICFAOyAhgDsgIcA7ICIAOyAiQDsgIoA7ICLAOyAjADsgI0A7ICOAOyAjwDsgJAA7ICRAOyAkgDsgJMA7ICUAOyAlQDsgJYA7ICXAOyAmADsgJkA7ICaAOyAmwDsgJwA7ICdAOyAngDsgJ8A7ICgAOyAoQDsgKIA7ICjAOyApADsgKUA7ICmAOyApwDsgKgA7ICpAOyAqgDsgKsA7ICsAOyArQDsgK4A7ICvAOyAsADsgLEA7ICyAOyAswDsgLQA7IC1AOyAtgDsgLcA7IC4AOyAuQDsgLoA7IC7AOyAvADsgL0A7IC+AOyAvwDsgYAA7IGBAOyBggDsgYMA7IGEAOyBhQDsgYYA7IGHAOyBiADsgYkA7IGKAOyBiwDsgYwA7IGNAOyBjgDsgY8A7IGQAOyBkQDsgZIA7IGTAOyBlADsgZUA7IGWAOyBlwDsgZgA7IGZAOyBmgDsgZsA7IGcAOyBnQDsgZ4A7IGfAOyBoADsgaEA7IGiAOyBowDsgaQA7IGlAOyBpgDsgacA7IGoAOyBqQDsgaoA7IGrAOyBrADsga0A7IGuAOyBrwDsgbAA7IGxAOyBsgDsgbMA7IG0AOyBtQDsgbYA7IG3AOyBuADsgbkA7IG6AOyBuwDsgbwA7IG9AOyBvgDsgb8A7IKAAOyCgQDsgoIA7IKDAOyChADsgoUA7IKGAOyChwDsgogA7IKJAOyCigDsgosA7IKMAOyCjQDsgo4A7IKPAOyCkADsgpEA7IKSAOyCkwDsgpQA7IKVAOyClgDsgpcA7IKYAOyCmQDsgpoA7IKbAOyCnADsgp0A7IKeAOyCnwDsgqAA7IKhAOyCogDsgqMA7IKkAOyCpQDsgqYA7IKnAOyCqADsgqkA7IKqAOyCqwDsgqwA7IKtAOyCrgDsgq8A7IKwAOyCsQDsgrIA7IKzAOyCtADsgrUA7IK2AOyCtwDsgrgA7IK5AOy
CugDsgrsA7IK8AOyCvQDsgr4A7IK/AOyDgADsg4EA7IOCAOyDgwDsg4QA7IOFAOyDhgDsg4cA7IOIAOyDiQDsg4oA7IOLAOyDjADsg40A7IOOAOyDjwDsg5AA7IORAOyDkgDsg5MA7IOUAOyDlQDsg5YA7IOXAOyDmADsg5kA7IOaAOyDmwDsg5wA7IOdAOyDngDsg58A7IOgAOyDoQDsg6IA7IOjAOyDpADsg6UA7IOmAOyDpwDsg6gA7IOpAOyDqgDsg6sA7IOsAOyDrQDsg64A7IOvAOyDsADsg7EA7IOyAOyDswDsg7QA7IO1AOyDtgDsg7cA7IO4AOyDuQDsg7oA7IO7AOyDvADsg70A7IO+AOyDvwDshIAA7ISBAOyEggDshIMA7ISEAOyEhQDshIYA7ISHAOyEiADshIkA7ISKAOyEiwDshIwA7ISNAOyEjgDshI8A7ISQAOyEkQDshJIA7ISTAOyElADshJUA7ISWAOyElwDshJgA7ISZAOyEmgDshJsA7IScAOyEnQDshJ4A7ISfAOyEoADshKEA7ISiAOyEowDshKQA7ISlAOyEpgDshKcA7ISoAOyEqQDshKoA7ISrAOyErADshK0A7ISuAOyErwDshLAA7ISxAOyEsgDshLMA7IS0AOyEtQDshLYA7IS3AOyEuADshLkA7IS6AOyEuwDshLwA7IS9AOyEvgDshL8A7IWAAOyFgQDshYIA7IWDAOyFhADshYUA7IWGAOyFhwDshYgA7IWJAOyFigDshYsA7IWMAOyFjQDshY4A7IWPAOyFkADshZEA7IWSAOyFkwDshZQA7IWVAOyFlgDshZcA7IWYAOyFmQDshZoA7IWbAOyFnADshZ0A7IWeAOyFnwDshaAA7IWhAOyFogDshaMA7IWkAOyFpQDshaYA7IWnAOyFqADshakA7IWqAOyFqwDshawA7IWtAOyFrgDsha8A7IWwAOyFsQDshbIA7IWzAOyFtADshbUA7IW2AOyFtwDshbgA7IW5AOyFugDshbsA7IW8AOyFvQDshb4A7IW/AOyGgADshoEA7IaCAOyGgwDshoQA7IaFAOyGhgDshocA7IaIAOyGiQDshooA7IaLAOyGjADsho0A7IaOAOyGjwDshpAA7IaRAOyGkgDshpMA7IaUAOyGlQDshpYA7IaXAOyGmADshpkA7IaaAOyGmwDshpwA7IadAOyGngDshp8A7IagAOyGoQDshqIA7IajAOyGpADshqUA7IamAOyGpwDshqgA7IapAOyGqgDshqsA7IasAOyGrQDshq4A7IavAOyGsADshrEA7IayAOyGswDshrQA7Ia1AOyGtgDshrcA7Ia4AOyGuQDshroA7Ia7AOyGvADshr0A7Ia+AOyGvwDsh4AA7IeBAOyHggDsh4MA7IeEAOyHhQDsh4YA7IeHAOyHiADsh4kA7IeKAOyHiwDsh4wA7IeNAOyHjgDsh48A7IeQAOyHkQDsh5IA7IeTAOyHlADsh5UA7IeWAOyHlwDsh5gA7IeZAOyHmgDsh5sA7IecAOyHnQDsh54A7IefAOyHoADsh6EA7IeiAOyHowDsh6QA7IelAOyHpgDsh6cA7IeoAOyHqQDsh6oA7IerAOyHrADsh60A7IeuAOyHrwDsh7AA7IexAOyHsgDsh7MA7Ie0AOyHtQDsh7YA7Ie3AOyHuADsh7kA7Ie6AOyHuwDsh7wA7Ie9AOyHvgDsh78A7IiAAOyIgQDsiIIA7IiDAOyIhADsiIUA7IiGAOyIhwDsiIgA7IiJAOyIigDsiIsA7IiMAOyIjQDsiI4A7IiPAOyIkADsiJEA7IiSAOyIkwDsiJQA7IiVAOyIlgDsiJcA7IiYAOyImQDsiJoA7IibAOyInADsiJ0A7IieAOyInwDsiKAA7IihAOyIogDsiKMA7IikAOyIpQDsiKYA7IinAOyIqADsiKkA7IiqAOyIqwDsiKwA7IitAOyIrgDsiK8A7IiwAOyIsQDsiLIA7IizAOyItADsiLUA7Ii2AOyItwDsiLgA7Ii5AOyIugDsiLsA7Ii8AOyIvQDsiL4A7Ii/AOyJgADsiYEA7ImCAOyJgwDsiYQA7ImFAOyJhgDsiYcA7ImIAOyJiQDsiYoA7ImLAOyJjADsiY0A7ImOAOyJjwDsiZAA7ImRAOyJkgDsiZMA7ImUAOyJlQDsiZYA7ImXAOyJmADsiZkA7ImaAOyJmwDsiZwA7ImdAOyJngDsiZ8A7ImgAOyJoQDsiaIA7ImjAOyJpADsiaUA7ImmAOyJpwDsiagA7ImpAOyJqgDsiasA7ImsAOyJrQDsia4A7ImvAOyJsADsibEA7ImyAOyJswDsibQA7Im1AOyJtgDsibcA7Im4AOyJuQDsiboA7Im7AOyJvADsib0A7Im+AOyJvwDsioAA7IqBAOyKggDsioMA7IqEAOyKhQDsioYA7IqHAOyKiADsiokA7IqKAOyKiwDsiowA7IqNAOyKjgDsio8A7IqQAOyKkQDsipIA7IqTAOyKlADsipUA7IqWAOyKlwDsipgA7IqZAOyKmgDsipsA7IqcAOyKnQDsip4A7IqfAOyKoADsiqEA7IqiAOyKowDsiqQA7IqlAOyKpgDsiqcA7IqoAOyKqQDsiqoA7IqrAOyKrADsiq0A7IquAOyKrwDsirAA7IqxAOyKsgDsirMA7Iq0AOyKtQDsirYA7Iq3AOyKuADsirkA7Iq6AOyKuwDsirwA7Iq9AOyKvgDsir8A7IuAAOyLgQDsi4IA7IuDAOyLhADsi4UA7IuGAOyLhwDsi4gA7IuJAOyLigDsi4sA7IuMAOyLjQDsi44A7IuPAOyLkADsi5EA7IuSAOyLkwDsi5QA7IuVAOyLlgDsi5cA7IuYAOyLmQDsi5oA7IubAOyLnADsi50A7IueAOyLnwDsi6AA7IuhAOyLogDsi6MA7IukAOyLpQDsi6YA7IunAOyLqADsi6kA7IuqAOyLqwDsi6wA7IutAOyLrgDsi68A7IuwAOyLsQDsi7IA7IuzAOyLtADsi7UA7Iu2AOyLtwDsi7gA7Iu5AOyLugDsi7sA7Iu8AOyLvQDsi74A7Iu/AOyMgADsjIEA7IyCAOyMgwDsjIQA7IyFAOyMhgDsjIcA7IyIAOyMiQDsjIoA7IyLAOyMjADsjI0A7IyOAOyMjwDsjJAA7IyRAOyMkgDsjJMA7IyUAOyMlQDsjJYA7IyXAOyMmADsjJkA7IyaAOyMmwDsjJwA7IydAOyMngDsjJ8A7IygAOyMoQDsjKIA7IyjAOyMpADsjKUA7IymAOyMpwDsjKgA7IypAOyMqgDsjKsA7IysAOyMrQDsjK4A7IyvAOyMsADsjLEA7IyyAOyMswDsjLQA7Iy1AOyMtgDsjLcA7Iy4AOyMuQDsjLoA7Iy7AOyMvADsjL0A7Iy+AOyMvwDsjYAA7I2BAOyNggDsjYMA7I2EAOyNhQDsjYYA7I2HAOyNiADsjYkA7I2KAOyNiwDsjYwA7I2NAOyNjgDsjY8A7I2QAOyNkQDsjZIA7I2TAOyNlA
DsjZUA7I2WAOyNlwDsjZgA7I2ZAOyNmgDsjZsA7I2cAOyNnQDsjZ4A7I2fAOyNoADsjaEA7I2iAOyNowDsjaQA7I2lAOyNpgDsjacA7I2oAOyNqQDsjaoA7I2rAOyNrADsja0A7I2uAOyNrwDsjbAA7I2xAOyNsgDsjbMA7I20AOyNtQDsjbYA7I23AOyNuADsjbkA7I26AOyNuwDsjbwA7I29AOyNvgDsjb8A7I6AAOyOgQDsjoIA7I6DAOyOhADsjoUA7I6GAOyOhwDsjogA7I6JAOyOigDsjosA7I6MAOyOjQDsjo4A7I6PAOyOkADsjpEA7I6SAOyOkwDsjpQA7I6VAOyOlgDsjpcA7I6YAOyOmQDsjpoA7I6bAOyOnADsjp0A7I6eAOyOnwDsjqAA7I6hAOyOogDsjqMA7I6kAOyOpQDsjqYA7I6nAOyOqADsjqkA7I6qAOyOqwDsjqwA7I6tAOyOrgDsjq8A7I6wAOyOsQDsjrIA7I6zAOyOtADsjrUA7I62AOyOtwDsjrgA7I65AOyOugDsjrsA7I68AOyOvQDsjr4A7I6/AOyPgADsj4EA7I+CAOyPgwDsj4QA7I+FAOyPhgDsj4cA7I+IAOyPiQDsj4oA7I+LAOyPjADsj40A7I+OAOyPjwDsj5AA7I+RAOyPkgDsj5MA7I+UAOyPlQDsj5YA7I+XAOyPmADsj5kA7I+aAOyPmwDsj5wA7I+dAOyPngDsj58A7I+gAOyPoQDsj6IA7I+jAOyPpADsj6UA7I+mAOyPpwDsj6gA7I+pAOyPqgDsj6sA7I+sAOyPrQDsj64A7I+vAOyPsADsj7EA7I+yAOyPswDsj7QA7I+1AOyPtgDsj7cA7I+4AOyPuQDsj7oA7I+7AOyPvADsj70A7I++AOyPvwDskIAA7JCBAOyQggDskIMA7JCEAOyQhQDskIYA7JCHAOyQiADskIkA7JCKAOyQiwDskIwA7JCNAOyQjgDskI8A7JCQAOyQkQDskJIA7JCTAOyQlADskJUA7JCWAOyQlwDskJgA7JCZAOyQmgDskJsA7JCcAOyQnQDskJ4A7JCfAOyQoADskKEA7JCiAOyQowDskKQA7JClAOyQpgDskKcA7JCoAOyQqQDskKoA7JCrAOyQrADskK0A7JCuAOyQrwDskLAA7JCxAOyQsgDskLMA7JC0AOyQtQDskLYA7JC3AOyQuADskLkA7JC6AOyQuwDskLwA7JC9AOyQvgDskL8A7JGAAOyRgQDskYIA7JGDAOyRhADskYUA7JGGAOyRhwDskYgA7JGJAOyRigDskYsA7JGMAOyRjQDskY4A7JGPAOyRkADskZEA7JGSAOyRkwDskZQA7JGVAOyRlgDskZcA7JGYAOyRmQDskZoA7JGbAOyRnADskZ0A7JGeAOyRnwDskaAA7JGhAOyRogDskaMA7JGkAOyRpQDskaYA7JGnAOyRqADskakA7JGqAOyRqwDskawA7JGtAOyRrgDska8A7JGwAOyRsQDskbIA7JGzAOyRtADskbUA7JG2AOyRtwDskbgA7JG5AOyRugDskbsA7JG8AOyRvQDskb4A7JG/AOySgADskoEA7JKCAOySgwDskoQA7JKFAOyShgDskocA7JKIAOySiQDskooA7JKLAOySjADsko0A7JKOAOySjwDskpAA7JKRAOySkgDskpMA7JKUAOySlQDskpYA7JKXAOySmADskpkA7JKaAOySmwDskpwA7JKdAOySngDskp8A7JKgAOySoQDskqIA7JKjAOySpADskqUA7JKmAOySpwDskqgA7JKpAOySqgDskqsA7JKsAOySrQDskq4A7JKvAOySsADskrEA7JKyAOySswDskrQA7JK1AOyStgDskrcA7JK4AOySuQDskroA7JK7AOySvADskr0A7JK+AOySvwDsk4AA7JOBAOyTggDsk4MA7JOEAOyThQDsk4YA7JOHAOyTiADsk4kA7JOKAOyTiwDsk4wA7JONAOyTjgDsk48A7JOQAOyTkQDsk5IA7JOTAOyTlADsk5UA7JOWAOyTlwDsk5gA7JOZAOyTmgDsk5sA7JOcAOyTnQDsk54A7JOfAOyToADsk6EA7JOiAOyTowDsk6QA7JOlAOyTpgDsk6cA7JOoAOyTqQDsk6oA7JOrAOyTrADsk60A7JOuAOyTrwDsk7AA7JOxAOyTsgDsk7MA7JO0AOyTtQDsk7YA7JO3AOyTuADsk7kA7JO6AOyTuwDsk7wA7JO9AOyTvgDsk78A7JSAAOyUgQDslIIA7JSDAOyUhADslIUA7JSGAOyUhwDslIgA7JSJAOyUigDslIsA7JSMAOyUjQDslI4A7JSPAOyUkADslJEA7JSSAOyUkwDslJQA7JSVAOyUlgDslJcA7JSYAOyUmQDslJoA7JSbAOyUnADslJ0A7JSeAOyUnwDslKAA7JShAOyUogDslKMA7JSkAOyUpQDslKYA7JSnAOyUqADslKkA7JSqAOyUqwDslKwA7JStAOyUrgDslK8A7JSwAOyUsQDslLIA7JSzAOyUtADslLUA7JS2AOyUtwDslLgA7JS5AOyUugDslLsA7JS8AOyUvQDslL4A7JS/AOyVgADslYEA7JWCAOyVgwDslYQA7JWFAOyVhgDslYcA7JWIAOyViQDslYoA7JWLAOyVjADslY0A7JWOAOyVjwDslZAA7JWRAOyVkgDslZMA7JWUAOyVlQDslZYA7JWXAOyVmADslZkA7JWaAOyVmwDslZwA7JWdAOyVngDslZ8A7JWgAOyVoQDslaIA7JWjAOyVpADslaUA7JWmAOyVpwDslagA7JWpAOyVqgDslasA7JWsAOyVrQDsla4A7JWvAOyVsADslbEA7JWyAOyVswDslbQA7JW1AOyVtgDslbcA7JW4AOyVuQDslboA7JW7AOyVvADslb0A7JW+AOyVvwDsloAA7JaBAOyWggDsloMA7JaEAOyWhQDsloYA7JaHAOyWiADslokA7JaKAOyWiwDslowA7JaNAOyWjgDslo8A7JaQAOyWkQDslpIA7JaTAOyWlADslpUA7JaWAOyWlwDslpgA7JaZAOyWmgDslpsA7JacAOyWnQDslp4A7JafAOyWoADslqEA7JaiAOyWowDslqQA7JalAOyWpgDslqcA7JaoAOyWqQDslqoA7JarAOyWrADslq0A7JauAOyWrwDslrAA7JaxAOyWsgDslrMA7Ja0AOyWtQDslrYA7Ja3AOyWuADslrkA7Ja6AOyWuwDslrwA7Ja9AOyWvgDslr8A7JeAAOyXgQDsl4IA7JeDAOyXhADsl4UA7JeGAOyXhwDsl4gA7JeJAOyXigDsl4sA7JeMAOyXjQDsl44A7JePAOyXkADsl5EA7JeSAOyXkwDsl5QA7JeVAOyXlgDsl5cA7JeYAOyXmQDsl5oA7JebAOyXnADsl50A7JeeAOyXnwDsl6AA7JehAOyXogDsl6MA7JekAOyXpQDsl6YA7JenAOyXqADsl6kA7JeqAOyXqwDsl6wA7JetAOyXrgDsl
68A7JewAOyXsQDsl7IA7JezAOyXtADsl7UA7Je2AOyXtwDsl7gA7Je5AOyXugDsl7sA7Je8AOyXvQDsl74A7Je/AOyYgADsmIEA7JiCAOyYgwDsmIQA7JiFAOyYhgDsmIcA7JiIAOyYiQDsmIoA7JiLAOyYjADsmI0A7JiOAOyYjwDsmJAA7JiRAOyYkgDsmJMA7JiUAOyYlQDsmJYA7JiXAOyYmADsmJkA7JiaAOyYmwDsmJwA7JidAOyYngDsmJ8A7JigAOyYoQDsmKIA7JijAOyYpADsmKUA7JimAOyYpwDsmKgA7JipAOyYqgDsmKsA7JisAOyYrQDsmK4A7JivAOyYsADsmLEA7JiyAOyYswDsmLQA7Ji1AOyYtgDsmLcA7Ji4AOyYuQDsmLoA7Ji7AOyYvADsmL0A7Ji+AOyYvwDsmYAA7JmBAOyZggDsmYMA7JmEAOyZhQDsmYYA7JmHAOyZiADsmYkA7JmKAOyZiwDsmYwA7JmNAOyZjgDsmY8A7JmQAOyZkQDsmZIA7JmTAOyZlADsmZUA7JmWAOyZlwDsmZgA7JmZAOyZmgDsmZsA7JmcAOyZnQDsmZ4A7JmfAOyZoADsmaEA7JmiAOyZowDsmaQA7JmlAOyZpgDsmacA7JmoAOyZqQDsmaoA7JmrAOyZrADsma0A7JmuAOyZrwDsmbAA7JmxAOyZsgDsmbMA7Jm0AOyZtQDsmbYA7Jm3AOyZuADsmbkA7Jm6AOyZuwDsmbwA7Jm9AOyZvgDsmb8A7JqAAOyagQDsmoIA7JqDAOyahADsmoUA7JqGAOyahwDsmogA7JqJAOyaigDsmosA7JqMAOyajQDsmo4A7JqPAOyakADsmpEA7JqSAOyakwDsmpQA7JqVAOyalgDsmpcA7JqYAOyamQDsmpoA7JqbAOyanADsmp0A7JqeAOyanwDsmqAA7JqhAOyaogDsmqMA7JqkAOyapQDsmqYA7JqnAOyaqADsmqkA7JqqAOyaqwDsmqwA7JqtAOyargDsmq8A7JqwAOyasQDsmrIA7JqzAOyatADsmrUA7Jq2AOyatwDsmrgA7Jq5AOyaugDsmrsA7Jq8AOyavQDsmr4A7Jq/AOybgADsm4EA7JuCAOybgwDsm4QA7JuFAOybhgDsm4cA7JuIAOybiQDsm4oA7JuLAOybjADsm40A7JuOAOybjwDsm5AA7JuRAOybkgDsm5MA7JuUAOyblQDsm5YA7JuXAOybmADsm5kA7JuaAOybmwDsm5wA7JudAOybngDsm58A7JugAOyboQDsm6IA7JujAOybpADsm6UA7JumAOybpwDsm6gA7JupAOybqgDsm6sA7JusAOybrQDsm64A7JuvAOybsADsm7EA7JuyAOybswDsm7QA7Ju1AOybtgDsm7cA7Ju4AOybuQDsm7oA7Ju7AOybvADsm70A7Ju+AOybvwDsnIAA7JyBAOycggDsnIMA7JyEAOychQDsnIYA7JyHAOyciADsnIkA7JyKAOyciwDsnIwA7JyNAOycjgDsnI8A7JyQAOyckQDsnJIA7JyTAOyclADsnJUA7JyWAOyclwDsnJgA7JyZAOycmgDsnJsA7JycAOycnQDsnJ4A7JyfAOycoADsnKEA7JyiAOycowDsnKQA7JylAOycpgDsnKcA7JyoAOycqQDsnKoA7JyrAOycrADsnK0A7JyuAOycrwDsnLAA7JyxAOycsgDsnLMA7Jy0AOyctQDsnLYA7Jy3AOycuADsnLkA7Jy6AOycuwDsnLwA7Jy9AOycvgDsnL8A7J2AAOydgQDsnYIA7J2DAOydhADsnYUA7J2GAOydhwDsnYgA7J2JAOydigDsnYsA7J2MAOydjQDsnY4A7J2PAOydkADsnZEA7J2SAOydkwDsnZQA7J2VAOydlgDsnZcA7J2YAOydmQDsnZoA7J2bAOydnADsnZ0A7J2eAOydnwDsnaAA7J2hAOydogDsnaMA7J2kAOydpQDsnaYA7J2nAOydqADsnakA7J2qAOydqwDsnawA7J2tAOydrgDsna8A7J2wAOydsQDsnbIA7J2zAOydtADsnbUA7J22AOydtwDsnbgA7J25AOydugDsnbsA7J28AOydvQDsnb4A7J2/AOyegADsnoEA7J6CAOyegwDsnoQA7J6FAOyehgDsnocA7J6IAOyeiQDsnooA7J6LAOyejADsno0A7J6OAOyejwDsnpAA7J6RAOyekgDsnpMA7J6UAOyelQDsnpYA7J6XAOyemADsnpkA7J6aAOyemwDsnpwA7J6dAOyengDsnp8A7J6gAOyeoQDsnqIA7J6jAOyepADsnqUA7J6mAOyepwDsnqgA7J6pAOyeqgDsnqsA7J6sAOyerQDsnq4A7J6vAOyesADsnrEA7J6yAOyeswDsnrQA7J61AOyetgDsnrcA7J64AOyeuQDsnroA7J67AOyevADsnr0A7J6+AOyevwDsn4AA7J+BAOyfggDsn4MA7J+EAOyfhQDsn4YA7J+HAOyfiADsn4kA7J+KAOyfiwDsn4wA7J+NAOyfjgDsn48A7J+QAOyfkQDsn5IA7J+TAOyflADsn5UA7J+WAOyflwDsn5gA7J+ZAOyfmgDsn5sA7J+cAOyfnQDsn54A7J+fAOyfoADsn6EA7J+iAOyfowDsn6QA7J+lAOyfpgDsn6cA7J+oAOyfqQDsn6oA7J+rAOyfrADsn60A7J+uAOyfrwDsn7AA7J+xAOyfsgDsn7MA7J+0AOyftQDsn7YA7J+3AOyfuADsn7kA7J+6AOyfuwDsn7wA7J+9AOyfvgDsn78A7KCAAOyggQDsoIIA7KCDAOyghADsoIUA7KCGAOyghwDsoIgA7KCJAOygigDsoIsA7KCMAOygjQDsoI4A7KCPAOygkADsoJEA7KCSAOygkwDsoJQA7KCVAOyglgDsoJcA7KCYAOygmQDsoJoA7KCbAOygnADsoJ0A7KCeAOygnwDsoKAA7KChAOygogDsoKMA7KCkAOygpQDsoKYA7KCnAOygqADsoKkA7KCqAOygqwDsoKwA7KCtAOygrgDsoK8A7KCwAOygsQDsoLIA7KCzAOygtADsoLUA7KC2AOygtwDsoLgA7KC5AOygugDsoLsA7KC8AOygvQDsoL4A7KC/AOyhgADsoYEA7KGCAOyhgwDsoYQA7KGFAOyhhgDsoYcA7KGIAOyhiQDsoYoA7KGLAOyhjADsoY0A7KGOAOyhjwDsoZAA7KGRAOyhkgDsoZMA7KGUAOyhlQDsoZYA7KGXAOyhmADsoZkA7KGaAOyhmwDsoZwA7KGdAOyhngDsoZ8A7KGgAOyhoQDsoaIA7KGjAOyhpADsoaUA7KGmAOyhpwDsoagA7KGpAOyhqgDsoasA7KGsAOyhrQDsoa4A7KGvAOyhsADsobEA7KGyAOyhswDsobQA7KG1AOyhtgDsobcA7KG4AOyhuQDsoboA7KG7AOyhvADsob0A7KG+AOyhvwDsooAA7KKBAOyiggDsooMA7KKEAOyihQDsooYA7KKHAOyiiADsookA
7KKKAOyiiwDsoowA7KKNAOyijgDsoo8A7KKQAOyikQDsopIA7KKTAOyilADsopUA7KKWAOyilwDsopgA7KKZAOyimgDsopsA7KKcAOyinQDsop4A7KKfAOyioADsoqEA7KKiAOyiowDsoqQA7KKlAOyipgDsoqcA7KKoAOyiqQDsoqoA7KKrAOyirADsoq0A7KKuAOyirwDsorAA7KKxAOyisgDsorMA7KK0AOyitQDsorYA7KK3AOyiuADsorkA7KK6AOyiuwDsorwA7KK9AOyivgDsor8A7KOAAOyjgQDso4IA7KODAOyjhADso4UA7KOGAOyjhwDso4gA7KOJAOyjigDso4sA7KOMAOyjjQDso44A7KOPAOyjkADso5EA7KOSAOyjkwDso5QA7KOVAOyjlgDso5cA7KOYAOyjmQDso5oA7KObAOyjnADso50A7KOeAOyjnwDso6AA7KOhAOyjogDso6MA7KOkAOyjpQDso6YA7KOnAOyjqADso6kA7KOqAOyjqwDso6wA7KOtAOyjrgDso68A7KOwAOyjsQDso7IA7KOzAOyjtADso7UA7KO2AOyjtwDso7gA7KO5AOyjugDso7sA7KO8AOyjvOydmADso70A7KO+AOyjvwDspIAA7KSBAOykggDspIMA7KSEAOykhQDspIYA7KSHAOykiADspIkA7KSKAOykiwDspIwA7KSNAOykjgDspI8A7KSQAOykkQDspJIA7KSTAOyklADspJUA7KSWAOyklwDspJgA7KSZAOykmgDspJsA7KScAOyknQDspJ4A7KSfAOykoADspKEA7KSiAOykowDspKQA7KSlAOykpgDspKcA7KSoAOykqQDspKoA7KSrAOykrADspK0A7KSuAOykrwDspLAA7KSxAOyksgDspLMA7KS0AOyktQDspLYA7KS3AOykuADspLkA7KS6AOykuwDspLwA7KS9AOykvgDspL8A7KWAAOylgQDspYIA7KWDAOylhADspYUA7KWGAOylhwDspYgA7KWJAOyligDspYsA7KWMAOyljQDspY4A7KWPAOylkADspZEA7KWSAOylkwDspZQA7KWVAOyllgDspZcA7KWYAOylmQDspZoA7KWbAOylnADspZ0A7KWeAOylnwDspaAA7KWhAOylogDspaMA7KWkAOylpQDspaYA7KWnAOylqADspakA7KWqAOylqwDspawA7KWtAOylrgDspa8A7KWwAOylsQDspbIA7KWzAOyltADspbUA7KW2AOyltwDspbgA7KW5AOylugDspbsA7KW8AOylvQDspb4A7KW/AOymgADspoEA7KaCAOymgwDspoQA7KaFAOymhgDspocA7KaIAOymiQDspooA7KaLAOymjADspo0A7KaOAOymjwDsppAA7KaRAOymkgDsppMA7KaUAOymlQDsppYA7KaXAOymmADsppkA7KaaAOymmwDsppwA7KadAOymngDspp8A7KagAOymoQDspqIA7KajAOympADspqUA7KamAOympwDspqgA7KapAOymqgDspqsA7KasAOymrQDspq4A7KavAOymsADsprEA7KayAOymswDsprQA7Ka1AOymtgDsprcA7Ka4AOymuQDsproA7Ka7AOymvADspr0A7Ka+AOymvwDsp4AA7KeBAOynggDsp4MA7KeEAOynhQDsp4YA7KeHAOyniADsp4kA7KeKAOyniwDsp4wA7KeNAOynjgDsp48A7KeQAOynkQDsp5IA7KeTAOynlADsp5UA7KeWAOynlwDsp5gA7KeZAOynmgDsp5sA7KecAOynnQDsp54A7KefAOynoADsp6EA7KeiAOynowDsp6QA7KelAOynpgDsp6cA7KeoAOynqQDsp6oA7KerAOynrADsp60A7KeuAOynrwDsp7AA7KexAOynsgDsp7MA7Ke0AOyntQDsp7YA7Ke3AOynuADsp7kA7Ke6AOynuwDsp7wA7Ke9AOynvgDsp78A7KiAAOyogQDsqIIA7KiDAOyohADsqIUA7KiGAOyohwDsqIgA7KiJAOyoigDsqIsA7KiMAOyojQDsqI4A7KiPAOyokADsqJEA7KiSAOyokwDsqJQA7KiVAOyolgDsqJcA7KiYAOyomQDsqJoA7KibAOyonADsqJ0A7KieAOyonwDsqKAA7KihAOyoogDsqKMA7KikAOyopQDsqKYA7KinAOyoqADsqKkA7KiqAOyoqwDsqKwA7KitAOyorgDsqK8A7KiwAOyosQDsqLIA7KizAOyotADsqLUA7Ki2AOyotwDsqLgA7Ki5AOyougDsqLsA7Ki8AOyovQDsqL4A7Ki/AOypgADsqYEA7KmCAOypgwDsqYQA7KmFAOyphgDsqYcA7KmIAOypiQDsqYoA7KmLAOypjADsqY0A7KmOAOypjwDsqZAA7KmRAOypkgDsqZMA7KmUAOyplQDsqZYA7KmXAOypmADsqZkA7KmaAOypmwDsqZwA7KmdAOypngDsqZ8A7KmgAOypoQDsqaIA7KmjAOyppADsqaUA7KmmAOyppwDsqagA7KmpAOypqgDsqasA7KmsAOyprQDsqa4A7KmvAOypsADsqbEA7KmyAOypswDsqbQA7Km1AOyptgDsqbcA7Km4AOypuQDsqboA7Km7AOypvADsqb0A7Km+AOypvwDsqoAA7KqBAOyqggDsqoMA7KqEAOyqhQDsqoYA7KqHAOyqiADsqokA7KqKAOyqiwDsqowA7KqNAOyqjgDsqo8A7KqQAOyqkQDsqpIA7KqTAOyqlADsqpUA7KqWAOyqlwDsqpgA7KqZAOyqmgDsqpsA7KqcAOyqnQDsqp4A7KqfAOyqoADsqqEA7KqiAOyqowDsqqQA7KqlAOyqpgDsqqcA7KqoAOyqqQDsqqoA7KqrAOyqrADsqq0A7KquAOyqrwDsqrAA7KqxAOyqsgDsqrMA7Kq0AOyqtQDsqrYA7Kq3AOyquADsqrkA7Kq6AOyquwDsqrwA7Kq9AOyqvgDsqr8A7KuAAOyrgQDsq4IA7KuDAOyrhADsq4UA7KuGAOyrhwDsq4gA7KuJAOyrigDsq4sA7KuMAOyrjQDsq44A7KuPAOyrkADsq5EA7KuSAOyrkwDsq5QA7KuVAOyrlgDsq5cA7KuYAOyrmQDsq5oA7KubAOyrnADsq50A7KueAOyrnwDsq6AA7KuhAOyrogDsq6MA7KukAOyrpQDsq6YA7KunAOyrqADsq6kA7KuqAOyrqwDsq6wA7KutAOyrrgDsq68A7KuwAOyrsQDsq7IA7KuzAOyrtADsq7UA7Ku2AOyrtwDsq7gA7Ku5AOyrugDsq7sA7Ku8AOyrvQDsq74A7Ku/AOysgADsrIEA7KyCAOysgwDsrIQA7KyFAOyshgDsrIcA7KyIAOysiQDsrIoA7KyLAOysjADsrI0A7KyOAOysjwDsrJAA7KyRAOyskgDsrJMA7KyUAOyslQDsrJYA7KyXAOysmADsrJkA7KyaAOysmwDsrJwA7KydAOysngDsrJ8A7KygAOysoQDsrKI
A7KyjAOyspADsrKUA7KymAOyspwDsrKgA7KypAOysqgDsrKsA7KysAOysrQDsrK4A7KyvAOyssADsrLEA7KyyAOysswDsrLQA7Ky1AOystgDsrLcA7Ky4AOysuQDsrLoA7Ky7AOysvADsrL0A7Ky+AOysvwDsrYAA7K2BAOytggDsrYMA7K2EAOythQDsrYYA7K2HAOytiADsrYkA7K2KAOytiwDsrYwA7K2NAOytjgDsrY8A7K2QAOytkQDsrZIA7K2TAOytlADsrZUA7K2WAOytlwDsrZgA7K2ZAOytmgDsrZsA7K2cAOytnQDsrZ4A7K2fAOytoADsraEA7K2iAOytowDsraQA7K2lAOytpgDsracA7K2oAOytqQDsraoA7K2rAOytrADsra0A7K2uAOytrwDsrbAA7K2xAOytsgDsrbMA7K20AOyttQDsrbYA7K23AOytuADsrbkA7K26AOytuwDsrbwA7K29AOytvgDsrb8A7K6AAOyugQDsroIA7K6DAOyuhADsroUA7K6GAOyuhwDsrogA7K6JAOyuigDsrosA7K6MAOyujQDsro4A7K6PAOyukADsrpEA7K6SAOyukwDsrpQA7K6VAOyulgDsrpcA7K6YAOyumQDsrpoA7K6bAOyunADsrp0A7K6eAOyunwDsrqAA7K6hAOyuogDsrqMA7K6kAOyupQDsrqYA7K6nAOyuqADsrqkA7K6qAOyuqwDsrqwA7K6tAOyurgDsrq8A7K6wAOyusQDsrrIA7K6zAOyutADsrrUA7K62AOyutwDsrrgA7K65AOyuugDsrrsA7K68AOyuvQDsrr4A7K6/AOyvgADsr4EA7K+CAOyvgwDsr4QA7K+FAOyvhgDsr4cA7K+IAOyviQDsr4oA7K+LAOyvjADsr40A7K+OAOyvjwDsr5AA7K+RAOyvkgDsr5MA7K+UAOyvlQDsr5YA7K+XAOyvmADsr5kA7K+aAOyvmwDsr5wA7K+dAOyvngDsr58A7K+gAOyvoQDsr6IA7K+jAOyvpADsr6UA7K+mAOyvpwDsr6gA7K+pAOyvqgDsr6sA7K+sAOyvrQDsr64A7K+vAOyvsADsr7EA7K+yAOyvswDsr7QA7K+1AOyvtgDsr7cA7K+4AOyvuQDsr7oA7K+7AOyvvADsr70A7K++AOyvvwDssIAA7LCBAOywggDssIMA7LCEAOywhQDssIYA7LCHAOywiADssIkA7LCKAOywiwDssIwA7LCNAOywjgDssI8A7LCQAOywkQDssJIA7LCTAOywlADssJUA7LCWAOywlwDssJgA7LCZAOywmgDssJsA7LCcAOywnQDssJ4A7LCfAOywoADssKEA7LCiAOywowDssKQA7LClAOywpgDssKcA7LCoAOywqQDssKoA7LCrAOywrADssK0A7LCuAOywrwDssLAA7LCxAOywsgDssLMA7LC0AOywtQDssLYA7LC3AOywuADssLjqs6AA7LC5AOywugDssLsA7LC8AOywvQDssL4A7LC/AOyxgADssYEA7LGCAOyxgwDssYQA7LGFAOyxhgDssYcA7LGIAOyxiQDssYoA7LGLAOyxjADssY0A7LGOAOyxjwDssZAA7LGRAOyxkgDssZMA7LGUAOyxlQDssZYA7LGXAOyxmADssZkA7LGaAOyxmwDssZwA7LGdAOyxngDssZ8A7LGgAOyxoQDssaIA7LGjAOyxpADssaUA7LGmAOyxpwDssagA7LGpAOyxqgDssasA7LGsAOyxrQDssa4A7LGvAOyxsADssbEA7LGyAOyxswDssbQA7LG1AOyxtgDssbcA7LG4AOyxuQDssboA7LG7AOyxvADssb0A7LG+AOyxvwDssoAA7LKBAOyyggDssoMA7LKEAOyyhQDssoYA7LKHAOyyiADssokA7LKKAOyyiwDssowA7LKNAOyyjgDsso8A7LKQAOyykQDsspIA7LKTAOyylADsspUA7LKWAOyylwDsspgA7LKZAOyymgDsspsA7LKcAOyynQDssp4A7LKfAOyyoADssqEA7LKiAOyyowDssqQA7LKlAOyypgDssqcA7LKoAOyyqQDssqoA7LKrAOyyrADssq0A7LKuAOyyrwDssrAA7LKxAOyysgDssrMA7LK0AOyytQDssrYA7LK3AOyyuADssrkA7LK6AOyyuwDssrwA7LK9AOyyvgDssr8A7LOAAOyzgQDss4IA7LODAOyzhADss4UA7LOGAOyzhwDss4gA7LOJAOyzigDss4sA7LOMAOyzjQDss44A7LOPAOyzkADss5EA7LOSAOyzkwDss5QA7LOVAOyzlgDss5cA7LOYAOyzmQDss5oA7LObAOyznADss50A7LOeAOyznwDss6AA7LOhAOyzogDss6MA7LOkAOyzpQDss6YA7LOnAOyzqADss6kA7LOqAOyzqwDss6wA7LOtAOyzrgDss68A7LOwAOyzsQDss7IA7LOzAOyztADss7UA7LO2AOyztwDss7gA7LO5AOyzugDss7sA7LO8AOyzvQDss74A7LO/AOy0gADstIEA7LSCAOy0gwDstIQA7LSFAOy0hgDstIcA7LSIAOy0iQDstIoA7LSLAOy0jADstI0A7LSOAOy0jwDstJAA7LSRAOy0kgDstJMA7LSUAOy0lQDstJYA7LSXAOy0mADstJkA7LSaAOy0mwDstJwA7LSdAOy0ngDstJ8A7LSgAOy0oQDstKIA7LSjAOy0pADstKUA7LSmAOy0pwDstKgA7LSpAOy0qgDstKsA7LSsAOy0rQDstK4A7LSvAOy0sADstLEA7LSyAOy0swDstLQA7LS1AOy0tgDstLcA7LS4AOy0uQDstLoA7LS7AOy0vADstL0A7LS+AOy0vwDstYAA7LWBAOy1ggDstYMA7LWEAOy1hQDstYYA7LWHAOy1iADstYkA7LWKAOy1iwDstYwA7LWNAOy1jgDstY8A7LWQAOy1kQDstZIA7LWTAOy1lADstZUA7LWWAOy1lwDstZgA7LWZAOy1mgDstZsA7LWcAOy1nQDstZ4A7LWfAOy1oADstaEA7LWiAOy1owDstaQA7LWlAOy1pgDstacA7LWoAOy1qQDstaoA7LWrAOy1rADsta0A7LWuAOy1rwDstbAA7LWxAOy1sgDstbMA7LW0AOy1tQDstbYA7LW3AOy1uADstbkA7LW6AOy1uwDstbwA7LW9AOy1vgDstb8A7LaAAOy2gQDstoIA7LaDAOy2hADstoUA7LaGAOy2hwDstogA7LaJAOy2igDstosA7LaMAOy2jQDsto4A7LaPAOy2kADstpEA7LaSAOy2kwDstpQA7LaVAOy2lgDstpcA7LaYAOy2mQDstpoA7LabAOy2nADstp0A7LaeAOy2nwDstqAA7LahAOy2ogDstqMA7LakAOy2pQDstqYA7LanAOy2qADstqkA7LaqAOy2qwDstqwA7LatAOy2rgDstq8A7LawAOy2sQDstrIA7LazAOy2tADstrUA7La2AOy2twDstrgA7La5AOy2ugDstr
sA7La8AOy2vQDstr4A7La/AOy3gADst4EA7LeCAOy3gwDst4QA7LeFAOy3hgDst4cA7LeIAOy3iQDst4oA7LeLAOy3jADst40A7LeOAOy3jwDst5AA7LeRAOy3kgDst5MA7LeUAOy3lQDst5YA7LeXAOy3mADst5kA7LeaAOy3mwDst5wA7LedAOy3ngDst58A7LegAOy3oQDst6IA7LejAOy3pADst6UA7LemAOy3pwDst6gA7LepAOy3qgDst6sA7LesAOy3rQDst64A7LevAOy3sADst7EA7LeyAOy3swDst7QA7Le1AOy3tgDst7cA7Le4AOy3uQDst7oA7Le7AOy3vADst70A7Le+AOy3vwDsuIAA7LiBAOy4ggDsuIMA7LiEAOy4hQDsuIYA7LiHAOy4iADsuIkA7LiKAOy4iwDsuIwA7LiNAOy4jgDsuI8A7LiQAOy4kQDsuJIA7LiTAOy4lADsuJUA7LiWAOy4lwDsuJgA7LiZAOy4mgDsuJsA7LicAOy4nQDsuJ4A7LifAOy4oADsuKEA7LiiAOy4owDsuKQA7LilAOy4pgDsuKcA7LioAOy4qQDsuKoA7LirAOy4rADsuK0A7LiuAOy4rwDsuLAA7LixAOy4sgDsuLMA7Li0AOy4tQDsuLYA7Li3AOy4uADsuLkA7Li6AOy4uwDsuLwA7Li9AOy4vgDsuL8A7LmAAOy5gQDsuYIA7LmDAOy5hADsuYUA7LmGAOy5hwDsuYgA7LmJAOy5igDsuYsA7LmMAOy5jQDsuY4A7LmPAOy5kADsuZEA7LmSAOy5kwDsuZQA7LmVAOy5lgDsuZcA7LmYAOy5mQDsuZoA7LmbAOy5nADsuZ0A7LmeAOy5nwDsuaAA7LmhAOy5ogDsuaMA7LmkAOy5pQDsuaYA7LmnAOy5qADsuakA7LmqAOy5qwDsuawA7LmtAOy5rgDsua8A7LmwAOy5sQDsubIA7LmzAOy5tADsubUA7Lm2AOy5twDsubgA7Lm5AOy5ugDsubsA7Lm8AOy5vQDsub4A7Lm/AOy6gADsuoEA7LqCAOy6gwDsuoQA7LqFAOy6hgDsuocA7LqIAOy6iQDsuooA7LqLAOy6jADsuo0A7LqOAOy6jwDsupAA7LqRAOy6kgDsupMA7LqUAOy6lQDsupYA7LqXAOy6mADsupkA7LqaAOy6mwDsupwA7LqdAOy6ngDsup8A7LqgAOy6oQDsuqIA7LqjAOy6pADsuqUA7LqmAOy6pwDsuqgA7LqpAOy6qgDsuqsA7LqsAOy6rQDsuq4A7LqvAOy6sADsurEA7LqyAOy6swDsurQA7Lq1AOy6tgDsurcA7Lq4AOy6uQDsuroA7Lq7AOy6vADsur0A7Lq+AOy6vwDsu4AA7LuBAOy7ggDsu4MA7LuEAOy7hQDsu4YA7LuHAOy7iADsu4kA7LuKAOy7iwDsu4wA7LuNAOy7jgDsu48A7LuQAOy7kQDsu5IA7LuTAOy7lADsu5UA7LuWAOy7lwDsu5gA7LuZAOy7mgDsu5sA7LucAOy7nQDsu54A7LufAOy7oADsu6EA7LuiAOy7owDsu6QA7LulAOy7pgDsu6cA7LuoAOy7qQDsu6oA7LurAOy7rADsu60A7LuuAOy7rwDsu7AA7LuxAOy7sgDsu7MA7Lu0AOy7tQDsu7YA7Lu3AOy7uADsu7kA7Lu6AOy7uwDsu7wA7Lu9AOy7vgDsu78A7LyAAOy8gQDsvIIA7LyDAOy8hADsvIUA7LyGAOy8hwDsvIgA7LyJAOy8igDsvIsA7LyMAOy8jQDsvI4A7LyPAOy8kADsvJEA7LySAOy8kwDsvJQA7LyVAOy8lgDsvJcA7LyYAOy8mQDsvJoA7LybAOy8nADsvJ0A7LyeAOy8nwDsvKAA7LyhAOy8ogDsvKMA7LykAOy8pQDsvKYA7LynAOy8qADsvKkA7LyqAOy8qwDsvKwA7LytAOy8rgDsvK8A7LywAOy8sQDsvLIA7LyzAOy8tADsvLUA7Ly2AOy8twDsvLgA7Ly5AOy8ugDsvLsA7Ly8AOy8vQDsvL4A7Ly/AOy9gADsvYEA7L2CAOy9gwDsvYQA7L2FAOy9hgDsvYcA7L2IAOy9iQDsvYoA7L2LAOy9jADsvY0A7L2OAOy9jwDsvZAA7L2RAOy9kgDsvZMA7L2UAOy9lQDsvZYA7L2XAOy9mADsvZkA7L2aAOy9mwDsvZwA7L2dAOy9ngDsvZ8A7L2gAOy9oQDsvaIA7L2jAOy9pADsvaUA7L2mAOy9pwDsvagA7L2pAOy9qgDsvasA7L2sAOy9rQDsva4A7L2vAOy9sADsvbEA7L2yAOy9swDsvbQA7L21AOy9tgDsvbcA7L24AOy9uQDsvboA7L27AOy9vADsvb0A7L2+AOy9vwDsvoAA7L6BAOy+ggDsvoMA7L6EAOy+hQDsvoYA7L6HAOy+iADsvokA7L6KAOy+iwDsvowA7L6NAOy+jgDsvo8A7L6QAOy+kQDsvpIA7L6TAOy+lADsvpUA7L6WAOy+lwDsvpgA7L6ZAOy+mgDsvpsA7L6cAOy+nQDsvp4A7L6fAOy+oADsvqEA7L6iAOy+owDsvqQA7L6lAOy+pgDsvqcA7L6oAOy+qQDsvqoA7L6rAOy+rADsvq0A7L6uAOy+rwDsvrAA7L6xAOy+sgDsvrMA7L60AOy+tQDsvrYA7L63AOy+uADsvrkA7L66AOy+uwDsvrwA7L69AOy+vgDsvr8A7L+AAOy/gQDsv4IA7L+DAOy/hADsv4UA7L+GAOy/hwDsv4gA7L+JAOy/igDsv4sA7L+MAOy/jQDsv44A7L+PAOy/kADsv5EA7L+SAOy/kwDsv5QA7L+VAOy/lgDsv5cA7L+YAOy/mQDsv5oA7L+bAOy/nADsv50A7L+eAOy/nwDsv6AA7L+hAOy/ogDsv6MA7L+kAOy/pQDsv6YA7L+nAOy/qADsv6kA7L+qAOy/qwDsv6wA7L+tAOy/rgDsv68A7L+wAOy/sQDsv7IA7L+zAOy/tADsv7UA7L+2AOy/twDsv7gA7L+5AOy/ugDsv7sA7L+8AOy/vQDsv74A7L+/AO2AgADtgIEA7YCCAO2AgwDtgIQA7YCFAO2AhgDtgIcA7YCIAO2AiQDtgIoA7YCLAO2AjADtgI0A7YCOAO2AjwDtgJAA7YCRAO2AkgDtgJMA7YCUAO2AlQDtgJYA7YCXAO2AmADtgJkA7YCaAO2AmwDtgJwA7YCdAO2AngDtgJ8A7YCgAO2AoQDtgKIA7YCjAO2ApADtgKUA7YCmAO2ApwDtgKgA7YCpAO2AqgDtgKsA7YCsAO2ArQDtgK4A7YCvAO2AsADtgLEA7YCyAO2AswDtgLQA7YC1AO2AtgDtgLcA7YC4AO2AuQDtgLoA7YC7AO2AvADtgL0A7YC+AO2AvwDtgYAA7YGBAO2BggDtgYMA7YGEAO2BhQDtgYYA7YGHAO2BiADtgYkA7YGKAO2BiwDtgYwA7YGNAO2BjgDtgY8A7YGQAO2BkQDtgZIA7YGTAO2BlADtgZUA7
YGWAO2BlwDtgZgA7YGZAO2BmgDtgZsA7YGcAO2BnQDtgZ4A7YGfAO2BoADtgaEA7YGiAO2BowDtgaQA7YGlAO2BpgDtgacA7YGoAO2BqQDtgaoA7YGrAO2BrADtga0A7YGuAO2BrwDtgbAA7YGxAO2BsgDtgbMA7YG0AO2BtQDtgbYA7YG3AO2BuADtgbkA7YG6AO2BuwDtgbwA7YG9AO2BvgDtgb8A7YKAAO2CgQDtgoIA7YKDAO2ChADtgoUA7YKGAO2ChwDtgogA7YKJAO2CigDtgosA7YKMAO2CjQDtgo4A7YKPAO2CkADtgpEA7YKSAO2CkwDtgpQA7YKVAO2ClgDtgpcA7YKYAO2CmQDtgpoA7YKbAO2CnADtgp0A7YKeAO2CnwDtgqAA7YKhAO2CogDtgqMA7YKkAO2CpQDtgqYA7YKnAO2CqADtgqkA7YKqAO2CqwDtgqwA7YKtAO2CrgDtgq8A7YKwAO2CsQDtgrIA7YKzAO2CtADtgrUA7YK2AO2CtwDtgrgA7YK5AO2CugDtgrsA7YK8AO2CvQDtgr4A7YK/AO2DgADtg4EA7YOCAO2DgwDtg4QA7YOFAO2DhgDtg4cA7YOIAO2DiQDtg4oA7YOLAO2DjADtg40A7YOOAO2DjwDtg5AA7YORAO2DkgDtg5MA7YOUAO2DlQDtg5YA7YOXAO2DmADtg5kA7YOaAO2DmwDtg5wA7YOdAO2DngDtg58A7YOgAO2DoQDtg6IA7YOjAO2DpADtg6UA7YOmAO2DpwDtg6gA7YOpAO2DqgDtg6sA7YOsAO2DrQDtg64A7YOvAO2DsADtg7EA7YOyAO2DswDtg7QA7YO1AO2DtgDtg7cA7YO4AO2DuQDtg7oA7YO7AO2DvADtg70A7YO+AO2DvwDthIAA7YSBAO2EggDthIMA7YSEAO2EhQDthIYA7YSHAO2EiADthIkA7YSKAO2EiwDthIwA7YSNAO2EjgDthI8A7YSQAO2EkQDthJIA7YSTAO2ElADthJUA7YSWAO2ElwDthJgA7YSZAO2EmgDthJsA7YScAO2EnQDthJ4A7YSfAO2EoADthKEA7YSiAO2EowDthKQA7YSlAO2EpgDthKcA7YSoAO2EqQDthKoA7YSrAO2ErADthK0A7YSuAO2ErwDthLAA7YSxAO2EsgDthLMA7YS0AO2EtQDthLYA7YS3AO2EuADthLkA7YS6AO2EuwDthLwA7YS9AO2EvgDthL8A7YWAAO2FgQDthYIA7YWDAO2FhADthYUA7YWGAO2FhwDthYgA7YWJAO2FigDthYsA7YWMAO2FjQDthY4A7YWPAO2FkADthZEA7YWSAO2FkwDthZQA7YWVAO2FlgDthZcA7YWYAO2FmQDthZoA7YWbAO2FnADthZ0A7YWeAO2FnwDthaAA7YWhAO2FogDthaMA7YWkAO2FpQDthaYA7YWnAO2FqADthakA7YWqAO2FqwDthawA7YWtAO2FrgDtha8A7YWwAO2FsQDthbIA7YWzAO2FtADthbUA7YW2AO2FtwDthbgA7YW5AO2FugDthbsA7YW8AO2FvQDthb4A7YW/AO2GgADthoEA7YaCAO2GgwDthoQA7YaFAO2GhgDthocA7YaIAO2GiQDthooA7YaLAO2GjADtho0A7YaOAO2GjwDthpAA7YaRAO2GkgDthpMA7YaUAO2GlQDthpYA7YaXAO2GmADthpkA7YaaAO2GmwDthpwA7YadAO2GngDthp8A7YagAO2GoQDthqIA7YajAO2GpADthqUA7YamAO2GpwDthqgA7YapAO2GqgDthqsA7YasAO2GrQDthq4A7YavAO2GsADthrEA7YayAO2GswDthrQA7Ya1AO2GtgDthrcA7Ya4AO2GuQDthroA7Ya7AO2GvADthr0A7Ya+AO2GvwDth4AA7YeBAO2HggDth4MA7YeEAO2HhQDth4YA7YeHAO2HiADth4kA7YeKAO2HiwDth4wA7YeNAO2HjgDth48A7YeQAO2HkQDth5IA7YeTAO2HlADth5UA7YeWAO2HlwDth5gA7YeZAO2HmgDth5sA7YecAO2HnQDth54A7YefAO2HoADth6EA7YeiAO2HowDth6QA7YelAO2HpgDth6cA7YeoAO2HqQDth6oA7YerAO2HrADth60A7YeuAO2HrwDth7AA7YexAO2HsgDth7MA7Ye0AO2HtQDth7YA7Ye3AO2HuADth7kA7Ye6AO2HuwDth7wA7Ye9AO2HvgDth78A7YiAAO2IgQDtiIIA7YiDAO2IhADtiIUA7YiGAO2IhwDtiIgA7YiJAO2IigDtiIsA7YiMAO2IjQDtiI4A7YiPAO2IkADtiJEA7YiSAO2IkwDtiJQA7YiVAO2IlgDtiJcA7YiYAO2ImQDtiJoA7YibAO2InADtiJ0A7YieAO2InwDtiKAA7YihAO2IogDtiKMA7YikAO2IpQDtiKYA7YinAO2IqADtiKkA7YiqAO2IqwDtiKwA7YitAO2IrgDtiK8A7YiwAO2IsQDtiLIA7YizAO2ItADtiLUA7Yi2AO2ItwDtiLgA7Yi5AO2IugDtiLsA7Yi8AO2IvQDtiL4A7Yi/AO2JgADtiYEA7YmCAO2JgwDtiYQA7YmFAO2JhgDtiYcA7YmIAO2JiQDtiYoA7YmLAO2JjADtiY0A7YmOAO2JjwDtiZAA7YmRAO2JkgDtiZMA7YmUAO2JlQDtiZYA7YmXAO2JmADtiZkA7YmaAO2JmwDtiZwA7YmdAO2JngDtiZ8A7YmgAO2JoQDtiaIA7YmjAO2JpADtiaUA7YmmAO2JpwDtiagA7YmpAO2JqgDtiasA7YmsAO2JrQDtia4A7YmvAO2JsADtibEA7YmyAO2JswDtibQA7Ym1AO2JtgDtibcA7Ym4AO2JuQDtiboA7Ym7AO2JvADtib0A7Ym+AO2JvwDtioAA7YqBAO2KggDtioMA7YqEAO2KhQDtioYA7YqHAO2KiADtiokA7YqKAO2KiwDtiowA7YqNAO2KjgDtio8A7YqQAO2KkQDtipIA7YqTAO2KlADtipUA7YqWAO2KlwDtipgA7YqZAO2KmgDtipsA7YqcAO2KnQDtip4A7YqfAO2KoADtiqEA7YqiAO2KowDtiqQA7YqlAO2KpgDtiqcA7YqoAO2KqQDtiqoA7YqrAO2KrADtiq0A7YquAO2KrwDtirAA7YqxAO2KsgDtirMA7Yq0AO2KtQDtirYA7Yq3AO2KuADtirkA7Yq6AO2KuwDtirwA7Yq9AO2KvgDtir8A7YuAAO2LgQDti4IA7YuDAO2LhADti4UA7YuGAO2LhwDti4gA7YuJAO2LigDti4sA7YuMAO2LjQDti44A7YuPAO2LkADti5EA7YuSAO2LkwDti5QA7YuVAO2LlgDti5cA7YuYAO2LmQDti5oA7YubAO2LnADti50A7YueAO2LnwDti6AA7YuhAO2LogDti6MA7YukAO2LpQDti6YA7YunAO2LqADti6kA7YuqAO2LqwDti6wA7YutAO2LrgDti68A7Yuw
AO2LsQDti7IA7YuzAO2LtADti7UA7Yu2AO2LtwDti7gA7Yu5AO2LugDti7sA7Yu8AO2LvQDti74A7Yu/AO2MgADtjIEA7YyCAO2MgwDtjIQA7YyFAO2MhgDtjIcA7YyIAO2MiQDtjIoA7YyLAO2MjADtjI0A7YyOAO2MjwDtjJAA7YyRAO2MkgDtjJMA7YyUAO2MlQDtjJYA7YyXAO2MmADtjJkA7YyaAO2MmwDtjJwA7YydAO2MngDtjJ8A7YygAO2MoQDtjKIA7YyjAO2MpADtjKUA7YymAO2MpwDtjKgA7YypAO2MqgDtjKsA7YysAO2MrQDtjK4A7YyvAO2MsADtjLEA7YyyAO2MswDtjLQA7Yy1AO2MtgDtjLcA7Yy4AO2MuQDtjLoA7Yy7AO2MvADtjL0A7Yy+AO2MvwDtjYAA7Y2BAO2NggDtjYMA7Y2EAO2NhQDtjYYA7Y2HAO2NiADtjYkA7Y2KAO2NiwDtjYwA7Y2NAO2NjgDtjY8A7Y2QAO2NkQDtjZIA7Y2TAO2NlADtjZUA7Y2WAO2NlwDtjZgA7Y2ZAO2NmgDtjZsA7Y2cAO2NnQDtjZ4A7Y2fAO2NoADtjaEA7Y2iAO2NowDtjaQA7Y2lAO2NpgDtjacA7Y2oAO2NqQDtjaoA7Y2rAO2NrADtja0A7Y2uAO2NrwDtjbAA7Y2xAO2NsgDtjbMA7Y20AO2NtQDtjbYA7Y23AO2NuADtjbkA7Y26AO2NuwDtjbwA7Y29AO2NvgDtjb8A7Y6AAO2OgQDtjoIA7Y6DAO2OhADtjoUA7Y6GAO2OhwDtjogA7Y6JAO2OigDtjosA7Y6MAO2OjQDtjo4A7Y6PAO2OkADtjpEA7Y6SAO2OkwDtjpQA7Y6VAO2OlgDtjpcA7Y6YAO2OmQDtjpoA7Y6bAO2OnADtjp0A7Y6eAO2OnwDtjqAA7Y6hAO2OogDtjqMA7Y6kAO2OpQDtjqYA7Y6nAO2OqADtjqkA7Y6qAO2OqwDtjqwA7Y6tAO2OrgDtjq8A7Y6wAO2OsQDtjrIA7Y6zAO2OtADtjrUA7Y62AO2OtwDtjrgA7Y65AO2OugDtjrsA7Y68AO2OvQDtjr4A7Y6/AO2PgADtj4EA7Y+CAO2PgwDtj4QA7Y+FAO2PhgDtj4cA7Y+IAO2PiQDtj4oA7Y+LAO2PjADtj40A7Y+OAO2PjwDtj5AA7Y+RAO2PkgDtj5MA7Y+UAO2PlQDtj5YA7Y+XAO2PmADtj5kA7Y+aAO2PmwDtj5wA7Y+dAO2PngDtj58A7Y+gAO2PoQDtj6IA7Y+jAO2PpADtj6UA7Y+mAO2PpwDtj6gA7Y+pAO2PqgDtj6sA7Y+sAO2PrQDtj64A7Y+vAO2PsADtj7EA7Y+yAO2PswDtj7QA7Y+1AO2PtgDtj7cA7Y+4AO2PuQDtj7oA7Y+7AO2PvADtj70A7Y++AO2PvwDtkIAA7ZCBAO2QggDtkIMA7ZCEAO2QhQDtkIYA7ZCHAO2QiADtkIkA7ZCKAO2QiwDtkIwA7ZCNAO2QjgDtkI8A7ZCQAO2QkQDtkJIA7ZCTAO2QlADtkJUA7ZCWAO2QlwDtkJgA7ZCZAO2QmgDtkJsA7ZCcAO2QnQDtkJ4A7ZCfAO2QoADtkKEA7ZCiAO2QowDtkKQA7ZClAO2QpgDtkKcA7ZCoAO2QqQDtkKoA7ZCrAO2QrADtkK0A7ZCuAO2QrwDtkLAA7ZCxAO2QsgDtkLMA7ZC0AO2QtQDtkLYA7ZC3AO2QuADtkLkA7ZC6AO2QuwDtkLwA7ZC9AO2QvgDtkL8A7ZGAAO2RgQDtkYIA7ZGDAO2RhADtkYUA7ZGGAO2RhwDtkYgA7ZGJAO2RigDtkYsA7ZGMAO2RjQDtkY4A7ZGPAO2RkADtkZEA7ZGSAO2RkwDtkZQA7ZGVAO2RlgDtkZcA7ZGYAO2RmQDtkZoA7ZGbAO2RnADtkZ0A7ZGeAO2RnwDtkaAA7ZGhAO2RogDtkaMA7ZGkAO2RpQDtkaYA7ZGnAO2RqADtkakA7ZGqAO2RqwDtkawA7ZGtAO2RrgDtka8A7ZGwAO2RsQDtkbIA7ZGzAO2RtADtkbUA7ZG2AO2RtwDtkbgA7ZG5AO2RugDtkbsA7ZG8AO2RvQDtkb4A7ZG/AO2SgADtkoEA7ZKCAO2SgwDtkoQA7ZKFAO2ShgDtkocA7ZKIAO2SiQDtkooA7ZKLAO2SjADtko0A7ZKOAO2SjwDtkpAA7ZKRAO2SkgDtkpMA7ZKUAO2SlQDtkpYA7ZKXAO2SmADtkpkA7ZKaAO2SmwDtkpwA7ZKdAO2SngDtkp8A7ZKgAO2SoQDtkqIA7ZKjAO2SpADtkqUA7ZKmAO2SpwDtkqgA7ZKpAO2SqgDtkqsA7ZKsAO2SrQDtkq4A7ZKvAO2SsADtkrEA7ZKyAO2SswDtkrQA7ZK1AO2StgDtkrcA7ZK4AO2SuQDtkroA7ZK7AO2SvADtkr0A7ZK+AO2SvwDtk4AA7ZOBAO2TggDtk4MA7ZOEAO2ThQDtk4YA7ZOHAO2TiADtk4kA7ZOKAO2TiwDtk4wA7ZONAO2TjgDtk48A7ZOQAO2TkQDtk5IA7ZOTAO2TlADtk5UA7ZOWAO2TlwDtk5gA7ZOZAO2TmgDtk5sA7ZOcAO2TnQDtk54A7ZOfAO2ToADtk6EA7ZOiAO2TowDtk6QA7ZOlAO2TpgDtk6cA7ZOoAO2TqQDtk6oA7ZOrAO2TrADtk60A7ZOuAO2TrwDtk7AA7ZOxAO2TsgDtk7MA7ZO0AO2TtQDtk7YA7ZO3AO2TuADtk7kA7ZO6AO2TuwDtk7wA7ZO9AO2TvgDtk78A7ZSAAO2UgQDtlIIA7ZSDAO2UhADtlIUA7ZSGAO2UhwDtlIgA7ZSJAO2UigDtlIsA7ZSMAO2UjQDtlI4A7ZSPAO2UkADtlJEA7ZSSAO2UkwDtlJQA7ZSVAO2UlgDtlJcA7ZSYAO2UmQDtlJoA7ZSbAO2UnADtlJ0A7ZSeAO2UnwDtlKAA7ZShAO2UogDtlKMA7ZSkAO2UpQDtlKYA7ZSnAO2UqADtlKkA7ZSqAO2UqwDtlKwA7ZStAO2UrgDtlK8A7ZSwAO2UsQDtlLIA7ZSzAO2UtADtlLUA7ZS2AO2UtwDtlLgA7ZS5AO2UugDtlLsA7ZS8AO2UvQDtlL4A7ZS/AO2VgADtlYEA7ZWCAO2VgwDtlYQA7ZWFAO2VhgDtlYcA7ZWIAO2ViQDtlYoA7ZWLAO2VjADtlY0A7ZWOAO2VjwDtlZAA7ZWRAO2VkgDtlZMA7ZWUAO2VlQDtlZYA7ZWXAO2VmADtlZkA7ZWaAO2VmwDtlZwA7ZWdAO2VngDtlZ8A7ZWgAO2VoQDtlaIA7ZWjAO2VpADtlaUA7ZWmAO2VpwDtlagA7ZWpAO2VqgDtlasA7ZWsAO2VrQDtla4A7ZWvAO2VsADtlbEA7ZWyAO2VswDtlbQA7ZW1AO2VtgDtlbcA7ZW4AO2VuQDtlboA7ZW7AO2VvADtlb0A7ZW+AO2VvwDtloAA7ZaBAO2WggDtloMA7ZaEAO2WhQDtloYA7ZaHAO2WiADtlokA7ZaKAO2
WiwDtlowA7ZaNAO2WjgDtlo8A7ZaQAO2WkQDtlpIA7ZaTAO2WlADtlpUA7ZaWAO2WlwDtlpgA7ZaZAO2WmgDtlpsA7ZacAO2WnQDtlp4A7ZafAO2WoADtlqEA7ZaiAO2WowDtlqQA7ZalAO2WpgDtlqcA7ZaoAO2WqQDtlqoA7ZarAO2WrADtlq0A7ZauAO2WrwDtlrAA7ZaxAO2WsgDtlrMA7Za0AO2WtQDtlrYA7Za3AO2WuADtlrkA7Za6AO2WuwDtlrwA7Za9AO2WvgDtlr8A7ZeAAO2XgQDtl4IA7ZeDAO2XhADtl4UA7ZeGAO2XhwDtl4gA7ZeJAO2XigDtl4sA7ZeMAO2XjQDtl44A7ZePAO2XkADtl5EA7ZeSAO2XkwDtl5QA7ZeVAO2XlgDtl5cA7ZeYAO2XmQDtl5oA7ZebAO2XnADtl50A7ZeeAO2XnwDtl6AA7ZehAO2XogDtl6MA7ZekAO2XpQDtl6YA7ZenAO2XqADtl6kA7ZeqAO2XqwDtl6wA7ZetAO2XrgDtl68A7ZewAO2XsQDtl7IA7ZezAO2XtADtl7UA7Ze2AO2XtwDtl7gA7Ze5AO2XugDtl7sA7Ze8AO2XvQDtl74A7Ze/AO2YgADtmIEA7ZiCAO2YgwDtmIQA7ZiFAO2YhgDtmIcA7ZiIAO2YiQDtmIoA7ZiLAO2YjADtmI0A7ZiOAO2YjwDtmJAA7ZiRAO2YkgDtmJMA7ZiUAO2YlQDtmJYA7ZiXAO2YmADtmJkA7ZiaAO2YmwDtmJwA7ZidAO2YngDtmJ8A7ZigAO2YoQDtmKIA7ZijAO2YpADtmKUA7ZimAO2YpwDtmKgA7ZipAO2YqgDtmKsA7ZisAO2YrQDtmK4A7ZivAO2YsADtmLEA7ZiyAO2YswDtmLQA7Zi1AO2YtgDtmLcA7Zi4AO2YuQDtmLoA7Zi7AO2YvADtmL0A7Zi+AO2YvwDtmYAA7ZmBAO2ZggDtmYMA7ZmEAO2ZhQDtmYYA7ZmHAO2ZiADtmYkA7ZmKAO2ZiwDtmYwA7ZmNAO2ZjgDtmY8A7ZmQAO2ZkQDtmZIA7ZmTAO2ZlADtmZUA7ZmWAO2ZlwDtmZgA7ZmZAO2ZmgDtmZsA7ZmcAO2ZnQDtmZ4A7ZmfAO2ZoADtmaEA7ZmiAO2ZowDtmaQA7ZmlAO2ZpgDtmacA7ZmoAO2ZqQDtmaoA7ZmrAO2ZrADtma0A7ZmuAO2ZrwDtmbAA7ZmxAO2ZsgDtmbMA7Zm0AO2ZtQDtmbYA7Zm3AO2ZuADtmbkA7Zm6AO2ZuwDtmbwA7Zm9AO2ZvgDtmb8A7ZqAAO2agQDtmoIA7ZqDAO2ahADtmoUA7ZqGAO2ahwDtmogA7ZqJAO2aigDtmosA7ZqMAO2ajQDtmo4A7ZqPAO2akADtmpEA7ZqSAO2akwDtmpQA7ZqVAO2algDtmpcA7ZqYAO2amQDtmpoA7ZqbAO2anADtmp0A7ZqeAO2anwDtmqAA7ZqhAO2aogDtmqMA7ZqkAO2apQDtmqYA7ZqnAO2aqADtmqkA7ZqqAO2aqwDtmqwA7ZqtAO2argDtmq8A7ZqwAO2asQDtmrIA7ZqzAO2atADtmrUA7Zq2AO2atwDtmrgA7Zq5AO2augDtmrsA7Zq8AO2avQDtmr4A7Zq/AO2bgADtm4EA7ZuCAO2bgwDtm4QA7ZuFAO2bhgDtm4cA7ZuIAO2biQDtm4oA7ZuLAO2bjADtm40A7ZuOAO2bjwDtm5AA7ZuRAO2bkgDtm5MA7ZuUAO2blQDtm5YA7ZuXAO2bmADtm5kA7ZuaAO2bmwDtm5wA7ZudAO2bngDtm58A7ZugAO2boQDtm6IA7ZujAO2bpADtm6UA7ZumAO2bpwDtm6gA7ZupAO2bqgDtm6sA7ZusAO2brQDtm64A7ZuvAO2bsADtm7EA7ZuyAO2bswDtm7QA7Zu1AO2btgDtm7cA7Zu4AO2buQDtm7oA7Zu7AO2bvADtm70A7Zu+AO2bvwDtnIAA7ZyBAO2cggDtnIMA7ZyEAO2chQDtnIYA7ZyHAO2ciADtnIkA7ZyKAO2ciwDtnIwA7ZyNAO2cjgDtnI8A7ZyQAO2ckQDtnJIA7ZyTAO2clADtnJUA7ZyWAO2clwDtnJgA7ZyZAO2cmgDtnJsA7ZycAO2cnQDtnJ4A7ZyfAO2coADtnKEA7ZyiAO2cowDtnKQA7ZylAO2cpgDtnKcA7ZyoAO2cqQDtnKoA7ZyrAO2crADtnK0A7ZyuAO2crwDtnLAA7ZyxAO2csgDtnLMA7Zy0AO2ctQDtnLYA7Zy3AO2cuADtnLkA7Zy6AO2cuwDtnLwA7Zy9AO2cvgDtnL8A7Z2AAO2dgQDtnYIA7Z2DAO2dhADtnYUA7Z2GAO2dhwDtnYgA7Z2JAO2digDtnYsA7Z2MAO2djQDtnY4A7Z2PAO2dkADtnZEA7Z2SAO2dkwDtnZQA7Z2VAO2dlgDtnZcA7Z2YAO2dmQDtnZoA7Z2bAO2dnADtnZ0A7Z2eAO2dnwDtnaAA7Z2hAO2dogDtnaMA7Z2kAO2dpQDtnaYA7Z2nAO2dqADtnakA7Z2qAO2dqwDtnawA7Z2tAO2drgDtna8A7Z2wAO2dsQDtnbIA7Z2zAO2dtADtnbUA7Z22AO2dtwDtnbgA7Z25AO2dugDtnbsA7Z28AO2dvQDtnb4A7Z2/AO2egADtnoEA7Z6CAO2egwDtnoQA7Z6FAO2ehgDtnocA7Z6IAO2eiQDtnooA7Z6LAO2ejADtno0A7Z6OAO2ejwDtnpAA7Z6RAO2ekgDtnpMA7Z6UAO2elQDtnpYA7Z6XAO2emADtnpkA7Z6aAO2emwDtnpwA7Z6dAO2engDtnp8A7Z6gAO2eoQDtnqIA7Z6jAPCRgpoA8JGCnADwkYKrAPCRhK4A8JGErwDwkY2LAPCRjYwA8JGSuwDwkZK8APCRkr4A8JGWugDwkZa7APCdhZfwnYWlAPCdhZjwnYWlAPCdhZjwnYWl8J2FrgDwnYWY8J2FpfCdha8A8J2FmPCdhaXwnYWwAPCdhZjwnYWl8J2FsQDwnYWY8J2FpfCdhbIA8J2GufCdhaUA8J2GufCdhaXwnYWuAPCdhrnwnYWl8J2FrwDwnYa68J2FpQDwnYa68J2FpfCdha4A8J2GuvCdhaXwnYWvAPCghKIA8KCUnADwoJSlAPCglYsA8KCYugDwoKCEAPCgo54A8KCorADwoK2jAPChk6QA8KGaqADwoZuqAPChp4gA8KGsmADwobSLAPCht6QA8KG3pgDwooaDAPCihp8A8KKMsQDwopuUAPCioYQA8KKhigDwoqyMAPCir7EA8KOAigDwo4q4APCjjZ8A8KOOkwDwo46cAPCjj4MA8KOPlQDwo5GtAPCjmqMA8KOipwDwo6qNAPCjq7oA8KOyvADwo7SeAPCju5EA8KO9ngDwo76OAPCkiaMA8KSLrgDwpI6rAPCkmIgA8KSctQDwpKCUAPCksLYA8KSykgDwpL6hAPCkvrgA8KWBhADwpYOyAPClg7MA8KWEmQDwpYSzAPCliYkA8KWQnQDwpZimAPClmpoA8K
WbhQDwpaW8APClqqcA8KWuqwDwpbKAAPCls5AA8KW+hgDwpoeaAPCmiKgA8KaJhwDwpouZAPCmjL4A8KaTmgDwppSjAPCmlqgA8KaepwDwpp61APCmrLwA8KawtgDwprOVAPCmtasA8Ka8rADwpr6xAPCng5IA8KePigDwp5mnAPCnoq4A8KelpgDwp7KoAPCnu5MA8Ke8rwDwqJeSAPCol60A8KicrgDwqK+6APCotbcA8KmFhQDwqYefAPCpiJoA8KmQigDwqZKWAPCplrYA8KmssADwqoOOAPCqhIUA8KqIjgDwqoqRAPCqjpIA8KqYgAA=" + }, + { + "type": "Replace", + "pattern": { + "Regex": " {2,}" + }, + "content": " " + } + ] + }, + "pre_tokenizer": { + "type": "Metaspace", + "replacement": "▁", + "add_prefix_space": true + }, + "post_processor": { + "type": "TemplateProcessing", + "single": [ + { + "Sequence": { + "id": "A", + "type_id": 0 + } + }, + { + "SpecialToken": { + "id": "", + "type_id": 0 + } + } + ], + "pair": [ + { + "Sequence": { + "id": "A", + "type_id": 0 + } + }, + { + "SpecialToken": { + "id": "", + "type_id": 0 + } + }, + { + "Sequence": { + "id": "B", + "type_id": 0 + } + }, + { + "SpecialToken": { + "id": "", + "type_id": 0 + } + } + ], + "special_tokens": { + "": { + "id": "", + "ids": [ + 1 + ], + "tokens": [ + "" + ] + } + } + }, + "decoder": { + "type": "Metaspace", + "replacement": "▁", + "add_prefix_space": true + }, + "model": { + "type": "Unigram", + "unk_id": 2, + "vocab": [ + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "▁", + -2.0122928619384766 + ], + [ + "X", + -2.486478805541992 + ], + [ + ".", + -3.5449328422546387 + ], + [ + ",", + -3.649247407913208 + ], + [ + "s", + -3.9033992290496826 + ], + [ + "▁the", + -3.9598512649536133 + ], + [ + "a", + -4.097104549407959 + ], + [ + ":", + -4.414328098297119 + ], + [ + "▁and", + -4.420670986175537 + ], + [ + "▁to", + -4.4523234367370605 + ], + [ + "▁of", + -4.572070121765137 + ], + [ + "▁fill", + -4.575019836425781 + ], + [ + "e", + -4.674920082092285 + ], + [ + "▁in", + -4.812063694000244 + ], + [ + "t", + -5.063905715942383 + ], + [ + "-", + -5.129043102264404 + ], + [ + "▁is", + -5.283425331115723 + ], + [ + "▁de", + -5.344141960144043 + ], + [ + "▁for", + -5.3930158615112305 + ], + [ + "’", + -5.4228339195251465 + ], + [ + "i", + -5.469857692718506 + ], + [ + "▁that", + -5.576240539550781 + ], + [ + "▁you", + -5.596375465393066 + ], + [ + "d", + -5.6047282218933105 + ], + [ + "▁I", + -5.6640448570251465 + ], + [ + "▁with", + -5.703730583190918 + ], + [ + "n", + -5.737886905670166 + ], + [ + "▁on", + -5.784142971038818 + ], + [ + "'", + -5.828996181488037 + ], + [ + "o", + -5.925558090209961 + ], + [ + "▁are", + -5.931313991546631 + ], + [ + "▁it", + -5.939518928527832 + ], + [ + "en", + -5.9465556144714355 + ], + [ + "▁be", + -5.9556708335876465 + ], + [ + "▁The", + -5.990020751953125 + ], + [ + "▁as", + -6.057407379150391 + ], + [ + "▁your", + -6.132311820983887 + ], + [ + "l", + -6.139498710632324 + ], + [ + "▁(", + -6.184796333312988 + ], + [ + "▁or", + -6.241950035095215 + ], + [ + "▁have", + -6.27459192276001 + ], + [ + "▁at", + -6.327472686767578 + ], + [ + "▁from", + -6.349645137786865 + ], + [ + "▁an", + -6.350090980529785 + ], + [ + "▁was", + -6.350385665893555 + ], + [ + "▁this", + -6.352563381195068 + ], + [ + "er", + -6.3604278564453125 + ], + [ + "▁la", + -6.3624043464660645 + ], + [ + "m", + -6.375206470489502 + ], + [ + "r", + -6.376530170440674 + ], + [ + "ing", + -6.3778581619262695 + ], + [ + "▁can", + -6.387146472930908 + ], + [ + "!", + -6.421379566192627 + ], + [ + "▁will", + -6.423982620239258 + ], + [ + "▁by", + -6.44155216217041 + ], + [ + "?", + -6.585887432098389 + ], + [ + "▁not", + -6.5959086418151855 + ], + [ + "re", + -6.620072364807129 + ], + [ + ")", + 
-6.63656759262085 + ], + [ + "▁we", + -6.643022060394287 + ], + [ + "y", + -6.654535293579102 + ], + [ + "▁und", + -6.741473197937012 + ], + [ + "▁has", + -6.7602033615112305 + ], + [ + "▁all", + -6.768176555633545 + ], + [ + "▁die", + -6.8641204833984375 + ], + [ + "▁but", + -6.906830310821533 + ], + [ + "▁our", + -6.909878730773926 + ], + [ + "▁their", + -6.91325044631958 + ], + [ + "▁A", + -6.915814399719238 + ], + [ + "▁more", + -6.918668746948242 + ], + [ + "▁un", + -6.924930572509766 + ], + [ + "▁der", + -6.925402641296387 + ], + [ + "c", + -6.925714015960693 + ], + [ + "u", + -6.932939052581787 + ], + [ + "in", + -6.934063911437988 + ], + [ + "▁so", + -6.947050094604492 + ], + [ + "▁they", + -6.989297866821289 + ], + [ + "▁one", + -7.012735843658447 + ], + [ + "▁about", + -7.071486473083496 + ], + [ + "▁my", + -7.072140693664551 + ], + [ + "ul", + -7.076492786407471 + ], + [ + "▁which", + -7.097039222717285 + ], + [ + "à", + -7.099997520446777 + ], + [ + "▁In", + -7.100254535675049 + ], + [ + "/", + -7.100865840911865 + ], + [ + "he", + -7.104752540588379 + ], + [ + "f", + -7.110044002532959 + ], + [ + "▁le", + -7.112937927246094 + ], + [ + "▁out", + -7.128556728363037 + ], + [ + "▁also", + -7.133583068847656 + ], + [ + "▁des", + -7.156766414642334 + ], + [ + "▁It", + -7.162121295928955 + ], + [ + "▁up", + -7.1723432540893555 + ], + [ + "▁\"", + -7.172809600830078 + ], + [ + "▁time", + -7.178046703338623 + ], + [ + "ă", + -7.183253765106201 + ], + [ + "if", + -7.185171127319336 + ], + [ + "▁This", + -7.191652297973633 + ], + [ + "▁We", + -7.223267078399658 + ], + [ + "p", + -7.224130153656006 + ], + [ + "▁do", + -7.228212356567383 + ], + [ + "–", + -7.235409736633301 + ], + [ + "▁“", + -7.238142013549805 + ], + [ + "on", + -7.240827560424805 + ], + [ + "h", + -7.2543206214904785 + ], + [ + "▁si", + -7.276725769042969 + ], + [ + "le", + -7.2994256019592285 + ], + [ + "▁les", + -7.312957286834717 + ], + [ + "▁în", + -7.314571857452393 + ], + [ + "▁his", + -7.324767112731934 + ], + [ + "▁who", + -7.35105562210083 + ], + [ + "▁like", + -7.371364116668701 + ], + [ + "b", + -7.375369071960449 + ], + [ + "▁when", + -7.380199432373047 + ], + [ + ";", + -7.380846977233887 + ], + [ + "▁been", + -7.38668966293335 + ], + [ + "▁other", + -7.388518333435059 + ], + [ + "ly", + -7.394660949707031 + ], + [ + "\"", + -7.407205104827881 + ], + [ + "g", + -7.407997131347656 + ], + [ + "▁cu", + -7.415276527404785 + ], + [ + "▁care", + -7.432408332824707 + ], + [ + "▁what", + -7.433043003082275 + ], + [ + "▁new", + -7.4370903968811035 + ], + [ + "or", + -7.445409774780273 + ], + [ + "▁some", + -7.461953639984131 + ], + [ + "▁get", + -7.479001998901367 + ], + [ + "▁were", + -7.491549491882324 + ], + [ + "▁just", + -7.492495536804199 + ], + [ + "▁there", + -7.493194103240967 + ], + [ + "▁would", + -7.494382381439209 + ], + [ + "S", + -7.4974141120910645 + ], + [ + "▁them", + -7.513596057891846 + ], + [ + "▁any", + -7.520544052124023 + ], + [ + ").", + -7.521052360534668 + ], + [ + "al", + -7.523056983947754 + ], + [ + "▁into", + -7.527902603149414 + ], + [ + "▁me", + -7.528337001800537 + ], + [ + "▁had", + -7.532425403594971 + ], + [ + "▁se", + -7.5451483726501465 + ], + [ + "▁make", + -7.5827131271362305 + ], + [ + "at", + -7.589433670043945 + ], + [ + "▁than", + -7.592360019683838 + ], + [ + "▁du", + -7.595852375030518 + ], + [ + "▁over", + -7.6078782081604 + ], + [ + "▁You", + -7.626111030578613 + ], + [ + "▁how", + -7.635554313659668 + ], + [ + "▁no", + -7.63729190826416 + ], + [ + "▁people", + 
-7.639947414398193 + ], + [ + "an", + -7.64084005355835 + ], + [ + "”", + -7.644528865814209 + ], + [ + "é", + -7.646921157836914 + ], + [ + "it", + -7.648641109466553 + ], + [ + "▁If", + -7.648687839508057 + ], + [ + "k", + -7.6605634689331055 + ], + [ + "▁pe", + -7.662139415740967 + ], + [ + "is", + -7.66726016998291 + ], + [ + "▁her", + -7.6733808517456055 + ], + [ + "▁work", + -7.680386543273926 + ], + [ + "ve", + -7.687412738800049 + ], + [ + "▁only", + -7.69785737991333 + ], + [ + "▁may", + -7.702393531799316 + ], + [ + "▁its", + -7.702449798583984 + ], + [ + "▁first", + -7.704373836517334 + ], + [ + "▁most", + -7.708309173583984 + ], + [ + "▁well", + -7.708758354187012 + ], + [ + "▁use", + -7.715085983276367 + ], + [ + "▁zu", + -7.718777656555176 + ], + [ + "▁pour", + -7.736708164215088 + ], + [ + "z", + -7.745654106140137 + ], + [ + "il", + -7.745913982391357 + ], + [ + "▁need", + -7.74778938293457 + ], + [ + "▁these", + -7.763317584991455 + ], + [ + "▁din", + -7.769891262054443 + ], + [ + "▁den", + -7.775663375854492 + ], + [ + "▁us", + -7.778133869171143 + ], + [ + "able", + -7.779712200164795 + ], + [ + "▁S", + -7.781893730163574 + ], + [ + "▁mit", + -7.792516231536865 + ], + [ + "▁very", + -7.79970645904541 + ], + [ + "▁am", + -7.814100742340088 + ], + [ + "&", + -7.829529285430908 + ], + [ + "▁au", + -7.83012056350708 + ], + [ + "▁many", + -7.83834171295166 + ], + [ + "▁mai", + -7.84363317489624 + ], + [ + "A", + -7.849830150604248 + ], + [ + "th", + -7.855541229248047 + ], + [ + "▁through", + -7.859585285186768 + ], + [ + "▁pentru", + -7.86391544342041 + ], + [ + "▁two", + -7.873607158660889 + ], + [ + "▁von", + -7.874959945678711 + ], + [ + "▁way", + -7.887117385864258 + ], + [ + "ll", + -7.887749195098877 + ], + [ + "I", + -7.891303539276123 + ], + [ + "▁ce", + -7.9015631675720215 + ], + [ + "▁și", + -7.904444694519043 + ], + [ + "▁help", + -7.907405853271484 + ], + [ + "▁best", + -7.907911777496338 + ], + [ + "),", + -7.908212184906006 + ], + [ + "un", + -7.925017833709717 + ], + [ + "▁years", + -7.925964832305908 + ], + [ + "▁2", + -7.9282684326171875 + ], + [ + "▁C", + -7.936962604522705 + ], + [ + "▁nu", + -7.939520835876465 + ], + [ + "▁good", + -7.943995952606201 + ], + [ + "v", + -7.94746732711792 + ], + [ + "▁1", + -7.94765567779541 + ], + [ + "w", + -7.947978496551514 + ], + [ + "▁das", + -7.960538864135742 + ], + [ + "▁ca", + -7.962430477142334 + ], + [ + "▁where", + -7.964908123016357 + ], + [ + "▁know", + -7.96622896194458 + ], + [ + "▁year", + -7.971063613891602 + ], + [ + "▁He", + -7.974609375 + ], + [ + "▁see", + -7.980011463165283 + ], + [ + "▁für", + -7.984004497528076 + ], + [ + "▁auf", + -7.984249114990234 + ], + [ + "▁3", + -7.984433650970459 + ], + [ + "de", + -7.985401153564453 + ], + [ + "est", + -8.002091407775879 + ], + [ + "▁back", + -8.007022857666016 + ], + [ + "▁such", + -8.008523941040039 + ], + [ + "▁should", + -8.011754989624023 + ], + [ + "x", + -8.015050888061523 + ], + [ + "▁after", + -8.01761245727539 + ], + [ + "▁could", + -8.019674301147461 + ], + [ + "▁ist", + -8.020784378051758 + ], + [ + "▁now", + -8.022845268249512 + ], + [ + "▁much", + -8.023111343383789 + ], + [ + "and", + -8.02390193939209 + ], + [ + "...", + -8.030110359191895 + ], + [ + "▁home", + -8.036273956298828 + ], + [ + "to", + -8.03821086883545 + ], + [ + "▁ein", + -8.04833984375 + ], + [ + "▁even", + -8.048656463623047 + ], + [ + "▁que", + -8.049829483032227 + ], + [ + "▁day", + -8.051553726196289 + ], + [ + "▁take", + -8.054189682006836 + ], + [ + "▁want", + 
-8.054435729980469 + ], + [ + "▁For", + -8.06217098236084 + ], + [ + "▁said", + -8.063249588012695 + ], + [ + "▁sur", + -8.073471069335938 + ], + [ + "▁une", + -8.077030181884766 + ], + [ + "▁să", + -8.082921028137207 + ], + [ + "▁dans", + -8.084549903869629 + ], + [ + "▁great", + -8.088057518005371 + ], + [ + "▁este", + -8.08947467803955 + ], + [ + "▁because", + -8.094311714172363 + ], + [ + "▁information", + -8.104085922241211 + ], + [ + "ului", + -8.105451583862305 + ], + [ + "▁find", + -8.112174987792969 + ], + [ + "C", + -8.119946479797363 + ], + [ + "▁she", + -8.125317573547363 + ], + [ + "▁im", + -8.126056671142578 + ], + [ + "ation", + -8.130115509033203 + ], + [ + "▁then", + -8.13021469116211 + ], + [ + "▁est", + -8.13099479675293 + ], + [ + "▁par", + -8.138585090637207 + ], + [ + "▁used", + -8.141871452331543 + ], + [ + "▁E", + -8.146790504455566 + ], + [ + "▁made", + -8.149978637695312 + ], + [ + "▁So", + -8.15785026550293 + ], + [ + "am", + -8.16288948059082 + ], + [ + "▁eine", + -8.165464401245117 + ], + [ + "▁şi", + -8.168368339538574 + ], + [ + "▁business", + -8.17335033416748 + ], + [ + "▁right", + -8.173593521118164 + ], + [ + "▁here", + -8.176125526428223 + ], + [ + "▁being", + -8.184967041015625 + ], + [ + "▁B", + -8.185355186462402 + ], + [ + "▁those", + -8.185736656188965 + ], + [ + "▁before", + -8.194721221923828 + ], + [ + "▁And", + -8.199501037597656 + ], + [ + "▁P", + -8.200712203979492 + ], + [ + "ers", + -8.200922012329102 + ], + [ + "▁don", + -8.204029083251953 + ], + [ + "B", + -8.20487117767334 + ], + [ + "▁life", + -8.206265449523926 + ], + [ + "▁go", + -8.209736824035645 + ], + [ + "▁As", + -8.210551261901855 + ], + [ + "▁M", + -8.221170425415039 + ], + [ + "▁each", + -8.22955322265625 + ], + [ + "▁qui", + -8.23323917388916 + ], + [ + "▁place", + -8.236248970031738 + ], + [ + "com", + -8.237479209899902 + ], + [ + "ant", + -8.252915382385254 + ], + [ + "▁sich", + -8.255932807922363 + ], + [ + "▁There", + -8.261948585510254 + ], + [ + "ar", + -8.264991760253906 + ], + [ + "▁Sie", + -8.273868560791016 + ], + [ + "▁own", + -8.277531623840332 + ], + [ + "▁part", + -8.279440879821777 + ], + [ + "ent", + -8.281047821044922 + ], + [ + "▁world", + -8.28173542022705 + ], + [ + "ment", + -8.282004356384277 + ], + [ + "▁while", + -8.294474601745605 + ], + [ + "▁But", + -8.295366287231445 + ], + [ + "▁around", + -8.300799369812012 + ], + [ + "▁L", + -8.301082611083984 + ], + [ + "us", + -8.304039001464844 + ], + [ + "▁plus", + -8.313054084777832 + ], + [ + "▁To", + -8.313691139221191 + ], + [ + "▁5", + -8.31412410736084 + ], + [ + "▁high", + -8.31862735748291 + ], + [ + "▁long", + -8.319378852844238 + ], + [ + "D", + -8.320075035095215 + ], + [ + "▁D", + -8.320279121398926 + ], + [ + "▁really", + -8.322924613952637 + ], + [ + "▁nicht", + -8.332040786743164 + ], + [ + "▁Le", + -8.335328102111816 + ], + [ + "▁service", + -8.3412504196167 + ], + [ + "▁4", + -8.342093467712402 + ], + [ + "▁different", + -8.342538833618164 + ], + [ + "▁Die", + -8.348092079162598 + ], + [ + "▁think", + -8.353771209716797 + ], + [ + "—", + -8.355998039245605 + ], + [ + "▁auch", + -8.357160568237305 + ], + [ + "▁look", + -8.362202644348145 + ], + [ + "▁both", + -8.366817474365234 + ], + [ + "lor", + -8.36687183380127 + ], + [ + "▁down", + -8.367999076843262 + ], + [ + "ten", + -8.368885040283203 + ], + [ + "▁La", + -8.378066062927246 + ], + [ + "▁off", + -8.380044937133789 + ], + [ + "▁vous", + -8.380541801452637 + ], + [ + "▁They", + -8.381462097167969 + ], + [ + "M", + -8.383248329162598 + ], 
+ [ + "▁pas", + -8.384513854980469 + ], + [ + "▁data", + -8.385709762573242 + ], + [ + "▁T", + -8.386754989624023 + ], + [ + "▁love", + -8.388101577758789 + ], + [ + "▁every", + -8.390009880065918 + ], + [ + "▁10", + -8.391179084777832 + ], + [ + "▁last", + -8.392083168029785 + ], + [ + "▁same", + -8.393481254577637 + ], + [ + "▁using", + -8.395487785339355 + ], + [ + "▁free", + -8.408831596374512 + ], + [ + "▁dem", + -8.40894889831543 + ], + [ + "▁still", + -8.409984588623047 + ], + [ + "ate", + -8.410931587219238 + ], + [ + "ist", + -8.415611267089844 + ], + [ + "▁between", + -8.420283317565918 + ], + [ + "P", + -8.420982360839844 + ], + [ + "be", + -8.428167343139648 + ], + [ + "▁available", + -8.429443359375 + ], + [ + "man", + -8.432978630065918 + ], + [ + "▁company", + -8.439678192138672 + ], + [ + "▁G", + -8.441640853881836 + ], + [ + "▁experience", + -8.444950103759766 + ], + [ + "▁going", + -8.449073791503906 + ], + [ + "▁site", + -8.453832626342773 + ], + [ + "j", + -8.455142974853516 + ], + [ + "are", + -8.456900596618652 + ], + [ + "▁set", + -8.470661163330078 + ], + [ + "2", + -8.473684310913086 + ], + [ + "▁system", + -8.474678039550781 + ], + [ + "▁important", + -8.476791381835938 + ], + [ + "▁few", + -8.482437133789062 + ], + [ + "▁fi", + -8.482551574707031 + ], + [ + "ich", + -8.483301162719727 + ], + [ + "▁What", + -8.488649368286133 + ], + [ + "▁services", + -8.502433776855469 + ], + [ + "▁under", + -8.502569198608398 + ], + [ + "▁When", + -8.50308895111084 + ], + [ + "▁online", + -8.50699520111084 + ], + [ + "▁New", + -8.51494312286377 + ], + [ + "▁come", + -8.524871826171875 + ], + [ + "▁provide", + -8.525650024414062 + ], + [ + "F", + -8.526449203491211 + ], + [ + "▁team", + -8.52782154083252 + ], + [ + "▁always", + -8.529409408569336 + ], + [ + "▁De", + -8.530412673950195 + ], + [ + "▁că", + -8.532517433166504 + ], + [ + "▁him", + -8.53586196899414 + ], + [ + "▁F", + -8.538305282592773 + ], + [ + "▁things", + -8.550079345703125 + ], + [ + "▁including", + -8.550943374633789 + ], + [ + "▁support", + -8.552608489990234 + ], + [ + "▁number", + -8.554113388061523 + ], + [ + "T", + -8.557183265686035 + ], + [ + "▁during", + -8.55886459350586 + ], + [ + "▁family", + -8.560463905334473 + ], + [ + "▁little", + -8.561317443847656 + ], + [ + "▁three", + -8.567726135253906 + ], + [ + "▁water", + -8.56810188293457 + ], + [ + "▁man", + -8.569759368896484 + ], + [ + "▁An", + -8.57192611694336 + ], + [ + "based", + -8.572155952453613 + ], + [ + "▁R", + -8.57442855834961 + ], + [ + "▁sau", + -8.574433326721191 + ], + [ + "▁avec", + -8.576035499572754 + ], + [ + "▁better", + -8.576830863952637 + ], + [ + "▁„", + -8.582253456115723 + ], + [ + "▁too", + -8.58635425567627 + ], + [ + "ge", + -8.586719512939453 + ], + [ + "▁must", + -8.589736938476562 + ], + [ + "▁per", + -8.589916229248047 + ], + [ + "ele", + -8.590399742126465 + ], + [ + "▁oder", + -8.59264850616455 + ], + [ + "au", + -8.59555435180664 + ], + [ + "▁aus", + -8.595727920532227 + ], + [ + "▁werden", + -8.598653793334961 + ], + [ + "▁does", + -8.599140167236328 + ], + [ + "▁without", + -8.599270820617676 + ], + [ + "▁ou", + -8.599929809570312 + ], + [ + "▁design", + -8.60101318359375 + ], + [ + "▁va", + -8.605440139770508 + ], + [ + "▁did", + -8.615679740905762 + ], + [ + "▁O", + -8.619062423706055 + ], + [ + "▁U", + -8.623565673828125 + ], + [ + "up", + -8.62901496887207 + ], + [ + "▁end", + -8.63367748260498 + ], + [ + "▁local", + -8.636231422424316 + ], + [ + "▁next", + -8.638967514038086 + ], + [ + "▁sure", + 
-8.64098072052002 + ], + [ + "▁lot", + -8.64644718170166 + ], + [ + "▁Re", + -8.647016525268555 + ], + [ + "▁top", + -8.647642135620117 + ], + [ + "▁Our", + -8.656886100769043 + ], + [ + "▁small", + -8.656978607177734 + ], + [ + "▁full", + -8.659418106079102 + ], + [ + "▁something", + -8.662886619567871 + ], + [ + "ung", + -8.666722297668457 + ], + [ + "▁vor", + -8.673250198364258 + ], + [ + "E", + -8.673337936401367 + ], + [ + "▁give", + -8.67603588104248 + ], + [ + "▁might", + -8.67660903930664 + ], + [ + "▁another", + -8.679330825805664 + ], + [ + "▁6", + -8.680779457092285 + ], + [ + "▁All", + -8.681318283081055 + ], + [ + "▁process", + -8.681672096252441 + ], + [ + "L", + -8.682575225830078 + ], + [ + "▁found", + -8.68941593170166 + ], + [ + "▁sind", + -8.690044403076172 + ], + [ + "▁since", + -8.69528865814209 + ], + [ + "▁With", + -8.695560455322266 + ], + [ + "K", + -8.696988105773926 + ], + [ + "um", + -8.701016426086426 + ], + [ + "▁within", + -8.701669692993164 + ], + [ + "▁post", + -8.706608772277832 + ], + [ + "▁car", + -8.709365844726562 + ], + [ + "une", + -8.714099884033203 + ], + [ + "▁N", + -8.715041160583496 + ], + [ + "▁J", + -8.715597152709961 + ], + [ + "ic", + -8.71823787689209 + ], + [ + "R", + -8.722309112548828 + ], + [ + "ter", + -8.727437019348145 + ], + [ + "ur", + -8.728265762329102 + ], + [ + "▁She", + -8.73131275177002 + ], + [ + "▁public", + -8.732009887695312 + ], + [ + "▁keep", + -8.735784530639648 + ], + [ + "▁H", + -8.736178398132324 + ], + [ + "▁order", + -8.740762710571289 + ], + [ + "▁start", + -8.742195129394531 + ], + [ + "ez", + -8.74746322631836 + ], + [ + "▁‘", + -8.749832153320312 + ], + [ + "uri", + -8.751104354858398 + ], + [ + "▁20", + -8.752482414245605 + ], + [ + "▁On", + -8.753515243530273 + ], + [ + "▁offer", + -8.763005256652832 + ], + [ + "▁quality", + -8.764988899230957 + ], + [ + "▁working", + -8.769987106323242 + ], + [ + "▁No", + -8.770307540893555 + ], + [ + "▁That", + -8.775156021118164 + ], + [ + "▁game", + -8.7863187789917 + ], + [ + "▁bei", + -8.786642074584961 + ], + [ + "▁today", + -8.788661003112793 + ], + [ + "▁never", + -8.794586181640625 + ], + [ + "▁week", + -8.79587173461914 + ], + [ + "▁St", + -8.797786712646484 + ], + [ + "▁feel", + -8.799317359924316 + ], + [ + "▁put", + -8.801899909973145 + ], + [ + "▁website", + -8.80322265625 + ], + [ + "Y", + -8.804483413696289 + ], + [ + "▁days", + -8.804709434509277 + ], + [ + "▁program", + -8.805448532104492 + ], + [ + "▁looking", + -8.810463905334473 + ], + [ + "▁K", + -8.810808181762695 + ], + [ + "▁students", + -8.811436653137207 + ], + [ + "▁create", + -8.811800956726074 + ], + [ + "▁change", + -8.812616348266602 + ], + [ + "▁book", + -8.812932014465332 + ], + [ + "ity", + -8.813761711120605 + ], + [ + "▁At", + -8.815207481384277 + ], + [ + "▁possible", + -8.815670013427734 + ], + [ + "▁sunt", + -8.81651496887207 + ], + [ + "▁7", + -8.818120002746582 + ], + [ + "▁real", + -8.823369026184082 + ], + [ + "▁al", + -8.824172019958496 + ], + [ + "▁making", + -8.825371742248535 + ], + [ + "▁Be", + -8.825761795043945 + ], + [ + "▁products", + -8.82592487335205 + ], + [ + "▁case", + -8.82653522491455 + ], + [ + "▁school", + -8.8272066116333 + ], + [ + "▁say", + -8.830352783203125 + ], + [ + "area", + -8.832084655761719 + ], + [ + "▁My", + -8.833836555480957 + ], + [ + "▁point", + -8.834731101989746 + ], + [ + "▁als", + -8.83560848236084 + ], + [ + "▁children", + -8.836194038391113 + ], + [ + "▁course", + -8.844061851501465 + ], + [ + "▁show", + -8.847993850708008 + ], + [ + "▁8", 
+ -8.849273681640625 + ], + [ + "▁These", + -8.849345207214355 + ], + [ + "▁18", + -8.851140975952148 + ], + [ + "▁large", + -8.851323127746582 + ], + [ + "co", + -8.854362487792969 + ], + [ + "▁über", + -8.854788780212402 + ], + [ + "▁second", + -8.856559753417969 + ], + [ + "▁market", + -8.859807014465332 + ], + [ + "▁fost", + -8.86048698425293 + ], + [ + "▁easy", + -8.863983154296875 + ], + [ + "▁plan", + -8.864302635192871 + ], + [ + "▁project", + -8.864927291870117 + ], + [ + "G", + -8.865178108215332 + ], + [ + "W", + -8.869574546813965 + ], + [ + "3", + -8.871939659118652 + ], + [ + "▁son", + -8.873332023620605 + ], + [ + "la", + -8.879053115844727 + ], + [ + "▁face", + -8.88137435913086 + ], + [ + "▁needs", + -8.88148021697998 + ], + [ + "ch", + -8.883138656616211 + ], + [ + "▁personal", + -8.88343620300293 + ], + [ + "me", + -8.886031150817871 + ], + [ + "▁sont", + -8.887377738952637 + ], + [ + "▁je", + -8.894930839538574 + ], + [ + "▁non", + -8.895471572875977 + ], + [ + "▁got", + -8.896591186523438 + ], + [ + "▁Do", + -8.897382736206055 + ], + [ + "the", + -8.89765453338623 + ], + [ + "▁health", + -8.89908504486084 + ], + [ + "▁special", + -8.90555477142334 + ], + [ + ".\"", + -8.907710075378418 + ], + [ + "1", + -8.907852172851562 + ], + [ + "den", + -8.908616065979004 + ], + [ + "▁state", + -8.909355163574219 + ], + [ + "▁open", + -8.91019058227539 + ], + [ + "▁money", + -8.91053581237793 + ], + [ + "▁again", + -8.913084983825684 + ], + [ + "▁food", + -8.913167953491211 + ], + [ + "▁page", + -8.914595603942871 + ], + [ + "▁together", + -8.91628360748291 + ], + [ + "age", + -8.919108390808105 + ], + [ + "▁qu", + -8.921928405761719 + ], + [ + "hat", + -8.922386169433594 + ], + [ + "▁ver", + -8.926993370056152 + ], + [ + "▁W", + -8.927785873413086 + ], + [ + "▁away", + -8.928759574890137 + ], + [ + "▁wird", + -8.931641578674316 + ], + [ + "▁until", + -8.934249877929688 + ], + [ + "V", + -8.934935569763184 + ], + [ + "▁pre", + -8.935851097106934 + ], + [ + "▁One", + -8.936429977416992 + ], + [ + "▁product", + -8.936561584472656 + ], + [ + "▁often", + -8.939326286315918 + ], + [ + "▁wir", + -8.944111824035645 + ], + [ + "▁nach", + -8.945127487182617 + ], + [ + "▁include", + -8.946555137634277 + ], + [ + "▁um", + -8.948204040527344 + ], + [ + "▁room", + -8.953709602355957 + ], + [ + "▁group", + -8.953767776489258 + ], + [ + "▁name", + -8.954949378967285 + ], + [ + "ce", + -8.955448150634766 + ], + [ + "H", + -8.956180572509766 + ], + [ + "N", + -8.958139419555664 + ], + [ + "▁person", + -8.958183288574219 + ], + [ + "▁social", + -8.958606719970703 + ], + [ + "▁list", + -8.963666915893555 + ], + [ + "▁How", + -8.964127540588379 + ], + [ + "▁why", + -8.96571159362793 + ], + [ + "▁community", + -8.965995788574219 + ], + [ + "▁contact", + -8.973031044006348 + ], + [ + "­", + -8.9755859375 + ], + [ + "▁co", + -8.979683876037598 + ], + [ + "▁play", + -8.983960151672363 + ], + [ + "▁having", + -8.984169960021973 + ], + [ + "▁power", + -8.986917495727539 + ], + [ + "▁call", + -8.991690635681152 + ], + [ + "▁against", + -8.991816520690918 + ], + [ + "▁become", + -8.997780799865723 + ], + [ + "▁cost", + -9.003793716430664 + ], + [ + "▁V", + -9.004593849182129 + ], + [ + "▁research", + -9.006913185119629 + ], + [ + "▁12", + -9.007307052612305 + ], + [ + "▁wie", + -9.008277893066406 + ], + [ + "der", + -9.008386611938477 + ], + [ + "▁thing", + -9.014028549194336 + ], + [ + "▁along", + -9.017301559448242 + ], + [ + "4", + -9.017330169677734 + ], + [ + "▁access", + -9.020391464233398 + ], + [ + 
"▁level", + -9.020505905151367 + ], + [ + "▁price", + -9.022817611694336 + ], + [ + "▁einen", + -9.023714065551758 + ], + [ + "▁side", + -9.026359558105469 + ], + [ + "▁Un", + -9.026851654052734 + ], + [ + "▁means", + -9.030416488647461 + ], + [ + "(", + -9.032341957092285 + ], + [ + "▁big", + -9.034374237060547 + ], + [ + "▁God", + -9.036499977111816 + ], + [ + "▁dass", + -9.037314414978027 + ], + [ + "im", + -9.037374496459961 + ], + [ + "▁30", + -9.037432670593262 + ], + [ + "▁event", + -9.041665077209473 + ], + [ + "▁development", + -9.042060852050781 + ], + [ + "▁form", + -9.04226303100586 + ], + [ + "▁read", + -9.042579650878906 + ], + [ + "▁hand", + -9.043194770812988 + ], + [ + "▁control", + -9.04446792602539 + ], + [ + "▁However", + -9.046320915222168 + ], + [ + "▁done", + -9.048060417175293 + ], + [ + "▁job", + -9.051692008972168 + ], + [ + "▁hard", + -9.056619644165039 + ], + [ + "▁war", + -9.057538032531738 + ], + [ + "▁area", + -9.0584135055542 + ], + [ + "▁add", + -9.0586576461792 + ], + [ + "▁votre", + -9.0593900680542 + ], + [ + "▁live", + -9.059494018554688 + ], + [ + "▁range", + -9.060099601745605 + ], + [ + "▁After", + -9.060164451599121 + ], + [ + "▁Les", + -9.060513496398926 + ], + [ + "▁far", + -9.064413070678711 + ], + [ + "ver", + -9.064727783203125 + ], + [ + "▁old", + -9.069576263427734 + ], + [ + "▁perfect", + -9.06976318359375 + ], + [ + "▁15", + -9.070429801940918 + ], + [ + "▁space", + -9.073654174804688 + ], + [ + "▁house", + -9.074068069458008 + ], + [ + "ine", + -9.07408618927002 + ], + [ + "▁enough", + -9.074334144592285 + ], + [ + "0", + -9.075824737548828 + ], + [ + "▁several", + -9.077119827270508 + ], + [ + "The", + -9.081155776977539 + ], + [ + "mm", + -9.085619926452637 + ], + [ + "▁University", + -9.08637523651123 + ], + [ + "▁diese", + -9.087566375732422 + ], + [ + "▁Co", + -9.088335990905762 + ], + [ + "▁comes", + -9.088497161865234 + ], + [ + "▁across", + -9.088857650756836 + ], + [ + "▁already", + -9.090097427368164 + ], + [ + ",”", + -9.090341567993164 + ], + [ + "▁body", + -9.09276294708252 + ], + [ + "▁Das", + -9.094594955444336 + ], + [ + "▁einer", + -9.095956802368164 + ], + [ + "▁left", + -9.09921646118164 + ], + [ + "▁future", + -9.105711936950684 + ], + [ + "▁times", + -9.106670379638672 + ], + [ + "▁dar", + -9.109651565551758 + ], + [ + "▁simple", + -9.110408782958984 + ], + [ + "ry", + -9.112407684326172 + ], + [ + "▁getting", + -9.113155364990234 + ], + [ + "▁try", + -9.115362167358398 + ], + [ + "ți", + -9.116897583007812 + ], + [ + "ness", + -9.120043754577637 + ], + [ + "▁makes", + -9.120377540588379 + ], + [ + "▁past", + -9.120619773864746 + ], + [ + "ca", + -9.12130069732666 + ], + [ + "▁light", + -9.122207641601562 + ], + [ + "▁Der", + -9.122997283935547 + ], + [ + "▁run", + -9.125843048095703 + ], + [ + "▁four", + -9.126943588256836 + ], + [ + "ance", + -9.130500793457031 + ], + [ + "▁ever", + -9.131503105163574 + ], + [ + "▁einem", + -9.131816864013672 + ], + [ + "▁below", + -9.133723258972168 + ], + [ + "O", + -9.134073257446289 + ], + [ + "▁9", + -9.137282371520996 + ], + [ + "▁learn", + -9.14004135131836 + ], + [ + "out", + -9.140358924865723 + ], + [ + "▁video", + -9.143178939819336 + ], + [ + "▁etc", + -9.146929740905762 + ], + [ + "▁«", + -9.148795127868652 + ], + [ + "▁zum", + -9.149712562561035 + ], + [ + "▁kann", + -9.1504487991333 + ], + [ + "▁minutes", + -9.151180267333984 + ], + [ + "▁example", + -9.154194831848145 + ], + [ + "▁nous", + -9.154619216918945 + ], + [ + "▁Se", + -9.157441139221191 + ], + [ + "▁sie", + 
-9.159955024719238 + ], + [ + "▁industry", + -9.161614418029785 + ], + [ + "▁problem", + -9.162016868591309 + ], + [ + "J", + -9.162480354309082 + ], + [ + "▁country", + -9.163366317749023 + ], + [ + "▁fact", + -9.164189338684082 + ], + [ + "▁type", + -9.164190292358398 + ], + [ + "ner", + -9.164238929748535 + ], + [ + "▁companies", + -9.165864944458008 + ], + [ + "▁line", + -9.169849395751953 + ], + [ + "▁city", + -9.172713279724121 + ], + [ + "▁check", + -9.173710823059082 + ], + [ + "▁doing", + -9.174406051635742 + ], + [ + "elle", + -9.175037384033203 + ], + [ + "▁fun", + -9.176549911499023 + ], + [ + "▁En", + -9.177546501159668 + ], + [ + "▁Your", + -9.178601264953613 + ], + [ + "ling", + -9.181450843811035 + ], + [ + "▁share", + -9.18185806274414 + ], + [ + "ile", + -9.182005882263184 + ], + [ + "▁actually", + -9.187544822692871 + ], + [ + "▁value", + -9.187751770019531 + ], + [ + "zi", + -9.188661575317383 + ], + [ + "▁ab", + -9.1898832321167 + ], + [ + "▁offers", + -9.1905517578125 + ], + [ + "▁less", + -9.190573692321777 + ], + [ + "▁night", + -9.193560600280762 + ], + [ + "▁Dr", + -9.19518756866455 + ], + [ + "▁started", + -9.195454597473145 + ], + [ + "▁least", + -9.198020935058594 + ], + [ + "▁short", + -9.198562622070312 + ], + [ + "▁main", + -9.201143264770508 + ], + [ + "▁single", + -9.202939987182617 + ], + [ + "▁though", + -9.203780174255371 + ], + [ + "▁prin", + -9.203930854797363 + ], + [ + "time", + -9.20531177520752 + ], + [ + "▁hours", + -9.206608772277832 + ], + [ + "▁others", + -9.206849098205566 + ], + [ + "▁called", + -9.20730209350586 + ], + [ + "▁visit", + -9.208869934082031 + ], + [ + "▁bit", + -9.209009170532227 + ], + [ + "ée", + -9.210821151733398 + ], + [ + "▁customers", + -9.211383819580078 + ], + [ + "▁music", + -9.212000846862793 + ], + [ + "▁members", + -9.217191696166992 + ], + [ + "ies", + -9.21743392944336 + ], + [ + "▁pay", + -9.219176292419434 + ], + [ + "nd", + -9.219744682312012 + ], + [ + "▁once", + -9.221125602722168 + ], + [ + "gen", + -9.2217378616333 + ], + [ + "▁können", + -9.222976684570312 + ], + [ + "▁low", + -9.223771095275879 + ], + [ + "▁durch", + -9.227394104003906 + ], + [ + "▁story", + -9.228075981140137 + ], + [ + "▁understand", + -9.22953987121582 + ], + [ + "“", + -9.229856491088867 + ], + [ + "▁Am", + -9.231831550598145 + ], + [ + "▁didn", + -9.234603881835938 + ], + [ + "▁content", + -9.237217903137207 + ], + [ + "son", + -9.24180793762207 + ], + [ + "▁building", + -9.242242813110352 + ], + [ + "▁result", + -9.242605209350586 + ], + [ + "▁aux", + -9.243107795715332 + ], + [ + "▁complete", + -9.244999885559082 + ], + [ + "▁doesn", + -9.24510669708252 + ], + [ + "▁haben", + -9.246070861816406 + ], + [ + "▁questions", + -9.24661636352539 + ], + [ + "line", + -9.247077941894531 + ], + [ + "▁technology", + -9.247429847717285 + ], + [ + "▁Pro", + -9.247976303100586 + ], + [ + "▁current", + -9.248504638671875 + ], + [ + "▁won", + -9.248883247375488 + ], + [ + "▁let", + -9.250710487365723 + ], + [ + "▁features", + -9.251978874206543 + ], + [ + "▁please", + -9.258262634277344 + ], + [ + "5", + -9.258519172668457 + ], + [ + "▁above", + -9.259394645690918 + ], + [ + "ive", + -9.262128829956055 + ], + [ + "▁management", + -9.262394905090332 + ], + [ + "▁lui", + -9.262539863586426 + ], + [ + "her", + -9.263057708740234 + ], + [ + "▁training", + -9.265711784362793 + ], + [ + "▁everything", + -9.2665433883667 + ], + [ + "▁noch", + -9.266846656799316 + ], + [ + "▁came", + -9.267708778381348 + ], + [ + "▁web", + -9.272823333740234 + ], + [ + 
"▁ensure", + -9.272987365722656 + ], + [ + "▁months", + -9.273130416870117 + ], + [ + "▁art", + -9.27313232421875 + ], + [ + "▁sub", + -9.274359703063965 + ], + [ + "▁million", + -9.274559020996094 + ], + [ + "▁professional", + -9.275035858154297 + ], + [ + "▁results", + -9.278368949890137 + ], + [ + "▁kind", + -9.278395652770996 + ], + [ + "▁season", + -9.279285430908203 + ], + [ + "▁unique", + -9.281067848205566 + ], + [ + "ze", + -9.284360885620117 + ], + [ + "▁enjoy", + -9.28487777709961 + ], + [ + "▁early", + -9.287765502929688 + ], + [ + "▁major", + -9.288202285766602 + ], + [ + "▁yet", + -9.29152774810791 + ], + [ + "▁Ver", + -9.293331146240234 + ], + [ + "one", + -9.296777725219727 + ], + [ + "▁media", + -9.29719352722168 + ], + [ + "▁[", + -9.30095100402832 + ], + [ + "▁property", + -9.302969932556152 + ], + [ + "▁beautiful", + -9.304466247558594 + ], + [ + "▁given", + -9.305286407470703 + ], + [ + "▁due", + -9.306716918945312 + ], + [ + "▁government", + -9.307181358337402 + ], + [ + "▁nur", + -9.30881404876709 + ], + [ + "▁email", + -9.309103012084961 + ], + [ + "▁total", + -9.311080932617188 + ], + [ + "▁natural", + -9.311264038085938 + ], + [ + "▁test", + -9.311450004577637 + ], + [ + "▁provides", + -9.311640739440918 + ], + [ + "▁various", + -9.312631607055664 + ], + [ + "▁American", + -9.315605163574219 + ], + [ + "▁moment", + -9.318109512329102 + ], + [ + "▁air", + -9.318952560424805 + ], + [ + "▁idea", + -9.319236755371094 + ], + [ + "▁known", + -9.319981575012207 + ], + [ + "▁Il", + -9.320504188537598 + ], + [ + "▁friends", + -9.320576667785645 + ], + [ + "▁final", + -9.320919036865234 + ], + [ + "▁buy", + -9.32139778137207 + ], + [ + "▁specific", + -9.322234153747559 + ], + [ + "▁issues", + -9.32454776763916 + ], + [ + "▁took", + -9.325233459472656 + ], + [ + "▁mind", + -9.326258659362793 + ], + [ + "▁study", + -9.32675838470459 + ], + [ + "▁addition", + -9.328418731689453 + ], + [ + "▁size", + -9.332446098327637 + ], + [ + "▁pro", + -9.334047317504883 + ], + [ + "▁film", + -9.33545970916748 + ], + [ + "▁pot", + -9.335636138916016 + ], + [ + "▁thought", + -9.338120460510254 + ], + [ + "▁tell", + -9.33890438079834 + ], + [ + "▁While", + -9.339675903320312 + ], + [ + "▁head", + -9.339983940124512 + ], + [ + "▁clients", + -9.340429306030273 + ], + [ + "▁performance", + -9.346199989318848 + ], + [ + "▁question", + -9.346835136413574 + ], + [ + "▁whether", + -9.347925186157227 + ], + [ + "▁certain", + -9.34826946258545 + ], + [ + "▁model", + -9.348764419555664 + ], + [ + "▁following", + -9.350926399230957 + ], + [ + "▁energy", + -9.354207992553711 + ], + [ + "▁office", + -9.354207992553711 + ], + [ + "▁whole", + -9.356687545776367 + ], + [ + "▁bring", + -9.356956481933594 + ], + [ + "▁required", + -9.35726261138916 + ], + [ + "ţi", + -9.358223915100098 + ], + [ + "▁date", + -9.358695030212402 + ], + [ + "_", + -9.358983039855957 + ], + [ + "que", + -9.359789848327637 + ], + [ + "▁da", + -9.360264778137207 + ], + [ + "▁US", + -9.36120319366455 + ], + [ + "▁taking", + -9.36143684387207 + ], + [ + "go", + -9.362788200378418 + ], + [ + "▁living", + -9.36341667175293 + ], + [ + "▁someone", + -9.363489151000977 + ], + [ + "▁heart", + -9.365120887756348 + ], + [ + "▁key", + -9.365775108337402 + ], + [ + "▁areas", + -9.366238594055176 + ], + [ + "▁says", + -9.367013931274414 + ], + [ + "▁2018", + -9.369132041931152 + ], + [ + "▁month", + -9.37012767791748 + ], + [ + "▁Er", + -9.371354103088379 + ], + [ + "ste", + -9.375077247619629 + ], + [ + "▁11", + -9.375179290771484 + ], + [ + 
"▁front", + -9.37528133392334 + ], + [ + "▁Now", + -9.37669563293457 + ], + [ + "▁class", + -9.376946449279785 + ], + [ + "▁choose", + -9.377082824707031 + ], + [ + "pe", + -9.37808609008789 + ], + [ + "▁further", + -9.379021644592285 + ], + [ + "▁believe", + -9.37936019897461 + ], + [ + "of", + -9.379590034484863 + ], + [ + "▁among", + -9.380990982055664 + ], + [ + "sch", + -9.381686210632324 + ], + [ + "▁child", + -9.382609367370605 + ], + [ + "▁aber", + -9.38376235961914 + ], + [ + "▁Please", + -9.386269569396973 + ], + [ + "rea", + -9.387248992919922 + ], + [ + "▁later", + -9.387272834777832 + ], + [ + "▁amount", + -9.388760566711426 + ], + [ + "ice", + -9.390128135681152 + ], + [ + "▁National", + -9.390177726745605 + ], + [ + "▁style", + -9.390748977661133 + ], + [ + "▁tout", + -9.391490936279297 + ], + [ + "▁staff", + -9.392939567565918 + ], + [ + "▁white", + -9.397933959960938 + ], + [ + "▁ge", + -9.399179458618164 + ], + [ + "▁five", + -9.400984764099121 + ], + [ + "▁blog", + -9.40109920501709 + ], + [ + "▁designed", + -9.40125846862793 + ], + [ + "▁went", + -9.402216911315918 + ], + [ + "▁Da", + -9.40268611907959 + ], + [ + "▁general", + -9.403801918029785 + ], + [ + "▁rest", + -9.403874397277832 + ], + [ + "▁zur", + -9.40579891204834 + ], + [ + "▁quite", + -9.405948638916016 + ], + [ + "per", + -9.40687084197998 + ], + [ + "▁customer", + -9.408379554748535 + ], + [ + "▁close", + -9.408747673034668 + ], + [ + "▁Some", + -9.41054630279541 + ], + [ + "▁women", + -9.41075611114502 + ], + [ + "▁move", + -9.410761833190918 + ], + [ + "▁software", + -9.411357879638672 + ], + [ + "▁Ein", + -9.413651466369629 + ], + [ + "▁Ab", + -9.413823127746582 + ], + [ + "▁history", + -9.413864135742188 + ], + [ + "▁either", + -9.41564655303955 + ], + [ + "▁seen", + -9.417396545410156 + ], + [ + "▁card", + -9.419726371765137 + ], + [ + "▁City", + -9.421541213989258 + ], + [ + "▁hope", + -9.421769142150879 + ], + [ + "▁16", + -9.422072410583496 + ], + [ + "és", + -9.422825813293457 + ], + [ + "va", + -9.423294067382812 + ], + [ + "▁Al", + -9.423827171325684 + ], + [ + "▁especially", + -9.424827575683594 + ], + [ + "▁view", + -9.426136016845703 + ], + [ + "men", + -9.427363395690918 + ], + [ + "▁account", + -9.427489280700684 + ], + [ + "▁needed", + -9.429777145385742 + ], + [ + "▁United", + -9.429789543151855 + ], + [ + "]", + -9.432387351989746 + ], + [ + "▁yourself", + -9.432788848876953 + ], + [ + "▁100", + -9.433059692382812 + ], + [ + "▁receive", + -9.433417320251465 + ], + [ + "▁ideas", + -9.43369197845459 + ], + [ + "▁writing", + -9.434585571289062 + ], + [ + "▁simply", + -9.434741973876953 + ], + [ + "▁present", + -9.435087203979492 + ], + [ + "▁continue", + -9.436107635498047 + ], + [ + "▁application", + -9.44115161895752 + ], + [ + "▁build", + -9.44187068939209 + ], + [ + "▁turn", + -9.44249439239502 + ], + [ + "ated", + -9.442923545837402 + ], + [ + "▁everyone", + -9.443060874938965 + ], + [ + "cette", + -9.443114280700684 + ], + [ + "▁bien", + -9.444964408874512 + ], + [ + "less", + -9.445222854614258 + ], + [ + "▁Si", + -9.445359230041504 + ], + [ + "▁original", + -9.446867942810059 + ], + [ + "8", + -9.44794750213623 + ], + [ + "▁individual", + -9.448895454406738 + ], + [ + "tre", + -9.449433326721191 + ], + [ + "▁works", + -9.45171070098877 + ], + [ + "▁options", + -9.451821327209473 + ], + [ + "▁May", + -9.454456329345703 + ], + [ + "▁Not", + -9.454940795898438 + ], + [ + "▁report", + -9.455467224121094 + ], + [ + "mer", + -9.457239151000977 + ], + [ + "▁human", + -9.459118843078613 + 
], + [ + "▁provided", + -9.459603309631348 + ], + [ + "▁By", + -9.460925102233887 + ], + [ + "▁series", + -9.462006568908691 + ], + [ + "7", + -9.46226692199707 + ], + [ + "▁modern", + -9.463875770568848 + ], + [ + "▁meet", + -9.463921546936035 + ], + [ + "▁50", + -9.464119911193848 + ], + [ + "▁25", + -9.46969985961914 + ], + [ + "▁color", + -9.470091819763184 + ], + [ + "▁download", + -9.470109939575195 + ], + [ + "▁Here", + -9.471144676208496 + ], + [ + "6", + -9.471323013305664 + ], + [ + "▁poate", + -9.471449851989746 + ], + [ + "▁În", + -9.472321510314941 + ], + [ + "▁phone", + -9.473695755004883 + ], + [ + "▁likely", + -9.474374771118164 + ], + [ + "▁table", + -9.476469993591309 + ], + [ + "▁ma", + -9.476551055908203 + ], + [ + "▁Or", + -9.479181289672852 + ], + [ + "Z", + -9.48026180267334 + ], + [ + "▁19", + -9.482215881347656 + ], + [ + "▁insurance", + -9.482544898986816 + ], + [ + "▁anything", + -9.483808517456055 + ], + [ + "▁search", + -9.485033988952637 + ], + [ + "▁Ge", + -9.48520565032959 + ], + [ + "▁issue", + -9.485564231872559 + ], + [ + "▁includes", + -9.485688209533691 + ], + [ + "▁clear", + -9.487342834472656 + ], + [ + "les", + -9.488021850585938 + ], + [ + "▁almost", + -9.488259315490723 + ], + [ + "ilor", + -9.48935317993164 + ], + [ + "▁14", + -9.490717887878418 + ], + [ + "by", + -9.494056701660156 + ], + [ + "▁Du", + -9.49624252319336 + ], + [ + "▁mais", + -9.497303009033203 + ], + [ + "ier", + -9.499163627624512 + ], + [ + "▁law", + -9.49924087524414 + ], + [ + "▁added", + -9.500134468078613 + ], + [ + "▁con", + -9.500962257385254 + ], + [ + ",\"", + -9.501530647277832 + ], + [ + "▁ago", + -9.502127647399902 + ], + [ + "▁His", + -9.504697799682617 + ], + [ + "▁points", + -9.504981994628906 + ], + [ + "▁mult", + -9.505581855773926 + ], + [ + "▁financial", + -9.506216049194336 + ], + [ + "▁problems", + -9.506428718566895 + ], + [ + "▁however", + -9.50648307800293 + ], + [ + "▁events", + -9.50675106048584 + ], + [ + "▁half", + -9.507889747619629 + ], + [ + "ard", + -9.511183738708496 + ], + [ + "▁ask", + -9.51156997680664 + ], + [ + "▁version", + -9.511631965637207 + ], + [ + "end", + -9.512478828430176 + ], + [ + "▁created", + -9.512639999389648 + ], + [ + "▁lead", + -9.512917518615723 + ], + [ + "▁focus", + -9.513853073120117 + ], + [ + "▁increase", + -9.515096664428711 + ], + [ + "ex", + -9.515118598937988 + ], + [ + "▁allow", + -9.515798568725586 + ], + [ + "▁extra", + -9.516464233398438 + ], + [ + "▁24", + -9.516692161560059 + ], + [ + "▁credit", + -9.516772270202637 + ], + [ + "▁production", + -9.516801834106445 + ], + [ + "zu", + -9.517256736755371 + ], + [ + "▁black", + -9.51754093170166 + ], + [ + "▁systems", + -9.518040657043457 + ], + [ + "▁17", + -9.518178939819336 + ], + [ + "▁opportunity", + -9.518531799316406 + ], + [ + "▁bis", + -9.519219398498535 + ], + [ + "▁fast", + -9.519807815551758 + ], + [ + "ring", + -9.521166801452637 + ], + [ + "▁Don", + -9.522114753723145 + ], + [ + "▁via", + -9.52242660522461 + ], + [ + "fer", + -9.5225248336792 + ], + [ + "▁comme", + -9.522799491882324 + ], + [ + "▁popular", + -9.523722648620605 + ], + [ + "▁South", + -9.524491310119629 + ], + [ + "ating", + -9.525003433227539 + ], + [ + "▁State", + -9.525198936462402 + ], + [ + "ator", + -9.525679588317871 + ], + [ + "▁common", + -9.525968551635742 + ], + [ + "con", + -9.526727676391602 + ], + [ + "▁throughout", + -9.527557373046875 + ], + [ + "▁risk", + -9.52774715423584 + ], + [ + "▁young", + -9.528532028198242 + ], + [ + "▁Je", + -9.528688430786133 + ], + [ + 
"▁image", + -9.52928352355957 + ], + [ + "ha", + -9.529376983642578 + ], + [ + "▁third", + -9.529587745666504 + ], + [ + "▁taken", + -9.530049324035645 + ], + [ + "▁Z", + -9.5314302444458 + ], + [ + "▁dis", + -9.5316162109375 + ], + [ + "▁From", + -9.533575057983398 + ], + [ + "▁details", + -9.534862518310547 + ], + [ + "▁games", + -9.53516674041748 + ], + [ + "▁practice", + -9.536040306091309 + ], + [ + "che", + -9.536151885986328 + ], + [ + "▁security", + -9.537364959716797 + ], + [ + "▁medical", + -9.537653923034668 + ], + [ + "▁learning", + -9.537806510925293 + ], + [ + "▁material", + -9.538509368896484 + ], + [ + "▁international", + -9.540703773498535 + ], + [ + "▁forward", + -9.541245460510254 + ], + [ + "▁paper", + -9.541247367858887 + ], + [ + "▁action", + -9.541348457336426 + ], + [ + "▁file", + -9.542378425598145 + ], + [ + "▁oil", + -9.543096542358398 + ], + [ + "▁self", + -9.54377555847168 + ], + [ + "▁private", + -9.545247077941895 + ], + [ + "▁interest", + -9.545559883117676 + ], + [ + "bar", + -9.546065330505371 + ], + [ + "▁sale", + -9.547115325927734 + ], + [ + "▁stay", + -9.547348976135254 + ], + [ + "ke", + -9.548089981079102 + ], + [ + "▁San", + -9.549053192138672 + ], + [ + "▁matter", + -9.549870491027832 + ], + [ + "▁reason", + -9.550254821777344 + ], + [ + "ted", + -9.55147647857666 + ], + [ + "▁potential", + -9.551742553710938 + ], + [ + "▁brand", + -9.552441596984863 + ], + [ + "▁field", + -9.55315113067627 + ], + [ + "▁treatment", + -9.553420066833496 + ], + [ + "▁period", + -9.553516387939453 + ], + [ + "▁York", + -9.553890228271484 + ], + [ + "▁Park", + -9.554738998413086 + ], + [ + "▁acest", + -9.556009292602539 + ], + [ + "ou", + -9.556926727294922 + ], + [ + "▁Ce", + -9.557014465332031 + ], + [ + "▁ready", + -9.558111190795898 + ], + [ + "▁rather", + -9.55860424041748 + ], + [ + "▁outside", + -9.560086250305176 + ], + [ + "▁standard", + -9.560121536254883 + ], + [ + "▁located", + -9.560770034790039 + ], + [ + "▁marketing", + -9.562313079833984 + ], + [ + "cu", + -9.564041137695312 + ], + [ + "▁Can", + -9.564562797546387 + ], + [ + "▁education", + -9.566105842590332 + ], + [ + "use", + -9.566640853881836 + ], + [ + "▁role", + -9.566828727722168 + ], + [ + "▁men", + -9.571505546569824 + ], + [ + "▁probably", + -9.571550369262695 + ], + [ + "▁store", + -9.57221508026123 + ], + [ + "▁John", + -9.572355270385742 + ], + [ + "▁rate", + -9.573956489562988 + ], + [ + "▁code", + -9.573994636535645 + ], + [ + "▁kids", + -9.574408531188965 + ], + [ + "▁currently", + -9.57552719116211 + ], + [ + "▁near", + -9.576475143432617 + ], + [ + "▁sales", + -9.576716423034668 + ], + [ + "▁usually", + -9.577012062072754 + ], + [ + "▁activities", + -9.577242851257324 + ], + [ + "▁party", + -9.577371597290039 + ], + [ + "▁leur", + -9.577434539794922 + ], + [ + "▁particular", + -9.577627182006836 + ], + [ + "▁mehr", + -9.577707290649414 + ], + [ + "ill", + -9.578757286071777 + ], + [ + "▁percent", + -9.579113006591797 + ], + [ + "▁fait", + -9.579537391662598 + ], + [ + "▁happy", + -9.579904556274414 + ], + [ + "▁inside", + -9.58005428314209 + ], + [ + "▁save", + -9.580510139465332 + ], + [ + "▁skills", + -9.580765724182129 + ], + [ + "▁consider", + -9.581025123596191 + ], + [ + "▁recent", + -9.58161735534668 + ], + [ + "▁strong", + -9.581781387329102 + ], + [ + "▁position", + -9.582076072692871 + ], + [ + "▁knowledge", + -9.582303047180176 + ], + [ + "▁tax", + -9.583868980407715 + ], + [ + "▁users", + -9.584261894226074 + ], + [ + "und", + -9.585564613342285 + ], + [ + "▁coming", + 
-9.585904121398926 + ], + [ + "▁article", + -9.585923194885254 + ], + [ + "min", + -9.586345672607422 + ], + [ + "▁sein", + -9.586555480957031 + ], + [ + "▁travel", + -9.586871147155762 + ], + [ + "▁changes", + -9.58765983581543 + ], + [ + "▁impact", + -9.588181495666504 + ], + [ + "▁wanted", + -9.588460922241211 + ], + [ + "▁address", + -9.5885591506958 + ], + [ + "▁soon", + -9.58873462677002 + ], + [ + "▁North", + -9.588915824890137 + ], + [ + "ată", + -9.589237213134766 + ], + [ + "▁trying", + -9.58985424041748 + ], + [ + "▁app", + -9.590612411499023 + ], + [ + "▁School", + -9.592510223388672 + ], + [ + "▁Es", + -9.592548370361328 + ], + [ + "we", + -9.59261703491211 + ], + [ + "▁conditions", + -9.59292984008789 + ], + [ + "▁digital", + -9.593293190002441 + ], + [ + "▁similar", + -9.594805717468262 + ], + [ + "▁solution", + -9.59514331817627 + ], + [ + "▁location", + -9.595183372497559 + ], + [ + "▁Of", + -9.595418930053711 + ], + [ + "▁follow", + -9.595842361450195 + ], + [ + "▁red", + -9.597526550292969 + ], + [ + "▁review", + -9.599202156066895 + ], + [ + "▁skin", + -9.599575996398926 + ], + [ + "▁pretty", + -9.600369453430176 + ], + [ + "day", + -9.600558280944824 + ], + [ + "▁dé", + -9.602072715759277 + ], + [ + "▁cause", + -9.602169036865234 + ], + [ + "▁Sa", + -9.602463722229004 + ], + [ + "▁user", + -9.602520942687988 + ], + [ + "▁Man", + -9.603377342224121 + ], + [ + "”.", + -9.604146003723145 + ], + [ + "▁Just", + -9.604366302490234 + ], + [ + "▁faire", + -9.604475021362305 + ], + [ + "▁member", + -9.605619430541992 + ], + [ + "▁iar", + -9.606892585754395 + ], + [ + "▁higher", + -9.607715606689453 + ], + [ + "▁step", + -9.607887268066406 + ], + [ + "▁wide", + -9.608185768127441 + ], + [ + "▁uns", + -9.608920097351074 + ], + [ + "▁World", + -9.609135627746582 + ], + [ + "▁additional", + -9.61176586151123 + ], + [ + "ber", + -9.613197326660156 + ], + [ + "▁easily", + -9.613990783691406 + ], + [ + "▁deal", + -9.615070343017578 + ], + [ + "▁ways", + -9.615514755249023 + ], + [ + "▁mobile", + -9.616837501525879 + ], + [ + "▁national", + -9.616913795471191 + ], + [ + "▁couple", + -9.617389678955078 + ], + [ + "▁ihre", + -9.61939811706543 + ], + [ + "▁choice", + -9.619612693786621 + ], + [ + "for", + -9.619686126708984 + ], + [ + "ous", + -9.62070083618164 + ], + [ + "▁Google", + -9.620855331420898 + ], + [ + "▁environment", + -9.622426986694336 + ], + [ + "urile", + -9.623322486877441 + ], + [ + "▁Center", + -9.626680374145508 + ], + [ + "mp", + -9.628592491149902 + ], + [ + "▁»", + -9.629727363586426 + ], + [ + "qui", + -9.630680084228516 + ], + [ + "▁growth", + -9.631048202514648 + ], + [ + "ler", + -9.633174896240234 + ], + [ + "▁improve", + -9.63360595703125 + ], + [ + "▁items", + -9.6336669921875 + ], + [ + "▁Nu", + -9.63393783569336 + ], + [ + "▁leave", + -9.634074211120605 + ], + [ + "▁true", + -9.634805679321289 + ], + [ + "▁wurde", + -9.63487434387207 + ], + [ + "▁cannot", + -9.635004043579102 + ], + [ + "▁13", + -9.635096549987793 + ], + [ + "▁running", + -9.636015892028809 + ], + [ + "▁anti", + -9.636177062988281 + ], + [ + "▁option", + -9.636306762695312 + ], + [ + "▁reading", + -9.63657283782959 + ], + [ + "▁Car", + -9.636698722839355 + ], + [ + "▁Wir", + -9.638110160827637 + ], + [ + "▁April", + -9.63975715637207 + ], + [ + "▁behind", + -9.640642166137695 + ], + [ + "▁client", + -9.640750885009766 + ], + [ + "▁cover", + -9.641012191772461 + ], + [ + "▁stop", + -9.641090393066406 + ], + [ + "ja", + -9.641277313232422 + ], + [ + "▁built", + -9.641307830810547 + ], + [ + 
"▁Con", + -9.641313552856445 + ], + [ + "ement", + -9.641366004943848 + ], + [ + "▁projects", + -9.641828536987305 + ], + [ + "▁variety", + -9.641840934753418 + ], + [ + "▁Ihre", + -9.642666816711426 + ], + [ + "ș", + -9.64302921295166 + ], + [ + "▁unter", + -9.64385986328125 + ], + [ + "▁longer", + -9.646577835083008 + ], + [ + "year", + -9.647161483764648 + ], + [ + "▁photo", + -9.648370742797852 + ], + [ + "▁Also", + -9.64933967590332 + ], + [ + "▁received", + -9.651098251342773 + ], + [ + "▁return", + -9.652676582336426 + ], + [ + "00", + -9.653081893920898 + ], + [ + "▁bar", + -9.653343200683594 + ], + [ + "ary", + -9.654427528381348 + ], + [ + "elor", + -9.655137062072754 + ], + [ + "▁Home", + -9.656189918518066 + ], + [ + "our", + -9.656298637390137 + ], + [ + "▁Me", + -9.65771198272705 + ], + [ + "▁held", + -9.659111022949219 + ], + [ + "▁click", + -9.66014289855957 + ], + [ + "▁ex", + -9.660178184509277 + ], + [ + "▁cum", + -9.661561965942383 + ], + [ + "▁takes", + -9.66395378112793 + ], + [ + "▁computer", + -9.665796279907227 + ], + [ + "▁told", + -9.668192863464355 + ], + [ + "+", + -9.670648574829102 + ], + [ + "▁patients", + -9.670809745788574 + ], + [ + "ting", + -9.672165870666504 + ], + [ + "▁direct", + -9.672248840332031 + ], + [ + "▁quickly", + -9.672410011291504 + ], + [ + "tic", + -9.672877311706543 + ], + [ + "▁vom", + -9.673723220825195 + ], + [ + "▁di", + -9.67381477355957 + ], + [ + "▁kitchen", + -9.674022674560547 + ], + [ + "▁network", + -9.675640106201172 + ], + [ + "▁2015", + -9.676688194274902 + ], + [ + "▁effective", + -9.677227020263672 + ], + [ + "▁collection", + -9.677703857421875 + ], + [ + "▁2017", + -9.677751541137695 + ], + [ + "▁words", + -9.678145408630371 + ], + [ + "▁cele", + -9.678857803344727 + ], + [ + "▁student", + -9.678862571716309 + ], + [ + "▁amazing", + -9.678932189941406 + ], + [ + "eur", + -9.680419921875 + ], + [ + ".”", + -9.68227481842041 + ], + [ + "▁ale", + -9.682716369628906 + ], + [ + "”,", + -9.68414306640625 + ], + [ + "▁purchase", + -9.684350967407227 + ], + [ + "▁mean", + -9.68477725982666 + ], + [ + "▁West", + -9.686846733093262 + ], + [ + "▁nice", + -9.6889066696167 + ], + [ + "▁age", + -9.689131736755371 + ], + [ + "▁base", + -9.68923568725586 + ], + [ + "▁summer", + -9.68928337097168 + ], + [ + "▁multi", + -9.689496994018555 + ], + [ + "▁allows", + -9.689573287963867 + ], + [ + "▁latest", + -9.689604759216309 + ], + [ + "▁global", + -9.68992805480957 + ], + [ + "▁chance", + -9.690792083740234 + ], + [ + "▁sense", + -9.690872192382812 + ], + [ + "ieren", + -9.692789077758789 + ], + [ + "▁difficult", + -9.693133354187012 + ], + [ + "ité", + -9.694750785827637 + ], + [ + "ka", + -9.694792747497559 + ], + [ + "du", + -9.69483757019043 + ], + [ + "▁providing", + -9.695744514465332 + ], + [ + "▁Art", + -9.696940422058105 + ], + [ + "▁drive", + -9.698554992675781 + ], + [ + "▁Go", + -9.698877334594727 + ], + [ + "▁très", + -9.699414253234863 + ], + [ + "U", + -9.699579238891602 + ], + [ + "▁Pre", + -9.699846267700195 + ], + [ + "▁shows", + -9.700040817260742 + ], + [ + "▁hair", + -9.701324462890625 + ], + [ + "▁success", + -9.701513290405273 + ], + [ + "▁UK", + -9.703169822692871 + ], + [ + "red", + -9.703241348266602 + ], + [ + "ü", + -9.703370094299316 + ], + [ + "ish", + -9.703631401062012 + ], + [ + "▁weeks", + -9.704839706420898 + ], + [ + "▁solutions", + -9.7055025100708 + ], + [ + "▁Pe", + -9.7057523727417 + ], + [ + "▁equipment", + -9.706141471862793 + ], + [ + "și", + -9.706482887268066 + ], + [ + "▁worked", + 
-9.707073211669922 + ], + [ + "\".", + -9.708627700805664 + ], + [ + "▁legal", + -9.708720207214355 + ], + [ + "▁bad", + -9.70892333984375 + ], + [ + "▁40", + -9.709561347961426 + ], + [ + "▁Internet", + -9.709798812866211 + ], + [ + "▁included", + -9.709976196289062 + ], + [ + "▁upon", + -9.710977554321289 + ], + [ + "▁excellent", + -9.71106243133545 + ], + [ + "▁goal", + -9.71130084991455 + ], + [ + "▁El", + -9.711408615112305 + ], + [ + "▁Mo", + -9.711703300476074 + ], + [ + "▁policy", + -9.71319580078125 + ], + [ + "▁aussi", + -9.713537216186523 + ], + [ + "▁weight", + -9.713687896728516 + ], + [ + "ici", + -9.715133666992188 + ], + [ + "▁approach", + -9.715584754943848 + ], + [ + "▁six", + -9.71579647064209 + ], + [ + "▁entire", + -9.715911865234375 + ], + [ + "9", + -9.71633529663086 + ], + [ + "▁send", + -9.716832160949707 + ], + [ + "▁1.", + -9.718971252441406 + ], + [ + "▁wenn", + -9.719056129455566 + ], + [ + "▁photos", + -9.71993637084961 + ], + [ + "://", + -9.721014022827148 + ], + [ + "ger", + -9.72281551361084 + ], + [ + "▁favorite", + -9.723104476928711 + ], + [ + "ley", + -9.723477363586426 + ], + [ + "▁else", + -9.72463321685791 + ], + [ + "▁types", + -9.72468376159668 + ], + [ + "▁link", + -9.725333213806152 + ], + [ + "▁recently", + -9.72584056854248 + ], + [ + "▁Mit", + -9.72631549835205 + ], + [ + "▁hot", + -9.726548194885254 + ], + [ + "tra", + -9.726597785949707 + ], + [ + "ş", + -9.727307319641113 + ], + [ + "▁according", + -9.728511810302734 + ], + [ + "▁necessary", + -9.728511810302734 + ], + [ + "▁multiple", + -9.729269027709961 + ], + [ + "▁Im", + -9.729510307312012 + ], + [ + "▁sehr", + -9.729660034179688 + ], + [ + "▁sign", + -9.732263565063477 + ], + [ + "▁anyone", + -9.73283576965332 + ], + [ + "▁land", + -9.733613014221191 + ], + [ + "▁States", + -9.734037399291992 + ], + [ + "▁unsere", + -9.734119415283203 + ], + [ + "ées", + -9.734639167785645 + ], + [ + "We", + -9.735671043395996 + ], + [ + "▁nothing", + -9.735845565795898 + ], + [ + "▁commercial", + -9.736858367919922 + ], + [ + "ful", + -9.737265586853027 + ], + [ + "▁seems", + -9.739325523376465 + ], + [ + "▁International", + -9.740097045898438 + ], + [ + "▁March", + -9.74163818359375 + ], + [ + "▁Thanks", + -9.743307113647461 + ], + [ + "▁County", + -9.74365234375 + ], + [ + "▁books", + -9.744638442993164 + ], + [ + "▁Ca", + -9.7451753616333 + ], + [ + "▁mi", + -9.746304512023926 + ], + [ + "▁meeting", + -9.746662139892578 + ], + [ + "▁tools", + -9.747593879699707 + ], + [ + "▁cut", + -9.747650146484375 + ], + [ + "▁related", + -9.74765682220459 + ], + [ + "▁lives", + -9.748003005981445 + ], + [ + "way", + -9.748501777648926 + ], + [ + "▁develop", + -9.748651504516602 + ], + [ + "▁sound", + -9.748723983764648 + ], + [ + "▁safe", + -9.748950958251953 + ], + [ + "▁Her", + -9.74937629699707 + ], + [ + "▁average", + -9.751277923583984 + ], + [ + "▁clean", + -9.75174331665039 + ], + [ + "▁talk", + -9.752362251281738 + ], + [ + "▁peut", + -9.75241756439209 + ], + [ + "▁dann", + -9.752546310424805 + ], + [ + "▁terms", + -9.753265380859375 + ], + [ + "▁foarte", + -9.753512382507324 + ], + [ + "▁super", + -9.754284858703613 + ], + [ + "▁programs", + -9.754853248596191 + ], + [ + "▁decision", + -9.75540828704834 + ], + [ + "▁costs", + -9.756058692932129 + ], + [ + "▁être", + -9.756291389465332 + ], + [ + "▁2019", + -9.757674217224121 + ], + [ + "led", + -9.759482383728027 + ], + [ + "▁parents", + -9.759617805480957 + ], + [ + "▁Mr", + -9.761702537536621 + ], + [ + "▁lower", + -9.762362480163574 + ], + [ + 
"▁door", + -9.762978553771973 + ], + [ + "▁été", + -9.763933181762695 + ], + [ + "▁box", + -9.764954566955566 + ], + [ + "▁record", + -9.765517234802246 + ], + [ + "▁win", + -9.765650749206543 + ], + [ + "ster", + -9.766402244567871 + ], + [ + "▁America", + -9.766748428344727 + ], + [ + "▁immer", + -9.768763542175293 + ], + [ + "▁road", + -9.76996898651123 + ], + [ + "▁leading", + -9.772759437561035 + ], + [ + "▁section", + -9.772838592529297 + ], + [ + "▁Facebook", + -9.772990226745605 + ], + [ + "▁Most", + -9.7738676071167 + ], + [ + "iert", + -9.77435302734375 + ], + [ + "▁morning", + -9.774497032165527 + ], + [ + "▁asked", + -9.775190353393555 + ], + [ + "▁involved", + -9.77551555633545 + ], + [ + "▁hier", + -9.777607917785645 + ], + [ + "▁images", + -9.77821159362793 + ], + [ + "▁House", + -9.778263092041016 + ], + [ + "▁highly", + -9.780763626098633 + ], + [ + "▁Bar", + -9.781620979309082 + ], + [ + "▁Service", + -9.782510757446289 + ], + [ + "▁attention", + -9.784318923950195 + ], + [ + "▁normal", + -9.784571647644043 + ], + [ + "▁plans", + -9.785883903503418 + ], + [ + "▁source", + -9.785930633544922 + ], + [ + "▁Aus", + -9.788092613220215 + ], + [ + "▁benefits", + -9.788655281066895 + ], + [ + "▁ses", + -9.789348602294922 + ], + [ + "des", + -9.789867401123047 + ], + [ + "▁internet", + -9.789949417114258 + ], + [ + "▁materials", + -9.790080070495605 + ], + [ + "▁même", + -9.791318893432617 + ], + [ + "▁fine", + -9.791522026062012 + ], + [ + "▁fit", + -9.792226791381836 + ], + [ + "▁21", + -9.792612075805664 + ], + [ + "▁itself", + -9.793739318847656 + ], + [ + "▁wieder", + -9.793972969055176 + ], + [ + "▁Many", + -9.795313835144043 + ], + [ + "▁nature", + -9.795402526855469 + ], + [ + "▁pain", + -9.795467376708984 + ], + [ + "▁device", + -9.796183586120605 + ], + [ + "art", + -9.796989440917969 + ], + [ + "pro", + -9.7971830368042 + ], + [ + "▁France", + -9.797271728515625 + ], + [ + "lich", + -9.797314643859863 + ], + [ + "▁2014", + -9.799542427062988 + ], + [ + "▁inter", + -9.799964904785156 + ], + [ + "▁Li", + -9.800453186035156 + ], + [ + "▁career", + -9.801136016845703 + ], + [ + "▁looks", + -9.80145263671875 + ], + [ + "▁ré", + -9.802245140075684 + ], + [ + "▁ability", + -9.802556991577148 + ], + [ + "▁situation", + -9.803154945373535 + ], + [ + "ville", + -9.803157806396484 + ], + [ + "▁2016", + -9.80319595336914 + ], + [ + "tes", + -9.803462982177734 + ], + [ + "▁remember", + -9.803879737854004 + ], + [ + "▁TV", + -9.803998947143555 + ], + [ + "▁levels", + -9.805853843688965 + ], + [ + "▁subject", + -9.807723999023438 + ], + [ + "ally", + -9.80844497680664 + ], + [ + "▁reduce", + -9.810232162475586 + ], + [ + "▁*", + -9.8108491897583 + ], + [ + "▁Day", + -9.810867309570312 + ], + [ + "▁write", + -9.812152862548828 + ], + [ + "▁pick", + -9.814252853393555 + ], + [ + "ence", + -9.815399169921875 + ], + [ + "▁fresh", + -9.816520690917969 + ], + [ + "▁traditional", + -9.816662788391113 + ], + [ + "chi", + -9.817692756652832 + ], + [ + "▁machine", + -9.818047523498535 + ], + [ + "▁resources", + -9.819125175476074 + ], + [ + "â", + -9.819502830505371 + ], + [ + "▁countries", + -9.820009231567383 + ], + [ + "▁Even", + -9.820342063903809 + ], + [ + "▁green", + -9.821283340454102 + ], + [ + "▁Free", + -9.821910858154297 + ], + [ + "▁daily", + -9.822112083435059 + ], + [ + "▁respect", + -9.823013305664062 + ], + [ + "▁instead", + -9.823714256286621 + ], + [ + "▁Once", + -9.82418155670166 + ], + [ + "▁word", + -9.824407577514648 + ], + [ + "▁construction", + -9.82489013671875 + ], + 
[ + "▁huge", + -9.825064659118652 + ], + [ + "▁feature", + -9.825220108032227 + ], + [ + "▁themselves", + -9.826369285583496 + ], + [ + "▁loss", + -9.82919692993164 + ], + [ + "%", + -9.830063819885254 + ], + [ + "▁safety", + -9.830256462097168 + ], + [ + "▁economic", + -9.831406593322754 + ], + [ + "▁require", + -9.831945419311523 + ], + [ + "30", + -9.83255386352539 + ], + [ + "▁planning", + -9.833393096923828 + ], + [ + "▁mal", + -9.834482192993164 + ], + [ + "▁directly", + -9.835214614868164 + ], + [ + "ure", + -9.835719108581543 + ], + [ + "▁track", + -9.835734367370605 + ], + [ + "▁tool", + -9.836135864257812 + ], + [ + "▁positive", + -9.836392402648926 + ], + [ + "▁piece", + -9.837076187133789 + ], + [ + "▁parts", + -9.837140083312988 + ], + [ + "ang", + -9.83740520477295 + ], + [ + "▁trip", + -9.837453842163086 + ], + [ + "▁organization", + -9.837935447692871 + ], + [ + "▁sites", + -9.838274002075195 + ], + [ + "▁fire", + -9.83831787109375 + ], + [ + "▁China", + -9.838876724243164 + ], + [ + "▁Pour", + -9.839289665222168 + ], + [ + "▁plant", + -9.84011459350586 + ], + [ + "▁board", + -9.840341567993164 + ], + [ + "▁interesting", + -9.841227531433105 + ], + [ + "gar", + -9.841713905334473 + ], + [ + "▁fie", + -9.841752052307129 + ], + [ + "▁late", + -9.842166900634766 + ], + [ + "▁wall", + -9.842294692993164 + ], + [ + "▁walk", + -9.842741966247559 + ], + [ + "ham", + -9.843868255615234 + ], + [ + "▁Ne", + -9.845427513122559 + ], + [ + "▁First", + -9.845462799072266 + ], + [ + "▁double", + -9.845701217651367 + ], + [ + "▁budget", + -9.847657203674316 + ], + [ + "▁cases", + -9.847670555114746 + ], + [ + "cal", + -9.849738121032715 + ], + [ + "old", + -9.849796295166016 + ], + [ + "▁Bo", + -9.849822998046875 + ], + [ + "▁spend", + -9.850439071655273 + ], + [ + "port", + -9.850828170776367 + ], + [ + "▁worth", + -9.850934028625488 + ], + [ + "ique", + -9.851308822631836 + ], + [ + "nes", + -9.85190486907959 + ], + [ + "cul", + -9.852272033691406 + ], + [ + "era", + -9.85296630859375 + ], + [ + "▁text", + -9.853032112121582 + ], + [ + "▁decided", + -9.854948997497559 + ], + [ + "▁floor", + -9.855036735534668 + ], + [ + "▁requirements", + -9.85529899597168 + ], + [ + "▁cel", + -9.855361938476562 + ], + [ + "▁effect", + -9.855412483215332 + ], + [ + "▁gibt", + -9.856159210205078 + ], + [ + "▁news", + -9.859238624572754 + ], + [ + "▁vos", + -9.859931945800781 + ], + [ + "▁players", + -9.86057186126709 + ], + [ + "▁saw", + -9.862728118896484 + ], + [ + "▁auto", + -9.863056182861328 + ], + [ + "▁town", + -9.863207817077637 + ], + [ + "▁myself", + -9.864106178283691 + ], + [ + "▁lost", + -9.864988327026367 + ], + [ + "▁$", + -9.865124702453613 + ], + [ + "▁June", + -9.86609172821045 + ], + [ + "▁significant", + -9.866196632385254 + ], + [ + "▁giving", + -9.866230010986328 + ], + [ + "▁stand", + -9.866744041442871 + ], + [ + "▁stock", + -9.867657661437988 + ], + [ + "▁hold", + -9.867766380310059 + ], + [ + "▁Are", + -9.869078636169434 + ], + [ + "▁shall", + -9.86923599243164 + ], + [ + "▁ideal", + -9.869279861450195 + ], + [ + "▁London", + -9.87080192565918 + ], + [ + "▁answer", + -9.870853424072266 + ], + [ + "▁Vor", + -9.87157917022705 + ], + [ + "▁gives", + -9.873115539550781 + ], + [ + "ative", + -9.87316608428955 + ], + [ + "▁timp", + -9.873167991638184 + ], + [ + "▁center", + -9.87362289428711 + ], + [ + "▁Group", + -9.874580383300781 + ], + [ + "▁sans", + -9.875143051147461 + ], + [ + "▁Ar", + -9.875466346740723 + ], + [ + "▁Ma", + -9.875568389892578 + ], + [ + "▁reach", + 
-9.876279830932617 + ], + [ + "ren", + -9.876652717590332 + ], + [ + "▁More", + -9.877446174621582 + ], + [ + "mit", + -9.878068923950195 + ], + [ + "▁guide", + -9.87833309173584 + ], + [ + "▁fully", + -9.878828048706055 + ], + [ + "▁Since", + -9.878952980041504 + ], + [ + "▁Inc", + -9.87923812866211 + ], + [ + "▁culture", + -9.879780769348145 + ], + [ + "eat", + -9.880531311035156 + ], + [ + "▁written", + -9.880722999572754 + ], + [ + "▁Ho", + -9.881338119506836 + ], + [ + "▁India", + -9.881625175476074 + ], + [ + "▁Well", + -9.881708145141602 + ], + [ + "back", + -9.881752967834473 + ], + [ + "▁goes", + -9.882170677185059 + ], + [ + "▁completely", + -9.88217544555664 + ], + [ + "▁tour", + -9.883081436157227 + ], + [ + "▁began", + -9.883196830749512 + ], + [ + "▁picture", + -9.883255958557129 + ], + [ + "▁mare", + -9.88353157043457 + ], + [ + "▁playing", + -9.884223937988281 + ], + [ + "▁trebuie", + -9.884926795959473 + ], + [ + "ils", + -9.884940147399902 + ], + [ + "chen", + -9.885220527648926 + ], + [ + "▁hit", + -9.885416984558105 + ], + [ + "▁complex", + -9.88591480255127 + ], + [ + "▁Thank", + -9.886140823364258 + ], + [ + "▁Let", + -9.886350631713867 + ], + [ + "▁applications", + -9.887116432189941 + ], + [ + "▁friend", + -9.888312339782715 + ], + [ + "▁English", + -9.889549255371094 + ], + [ + "▁charge", + -9.890040397644043 + ], + [ + "▁recommend", + -9.893453598022461 + ], + [ + "▁message", + -9.893672943115234 + ], + [ + "In", + -9.893722534179688 + ], + [ + "▁Mar", + -9.894762992858887 + ], + [ + "pp", + -9.895845413208008 + ], + [ + "▁method", + -9.89692497253418 + ], + [ + "▁successful", + -9.897004127502441 + ], + [ + "tion", + -9.898880958557129 + ], + [ + "▁release", + -9.899920463562012 + ], + [ + "▁creating", + -9.900403022766113 + ], + [ + "▁despre", + -9.90141773223877 + ], + [ + "esc", + -9.902434349060059 + ], + [ + "▁eye", + -9.902752876281738 + ], + [ + "▁apply", + -9.905945777893066 + ], + [ + "net", + -9.906000137329102 + ], + [ + "side", + -9.906539916992188 + ], + [ + "▁ar", + -9.906949996948242 + ], + [ + "▁platform", + -9.90713882446289 + ], + [ + "▁touch", + -9.907329559326172 + ], + [ + "▁towards", + -9.90785026550293 + ], + [ + "▁match", + -9.908224105834961 + ], + [ + "▁Black", + -9.909344673156738 + ], + [ + "▁fall", + -9.90961742401123 + ], + [ + "▁ground", + -9.910234451293945 + ], + [ + "▁High", + -9.910740852355957 + ], + [ + "▁Q", + -9.911155700683594 + ], + [ + "▁schon", + -9.911709785461426 + ], + [ + "▁hotel", + -9.911751747131348 + ], + [ + "▁prices", + -9.912031173706055 + ], + [ + "▁developed", + -9.913411140441895 + ], + [ + "uk", + -9.913476943969727 + ], + [ + "ide", + -9.91367244720459 + ], + [ + "▁September", + -9.91370964050293 + ], + [ + "ized", + -9.914202690124512 + ], + [ + "▁War", + -9.914704322814941 + ], + [ + "!!", + -9.916285514831543 + ], + [ + "▁grow", + -9.916997909545898 + ], + [ + "▁watch", + -9.917067527770996 + ], + [ + "▁storage", + -9.917412757873535 + ], + [ + "eau", + -9.917513847351074 + ], + [ + "can", + -9.918373107910156 + ], + [ + "▁Get", + -9.919524192810059 + ], + [ + "▁See", + -9.91953182220459 + ], + [ + "▁European", + -9.919703483581543 + ], + [ + "▁language", + -9.91982650756836 + ], + [ + "ează", + -9.920175552368164 + ], + [ + "▁court", + -9.920334815979004 + ], + [ + "▁Why", + -9.921106338500977 + ], + [ + "▁hear", + -9.921342849731445 + ], + [ + "▁doar", + -9.921804428100586 + ], + [ + "lan", + -9.92330265045166 + ], + [ + "▁Christmas", + -9.923810958862305 + ], + [ + "▁Web", + -9.923871994018555 + ], 
+ [ + "vo", + -9.92405891418457 + ], + [ + "▁sent", + -9.924983024597168 + ], + [ + "▁businesses", + -9.925868034362793 + ], + [ + "▁Red", + -9.926278114318848 + ], + [ + "tel", + -9.926375389099121 + ], + [ + "▁Ha", + -9.926508903503418 + ], + [ + "▁wonderful", + -9.926653861999512 + ], + [ + "ations", + -9.926738739013672 + ], + [ + "za", + -9.92748737335205 + ], + [ + "▁22", + -9.928659439086914 + ], + [ + "▁thinking", + -9.92941665649414 + ], + [ + "▁became", + -9.929733276367188 + ], + [ + "▁cool", + -9.929835319519043 + ], + [ + "▁speed", + -9.930370330810547 + ], + [ + "mar", + -9.930426597595215 + ], + [ + "▁--", + -9.931743621826172 + ], + [ + "▁groups", + -9.931920051574707 + ], + [ + "▁interested", + -9.93198299407959 + ], + [ + "ak", + -9.93218994140625 + ], + [ + "▁60", + -9.932672500610352 + ], + [ + "▁screen", + -9.93370246887207 + ], + [ + "▁Design", + -9.933789253234863 + ], + [ + "▁limited", + -9.935648918151855 + ], + [ + "▁expected", + -9.935959815979004 + ], + [ + "▁opportunities", + -9.936376571655273 + ], + [ + "▁regular", + -9.936870574951172 + ], + [ + "off", + -9.93702220916748 + ], + [ + "▁Best", + -9.937298774719238 + ], + [ + "Re", + -9.938436508178711 + ], + [ + "▁ihr", + -9.938719749450684 + ], + [ + "▁Great", + -9.938907623291016 + ], + [ + "▁employees", + -9.93924617767334 + ], + [ + "▁custom", + -9.939679145812988 + ], + [ + "▁multe", + -9.940123558044434 + ], + [ + "let", + -9.940876007080078 + ], + [ + "▁benefit", + -9.942487716674805 + ], + [ + "▁term", + -9.942623138427734 + ], + [ + "▁bine", + -9.942869186401367 + ], + [ + "▁deep", + -9.944526672363281 + ], + [ + "▁August", + -9.94526481628418 + ], + [ + "▁President", + -9.945381164550781 + ], + [ + "▁Auf", + -9.945854187011719 + ], + [ + "▁wish", + -9.946924209594727 + ], + [ + "▁sometimes", + -9.947274208068848 + ], + [ + "ari", + -9.947793960571289 + ], + [ + "▁pressure", + -9.948184967041016 + ], + [ + "▁ani", + -9.94859504699707 + ], + [ + "▁trade", + -9.949930191040039 + ], + [ + "▁firm", + -9.950027465820312 + ], + [ + "▁comment", + -9.95003604888916 + ], + [ + "▁November", + -9.950242042541504 + ], + [ + "▁expect", + -9.951102256774902 + ], + [ + "▁2012", + -9.952491760253906 + ], + [ + "▁Ich", + -9.95328140258789 + ], + [ + "▁relationship", + -9.95363998413086 + ], + [ + "▁active", + -9.954682350158691 + ], + [ + "org", + -9.954710960388184 + ], + [ + "▁heat", + -9.956732749938965 + ], + [ + "▁wood", + -9.95678997039795 + ], + [ + "▁notre", + -9.957921028137207 + ], + [ + "▁function", + -9.958330154418945 + ], + [ + "▁2.", + -9.95909309387207 + ], + [ + "▁wedding", + -9.960049629211426 + ], + [ + "▁starting", + -9.961235046386719 + ], + [ + "▁Health", + -9.961249351501465 + ], + [ + "\",", + -9.961713790893555 + ], + [ + "▁death", + -9.962173461914062 + ], + [ + "▁pages", + -9.962764739990234 + ], + [ + "▁vehicle", + -9.96293830871582 + ], + [ + "▁request", + -9.963874816894531 + ], + [ + "▁helps", + -9.963916778564453 + ], + [ + "▁blue", + -9.964017868041992 + ], + [ + "▁analysis", + -9.964414596557617 + ], + [ + "▁posted", + -9.964544296264648 + ], + [ + "▁healthy", + -9.964814186096191 + ], + [ + "▁contract", + -9.964988708496094 + ], + [ + "▁•", + -9.965263366699219 + ], + [ + "▁Each", + -9.965293884277344 + ], + [ + "▁Fa", + -9.966179847717285 + ], + [ + "▁dintre", + -9.966221809387207 + ], + [ + "▁Friday", + -9.967202186584473 + ], + [ + "▁considered", + -9.967992782592773 + ], + [ + "cher", + -9.96826457977295 + ], + [ + "▁quick", + -9.968731880187988 + ], + [ + "▁understanding", + 
-9.96916389465332 + ], + [ + "▁condition", + -9.969378471374512 + ], + [ + "ization", + -9.971049308776855 + ], + [ + "▁document", + -9.971664428710938 + ], + [ + "▁prevent", + -9.971890449523926 + ], + [ + "▁growing", + -9.9725341796875 + ], + [ + "▁protection", + -9.972620964050293 + ], + [ + "▁cat", + -9.974002838134766 + ], + [ + "▁#", + -9.975058555603027 + ], + [ + "10", + -9.975275039672852 + ], + [ + "▁join", + -9.9759521484375 + ], + [ + "▁serve", + -9.976580619812012 + ], + [ + "▁blood", + -9.977095603942871 + ], + [ + "▁July", + -9.977341651916504 + ], + [ + "▁region", + -9.977787971496582 + ], + [ + "car", + -9.97933578491211 + ], + [ + "▁entre", + -9.979788780212402 + ], + [ + "▁physical", + -9.981287002563477 + ], + [ + "▁cash", + -9.9813232421875 + ], + [ + "aux", + -9.981823921203613 + ], + [ + "ng", + -9.982654571533203 + ], + [ + "▁stage", + -9.98281478881836 + ], + [ + "▁seem", + -9.983034133911133 + ], + [ + "▁definitely", + -9.983795166015625 + ], + [ + "▁investment", + -9.983827590942383 + ], + [ + "▁purpose", + -9.985441207885742 + ], + [ + "▁begin", + -9.985486030578613 + ], + [ + "®", + -9.985495567321777 + ], + [ + "▁break", + -9.985701560974121 + ], + [ + "itate", + -9.987293243408203 + ], + [ + "▁moving", + -9.989288330078125 + ], + [ + "▁met", + -9.990678787231445 + ], + [ + "ize", + -9.990833282470703 + ], + [ + "▁select", + -9.991165161132812 + ], + [ + "▁tous", + -9.991310119628906 + ], + [ + "▁Europe", + -9.991639137268066 + ], + [ + "@", + -9.992724418640137 + ], + [ + "▁individuals", + -9.993392944335938 + ], + [ + "▁Zeit", + -9.993524551391602 + ], + [ + "gu", + -9.995670318603516 + ], + [ + "▁unit", + -9.995753288269043 + ], + [ + "▁noi", + -9.996089935302734 + ], + [ + "▁places", + -9.996171951293945 + ], + [ + "all", + -9.99632453918457 + ], + [ + "▁wait", + -9.996755599975586 + ], + [ + "▁difference", + -9.997234344482422 + ], + [ + "▁round", + -9.998015403747559 + ], + [ + "50", + -9.99953842163086 + ], + [ + "rie", + -9.999545097351074 + ], + [ + "▁Et", + -9.999933242797852 + ], + [ + "20", + -10.000725746154785 + ], + [ + "▁activity", + -10.000792503356934 + ], + [ + "е", + -10.000866889953613 + ], + [ + "▁Windows", + -10.001087188720703 + ], + [ + "▁produce", + -10.001385688781738 + ], + [ + "▁keine", + -10.00212574005127 + ], + [ + "▁Air", + -10.002567291259766 + ], + [ + "▁January", + -10.004890441894531 + ], + [ + "▁deux", + -10.005081176757812 + ], + [ + "▁entry", + -10.005208015441895 + ], + [ + "king", + -10.006500244140625 + ], + [ + "▁goals", + -10.006736755371094 + ], + [ + "▁previous", + -10.0077543258667 + ], + [ + "▁+", + -10.008035659790039 + ], + [ + "▁Business", + -10.008259773254395 + ], + [ + "ont", + -10.008552551269531 + ], + [ + "▁Sunday", + -10.008694648742676 + ], + [ + "▁offering", + -10.010359764099121 + ], + [ + "▁response", + -10.011018753051758 + ], + [ + "▁surface", + -10.011393547058105 + ], + [ + "▁Department", + -10.01212215423584 + ], + [ + "▁exactly", + -10.012190818786621 + ], + [ + "▁Online", + -10.012577056884766 + ], + [ + "dem", + -10.013803482055664 + ], + [ + "ischen", + -10.014006614685059 + ], + [ + "▁hands", + -10.015100479125977 + ], + [ + "▁hour", + -10.016197204589844 + ], + [ + "▁dog", + -10.016946792602539 + ], + [ + "▁damage", + -10.017006874084473 + ], + [ + "▁capital", + -10.018792152404785 + ], + [ + "▁toate", + -10.020488739013672 + ], + [ + "▁wrong", + -10.020674705505371 + ], + [ + "unui", + -10.022201538085938 + ], + [ + "tri", + -10.023979187011719 + ], + [ + "▁sell", + -10.023999214172363 
+ ], + [ + "▁published", + -10.024175643920898 + ], + [ + "▁families", + -10.024675369262695 + ], + [ + "▁avoid", + -10.025490760803223 + ], + [ + "▁Ko", + -10.025506019592285 + ], + [ + "▁mod", + -10.026697158813477 + ], + [ + "rat", + -10.027653694152832 + ], + [ + "▁Make", + -10.0299654006958 + ], + [ + "▁October", + -10.030153274536133 + ], + [ + "▁former", + -10.031285285949707 + ], + [ + "▁Services", + -10.03281021118164 + ], + [ + "▁felt", + -10.033045768737793 + ], + [ + "▁selection", + -10.033309936523438 + ], + [ + "eaza", + -10.034177780151367 + ], + [ + "gel", + -10.034422874450684 + ], + [ + "▁Good", + -10.035792350769043 + ], + [ + "▁actual", + -10.0364351272583 + ], + [ + "▁gut", + -10.036853790283203 + ], + [ + "▁gas", + -10.03708553314209 + ], + [ + "15", + -10.038182258605957 + ], + [ + "▁structure", + -10.038285255432129 + ], + [ + "▁act", + -10.0386381149292 + ], + [ + "▁Zu", + -10.038654327392578 + ], + [ + "▁creative", + -10.039134979248047 + ], + [ + "▁Vi", + -10.039159774780273 + ], + [ + "▁shop", + -10.04066276550293 + ], + [ + "▁Lo", + -10.040735244750977 + ], + [ + "şi", + -10.042192459106445 + ], + [ + "▁mis", + -10.042224884033203 + ], + [ + "ungen", + -10.042301177978516 + ], + [ + "▁fan", + -10.04240608215332 + ], + [ + "▁|", + -10.043391227722168 + ], + [ + "▁Bei", + -10.044037818908691 + ], + [ + "▁protect", + -10.04454517364502 + ], + [ + "▁Na", + -10.0447998046875 + ], + [ + "q", + -10.045693397521973 + ], + [ + "ok", + -10.04710578918457 + ], + [ + "▁California", + -10.047263145446777 + ], + [ + "▁political", + -10.047301292419434 + ], + [ + "25", + -10.047530174255371 + ], + [ + "▁feeling", + -10.047913551330566 + ], + [ + "▁ces", + -10.048321723937988 + ], + [ + "▁display", + -10.048857688903809 + ], + [ + "▁essential", + -10.04964542388916 + ], + [ + "ând", + -10.049971580505371 + ], + [ + "▁seine", + -10.050551414489746 + ], + [ + "▁soft", + -10.050915718078613 + ], + [ + "ach", + -10.05102252960205 + ], + [ + "▁happen", + -10.051118850708008 + ], + [ + "▁Paul", + -10.053346633911133 + ], + [ + "▁Cu", + -10.054024696350098 + ], + [ + "house", + -10.055376052856445 + ], + [ + "ante", + -10.05582046508789 + ], + [ + "▁easier", + -10.056551933288574 + ], + [ + "▁sort", + -10.0567045211792 + ], + [ + "▁Post", + -10.057138442993164 + ], + [ + "▁accept", + -10.05730152130127 + ], + [ + "field", + -10.057648658752441 + ], + [ + "zen", + -10.057741165161133 + ], + [ + "▁character", + -10.057848930358887 + ], + [ + "▁beginning", + -10.058433532714844 + ], + [ + "▁Jesus", + -10.058760643005371 + ], + [ + "▁weekend", + -10.059663772583008 + ], + [ + "▁certainly", + -10.06114387512207 + ], + [ + "▁THE", + -10.061254501342773 + ], + [ + "▁alle", + -10.06189250946045 + ], + [ + "▁transport", + -10.062220573425293 + ], + [ + "▁Saturday", + -10.063043594360352 + ], + [ + "▁basic", + -10.064136505126953 + ], + [ + "▁loved", + -10.06431770324707 + ], + [ + "ros", + -10.065333366394043 + ], + [ + "▁offered", + -10.065996170043945 + ], + [ + "▁camera", + -10.067024230957031 + ], + [ + "▁Green", + -10.06789779663086 + ], + [ + "ology", + -10.069480895996094 + ], + [ + "ä", + -10.069646835327148 + ], + [ + "▁manage", + -10.070416450500488 + ], + [ + "▁paid", + -10.070881843566895 + ], + [ + "▁advice", + -10.071617126464844 + ], + [ + "▁patient", + -10.072234153747559 + ], + [ + "▁spent", + -10.072272300720215 + ], + [ + "▁mir", + -10.072366714477539 + ], + [ + "▁baby", + -10.072400093078613 + ], + [ + "ö", + -10.073193550109863 + ], + [ + "▁basis", + -10.073338508605957 + 
], + [ + "▁cancer", + -10.073765754699707 + ], + [ + "▁Although", + -10.07400894165039 + ], + [ + "▁gift", + -10.074336051940918 + ], + [ + "▁3.", + -10.074871063232422 + ], + [ + "dieser", + -10.075157165527344 + ], + [ + "▁overall", + -10.07520580291748 + ], + [ + "▁Sch", + -10.075265884399414 + ], + [ + "▁Ex", + -10.076258659362793 + ], + [ + "▁December", + -10.07689094543457 + ], + [ + "▁released", + -10.078214645385742 + ], + [ + "▁prior", + -10.07900333404541 + ], + [ + "▁sowie", + -10.081072807312012 + ], + [ + "▁club", + -10.081326484680176 + ], + [ + "▁Street", + -10.081535339355469 + ], + [ + "▁College", + -10.08254623413086 + ], + [ + "▁î", + -10.083059310913086 + ], + [ + "over", + -10.083159446716309 + ], + [ + "▁gave", + -10.08454704284668 + ], + [ + "▁truly", + -10.084784507751465 + ], + [ + "par", + -10.084806442260742 + ], + [ + "▁Canada", + -10.084888458251953 + ], + [ + "▁existing", + -10.085420608520508 + ], + [ + "lie", + -10.086335182189941 + ], + [ + "▁ganz", + -10.086658477783203 + ], + [ + "▁setting", + -10.087109565734863 + ], + [ + "▁supply", + -10.08739185333252 + ], + [ + "▁college", + -10.087540626525879 + ], + [ + "▁communication", + -10.088407516479492 + ], + [ + "▁23", + -10.088834762573242 + ], + [ + "▁pass", + -10.091546058654785 + ], + [ + "▁devices", + -10.091872215270996 + ], + [ + "▁glass", + -10.092083930969238 + ], + [ + "▁experienced", + -10.092395782470703 + ], + [ + "▁grand", + -10.093363761901855 + ], + [ + "▁Po", + -10.093396186828613 + ], + [ + "▁beyond", + -10.094029426574707 + ], + [ + "▁format", + -10.094165802001953 + ], + [ + "▁mon", + -10.09461498260498 + ], + [ + "▁perform", + -10.094635009765625 + ], + [ + "sten", + -10.095130920410156 + ], + [ + "▁1,", + -10.096270561218262 + ], + [ + "▁Per", + -10.096640586853027 + ], + [ + "▁sold", + -10.097247123718262 + ], + [ + "▁rates", + -10.0972900390625 + ], + [ + "▁regarding", + -10.097782135009766 + ], + [ + "▁Paris", + -10.098291397094727 + ], + [ + "▁Dar", + -10.099579811096191 + ], + [ + "▁challenge", + -10.099649429321289 + ], + [ + "▁feet", + -10.100564002990723 + ], + [ + "▁Su", + -10.102017402648926 + ], + [ + "je", + -10.102593421936035 + ], + [ + "▁Bank", + -10.102627754211426 + ], + [ + "ven", + -10.103126525878906 + ], + [ + "jo", + -10.103290557861328 + ], + [ + "▁band", + -10.10348892211914 + ], + [ + "▁delivery", + -10.104915618896484 + ], + [ + "Vous", + -10.104924201965332 + ], + [ + "tele", + -10.10495376586914 + ], + [ + "▁East", + -10.105379104614258 + ], + [ + "▁pictures", + -10.106067657470703 + ], + [ + "▁useful", + -10.106481552124023 + ], + [ + "*", + -10.107648849487305 + ], + [ + "▁increased", + -10.107746124267578 + ], + [ + "▁stories", + -10.108119010925293 + ], + [ + "sion", + -10.108280181884766 + ], + [ + "bra", + -10.108345985412598 + ], + [ + "▁brought", + -10.108466148376465 + ], + [ + "▁effort", + -10.109898567199707 + ], + [ + "▁payment", + -10.11058235168457 + ], + [ + "▁heard", + -10.110925674438477 + ], + [ + "▁played", + -10.111245155334473 + ], + [ + "▁White", + -10.111417770385742 + ], + [ + "▁metal", + -10.111721992492676 + ], + [ + "tal", + -10.111754417419434 + ], + [ + "▁engine", + -10.112006187438965 + ], + [ + "▁Club", + -10.11218547821045 + ], + [ + "ical", + -10.114581108093262 + ], + [ + "▁effects", + -10.115421295166016 + ], + [ + "▁degree", + -10.115763664245605 + ], + [ + "▁bed", + -10.1159086227417 + ], + [ + "ette", + -10.115991592407227 + ], + [ + "▁David", + -10.116386413574219 + ], + [ + "°", + -10.117666244506836 + ], + [ + "▁Au", 
+ -10.117938041687012 + ], + [ + "▁Company", + -10.11845874786377 + ], + [ + "▁player", + -10.11938190460205 + ], + [ + "▁Today", + -10.120569229125977 + ], + [ + "▁maintain", + -10.12093448638916 + ], + [ + "▁minute", + -10.121193885803223 + ], + [ + "mail", + -10.122172355651855 + ], + [ + "▁race", + -10.122366905212402 + ], + [ + "▁comfortable", + -10.123887062072754 + ], + [ + "▁responsible", + -10.124085426330566 + ], + [ + "vor", + -10.124622344970703 + ], + [ + "▁associated", + -10.124695777893066 + ], + [ + "▁weather", + -10.124701499938965 + ], + [ + "▁$1", + -10.125639915466309 + ], + [ + "▁tried", + -10.126176834106445 + ], + [ + "▁Check", + -10.127649307250977 + ], + [ + "▁solid", + -10.127864837646484 + ], + [ + "▁movie", + -10.128364562988281 + ], + [ + "▁coffee", + -10.12874698638916 + ], + [ + "board", + -10.129073143005371 + ], + [ + "▁po", + -10.12946605682373 + ], + [ + "▁warm", + -10.129583358764648 + ], + [ + "▁connect", + -10.131733894348145 + ], + [ + "▁Ad", + -10.133807182312012 + ], + [ + "work", + -10.133859634399414 + ], + [ + "mal", + -10.13397216796875 + ], + [ + "▁Act", + -10.134634971618652 + ], + [ + "▁achieve", + -10.134769439697266 + ], + [ + "▁Nach", + -10.136604309082031 + ], + [ + "www", + -10.136669158935547 + ], + [ + "term", + -10.13672161102295 + ], + [ + "▁claim", + -10.137251853942871 + ], + [ + "▁particularly", + -10.138245582580566 + ], + [ + "▁cas", + -10.138396263122559 + ], + [ + "▁furniture", + -10.138461112976074 + ], + [ + "▁finish", + -10.13896369934082 + ], + [ + "▁temps", + -10.139026641845703 + ], + [ + "▁disease", + -10.139115333557129 + ], + [ + "▁lots", + -10.139196395874023 + ], + [ + "▁ball", + -10.139307975769043 + ], + [ + "▁sun", + -10.14010238647461 + ], + [ + "▁strategy", + -10.140498161315918 + ], + [ + "bre", + -10.140518188476562 + ], + [ + "▁mine", + -10.141541481018066 + ], + [ + "▁Click", + -10.141743659973145 + ], + [ + "ran", + -10.141983032226562 + ], + [ + "▁Will", + -10.142234802246094 + ], + [ + "▁garden", + -10.142974853515625 + ], + [ + "▁stuff", + -10.14359188079834 + ], + [ + "▁limit", + -10.144641876220703 + ], + [ + "▁bottom", + -10.14494800567627 + ], + [ + "▁shown", + -10.144962310791016 + ], + [ + "ship", + -10.145271301269531 + ], + [ + "▁habe", + -10.145858764648438 + ], + [ + "▁Super", + -10.146219253540039 + ], + [ + "▁completed", + -10.146971702575684 + ], + [ + "▁wine", + -10.146979331970215 + ], + [ + "ische", + -10.147262573242188 + ], + [ + "▁largest", + -10.147466659545898 + ], + [ + "▁appropriate", + -10.148261070251465 + ], + [ + "▁immediately", + -10.150248527526855 + ], + [ + "▁Hi", + -10.152358055114746 + ], + [ + "▁trust", + -10.152767181396484 + ], + [ + "ability", + -10.154254913330078 + ], + [ + "▁powerful", + -10.155101776123047 + ], + [ + "▁helping", + -10.155620574951172 + ], + [ + "▁schedule", + -10.155688285827637 + ], + [ + "▁correct", + -10.155707359313965 + ], + [ + "▁transfer", + -10.156496047973633 + ], + [ + "pre", + -10.15665340423584 + ], + [ + "▁journey", + -10.15688419342041 + ], + [ + "pm", + -10.157002449035645 + ], + [ + "don", + -10.158435821533203 + ], + [ + "▁highest", + -10.159249305725098 + ], + [ + "▁finally", + -10.15999698638916 + ], + [ + "form", + -10.160258293151855 + ], + [ + "▁extremely", + -10.160404205322266 + ], + [ + "▁window", + -10.160501480102539 + ], + [ + "▁Over", + -10.162222862243652 + ], + [ + "▁remove", + -10.162469863891602 + ], + [ + "wood", + -10.162479400634766 + ], + [ + "▁2013", + -10.163631439208984 + ], + [ + "▁mother", + 
-10.164072036743164 + ], + [ + "▁Auto", + -10.16436767578125 + ], + [ + "▁annual", + -10.164615631103516 + ], + [ + "▁Star", + -10.164834976196289 + ], + [ + "▁Di", + -10.166138648986816 + ], + [ + "о", + -10.16711139678955 + ], + [ + "▁gold", + -10.167129516601562 + ], + [ + "tar", + -10.167352676391602 + ], + [ + "ju", + -10.167750358581543 + ], + [ + "▁Use", + -10.169474601745605 + ], + [ + "▁thanks", + -10.16960334777832 + ], + [ + "▁centre", + -10.170127868652344 + ], + [ + "▁Australia", + -10.170358657836914 + ], + [ + "▁estate", + -10.170504570007324 + ], + [ + "▁eyes", + -10.1714448928833 + ], + [ + "▁force", + -10.171592712402344 + ], + [ + "▁income", + -10.17395305633545 + ], + [ + "▁science", + -10.174036026000977 + ], + [ + "ori", + -10.174230575561523 + ], + [ + "▁enter", + -10.174851417541504 + ], + [ + "▁28", + -10.175408363342285 + ], + [ + "ire", + -10.17568302154541 + ], + [ + "▁schools", + -10.175797462463379 + ], + [ + "▁restaurant", + -10.176088333129883 + ], + [ + "▁Council", + -10.177032470703125 + ], + [ + "aus", + -10.177885055541992 + ], + [ + "▁agree", + -10.17905330657959 + ], + [ + "▁campaign", + -10.179192543029785 + ], + [ + "▁Ta", + -10.179428100585938 + ], + [ + "▁letter", + -10.179814338684082 + ], + [ + "▁central", + -10.179931640625 + ], + [ + "▁Because", + -10.180054664611816 + ], + [ + "▁path", + -10.180349349975586 + ], + [ + "▁loc", + -10.180882453918457 + ], + [ + "▁files", + -10.182587623596191 + ], + [ + "▁population", + -10.182705879211426 + ], + [ + "▁explore", + -10.182723999023438 + ], + [ + "▁mid", + -10.182734489440918 + ], + [ + "▁concept", + -10.182748794555664 + ], + [ + "▁church", + -10.183015823364258 + ], + [ + "80", + -10.183026313781738 + ], + [ + "▁einfach", + -10.185834884643555 + ], + [ + "▁reasons", + -10.186690330505371 + ], + [ + "▁determine", + -10.186755180358887 + ], + [ + "▁February", + -10.187095642089844 + ], + [ + "▁evidence", + -10.18797779083252 + ], + [ + "▁sleep", + -10.188036918640137 + ], + [ + "▁Board", + -10.188652992248535 + ], + [ + "▁maybe", + -10.189635276794434 + ], + [ + "▁wasn", + -10.189701080322266 + ], + [ + "▁Monday", + -10.190101623535156 + ], + [ + "▁director", + -10.190481185913086 + ], + [ + "well", + -10.190974235534668 + ], + [ + "During", + -10.191001892089844 + ], + [ + "▁sweet", + -10.191061973571777 + ], + [ + "▁assist", + -10.19124984741211 + ], + [ + "▁police", + -10.191511154174805 + ], + [ + "▁repair", + -10.191729545593262 + ], + [ + "▁techniques", + -10.191733360290527 + ], + [ + "▁served", + -10.191808700561523 + ], + [ + "vi", + -10.192037582397461 + ], + [ + "▁sports", + -10.192331314086914 + ], + [ + "▁opening", + -10.192401885986328 + ], + [ + "▁ones", + -10.192731857299805 + ], + [ + "▁notice", + -10.193460464477539 + ], + [ + "▁PC", + -10.193547248840332 + ], + [ + "▁alte", + -10.194242477416992 + ], + [ + "▁Bi", + -10.194340705871582 + ], + [ + "▁cold", + -10.195606231689453 + ], + [ + "▁billion", + -10.195794105529785 + ], + [ + "▁balance", + -10.196361541748047 + ], + [ + "cer", + -10.196417808532715 + ], + [ + "▁nearly", + -10.196725845336914 + ], + [ + "▁wear", + -10.197259902954102 + ], + [ + "free", + -10.19760799407959 + ], + [ + "▁Have", + -10.197748184204102 + ], + [ + "▁comfort", + -10.199211120605469 + ], + [ + "▁studies", + -10.199225425720215 + ], + [ + "▁traffic", + -10.199540138244629 + ], + [ + "▁item", + -10.200214385986328 + ], + [ + "▁teaching", + -10.200467109680176 + ], + [ + "▁turned", + -10.201326370239258 + ], + [ + "isation", + -10.201354026794434 + ], + 
[ + "12", + -10.202038764953613 + ], + [ + "▁greater", + -10.202167510986328 + ], + [ + "▁knew", + -10.20233154296875 + ], + [ + "▁Association", + -10.203333854675293 + ], + [ + "▁Office", + -10.203802108764648 + ], + [ + "▁established", + -10.204085350036621 + ], + [ + "45", + -10.204170227050781 + ], + [ + "▁Love", + -10.204318046569824 + ], + [ + "▁changed", + -10.204882621765137 + ], + [ + "▁pan", + -10.205184936523438 + ], + [ + "van", + -10.20565414428711 + ], + [ + "▁Mi", + -10.205663681030273 + ], + [ + "▁tend", + -10.20637321472168 + ], + [ + "▁connection", + -10.206522941589355 + ], + [ + "▁lack", + -10.206954002380371 + ], + [ + "▁bank", + -10.208464622497559 + ], + [ + "cat", + -10.208720207214355 + ], + [ + "▁helped", + -10.209071159362793 + ], + [ + "▁spot", + -10.209417343139648 + ], + [ + "▁spring", + -10.20974063873291 + ], + [ + "▁Wi", + -10.210912704467773 + ], + [ + "▁Mac", + -10.211682319641113 + ], + [ + "▁Christ", + -10.212015151977539 + ], + [ + "▁saying", + -10.212835311889648 + ], + [ + "▁General", + -10.213062286376953 + ], + [ + "▁port", + -10.213099479675293 + ], + [ + "▁Mal", + -10.213156700134277 + ], + [ + "▁System", + -10.213486671447754 + ], + [ + "▁According", + -10.2152738571167 + ], + [ + "▁chiar", + -10.21568489074707 + ], + [ + "log", + -10.21576976776123 + ], + [ + "▁mix", + -10.215974807739258 + ], + [ + "▁Lake", + -10.216042518615723 + ], + [ + "▁intr", + -10.216590881347656 + ], + [ + "▁deliver", + -10.216793060302734 + ], + [ + "mon", + -10.216931343078613 + ], + [ + "▁Ro", + -10.217060089111328 + ], + [ + "▁Management", + -10.217504501342773 + ], + [ + "bri", + -10.218718528747559 + ], + [ + "▁pieces", + -10.218774795532227 + ], + [ + "▁announced", + -10.218926429748535 + ], + [ + "▁Yes", + -10.219268798828125 + ], + [ + "▁dark", + -10.220884323120117 + ], + [ + "val", + -10.221765518188477 + ], + [ + "▁rights", + -10.22309684753418 + ], + [ + "▁Diese", + -10.223100662231445 + ], + [ + "ki", + -10.223350524902344 + ], + [ + "vent", + -10.22375774383545 + ], + [ + "▁born", + -10.22380542755127 + ], + [ + "▁muss", + -10.224031448364258 + ], + [ + "compared", + -10.224660873413086 + ], + [ + "▁demand", + -10.224669456481934 + ], + [ + "▁handle", + -10.225493431091309 + ], + [ + "▁mode", + -10.226058006286621 + ], + [ + "lic", + -10.226137161254883 + ], + [ + "▁ahead", + -10.226436614990234 + ], + [ + "▁sharing", + -10.227599143981934 + ], + [ + "▁micro", + -10.227779388427734 + ], + [ + "▁Par", + -10.228626251220703 + ], + [ + "▁Every", + -10.22950553894043 + ], + [ + "▁bag", + -10.229736328125 + ], + [ + "▁daca", + -10.22974967956543 + ], + [ + "▁Apple", + -10.23022174835205 + ], + [ + "▁Mark", + -10.230239868164062 + ], + [ + "▁larger", + -10.231284141540527 + ], + [ + "eze", + -10.231978416442871 + ], + [ + "▁progress", + -10.232234001159668 + ], + [ + "▁stress", + -10.232929229736328 + ], + [ + "▁cards", + -10.233663558959961 + ], + [ + "▁driving", + -10.233738899230957 + ], + [ + "▁dry", + -10.233970642089844 + ], + [ + "▁relevant", + -10.234556198120117 + ], + [ + "▁Jo", + -10.234825134277344 + ], + [ + "▁tree", + -10.235036849975586 + ], + [ + "▁reported", + -10.235770225524902 + ], + [ + "ities", + -10.23577880859375 + ], + [ + "▁tea", + -10.235806465148926 + ], + [ + "▁although", + -10.236145973205566 + ], + [ + "▁Research", + -10.236261367797852 + ], + [ + "▁pool", + -10.23691463470459 + ], + [ + "▁fin", + -10.237163543701172 + ], + [ + "▁Und", + -10.238130569458008 + ], + [ + "▁decide", + -10.239217758178711 + ], + [ + "▁expert", + 
-10.239344596862793 + ], + [ + "rate", + -10.239428520202637 + ], + [ + "zeit", + -10.239971160888672 + ], + [ + "▁26", + -10.24040412902832 + ], + [ + "▁Ka", + -10.24056339263916 + ], + [ + "▁fix", + -10.240666389465332 + ], + [ + "igen", + -10.240713119506836 + ], + [ + "▁direction", + -10.241188049316406 + ], + [ + "▁star", + -10.241661071777344 + ], + [ + "▁middle", + -10.241889953613281 + ], + [ + "▁Ja", + -10.241962432861328 + ], + [ + "▁Land", + -10.24207878112793 + ], + [ + "ken", + -10.242605209350586 + ], + [ + "▁button", + -10.242630004882812 + ], + [ + "▁rules", + -10.242656707763672 + ], + [ + "▁également", + -10.242706298828125 + ], + [ + "▁viel", + -10.243158340454102 + ], + [ + "▁welcome", + -10.243682861328125 + ], + [ + "că", + -10.243932723999023 + ], + [ + "▁Top", + -10.245308876037598 + ], + [ + "▁allowed", + -10.245487213134766 + ], + [ + "▁tip", + -10.245584487915039 + ], + [ + "▁cei", + -10.245768547058105 + ], + [ + "▁Nous", + -10.246004104614258 + ], + [ + "té", + -10.246850967407227 + ], + [ + "▁unei", + -10.246903419494629 + ], + [ + "▁efforts", + -10.247260093688965 + ], + [ + "▁note", + -10.247719764709473 + ], + [ + "▁title", + -10.247977256774902 + ], + [ + "ric", + -10.248047828674316 + ], + [ + "berg", + -10.248252868652344 + ], + [ + "▁ainsi", + -10.248576164245605 + ], + [ + "▁led", + -10.248713493347168 + ], + [ + "▁alone", + -10.248786926269531 + ], + [ + "ward", + -10.249215126037598 + ], + [ + "▁vie", + -10.249323844909668 + ], + [ + "▁brain", + -10.249427795410156 + ], + [ + "light", + -10.250100135803223 + ], + [ + "▁Court", + -10.250598907470703 + ], + [ + "set", + -10.250869750976562 + ], + [ + "▁steps", + -10.251251220703125 + ], + [ + "pri", + -10.251391410827637 + ], + [ + "Q", + -10.251654624938965 + ], + [ + "sti", + -10.251938819885254 + ], + [ + "▁voice", + -10.252121925354004 + ], + [ + "▁models", + -10.252705574035645 + ], + [ + "▁parties", + -10.25442886352539 + ], + [ + "▁radio", + -10.255270957946777 + ], + [ + "▁mission", + -10.25545883178711 + ], + [ + "▁methods", + -10.255658149719238 + ], + [ + "▁Te", + -10.256019592285156 + ], + [ + "air", + -10.256489753723145 + ], + [ + "▁essay", + -10.256719589233398 + ], + [ + "my", + -10.256826400756836 + ], + [ + "▁competition", + -10.257049560546875 + ], + [ + "ses", + -10.257447242736816 + ], + [ + "▁serious", + -10.258724212646484 + ], + [ + "▁Ti", + -10.258733749389648 + ], + [ + "▁Hand", + -10.259561538696289 + ], + [ + "not", + -10.25958251953125 + ], + [ + "▁winter", + -10.261277198791504 + ], + [ + "24", + -10.261724472045898 + ], + [ + "▁vision", + -10.26174545288086 + ], + [ + "▁technical", + -10.262110710144043 + ], + [ + "▁cross", + -10.262799263000488 + ], + [ + "▁update", + -10.262947082519531 + ], + [ + "▁Team", + -10.263564109802246 + ], + [ + "▁evening", + -10.264286041259766 + ], + [ + "▁experts", + -10.26435661315918 + ], + [ + "part", + -10.264640808105469 + ], + [ + "▁wo", + -10.265190124511719 + ], + [ + "▁App", + -10.265729904174805 + ], + [ + "▁peu", + -10.266267776489258 + ], + [ + "▁mich", + -10.26630687713623 + ], + [ + "▁reports", + -10.267001152038574 + ], + [ + "▁km", + -10.267594337463379 + ], + [ + "▁print", + -10.2678804397583 + ], + [ + "▁Hotel", + -10.268101692199707 + ], + [ + "▁earlier", + -10.268235206604004 + ], + [ + "▁uses", + -10.26826286315918 + ], + [ + "▁menu", + -10.268416404724121 + ], + [ + "▁miles", + -10.26845645904541 + ], + [ + "▁classes", + -10.268463134765625 + ], + [ + "▁mo", + -10.268525123596191 + ], + [ + "▁loan", + -10.2691011428833 
+ ], + [ + "▁host", + -10.269192695617676 + ], + [ + "▁author", + -10.269274711608887 + ], + [ + "-1", + -10.269434928894043 + ], + [ + "▁bun", + -10.269940376281738 + ], + [ + "19", + -10.270011901855469 + ], + [ + "uch", + -10.270670890808105 + ], + [ + "ble", + -10.270813941955566 + ], + [ + "▁holiday", + -10.270859718322754 + ], + [ + "los", + -10.271894454956055 + ], + [ + "▁looked", + -10.272663116455078 + ], + [ + "▁Test", + -10.272759437561035 + ], + [ + "▁moved", + -10.273000717163086 + ], + [ + "▁numbers", + -10.273306846618652 + ], + [ + "▁covered", + -10.273405075073242 + ], + [ + "ker", + -10.273696899414062 + ], + [ + "TM", + -10.273768424987793 + ], + [ + "▁album", + -10.274727821350098 + ], + [ + "▁27", + -10.27476692199707 + ], + [ + "▁când", + -10.27523422241211 + ], + [ + "▁shopping", + -10.275248527526855 + ], + [ + "▁Ihr", + -10.27531623840332 + ], + [ + "▁requires", + -10.275786399841309 + ], + [ + "▁USA", + -10.275909423828125 + ], + [ + "000", + -10.275951385498047 + ], + [ + "▁official", + -10.276010513305664 + ], + [ + "▁states", + -10.276346206665039 + ], + [ + "▁tips", + -10.276570320129395 + ], + [ + "ible", + -10.277321815490723 + ], + [ + "▁Lu", + -10.27756404876709 + ], + [ + "ces", + -10.278343200683594 + ], + [ + "▁figure", + -10.27839469909668 + ], + [ + "▁Take", + -10.278576850891113 + ], + [ + "▁după", + -10.278687477111816 + ], + [ + "▁teams", + -10.278980255126953 + ], + [ + "▁song", + -10.279138565063477 + ], + [ + "▁master", + -10.279386520385742 + ], + [ + "ED", + -10.279841423034668 + ], + [ + "▁cleaning", + -10.280523300170898 + ], + [ + "▁drop", + -10.280651092529297 + ], + [ + "▁primary", + -10.2808837890625 + ], + [ + "▁Life", + -10.28108024597168 + ], + [ + "▁carry", + -10.281129837036133 + ], + [ + "▁initial", + -10.281270980834961 + ], + [ + "▁encore", + -10.281617164611816 + ], + [ + "▁Add", + -10.281670570373535 + ], + [ + "▁woman", + -10.282076835632324 + ], + [ + "▁Water", + -10.282219886779785 + ], + [ + "▁advantage", + -10.28277587890625 + ], + [ + "see", + -10.283234596252441 + ], + [ + "ré", + -10.283341407775879 + ], + [ + "▁motor", + -10.283479690551758 + ], + [ + "mel", + -10.2838716506958 + ], + [ + "▁finding", + -10.284419059753418 + ], + [ + "▁plastic", + -10.286365509033203 + ], + [ + "▁IT", + -10.286602973937988 + ], + [ + "▁Church", + -10.286916732788086 + ], + [ + "▁shape", + -10.287345886230469 + ], + [ + "▁gets", + -10.287763595581055 + ], + [ + "▁followed", + -10.288186073303223 + ], + [ + "▁100%", + -10.288315773010254 + ], + [ + "▁Program", + -10.28912353515625 + ], + [ + "▁Another", + -10.28934383392334 + ], + [ + "▁zwei", + -10.289522171020508 + ], + [ + "▁father", + -10.289839744567871 + ], + [ + "▁rich", + -10.290282249450684 + ], + [ + "où", + -10.290810585021973 + ], + [ + "▁lines", + -10.290934562683105 + ], + [ + "▁distance", + -10.291757583618164 + ], + [ + "▁cell", + -10.291876792907715 + ], + [ + "▁parte", + -10.292072296142578 + ], + [ + "bit", + -10.292445182800293 + ], + [ + "▁perhaps", + -10.292749404907227 + ], + [ + "rii", + -10.293590545654297 + ], + [ + "▁session", + -10.294137954711914 + ], + [ + "▁Pentru", + -10.294528007507324 + ], + [ + "ING", + -10.295049667358398 + ], + [ + "ants", + -10.295478820800781 + ], + [ + "▁remain", + -10.295543670654297 + ], + [ + "13", + -10.295588493347168 + ], + [ + "▁finished", + -10.295763969421387 + ], + [ + "bel", + -10.298725128173828 + ], + [ + "▁organizations", + -10.299455642700195 + ], + [ + "▁Any", + -10.299896240234375 + ], + [ + "▁taste", + 
-10.300277709960938 + ], + [ + "Whether", + -10.300600051879883 + ], + [ + "ram", + -10.300874710083008 + ], + [ + "like", + -10.301307678222656 + ], + [ + "▁artist", + -10.301319122314453 + ], + [ + "aire", + -10.303369522094727 + ], + [ + "▁French", + -10.303386688232422 + ], + [ + "▁donc", + -10.303634643554688 + ], + [ + "ow", + -10.30386734008789 + ], + [ + "▁200", + -10.303993225097656 + ], + [ + "▁paint", + -10.304465293884277 + ], + [ + "▁Open", + -10.304535865783691 + ], + [ + "▁appear", + -10.304722785949707 + ], + [ + "▁Washington", + -10.304765701293945 + ], + [ + "▁target", + -10.30491828918457 + ], + [ + "pir", + -10.305578231811523 + ], + [ + "▁generally", + -10.305987358093262 + ], + [ + "▁British", + -10.306790351867676 + ], + [ + "▁seven", + -10.306937217712402 + ], + [ + "▁bio", + -10.307162284851074 + ], + [ + "▁sector", + -10.307358741760254 + ], + [ + "90", + -10.30777359008789 + ], + [ + "▁fapt", + -10.307881355285645 + ], + [ + "▁prefer", + -10.308316230773926 + ], + [ + "▁partner", + -10.308427810668945 + ], + [ + "ăm", + -10.308547973632812 + ], + [ + "▁diverse", + -10.308610916137695 + ], + [ + "▁onto", + -10.309283256530762 + ], + [ + "▁refer", + -10.309828758239746 + ], + [ + "▁Law", + -10.310302734375 + ], + [ + "▁Ri", + -10.310596466064453 + ], + [ + "▁critical", + -10.310735702514648 + ], + [ + "▁copy", + -10.310897827148438 + ], + [ + "ck", + -10.311517715454102 + ], + [ + "ix", + -10.311732292175293 + ], + [ + "tag", + -10.311793327331543 + ], + [ + "▁Road", + -10.311936378479004 + ], + [ + "▁concern", + -10.312053680419922 + ], + [ + "▁maximum", + -10.312095642089844 + ], + [ + "▁train", + -10.312148094177246 + ], + [ + "▁într", + -10.312189102172852 + ], + [ + "ura", + -10.313023567199707 + ], + [ + "▁Qu", + -10.313481330871582 + ], + [ + "▁links", + -10.313538551330566 + ], + [ + "▁audience", + -10.313969612121582 + ], + [ + "▁foot", + -10.314554214477539 + ], + [ + "▁Blue", + -10.314605712890625 + ], + [ + "ification", + -10.315386772155762 + ], + [ + "▁developing", + -10.315847396850586 + ], + [ + "▁interior", + -10.315876007080078 + ], + [ + "=", + -10.316556930541992 + ], + [ + "▁aceasta", + -10.31698989868164 + ], + [ + "▁dedicated", + -10.317373275756836 + ], + [ + "▁movement", + -10.317383766174316 + ], + [ + "sta", + -10.318868637084961 + ], + [ + "▁challenges", + -10.319018363952637 + ], + [ + "inte", + -10.319074630737305 + ], + [ + "▁Euro", + -10.319075584411621 + ], + [ + "▁classic", + -10.320341110229492 + ], + [ + "▁Um", + -10.320767402648926 + ], + [ + "▁alternative", + -10.321407318115234 + ], + [ + "mann", + -10.321614265441895 + ], + [ + "▁Une", + -10.322278022766113 + ], + [ + "qu", + -10.322415351867676 + ], + [ + "▁heavy", + -10.322434425354004 + ], + [ + "▁install", + -10.322484970092773 + ], + [ + "▁fiind", + -10.322504043579102 + ], + [ + "▁leaders", + -10.323003768920898 + ], + [ + "▁views", + -10.323019981384277 + ], + [ + "▁www", + -10.323084831237793 + ], + [ + "▁standards", + -10.323270797729492 + ], + [ + "ong", + -10.323580741882324 + ], + [ + "40", + -10.323833465576172 + ], + [ + "▁cm", + -10.323848724365234 + ], + [ + "▁park", + -10.324324607849121 + ], + [ + "▁himself", + -10.324419021606445 + ], + [ + "▁People", + -10.324649810791016 + ], + [ + "▁separate", + -10.324843406677246 + ], + [ + "▁secure", + -10.325018882751465 + ], + [ + "sie", + -10.325084686279297 + ], + [ + "▁maintenance", + -10.325199127197266 + ], + [ + "▁encourage", + -10.32766056060791 + ], + [ + "ein", + -10.328139305114746 + ], + [ + "▁reviews", + 
-10.328202247619629 + ], + [ + "▁Michael", + -10.328210830688477 + ], + [ + "▁background", + -10.328283309936523 + ], + [ + "▁therefore", + -10.328433990478516 + ], + [ + "▁server", + -10.328487396240234 + ], + [ + "▁dream", + -10.328742027282715 + ], + [ + "ping", + -10.329025268554688 + ], + [ + "▁block", + -10.329855918884277 + ], + [ + "▁2009", + -10.330734252929688 + ], + [ + "▁facilities", + -10.330931663513184 + ], + [ + "▁II", + -10.331367492675781 + ], + [ + "▁attend", + -10.33156967163086 + ], + [ + "▁cap", + -10.33224105834961 + ], + [ + "35", + -10.332416534423828 + ], + [ + "▁steel", + -10.332796096801758 + ], + [ + "▁shared", + -10.333391189575195 + ], + [ + "▁doctor", + -10.333939552307129 + ], + [ + "▁River", + -10.33411693572998 + ], + [ + "▁Bay", + -10.334456443786621 + ], + [ + "▁length", + -10.335005760192871 + ], + [ + "▁jobs", + -10.335466384887695 + ], + [ + "▁Plus", + -10.335992813110352 + ], + [ + "▁station", + -10.336140632629395 + ], + [ + "▁elements", + -10.336268424987793 + ], + [ + "▁rock", + -10.336668014526367 + ], + [ + "▁professionals", + -10.336670875549316 + ], + [ + "cle", + -10.336777687072754 + ], + [ + "▁dont", + -10.336873054504395 + ], + [ + "urilor", + -10.337142944335938 + ], + [ + "▁gain", + -10.337271690368652 + ], + [ + "▁programme", + -10.337540626525879 + ], + [ + "▁Cor", + -10.338377952575684 + ], + [ + "▁leader", + -10.338542938232422 + ], + [ + "ării", + -10.33876895904541 + ], + [ + "▁>", + -10.339137077331543 + ], + [ + "▁task", + -10.339471817016602 + ], + [ + "▁seeing", + -10.339943885803223 + ], + [ + "▁statement", + -10.34045696258545 + ], + [ + "vin", + -10.341094017028809 + ], + [ + "▁fish", + -10.341700553894043 + ], + [ + "▁advanced", + -10.342403411865234 + ], + [ + "▁discuss", + -10.342494010925293 + ], + [ + "die", + -10.342904090881348 + ], + [ + "isch", + -10.342944145202637 + ], + [ + "▁plenty", + -10.342947959899902 + ], + [ + "▁Hall", + -10.343120574951172 + ], + [ + "▁Other", + -10.343339920043945 + ], + [ + "▁homes", + -10.344944953918457 + ], + [ + "▁Ni", + -10.345016479492188 + ], + [ + "▁testing", + -10.345102310180664 + ], + [ + "▁Last", + -10.345392227172852 + ], + [ + "▁Note", + -10.345595359802246 + ], + [ + "▁talking", + -10.345934867858887 + ], + [ + "▁exchange", + -10.347042083740234 + ], + [ + "▁exercise", + -10.347189903259277 + ], + [ + "▁cea", + -10.347546577453613 + ], + [ + "▁wife", + -10.34820556640625 + ], + [ + "▁Für", + -10.348480224609375 + ], + [ + "▁Texas", + -10.34981918334961 + ], + [ + "▁fr", + -10.35065746307373 + ], + [ + "▁speak", + -10.350894927978516 + ], + [ + "17", + -10.351007461547852 + ], + [ + "70", + -10.351462364196777 + ], + [ + "▁promote", + -10.351851463317871 + ], + [ + "tul", + -10.351990699768066 + ], + [ + "apos", + -10.35208511352539 + ], + [ + "▁Jahr", + -10.35214900970459 + ], + [ + "▁Trump", + -10.352204322814941 + ], + [ + "▁ohne", + -10.352357864379883 + ], + [ + "▁learned", + -10.353700637817383 + ], + [ + "▁Sp", + -10.353803634643555 + ], + [ + "▁owner", + -10.354275703430176 + ], + [ + "mor", + -10.354422569274902 + ], + [ + "▁fois", + -10.354452133178711 + ], + [ + "▁meaning", + -10.35518741607666 + ], + [ + "▁dacă", + -10.355249404907227 + ], + [ + "nic", + -10.355484008789062 + ], + [ + "а", + -10.355525970458984 + ], + [ + "14", + -10.355767250061035 + ], + [ + "▁driver", + -10.356258392333984 + ], + [ + "▁Amazon", + -10.3567533493042 + ], + [ + "▁flow", + -10.358469009399414 + ], + [ + "▁shot", + -10.358726501464844 + ], + [ + "▁sous", + -10.35914421081543 + 
], + [ + "▁Gold", + -10.359339714050293 + ], + [ + "▁straight", + -10.359562873840332 + ], + [ + "▁conference", + -10.359610557556152 + ], + [ + "▁peste", + -10.359662055969238 + ], + [ + "whose", + -10.36030101776123 + ], + [ + "▁installation", + -10.36050796508789 + ], + [ + "▁produced", + -10.360607147216797 + ], + [ + "▁independent", + -10.36192512512207 + ], + [ + "▁Institute", + -10.362021446228027 + ], + [ + "▁James", + -10.362373352050781 + ], + [ + "▁mental", + -10.362601280212402 + ], + [ + "ara", + -10.362798690795898 + ], + [ + "ium", + -10.363021850585938 + ], + [ + "▁husband", + -10.36306095123291 + ], + [ + "▁guests", + -10.363907814025879 + ], + [ + "27", + -10.364319801330566 + ], + [ + "▁Che", + -10.364651679992676 + ], + [ + "▁Indian", + -10.364694595336914 + ], + [ + "zer", + -10.36478042602539 + ], + [ + "▁minimum", + -10.364962577819824 + ], + [ + "500", + -10.365096092224121 + ], + [ + "▁sit", + -10.36561393737793 + ], + [ + "put", + -10.36656379699707 + ], + [ + "▁avea", + -10.36665153503418 + ], + [ + "▁ride", + -10.367088317871094 + ], + [ + "gan", + -10.367152214050293 + ], + [ + "▁Ke", + -10.36747932434082 + ], + [ + "book", + -10.367515563964844 + ], + [ + "ages", + -10.368019104003906 + ], + [ + "▁presented", + -10.368157386779785 + ], + [ + "▁Com", + -10.368927955627441 + ], + [ + "▁Call", + -10.369053840637207 + ], + [ + "▁fee", + -10.369847297668457 + ], + [ + "ări", + -10.369905471801758 + ], + [ + "▁putea", + -10.37072467803955 + ], + [ + "▁Public", + -10.371030807495117 + ], + [ + "▁pa", + -10.371152877807617 + ], + [ + "28", + -10.371233940124512 + ], + [ + "▁Director", + -10.37126350402832 + ], + [ + "▁contains", + -10.3717622756958 + ], + [ + "▁factors", + -10.372554779052734 + ], + [ + "▁famous", + -10.372614860534668 + ], + [ + "▁bathroom", + -10.373040199279785 + ], + [ + "▁core", + -10.37353229522705 + ], + [ + "▁viele", + -10.373610496520996 + ], + [ + "▁acum", + -10.374361991882324 + ], + [ + "▁animal", + -10.374407768249512 + ], + [ + "▁Ihnen", + -10.374425888061523 + ], + [ + "▁Find", + -10.374545097351074 + ], + [ + "▁Fall", + -10.374861717224121 + ], + [ + "ford", + -10.376051902770996 + ], + [ + "▁coverage", + -10.3765287399292 + ], + [ + "▁smart", + -10.376830101013184 + ], + [ + "ries", + -10.376893997192383 + ], + [ + "▁memory", + -10.3772554397583 + ], + [ + "▁dance", + -10.377443313598633 + ], + [ + "11", + -10.37746810913086 + ], + [ + "▁communities", + -10.377655982971191 + ], + [ + "eurs", + -10.378050804138184 + ], + [ + "▁Florida", + -10.378463745117188 + ], + [ + "▁sport", + -10.379366874694824 + ], + [ + "▁bus", + -10.37992000579834 + ], + [ + "▁colors", + -10.379969596862793 + ], + [ + "▁affect", + -10.380044937133789 + ], + [ + "▁score", + -10.380183219909668 + ], + [ + "▁properties", + -10.38050365447998 + ], + [ + "18", + -10.380593299865723 + ], + [ + "▁astfel", + -10.381312370300293 + ], + [ + "▁beach", + -10.382407188415527 + ], + [ + "▁friendly", + -10.382795333862305 + ], + [ + "izing", + -10.38288688659668 + ], + [ + "▁buying", + -10.383146286010742 + ], + [ + "▁forget", + -10.383195877075195 + ], + [ + "este", + -10.383198738098145 + ], + [ + "▁capacity", + -10.38360595703125 + ], + [ + "▁lose", + -10.383692741394043 + ], + [ + "▁listed", + -10.38407039642334 + ], + [ + "ica", + -10.384084701538086 + ], + [ + "han", + -10.384085655212402 + ], + [ + "▁selbst", + -10.384390830993652 + ], + [ + "▁values", + -10.384391784667969 + ], + [ + "▁Power", + -10.384559631347656 + ], + [ + "▁comments", + -10.384831428527832 + ], + 
[ + "eux", + -10.385346412658691 + ], + [ + "ați", + -10.385419845581055 + ], + [ + "▁context", + -10.385710716247559 + ], + [ + "liche", + -10.385944366455078 + ], + [ + "▁keeping", + -10.38620662689209 + ], + [ + "▁2008", + -10.38647174835205 + ], + [ + "▁su", + -10.386670112609863 + ], + [ + "▁biggest", + -10.386838912963867 + ], + [ + "▁fiecare", + -10.387356758117676 + ], + [ + "ight", + -10.38845157623291 + ], + [ + "▁toute", + -10.389808654785156 + ], + [ + "▁dinner", + -10.389827728271484 + ], + [ + "bau", + -10.390706062316895 + ], + [ + "▁Mai", + -10.390762329101562 + ], + [ + "▁status", + -10.390776634216309 + ], + [ + "rez", + -10.391340255737305 + ], + [ + "▁selected", + -10.391549110412598 + ], + [ + "▁cells", + -10.392601013183594 + ], + [ + "▁eight", + -10.393319129943848 + ], + [ + "▁package", + -10.393320083618164 + ], + [ + "▁scale", + -10.39333724975586 + ], + [ + "din", + -10.39336109161377 + ], + [ + "▁Who", + -10.393381118774414 + ], + [ + "▁century", + -10.393399238586426 + ], + [ + "▁bi", + -10.393516540527344 + ], + [ + "▁Africa", + -10.39384937286377 + ], + [ + "▁http", + -10.394133567810059 + ], + [ + "▁named", + -10.394230842590332 + ], + [ + "▁adding", + -10.394901275634766 + ], + [ + "▁mention", + -10.395039558410645 + ], + [ + "▁casino", + -10.395421981811523 + ], + [ + "▁couldn", + -10.395624160766602 + ], + [ + "▁outdoor", + -10.395912170410156 + ], + [ + "▁sugar", + -10.3960542678833 + ], + [ + "▁prepared", + -10.396124839782715 + ], + [ + "21", + -10.396528244018555 + ], + [ + "▁Ba", + -10.396632194519043 + ], + [ + "vers", + -10.396697998046875 + ], + [ + "ration", + -10.396773338317871 + ], + [ + "▁ja", + -10.397035598754883 + ], + [ + "▁aspect", + -10.397224426269531 + ], + [ + "▁31", + -10.397462844848633 + ], + [ + "▁treat", + -10.397475242614746 + ], + [ + "tru", + -10.397841453552246 + ], + [ + "▁flat", + -10.397890090942383 + ], + [ + "32", + -10.397989273071289 + ], + [ + "▁reality", + -10.398238182067871 + ], + [ + "▁waste", + -10.39876937866211 + ], + [ + "▁King", + -10.399649620056152 + ], + [ + "▁drug", + -10.399870872497559 + ], + [ + "▁operations", + -10.400120735168457 + ], + [ + "▁aim", + -10.40042495727539 + ], + [ + "▁fans", + -10.400444984436035 + ], + [ + "▁vers", + -10.400891304016113 + ], + [ + "▁plants", + -10.400971412658691 + ], + [ + "▁Dis", + -10.401477813720703 + ], + [ + "▁Daten", + -10.401510238647461 + ], + [ + "être", + -10.40267276763916 + ], + [ + "▁placed", + -10.40326976776123 + ], + [ + "▁bon", + -10.403977394104004 + ], + [ + "beim", + -10.4041109085083 + ], + [ + "▁slow", + -10.40501880645752 + ], + [ + "cri", + -10.405512809753418 + ], + [ + "▁Care", + -10.405691146850586 + ], + [ + "mes", + -10.406211853027344 + ], + [ + "26", + -10.406257629394531 + ], + [ + "box", + -10.406330108642578 + ], + [ + "▁helpful", + -10.406362533569336 + ], + [ + "▁documents", + -10.406543731689453 + ], + [ + "▁visitors", + -10.406773567199707 + ], + [ + "ture", + -10.406862258911133 + ], + [ + "▁Menschen", + -10.406891822814941 + ], + [ + "▁Chi", + -10.406975746154785 + ], + [ + "▁recipe", + -10.40764045715332 + ], + [ + "▁kept", + -10.407693862915039 + ], + [ + "▁Grand", + -10.407915115356445 + ], + [ + "▁operating", + -10.408178329467773 + ], + [ + "point", + -10.408329010009766 + ], + [ + "▁bin", + -10.40837287902832 + ], + [ + "▁Tri", + -10.40845775604248 + ], + [ + "Be", + -10.408512115478516 + ], + [ + "▁experiences", + -10.40856647491455 + ], + [ + "▁academic", + -10.408608436584473 + ], + [ + "▁finden", + -10.40870475769043 
+ ], + [ + "▁sera", + -10.409092903137207 + ], + [ + "act", + -10.410541534423828 + ], + [ + "▁Pa", + -10.410907745361328 + ], + [ + "▁society", + -10.411056518554688 + ], + [ + "▁combination", + -10.411237716674805 + ], + [ + "5%", + -10.41182804107666 + ], + [ + "▁owners", + -10.41188907623291 + ], + [ + "▁poor", + -10.412039756774902 + ], + [ + "▁Robert", + -10.412378311157227 + ], + [ + "▁military", + -10.412964820861816 + ], + [ + "▁economy", + -10.413033485412598 + ], + [ + "▁aware", + -10.413055419921875 + ], + [ + "rot", + -10.413443565368652 + ], + [ + "mie", + -10.413544654846191 + ], + [ + "▁Thursday", + -10.414399147033691 + ], + [ + "▁2011", + -10.41490650177002 + ], + [ + "▁fantastic", + -10.41554069519043 + ], + [ + "▁numerous", + -10.415921211242676 + ], + [ + "▁fair", + -10.4165620803833 + ], + [ + "med", + -10.416753768920898 + ], + [ + "▁welche", + -10.416893005371094 + ], + [ + "▁fruit", + -10.41712760925293 + ], + [ + "ku", + -10.417325019836426 + ], + [ + "▁Social", + -10.417583465576172 + ], + [ + "▁funds", + -10.418157577514648 + ], + [ + "▁atunci", + -10.418214797973633 + ], + [ + "▁Part", + -10.418238639831543 + ], + [ + "▁Big", + -10.418301582336426 + ], + [ + "▁2010", + -10.419414520263672 + ], + [ + "▁detail", + -10.419889450073242 + ], + [ + "▁Peter", + -10.419942855834961 + ], + [ + "ani", + -10.420196533203125 + ], + [ + "▁Wie", + -10.420795440673828 + ], + [ + "▁Tu", + -10.421649932861328 + ], + [ + "ear", + -10.421706199645996 + ], + [ + "▁Wenn", + -10.421941757202148 + ], + [ + "▁manager", + -10.42199993133545 + ], + [ + "▁Dan", + -10.422409057617188 + ], + [ + "▁Pi", + -10.42257308959961 + ], + [ + "▁wants", + -10.422652244567871 + ], + [ + "▁Data", + -10.42322826385498 + ], + [ + "pos", + -10.42387580871582 + ], + [ + "▁older", + -10.423946380615234 + ], + [ + "▁Download", + -10.424071311950684 + ], + [ + "▁Was", + -10.424107551574707 + ], + [ + "▁corner", + -10.424195289611816 + ], + [ + "▁president", + -10.424199104309082 + ], + [ + "mas", + -10.424248695373535 + ], + [ + "▁smaller", + -10.424361228942871 + ], + [ + "▁bright", + -10.424459457397461 + ], + [ + "▁proper", + -10.424582481384277 + ], + [ + "▁Kinder", + -10.424637794494629 + ], + [ + "▁Two", + -10.424668312072754 + ], + [ + "▁award", + -10.42471694946289 + ], + [ + "▁premier", + -10.425211906433105 + ], + [ + "▁seek", + -10.425646781921387 + ], + [ + "▁thank", + -10.425662994384766 + ], + [ + "▁proud", + -10.426509857177734 + ], + [ + "▁workers", + -10.426774024963379 + ], + [ + "▁2000", + -10.426970481872559 + ], + [ + "▁gone", + -10.427482604980469 + ], + [ + "▁medium", + -10.427693367004395 + ], + [ + "▁grade", + -10.42777156829834 + ], + [ + "▁Ru", + -10.427800178527832 + ], + [ + "cro", + -10.427851676940918 + ], + [ + "▁interview", + -10.428311347961426 + ], + [ + "23", + -10.428787231445312 + ], + [ + "▁mari", + -10.429442405700684 + ], + [ + "▁80", + -10.429756164550781 + ], + [ + "▁Ga", + -10.430047035217285 + ], + [ + "▁90", + -10.431839942932129 + ], + [ + "▁anderen", + -10.432605743408203 + ], + [ + "▁cultural", + -10.433018684387207 + ], + [ + "but", + -10.433144569396973 + ], + [ + "rum", + -10.433300018310547 + ], + [ + "get", + -10.43338680267334 + ], + [ + "▁pop", + -10.433582305908203 + ], + [ + "▁Information", + -10.433594703674316 + ], + [ + "▁press", + -10.434972763061523 + ], + [ + "▁Project", + -10.435359001159668 + ], + [ + "▁excited", + -10.435755729675293 + ], + [ + "▁Saint", + -10.436088562011719 + ], + [ + "▁England", + -10.436192512512207 + ], + [ + "▁beauty", 
+ -10.43643856048584 + ], + [ + "▁agreement", + -10.436464309692383 + ], + [ + "▁Like", + -10.437565803527832 + ], + [ + "▁strength", + -10.437664985656738 + ], + [ + "▁waiting", + -10.438165664672852 + ], + [ + "и", + -10.438270568847656 + ], + [ + "Le", + -10.438329696655273 + ], + [ + "▁residents", + -10.43835735321045 + ], + [ + "▁Ben", + -10.438603401184082 + ], + [ + "▁mentioned", + -10.439260482788086 + ], + [ + "▁etwas", + -10.43930721282959 + ], + [ + "▁rooms", + -10.439347267150879 + ], + [ + "▁neue", + -10.439501762390137 + ], + [ + "▁Microsoft", + -10.439726829528809 + ], + [ + "▁passed", + -10.440205574035645 + ], + [ + "▁sea", + -10.440893173217773 + ], + [ + "▁electric", + -10.441244125366211 + ], + [ + "▁forms", + -10.441384315490723 + ], + [ + "▁Central", + -10.441597938537598 + ], + [ + "▁Lord", + -10.442625999450684 + ], + [ + "ute", + -10.442763328552246 + ], + [ + "▁pré", + -10.442790031433105 + ], + [ + "▁square", + -10.44308090209961 + ], + [ + "itatea", + -10.443451881408691 + ], + [ + "▁debt", + -10.443757057189941 + ], + [ + "▁street", + -10.443975448608398 + ], + [ + "▁pi", + -10.444917678833008 + ], + [ + "▁happened", + -10.445326805114746 + ], + [ + "▁Tuesday", + -10.445592880249023 + ], + [ + "recht", + -10.446094512939453 + ], + [ + "▁Eine", + -10.44627857208252 + ], + [ + "▁Set", + -10.446768760681152 + ], + [ + "▁federal", + -10.4468412399292 + ], + [ + "CC", + -10.446905136108398 + ], + [ + "....", + -10.446938514709473 + ], + [ + "lig", + -10.447463035583496 + ], + [ + "▁Christian", + -10.44870662689209 + ], + [ + "▁truth", + -10.449213981628418 + ], + [ + "▁map", + -10.449728012084961 + ], + [ + "▁secret", + -10.449979782104492 + ], + [ + "▁Chinese", + -10.450844764709473 + ], + [ + "hol", + -10.450895309448242 + ], + [ + "▁wrote", + -10.451505661010742 + ], + [ + "▁hospital", + -10.451783180236816 + ], + [ + "▁Island", + -10.451870918273926 + ], + [ + "▁frame", + -10.451946258544922 + ], + [ + "▁sources", + -10.452117919921875 + ], + [ + "pan", + -10.453242301940918 + ], + [ + "▁29", + -10.453530311584473 + ], + [ + "▁changing", + -10.454547882080078 + ], + [ + "▁Where", + -10.454627990722656 + ], + [ + "▁negative", + -10.45471477508545 + ], + [ + "▁processes", + -10.45491886138916 + ], + [ + "▁leadership", + -10.455029487609863 + ], + [ + "▁nos", + -10.455195426940918 + ], + [ + "▁info", + -10.455780029296875 + ], + [ + "▁Gu", + -10.45595645904541 + ], + [ + "▁CO", + -10.45605182647705 + ], + [ + "▁reference", + -10.456884384155273 + ], + [ + "▁corporate", + -10.457097053527832 + ], + [ + "▁characters", + -10.457563400268555 + ], + [ + "▁dining", + -10.4577054977417 + ], + [ + "▁becoming", + -10.459708213806152 + ], + [ + "▁4.", + -10.460311889648438 + ], + [ + "▁Science", + -10.460626602172852 + ], + [ + "▁Education", + -10.461943626403809 + ], + [ + "▁camp", + -10.46207046508789 + ], + [ + "fall", + -10.462146759033203 + ], + [ + "▁Auch", + -10.462471961975098 + ], + [ + "▁topic", + -10.462519645690918 + ], + [ + "▁influence", + -10.463460922241211 + ], + [ + "▁70", + -10.463892936706543 + ], + [ + "▁identify", + -10.464459419250488 + ], + [ + "▁(19", + -10.464646339416504 + ], + [ + "care", + -10.465216636657715 + ], + [ + "ions", + -10.466215133666992 + ], + [ + "ray", + -10.4663724899292 + ], + [ + "▁Both", + -10.466577529907227 + ], + [ + "▁collect", + -10.466997146606445 + ], + [ + "▁practices", + -10.467667579650879 + ], + [ + "▁fight", + -10.468058586120605 + ], + [ + "▁injury", + -10.46873664855957 + ], + [ + "▁nici", + -10.46905517578125 + 
], + [ + "▁depuis", + -10.469563484191895 + ], + [ + "▁actions", + -10.469609260559082 + ], + [ + "▁Wednesday", + -10.47089958190918 + ], + [ + "▁bill", + -10.471086502075195 + ], + [ + "▁cheap", + -10.471318244934082 + ], + [ + "lui", + -10.471719741821289 + ], + [ + "▁awesome", + -10.471731185913086 + ], + [ + "tig", + -10.472554206848145 + ], + [ + "▁expensive", + -10.472636222839355 + ], + [ + "ceea", + -10.472834587097168 + ], + [ + "▁exact", + -10.472907066345215 + ], + [ + "22", + -10.473462104797363 + ], + [ + "▁avant", + -10.47352123260498 + ], + [ + "▁fat", + -10.47353744506836 + ], + [ + "▁spending", + -10.474353790283203 + ], + [ + "▁designs", + -10.47608470916748 + ], + [ + "▁damit", + -10.4761323928833 + ], + [ + "▁comp", + -10.47619342803955 + ], + [ + "▁whatever", + -10.476434707641602 + ], + [ + "▁Light", + -10.476442337036133 + ], + [ + "▁quarter", + -10.47680377960205 + ], + [ + "hand", + -10.477301597595215 + ], + [ + "▁connected", + -10.477584838867188 + ], + [ + "▁technologies", + -10.47772216796875 + ], + [ + "ges", + -10.477808952331543 + ], + [ + "▁shower", + -10.478998184204102 + ], + [ + "▁500", + -10.47923469543457 + ], + [ + "▁Time", + -10.479436874389648 + ], + [ + "▁zone", + -10.480525970458984 + ], + [ + "▁vote", + -10.480624198913574 + ], + [ + "▁andere", + -10.480871200561523 + ], + [ + "▁otherwise", + -10.480988502502441 + ], + [ + "tur", + -10.481294631958008 + ], + [ + "▁happens", + -10.481504440307617 + ], + [ + "hin", + -10.481597900390625 + ], + [ + "▁volume", + -10.482161521911621 + ], + [ + "▁thousands", + -10.482391357421875 + ], + [ + "war", + -10.482551574707031 + ], + [ + "▁Play", + -10.482900619506836 + ], + [ + "▁temperature", + -10.48371410369873 + ], + [ + "▁industrial", + -10.483830451965332 + ], + [ + "▁fuel", + -10.483915328979492 + ], + [ + "100", + -10.48409366607666 + ], + [ + "top", + -10.484210014343262 + ], + [ + "kin", + -10.484312057495117 + ], + [ + "▁efficient", + -10.484414100646973 + ], + [ + "teil", + -10.484525680541992 + ], + [ + "alt", + -10.484578132629395 + ], + [ + "▁monde", + -10.48483657836914 + ], + [ + "▁Ra", + -10.484899520874023 + ], + [ + "▁bedroom", + -10.485103607177734 + ], + [ + "▁showing", + -10.485316276550293 + ], + [ + "▁continued", + -10.485490798950195 + ], + [ + "▁Plan", + -10.48552131652832 + ], + [ + "▁assistance", + -10.486014366149902 + ], + [ + "▁discover", + -10.48622989654541 + ], + [ + "▁Year", + -10.486238479614258 + ], + [ + "▁applied", + -10.486433029174805 + ], + [ + "▁audio", + -10.48755931854248 + ], + [ + "▁thus", + -10.487645149230957 + ], + [ + "▁permet", + -10.48806095123291 + ], + [ + "▁fashion", + -10.488532066345215 + ], + [ + "cra", + -10.488645553588867 + ], + [ + "ious", + -10.488700866699219 + ], + [ + "▁focused", + -10.489258766174316 + ], + [ + "16", + -10.48930549621582 + ], + [ + "▁arm", + -10.489364624023438 + ], + [ + "▁Their", + -10.489789962768555 + ], + [ + "▁Foundation", + -10.49022388458252 + ], + [ + "▁majority", + -10.49022388458252 + ], + [ + "▁wind", + -10.490785598754883 + ], + [ + "▁bought", + -10.491056442260742 + ], + [ + "▁factor", + -10.491918563842773 + ], + [ + "▁opened", + -10.49213695526123 + ], + [ + "tern", + -10.492374420166016 + ], + [ + "▁cars", + -10.492597579956055 + ], + [ + "▁exciting", + -10.492691040039062 + ], + [ + "▁affordable", + -10.493510246276855 + ], + [ + "ches", + -10.493563652038574 + ], + [ + "▁panel", + -10.493720054626465 + ], + [ + "▁caused", + -10.493793487548828 + ], + [ + "▁travail", + -10.493998527526855 + ], + [ + 
"▁roof", + -10.494073867797852 + ], + [ + "▁enable", + -10.494202613830566 + ], + [ + "▁toward", + -10.494491577148438 + ], + [ + "▁Development", + -10.494688987731934 + ], + [ + "▁foreign", + -10.495308876037598 + ], + [ + "avi", + -10.495320320129395 + ], + [ + "long", + -10.495328903198242 + ], + [ + "De", + -10.49578857421875 + ], + [ + "▁Mon", + -10.49588394165039 + ], + [ + "▁Va", + -10.495942115783691 + ], + [ + "AP", + -10.496097564697266 + ], + [ + "▁asta", + -10.49720573425293 + ], + [ + "▁prepare", + -10.497220993041992 + ], + [ + "▁German", + -10.497261047363281 + ], + [ + "▁Centre", + -10.497325897216797 + ], + [ + "ère", + -10.497367858886719 + ], + [ + "▁fear", + -10.497537612915039 + ], + [ + "▁Este", + -10.497878074645996 + ], + [ + "▁Des", + -10.49793529510498 + ], + [ + "▁Kon", + -10.499308586120605 + ], + [ + "á", + -10.499866485595703 + ], + [ + "stand", + -10.500805854797363 + ], + [ + "▁Real", + -10.500842094421387 + ], + [ + "lichen", + -10.50098705291748 + ], + [ + "▁Beach", + -10.501455307006836 + ], + [ + "▁expertise", + -10.50185775756836 + ], + [ + "▁route", + -10.502445220947266 + ], + [ + "▁nation", + -10.502551078796387 + ], + [ + "▁snow", + -10.503022193908691 + ], + [ + "▁articles", + -10.503127098083496 + ], + [ + "▁Wood", + -10.504426956176758 + ], + [ + "▁operation", + -10.50494384765625 + ], + [ + "▁passion", + -10.505215644836426 + ], + [ + "▁cand", + -10.505690574645996 + ], + [ + "haus", + -10.505701065063477 + ], + [ + "OR", + -10.505711555480957 + ], + [ + "▁senior", + -10.506511688232422 + ], + [ + "▁becomes", + -10.506546020507812 + ], + [ + "▁sounds", + -10.506878852844238 + ], + [ + "▁enjoyed", + -10.50704574584961 + ], + [ + "▁gegen", + -10.507533073425293 + ], + [ + "▁courses", + -10.507919311523438 + ], + [ + "▁absolutely", + -10.508257865905762 + ], + [ + "tim", + -10.508264541625977 + ], + [ + "uff", + -10.508516311645508 + ], + [ + "▁moins", + -10.50860595703125 + ], + [ + "▁TO", + -10.509060859680176 + ], + [ + "▁fabric", + -10.509267807006836 + ], + [ + "poli", + -10.509326934814453 + ], + [ + "▁Bre", + -10.509761810302734 + ], + [ + "▁bo", + -10.509916305541992 + ], + [ + "▁Elle", + -10.510469436645508 + ], + [ + "bu", + -10.512336730957031 + ], + [ + "▁participants", + -10.512401580810547 + ], + [ + "stone", + -10.512794494628906 + ], + [ + "ties", + -10.51366138458252 + ], + [ + "▁listen", + -10.513700485229492 + ], + [ + "▁Spiel", + -10.513752937316895 + ], + [ + "pot", + -10.513872146606445 + ], + [ + "▁selling", + -10.514358520507812 + ], + [ + "▁geht", + -10.514680862426758 + ], + [ + "▁mini", + -10.515146255493164 + ], + [ + "▁trans", + -10.515408515930176 + ], + [ + "▁ingredients", + -10.515642166137695 + ], + [ + "auf", + -10.515671730041504 + ], + [ + "▁orice", + -10.51595401763916 + ], + [ + "▁Next", + -10.516300201416016 + ], + [ + "▁cream", + -10.516756057739258 + ], + [ + "▁edge", + -10.516973495483398 + ], + [ + "▁recommended", + -10.517022132873535 + ], + [ + "▁Form", + -10.517277717590332 + ], + [ + "▁processing", + -10.51746940612793 + ], + [ + "vert", + -10.517709732055664 + ], + [ + "▁described", + -10.518362998962402 + ], + [ + "▁installed", + -10.51884937286377 + ], + [ + "▁managed", + -10.518952369689941 + ], + [ + "▁electronic", + -10.518966674804688 + ], + [ + "▁performed", + -10.519064903259277 + ], + [ + "▁raise", + -10.519098281860352 + ], + [ + "▁imagine", + -10.519281387329102 + ], + [ + "down", + -10.51952838897705 + ], + [ + "▁fond", + -10.519978523254395 + ], + [ + "▁Inter", + -10.520434379577637 + ], 
+ [ + "▁Mc", + -10.520550727844238 + ], + [ + "▁Dans", + -10.520679473876953 + ], + [ + "istic", + -10.520966529846191 + ], + [ + "▁miss", + -10.521052360534668 + ], + [ + "sur", + -10.521062850952148 + ], + [ + "▁Col", + -10.521879196166992 + ], + [ + "cut", + -10.522021293640137 + ], + [ + "▁dupa", + -10.522160530090332 + ], + [ + "▁Twitter", + -10.522604942321777 + ], + [ + "▁bowl", + -10.523721694946289 + ], + [ + "▁remains", + -10.5237455368042 + ], + [ + "▁Jan", + -10.524046897888184 + ], + [ + "▁smooth", + -10.524162292480469 + ], + [ + "▁fees", + -10.524415969848633 + ], + [ + "▁aid", + -10.524494171142578 + ], + [ + "▁presence", + -10.524827003479004 + ], + [ + "▁Android", + -10.52499771118164 + ], + [ + "▁decisions", + -10.52539348602295 + ], + [ + "▁names", + -10.5254487991333 + ], + [ + "▁Music", + -10.525546073913574 + ], + [ + "▁innovative", + -10.525578498840332 + ], + [ + "▁Tom", + -10.525997161865234 + ], + [ + "▁spread", + -10.526165962219238 + ], + [ + "▁lovely", + -10.526222229003906 + ], + [ + "▁daughter", + -10.526397705078125 + ], + [ + "US", + -10.527050971984863 + ], + [ + "▁facility", + -10.52710247039795 + ], + [ + "▁peace", + -10.527105331420898 + ], + [ + "▁department", + -10.527277946472168 + ], + [ + "▁weiter", + -10.527591705322266 + ], + [ + "▁Sun", + -10.527756690979004 + ], + [ + "▁fund", + -10.527772903442383 + ], + [ + "▁2018.", + -10.52792739868164 + ], + [ + "▁discussion", + -10.528186798095703 + ], + [ + "75", + -10.528799057006836 + ], + [ + "EC", + -10.529126167297363 + ], + [ + "▁lunch", + -10.529144287109375 + ], + [ + "▁videos", + -10.52927017211914 + ], + [ + "05", + -10.531253814697266 + ], + [ + "ige", + -10.531266212463379 + ], + [ + "▁parking", + -10.531564712524414 + ], + [ + "▁relationships", + -10.531732559204102 + ], + [ + "▁George", + -10.532986640930176 + ], + [ + "▁teachers", + -10.53299617767334 + ], + [ + "room", + -10.533458709716797 + ], + [ + "▁Tra", + -10.533605575561523 + ], + [ + "▁Sam", + -10.533651351928711 + ], + [ + "▁properly", + -10.535590171813965 + ], + [ + "▁Book", + -10.535629272460938 + ], + [ + "▁CA", + -10.536957740783691 + ], + [ + "▁calls", + -10.53756046295166 + ], + [ + "▁stat", + -10.538175582885742 + ], + [ + "ux", + -10.538220405578613 + ], + [ + "▁soit", + -10.538439750671387 + ], + [ + "▁Community", + -10.538684844970703 + ], + [ + "▁Jahren", + -10.538714408874512 + ], + [ + "▁increasing", + -10.539575576782227 + ], + [ + "▁civil", + -10.540184020996094 + ], + [ + "app", + -10.540573120117188 + ], + [ + "▁35", + -10.540589332580566 + ], + [ + "▁rise", + -10.540600776672363 + ], + [ + "▁dabei", + -10.540989875793457 + ], + [ + "▁studio", + -10.541803359985352 + ], + [ + "▁policies", + -10.542054176330566 + ], + [ + "▁agent", + -10.542055130004883 + ], + [ + "▁Before", + -10.542601585388184 + ], + [ + "▁Cal", + -10.543017387390137 + ], + [ + "▁2005", + -10.543404579162598 + ], + [ + "▁sample", + -10.543777465820312 + ], + [ + "▁manner", + -10.545186996459961 + ], + [ + "wing", + -10.54521369934082 + ], + [ + "stra", + -10.545552253723145 + ], + [ + "▁fel", + -10.545793533325195 + ], + [ + "▁Show", + -10.545952796936035 + ], + [ + "▁scene", + -10.54656982421875 + ], + [ + "mic", + -10.546764373779297 + ], + [ + "nom", + -10.546995162963867 + ], + [ + "▁typically", + -10.547088623046875 + ], + [ + "▁pair", + -10.547104835510254 + ], + [ + "▁detailed", + -10.547394752502441 + ], + [ + "▁Work", + -10.547422409057617 + ], + [ + "▁cities", + -10.547451972961426 + ], + [ + "▁Rock", + -10.54749584197998 + ], + [ + 
"▁Gar", + -10.547906875610352 + ], + [ + "▁serving", + -10.548352241516113 + ], + [ + "▁machen", + -10.548521995544434 + ], + [ + "▁trees", + -10.54888916015625 + ], + [ + "▁accident", + -10.549199104309082 + ], + [ + "▁cloud", + -10.54920482635498 + ], + [ + "▁animals", + -10.549297332763672 + ], + [ + "▁Den", + -10.549897193908691 + ], + [ + "▁Wa", + -10.54990291595459 + ], + [ + "▁suggest", + -10.550220489501953 + ], + [ + "putting", + -10.550407409667969 + ], + [ + "▁suite", + -10.550434112548828 + ], + [ + "▁clearly", + -10.550849914550781 + ], + [ + "▁net", + -10.551287651062012 + ], + [ + "▁funding", + -10.551506996154785 + ], + [ + "▁salt", + -10.551935195922852 + ], + [ + "▁Men", + -10.552119255065918 + ], + [ + "ped", + -10.552419662475586 + ], + [ + "▁Food", + -10.553142547607422 + ], + [ + "▁leaving", + -10.553544998168945 + ], + [ + "▁Government", + -10.554243087768555 + ], + [ + "ick", + -10.554381370544434 + ], + [ + "▁seat", + -10.555121421813965 + ], + [ + "▁Los", + -10.555183410644531 + ], + [ + "▁teacher", + -10.555587768554688 + ], + [ + "▁iPhone", + -10.555693626403809 + ], + [ + "▁300", + -10.556120872497559 + ], + [ + "▁commitment", + -10.556180000305176 + ], + [ + "▁aspects", + -10.556498527526855 + ], + [ + "▁previously", + -10.55711555480957 + ], + [ + "▁cent", + -10.5572509765625 + ], + [ + "▁Vo", + -10.557341575622559 + ], + [ + "▁artists", + -10.557963371276855 + ], + [ + "▁runs", + -10.558130264282227 + ], + [ + ">", + -10.558155059814453 + ], + [ + "▁Gi", + -10.558273315429688 + ], + [ + "▁mar", + -10.5585355758667 + ], + [ + "!!!", + -10.558544158935547 + ], + [ + "▁Media", + -10.558943748474121 + ], + [ + "▁feedback", + -10.559109687805176 + ], + [ + "▁resolution", + -10.559117317199707 + ], + [ + "IN", + -10.55915641784668 + ], + [ + "▁wurden", + -10.55952262878418 + ], + [ + "▁busy", + -10.559832572937012 + ], + [ + "▁adult", + -10.5600004196167 + ], + [ + "29", + -10.560487747192383 + ], + [ + "elles", + -10.561375617980957 + ], + [ + "▁closed", + -10.561762809753418 + ], + [ + "▁trouble", + -10.561767578125 + ], + [ + "▁rent", + -10.561984062194824 + ], + [ + "lot", + -10.56224536895752 + ], + [ + "▁importance", + -10.562314987182617 + ], + [ + "▁units", + -10.56257438659668 + ], + [ + "Pro", + -10.562713623046875 + ], + [ + "▁provider", + -10.563005447387695 + ], + [ + "▁visual", + -10.563288688659668 + ], + [ + "IT", + -10.563385009765625 + ], + [ + "▁diet", + -10.563733100891113 + ], + [ + "▁appearance", + -10.563932418823242 + ], + [ + "pin", + -10.564576148986816 + ], + [ + "▁Din", + -10.564760208129883 + ], + [ + "▁eating", + -10.565516471862793 + ], + [ + "Fi", + -10.565762519836426 + ], + [ + "ball", + -10.565765380859375 + ], + [ + "är", + -10.565861701965332 + ], + [ + "ney", + -10.565878868103027 + ], + [ + "▁records", + -10.566070556640625 + ], + [ + "▁Fi", + -10.566180229187012 + ], + [ + "▁faut", + -10.566329002380371 + ], + [ + "▁CD", + -10.566803932189941 + ], + [ + "ign", + -10.566930770874023 + ], + [ + "▁vă", + -10.566996574401855 + ], + [ + "▁agency", + -10.567153930664062 + ], + [ + "ierung", + -10.567323684692383 + ], + [ + "▁Back", + -10.567361831665039 + ], + [ + "▁windows", + -10.567545890808105 + ], + [ + "▁pull", + -10.567888259887695 + ], + [ + "ash", + -10.567959785461426 + ], + [ + "▁profit", + -10.568593978881836 + ], + [ + "▁brings", + -10.568605422973633 + ], + [ + "▁Committee", + -10.569122314453125 + ], + [ + "▁girl", + -10.569174766540527 + ], + [ + "▁vehicles", + -10.569372177124023 + ], + [ + "▁Hier", + 
-10.569567680358887 + ], + [ + "ES", + -10.569639205932617 + ], + [ + "până", + -10.569880485534668 + ], + [ + "▁Kunden", + -10.570380210876465 + ], + [ + "pen", + -10.570462226867676 + ], + [ + "▁explain", + -10.570505142211914 + ], + [ + "▁cadru", + -10.570760726928711 + ], + [ + "▁attack", + -10.571100234985352 + ], + [ + "▁markets", + -10.571115493774414 + ], + [ + "▁claims", + -10.571340560913086 + ], + [ + "▁walking", + -10.571385383605957 + ], + [ + "▁pouv", + -10.571528434753418 + ], + [ + "low", + -10.571642875671387 + ], + [ + "▁showed", + -10.572114944458008 + ], + [ + "▁principal", + -10.57211971282959 + ], + [ + "▁lucru", + -10.572144508361816 + ], + [ + "▁precum", + -10.572712898254395 + ], + [ + "TA", + -10.573094367980957 + ], + [ + "▁partners", + -10.573104858398438 + ], + [ + "▁exist", + -10.573136329650879 + ], + [ + "▁internal", + -10.57334041595459 + ], + [ + "hen", + -10.573945045471191 + ], + [ + "▁Master", + -10.573966979980469 + ], + [ + "unless", + -10.574013710021973 + ], + [ + "▁doubt", + -10.574721336364746 + ], + [ + "$", + -10.574785232543945 + ], + [ + "▁Long", + -10.574888229370117 + ], + [ + "▁leaves", + -10.574907302856445 + ], + [ + "allowing", + -10.575063705444336 + ], + [ + "pol", + -10.575272560119629 + ], + [ + "▁Up", + -10.575491905212402 + ], + [ + "▁Contact", + -10.576093673706055 + ], + [ + "▁practical", + -10.57708740234375 + ], + [ + "▁suit", + -10.57758903503418 + ], + [ + "▁Site", + -10.577656745910645 + ], + [ + "▁formation", + -10.57768726348877 + ], + [ + "▁signal", + -10.578215599060059 + ], + [ + "▁approximately", + -10.578414916992188 + ], + [ + "▁ourselves", + -10.578497886657715 + ], + [ + "▁colour", + -10.578519821166992 + ], + [ + "▁species", + -10.578530311584473 + ], + [ + "▁advance", + -10.578753471374512 + ], + [ + "▁PM", + -10.57891845703125 + ], + [ + "ans", + -10.579121589660645 + ], + [ + "▁locations", + -10.579397201538086 + ], + [ + "vous", + -10.579601287841797 + ], + [ + "▁updated", + -10.579636573791504 + ], + [ + "▁faith", + -10.579673767089844 + ], + [ + "mus", + -10.579740524291992 + ], + [ + "▁stores", + -10.579863548278809 + ], + [ + "heim", + -10.580127716064453 + ], + [ + "▁suitable", + -10.580558776855469 + ], + [ + "▁continues", + -10.580703735351562 + ], + [ + "▁fac", + -10.581133842468262 + ], + [ + "ever", + -10.581156730651855 + ], + [ + "▁Bill", + -10.581195831298828 + ], + [ + "▁chose", + -10.58121109008789 + ], + [ + "▁inform", + -10.581228256225586 + ], + [ + "▁environmental", + -10.581427574157715 + ], + [ + "▁responsibility", + -10.58188533782959 + ], + [ + "99", + -10.582542419433594 + ], + [ + "▁competitive", + -10.583723068237305 + ], + [ + "▁strategies", + -10.583903312683105 + ], + [ + "▁toujours", + -10.584270477294922 + ], + [ + "tive", + -10.58430290222168 + ], + [ + "▁automatically", + -10.585600852966309 + ], + [ + "▁dress", + -10.585609436035156 + ], + [ + "▁Minister", + -10.585624694824219 + ], + [ + "har", + -10.586076736450195 + ], + [ + "▁Start", + -10.586249351501465 + ], + [ + "▁=", + -10.586563110351562 + ], + [ + "▁pattern", + -10.58659553527832 + ], + [ + "tier", + -10.58676528930664 + ], + [ + "▁pays", + -10.587034225463867 + ], + [ + "▁profile", + -10.58725357055664 + ], + [ + "▁raised", + -10.587263107299805 + ], + [ + "ange", + -10.587288856506348 + ], + [ + "▁drink", + -10.587762832641602 + ], + [ + "▁element", + -10.588042259216309 + ], + [ + "▁landscape", + -10.58875560760498 + ], + [ + "▁Tag", + -10.589073181152344 + ], + [ + "▁cheese", + -10.589590072631836 + ], + [ + 
"ific", + -10.590009689331055 + ], + [ + "▁Stadt", + -10.590181350708008 + ], + [ + "39", + -10.591398239135742 + ], + [ + "▁launch", + -10.592113494873047 + ], + [ + "▁wouldn", + -10.592150688171387 + ], + [ + "AS", + -10.592202186584473 + ], + [ + "▁push", + -10.593059539794922 + ], + [ + "▁mill", + -10.593452453613281 + ], + [ + "▁mass", + -10.593647003173828 + ], + [ + "▁category", + -10.593790054321289 + ], + [ + "sondern", + -10.594050407409668 + ], + [ + "col", + -10.594111442565918 + ], + [ + "▁climate", + -10.594313621520996 + ], + [ + "lier", + -10.594437599182129 + ], + [ + "▁slightly", + -10.595514297485352 + ], + [ + "95", + -10.596519470214844 + ], + [ + "ace", + -10.596612930297852 + ], + [ + "▁domain", + -10.597633361816406 + ], + [ + "kan", + -10.598306655883789 + ], + [ + "▁feed", + -10.598485946655273 + ], + [ + "▁Live", + -10.598837852478027 + ], + [ + "▁Mais", + -10.599113464355469 + ], + [ + "▁après", + -10.599365234375 + ], + [ + "▁village", + -10.59941577911377 + ], + [ + "▁hatte", + -10.59968090057373 + ], + [ + "▁joined", + -10.599881172180176 + ], + [ + "▁Museum", + -10.600311279296875 + ], + [ + "head", + -10.600855827331543 + ], + [ + "▁draw", + -10.6009521484375 + ], + [ + "▁concerns", + -10.600966453552246 + ], + [ + "ER", + -10.601505279541016 + ], + [ + "▁technique", + -10.601648330688477 + ], + [ + "▁Bio", + -10.601861000061035 + ], + [ + "▁Sea", + -10.601881980895996 + ], + [ + "▁@", + -10.601927757263184 + ], + [ + "wer", + -10.6021146774292 + ], + [ + "▁battery", + -10.602462768554688 + ], + [ + "▁mostly", + -10.60267448425293 + ], + [ + "▁familiar", + -10.602680206298828 + ], + [ + "▁Sub", + -10.602689743041992 + ], + [ + "▁delicious", + -10.603222846984863 + ], + [ + "doch", + -10.60326099395752 + ], + [ + "60", + -10.603395462036133 + ], + [ + "▁carte", + -10.603611946105957 + ], + [ + "▁avut", + -10.604146957397461 + ], + [ + "▁premium", + -10.60460376739502 + ], + [ + "▁attempt", + -10.604704856872559 + ], + [ + "▁Über", + -10.60473346710205 + ], + [ + "▁combined", + -10.604935646057129 + ], + [ + "lement", + -10.604947090148926 + ], + [ + "▁voi", + -10.605031967163086 + ], + [ + "▁wonder", + -10.605376243591309 + ], + [ + "▁failure", + -10.606106758117676 + ], + [ + "which", + -10.606147766113281 + ], + [ + "esti", + -10.606316566467285 + ], + [ + "31", + -10.606547355651855 + ], + [ + "▁sta", + -10.606734275817871 + ], + [ + "▁transform", + -10.60673999786377 + ], + [ + "▁license", + -10.606743812561035 + ], + [ + "▁depending", + -10.606758117675781 + ], + [ + "▁specifically", + -10.606782913208008 + ], + [ + "▁OF", + -10.60693645477295 + ], + [ + "band", + -10.606959342956543 + ], + [ + "▁Sport", + -10.60731315612793 + ], + [ + "list", + -10.607434272766113 + ], + [ + "▁Tour", + -10.60753059387207 + ], + [ + "▁Israel", + -10.607564926147461 + ], + [ + "▁filled", + -10.607722282409668 + ], + [ + "▁manual", + -10.60776138305664 + ], + [ + "▁watching", + -10.608621597290039 + ], + [ + "▁rule", + -10.608877182006836 + ], + [ + "mat", + -10.60901927947998 + ], + [ + "▁notes", + -10.609585762023926 + ], + [ + "▁Oh", + -10.60960578918457 + ], + [ + "▁bereits", + -10.609634399414062 + ], + [ + "▁foundation", + -10.609916687011719 + ], + [ + "▁vital", + -10.610146522521973 + ], + [ + "▁lassen", + -10.610747337341309 + ], + [ + "▁cât", + -10.611162185668945 + ], + [ + "▁shipping", + -10.611433029174805 + ], + [ + "▁registered", + -10.611513137817383 + ], + [ + "▁jour", + -10.612669944763184 + ], + [ + "▁island", + -10.61276626586914 + ], + [ + "▁sets", + 
-10.613068580627441 + ], + [ + "▁football", + -10.613683700561523 + ], + [ + "▁EU", + -10.613860130310059 + ], + [ + "▁stone", + -10.614019393920898 + ], + [ + "▁Press", + -10.614699363708496 + ], + [ + "▁adapt", + -10.615066528320312 + ], + [ + "ised", + -10.615425109863281 + ], + [ + "▁thoughts", + -10.615434646606445 + ], + [ + "▁doors", + -10.615851402282715 + ], + [ + "€", + -10.615954399108887 + ], + [ + "▁components", + -10.616040229797363 + ], + [ + "rig", + -10.616332054138184 + ], + [ + "▁generation", + -10.616585731506348 + ], + [ + "▁guess", + -10.616700172424316 + ], + [ + "cker", + -10.61694049835205 + ], + [ + "▁realize", + -10.617207527160645 + ], + [ + "▁Roman", + -10.617310523986816 + ], + [ + "▁contre", + -10.617693901062012 + ], + [ + "▁Out", + -10.617938995361328 + ], + [ + "▁IN", + -10.619051933288574 + ], + [ + "cip", + -10.619085311889648 + ], + [ + "59", + -10.619330406188965 + ], + [ + "▁enhance", + -10.619768142700195 + ], + [ + "▁battle", + -10.61982250213623 + ], + [ + "▁monitor", + -10.619863510131836 + ], + [ + "▁Martin", + -10.62045955657959 + ], + [ + "▁websites", + -10.620461463928223 + ], + [ + "▁DE", + -10.620599746704102 + ], + [ + "▁Festival", + -10.620951652526855 + ], + [ + "ân", + -10.62131118774414 + ], + [ + "▁Place", + -10.621419906616211 + ], + [ + "▁rare", + -10.621554374694824 + ], + [ + "această", + -10.621726989746094 + ], + [ + "▁sollte", + -10.621731758117676 + ], + [ + "▁Read", + -10.621816635131836 + ], + [ + "ware", + -10.622169494628906 + ], + [ + "Those", + -10.622671127319336 + ], + [ + "ende", + -10.623543739318848 + ], + [ + "▁prix", + -10.623835563659668 + ], + [ + "▁roman", + -10.624101638793945 + ], + [ + "▁creation", + -10.624224662780762 + ], + [ + "▁confidence", + -10.624552726745605 + ], + [ + "▁Japan", + -10.624638557434082 + ], + [ + "▁rain", + -10.624942779541016 + ], + [ + "▁guys", + -10.62518310546875 + ], + [ + "▁south", + -10.625236511230469 + ], + [ + "▁trading", + -10.625646591186523 + ], + [ + "▁€", + -10.626100540161133 + ], + [ + "▁Film", + -10.626341819763184 + ], + [ + "▁pana", + -10.627065658569336 + ], + [ + "▁asemenea", + -10.627066612243652 + ], + [ + "36", + -10.627190589904785 + ], + [ + "▁instance", + -10.627884864807129 + ], + [ + "cou", + -10.629385948181152 + ], + [ + "▁nun", + -10.630074501037598 + ], + [ + "▁Pass", + -10.630390167236328 + ], + [ + "Cette", + -10.630579948425293 + ], + [ + "▁Network", + -10.630876541137695 + ], + [ + "▁prime", + -10.631010055541992 + ], + [ + "▁spiritual", + -10.632098197937012 + ], + [ + "▁tough", + -10.633030891418457 + ], + [ + "▁AND", + -10.633086204528809 + ], + [ + "▁Cat", + -10.633601188659668 + ], + [ + "▁boat", + -10.633611679077148 + ], + [ + "▁leads", + -10.634864807128906 + ], + [ + "▁Germany", + -10.63509750366211 + ], + [ + "▁valuable", + -10.635635375976562 + ], + [ + "57", + -10.635892868041992 + ], + [ + "lect", + -10.636148452758789 + ], + [ + "▁distribution", + -10.636445045471191 + ], + [ + "dar", + -10.636518478393555 + ], + [ + "▁Manager", + -10.637701988220215 + ], + [ + "cha", + -10.637725830078125 + ], + [ + "▁obtain", + -10.637741088867188 + ], + [ + "GB", + -10.637908935546875 + ], + [ + "▁unor", + -10.638079643249512 + ], + [ + "schaft", + -10.638603210449219 + ], + [ + "▁zwischen", + -10.638723373413086 + ], + [ + "▁winning", + -10.639172554016113 + ], + [ + "▁suis", + -10.639811515808105 + ], + [ + "58", + -10.640130996704102 + ], + [ + "▁Party", + -10.640372276306152 + ], + [ + "▁ceva", + -10.640416145324707 + ], + [ + "▁comprehensive", 
+ -10.640684127807617 + ], + [ + "▁aceste", + -10.640726089477539 + ], + [ + "▁committed", + -10.640726089477539 + ], + [ + "▁Hu", + -10.641382217407227 + ], + [ + "ţ", + -10.64149284362793 + ], + [ + "▁north", + -10.642021179199219 + ], + [ + "werk", + -10.642542839050293 + ], + [ + "▁interface", + -10.642794609069824 + ], + [ + "▁Valley", + -10.64281177520752 + ], + [ + "▁anywhere", + -10.64281177520752 + ], + [ + "▁Only", + -10.642851829528809 + ], + [ + "TE", + -10.643295288085938 + ], + [ + "hui", + -10.6436767578125 + ], + [ + "bus", + -10.643951416015625 + ], + [ + "vis", + -10.6439790725708 + ], + [ + "▁Society", + -10.645116806030273 + ], + [ + "▁reliable", + -10.64556884765625 + ], + [ + "▁quelques", + -10.64563274383545 + ], + [ + "tech", + -10.646187782287598 + ], + [ + "ual", + -10.646377563476562 + ], + [ + "▁educational", + -10.646418571472168 + ], + [ + "serv", + -10.646490097045898 + ], + [ + "▁opinion", + -10.646628379821777 + ], + [ + "▁appears", + -10.646702766418457 + ], + [ + "▁count", + -10.646795272827148 + ], + [ + "irea", + -10.646981239318848 + ], + [ + "ban", + -10.647504806518555 + ], + [ + "▁45", + -10.647530555725098 + ], + [ + "▁contain", + -10.647661209106445 + ], + [ + "ost", + -10.647663116455078 + ], + [ + "▁anul", + -10.647706031799316 + ], + [ + "rien", + -10.648159980773926 + ], + [ + "gra", + -10.648360252380371 + ], + [ + "▁counter", + -10.648946762084961 + ], + [ + "-3", + -10.650411605834961 + ], + [ + "▁resource", + -10.650463104248047 + ], + [ + "▁Wo", + -10.6505126953125 + ], + [ + "▁posts", + -10.650618553161621 + ], + [ + "▁employee", + -10.651320457458496 + ], + [ + "rol", + -10.651863098144531 + ], + [ + "▁ended", + -10.651969909667969 + ], + [ + "met", + -10.653080940246582 + ], + [ + "▁meine", + -10.653165817260742 + ], + [ + "▁reached", + -10.653368949890137 + ], + [ + "gri", + -10.653716087341309 + ], + [ + "▁Bra", + -10.65374755859375 + ], + [ + "▁conduct", + -10.654294967651367 + ], + [ + "▁housing", + -10.654422760009766 + ], + [ + "▁tickets", + -10.654792785644531 + ], + [ + "▁database", + -10.655674934387207 + ], + [ + "IL", + -10.656150817871094 + ], + [ + "▁perspective", + -10.656359672546387 + ], + [ + "▁Har", + -10.656404495239258 + ], + [ + "▁error", + -10.656549453735352 + ], + [ + "▁meal", + -10.656569480895996 + ], + [ + "▁hearing", + -10.657238006591797 + ], + [ + "▁transition", + -10.657302856445312 + ], + [ + "▁browser", + -10.657609939575195 + ], + [ + "▁supported", + -10.657609939575195 + ], + [ + "▁starts", + -10.658814430236816 + ], + [ + "țe", + -10.658902168273926 + ], + [ + "▁adults", + -10.658905029296875 + ], + [ + "▁România", + -10.65917682647705 + ], + [ + "dra", + -10.659884452819824 + ], + [ + "▁worry", + -10.660222053527832 + ], + [ + "▁avoir", + -10.660497665405273 + ], + [ + "▁regional", + -10.660507202148438 + ], + [ + "▁min", + -10.660722732543945 + ], + [ + "▁Does", + -10.660806655883789 + ], + [ + "▁Keep", + -10.661200523376465 + ], + [ + "rom", + -10.661237716674805 + ], + [ + "sco", + -10.661320686340332 + ], + [ + "tem", + -10.661898612976074 + ], + [ + "▁Old", + -10.661954879760742 + ], + [ + "▁Under", + -10.662552833557129 + ], + [ + "▁Commission", + -10.662557601928711 + ], + [ + "▁Bau", + -10.6632661819458 + ], + [ + "▁News", + -10.663358688354492 + ], + [ + "▁mois", + -10.663444519042969 + ], + [ + "▁respond", + -10.66356372833252 + ], + [ + "▁alles", + -10.663878440856934 + ], + [ + "▁chair", + -10.664475440979004 + ], + [ + "▁ho", + -10.664854049682617 + ], + [ + "right", + 
-10.664908409118652 + ], + [ + "▁totally", + -10.665532112121582 + ], + [ + "gle", + -10.665534973144531 + ], + [ + "▁32", + -10.665604591369629 + ], + [ + "66", + -10.665664672851562 + ], + [ + "town", + -10.665902137756348 + ], + [ + "Ch", + -10.666261672973633 + ], + [ + "▁gr", + -10.66629695892334 + ], + [ + "▁garage", + -10.666328430175781 + ], + [ + "ții", + -10.666495323181152 + ], + [ + "▁Union", + -10.667136192321777 + ], + [ + "ică", + -10.667343139648438 + ], + [ + "▁2,", + -10.668437004089355 + ], + [ + "▁reflect", + -10.669163703918457 + ], + [ + "▁retail", + -10.669388771057129 + ], + [ + "▁unde", + -10.669605255126953 + ], + [ + "▁accessible", + -10.670262336730957 + ], + [ + "water", + -10.67059326171875 + ], + [ + "▁regard", + -10.670710563659668 + ], + [ + "▁logo", + -10.671489715576172 + ], + [ + "▁inspired", + -10.671518325805664 + ], + [ + "▁Wall", + -10.671859741210938 + ], + [ + "▁Ste", + -10.672093391418457 + ], + [ + "▁asking", + -10.672179222106934 + ], + [ + "▁Journal", + -10.673028945922852 + ], + [ + "▁Teil", + -10.674042701721191 + ], + [ + "▁collaboration", + -10.674185752868652 + ], + [ + "▁acid", + -10.674266815185547 + ], + [ + "▁Fund", + -10.674382209777832 + ], + [ + "▁spirit", + -10.6744384765625 + ], + [ + "despite", + -10.674457550048828 + ], + [ + "▁delivered", + -10.674821853637695 + ], + [ + "▁girls", + -10.675374984741211 + ], + [ + "▁Look", + -10.675896644592285 + ], + [ + "rant", + -10.675949096679688 + ], + [ + "▁District", + -10.676460266113281 + ], + [ + "▁rental", + -10.676709175109863 + ], + [ + "▁spune", + -10.676733016967773 + ], + [ + "els", + -10.677544593811035 + ], + [ + "▁permanent", + -10.677659034729004 + ], + [ + "▁iron", + -10.677709579467773 + ], + [ + "▁Thomas", + -10.677745819091797 + ], + [ + "EL", + -10.678071022033691 + ], + [ + "▁except", + -10.678074836730957 + ], + [ + "▁catch", + -10.678366661071777 + ], + [ + "▁providers", + -10.678375244140625 + ], + [ + "▁2006", + -10.678435325622559 + ], + [ + "▁chat", + -10.679931640625 + ], + [ + "▁emergency", + -10.680281639099121 + ], + [ + "gre", + -10.68030834197998 + ], + [ + "site", + -10.680888175964355 + ], + [ + "▁missing", + -10.68089485168457 + ], + [ + "abil", + -10.680914878845215 + ], + [ + "▁Hill", + -10.68099594116211 + ], + [ + "urs", + -10.681312561035156 + ], + [ + "▁plusieurs", + -10.681716918945312 + ], + [ + "▁birthday", + -10.681726455688477 + ], + [ + "DS", + -10.682019233703613 + ], + [ + "ersten", + -10.682381629943848 + ], + [ + "▁5.", + -10.68252944946289 + ], + [ + "▁library", + -10.68333911895752 + ], + [ + "▁earth", + -10.683515548706055 + ], + [ + "CI", + -10.683645248413086 + ], + [ + "▁lighting", + -10.684442520141602 + ], + [ + "▁fixed", + -10.684879302978516 + ], + [ + "tori", + -10.684891700744629 + ], + [ + "▁replace", + -10.684995651245117 + ], + [ + "▁administration", + -10.685074806213379 + ], + [ + "leurs", + -10.685229301452637 + ], + [ + "▁meat", + -10.686142921447754 + ], + [ + "▁songs", + -10.686662673950195 + ], + [ + "▁confirm", + -10.686866760253906 + ], + [ + "▁rapid", + -10.68698787689209 + ], + [ + "▁Special", + -10.686995506286621 + ], + [ + "▁holding", + -10.687115669250488 + ], + [ + "▁honor", + -10.687271118164062 + ], + [ + "▁Market", + -10.687409400939941 + ], + [ + "La", + -10.687535285949707 + ], + [ + "▁measure", + -10.687760353088379 + ], + [ + "▁guarantee", + -10.68785572052002 + ], + [ + "▁switch", + -10.68813419342041 + ], + [ + "▁extensive", + -10.688294410705566 + ], + [ + "▁Neu", + -10.688674926757812 + ], + [ + 
"avez", + -10.688901901245117 + ], + [ + "▁protein", + -10.688984870910645 + ], + [ + "▁infrastructure", + -10.689454078674316 + ], + [ + "▁functions", + -10.689494132995605 + ], + [ + "▁cont", + -10.689496040344238 + ], + [ + "row", + -10.689760208129883 + ], + [ + "star", + -10.689773559570312 + ], + [ + "▁Port", + -10.690192222595215 + ], + [ + "Using", + -10.690336227416992 + ], + [ + "▁faster", + -10.690557479858398 + ], + [ + "44", + -10.691168785095215 + ], + [ + "▁measures", + -10.691615104675293 + ], + [ + "▁celor", + -10.69186019897461 + ], + [ + "▁exam", + -10.69189739227295 + ], + [ + "200", + -10.69202995300293 + ], + [ + "î", + -10.692545890808105 + ], + [ + "▁conversation", + -10.692832946777344 + ], + [ + "▁brands", + -10.692959785461426 + ], + [ + "▁Code", + -10.69359016418457 + ], + [ + "▁Website", + -10.693748474121094 + ], + [ + "OS", + -10.693782806396484 + ], + [ + "▁alors", + -10.693822860717773 + ], + [ + "▁organ", + -10.694032669067383 + ], + [ + "▁removed", + -10.694823265075684 + ], + [ + "▁Head", + -10.694905281066895 + ], + [ + "▁Cha", + -10.694908142089844 + ], + [ + "▁visiting", + -10.694928169250488 + ], + [ + "▁wild", + -10.694928169250488 + ], + [ + "▁seit", + -10.694962501525879 + ], + [ + "49", + -10.695109367370605 + ], + [ + "▁organic", + -10.69539737701416 + ], + [ + "aţi", + -10.695775032043457 + ], + [ + "▁kit", + -10.695947647094727 + ], + [ + "68", + -10.695959091186523 + ], + [ + "▁flowers", + -10.696124076843262 + ], + [ + "▁appreciate", + -10.697006225585938 + ], + [ + "▁dead", + -10.697439193725586 + ], + [ + "▁Fire", + -10.697539329528809 + ], + [ + "▁cela", + -10.697591781616211 + ], + [ + "▁Ph", + -10.697633743286133 + ], + [ + "▁arrive", + -10.697921752929688 + ], + [ + "▁purposes", + -10.698213577270508 + ], + [ + "▁qualité", + -10.698226928710938 + ], + [ + "▁restaurants", + -10.698478698730469 + ], + [ + "▁advertising", + -10.698541641235352 + ], + [ + "cur", + -10.69855785369873 + ], + [ + "▁ça", + -10.698973655700684 + ], + [ + "▁introduced", + -10.699088096618652 + ], + [ + "▁returned", + -10.699111938476562 + ], + [ + "▁desire", + -10.699511528015137 + ], + [ + "▁soul", + -10.699983596801758 + ], + [ + "▁Technology", + -10.699994087219238 + ], + [ + ");", + -10.700163841247559 + ], + [ + "▁Royal", + -10.700282096862793 + ], + [ + "tant", + -10.70068645477295 + ], + [ + "▁possibly", + -10.700702667236328 + ], + [ + "▁consumers", + -10.700812339782715 + ], + [ + "▁doua", + -10.70097541809082 + ], + [ + "ified", + -10.70097827911377 + ], + [ + "▁Award", + -10.70114803314209 + ], + [ + "toutes", + -10.70130443572998 + ], + [ + "▁meant", + -10.701325416564941 + ], + [ + "ezi", + -10.701616287231445 + ], + [ + "▁plu", + -10.701766014099121 + ], + [ + "ţii", + -10.7021484375 + ], + [ + "▁talent", + -10.702789306640625 + ], + [ + "▁Security", + -10.703309059143066 + ], + [ + "arii", + -10.703352928161621 + ], + [ + "▁zi", + -10.703455924987793 + ], + [ + "▁Shop", + -10.703667640686035 + ], + [ + "▁breakfast", + -10.704107284545898 + ], + [ + "▁trial", + -10.704485893249512 + ], + [ + "ami", + -10.704936981201172 + ], + [ + "▁register", + -10.705301284790039 + ], + [ + "unserer", + -10.705646514892578 + ], + [ + "▁solar", + -10.705697059631348 + ], + [ + "▁deals", + -10.70591926574707 + ], + [ + "▁Ku", + -10.7059326171875 + ], + [ + "To", + -10.706186294555664 + ], + [ + "bat", + -10.70680046081543 + ], + [ + "MC", + -10.707010269165039 + ], + [ + "▁Global", + -10.707018852233887 + ], + [ + "у", + -10.707405090332031 + ], + [ + "▁nor", + 
-10.707818984985352 + ], + [ + "▁milk", + -10.707868576049805 + ], + [ + "▁choices", + -10.708206176757812 + ], + [ + "»", + -10.7086763381958 + ], + [ + "▁Sur", + -10.708695411682129 + ], + [ + "more", + -10.708739280700684 + ], + [ + "48", + -10.709024429321289 + ], + [ + "67", + -10.709375381469727 + ], + [ + "▁replacement", + -10.709942817687988 + ], + [ + "34", + -10.710440635681152 + ], + [ + "▁chocolate", + -10.710485458374023 + ], + [ + "▁Family", + -10.71059513092041 + ], + [ + "This", + -10.71122932434082 + ], + [ + "▁novel", + -10.711435317993164 + ], + [ + "▁Chicago", + -10.711563110351562 + ], + [ + "▁participate", + -10.71166706085205 + ], + [ + "▁trei", + -10.712727546691895 + ], + [ + "▁monthly", + -10.713729858398438 + ], + [ + "▁survey", + -10.713977813720703 + ], + [ + "▁End", + -10.714285850524902 + ], + [ + "▁Medical", + -10.71442699432373 + ], + [ + "autres", + -10.714678764343262 + ], + [ + "rich", + -10.714698791503906 + ], + [ + "▁bike", + -10.714703559875488 + ], + [ + "▁eventually", + -10.714717864990234 + ], + [ + "▁HD", + -10.714722633361816 + ], + [ + "bil", + -10.714744567871094 + ], + [ + "cent", + -10.714902877807617 + ], + [ + "▁afin", + -10.715676307678223 + ], + [ + "▁surgery", + -10.716160774230957 + ], + [ + "▁sin", + -10.716455459594727 + ], + [ + "▁manufacturing", + -10.716955184936523 + ], + [ + "▁consumer", + -10.717245101928711 + ], + [ + "system", + -10.717306137084961 + ], + [ + "▁object", + -10.717400550842285 + ], + [ + "▁Ju", + -10.717422485351562 + ], + [ + "ered", + -10.7178373336792 + ], + [ + "rac", + -10.718070030212402 + ], + [ + "▁clinical", + -10.718664169311523 + ], + [ + "▁dollars", + -10.719761848449707 + ], + [ + "▁chain", + -10.71994686126709 + ], + [ + "▁afternoon", + -10.720196723937988 + ], + [ + "▁ligne", + -10.720422744750977 + ], + [ + "▁accounts", + -10.721806526184082 + ], + [ + "ving", + -10.722037315368652 + ], + [ + "▁Australian", + -10.72240924835205 + ], + [ + "38", + -10.722542762756348 + ], + [ + "▁persoane", + -10.72258472442627 + ], + [ + "▁grande", + -10.722668647766113 + ], + [ + "▁Report", + -10.723472595214844 + ], + [ + "▁revenue", + -10.723649024963379 + ], + [ + "▁spre", + -10.723760604858398 + ], + [ + "▁cutting", + -10.7239990234375 + ], + [ + "▁approved", + -10.724133491516113 + ], + [ + "▁glad", + -10.724188804626465 + ], + [ + "chaque", + -10.724395751953125 + ], + [ + "win", + -10.724435806274414 + ], + [ + "▁waren", + -10.724733352661133 + ], + [ + "▁launched", + -10.725071907043457 + ], + [ + "▁layer", + -10.725645065307617 + ], + [ + "▁airport", + -10.725716590881348 + ], + [ + "▁effectively", + -10.72572135925293 + ], + [ + "▁coach", + -10.725946426391602 + ], + [ + "dé", + -10.726130485534668 + ], + [ + "LE", + -10.72627067565918 + ], + [ + "▁müssen", + -10.726386070251465 + ], + [ + "plan", + -10.726641654968262 + ], + [ + "dan", + -10.726705551147461 + ], + [ + "55", + -10.726786613464355 + ], + [ + "bringing", + -10.726895332336426 + ], + [ + "▁$2", + -10.726995468139648 + ], + [ + "nce", + -10.727181434631348 + ], + [ + "▁inspiration", + -10.728177070617676 + ], + [ + "You", + -10.728657722473145 + ], + [ + "▁soll", + -10.729095458984375 + ], + [ + "▁seemed", + -10.729595184326172 + ], + [ + "▁flight", + -10.729687690734863 + ], + [ + "▁prima", + -10.729883193969727 + ], + [ + "▁Welt", + -10.730123519897461 + ], + [ + "▁jetzt", + -10.730315208435059 + ], + [ + "ky", + -10.730428695678711 + ], + [ + "▁Western", + -10.73054027557373 + ], + [ + "▁label", + -10.730600357055664 + ], + [ + 
"▁möglich", + -10.73081111907959 + ], + [ + "▁input", + -10.730862617492676 + ], + [ + "▁laws", + -10.730995178222656 + ], + [ + "▁personnes", + -10.731708526611328 + ], + [ + "▁paying", + -10.731731414794922 + ], + [ + "▁Uhr", + -10.73173713684082 + ], + [ + "▁Mary", + -10.731745719909668 + ], + [ + "pur", + -10.73190689086914 + ], + [ + "▁covers", + -10.732133865356445 + ], + [ + "▁throw", + -10.732522964477539 + ], + [ + "▁Tor", + -10.733281135559082 + ], + [ + "▁bat", + -10.73355484008789 + ], + [ + "▁Gr", + -10.73373031616211 + ], + [ + "▁farm", + -10.73376178741455 + ], + [ + "▁improved", + -10.733843803405762 + ], + [ + "▁fără", + -10.734286308288574 + ], + [ + "▁theme", + -10.73437213897705 + ], + [ + "pens", + -10.734865188598633 + ], + [ + "▁Cup", + -10.734975814819336 + ], + [ + "▁settings", + -10.735114097595215 + ], + [ + "▁hire", + -10.735234260559082 + ], + [ + "▁massive", + -10.735248565673828 + ], + [ + "▁generate", + -10.735405921936035 + ], + [ + "▁earn", + -10.735837936401367 + ], + [ + "▁tab", + -10.736431121826172 + ], + [ + "For", + -10.736616134643555 + ], + [ + "gang", + -10.736891746520996 + ], + [ + "▁hin", + -10.73709487915039 + ], + [ + "▁roll", + -10.737113952636719 + ], + [ + "▁engagement", + -10.737157821655273 + ], + [ + "▁signed", + -10.737177848815918 + ], + [ + "▁League", + -10.737323760986328 + ], + [ + "▁registration", + -10.737931251525879 + ], + [ + "▁première", + -10.738763809204102 + ], + [ + "isse", + -10.73896598815918 + ], + [ + "▁university", + -10.739027976989746 + ], + [ + "ell", + -10.739157676696777 + ], + [ + "▁nou", + -10.739169120788574 + ], + [ + "rog", + -10.739191055297852 + ], + [ + "▁sitting", + -10.739206314086914 + ], + [ + "▁cazul", + -10.739571571350098 + ], + [ + "▁surrounding", + -10.73983383178711 + ], + [ + "▁Asia", + -10.740357398986816 + ], + [ + "▁bath", + -10.740825653076172 + ], + [ + "hal", + -10.740923881530762 + ], + [ + "▁plate", + -10.741026878356934 + ], + [ + "▁tests", + -10.741151809692383 + ], + [ + "▁presentation", + -10.741156578063965 + ], + [ + "▁chicken", + -10.741501808166504 + ], + [ + "▁Val", + -10.741586685180664 + ], + [ + "ably", + -10.74166488647461 + ], + [ + "▁magazine", + -10.741697311401367 + ], + [ + "▁Maybe", + -10.74187183380127 + ], + [ + "▁sauce", + -10.742673873901367 + ], + [ + "TC", + -10.742887496948242 + ], + [ + "▁exclusive", + -10.74296760559082 + ], + [ + "86", + -10.74306869506836 + ], + [ + "▁teeth", + -10.743474960327148 + ], + [ + "▁regularly", + -10.743524551391602 + ], + [ + "sed", + -10.743824005126953 + ], + [ + "gro", + -10.744174003601074 + ], + [ + "He", + -10.744211196899414 + ], + [ + "▁2017.", + -10.744302749633789 + ], + [ + "▁template", + -10.74489688873291 + ], + [ + "▁gleich", + -10.744938850402832 + ], + [ + "bal", + -10.745061874389648 + ], + [ + "▁African", + -10.74511432647705 + ], + [ + "în", + -10.745231628417969 + ], + [ + "▁rep", + -10.74543571472168 + ], + [ + "▁beat", + -10.74588394165039 + ], + [ + "▁deck", + -10.746064186096191 + ], + [ + "▁intended", + -10.746221542358398 + ], + [ + "▁para", + -10.746513366699219 + ], + [ + "▁IP", + -10.746712684631348 + ], + [ + "▁bra", + -10.746881484985352 + ], + [ + "▁forces", + -10.746966361999512 + ], + [ + "▁routine", + -10.747184753417969 + ], + [ + "▁Jahre", + -10.747758865356445 + ], + [ + "▁Bad", + -10.74797534942627 + ], + [ + "▁drivers", + -10.748074531555176 + ], + [ + "▁updates", + -10.748095512390137 + ], + [ + "▁elegant", + -10.748279571533203 + ], + [ + "▁external", + -10.748444557189941 + ], + [ + 
"▁engineering", + -10.748819351196289 + ], + [ + "ender", + -10.749544143676758 + ], + [ + "table", + -10.749755859375 + ], + [ + "inter", + -10.749878883361816 + ], + [ + "▁Romania", + -10.749948501586914 + ], + [ + "▁zile", + -10.750468254089355 + ], + [ + "▁luxury", + -10.750570297241211 + ], + [ + "▁calling", + -10.750750541687012 + ], + [ + "▁cooking", + -10.75101375579834 + ], + [ + "▁component", + -10.75114631652832 + ], + [ + "wan", + -10.75121021270752 + ], + [ + "schen", + -10.751212120056152 + ], + [ + "▁birth", + -10.751242637634277 + ], + [ + "asupra", + -10.751349449157715 + ], + [ + "Co", + -10.751471519470215 + ], + [ + "▁opt", + -10.75153923034668 + ], + [ + "▁discovered", + -10.751860618591309 + ], + [ + "▁teach", + -10.752084732055664 + ], + [ + "▁Son", + -10.75234317779541 + ], + [ + "▁guest", + -10.752384185791016 + ], + [ + "▁dogs", + -10.752695083618164 + ], + [ + "▁2003", + -10.752745628356934 + ], + [ + "▁behavior", + -10.752750396728516 + ], + [ + "pé", + -10.7529935836792 + ], + [ + "63", + -10.75316333770752 + ], + [ + "▁Human", + -10.753702163696289 + ], + [ + "▁expression", + -10.754800796508789 + ], + [ + "▁nevoie", + -10.754936218261719 + ], + [ + "▁recherche", + -10.75528621673584 + ], + [ + "ging", + -10.755767822265625 + ], + [ + "related", + -10.755948066711426 + ], + [ + "▁discount", + -10.756040573120117 + ], + [ + "▁Brown", + -10.756054878234863 + ], + [ + "▁Such", + -10.756107330322266 + ], + [ + "▁Ve", + -10.757149696350098 + ], + [ + "▁height", + -10.757265090942383 + ], + [ + "clo", + -10.757414817810059 + ], + [ + "▁incredible", + -10.757912635803223 + ], + [ + "▁bas", + -10.757916450500488 + ], + [ + "▁mă", + -10.75798225402832 + ], + [ + "▁purchased", + -10.758240699768066 + ], + [ + "▁compte", + -10.75831127166748 + ], + [ + "▁instructions", + -10.758537292480469 + ], + [ + "▁Instead", + -10.75866985321045 + ], + [ + "▁output", + -10.758706092834473 + ], + [ + "▁mom", + -10.758886337280273 + ], + [ + "DR", + -10.759828567504883 + ], + [ + "89", + -10.760168075561523 + ], + [ + "▁reduced", + -10.760621070861816 + ], + [ + "98", + -10.7606840133667 + ], + [ + "▁constant", + -10.760879516601562 + ], + [ + "▁therapy", + -10.762417793273926 + ], + [ + "▁capable", + -10.762757301330566 + ], + [ + "mark", + -10.763265609741211 + ], + [ + "▁Sometimes", + -10.76332950592041 + ], + [ + "▁joy", + -10.763419151306152 + ], + [ + "▁perfectly", + -10.763589859008789 + ], + [ + "▁painting", + -10.763704299926758 + ], + [ + "avait", + -10.763765335083008 + ], + [ + "▁Sha", + -10.764384269714355 + ], + [ + "▁dat", + -10.764463424682617 + ], + [ + "▁produits", + -10.764479637145996 + ], + [ + "tric", + -10.76456356048584 + ], + [ + "ierte", + -10.765153884887695 + ], + [ + "▁Smith", + -10.765836715698242 + ], + [ + "▁trebui", + -10.766264915466309 + ], + [ + "▁beaucoup", + -10.766630172729492 + ], + [ + "▁chosen", + -10.767189025878906 + ], + [ + "▁cre", + -10.76732063293457 + ], + [ + "▁complet", + -10.767341613769531 + ], + [ + "▁Ltd", + -10.767599105834961 + ], + [ + "▁recovery", + -10.76781940460205 + ], + [ + "▁district", + -10.768423080444336 + ], + [ + "78", + -10.768640518188477 + ], + [ + "▁Unter", + -10.76872730255127 + ], + [ + "▁schnell", + -10.768729209899902 + ], + [ + "▁apart", + -10.768943786621094 + ], + [ + "▁phase", + -10.76894760131836 + ], + [ + "▁seeking", + -10.769091606140137 + ], + [ + "▁mark", + -10.769148826599121 + ], + [ + "▁pet", + -10.769233703613281 + ], + [ + "▁PDF", + -10.769296646118164 + ], + [ + "▁efficiency", + 
-10.769577980041504 + ], + [ + "▁buildings", + -10.769611358642578 + ], + [ + "69", + -10.769723892211914 + ], + [ + "▁sens", + -10.769858360290527 + ], + [ + "▁Video", + -10.770115852355957 + ], + [ + "▁destination", + -10.770181655883789 + ], + [ + "▁female", + -10.770319938659668 + ], + [ + "▁supporting", + -10.770674705505371 + ], + [ + "▁signs", + -10.77077865600586 + ], + [ + "▁appeal", + -10.770784378051758 + ], + [ + "76", + -10.77110481262207 + ], + [ + "▁favourite", + -10.771612167358398 + ], + [ + "ock", + -10.771702766418457 + ], + [ + "▁readers", + -10.771757125854492 + ], + [ + "▁Did", + -10.771868705749512 + ], + [ + "rou", + -10.772045135498047 + ], + [ + "PA", + -10.77222728729248 + ], + [ + "▁Jean", + -10.772480964660645 + ], + [ + "▁Em", + -10.772586822509766 + ], + [ + "pass", + -10.77280330657959 + ], + [ + "▁Zi", + -10.773090362548828 + ], + [ + "▁între", + -10.773261070251465 + ], + [ + "▁fly", + -10.773427963256836 + ], + [ + "mos", + -10.773666381835938 + ], + [ + "▁emotional", + -10.773860931396484 + ], + [ + "asse", + -10.774768829345703 + ], + [ + "▁sessions", + -10.775086402893066 + ], + [ + "▁symptoms", + -10.77564811706543 + ], + [ + "▁died", + -10.776217460632324 + ], + [ + "▁seconds", + -10.776628494262695 + ], + [ + "▁procedure", + -10.777206420898438 + ], + [ + "▁express", + -10.777420997619629 + ], + [ + "▁două", + -10.777885437011719 + ], + [ + "▁valid", + -10.778393745422363 + ], + [ + "▁euro", + -10.7788667678833 + ], + [ + "▁interests", + -10.779032707214355 + ], + [ + "Having", + -10.779237747192383 + ], + [ + "▁hundreds", + -10.779669761657715 + ], + [ + "grad", + -10.780023574829102 + ], + [ + "▁neuen", + -10.780084609985352 + ], + [ + "▁cook", + -10.780552864074707 + ], + [ + "▁pur", + -10.780834197998047 + ], + [ + "▁charges", + -10.781024932861328 + ], + [ + "sche", + -10.78118896484375 + ], + [ + "▁smile", + -10.781468391418457 + ], + [ + "▁festival", + -10.781611442565918 + ], + [ + "cho", + -10.781672477722168 + ], + [ + "▁£", + -10.781937599182129 + ], + [ + "cht", + -10.78201675415039 + ], + [ + "▁macht", + -10.782021522521973 + ], + [ + "▁Wasser", + -10.782028198242188 + ], + [ + "▁Cap", + -10.78226375579834 + ], + [ + "▁Learn", + -10.78274154663086 + ], + [ + "▁load", + -10.783162117004395 + ], + [ + "▁aici", + -10.783225059509277 + ], + [ + "▁Ch", + -10.784143447875977 + ], + [ + "▁cycle", + -10.784223556518555 + ], + [ + "▁carried", + -10.784337997436523 + ], + [ + "▁jusqu", + -10.784517288208008 + ], + [ + "stein", + -10.78505802154541 + ], + [ + "ski", + -10.78513240814209 + ], + [ + "cap", + -10.78579330444336 + ], + [ + "▁Bal", + -10.785852432250977 + ], + [ + "▁minor", + -10.786053657531738 + ], + [ + "77", + -10.786175727844238 + ], + [ + "▁considering", + -10.78632640838623 + ], + [ + "innen", + -10.78644847869873 + ], + [ + "▁greatest", + -10.787055015563965 + ], + [ + "▁Training", + -10.787137031555176 + ], + [ + "08", + -10.787307739257812 + ], + [ + "▁significantly", + -10.787607192993164 + ], + [ + "gé", + -10.787728309631348 + ], + [ + "▁dumpster", + -10.788351058959961 + ], + [ + "▁allem", + -10.788930892944336 + ], + [ + "▁bonus", + -10.7889404296875 + ], + [ + "▁guy", + -10.789036750793457 + ], + [ + "fel", + -10.78904914855957 + ], + [ + "▁lifestyle", + -10.789241790771484 + ], + [ + "▁Bro", + -10.78961181640625 + ], + [ + "▁implement", + -10.789687156677246 + ], + [ + "lock", + -10.790046691894531 + ], + [ + "▁Earth", + -10.790142059326172 + ], + [ + "kar", + -10.790733337402344 + ], + [ + "▁invest", + 
-10.790833473205566 + ], + [ + "▁river", + -10.790933609008789 + ], + [ + "▁accurate", + -10.791494369506836 + ], + [ + "▁mu", + -10.791579246520996 + ], + [ + "▁celebrate", + -10.792119979858398 + ], + [ + "▁ran", + -10.79256820678711 + ], + [ + "▁bigger", + -10.792988777160645 + ], + [ + "▁Mer", + -10.793476104736328 + ], + [ + "▁millions", + -10.793486595153809 + ], + [ + "▁partie", + -10.793563842773438 + ], + [ + "▁dazu", + -10.793951988220215 + ], + [ + "▁Full", + -10.794130325317383 + ], + [ + "gie", + -10.794207572937012 + ], + [ + "bot", + -10.794373512268066 + ], + [ + "roll", + -10.79472827911377 + ], + [ + "▁Women", + -10.795303344726562 + ], + [ + "▁compare", + -10.796135902404785 + ], + [ + "▁van", + -10.796503067016602 + ], + [ + "▁apps", + -10.796521186828613 + ], + [ + "PC", + -10.797050476074219 + ], + [ + "▁drei", + -10.79736042022705 + ], + [ + "▁maison", + -10.797588348388672 + ], + [ + "▁knows", + -10.797712326049805 + ], + [ + "rid", + -10.797972679138184 + ], + [ + "62", + -10.798396110534668 + ], + [ + "class", + -10.798508644104004 + ], + [ + "▁chez", + -10.798669815063477 + ], + [ + "char", + -10.798828125 + ], + [ + "88", + -10.798989295959473 + ], + [ + "▁cast", + -10.79948902130127 + ], + [ + "▁examples", + -10.79973030090332 + ], + [ + "▁Therefore", + -10.799823760986328 + ], + [ + "▁topics", + -10.799941062927246 + ], + [ + "with", + -10.80013656616211 + ], + [ + "▁Anti", + -10.800555229187012 + ], + [ + "how", + -10.800620079040527 + ], + [ + "▁whom", + -10.80094051361084 + ], + [ + "▁Deutschland", + -10.801124572753906 + ], + [ + "tine", + -10.80113697052002 + ], + [ + "▁CEO", + -10.801224708557129 + ], + [ + "▁truck", + -10.801350593566895 + ], + [ + "▁Which", + -10.8015718460083 + ], + [ + "erie", + -10.802017211914062 + ], + [ + "fect", + -10.802069664001465 + ], + [ + "bou", + -10.8026762008667 + ], + [ + "▁(1", + -10.802818298339844 + ], + [ + "sum", + -10.802980422973633 + ], + [ + "▁bonne", + -10.803068161010742 + ], + [ + "▁remaining", + -10.80321216583252 + ], + [ + "▁equal", + -10.803543090820312 + ], + [ + "▁engage", + -10.803561210632324 + ], + [ + "▁RE", + -10.803849220275879 + ], + [ + "style", + -10.804182052612305 + ], + [ + "▁urma", + -10.804337501525879 + ], + [ + "▁Grund", + -10.80496883392334 + ], + [ + "ür", + -10.8051176071167 + ], + [ + "▁font", + -10.805353164672852 + ], + [ + "▁assets", + -10.805916786193848 + ], + [ + "AL", + -10.806102752685547 + ], + [ + "▁rear", + -10.80635929107666 + ], + [ + "▁contemporary", + -10.80646800994873 + ], + [ + "▁occur", + -10.8067045211792 + ], + [ + "rated", + -10.806941986083984 + ], + [ + "▁tight", + -10.807088851928711 + ], + [ + "▁machines", + -10.807921409606934 + ], + [ + "▁0.", + -10.808456420898438 + ], + [ + "▁Aber", + -10.808470726013184 + ], + [ + "sol", + -10.808517456054688 + ], + [ + "rü", + -10.80858039855957 + ], + [ + "▁2007", + -10.809479713439941 + ], + [ + "gg", + -10.809488296508789 + ], + [ + "▁unul", + -10.809691429138184 + ], + [ + "▁était", + -10.809908866882324 + ], + [ + "▁capture", + -10.809980392456055 + ], + [ + "▁command", + -10.810037612915039 + ], + [ + "▁wire", + -10.810425758361816 + ], + [ + "▁shift", + -10.810762405395508 + ], + [ + "▁bread", + -10.81084156036377 + ], + [ + "▁causes", + -10.810937881469727 + ], + [ + "PI", + -10.810938835144043 + ], + [ + "SC", + -10.811086654663086 + ], + [ + "▁lights", + -10.811190605163574 + ], + [ + "▁lived", + -10.811293601989746 + ], + [ + "mul", + -10.811446189880371 + ], + [ + "▁Cur", + -10.811917304992676 + ], + [ + 
"▁Richard", + -10.811973571777344 + ], + [ + "37", + -10.812638282775879 + ], + [ + "▁cup", + -10.812737464904785 + ], + [ + "▁fields", + -10.812983512878418 + ], + [ + "▁crusher", + -10.813389778137207 + ], + [ + "65", + -10.813774108886719 + ], + [ + "avons", + -10.813822746276855 + ], + [ + "▁gear", + -10.813835144042969 + ], + [ + "▁standing", + -10.813844680786133 + ], + [ + "▁thick", + -10.81445026397705 + ], + [ + "aff", + -10.815132141113281 + ], + [ + "ments", + -10.815434455871582 + ], + [ + "▁conflict", + -10.815728187561035 + ], + [ + "ität", + -10.815825462341309 + ], + [ + "▁worse", + -10.816295623779297 + ], + [ + "SE", + -10.816332817077637 + ], + [ + "imi", + -10.816459655761719 + ], + [ + "▁dating", + -10.817033767700195 + ], + [ + "Do", + -10.817073822021484 + ], + [ + "▁flexible", + -10.817093849182129 + ], + [ + "ologie", + -10.817131996154785 + ], + [ + "SU", + -10.817200660705566 + ], + [ + "▁contribute", + -10.817306518554688 + ], + [ + "▁denn", + -10.817428588867188 + ], + [ + "▁appointment", + -10.81746768951416 + ], + [ + "▁ticket", + -10.817523002624512 + ], + [ + "bed", + -10.817892074584961 + ], + [ + "▁2019.", + -10.817936897277832 + ], + [ + "▁tasks", + -10.81871223449707 + ], + [ + "▁carbon", + -10.818734169006348 + ], + [ + "▁situations", + -10.819400787353516 + ], + [ + "MA", + -10.819402694702148 + ], + [ + "▁portion", + -10.819498062133789 + ], + [ + "▁urban", + -10.819585800170898 + ], + [ + "▁Canadian", + -10.819805145263672 + ], + [ + "▁Bur", + -10.819937705993652 + ], + [ + "▁pack", + -10.81995964050293 + ], + [ + "▁effet", + -10.819992065429688 + ], + [ + "▁Ball", + -10.82008171081543 + ], + [ + "▁timpul", + -10.82014274597168 + ], + [ + "▁owned", + -10.820211410522461 + ], + [ + "▁surprise", + -10.820413589477539 + ], + [ + "▁Mu", + -10.820582389831543 + ], + [ + "▁decades", + -10.821001052856445 + ], + [ + "▁affected", + -10.821728706359863 + ], + [ + "▁proven", + -10.821732521057129 + ], + [ + "▁Fe", + -10.821990966796875 + ], + [ + "zy", + -10.822042465209961 + ], + [ + "42", + -10.822175979614258 + ], + [ + "▁trend", + -10.8223876953125 + ], + [ + "▁autres", + -10.82262897491455 + ], + [ + "No", + -10.823028564453125 + ], + [ + "▁nine", + -10.823565483093262 + ], + [ + "ON", + -10.82376480102539 + ], + [ + "NE", + -10.823953628540039 + ], + [ + "oli", + -10.824359893798828 + ], + [ + "▁Daniel", + -10.824434280395508 + ], + [ + "▁spa", + -10.824939727783203 + ], + [ + "▁messages", + -10.825084686279297 + ], + [ + "PS", + -10.825183868408203 + ], + [ + "47", + -10.825703620910645 + ], + [ + "▁doch", + -10.826032638549805 + ], + [ + "▁improvement", + -10.826187133789062 + ], + [ + "▁mountain", + -10.826350212097168 + ], + [ + "▁Room", + -10.826451301574707 + ], + [ + "▁edition", + -10.826546669006348 + ], + [ + "▁musical", + -10.826712608337402 + ], + [ + "CP", + -10.827024459838867 + ], + [ + "▁Mill", + -10.827027320861816 + ], + [ + "▁steht", + -10.827740669250488 + ], + [ + "▁determined", + -10.828083038330078 + ], + [ + "you", + -10.828392028808594 + ], + [ + "weg", + -10.828554153442383 + ], + [ + "▁Digital", + -10.828624725341797 + ], + [ + "▁filter", + -10.828903198242188 + ], + [ + "▁youth", + -10.829047203063965 + ], + [ + "▁assessment", + -10.829301834106445 + ], + [ + "▁butter", + -10.829370498657227 + ], + [ + "▁Watch", + -10.829427719116211 + ], + [ + "▁zusammen", + -10.829471588134766 + ], + [ + "▁View", + -10.829606056213379 + ], + [ + "09", + -10.829649925231934 + ], + [ + "▁sole", + -10.829816818237305 + ], + [ + ".00", + 
-10.830018997192383 + ], + [ + "33", + -10.83015251159668 + ], + [ + "▁export", + -10.830229759216309 + ], + [ + "ery", + -10.830373764038086 + ], + [ + "▁zurück", + -10.830426216125488 + ], + [ + "▁walls", + -10.83048152923584 + ], + [ + "▁recognize", + -10.8306884765625 + ], + [ + "law", + -10.830801963806152 + ], + [ + "▁parent", + -10.830863952636719 + ], + [ + "ST", + -10.831357955932617 + ], + [ + "▁description", + -10.831669807434082 + ], + [ + "MS", + -10.831887245178223 + ], + [ + "SM", + -10.83189582824707 + ], + [ + "▁Finally", + -10.831940650939941 + ], + [ + "▁hardware", + -10.831965446472168 + ], + [ + "ident", + -10.832464218139648 + ], + [ + "▁brown", + -10.832566261291504 + ], + [ + "▁kinds", + -10.832950592041016 + ], + [ + "▁Arts", + -10.83297061920166 + ], + [ + "▁concert", + -10.83341121673584 + ], + [ + "▁sec", + -10.83342456817627 + ], + [ + "▁represent", + -10.833512306213379 + ], + [ + "▁institutions", + -10.833597183227539 + ], + [ + "▁fur", + -10.833998680114746 + ], + [ + "▁Support", + -10.83403205871582 + ], + [ + "87", + -10.834076881408691 + ], + [ + "▁ease", + -10.834178924560547 + ], + [ + "▁feels", + -10.834218978881836 + ], + [ + "▁sheet", + -10.834342002868652 + ], + [ + "▁Though", + -10.83437442779541 + ], + [ + "▁propose", + -10.834381103515625 + ], + [ + "▁personnel", + -10.834409713745117 + ], + [ + "bie", + -10.834794044494629 + ], + [ + "▁contest", + -10.834836959838867 + ], + [ + "▁successfully", + -10.835152626037598 + ], + [ + "▁direkt", + -10.835397720336914 + ], + [ + "bietet", + -10.835597038269043 + ], + [ + "▁submit", + -10.835888862609863 + ], + [ + "▁sicher", + -10.835919380187988 + ], + [ + "▁Personal", + -10.83607006072998 + ], + [ + "94", + -10.836341857910156 + ], + [ + "61", + -10.836400985717773 + ], + [ + "▁Very", + -10.836540222167969 + ], + [ + "bol", + -10.836603164672852 + ], + [ + "▁ha", + -10.837089538574219 + ], + [ + "▁channel", + -10.8372220993042 + ], + [ + "mut", + -10.837289810180664 + ], + [ + "▁mouth", + -10.837342262268066 + ], + [ + "▁vast", + -10.837395668029785 + ], + [ + "▁Ob", + -10.837569236755371 + ], + [ + "lit", + -10.83763313293457 + ], + [ + "▁poly", + -10.837878227233887 + ], + [ + "▁trained", + -10.838102340698242 + ], + [ + "▁specialist", + -10.838122367858887 + ], + [ + "UL", + -10.83822250366211 + ], + [ + "▁seiner", + -10.838336944580078 + ], + [ + "SS", + -10.838627815246582 + ], + [ + "▁vacation", + -10.838672637939453 + ], + [ + "▁resume", + -10.839157104492188 + ], + [ + "▁constantly", + -10.839717864990234 + ], + [ + "▁treated", + -10.83986759185791 + ], + [ + "▁150", + -10.840936660766602 + ], + [ + "▁native", + -10.841246604919434 + ], + [ + "▁Russian", + -10.841329574584961 + ], + [ + "▁patterns", + -10.841371536254883 + ], + [ + "▁knowing", + -10.841670989990234 + ], + [ + "▁Pan", + -10.841682434082031 + ], + [ + "peri", + -10.841848373413086 + ], + [ + "aci", + -10.841864585876465 + ], + [ + "▁answers", + -10.842114448547363 + ], + [ + "▁heute", + -10.842985153198242 + ], + [ + "93", + -10.843056678771973 + ], + [ + "▁Winter", + -10.844083786010742 + ], + [ + "▁yes", + -10.844173431396484 + ], + [ + "SP", + -10.844185829162598 + ], + [ + "].", + -10.844388008117676 + ], + [ + "▁kein", + -10.844862937927246 + ], + [ + "▁introduce", + -10.8450927734375 + ], + [ + "-4", + -10.84555435180664 + ], + [ + "▁shoot", + -10.845762252807617 + ], + [ + "AR", + -10.84576416015625 + ], + [ + "▁receiving", + -10.845864295959473 + ], + [ + "▁intre", + -10.84702205657959 + ], + [ + "▁appeared", + 
-10.84708023071289 + ], + [ + "▁brother", + -10.847321510314941 + ], + [ + "▁extend", + -10.847765922546387 + ], + [ + "▁fara", + -10.848737716674805 + ], + [ + "▁kommt", + -10.848876953125 + ], + [ + "ali", + -10.848913192749023 + ], + [ + "▁numai", + -10.849047660827637 + ], + [ + "▁scientific", + -10.84913158416748 + ], + [ + "▁virtual", + -10.849145889282227 + ], + [ + "▁Ac", + -10.849513053894043 + ], + [ + "▁procedures", + -10.849631309509277 + ], + [ + "▁silver", + -10.849821090698242 + ], + [ + "▁leather", + -10.849979400634766 + ], + [ + "DA", + -10.85014820098877 + ], + [ + "▁executive", + -10.850263595581055 + ], + [ + "▁officials", + -10.850496292114258 + ], + [ + "▁agencies", + -10.850503921508789 + ], + [ + "▁Software", + -10.850540161132812 + ], + [ + "▁cor", + -10.850690841674805 + ], + [ + "Con", + -10.850741386413574 + ], + [ + "▁log", + -10.851066589355469 + ], + [ + "ț", + -10.851147651672363 + ], + [ + "02", + -10.851195335388184 + ], + [ + "▁7.", + -10.85245132446289 + ], + [ + "▁accepted", + -10.852483749389648 + ], + [ + "▁Berlin", + -10.852538108825684 + ], + [ + "ID", + -10.852582931518555 + ], + [ + "cot", + -10.852788925170898 + ], + [ + "▁employment", + -10.852799415588379 + ], + [ + "run", + -10.853020668029785 + ], + [ + "▁identified", + -10.853178977966309 + ], + [ + "96", + -10.853887557983398 + ], + [ + "▁déjà", + -10.853944778442383 + ], + [ + "▁cuisine", + -10.853952407836914 + ], + [ + "turi", + -10.854070663452148 + ], + [ + "▁Japanese", + -10.854316711425781 + ], + [ + "▁golf", + -10.854514122009277 + ], + [ + "▁Ki", + -10.854787826538086 + ], + [ + "▁carefully", + -10.854863166809082 + ], + [ + "▁remote", + -10.854973793029785 + ], + [ + "▁2018,", + -10.855148315429688 + ], + [ + "▁sus", + -10.855154991149902 + ], + [ + "tique", + -10.855293273925781 + ], + [ + "▁residential", + -10.855695724487305 + ], + [ + "97", + -10.855809211730957 + ], + [ + "▁Spring", + -10.855908393859863 + ], + [ + "▁Marketing", + -10.856186866760254 + ], + [ + "▁Control", + -10.85630989074707 + ], + [ + "var", + -10.856344223022461 + ], + [ + "▁historical", + -10.8563814163208 + ], + [ + "▁freedom", + -10.856423377990723 + ], + [ + "sure", + -10.856426239013672 + ], + [ + "▁broken", + -10.856796264648438 + ], + [ + "▁criminal", + -10.856949806213379 + ], + [ + "▁innovation", + -10.857075691223145 + ], + [ + "▁Italian", + -10.857192039489746 + ], + [ + "sper", + -10.857282638549805 + ], + [ + "▁cake", + -10.857653617858887 + ], + [ + "▁candidates", + -10.857894897460938 + ], + [ + "▁sizes", + -10.858267784118652 + ], + [ + "pel", + -10.858366966247559 + ], + [ + "▁frequently", + -10.85889720916748 + ], + [ + "▁planet", + -10.859138488769531 + ], + [ + "▁writer", + -10.859519958496094 + ], + [ + "1,", + -10.859569549560547 + ], + [ + "uvent", + -10.85959529876709 + ], + [ + "▁awareness", + -10.859807968139648 + ], + [ + "name", + -10.859954833984375 + ], + [ + "▁Children", + -10.859980583190918 + ], + [ + "▁relatively", + -10.860311508178711 + ], + [ + "▁pu", + -10.860321998596191 + ], + [ + "▁quiet", + -10.86038875579834 + ], + [ + "▁planned", + -10.860716819763184 + ], + [ + "▁election", + -10.861419677734375 + ], + [ + "▁6.", + -10.861761093139648 + ], + [ + "▁broad", + -10.861772537231445 + ], + [ + "▁skill", + -10.861835479736328 + ], + [ + "▁reasonable", + -10.862037658691406 + ], + [ + "▁Fort", + -10.862283706665039 + ], + [ + "▁aceea", + -10.862407684326172 + ], + [ + "▁arrived", + -10.86263370513916 + ], + [ + "▁payments", + -10.862680435180664 + ], + [ + "ack", + 
-10.862700462341309 + ], + [ + "▁Ort", + -10.863354682922363 + ], + [ + "▁investors", + -10.863364219665527 + ], + [ + "▁operate", + -10.86351203918457 + ], + [ + "ME", + -10.863556861877441 + ], + [ + "dic", + -10.863683700561523 + ], + [ + "▁foods", + -10.863731384277344 + ], + [ + "▁stick", + -10.863831520080566 + ], + [ + "▁agents", + -10.86412525177002 + ], + [ + "▁crowd", + -10.864175796508789 + ], + [ + "▁Students", + -10.864480972290039 + ], + [ + "▁concerned", + -10.864609718322754 + ], + [ + "test", + -10.864740371704102 + ], + [ + "▁designer", + -10.865334510803223 + ], + [ + "▁Conference", + -10.865593910217285 + ], + [ + "▁saving", + -10.866105079650879 + ], + [ + "▁recorded", + -10.866422653198242 + ], + [ + "▁proposed", + -10.866564750671387 + ], + [ + "▁ship", + -10.86657428741455 + ], + [ + "▁cred", + -10.867274284362793 + ], + [ + "▁Ci", + -10.867440223693848 + ], + [ + "RE", + -10.867619514465332 + ], + [ + "▁tradition", + -10.867753982543945 + ], + [ + "▁worldwide", + -10.867779731750488 + ], + [ + "64", + -10.867944717407227 + ], + [ + "▁television", + -10.867989540100098 + ], + [ + "▁projet", + -10.868102073669434 + ], + [ + "ency", + -10.868487358093262 + ], + [ + "▁struggle", + -10.868514060974121 + ], + [ + "▁twice", + -10.868955612182617 + ], + [ + "▁Off", + -10.869234085083008 + ], + [ + "▁begins", + -10.869577407836914 + ], + [ + "key", + -10.869794845581055 + ], + [ + "▁Table", + -10.869963645935059 + ], + [ + "▁demande", + -10.870177268981934 + ], + [ + "▁liquid", + -10.870441436767578 + ], + [ + "meter", + -10.870684623718262 + ], + [ + "▁2001", + -10.871190071105957 + ], + [ + "▁willing", + -10.871660232543945 + ], + [ + "▁medicine", + -10.871707916259766 + ], + [ + "▁expand", + -10.871747970581055 + ], + [ + "▁2004", + -10.871804237365723 + ], + [ + "▁2002", + -10.872016906738281 + ], + [ + "▁accord", + -10.872292518615723 + ], + [ + "▁Chris", + -10.872446060180664 + ], + [ + "▁prove", + -10.872543334960938 + ], + [ + "ston", + -10.872740745544434 + ], + [ + "mettre", + -10.872800827026367 + ], + [ + "▁moments", + -10.873537063598633 + ], + [ + "tik", + -10.87368392944336 + ], + [ + "such", + -10.874055862426758 + ], + [ + "2.", + -10.874431610107422 + ], + [ + "▁UN", + -10.874561309814453 + ], + [ + "▁jump", + -10.874737739562988 + ], + [ + "▁dish", + -10.87539291381836 + ], + [ + "▁Key", + -10.875663757324219 + ], + [ + "▁challenging", + -10.875975608825684 + ], + [ + "▁domestic", + -10.876410484313965 + ], + [ + "▁impressive", + -10.876752853393555 + ], + [ + "iger", + -10.877022743225098 + ], + [ + "▁Ram", + -10.877157211303711 + ], + [ + "▁doit", + -10.877263069152832 + ], + [ + "▁concrete", + -10.87734317779541 + ], + [ + "▁Unternehmen", + -10.877397537231445 + ], + [ + "▁LED", + -10.877429008483887 + ], + [ + "▁trouver", + -10.877533912658691 + ], + [ + "▁fundamental", + -10.877875328063965 + ], + [ + "▁implementation", + -10.878121376037598 + ], + [ + "85", + -10.878247261047363 + ], + [ + "▁hosting", + -10.87856388092041 + ], + [ + "▁Game", + -10.878691673278809 + ], + [ + "▁taught", + -10.878981590270996 + ], + [ + "tung", + -10.879016876220703 + ], + [ + "ront", + -10.87940502166748 + ], + [ + "▁shoes", + -10.879639625549316 + ], + [ + "79", + -10.8797607421875 + ], + [ + "▁stunning", + -10.879778861999512 + ], + [ + "▁Congress", + -10.880142211914062 + ], + [ + "▁Ent", + -10.880278587341309 + ], + [ + "▁Wer", + -10.880607604980469 + ], + [ + "▁alt", + -10.880608558654785 + ], + [ + "ör", + -10.880699157714844 + ], + [ + "▁calm", + 
-10.8808012008667 + ], + [ + "46", + -10.881132125854492 + ], + [ + "▁Daca", + -10.881404876708984 + ], + [ + "71", + -10.881938934326172 + ], + [ + "▁Dec", + -10.882392883300781 + ], + [ + "▁Fo", + -10.882437705993652 + ], + [ + "▁defense", + -10.88313102722168 + ], + [ + "▁expectations", + -10.883166313171387 + ], + [ + "▁Alle", + -10.88318920135498 + ], + [ + "▁brief", + -10.883691787719727 + ], + [ + "▁Hospital", + -10.883975982666016 + ], + [ + "▁sides", + -10.884121894836426 + ], + [ + "▁yellow", + -10.884140014648438 + ], + [ + "lei", + -10.88451862335205 + ], + [ + "▁speaking", + -10.884589195251465 + ], + [ + "▁crucial", + -10.885198593139648 + ], + [ + "▁Town", + -10.8854341506958 + ], + [ + "▁married", + -10.885574340820312 + ], + [ + "▁acesta", + -10.885583877563477 + ], + [ + "▁noted", + -10.885611534118652 + ], + [ + "▁Word", + -10.885659217834473 + ], + [ + "▁conducted", + -10.885963439941406 + ], + [ + "▁decor", + -10.886249542236328 + ], + [ + "kon", + -10.886565208435059 + ], + [ + "▁supplies", + -10.8866605758667 + ], + [ + "▁adventure", + -10.886691093444824 + ], + [ + "▁exhibition", + -10.887163162231445 + ], + [ + "heit", + -10.887300491333008 + ], + [ + "▁36", + -10.88744831085205 + ], + [ + "eria", + -10.887505531311035 + ], + [ + "ines", + -10.887551307678223 + ], + [ + "ological", + -10.887582778930664 + ], + [ + "quel", + -10.88806438446045 + ], + [ + "▁Van", + -10.88825511932373 + ], + [ + "-19", + -10.88853645324707 + ], + [ + "2,", + -10.888566970825195 + ], + [ + "▁Band", + -10.888989448547363 + ], + [ + "▁soil", + -10.889184951782227 + ], + [ + "▁Tim", + -10.889599800109863 + ], + [ + "▁NOT", + -10.88968563079834 + ], + [ + "▁pilot", + -10.889753341674805 + ], + [ + "▁Sh", + -10.889774322509766 + ], + [ + "Ho", + -10.890361785888672 + ], + [ + "CA", + -10.890509605407715 + ], + [ + "▁Eu", + -10.890745162963867 + ], + [ + "▁committee", + -10.890829086303711 + ], + [ + "▁Store", + -10.891075134277344 + ], + [ + "▁joint", + -10.89111614227295 + ], + [ + "▁Op", + -10.891315460205078 + ], + [ + "▁Jack", + -10.891985893249512 + ], + [ + "quality", + -10.89216423034668 + ], + [ + "▁Has", + -10.892489433288574 + ], + [ + "▁wenig", + -10.892507553100586 + ], + [ + "hood", + -10.892545700073242 + ], + [ + "▁Class", + -10.892582893371582 + ], + [ + "rus", + -10.892773628234863 + ], + [ + "▁grown", + -10.89294719696045 + ], + [ + "▁About", + -10.893518447875977 + ], + [ + "▁sum", + -10.893942832946777 + ], + [ + "▁Fair", + -10.893946647644043 + ], + [ + "SA", + -10.894149780273438 + ], + [ + "92", + -10.894185066223145 + ], + [ + "▁fourth", + -10.894354820251465 + ], + [ + "▁featured", + -10.894384384155273 + ], + [ + "▁Pen", + -10.89444637298584 + ], + [ + "▁natürlich", + -10.894885063171387 + ], + [ + "ched", + -10.894901275634766 + ], + [ + "▁ban", + -10.895112991333008 + ], + [ + "anne", + -10.89522647857666 + ], + [ + "▁theory", + -10.895413398742676 + ], + [ + "bin", + -10.895438194274902 + ], + [ + "iers", + -10.895819664001465 + ], + [ + "▁strategic", + -10.895903587341309 + ], + [ + "▁jours", + -10.895956039428711 + ], + [ + "▁communicate", + -10.896124839782715 + ], + [ + "▁pin", + -10.896320343017578 + ], + [ + "▁Bon", + -10.89721393585205 + ], + [ + "kom", + -10.897290229797363 + ], + [ + "-5", + -10.898177146911621 + ], + [ + "▁degrees", + -10.898643493652344 + ], + [ + "▁entertainment", + -10.899014472961426 + ], + [ + "ară", + -10.899248123168945 + ], + [ + "ales", + -10.899425506591797 + ], + [ + "▁pendant", + -10.89954662322998 + ], + [ + "▁Series", + 
-10.899575233459473 + ], + [ + "▁holds", + -10.899592399597168 + ], + [ + "▁Mini", + -10.899828910827637 + ], + [ + "▁Obama", + -10.899898529052734 + ], + [ + "▁conform", + -10.900163650512695 + ], + [ + "-10", + -10.900216102600098 + ], + [ + "▁preparation", + -10.9009370803833 + ], + [ + "▁autre", + -10.90105152130127 + ], + [ + "▁mortgage", + -10.901155471801758 + ], + [ + "▁Kan", + -10.901508331298828 + ], + [ + "▁typical", + -10.901538848876953 + ], + [ + "01", + -10.901711463928223 + ], + [ + "▁Review", + -10.901862144470215 + ], + [ + "▁laptop", + -10.902127265930176 + ], + [ + "CR", + -10.902610778808594 + ], + [ + "▁thread", + -10.90265941619873 + ], + [ + "BS", + -10.902661323547363 + ], + [ + "▁upper", + -10.902700424194336 + ], + [ + "▁searching", + -10.902932167053223 + ], + [ + "▁pen", + -10.903214454650879 + ], + [ + "▁Middle", + -10.90333080291748 + ], + [ + "73", + -10.903359413146973 + ], + [ + "▁leg", + -10.903650283813477 + ], + [ + "onic", + -10.904272079467773 + ], + [ + "IS", + -10.904356956481934 + ], + [ + "▁Kar", + -10.904623985290527 + ], + [ + "anz", + -10.9046630859375 + ], + [ + "▁circuit", + -10.904901504516602 + ], + [ + "▁Casino", + -10.905384063720703 + ], + [ + "07", + -10.90584659576416 + ], + [ + "▁petit", + -10.905906677246094 + ], + [ + "TV", + -10.905978202819824 + ], + [ + "level", + -10.906311988830566 + ], + [ + "▁Point", + -10.906312942504883 + ], + [ + "rau", + -10.906474113464355 + ], + [ + "▁cabinet", + -10.906991958618164 + ], + [ + "▁failed", + -10.907042503356934 + ], + [ + "▁stated", + -10.907126426696777 + ], + [ + "LA", + -10.907461166381836 + ], + [ + "▁privacy", + -10.907596588134766 + ], + [ + "vol", + -10.907901763916016 + ], + [ + "ativ", + -10.908151626586914 + ], + [ + "▁matters", + -10.908210754394531 + ], + [ + "▁Mor", + -10.908555030822754 + ], + [ + "▁Ur", + -10.90860652923584 + ], + [ + "view", + -10.908968925476074 + ], + [ + "▁consultation", + -10.90921688079834 + ], + [ + "TS", + -10.909296989440918 + ], + [ + "▁apartment", + -10.909412384033203 + ], + [ + "▁integrated", + -10.909425735473633 + ], + [ + "74", + -10.909669876098633 + ], + [ + "▁Through", + -10.909710884094238 + ], + [ + "▁kick", + -10.909798622131348 + ], + [ + "▁perioada", + -10.90993881225586 + ], + [ + "▁entirely", + -10.909953117370605 + ], + [ + "▁impossible", + -10.91015911102295 + ], + [ + "▁consideration", + -10.910268783569336 + ], + [ + "▁Alt", + -10.91054916381836 + ], + [ + "▁Come", + -10.911089897155762 + ], + [ + "▁outstanding", + -10.911276817321777 + ], + [ + "83", + -10.911727905273438 + ], + [ + "▁prezent", + -10.911859512329102 + ], + [ + "▁Local", + -10.911993980407715 + ], + [ + "▁Camp", + -10.912056922912598 + ], + [ + "▁bear", + -10.912067413330078 + ], + [ + "enden", + -10.912262916564941 + ], + [ + "life", + -10.91236686706543 + ], + [ + "▁Haus", + -10.912516593933105 + ], + [ + "▁William", + -10.912644386291504 + ], + [ + "“,", + -10.912665367126465 + ], + [ + "▁Instagram", + -10.91285514831543 + ], + [ + "▁solve", + -10.913195610046387 + ], + [ + "▁Ze", + -10.913431167602539 + ], + [ + "▁everyday", + -10.91357135772705 + ], + [ + "bla", + -10.913615226745605 + ], + [ + "eng", + -10.913662910461426 + ], + [ + "ough", + -10.914246559143066 + ], + [ + "84", + -10.914483070373535 + ], + [ + "?\"", + -10.914599418640137 + ], + [ + "rely", + -10.91476821899414 + ], + [ + "TH", + -10.914841651916504 + ], + [ + "lang", + -10.91511058807373 + ], + [ + "82", + -10.915817260742188 + ], + [ + "▁removal", + -10.91589641571045 + ], + [ + "ală", 
+ -10.915956497192383 + ], + [ + "▁circumstances", + -10.916097640991211 + ], + [ + "ente", + -10.91622257232666 + ], + [ + "▁lieu", + -10.91645336151123 + ], + [ + "▁2016.", + -10.91710376739502 + ], + [ + "▁ales", + -10.917342185974121 + ], + [ + "▁pure", + -10.917482376098633 + ], + [ + "▁choosing", + -10.917590141296387 + ], + [ + "▁Russia", + -10.917698860168457 + ], + [ + "amp", + -10.917703628540039 + ], + [ + "▁Santa", + -10.91788387298584 + ], + [ + "▁happening", + -10.918203353881836 + ], + [ + "▁crew", + -10.91822338104248 + ], + [ + "▁lei", + -10.91855239868164 + ], + [ + "IP", + -10.91858196258545 + ], + [ + "RO", + -10.919425964355469 + ], + [ + "▁resort", + -10.919514656066895 + ], + [ + "ened", + -10.919689178466797 + ], + [ + "MB", + -10.920031547546387 + ], + [ + "▁styles", + -10.920052528381348 + ], + [ + "▁dernier", + -10.920533180236816 + ], + [ + "uck", + -10.920699119567871 + ], + [ + "▁Guide", + -10.920710563659668 + ], + [ + "fic", + -10.92096996307373 + ], + [ + "▁fitness", + -10.921977996826172 + ], + [ + "▁healthcare", + -10.92223072052002 + ], + [ + "mol", + -10.92237663269043 + ], + [ + "▁vis", + -10.922721862792969 + ], + [ + "▁atmosphere", + -10.922972679138184 + ], + [ + "▁motion", + -10.922989845275879 + ], + [ + "▁closer", + -10.923114776611328 + ], + [ + "▁SA", + -10.92335319519043 + ], + [ + "▁default", + -10.923371315002441 + ], + [ + "▁architecture", + -10.923471450805664 + ], + [ + "iile", + -10.923528671264648 + ], + [ + "zel", + -10.923675537109375 + ], + [ + "cla", + -10.92387866973877 + ], + [ + "OP", + -10.924382209777832 + ], + [ + "▁west", + -10.924965858459473 + ], + [ + "▁Energy", + -10.925613403320312 + ], + [ + "▁positions", + -10.925777435302734 + ], + [ + "▁contrast", + -10.925885200500488 + ], + [ + "▁serves", + -10.92605972290039 + ], + [ + "cup", + -10.926340103149414 + ], + [ + "▁rose", + -10.926485061645508 + ], + [ + "pers", + -10.92664623260498 + ], + [ + "▁noise", + -10.926846504211426 + ], + [ + "mont", + -10.92690658569336 + ], + [ + "#", + -10.927061080932617 + ], + [ + "lies", + -10.927326202392578 + ], + [ + "pat", + -10.927718162536621 + ], + [ + "IC", + -10.927956581115723 + ], + [ + "arc", + -10.927989959716797 + ], + [ + "▁winner", + -10.928524017333984 + ], + [ + "tent", + -10.928732872009277 + ], + [ + "▁Preis", + -10.929106712341309 + ], + [ + "▁vin", + -10.929254531860352 + ], + [ + "blo", + -10.92929458618164 + ], + [ + "ție", + -10.929520606994629 + ], + [ + "▁OR", + -10.930315017700195 + ], + [ + "▁Buch", + -10.930798530578613 + ], + [ + "▁nearby", + -10.931190490722656 + ], + [ + "▁meetings", + -10.931290626525879 + ], + [ + "▁48", + -10.931465148925781 + ], + [ + "▁quand", + -10.93152904510498 + ], + [ + "▁usual", + -10.931936264038086 + ], + [ + "▁weitere", + -10.932539939880371 + ], + [ + "▁caught", + -10.932571411132812 + ], + [ + "▁issued", + -10.932626724243164 + ], + [ + "ști", + -10.932896614074707 + ], + [ + "upcoming", + -10.933232307434082 + ], + [ + "▁agreed", + -10.933233261108398 + ], + [ + "place", + -10.933353424072266 + ], + [ + "▁Brand", + -10.93344497680664 + ], + [ + "▁relation", + -10.933969497680664 + ], + [ + "▁atât", + -10.934090614318848 + ], + [ + "▁Tre", + -10.934176445007324 + ], + [ + "▁lors", + -10.934438705444336 + ], + [ + "▁adopt", + -10.934452056884766 + ], + [ + "▁celui", + -10.93458366394043 + ], + [ + "cken", + -10.93505859375 + ], + [ + "▁partnership", + -10.935284614562988 + ], + [ + "?”", + -10.935376167297363 + ], + [ + "▁ba", + -10.935746192932129 + ], + [ + "▁ID", + 
-10.935832023620605 + ], + [ + "▁consistent", + -10.935835838317871 + ], + [ + "▁Ya", + -10.935941696166992 + ], + [ + "▁Academy", + -10.936182022094727 + ], + [ + "cial", + -10.936230659484863 + ], + [ + "1%", + -10.936366081237793 + ], + [ + "▁mise", + -10.936684608459473 + ], + [ + "▁gute", + -10.936728477478027 + ], + [ + "gli", + -10.936939239501953 + ], + [ + "▁Bu", + -10.937679290771484 + ], + [ + "▁reduction", + -10.937917709350586 + ], + [ + "acy", + -10.938126564025879 + ], + [ + "aga", + -10.938161849975586 + ], + [ + "▁Sc", + -10.938273429870605 + ], + [ + "▁Informationen", + -10.938308715820312 + ], + [ + "▁kommen", + -10.938352584838867 + ], + [ + "press", + -10.93837833404541 + ], + [ + "▁bridge", + -10.938379287719727 + ], + [ + "▁qualified", + -10.938671112060547 + ], + [ + "position", + -10.938821792602539 + ], + [ + "▁combat", + -10.938933372497559 + ], + [ + "!\"", + -10.938993453979492 + ], + [ + "eva", + -10.939217567443848 + ], + [ + "oase", + -10.939380645751953 + ], + [ + "▁inner", + -10.939410209655762 + ], + [ + "▁loans", + -10.939720153808594 + ], + [ + "made", + -10.939786911010742 + ], + [ + "▁Mexico", + -10.93993091583252 + ], + [ + "▁formal", + -10.940092086791992 + ], + [ + "▁fell", + -10.94021987915039 + ], + [ + "91", + -10.940524101257324 + ], + [ + "▁campus", + -10.9407320022583 + ], + [ + "ienne", + -10.940869331359863 + ], + [ + "▁framework", + -10.94105339050293 + ], + [ + "ncing", + -10.941157341003418 + ], + [ + "▁Para", + -10.941222190856934 + ], + [ + "▁password", + -10.941298484802246 + ], + [ + "▁sei", + -10.941422462463379 + ], + [ + "▁Cross", + -10.941532135009766 + ], + [ + "▁Ten", + -10.941873550415039 + ], + [ + "bank", + -10.941887855529785 + ], + [ + "▁gun", + -10.942000389099121 + ], + [ + "ient", + -10.942021369934082 + ], + [ + "▁usage", + -10.942176818847656 + ], + [ + "▁(2", + -10.942278861999512 + ], + [ + "Gra", + -10.942320823669434 + ], + [ + "▁prea", + -10.94253158569336 + ], + [ + "▁Als", + -10.942619323730469 + ], + [ + "▁finance", + -10.942638397216797 + ], + [ + "tate", + -10.942665100097656 + ], + [ + "ition", + -10.942703247070312 + ], + [ + "▁regulations", + -10.942741394042969 + ], + [ + "▁Professional", + -10.943001747131348 + ], + [ + "▁pl", + -10.94336986541748 + ], + [ + "▁SEO", + -10.943472862243652 + ], + [ + "▁trecut", + -10.943487167358398 + ], + [ + "▁aller", + -10.943509101867676 + ], + [ + "▁violence", + -10.943986892700195 + ], + [ + "▁membership", + -10.944117546081543 + ], + [ + "▁picked", + -10.944162368774414 + ], + [ + "▁collected", + -10.9443359375 + ], + [ + "▁extended", + -10.944449424743652 + ], + [ + "▁religious", + -10.944661140441895 + ], + [ + "▁salle", + -10.944767951965332 + ], + [ + "RA", + -10.944781303405762 + ], + [ + "▁blend", + -10.945232391357422 + ], + [ + "▁Min", + -10.94532299041748 + ], + [ + "kal", + -10.945887565612793 + ], + [ + "▁featuring", + -10.945902824401855 + ], + [ + "▁researchers", + -10.946263313293457 + ], + [ + "▁Search", + -10.946558952331543 + ], + [ + "CE", + -10.946675300598145 + ], + [ + "▁recognized", + -10.94682502746582 + ], + [ + "▁semi", + -10.94692611694336 + ], + [ + "▁exposure", + -10.94718074798584 + ], + [ + "grew", + -10.947466850280762 + ], + [ + "▁candidate", + -10.948250770568848 + ], + [ + "▁shares", + -10.948908805847168 + ], + [ + "▁edit", + -10.949745178222656 + ], + [ + "CS", + -10.949905395507812 + ], + [ + "▁Cl", + -10.950240135192871 + ], + [ + "▁Enjoy", + -10.951438903808594 + ], + [ + "▁hurt", + -10.951482772827148 + ], + [ + "▁bottle", + 
-10.951593399047852 + ], + [ + "▁Buy", + -10.95159912109375 + ], + [ + "▁superior", + -10.952286720275879 + ], + [ + "▁missed", + -10.952424049377441 + ], + [ + "▁workshop", + -10.952433586120605 + ], + [ + "action", + -10.952437400817871 + ], + [ + "ple", + -10.952699661254883 + ], + [ + "▁Schul", + -10.952814102172852 + ], + [ + "▁houses", + -10.953080177307129 + ], + [ + "▁2017,", + -10.953569412231445 + ], + [ + "▁killed", + -10.953750610351562 + ], + [ + "▁calendar", + -10.954306602478027 + ], + [ + "▁Mike", + -10.954597473144531 + ], + [ + "FA", + -10.954627990722656 + ], + [ + "nut", + -10.95487117767334 + ], + [ + "▁establish", + -10.955140113830566 + ], + [ + "▁alcohol", + -10.95514965057373 + ], + [ + "▁closely", + -10.955170631408691 + ], + [ + "▁MA", + -10.955381393432617 + ], + [ + "pul", + -10.955389022827148 + ], + [ + "▁defined", + -10.955666542053223 + ], + [ + "aires", + -10.955692291259766 + ], + [ + "▁Shi", + -10.955703735351562 + ], + [ + "▁plays", + -10.956303596496582 + ], + [ + "▁sister", + -10.95690631866455 + ], + [ + "▁cable", + -10.957179069519043 + ], + [ + "▁desk", + -10.957215309143066 + ], + [ + "▁apoi", + -10.957738876342773 + ], + [ + "▁identity", + -10.95785140991211 + ], + [ + "▁stars", + -10.957931518554688 + ], + [ + "▁fata", + -10.958008766174316 + ], + [ + "▁obvious", + -10.958330154418945 + ], + [ + "▁dental", + -10.95843505859375 + ], + [ + "AM", + -10.958802223205566 + ], + [ + "▁sharp", + -10.95881175994873 + ], + [ + "duc", + -10.959053993225098 + ], + [ + "▁manufacturer", + -10.95914077758789 + ], + [ + "!)", + -10.959270477294922 + ], + [ + "▁objects", + -10.959720611572266 + ], + [ + "▁Ag", + -10.959989547729492 + ], + [ + "referred", + -10.960195541381836 + ], + [ + "▁Ak", + -10.960308074951172 + ], + [ + "burg", + -10.960360527038574 + ], + [ + "▁nouveau", + -10.960854530334473 + ], + [ + "▁Pal", + -10.960994720458984 + ], + [ + "▁Arbeits", + -10.961280822753906 + ], + [ + "▁personally", + -10.961288452148438 + ], + [ + "▁Dé", + -10.961292266845703 + ], + [ + "▁import", + -10.961688041687012 + ], + [ + "▁justice", + -10.961913108825684 + ], + [ + "▁photography", + -10.962705612182617 + ], + [ + "▁portfolio", + -10.962841987609863 + ], + [ + "56", + -10.96314525604248 + ], + [ + "▁nouvelle", + -10.963293075561523 + ], + [ + "▁oven", + -10.964197158813477 + ], + [ + "▁400", + -10.964272499084473 + ], + [ + "▁mixed", + -10.964395523071289 + ], + [ + "▁relax", + -10.964427947998047 + ], + [ + "▁imp", + -10.964703559875488 + ], + [ + "▁».", + -10.964734077453613 + ], + [ + "▁mail", + -10.964777946472168 + ], + [ + "rage", + -10.964861869812012 + ], + [ + "nos", + -10.964974403381348 + ], + [ + "▁drugs", + -10.965195655822754 + ], + [ + "▁jede", + -10.965211868286133 + ], + [ + "▁einige", + -10.965232849121094 + ], + [ + "▁8.", + -10.965325355529785 + ], + [ + "ters", + -10.965412139892578 + ], + [ + "▁electrical", + -10.965432167053223 + ], + [ + "▁puis", + -10.965836524963379 + ], + [ + "▁films", + -10.965903282165527 + ], + [ + "41", + -10.966036796569824 + ], + [ + "▁moral", + -10.966398239135742 + ], + [ + "lage", + -10.966402053833008 + ], + [ + "▁spaces", + -10.966415405273438 + ], + [ + "▁Ed", + -10.966462135314941 + ], + [ + "▁classroom", + -10.966588020324707 + ], + [ + "▁große", + -10.966588973999023 + ], + [ + "▁baza", + -10.966887474060059 + ], + [ + "face", + -10.967308044433594 + ], + [ + "▁informed", + -10.967333793640137 + ], + [ + "▁improving", + -10.967477798461914 + ], + [ + "▁guidance", + -10.967880249023438 + ], + [ + 
"▁gallery", + -10.96800708770752 + ], + [ + "cular", + -10.968046188354492 + ], + [ + "53", + -10.968094825744629 + ], + [ + "Despite", + -10.968238830566406 + ], + [ + "▁forme", + -10.968304634094238 + ], + [ + "▁système", + -10.968415260314941 + ], + [ + "▁Win", + -10.968494415283203 + ], + [ + "▁Small", + -10.968537330627441 + ], + [ + "▁Mobile", + -10.968564987182617 + ], + [ + "▁tape", + -10.968606948852539 + ], + [ + "▁erhalten", + -10.968914985656738 + ], + [ + "▁movies", + -10.968928337097168 + ], + [ + "▁Unfortunately", + -10.968963623046875 + ], + [ + "▁Looking", + -10.96945858001709 + ], + [ + "▁guard", + -10.969584465026855 + ], + [ + "▁pr", + -10.969820976257324 + ], + [ + "▁confident", + -10.96988582611084 + ], + [ + "BA", + -10.970229148864746 + ], + [ + "bas", + -10.970272064208984 + ], + [ + "hum", + -10.97050666809082 + ], + [ + "ular", + -10.9705171585083 + ], + [ + "▁Still", + -10.970593452453613 + ], + [ + "▁flavor", + -10.970656394958496 + ], + [ + "▁boost", + -10.970773696899414 + ], + [ + "▁division", + -10.970842361450195 + ], + [ + "ising", + -10.971006393432617 + ], + [ + "▁monitoring", + -10.971044540405273 + ], + [ + "▁Sen", + -10.97105884552002 + ], + [ + "▁https", + -10.971527099609375 + ], + [ + "mainly", + -10.971735000610352 + ], + [ + "play", + -10.972251892089844 + ], + [ + "▁dynamic", + -10.972357749938965 + ], + [ + "▁coup", + -10.972370147705078 + ], + [ + "▁carpet", + -10.972561836242676 + ], + [ + "iner", + -10.972846984863281 + ], + [ + "ral", + -10.97325611114502 + ], + [ + "iser", + -10.973320007324219 + ], + [ + "RC", + -10.9739990234375 + ], + [ + "▁definition", + -10.97475814819336 + ], + [ + "▁Za", + -10.974767684936523 + ], + [ + "friendly", + -10.974883079528809 + ], + [ + "43", + -10.975123405456543 + ], + [ + "link", + -10.975180625915527 + ], + [ + "▁Multi", + -10.97519302368164 + ], + [ + "▁einmal", + -10.975272178649902 + ], + [ + "▁stopped", + -10.975394248962402 + ], + [ + "vel", + -10.975456237792969 + ], + [ + "▁ongoing", + -10.975565910339355 + ], + [ + "▁ancient", + -10.976259231567383 + ], + [ + "take", + -10.976301193237305 + ], + [ + "cia", + -10.976432800292969 + ], + [ + "▁USB", + -10.976545333862305 + ], + [ + "▁attorney", + -10.976866722106934 + ], + [ + "▁slot", + -10.976866722106934 + ], + [ + "▁Line", + -10.97693157196045 + ], + [ + "rice", + -10.977087020874023 + ], + [ + "ify", + -10.977520942687988 + ], + [ + "ó", + -10.978260040283203 + ], + [ + "▁flash", + -10.978483200073242 + ], + [ + "▁extension", + -10.978555679321289 + ], + [ + "▁Ende", + -10.979022979736328 + ], + [ + "▁powder", + -10.979114532470703 + ], + [ + "ească", + -10.979143142700195 + ], + [ + "03", + -10.979327201843262 + ], + [ + "▁normally", + -10.979416847229004 + ], + [ + "▁pun", + -10.980108261108398 + ], + [ + "viewed", + -10.980138778686523 + ], + [ + "ssen", + -10.980896949768066 + ], + [ + "ache", + -10.981121063232422 + ], + [ + "ește", + -10.98122787475586 + ], + [ + "▁PA", + -10.981266021728516 + ], + [ + "FI", + -10.981945991516113 + ], + [ + "▁Frank", + -10.98198127746582 + ], + [ + "▁apa", + -10.98242473602295 + ], + [ + "▁coast", + -10.982614517211914 + ], + [ + "▁boy", + -10.982665061950684 + ], + [ + "lim", + -10.982902526855469 + ], + [ + "▁putin", + -10.983194351196289 + ], + [ + "▁script", + -10.983332633972168 + ], + [ + "▁noticed", + -10.9837007522583 + ], + [ + "▁dealing", + -10.983922004699707 + ], + [ + "▁Trans", + -10.984100341796875 + ], + [ + "▁border", + -10.984447479248047 + ], + [ + "▁reputation", + 
-10.984657287597656 + ], + [ + "-2", + -10.984662055969238 + ], + [ + "HS", + -10.984707832336426 + ], + [ + "▁supports", + -10.984724998474121 + ], + [ + "▁horse", + -10.985146522521973 + ], + [ + "nik", + -10.98520565032959 + ], + [ + "▁clothes", + -10.985234260559082 + ], + [ + "▁Card", + -10.985612869262695 + ], + [ + "▁relief", + -10.98595905303955 + ], + [ + "▁Visit", + -10.986259460449219 + ], + [ + "▁luni", + -10.986593246459961 + ], + [ + "81", + -10.986693382263184 + ], + [ + "qua", + -10.986945152282715 + ], + [ + "▁Comp", + -10.98697280883789 + ], + [ + "▁investigation", + -10.987137794494629 + ], + [ + "▁depth", + -10.987598419189453 + ], + [ + "▁earned", + -10.987709045410156 + ], + [ + "▁Ren", + -10.988090515136719 + ], + [ + "▁Dumnezeu", + -10.988107681274414 + ], + [ + "▁Joe", + -10.988210678100586 + ], + [ + "▁goods", + -10.988288879394531 + ], + [ + "▁Vol", + -10.988686561584473 + ], + [ + "▁certified", + -10.989118576049805 + ], + [ + "▁favor", + -10.989326477050781 + ], + [ + "▁Scott", + -10.989599227905273 + ], + [ + "▁protest", + -10.989802360534668 + ], + [ + "▁pace", + -10.989803314208984 + ], + [ + "▁Angeles", + -10.990368843078613 + ], + [ + "inch", + -10.99050521850586 + ], + [ + "▁charged", + -10.99052619934082 + ], + [ + "code", + -10.990968704223633 + ], + [ + "▁convenient", + -10.99138355255127 + ], + [ + "▁Nord", + -10.991556167602539 + ], + [ + "▁yesterday", + -10.991691589355469 + ], + [ + "Dacă", + -10.99169635772705 + ], + [ + "▁Travel", + -10.991786003112793 + ], + [ + "▁kid", + -10.991941452026367 + ], + [ + "ction", + -10.991986274719238 + ], + [ + "▁groupe", + -10.992770195007324 + ], + [ + "pu", + -10.993056297302246 + ], + [ + "bzw", + -10.993196487426758 + ], + [ + "▁mixture", + -10.993513107299805 + ], + [ + "▁Farm", + -10.993715286254883 + ], + [ + "▁acces", + -10.993939399719238 + ], + [ + "matic", + -10.993950843811035 + ], + [ + "▁comparison", + -10.994006156921387 + ], + [ + "reich", + -10.994095802307129 + ], + [ + "pet", + -10.994502067565918 + ], + [ + "▁lit", + -10.994685173034668 + ], + [ + "▁organized", + -10.99476432800293 + ], + [ + "just", + -10.995564460754395 + ], + [ + "▁fellow", + -10.996004104614258 + ], + [ + "Ver", + -10.996209144592285 + ], + [ + "▁trends", + -10.99622631072998 + ], + [ + "▁evaluation", + -10.99626636505127 + ], + [ + "feld", + -10.99639892578125 + ], + [ + "▁Pu", + -10.99671459197998 + ], + [ + "▁equipped", + -10.99727725982666 + ], + [ + "▁catre", + -10.997278213500977 + ], + [ + "eck", + -10.997369766235352 + ], + [ + "▁facing", + -10.997998237609863 + ], + [ + "▁instrument", + -10.998361587524414 + ], + [ + "▁pleased", + -10.998507499694824 + ], + [ + "▁tap", + -10.998818397521973 + ], + [ + "dom", + -10.998826026916504 + ], + [ + "▁pump", + -10.999384880065918 + ], + [ + "▁functional", + -10.999429702758789 + ], + [ + "▁authority", + -10.999455451965332 + ], + [ + "▁experiment", + -10.999478340148926 + ], + [ + "LO", + -10.999529838562012 + ], + [ + "▁scheduled", + -10.999552726745605 + ], + [ + "halt", + -10.999604225158691 + ], + [ + "▁ceiling", + -10.999761581420898 + ], + [ + "▁Step", + -11.000310897827148 + ], + [ + "▁orders", + -11.00032901763916 + ], + [ + "▁speech", + -11.001046180725098 + ], + [ + "▁stands", + -11.001119613647461 + ], + [ + "▁disc", + -11.001920700073242 + ], + [ + "▁rec", + -11.001935958862305 + ], + [ + "▁Text", + -11.00243854522705 + ], + [ + "▁banks", + -11.00294017791748 + ], + [ + "▁oameni", + -11.003045082092285 + ], + [ + "▁communications", + -11.003194808959961 + ], + 
[ + "trag", + -11.003307342529297 + ], + [ + "▁trail", + -11.003803253173828 + ], + [ + "AN", + -11.00426197052002 + ], + [ + "▁Federal", + -11.004467964172363 + ], + [ + "▁quote", + -11.00455093383789 + ], + [ + "▁spus", + -11.004620552062988 + ], + [ + "▁managing", + -11.004990577697754 + ], + [ + "▁booking", + -11.00505256652832 + ], + [ + "▁Blog", + -11.005669593811035 + ], + [ + "▁tank", + -11.005681991577148 + ], + [ + "pon", + -11.005804061889648 + ], + [ + "GE", + -11.00582218170166 + ], + [ + "▁fiscal", + -11.005871772766113 + ], + [ + "▁satisfaction", + -11.006044387817383 + ], + [ + "cre", + -11.00614070892334 + ], + [ + "▁protected", + -11.006494522094727 + ], + [ + "▁enfants", + -11.006782531738281 + ], + [ + "▁dort", + -11.007554054260254 + ], + [ + "▁Mel", + -11.008041381835938 + ], + [ + "▁turns", + -11.00804615020752 + ], + [ + "▁savings", + -11.008106231689453 + ], + [ + "▁voir", + -11.008358001708984 + ], + [ + "▁Boston", + -11.008394241333008 + ], + [ + "▁debate", + -11.008469581604004 + ], + [ + "▁SO", + -11.008857727050781 + ], + [ + "▁tables", + -11.009193420410156 + ], + [ + "▁honest", + -11.009210586547852 + ], + [ + "mate", + -11.009283065795898 + ], + [ + "▁chart", + -11.0094633102417 + ], + [ + "decât", + -11.009682655334473 + ], + [ + "▁Radio", + -11.009685516357422 + ], + [ + "54", + -11.00986385345459 + ], + [ + "▁vol", + -11.010008811950684 + ], + [ + "last", + -11.010148048400879 + ], + [ + "▁tall", + -11.010408401489258 + ], + [ + "▁Should", + -11.010489463806152 + ], + [ + "▁sink", + -11.010525703430176 + ], + [ + "▁Right", + -11.010527610778809 + ], + [ + "▁male", + -11.010720252990723 + ], + [ + "▁Modern", + -11.010753631591797 + ], + [ + "▁indeed", + -11.010886192321777 + ], + [ + "▁Garden", + -11.011139869689941 + ], + [ + "▁Mod", + -11.011307716369629 + ], + [ + "▁turning", + -11.0115327835083 + ], + [ + "▁inches", + -11.011557579040527 + ], + [ + "▁Police", + -11.01183795928955 + ], + [ + "▁Pay", + -11.012016296386719 + ], + [ + "UE", + -11.0126371383667 + ], + [ + "mé", + -11.012652397155762 + ], + [ + "EE", + -11.013046264648438 + ], + [ + "▁cookies", + -11.013116836547852 + ], + [ + "rip", + -11.013351440429688 + ], + [ + "▁Motor", + -11.01352310180664 + ], + [ + "▁lung", + -11.01379680633545 + ], + [ + "▁Ap", + -11.013995170593262 + ], + [ + "▁sustainable", + -11.014066696166992 + ], + [ + "▁instant", + -11.014240264892578 + ], + [ + "▁Rose", + -11.014464378356934 + ], + [ + "▁Carolina", + -11.014906883239746 + ], + [ + "▁Help", + -11.014969825744629 + ], + [ + "IE", + -11.01535701751709 + ], + [ + "▁Jersey", + -11.015522956848145 + ], + [ + "▁Spanish", + -11.015586853027344 + ], + [ + "▁wheel", + -11.015660285949707 + ], + [ + "▁fishing", + -11.0158109664917 + ], + [ + "gram", + -11.015937805175781 + ], + [ + "▁ST", + -11.016227722167969 + ], + [ + "▁Nov", + -11.01632022857666 + ], + [ + "▁reporting", + -11.016362190246582 + ], + [ + "ked", + -11.016467094421387 + ], + [ + "▁Leben", + -11.016557693481445 + ], + [ + "▁organisation", + -11.016843795776367 + ], + [ + "▁tiny", + -11.017144203186035 + ], + [ + "▁Alex", + -11.017236709594727 + ], + [ + "▁obtained", + -11.017255783081055 + ], + [ + "▁Acest", + -11.017367362976074 + ], + [ + "▁dangerous", + -11.01749038696289 + ], + [ + "utter", + -11.017624855041504 + ], + [ + "▁rev", + -11.01801586151123 + ], + [ + "Un", + -11.018242835998535 + ], + [ + "▁revealed", + -11.018356323242188 + ], + [ + "▁decade", + -11.018709182739258 + ], + [ + "▁possibility", + -11.01945686340332 + ], + [ + "service", 
+ -11.019577980041504 + ], + [ + "è", + -11.01966667175293 + ], + [ + "▁Chief", + -11.019674301147461 + ], + [ + "▁Durch", + -11.019795417785645 + ], + [ + "▁cadre", + -11.019843101501465 + ], + [ + "▁wearing", + -11.019845008850098 + ], + [ + "sized", + -11.01988410949707 + ], + [ + "LY", + -11.01989459991455 + ], + [ + "▁unser", + -11.019963264465332 + ], + [ + "▁2016,", + -11.019988059997559 + ], + [ + "▁fail", + -11.020028114318848 + ], + [ + "iques", + -11.020115852355957 + ], + [ + "▁Angel", + -11.020315170288086 + ], + [ + "▁transportation", + -11.020364761352539 + ], + [ + "▁dates", + -11.020395278930664 + ], + [ + "▁danger", + -11.020731925964355 + ], + [ + "▁forum", + -11.020828247070312 + ], + [ + "zug", + -11.020885467529297 + ], + [ + "▁filed", + -11.021199226379395 + ], + [ + "loc", + -11.021201133728027 + ], + [ + "éri", + -11.021234512329102 + ], + [ + "tribu", + -11.021393775939941 + ], + [ + "▁entered", + -11.021639823913574 + ], + [ + "▁porte", + -11.021928787231445 + ], + [ + "▁arts", + -11.021979331970215 + ], + [ + "▁reform", + -11.022001266479492 + ], + [ + "▁Main", + -11.022101402282715 + ], + [ + "▁dir", + -11.022111892700195 + ], + [ + "▁approval", + -11.022465705871582 + ], + [ + "▁juice", + -11.022750854492188 + ], + [ + "vier", + -11.022771835327148 + ], + [ + "▁nivel", + -11.02318000793457 + ], + [ + "▁returns", + -11.023423194885254 + ], + [ + "▁formed", + -11.023723602294922 + ], + [ + "▁combine", + -11.02436351776123 + ], + [ + "▁cours", + -11.024392127990723 + ], + [ + "▁Standard", + -11.024463653564453 + ], + [ + "▁certification", + -11.024677276611328 + ], + [ + "escu", + -11.024996757507324 + ], + [ + "▁achieved", + -11.025278091430664 + ], + [ + "▁Model", + -11.025280952453613 + ], + [ + "rul", + -11.025404930114746 + ], + [ + "▁Tage", + -11.025530815124512 + ], + [ + "▁injuries", + -11.02560806274414 + ], + [ + "▁Sal", + -11.025671005249023 + ], + [ + "▁expenses", + -11.025887489318848 + ], + [ + "▁cet", + -11.026009559631348 + ], + [ + "▁taxes", + -11.026028633117676 + ], + [ + "diesen", + -11.02626895904541 + ], + [ + "▁fairly", + -11.026638984680176 + ], + [ + "▁Access", + -11.026866912841797 + ], + [ + "wind", + -11.027122497558594 + ], + [ + "IM", + -11.027252197265625 + ], + [ + "ense", + -11.027548789978027 + ], + [ + "▁hang", + -11.027957916259766 + ], + [ + "▁citizens", + -11.028020858764648 + ], + [ + "3%", + -11.028101921081543 + ], + [ + "lum", + -11.028268814086914 + ], + [ + "▁discussed", + -11.028326034545898 + ], + [ + "AC", + -11.02841854095459 + ], + [ + "‘", + -11.0286865234375 + ], + [ + "▁Sol", + -11.028698921203613 + ], + [ + "06", + -11.028816223144531 + ], + [ + "stellen", + -11.029170989990234 + ], + [ + "▁participation", + -11.02917194366455 + ], + [ + "▁Box", + -11.029200553894043 + ], + [ + "▁bieten", + -11.029687881469727 + ], + [ + "▁Louis", + -11.029730796813965 + ], + [ + "▁lessons", + -11.029789924621582 + ], + [ + "▁visible", + -11.029966354370117 + ], + [ + "▁Cam", + -11.030128479003906 + ], + [ + "▁Ban", + -11.03053092956543 + ], + [ + "▁Far", + -11.03060245513916 + ], + [ + "▁travers", + -11.030759811401367 + ], + [ + "▁telling", + -11.030808448791504 + ], + [ + "▁magic", + -11.030855178833008 + ], + [ + "▁Night", + -11.031316757202148 + ], + [ + "▁judge", + -11.031400680541992 + ], + [ + "▁Pat", + -11.031482696533203 + ], + [ + "▁Southern", + -11.031901359558105 + ], + [ + "OL", + -11.031929969787598 + ], + [ + "fully", + -11.032191276550293 + ], + [ + "▁acestea", + -11.03223705291748 + ], + [ + "▁Order", + 
-11.032383918762207 + ], + [ + "▁facut", + -11.032523155212402 + ], + [ + "▁Matt", + -11.032600402832031 + ], + [ + "registr", + -11.03278923034668 + ], + [ + "▁Yet", + -11.032811164855957 + ], + [ + "ß", + -11.033596992492676 + ], + [ + "▁făcut", + -11.033618927001953 + ], + [ + "▁versions", + -11.033780097961426 + ], + [ + "▁Force", + -11.03396224975586 + ], + [ + "rick", + -11.034153938293457 + ], + [ + "▁rund", + -11.034563064575195 + ], + [ + "ike", + -11.034658432006836 + ], + [ + "▁Young", + -11.034675598144531 + ], + [ + "▁ski", + -11.034927368164062 + ], + [ + "CU", + -11.035385131835938 + ], + [ + "▁Second", + -11.035510063171387 + ], + [ + "▁graduate", + -11.03554916381836 + ], + [ + "▁Bible", + -11.036049842834473 + ], + [ + "▁vary", + -11.036060333251953 + ], + [ + "▁celebration", + -11.036151885986328 + ], + [ + "▁risks", + -11.036210060119629 + ], + [ + "erii", + -11.036327362060547 + ], + [ + "rance", + -11.036577224731445 + ], + [ + "▁MP", + -11.036787986755371 + ], + [ + "▁tale", + -11.036788940429688 + ], + [ + "▁Ford", + -11.037044525146484 + ], + [ + "▁attached", + -11.037278175354004 + ], + [ + "▁Sy", + -11.037312507629395 + ], + [ + "▁Ly", + -11.03765869140625 + ], + [ + "stellung", + -11.037687301635742 + ], + [ + "▁trop", + -11.0377197265625 + ], + [ + "▁années", + -11.037736892700195 + ], + [ + "▁linked", + -11.03792667388916 + ], + [ + "pit", + -11.038352012634277 + ], + [ + "So", + -11.03835391998291 + ], + [ + "ţe", + -11.038473129272461 + ], + [ + "▁origin", + -11.038509368896484 + ], + [ + "▁boys", + -11.039263725280762 + ], + [ + "holder", + -11.039352416992188 + ], + [ + "read", + -11.039461135864258 + ], + [ + "▁relative", + -11.03950023651123 + ], + [ + "▁industries", + -11.03958511352539 + ], + [ + "making", + -11.039688110351562 + ], + [ + "▁tun", + -11.039917945861816 + ], + [ + "▁forced", + -11.041061401367188 + ], + [ + "▁Welcome", + -11.041086196899414 + ], + [ + "▁explained", + -11.041138648986816 + ], + [ + "MP", + -11.041389465332031 + ], + [ + "▁Three", + -11.041613578796387 + ], + [ + "aza", + -11.041768074035645 + ], + [ + "▁1999", + -11.041924476623535 + ], + [ + "▁erst", + -11.042237281799316 + ], + [ + "RS", + -11.042623519897461 + ], + [ + "▁attractive", + -11.04279899597168 + ], + [ + "▁visited", + -11.042805671691895 + ], + [ + "▁nom", + -11.042874336242676 + ], + [ + "▁drum", + -11.042933464050293 + ], + [ + "cast", + -11.043068885803223 + ], + [ + "ogen", + -11.043105125427246 + ], + [ + "▁tech", + -11.04360294342041 + ], + [ + "▁Comment", + -11.043664932250977 + ], + [ + "▁Little", + -11.04405689239502 + ], + [ + "▁suggested", + -11.044086456298828 + ], + [ + "▁gar", + -11.044205665588379 + ], + [ + "▁crack", + -11.04458999633789 + ], + [ + "▁shooting", + -11.044676780700684 + ], + [ + "▁Try", + -11.044759750366211 + ], + [ + "▁Remember", + -11.045008659362793 + ], + [ + "▁folks", + -11.045217514038086 + ], + [ + "▁MS", + -11.045512199401855 + ], + [ + "▁Dia", + -11.04584789276123 + ], + [ + "3)", + -11.046561241149902 + ], + [ + "arbeit", + -11.04697036743164 + ], + [ + "▁pepper", + -11.047065734863281 + ], + [ + "zz", + -11.047107696533203 + ], + [ + "▁extreme", + -11.047235488891602 + ], + [ + "▁extrem", + -11.047367095947266 + ], + [ + "▁severe", + -11.047768592834473 + ], + [ + "▁networks", + -11.047882080078125 + ], + [ + "păr", + -11.047910690307617 + ], + [ + "sent", + -11.047933578491211 + ], + [ + "▁structures", + -11.048048973083496 + ], + [ + "▁Join", + -11.048078536987305 + ], + [ + "▁privind", + -11.048255920410156 + ], 
+ [ + "▁marriage", + -11.04865837097168 + ], + [ + "▁liegt", + -11.048918724060059 + ], + [ + "eben", + -11.048995971679688 + ], + [ + "▁produse", + -11.049076080322266 + ], + [ + "▁tested", + -11.049090385437012 + ], + [ + "▁Queen", + -11.049134254455566 + ], + [ + "▁Tax", + -11.049687385559082 + ], + [ + "rian", + -11.049710273742676 + ], + [ + "▁Problem", + -11.050151824951172 + ], + [ + "izat", + -11.05023193359375 + ], + [ + "udi", + -11.050324440002441 + ], + [ + "▁LA", + -11.050718307495117 + ], + [ + "▁afford", + -11.051108360290527 + ], + [ + "▁percentage", + -11.05121898651123 + ], + [ + "▁cute", + -11.051547050476074 + ], + [ + "▁gorgeous", + -11.051891326904297 + ], + [ + "▁indoor", + -11.05190372467041 + ], + [ + "▁configuration", + -11.052103042602539 + ], + [ + "▁immediate", + -11.052303314208984 + ], + [ + "▁exemple", + -11.052450180053711 + ], + [ + "▁Being", + -11.052550315856934 + ], + [ + "▁introduction", + -11.052591323852539 + ], + [ + "ella", + -11.053206443786621 + ], + [ + "bare", + -11.053521156311035 + ], + [ + "▁besser", + -11.053539276123047 + ], + [ + "▁Put", + -11.053740501403809 + ], + [ + "gon", + -11.054248809814453 + ], + [ + "▁Italy", + -11.054259300231934 + ], + [ + "▁Thus", + -11.05435562133789 + ], + [ + "tari", + -11.054437637329102 + ], + [ + "0.000", + -11.054460525512695 + ], + [ + "▁Price", + -11.054651260375977 + ], + [ + "▁Trust", + -11.054824829101562 + ], + [ + "▁contra", + -11.054863929748535 + ], + [ + "▁layout", + -11.05504035949707 + ], + [ + "▁Ireland", + -11.055187225341797 + ], + [ + "ctor", + -11.055344581604004 + ], + [ + "atoare", + -11.055540084838867 + ], + [ + "pra", + -11.055729866027832 + ], + [ + "rent", + -11.055892944335938 + ], + [ + "▁Seite", + -11.05605411529541 + ], + [ + "▁ori", + -11.056280136108398 + ], + [ + "spiel", + -11.056541442871094 + ], + [ + "▁Times", + -11.056883811950684 + ], + [ + "primarily", + -11.056974411010742 + ], + [ + "nov", + -11.05703067779541 + ], + [ + "▁desired", + -11.057061195373535 + ], + [ + "▁Would", + -11.057072639465332 + ], + [ + "PL", + -11.057225227355957 + ], + [ + "▁originally", + -11.057367324829102 + ], + [ + "▁Ana", + -11.057463645935059 + ], + [ + "EN", + -11.05754566192627 + ], + [ + "▁occasion", + -11.05755615234375 + ], + [ + "▁grant", + -11.057572364807129 + ], + [ + "igkeit", + -11.057975769042969 + ], + [ + "▁scheme", + -11.058146476745605 + ], + [ + "▁2015.", + -11.058621406555176 + ], + [ + "izare", + -11.058778762817383 + ], + [ + "gate", + -11.058792114257812 + ], + [ + "▁poker", + -11.058899879455566 + ], + [ + "pping", + -11.058998107910156 + ], + [ + "▁Wild", + -11.059511184692383 + ], + [ + "▁YouTube", + -11.059995651245117 + ], + [ + "▁assume", + -11.060284614562988 + ], + [ + "с", + -11.060614585876465 + ], + [ + "▁rapport", + -11.060623168945312 + ], + [ + "▁labor", + -11.060996055603027 + ], + [ + "teur", + -11.061041831970215 + ], + [ + "▁genre", + -11.06116008758545 + ], + [ + "▁plat", + -11.061745643615723 + ], + [ + "▁listening", + -11.061750411987305 + ], + [ + "sky", + -11.061777114868164 + ], + [ + "▁neighborhood", + -11.061782836914062 + ], + [ + "▁3-", + -11.062150001525879 + ], + [ + "▁Library", + -11.062162399291992 + ], + [ + "agit", + -11.062249183654785 + ], + [ + "▁platforms", + -11.062849998474121 + ], + [ + "bei", + -11.062882423400879 + ], + [ + "AB", + -11.062897682189941 + ], + [ + "▁manufacturers", + -11.06295394897461 + ], + [ + "▁printing", + -11.063141822814941 + ], + [ + "▁crisis", + -11.063326835632324 + ], + [ + "▁Smart", + 
-11.06335163116455 + ], + [ + "▁drawing", + -11.063406944274902 + ], + [ + "MO", + -11.06348991394043 + ], + [ + "▁durable", + -11.063569068908691 + ], + [ + "chant", + -11.0636625289917 + ], + [ + "▁chemical", + -11.063764572143555 + ], + [ + "▁savoir", + -11.063776016235352 + ], + [ + "▁Max", + -11.063802719116211 + ], + [ + "gestellt", + -11.06380844116211 + ], + [ + "▁rural", + -11.063854217529297 + ], + [ + "52", + -11.064105033874512 + ], + [ + "▁invited", + -11.064169883728027 + ], + [ + "▁fil", + -11.0642728805542 + ], + [ + "▁Rob", + -11.064284324645996 + ], + [ + "▁Bell", + -11.064387321472168 + ], + [ + "▁neck", + -11.064831733703613 + ], + [ + "pac", + -11.064879417419434 + ], + [ + "wal", + -11.06491470336914 + ], + [ + "▁là", + -11.064922332763672 + ], + [ + "▁Virginia", + -11.065081596374512 + ], + [ + "▁applicable", + -11.06509017944336 + ], + [ + "▁abuse", + -11.065153121948242 + ], + [ + "aide", + -11.065321922302246 + ], + [ + "▁increases", + -11.065396308898926 + ], + [ + "▁moi", + -11.065568923950195 + ], + [ + "▁Non", + -11.065577507019043 + ], + [ + "▁Produkt", + -11.065627098083496 + ], + [ + "FC", + -11.065644264221191 + ], + [ + "▁shops", + -11.065677642822266 + ], + [ + "▁prendre", + -11.065923690795898 + ], + [ + "atul", + -11.065990447998047 + ], + [ + "▁sal", + -11.066137313842773 + ], + [ + "▁société", + -11.06627082824707 + ], + [ + "▁Hot", + -11.066329002380371 + ], + [ + "rim", + -11.066587448120117 + ], + [ + "gue", + -11.06661605834961 + ], + [ + "▁enterprise", + -11.066624641418457 + ], + [ + "▁33", + -11.067329406738281 + ], + [ + "mittel", + -11.067395210266113 + ], + [ + "ged", + -11.067439079284668 + ], + [ + "▁formula", + -11.06777286529541 + ], + [ + "▁spin", + -11.067784309387207 + ], + [ + "als", + -11.067826271057129 + ], + [ + "2%", + -11.06785774230957 + ], + [ + "bon", + -11.068192481994629 + ], + [ + "▁Executive", + -11.068323135375977 + ], + [ + "▁wirklich", + -11.068427085876465 + ], + [ + "îl", + -11.068608283996582 + ], + [ + "1.", + -11.068917274475098 + ], + [ + "▁Arm", + -11.069157600402832 + ], + [ + "▁rid", + -11.069358825683594 + ], + [ + "aries", + -11.069727897644043 + ], + [ + "▁incident", + -11.06982421875 + ], + [ + "▁copii", + -11.070008277893066 + ], + [ + "▁Charles", + -11.070141792297363 + ], + [ + "▁meals", + -11.070147514343262 + ], + [ + "▁wireless", + -11.070237159729004 + ], + [ + "Ex", + -11.070364952087402 + ], + [ + "▁Financial", + -11.070540428161621 + ], + [ + "▁AM", + -11.070615768432617 + ], + [ + "▁fest", + -11.070645332336426 + ], + [ + "▁Ol", + -11.071410179138184 + ], + [ + "oir", + -11.071447372436523 + ], + [ + "300", + -11.071893692016602 + ], + [ + "▁punct", + -11.072138786315918 + ], + [ + "▁Mad", + -11.07283878326416 + ], + [ + "▁Ali", + -11.072907447814941 + ], + [ + "lag", + -11.073214530944824 + ], + [ + "▁ocean", + -11.073314666748047 + ], + [ + "▁mirror", + -11.073326110839844 + ], + [ + "▁Additionally", + -11.073869705200195 + ], + [ + "alia", + -11.073884963989258 + ], + [ + "▁county", + -11.073899269104004 + ], + [ + "▁hip", + -11.074305534362793 + ], + [ + "dale", + -11.074395179748535 + ], + [ + "▁Stra", + -11.074429512023926 + ], + [ + "▁drag", + -11.074575424194336 + ], + [ + "▁Sand", + -11.074851036071777 + ], + [ + "▁historic", + -11.074980735778809 + ], + [ + "ière", + -11.075427055358887 + ], + [ + "▁examine", + -11.075624465942383 + ], + [ + "soci", + -11.075634002685547 + ], + [ + "ime", + -11.076088905334473 + ], + [ + "▁Insurance", + -11.07621955871582 + ], + [ + "▁crime", + 
-11.076736450195312 + ], + [ + "▁pare", + -11.076945304870605 + ], + [ + "▁craft", + -11.077105522155762 + ], + [ + "▁Building", + -11.077279090881348 + ], + [ + "mission", + -11.077534675598145 + ], + [ + "▁Americans", + -11.077573776245117 + ], + [ + "▁mg", + -11.077799797058105 + ], + [ + "▁passage", + -11.077938079833984 + ], + [ + "▁deposit", + -11.078346252441406 + ], + [ + "▁widely", + -11.078444480895996 + ], + [ + "nch", + -11.078453063964844 + ], + [ + "▁Coast", + -11.078756332397461 + ], + [ + "▁recipes", + -11.078784942626953 + ], + [ + "▁Ziel", + -11.07951545715332 + ], + [ + "▁duty", + -11.079646110534668 + ], + [ + "▁gerne", + -11.079704284667969 + ], + [ + "most", + -11.080034255981445 + ], + [ + "▁argument", + -11.080158233642578 + ], + [ + "▁root", + -11.08021354675293 + ], + [ + "▁consult", + -11.08024787902832 + ], + [ + "▁muscle", + -11.080255508422852 + ], + [ + "▁spoke", + -11.08038330078125 + ], + [ + "▁Cum", + -11.080950736999512 + ], + [ + "▁orange", + -11.081033706665039 + ], + [ + "▁reader", + -11.081123352050781 + ], + [ + "schw", + -11.081151008605957 + ], + [ + "▁commission", + -11.081332206726074 + ], + [ + "histoire", + -11.081811904907227 + ], + [ + "▁represents", + -11.082064628601074 + ], + [ + "▁meilleur", + -11.082343101501465 + ], + [ + "▁10.", + -11.082358360290527 + ], + [ + "HA", + -11.082427024841309 + ], + [ + "▁Systems", + -11.082573890686035 + ], + [ + "▁blind", + -11.082603454589844 + ], + [ + "▁HP", + -11.083221435546875 + ], + [ + "▁doi", + -11.083307266235352 + ], + [ + "▁signature", + -11.083404541015625 + ], + [ + "▁invite", + -11.083505630493164 + ], + [ + "▁Samsung", + -11.083802223205566 + ], + [ + "▁liber", + -11.083942413330078 + ], + [ + "▁letters", + -11.0840482711792 + ], + [ + "▁primul", + -11.084186553955078 + ], + [ + "▁losing", + -11.084328651428223 + ], + [ + "resulting", + -11.084467887878418 + ], + [ + "▁Computer", + -11.08474063873291 + ], + [ + "▁poll", + -11.0847749710083 + ], + [ + "rile", + -11.085102081298828 + ], + [ + "TI", + -11.085142135620117 + ], + [ + "▁cur", + -11.08566951751709 + ], + [ + "▁fonction", + -11.085833549499512 + ], + [ + "gat", + -11.086359977722168 + ], + [ + "AA", + -11.086480140686035 + ], + [ + "tiv", + -11.086692810058594 + ], + [ + "▁Str", + -11.087076187133789 + ], + [ + "ești", + -11.087677955627441 + ], + [ + "▁officer", + -11.0877046585083 + ], + [ + "reducing", + -11.08772087097168 + ], + [ + "▁gifts", + -11.08780288696289 + ], + [ + "▁performing", + -11.08788776397705 + ], + [ + "▁»,", + -11.088349342346191 + ], + [ + "▁guitar", + -11.08838939666748 + ], + [ + "▁segment", + -11.088580131530762 + ], + [ + "▁Tar", + -11.08861255645752 + ], + [ + "▁ultimately", + -11.088805198669434 + ], + [ + "▁cam", + -11.088960647583008 + ], + [ + "▁Arbeit", + -11.089076042175293 + ], + [ + "▁accessories", + -11.089418411254883 + ], + [ + "bad", + -11.089820861816406 + ], + [ + "home", + -11.0899019241333 + ], + [ + "▁clip", + -11.08995532989502 + ], + [ + "range", + -11.090432167053223 + ], + [ + "CM", + -11.090867042541504 + ], + [ + "▁printed", + -11.090883255004883 + ], + [ + "▁Pet", + -11.091177940368652 + ], + [ + "▁attract", + -11.091333389282227 + ], + [ + "date", + -11.091501235961914 + ], + [ + "▁Senior", + -11.091503143310547 + ], + [ + "▁genau", + -11.092177391052246 + ], + [ + "num", + -11.092435836791992 + ], + [ + "▁attended", + -11.092674255371094 + ], + [ + "▁Turn", + -11.092824935913086 + ], + [ + "▁History", + -11.092830657958984 + ], + [ + "some", + -11.092852592468262 + ], + [ + 
"▁describe", + -11.09308910369873 + ], + [ + "▁Lee", + -11.093143463134766 + ], + [ + "▁Fre", + -11.093314170837402 + ], + [ + "▁league", + -11.093345642089844 + ], + [ + "new", + -11.093505859375 + ], + [ + "tors", + -11.093535423278809 + ], + [ + "▁storm", + -11.094005584716797 + ], + [ + "▁Beispiel", + -11.094197273254395 + ], + [ + "▁index", + -11.094344139099121 + ], + [ + "▁awarded", + -11.094613075256348 + ], + [ + "state", + -11.094625473022461 + ], + [ + "▁1990", + -11.094874382019043 + ], + [ + "▁ends", + -11.094902992248535 + ], + [ + "kor", + -11.095070838928223 + ], + [ + "far", + -11.095418930053711 + ], + [ + "▁Page", + -11.095541000366211 + ], + [ + "▁promotion", + -11.095610618591309 + ], + [ + "▁weekly", + -11.095726013183594 + ], + [ + "400", + -11.095966339111328 + ], + [ + "iuni", + -11.096365928649902 + ], + [ + "▁Summer", + -11.096376419067383 + ], + [ + "▁thin", + -11.096627235412598 + ], + [ + "▁dafür", + -11.09669303894043 + ], + [ + "51", + -11.096769332885742 + ], + [ + "PR", + -11.096978187561035 + ], + [ + "▁Hy", + -11.097001075744629 + ], + [ + "gas", + -11.097013473510742 + ], + [ + "▁atat", + -11.097166061401367 + ], + [ + "▁mining", + -11.097347259521484 + ], + [ + "▁principles", + -11.09741497039795 + ], + [ + "gent", + -11.097545623779297 + ], + [ + "ika", + -11.097685813903809 + ], + [ + "▁religion", + -11.097787857055664 + ], + [ + "▁ordered", + -11.098284721374512 + ], + [ + "▁developers", + -11.098298072814941 + ], + [ + "▁pleasure", + -11.098456382751465 + ], + [ + "vit", + -11.098505020141602 + ], + [ + "mers", + -11.0988130569458 + ], + [ + "▁Section", + -11.098873138427734 + ], + [ + "▁por", + -11.098960876464844 + ], + [ + "▁Name", + -11.099200248718262 + ], + [ + "▁pink", + -11.099260330200195 + ], + [ + "dig", + -11.09934139251709 + ], + [ + "▁eligible", + -11.099397659301758 + ], + [ + "▁Happy", + -11.09941577911377 + ], + [ + "▁fo", + -11.099480628967285 + ], + [ + "▁availability", + -11.099541664123535 + ], + [ + "GO", + -11.099583625793457 + ], + [ + "▁Europa", + -11.099637985229492 + ], + [ + "▁Unit", + -11.099656105041504 + ], + [ + "▁1000", + -11.099837303161621 + ], + [ + "▁Berg", + -11.099846839904785 + ], + [ + "fini", + -11.099853515625 + ], + [ + "▁$3", + -11.100565910339355 + ], + [ + "iza", + -11.100749969482422 + ], + [ + "▁promo", + -11.100830078125 + ], + [ + "▁Low", + -11.101234436035156 + ], + [ + "abord", + -11.101326942443848 + ], + [ + "äh", + -11.101485252380371 + ], + [ + "▁Professor", + -11.101570129394531 + ], + [ + "▁array", + -11.101579666137695 + ], + [ + "▁hate", + -11.101594924926758 + ], + [ + "▁recording", + -11.101601600646973 + ], + [ + "RI", + -11.101649284362793 + ], + [ + "▁proof", + -11.101710319519043 + ], + [ + "lay", + -11.10185718536377 + ], + [ + "DE", + -11.102007865905762 + ], + [ + "▁surprised", + -11.102066040039062 + ], + [ + "▁boxes", + -11.102193832397461 + ], + [ + "▁noastre", + -11.102386474609375 + ], + [ + "zie", + -11.102387428283691 + ], + [ + "▁însă", + -11.10254192352295 + ], + [ + "▁ajuta", + -11.102783203125 + ], + [ + "▁weil", + -11.1028413772583 + ], + [ + "▁whenever", + -11.103026390075684 + ], + [ + "shi", + -11.103194236755371 + ], + [ + "satz", + -11.103605270385742 + ], + [ + "▁remind", + -11.10401725769043 + ], + [ + "▁consist", + -11.10412311553955 + ], + [ + "▁motiv", + -11.104240417480469 + ], + [ + "▁PS", + -11.1043062210083 + ], + [ + "▁trois", + -11.104543685913086 + ], + [ + "pad", + -11.10477352142334 + ], + [ + "▁besten", + -11.104904174804688 + ], + [ + "▁Stone", + 
-11.105140686035156 + ], + [ + "itz", + -11.105157852172852 + ], + [ + "fit", + -11.105164527893066 + ], + [ + "▁Mountain", + -11.105178833007812 + ], + [ + "OC", + -11.10519027709961 + ], + [ + "▁depends", + -11.105228424072266 + ], + [ + "▁Cover", + -11.105387687683105 + ], + [ + "▁bags", + -11.106058120727539 + ], + [ + "▁Bel", + -11.106199264526367 + ], + [ + "▁Engineering", + -11.106304168701172 + ], + [ + "▁flower", + -11.106647491455078 + ], + [ + "▁gratuit", + -11.106670379638672 + ], + [ + "▁smartphone", + -11.106780052185059 + ], + [ + "stan", + -11.107197761535645 + ], + [ + "spect", + -11.10726261138916 + ], + [ + "SL", + -11.107282638549805 + ], + [ + "sho", + -11.10738754272461 + ], + [ + "▁Ser", + -11.10791301727295 + ], + [ + "▁Perhaps", + -11.108247756958008 + ], + [ + "▁codes", + -11.108342170715332 + ], + [ + "▁Wind", + -11.10849666595459 + ], + [ + "aient", + -11.108757019042969 + ], + [ + "▁Prin", + -11.108802795410156 + ], + [ + "▁(1)", + -11.109090805053711 + ], + [ + "▁figures", + -11.109450340270996 + ], + [ + "▁ausge", + -11.10972785949707 + ], + [ + "▁episode", + -11.110050201416016 + ], + [ + "▁Spa", + -11.110370635986328 + ], + [ + "▁Silver", + -11.110386848449707 + ], + [ + "▁Sky", + -11.110396385192871 + ], + [ + "▁capabilities", + -11.1107177734375 + ], + [ + "▁Uni", + -11.11073112487793 + ], + [ + "▁încă", + -11.110876083374023 + ], + [ + "TO", + -11.111289978027344 + ], + [ + "▁Hal", + -11.111358642578125 + ], + [ + "ghi", + -11.111414909362793 + ], + [ + "▁sofa", + -11.111438751220703 + ], + [ + "hard", + -11.11150074005127 + ], + [ + "▁FOR", + -11.111587524414062 + ], + [ + "▁Ber", + -11.111820220947266 + ], + [ + "▁firms", + -11.11187744140625 + ], + [ + "▁memories", + -11.111883163452148 + ], + [ + "▁lift", + -11.11214542388916 + ], + [ + "▁sending", + -11.11214542388916 + ], + [ + "▁narrow", + -11.112646102905273 + ], + [ + "▁Steve", + -11.112784385681152 + ], + [ + "▁integration", + -11.112905502319336 + ], + [ + "known", + -11.113122940063477 + ], + [ + "▁nostru", + -11.113237380981445 + ], + [ + "iţi", + -11.113422393798828 + ], + [ + "▁Georgia", + -11.113759994506836 + ], + [ + "▁slowly", + -11.114026069641113 + ], + [ + "iere", + -11.114028930664062 + ], + [ + "aka", + -11.114255905151367 + ], + [ + "PE", + -11.114320755004883 + ], + [ + "▁venue", + -11.11468505859375 + ], + [ + "jar", + -11.11474609375 + ], + [ + "buch", + -11.114755630493164 + ], + [ + "rad", + -11.114858627319336 + ], + [ + "▁resistance", + -11.114899635314941 + ], + [ + "▁stehen", + -11.114914894104004 + ], + [ + "chin", + -11.11504077911377 + ], + [ + "▁weak", + -11.11535358428955 + ], + [ + "▁DVD", + -11.115598678588867 + ], + [ + "▁bodies", + -11.115856170654297 + ], + [ + "▁split", + -11.115884780883789 + ], + [ + "What", + -11.116231918334961 + ], + [ + "setzen", + -11.116467475891113 + ], + [ + "▁loves", + -11.116561889648438 + ], + [ + "▁kleine", + -11.117077827453613 + ], + [ + "▁increasingly", + -11.11746883392334 + ], + [ + "▁alert", + -11.117583274841309 + ], + [ + "▁AC", + -11.117647171020508 + ], + [ + "▁partir", + -11.117974281311035 + ], + [ + "▁ratio", + -11.11807918548584 + ], + [ + "▁keeps", + -11.118539810180664 + ], + [ + "▁Area", + -11.118544578552246 + ], + [ + "▁données", + -11.119071960449219 + ], + [ + "▁flag", + -11.119254112243652 + ], + [ + "▁NO", + -11.119277000427246 + ], + [ + "▁hotels", + -11.119336128234863 + ], + [ + "▁debut", + -11.119365692138672 + ], + [ + "▁suffer", + -11.119368553161621 + ], + [ + "▁hidden", + -11.119810104370117 + ], + 
[ + "▁clothing", + -11.120074272155762 + ], + [ + "▁household", + -11.120235443115234 + ], + [ + "medi", + -11.120268821716309 + ], + [ + "▁reste", + -11.120274543762207 + ], + [ + "bro", + -11.120381355285645 + ], + [ + "▁Bus", + -11.120405197143555 + ], + [ + "▁Ken", + -11.120572090148926 + ], + [ + "IR", + -11.120758056640625 + ], + [ + "▁suffering", + -11.121212005615234 + ], + [ + "▁publication", + -11.121246337890625 + ], + [ + "▁Mat", + -11.121360778808594 + ], + [ + "▁impression", + -11.121509552001953 + ], + [ + "▁founded", + -11.121562957763672 + ], + [ + "▁stable", + -11.121566772460938 + ], + [ + "▁promise", + -11.121719360351562 + ], + [ + "▁Cloud", + -11.121770858764648 + ], + [ + "▁prison", + -11.122099876403809 + ], + [ + "cor", + -11.122355461120605 + ], + [ + "▁Sports", + -11.122716903686523 + ], + [ + "▁erste", + -11.122745513916016 + ], + [ + "shire", + -11.122757911682129 + ], + [ + "▁recommendations", + -11.122916221618652 + ], + [ + "▁permit", + -11.123100280761719 + ], + [ + "▁tomorrow", + -11.123126983642578 + ], + [ + "▁lucky", + -11.123422622680664 + ], + [ + "▁realized", + -11.123449325561523 + ], + [ + "▁famille", + -11.123473167419434 + ], + [ + "▁Zealand", + -11.123542785644531 + ], + [ + "▁wooden", + -11.123601913452148 + ], + [ + "▁east", + -11.124269485473633 + ], + [ + "▁Bereich", + -11.12458324432373 + ], + [ + "während", + -11.124653816223145 + ], + [ + "rite", + -11.124836921691895 + ], + [ + "▁fla", + -11.124902725219727 + ], + [ + "platz", + -11.124991416931152 + ], + [ + "▁zero", + -11.125292778015137 + ], + [ + "▁priority", + -11.12535572052002 + ], + [ + "▁Airport", + -11.125506401062012 + ], + [ + "▁Kauf", + -11.125590324401855 + ], + [ + "▁ultimate", + -11.12601375579834 + ], + [ + "▁chest", + -11.126175880432129 + ], + [ + "▁tone", + -11.126376152038574 + ], + [ + "▁Kal", + -11.126431465148926 + ], + [ + "▁supposed", + -11.12669849395752 + ], + [ + "▁vedere", + -11.126846313476562 + ], + [ + "▁50%", + -11.126872062683105 + ], + [ + "▁Ger", + -11.127785682678223 + ], + [ + "pack", + -11.127849578857422 + ], + [ + "▁priv", + -11.128241539001465 + ], + [ + "▁Kit", + -11.128263473510742 + ], + [ + "▁tent", + -11.128457069396973 + ], + [ + "▁guidelines", + -11.128461837768555 + ], + [ + "▁Republic", + -11.128824234008789 + ], + [ + "including", + -11.129239082336426 + ], + [ + "▁chief", + -11.129615783691406 + ], + [ + "▁Living", + -11.129766464233398 + ], + [ + "keit", + -11.1298189163208 + ], + [ + "▁convert", + -11.129831314086914 + ], + [ + "tail", + -11.129928588867188 + ], + [ + "orient", + -11.129960060119629 + ], + [ + "eigenen", + -11.130245208740234 + ], + [ + "▁soup", + -11.130587577819824 + ], + [ + "▁zona", + -11.130661010742188 + ], + [ + "▁composition", + -11.130690574645996 + ], + [ + "▁Bob", + -11.130831718444824 + ], + [ + "▁exception", + -11.131170272827148 + ], + [ + "▁cr", + -11.131287574768066 + ], + [ + "▁str", + -11.131482124328613 + ], + [ + "▁Fl", + -11.13178825378418 + ], + [ + "AT", + -11.131909370422363 + ], + [ + "kel", + -11.132002830505371 + ], + [ + "▁pricing", + -11.132189750671387 + ], + [ + "▁Mass", + -11.132258415222168 + ], + [ + "vir", + -11.132333755493164 + ], + [ + "leg", + -11.132448196411133 + ], + [ + "▁rating", + -11.132455825805664 + ], + [ + "▁Sale", + -11.132628440856934 + ], + [ + "▁somewhere", + -11.132866859436035 + ], + [ + "▁submitted", + -11.133084297180176 + ], + [ + "▁Pop", + -11.133296012878418 + ], + [ + "▁papers", + -11.13330364227295 + ], + [ + "▁authorities", + -11.133326530456543 + ], + 
[ + "▁Person", + -11.133381843566895 + ], + [ + "▁kill", + -11.133512496948242 + ], + [ + "▁suggestions", + -11.133548736572266 + ], + [ + "-6", + -11.133644104003906 + ], + [ + "▁dust", + -11.133750915527344 + ], + [ + "taire", + -11.133805274963379 + ], + [ + "▁recognition", + -11.133870124816895 + ], + [ + "3.", + -11.134047508239746 + ], + [ + "▁Mont", + -11.134230613708496 + ], + [ + "▁produit", + -11.13430118560791 + ], + [ + "▁transmission", + -11.134340286254883 + ], + [ + "▁Th", + -11.13475513458252 + ], + [ + "▁passing", + -11.134928703308105 + ], + [ + "▁Partner", + -11.135161399841309 + ], + [ + "▁dire", + -11.135205268859863 + ], + [ + "▁DC", + -11.135432243347168 + ], + [ + "▁sky", + -11.135659217834473 + ], + [ + "▁Kitchen", + -11.135890007019043 + ], + [ + "▁fluid", + -11.135929107666016 + ], + [ + "▁scored", + -11.136005401611328 + ], + [ + "▁chapter", + -11.136100769042969 + ], + [ + "If", + -11.136231422424316 + ], + [ + "letzten", + -11.136275291442871 + ], + [ + "▁officers", + -11.13641357421875 + ], + [ + "▁avem", + -11.136631965637207 + ], + [ + "ister", + -11.136666297912598 + ], + [ + "▁involves", + -11.136688232421875 + ], + [ + "ico", + -11.136898040771484 + ], + [ + "bur", + -11.137056350708008 + ], + [ + "▁mieux", + -11.137064933776855 + ], + [ + "▁Photo", + -11.1371431350708 + ], + [ + "▁Cro", + -11.137228012084961 + ], + [ + "▁professor", + -11.137245178222656 + ], + [ + "▁besonders", + -11.137313842773438 + ], + [ + "д", + -11.137367248535156 + ], + [ + "▁alongside", + -11.137382507324219 + ], + [ + "▁stored", + -11.13770580291748 + ], + [ + "▁activ", + -11.137849807739258 + ], + [ + "▁setup", + -11.138169288635254 + ], + [ + "▁extract", + -11.138627052307129 + ], + [ + "▁accent", + -11.138633728027344 + ], + [ + "▁replaced", + -11.138638496398926 + ], + [ + "tec", + -11.138800621032715 + ], + [ + "▁Natur", + -11.138848304748535 + ], + [ + "▁Pacific", + -11.138887405395508 + ], + [ + "▁NY", + -11.139485359191895 + ], + [ + "▁Capital", + -11.139583587646484 + ], + [ + "▁forest", + -11.13969898223877 + ], + [ + "incredibly", + -11.14006233215332 + ], + [ + "▁choix", + -11.14021110534668 + ], + [ + "▁seriously", + -11.140281677246094 + ], + [ + "▁konnte", + -11.14030933380127 + ], + [ + "▁2014.", + -11.140443801879883 + ], + [ + "ensuring", + -11.140534400939941 + ], + [ + "▁handling", + -11.140661239624023 + ], + [ + "▁9.", + -11.140715599060059 + ], + [ + "▁relations", + -11.140876770019531 + ], + [ + "▁Kom", + -11.141045570373535 + ], + [ + "▁Hol", + -11.141282081604004 + ], + [ + "▁none", + -11.141515731811523 + ], + [ + "rob", + -11.141718864440918 + ], + [ + "▁Forum", + -11.141759872436523 + ], + [ + "hour", + -11.141776084899902 + ], + [ + "ème", + -11.141809463500977 + ], + [ + "▁Space", + -11.141986846923828 + ], + [ + "▁Ham", + -11.142992973327637 + ], + [ + "rap", + -11.143169403076172 + ], + [ + "▁Michigan", + -11.14317512512207 + ], + [ + "km", + -11.143202781677246 + ], + [ + "▁utilize", + -11.143548965454102 + ], + [ + "lov", + -11.143775939941406 + ], + [ + "▁luck", + -11.144388198852539 + ], + [ + "lä", + -11.144824981689453 + ], + [ + "▁healing", + -11.145010948181152 + ], + [ + "▁neu", + -11.145182609558105 + ], + [ + "aging", + -11.145251274108887 + ], + [ + "▁compliance", + -11.145583152770996 + ], + [ + "▁vertical", + -11.145675659179688 + ], + [ + "▁FREE", + -11.145729064941406 + ], + [ + "▁differences", + -11.146014213562012 + ], + [ + "▁Server", + -11.146252632141113 + ], + [ + "▁estimated", + -11.146378517150879 + ], + [ + "schutz", + 
-11.146692276000977 + ], + [ + "▁notamment", + -11.146736145019531 + ], + [ + "▁120", + -11.146919250488281 + ], + [ + "72", + -11.147282600402832 + ], + [ + "▁heating", + -11.147347450256348 + ], + [ + "late", + -11.14756965637207 + ], + [ + "▁younger", + -11.14783000946045 + ], + [ + "▁Intel", + -11.148171424865723 + ], + [ + "▁salad", + -11.148362159729004 + ], + [ + "▁commonly", + -11.148563385009766 + ], + [ + "▁treatments", + -11.148682594299316 + ], + [ + "▁speaker", + -11.148770332336426 + ], + [ + "▁producing", + -11.149120330810547 + ], + [ + "▁eggs", + -11.149367332458496 + ], + [ + "▁Spirit", + -11.149892807006836 + ], + [ + "▁beide", + -11.149918556213379 + ], + [ + "▁transaction", + -11.150283813476562 + ], + [ + "▁Machine", + -11.150464057922363 + ], + [ + "▁Games", + -11.150527000427246 + ], + [ + "▁niveau", + -11.150687217712402 + ], + [ + "▁Need", + -11.15082836151123 + ], + [ + "radi", + -11.150959968566895 + ], + [ + "mir", + -11.15096664428711 + ], + [ + "causing", + -11.151000022888184 + ], + [ + "▁début", + -11.151042938232422 + ], + [ + "▁rencontre", + -11.151063919067383 + ], + [ + "▁threat", + -11.151153564453125 + ], + [ + "▁enjoying", + -11.151320457458496 + ], + [ + "Com", + -11.151386260986328 + ], + [ + "▁Johnson", + -11.151555061340332 + ], + [ + "▁tournament", + -11.15156364440918 + ], + [ + "▁Micro", + -11.151582717895508 + ], + [ + "▁Drive", + -11.151667594909668 + ], + [ + "▁Cre", + -11.151866912841797 + ], + [ + "▁Lebens", + -11.151930809020996 + ], + [ + "▁categories", + -11.152358055114746 + ], + [ + "5,000", + -11.15261173248291 + ], + [ + "▁confirmed", + -11.152617454528809 + ], + [ + "pli", + -11.152763366699219 + ], + [ + "▁Francisco", + -11.153139114379883 + ], + [ + "▁raw", + -11.153157234191895 + ], + [ + "▁managers", + -11.153223991394043 + ], + [ + "ţie", + -11.153365135192871 + ], + [ + "UR", + -11.153368949890137 + ], + [ + "▁aproape", + -11.154065132141113 + ], + [ + "via", + -11.154606819152832 + ], + [ + "▁engaged", + -11.154646873474121 + ], + [ + "▁parti", + -11.154741287231445 + ], + [ + "▁posting", + -11.15517807006836 + ], + [ + "CO", + -11.155484199523926 + ], + [ + "▁bois", + -11.155815124511719 + ], + [ + "▁inch", + -11.15590763092041 + ], + [ + "vie", + -11.156068801879883 + ], + [ + "▁aside", + -11.156314849853516 + ], + [ + "▁exceptional", + -11.15658950805664 + ], + [ + "▁vintage", + -11.156668663024902 + ], + [ + "▁Him", + -11.156795501708984 + ], + [ + "▁expansion", + -11.156806945800781 + ], + [ + "▁Weg", + -11.157122611999512 + ], + [ + "▁authors", + -11.157535552978516 + ], + [ + "▁deine", + -11.15764045715332 + ], + [ + "▁Prime", + -11.158016204833984 + ], + [ + "▁scan", + -11.158055305480957 + ], + [ + "▁reg", + -11.158112525939941 + ], + [ + "ția", + -11.158141136169434 + ], + [ + "riv", + -11.158258438110352 + ], + [ + "selon", + -11.158440589904785 + ], + [ + "▁Studio", + -11.158571243286133 + ], + [ + "▁dich", + -11.158658027648926 + ], + [ + "▁vi", + -11.158745765686035 + ], + [ + "▁sequence", + -11.159016609191895 + ], + [ + "▁Four", + -11.159046173095703 + ], + [ + "RT", + -11.159050941467285 + ], + [ + "▁ihn", + -11.159072875976562 + ], + [ + "▁employ", + -11.159223556518555 + ], + [ + "umb", + -11.159659385681152 + ], + [ + "ită", + -11.159818649291992 + ], + [ + "▁Station", + -11.159950256347656 + ], + [ + "▁upload", + -11.159972190856934 + ], + [ + "▁upgrade", + -11.160445213317871 + ], + [ + "▁exterior", + -11.160528182983398 + ], + [ + "▁writers", + -11.160531997680664 + ], + [ + "▁plot", + 
-11.160543441772461 + ], + [ + "▁Gen", + -11.16068172454834 + ], + [ + "TER", + -11.160821914672852 + ], + [ + "-12", + -11.160930633544922 + ], + [ + "http", + -11.162168502807617 + ], + [ + "▁smell", + -11.1621732711792 + ], + [ + "post", + -11.162522315979004 + ], + [ + "von", + -11.162790298461914 + ], + [ + "mili", + -11.16280746459961 + ], + [ + "8%", + -11.162972450256348 + ], + [ + "▁Andrew", + -11.163065910339355 + ], + [ + "▁spun", + -11.16321086883545 + ], + [ + "▁grass", + -11.163444519042969 + ], + [ + "unter", + -11.163474082946777 + ], + [ + "▁burn", + -11.16356086730957 + ], + [ + "▁Gegen", + -11.163601875305176 + ], + [ + "fest", + -11.163721084594727 + ], + [ + "▁Northern", + -11.163738250732422 + ], + [ + "▁consumption", + -11.163775444030762 + ], + [ + "▁bird", + -11.164069175720215 + ], + [ + "▁Miss", + -11.164369583129883 + ], + [ + "anti", + -11.16447925567627 + ], + [ + "▁viata", + -11.164583206176758 + ], + [ + "bereich", + -11.164602279663086 + ], + [ + "▁Change", + -11.164871215820312 + ], + [ + "▁pouvoir", + -11.165255546569824 + ], + [ + "▁demonstrate", + -11.165435791015625 + ], + [ + "▁requirement", + -11.165483474731445 + ], + [ + "BI", + -11.16577434539795 + ], + [ + "ied", + -11.166099548339844 + ], + [ + "▁spray", + -11.166358947753906 + ], + [ + "▁calitate", + -11.166379928588867 + ], + [ + "▁souvent", + -11.1665620803833 + ], + [ + "▁samples", + -11.166682243347168 + ], + [ + "▁compete", + -11.166930198669434 + ], + [ + "ank", + -11.166946411132812 + ], + [ + "année", + -11.167037963867188 + ], + [ + "wick", + -11.167183876037598 + ], + [ + "iff", + -11.167254447937012 + ], + [ + "noi", + -11.167255401611328 + ], + [ + "ography", + -11.167450904846191 + ], + [ + "▁SE", + -11.167508125305176 + ], + [ + "▁250", + -11.16779899597168 + ], + [ + "▁wealth", + -11.167884826660156 + ], + [ + "4%", + -11.168235778808594 + ], + [ + "▁swimming", + -11.168269157409668 + ], + [ + "enne", + -11.168338775634766 + ], + [ + "Qu", + -11.168400764465332 + ], + [ + "▁connections", + -11.168476104736328 + ], + [ + "onne", + -11.16852855682373 + ], + [ + "▁Way", + -11.168676376342773 + ], + [ + "voll", + -11.168793678283691 + ], + [ + "▁extent", + -11.169041633605957 + ], + [ + "▁objective", + -11.169572830200195 + ], + [ + "▁clinic", + -11.169581413269043 + ], + [ + "NA", + -11.169848442077637 + ], + [ + "▁Hope", + -11.170098304748535 + ], + [ + "▁coat", + -11.170331954956055 + ], + [ + "▁depend", + -11.170393943786621 + ], + [ + "▁tine", + -11.170463562011719 + ], + [ + "acc", + -11.170486450195312 + ], + [ + "▁editor", + -11.170598983764648 + ], + [ + "▁Jim", + -11.170690536499023 + ], + [ + "600", + -11.171262741088867 + ], + [ + "▁module", + -11.171302795410156 + ], + [ + "▁deja", + -11.171821594238281 + ], + [ + "atur", + -11.171841621398926 + ], + [ + "▁maintaining", + -11.171918869018555 + ], + [ + "▁hoch", + -11.172059059143066 + ], + [ + "▁covering", + -11.17239761352539 + ], + [ + "vielen", + -11.172450065612793 + ], + [ + "hem", + -11.172531127929688 + ], + [ + "▁illegal", + -11.172656059265137 + ], + [ + "▁certificate", + -11.17329216003418 + ], + [ + "▁collective", + -11.173357963562012 + ], + [ + "▁blow", + -11.17343807220459 + ], + [ + "▁programming", + -11.17343807220459 + ], + [ + "HE", + -11.173727989196777 + ], + [ + "▁Division", + -11.173842430114746 + ], + [ + "▁ceux", + -11.174081802368164 + ], + [ + "▁saved", + -11.174202919006348 + ], + [ + "▁worst", + -11.17426586151123 + ], + [ + "▁arms", + -11.17430305480957 + ], + [ + "▁Officer", + 
-11.17463493347168 + ], + [ + "▁association", + -11.174838066101074 + ], + [ + "ington", + -11.1749906539917 + ], + [ + "▁belle", + -11.175024032592773 + ], + [ + "tting", + -11.17537784576416 + ], + [ + "▁attacks", + -11.175446510314941 + ], + [ + "▁vei", + -11.17546558380127 + ], + [ + "▁gerade", + -11.175470352172852 + ], + [ + "▁strain", + -11.175748825073242 + ], + [ + "▁offices", + -11.1759672164917 + ], + [ + "EM", + -11.17627239227295 + ], + [ + "EST", + -11.176509857177734 + ], + [ + "-8", + -11.176758766174316 + ], + [ + "▁faculty", + -11.176998138427734 + ], + [ + "▁Plant", + -11.177046775817871 + ], + [ + "pla", + -11.177295684814453 + ], + [ + "card", + -11.177618980407715 + ], + [ + "▁loose", + -11.177982330322266 + ], + [ + "▁PR", + -11.178044319152832 + ], + [ + "profit", + -11.178071022033691 + ], + [ + "▁channels", + -11.178119659423828 + ], + [ + "ATE", + -11.178257942199707 + ], + [ + "atic", + -11.178304672241211 + ], + [ + "wegen", + -11.178404808044434 + ], + [ + "word", + -11.178621292114258 + ], + [ + "▁sehen", + -11.178659439086914 + ], + [ + "▁nombre", + -11.178744316101074 + ], + [ + "▁DO", + -11.178763389587402 + ], + [ + "▁hoping", + -11.178949356079102 + ], + [ + "▁wollen", + -11.179091453552246 + ], + [ + "▁decat", + -11.179244995117188 + ], + [ + "IF", + -11.179386138916016 + ], + [ + "▁permission", + -11.179396629333496 + ], + [ + "▁Williams", + -11.179936408996582 + ], + [ + "▁beer", + -11.179962158203125 + ], + [ + "▁dernière", + -11.180052757263184 + ], + [ + "▁purchasing", + -11.18025016784668 + ], + [ + "▁pride", + -11.180416107177734 + ], + [ + "solv", + -11.180598258972168 + ], + [ + "ego", + -11.180691719055176 + ], + [ + "▁Oil", + -11.18079662322998 + ], + [ + "▁dishes", + -11.18102741241455 + ], + [ + "▁Baby", + -11.181109428405762 + ], + [ + "▁Roll", + -11.181137084960938 + ], + [ + "vez", + -11.18134593963623 + ], + [ + "▁drept", + -11.181367874145508 + ], + [ + "lly", + -11.18148136138916 + ], + [ + "▁potrivit", + -11.181495666503906 + ], + [ + "person", + -11.181961059570312 + ], + [ + "▁interactive", + -11.182269096374512 + ], + [ + "▁brilliant", + -11.182304382324219 + ], + [ + "▁000", + -11.182357788085938 + ], + [ + "▁giant", + -11.182657241821289 + ], + [ + "▁plain", + -11.182945251464844 + ], + [ + "▁lock", + -11.183197975158691 + ], + [ + "▁inspection", + -11.183762550354004 + ], + [ + "▁symbol", + -11.18392276763916 + ], + [ + "▁Gal", + -11.183953285217285 + ], + [ + "▁concepts", + -11.1840181350708 + ], + [ + "▁venture", + -11.18411922454834 + ], + [ + "▁Tr", + -11.184402465820312 + ], + [ + "▁Color", + -11.184469223022461 + ], + [ + "▁behalf", + -11.184635162353516 + ], + [ + "ink", + -11.184715270996094 + ], + [ + "atii", + -11.1848726272583 + ], + [ + "wie", + -11.184907913208008 + ], + [ + "▁stream", + -11.18514347076416 + ], + [ + "▁buyers", + -11.185192108154297 + ], + [ + "legen", + -11.185526847839355 + ], + [ + "iness", + -11.18578815460205 + ], + [ + "▁absolute", + -11.185945510864258 + ], + [ + "▁council", + -11.186067581176758 + ], + [ + "▁displayed", + -11.186172485351562 + ], + [ + "▁Bun", + -11.186405181884766 + ], + [ + "▁darauf", + -11.186585426330566 + ], + [ + "▁rod", + -11.186829566955566 + ], + [ + "▁repeat", + -11.186898231506348 + ], + [ + "quelle", + -11.187023162841797 + ], + [ + "lation", + -11.187433242797852 + ], + [ + "gul", + -11.18774700164795 + ], + [ + "▁compensation", + -11.188064575195312 + ], + [ + "▁string", + -11.1881685256958 + ], + [ + "▁joining", + -11.188251495361328 + ], + [ + "▁Pra", + 
-11.188429832458496 + ], + [ + "hab", + -11.188936233520508 + ], + [ + "▁plane", + -11.189024925231934 + ], + [ + "▁conversion", + -11.189078330993652 + ], + [ + "▁lesson", + -11.189361572265625 + ], + [ + "bound", + -11.1893949508667 + ], + [ + "▁seats", + -11.18946361541748 + ], + [ + "voc", + -11.189902305603027 + ], + [ + "▁Disney", + -11.190120697021484 + ], + [ + "esse", + -11.190277099609375 + ], + [ + "▁awards", + -11.190279006958008 + ], + [ + "▁initiative", + -11.190483093261719 + ], + [ + "UM", + -11.19050407409668 + ], + [ + "▁intelligence", + -11.190763473510742 + ], + [ + "▁laser", + -11.191128730773926 + ], + [ + "än", + -11.191228866577148 + ], + [ + "▁generated", + -11.191231727600098 + ], + [ + "▁allen", + -11.19186782836914 + ], + [ + "▁Aug", + -11.19261360168457 + ], + [ + "lini", + -11.192968368530273 + ], + [ + "▁Update", + -11.193015098571777 + ], + [ + "▁grab", + -11.193095207214355 + ], + [ + "▁Bridge", + -11.193219184875488 + ], + [ + "rock", + -11.193289756774902 + ], + [ + "hold", + -11.193461418151855 + ], + [ + "seinen", + -11.193643569946289 + ], + [ + "▁false", + -11.193758010864258 + ], + [ + "type", + -11.193792343139648 + ], + [ + "▁outcome", + -11.193906784057617 + ], + [ + "▁crazy", + -11.194161415100098 + ], + [ + "▁Platz", + -11.194281578063965 + ], + [ + "▁believed", + -11.194426536560059 + ], + [ + "▁adjust", + -11.194503784179688 + ], + [ + "▁entrance", + -11.194644927978516 + ], + [ + "▁Colorado", + -11.194751739501953 + ], + [ + "▁concentration", + -11.194865226745605 + ], + [ + "aid", + -11.194958686828613 + ], + [ + "▁regardless", + -11.195035934448242 + ], + [ + "▁mici", + -11.195063591003418 + ], + [ + "▁potentially", + -11.195109367370605 + ], + [ + "▁Custom", + -11.195867538452148 + ], + [ + "rag", + -11.196009635925293 + ], + [ + "▁employer", + -11.19604206085205 + ], + [ + "tagged", + -11.196158409118652 + ], + [ + "▁34", + -11.196271896362305 + ], + [ + "fro", + -11.196895599365234 + ], + [ + "▁Pas", + -11.197010040283203 + ], + [ + "▁AS", + -11.197013854980469 + ], + [ + "PP", + -11.197031021118164 + ], + [ + "stru", + -11.19741439819336 + ], + [ + "grâce", + -11.198037147521973 + ], + [ + "▁anyway", + -11.198240280151367 + ], + [ + "▁streets", + -11.1986083984375 + ], + [ + "▁Region", + -11.199190139770508 + ], + [ + "▁newly", + -11.199280738830566 + ], + [ + "▁assistant", + -11.199461936950684 + ], + [ + "▁requests", + -11.199618339538574 + ], + [ + "▁Ohio", + -11.199705123901367 + ], + [ + "▁continuing", + -11.200072288513184 + ], + [ + "▁îm", + -11.200136184692383 + ], + [ + "7%", + -11.20031452178955 + ], + [ + "▁basically", + -11.200325965881348 + ], + [ + "gabe", + -11.200334548950195 + ], + [ + "▁ultra", + -11.200355529785156 + ], + [ + "pic", + -11.200571060180664 + ], + [ + "▁jeder", + -11.200939178466797 + ], + [ + "▁Cook", + -11.201225280761719 + ], + [ + "▁tie", + -11.201227188110352 + ], + [ + "▁yard", + -11.20151424407959 + ], + [ + "▁wash", + -11.20152759552002 + ], + [ + "▁3,", + -11.20194149017334 + ], + [ + "▁exista", + -11.202128410339355 + ], + [ + "▁egg", + -11.202342987060547 + ], + [ + "▁marché", + -11.202616691589355 + ], + [ + "kommen", + -11.202630996704102 + ], + [ + "▁Select", + -11.202999114990234 + ], + [ + "geben", + -11.203126907348633 + ], + [ + "▁Joseph", + -11.203531265258789 + ], + [ + "▁Ces", + -11.203642845153809 + ], + [ + "▁hundred", + -11.203676223754883 + ], + [ + "even", + -11.203792572021484 + ], + [ + "gal", + -11.204232215881348 + ], + [ + "800", + -11.20443058013916 + ], + [ + "▁Jones", + 
-11.204599380493164 + ], + [ + "ova", + -11.204681396484375 + ], + [ + "▁careful", + -11.204727172851562 + ], + [ + "▁alarm", + -11.205070495605469 + ], + [ + "NI", + -11.205113410949707 + ], + [ + "▁residence", + -11.205327987670898 + ], + [ + "▁wäre", + -11.20590877532959 + ], + [ + "▁Dor", + -11.205986976623535 + ], + [ + "▁amounts", + -11.206369400024414 + ], + [ + "▁mistake", + -11.206687927246094 + ], + [ + "ates", + -11.206796646118164 + ], + [ + "▁bune", + -11.206951141357422 + ], + [ + "▁vegetables", + -11.207124710083008 + ], + [ + "▁Ann", + -11.207204818725586 + ], + [ + "logical", + -11.20776081085205 + ], + [ + "stadt", + -11.207806587219238 + ], + [ + "▁chances", + -11.207921981811523 + ], + [ + "%)", + -11.208030700683594 + ], + [ + "▁minimal", + -11.20810604095459 + ], + [ + "▁naturally", + -11.20817756652832 + ], + [ + "▁Geld", + -11.20822525024414 + ], + [ + "▁Yu", + -11.208361625671387 + ], + [ + "▁wrap", + -11.20840072631836 + ], + [ + "rest", + -11.208674430847168 + ], + [ + "▁legs", + -11.208758354187012 + ], + [ + "PM", + -11.208806991577148 + ], + [ + "▁Heart", + -11.208888053894043 + ], + [ + "▁suspect", + -11.209020614624023 + ], + [ + "Go", + -11.209098815917969 + ], + [ + "▁Fil", + -11.209175109863281 + ], + [ + "▁YOU", + -11.209175109863281 + ], + [ + "▁victory", + -11.209245681762695 + ], + [ + "pun", + -11.20960807800293 + ], + [ + "▁Zo", + -11.209632873535156 + ], + [ + "CT", + -11.209640502929688 + ], + [ + "▁trim", + -11.20969009399414 + ], + [ + "▁stuck", + -11.209836959838867 + ], + [ + "ators", + -11.209877014160156 + ], + [ + "▁Ideas", + -11.210016250610352 + ], + [ + "▁voyage", + -11.210166931152344 + ], + [ + "▁Restaurant", + -11.210205078125 + ], + [ + "▁pat", + -11.210234642028809 + ], + [ + "▁bond", + -11.210521697998047 + ], + [ + "▁Del", + -11.210552215576172 + ], + [ + "▁fighting", + -11.210705757141113 + ], + [ + "▁concerning", + -11.210867881774902 + ], + [ + "▁etwa", + -11.211141586303711 + ], + [ + "▁Thema", + -11.211237907409668 + ], + [ + "▁preferred", + -11.211423873901367 + ], + [ + "▁pitch", + -11.211465835571289 + ], + [ + "▁Singapore", + -11.211971282958984 + ], + [ + "▁tub", + -11.212018013000488 + ], + [ + "FT", + -11.212053298950195 + ], + [ + "▁Product", + -11.21212100982666 + ], + [ + "▁applying", + -11.212285995483398 + ], + [ + "▁Fr", + -11.212340354919434 + ], + [ + "ţa", + -11.212599754333496 + ], + [ + "▁iPad", + -11.212861061096191 + ], + [ + "PD", + -11.2129545211792 + ], + [ + "▁comun", + -11.212995529174805 + ], + [ + "▁pie", + -11.213286399841309 + ], + [ + "rank", + -11.21364688873291 + ], + [ + "tron", + -11.213677406311035 + ], + [ + "▁pest", + -11.213906288146973 + ], + [ + "▁herself", + -11.213936805725098 + ], + [ + "▁intense", + -11.213964462280273 + ], + [ + "foot", + -11.21413803100586 + ], + [ + "▁1998", + -11.2141695022583 + ], + [ + "▁anxiety", + -11.214616775512695 + ], + [ + "▁portable", + -11.214674949645996 + ], + [ + "▁harm", + -11.214735984802246 + ], + [ + "▁admit", + -11.214885711669922 + ], + [ + "sted", + -11.214900016784668 + ], + [ + "▁regions", + -11.215450286865234 + ], + [ + "cie", + -11.215556144714355 + ], + [ + "▁robust", + -11.21577262878418 + ], + [ + "▁stem", + -11.215982437133789 + ], + [ + "▁roles", + -11.216024398803711 + ], + [ + "▁Latin", + -11.216224670410156 + ], + [ + "▁Ré", + -11.216378211975098 + ], + [ + "▁ref", + -11.216381072998047 + ], + [ + "isme", + -11.216426849365234 + ], + [ + "▁contribution", + -11.216776847839355 + ], + [ + "▁forever", + -11.217447280883789 + ], + [ 
+ "▁frei", + -11.21754264831543 + ], + [ + "▁mont", + -11.217818260192871 + ], + [ + "that", + -11.217999458312988 + ], + [ + "▁sensitive", + -11.218116760253906 + ], + [ + "▁wider", + -11.218175888061523 + ], + [ + "AF", + -11.218234062194824 + ], + [ + "▁liability", + -11.218748092651367 + ], + [ + "ţiei", + -11.219043731689453 + ], + [ + "▁Cho", + -11.219260215759277 + ], + [ + "aria", + -11.21960735321045 + ], + [ + "rang", + -11.21977710723877 + ], + [ + "▁Account", + -11.21986198425293 + ], + [ + "▁III", + -11.219941139221191 + ], + [ + "▁tooth", + -11.220222473144531 + ], + [ + "▁factory", + -11.220240592956543 + ], + [ + "▁dropped", + -11.220495223999023 + ], + [ + "horn", + -11.220780372619629 + ], + [ + "RP", + -11.221110343933105 + ], + [ + "▁container", + -11.22118091583252 + ], + [ + "fran", + -11.221474647521973 + ], + [ + "▁lawyer", + -11.221842765808105 + ], + [ + "▁Image", + -11.221907615661621 + ], + [ + "HO", + -11.22195816040039 + ], + [ + "▁incorporate", + -11.221992492675781 + ], + [ + "▁lume", + -11.22226333618164 + ], + [ + "GA", + -11.222331047058105 + ], + [ + "itati", + -11.222370147705078 + ], + [ + "autre", + -11.222665786743164 + ], + [ + "ierten", + -11.222688674926758 + ], + [ + "[", + -11.222746849060059 + ], + [ + "▁packages", + -11.222758293151855 + ], + [ + "▁Simon", + -11.22290325164795 + ], + [ + "▁somewhat", + -11.223734855651855 + ], + [ + "mbo", + -11.223737716674805 + ], + [ + "lite", + -11.223844528198242 + ], + [ + "▁eliminate", + -11.22395133972168 + ], + [ + "▁decrease", + -11.224117279052734 + ], + [ + "▁geben", + -11.224214553833008 + ], + [ + "▁approaches", + -11.224482536315918 + ], + [ + "▁tissue", + -11.224940299987793 + ], + [ + "▁personne", + -11.225192070007324 + ], + [ + "ional", + -11.225587844848633 + ], + [ + "unable", + -11.2256498336792 + ], + [ + "▁Case", + -11.225736618041992 + ], + [ + "hill", + -11.225744247436523 + ], + [ + "och", + -11.225862503051758 + ], + [ + "▁minister", + -11.225920677185059 + ], + [ + "▁Rad", + -11.226285934448242 + ], + [ + "▁yoga", + -11.226390838623047 + ], + [ + "▁encounter", + -11.22661018371582 + ], + [ + "text", + -11.22670841217041 + ], + [ + "▁OS", + -11.226719856262207 + ], + [ + "▁opera", + -11.22673225402832 + ], + [ + "▁loving", + -11.226977348327637 + ], + [ + "▁birds", + -11.227363586425781 + ], + [ + "▁prim", + -11.227389335632324 + ], + [ + "easca", + -11.227432250976562 + ], + [ + "park", + -11.227453231811523 + ], + [ + "fü", + -11.227797508239746 + ], + [ + "▁champion", + -11.227824211120605 + ], + [ + "▁warning", + -11.228245735168457 + ], + [ + "DC", + -11.228271484375 + ], + [ + "▁yield", + -11.228310585021973 + ], + [ + "raum", + -11.228334426879883 + ], + [ + "▁Student", + -11.228434562683105 + ], + [ + "▁Rev", + -11.22848892211914 + ], + [ + "▁Fu", + -11.228501319885254 + ], + [ + "▁intra", + -11.22854232788086 + ], + [ + "▁proces", + -11.228585243225098 + ], + [ + "▁margin", + -11.228621482849121 + ], + [ + "lands", + -11.228816986083984 + ], + [ + "04", + -11.228952407836914 + ], + [ + "▁Steel", + -11.229897499084473 + ], + [ + "▁besoin", + -11.230081558227539 + ], + [ + "şti", + -11.230561256408691 + ], + [ + "▁39", + -11.230635643005371 + ], + [ + "▁outcomes", + -11.230677604675293 + ], + [ + "wert", + -11.230719566345215 + ], + [ + "3,", + -11.23080062866211 + ], + [ + "▁hole", + -11.230888366699219 + ], + [ + "▁Create", + -11.23096752166748 + ], + [ + "▁hall", + -11.231266975402832 + ], + [ + "nach", + -11.231595039367676 + ], + [ + "▁indicate", + -11.232311248779297 + 
], + [ + "cum", + -11.232604026794434 + ], + [ + "▁Mann", + -11.232690811157227 + ], + [ + "▁reaction", + -11.232828140258789 + ], + [ + "▁empty", + -11.23289680480957 + ], + [ + "▁Sign", + -11.232941627502441 + ], + [ + "▁pm", + -11.23300838470459 + ], + [ + "erung", + -11.23322582244873 + ], + [ + "▁würde", + -11.233592987060547 + ], + [ + "▁declarat", + -11.233602523803711 + ], + [ + "6%", + -11.23371410369873 + ], + [ + "▁Client", + -11.23377513885498 + ], + [ + "vil", + -11.234295845031738 + ], + [ + "▁electricity", + -11.234469413757324 + ], + [ + "▁75", + -11.234505653381348 + ], + [ + "▁buna", + -11.234505653381348 + ], + [ + "eşte", + -11.23473834991455 + ], + [ + "▁prop", + -11.234792709350586 + ], + [ + "▁journal", + -11.234883308410645 + ], + [ + "▁meu", + -11.23495101928711 + ], + [ + "▁chef", + -11.235034942626953 + ], + [ + "▁Ever", + -11.235102653503418 + ], + [ + "▁feelings", + -11.235466003417969 + ], + [ + "PT", + -11.23551082611084 + ], + [ + "▁proposal", + -11.235651969909668 + ], + [ + "▁Its", + -11.235709190368652 + ], + [ + "▁2013.", + -11.235795974731445 + ], + [ + "▁Bundes", + -11.23595142364502 + ], + [ + "▁droit", + -11.236333847045898 + ], + [ + "▁10%", + -11.236671447753906 + ], + [ + "gard", + -11.236772537231445 + ], + [ + "information", + -11.236814498901367 + ], + [ + "FE", + -11.237309455871582 + ], + [ + "▁Dun", + -11.237340927124023 + ], + [ + "▁Stock", + -11.237472534179688 + ], + [ + "ație", + -11.2374849319458 + ], + [ + "▁mag", + -11.237603187561035 + ], + [ + "▁br", + -11.237665176391602 + ], + [ + "▁sight", + -11.237772941589355 + ], + [ + "phone", + -11.237796783447266 + ], + [ + "▁Cy", + -11.237811088562012 + ], + [ + "▁opposite", + -11.238035202026367 + ], + [ + "ically", + -11.238235473632812 + ], + [ + "großen", + -11.238388061523438 + ], + [ + "▁Without", + -11.23845100402832 + ], + [ + "espace", + -11.238515853881836 + ], + [ + "▁chairs", + -11.238595008850098 + ], + [ + "▁matches", + -11.238685607910156 + ], + [ + "ateur", + -11.238697052001953 + ], + [ + "▁Cost", + -11.238699913024902 + ], + [ + "▁WordPress", + -11.238880157470703 + ], + [ + "▁Opera", + -11.239195823669434 + ], + [ + "walked", + -11.239234924316406 + ], + [ + "▁transactions", + -11.239521026611328 + ], + [ + "▁nuclear", + -11.239579200744629 + ], + [ + "ways", + -11.239594459533691 + ], + [ + "▁Oct", + -11.239738464355469 + ], + [ + "▁bomb", + -11.239835739135742 + ], + [ + "▁tracking", + -11.239879608154297 + ], + [ + "▁photograph", + -11.240066528320312 + ], + [ + "bio", + -11.240309715270996 + ], + [ + "▁branch", + -11.240363121032715 + ], + [ + "▁$5", + -11.240684509277344 + ], + [ + "▁diagram", + -11.240986824035645 + ], + [ + "▁Hard", + -11.241218566894531 + ], + [ + "bach", + -11.241232872009277 + ], + [ + "▁42", + -11.241249084472656 + ], + [ + "logy", + -11.241472244262695 + ], + [ + "▁tile", + -11.241593360900879 + ], + [ + "▁API", + -11.241833686828613 + ], + [ + "seront", + -11.24204158782959 + ], + [ + "ENT", + -11.242156982421875 + ], + [ + "▁accommodation", + -11.242409706115723 + ], + [ + "▁fiber", + -11.242438316345215 + ], + [ + "▁Give", + -11.242792129516602 + ], + [ + "▁Gas", + -11.242916107177734 + ], + [ + "▁Spain", + -11.243086814880371 + ], + [ + "▁listing", + -11.24312686920166 + ], + [ + "▁blocks", + -11.24349308013916 + ], + [ + "▁constitu", + -11.243762969970703 + ], + [ + "▁convenience", + -11.243797302246094 + ], + [ + "▁prize", + -11.243823051452637 + ], + [ + "▁aircraft", + -11.24404239654541 + ], + [ + "containing", + -11.244124412536621 + 
], + [ + "▁vice", + -11.244247436523438 + ], + [ + "▁organisations", + -11.244304656982422 + ], + [ + "▁complicated", + -11.244588851928711 + ], + [ + "rons", + -11.244647979736328 + ], + [ + "▁bars", + -11.244670867919922 + ], + [ + "était", + -11.244705200195312 + ], + [ + "▁checking", + -11.245287895202637 + ], + [ + "vant", + -11.245542526245117 + ], + [ + "▁couch", + -11.245657920837402 + ], + [ + "▁brush", + -11.245870590209961 + ], + [ + "▁printer", + -11.245922088623047 + ], + [ + "▁Rat", + -11.246051788330078 + ], + [ + "▁announce", + -11.246057510375977 + ], + [ + "▁salari", + -11.246200561523438 + ], + [ + "▁Sk", + -11.246356964111328 + ], + [ + "pal", + -11.246383666992188 + ], + [ + "▁yards", + -11.24658203125 + ], + [ + "▁flexibility", + -11.246652603149414 + ], + [ + "▁jamais", + -11.24670696258545 + ], + [ + "UC", + -11.246740341186523 + ], + [ + "▁4,", + -11.246793746948242 + ], + [ + "▁Made", + -11.247078895568848 + ], + [ + "▁solche", + -11.247113227844238 + ], + [ + "▁tri", + -11.247237205505371 + ], + [ + "▁outfit", + -11.247243881225586 + ], + [ + "м", + -11.247267723083496 + ], + [ + "▁encouraged", + -11.247477531433105 + ], + [ + "trac", + -11.247552871704102 + ], + [ + "▁genetic", + -11.24755859375 + ], + [ + "▁beneficial", + -11.247747421264648 + ], + [ + "mă", + -11.247849464416504 + ], + [ + "involving", + -11.247879028320312 + ], + [ + "▁knee", + -11.247879028320312 + ], + [ + "▁respective", + -11.248316764831543 + ], + [ + "▁controlled", + -11.248350143432617 + ], + [ + "▁Rück", + -11.24837589263916 + ], + [ + "LC", + -11.248592376708984 + ], + [ + "▁highlight", + -11.248634338378906 + ], + [ + "chem", + -11.248797416687012 + ], + [ + "▁Bis", + -11.24956226348877 + ], + [ + "▁graphics", + -11.249592781066895 + ], + [ + "▁posibil", + -11.249672889709473 + ], + [ + "orul", + -11.249682426452637 + ], + [ + "imagin", + -11.249836921691895 + ], + [ + "▁draft", + -11.250006675720215 + ], + [ + "shaped", + -11.250219345092773 + ], + [ + "▁suggests", + -11.250221252441406 + ], + [ + "uvre", + -11.250509262084961 + ], + [ + "page", + -11.250545501708984 + ], + [ + "▁sentiment", + -11.250685691833496 + ], + [ + "▁loop", + -11.251015663146973 + ], + [ + "▁Quality", + -11.251839637756348 + ], + [ + "▁volunteers", + -11.251869201660156 + ], + [ + "▁representation", + -11.251923561096191 + ], + [ + "▁examination", + -11.252134323120117 + ], + [ + "▁(2)", + -11.252225875854492 + ], + [ + "assi", + -11.252435684204102 + ], + [ + "▁till", + -11.252486228942871 + ], + [ + "▁Catholic", + -11.252618789672852 + ], + [ + "▁2020", + -11.252726554870605 + ], + [ + "▁random", + -11.252764701843262 + ], + [ + "tage", + -11.253146171569824 + ], + [ + "▁baking", + -11.253690719604492 + ], + [ + "▁Musik", + -11.253852844238281 + ], + [ + "▁SC", + -11.253867149353027 + ], + [ + "▁möchte", + -11.254390716552734 + ], + [ + "▁gene", + -11.254411697387695 + ], + [ + "▁kam", + -11.254928588867188 + ], + [ + "▁inspire", + -11.254974365234375 + ], + [ + "unk", + -11.255097389221191 + ], + [ + "▁Final", + -11.255477905273438 + ], + [ + "▁jeden", + -11.255497932434082 + ], + [ + "▁LLC", + -11.255962371826172 + ], + [ + "▁sistem", + -11.25613784790039 + ], + [ + "▁stages", + -11.256441116333008 + ], + [ + "▁texture", + -11.256613731384277 + ], + [ + "rib", + -11.256739616394043 + ], + [ + "lung", + -11.256782531738281 + ], + [ + "▁breath", + -11.256814002990723 + ], + [ + "▁hosted", + -11.256844520568848 + ], + [ + "▁Kingdom", + -11.257079124450684 + ], + [ + "▁politics", + -11.257121086120605 + ], 
+ [ + "▁mood", + -11.257122993469238 + ], + [ + "cam", + -11.257285118103027 + ], + [ + "▁liked", + -11.257287979125977 + ], + [ + "▁Credit", + -11.257304191589355 + ], + [ + "tisch", + -11.257527351379395 + ], + [ + "▁everywhere", + -11.257692337036133 + ], + [ + "▁poti", + -11.257915496826172 + ], + [ + "▁fruits", + -11.258264541625977 + ], + [ + "oire", + -11.258322715759277 + ], + [ + "▁mesure", + -11.258586883544922 + ], + [ + "▁Studies", + -11.258838653564453 + ], + [ + "▁provision", + -11.25888729095459 + ], + [ + "▁Maria", + -11.258927345275879 + ], + [ + "▁necessarily", + -11.259103775024414 + ], + [ + "▁Net", + -11.259212493896484 + ], + [ + "▁scar", + -11.259307861328125 + ], + [ + "▁tracks", + -11.259424209594727 + ], + [ + "▁ads", + -11.259856224060059 + ], + [ + "termin", + -11.259861946105957 + ], + [ + "▁Yo", + -11.26022720336914 + ], + [ + "atory", + -11.260252952575684 + ], + [ + "itoare", + -11.26025676727295 + ], + [ + "▁colours", + -11.260563850402832 + ], + [ + "▁correctly", + -11.260817527770996 + ], + [ + "▁Trade", + -11.26090145111084 + ], + [ + "▁Week", + -11.261052131652832 + ], + [ + "▁Premier", + -11.261499404907227 + ], + [ + "▁designers", + -11.261600494384766 + ], + [ + "▁BE", + -11.261879920959473 + ], + [ + "▁desktop", + -11.261929512023926 + ], + [ + "▁lifetime", + -11.262046813964844 + ], + [ + "▁Kind", + -11.26213264465332 + ], + [ + "▁divers", + -11.262246131896973 + ], + [ + "rain", + -11.262260437011719 + ], + [ + "▁Von", + -11.262263298034668 + ], + [ + "▁bal", + -11.262568473815918 + ], + [ + "▁shots", + -11.262624740600586 + ], + [ + "▁accommodate", + -11.262767791748047 + ], + [ + "▁Paper", + -11.263001441955566 + ], + [ + "▁interaction", + -11.263191223144531 + ], + [ + "▁acquisition", + -11.263233184814453 + ], + [ + "▁neuro", + -11.26378345489502 + ], + [ + "▁institution", + -11.26391887664795 + ], + [ + "▁automatic", + -11.26403522491455 + ], + [ + "▁assess", + -11.264177322387695 + ], + [ + "▁manifest", + -11.264199256896973 + ], + [ + "▁audit", + -11.264202117919922 + ], + [ + "▁câte", + -11.264406204223633 + ], + [ + "▁insight", + -11.264533996582031 + ], + [ + "▁lange", + -11.264781951904297 + ], + [ + "▁retirement", + -11.264795303344727 + ], + [ + "sons", + -11.264864921569824 + ], + [ + "▁Asian", + -11.26492691040039 + ], + [ + "▁rail", + -11.264978408813477 + ], + [ + "▁Awards", + -11.264982223510742 + ], + [ + "Avec", + -11.265035629272461 + ], + [ + "SO", + -11.26511287689209 + ], + [ + "para", + -11.265304565429688 + ], + [ + "▁tant", + -11.265562057495117 + ], + [ + "▁strike", + -11.265693664550781 + ], + [ + "▁transformation", + -11.265742301940918 + ], + [ + "▁leicht", + -11.26586627960205 + ], + [ + "л", + -11.265996932983398 + ], + [ + "fat", + -11.26629638671875 + ], + [ + "▁Qui", + -11.266626358032227 + ], + [ + "▁chip", + -11.26663589477539 + ], + [ + "titude", + -11.266640663146973 + ], + [ + "▁Projekt", + -11.266998291015625 + ], + [ + "▁statt", + -11.267010688781738 + ], + [ + "▁findet", + -11.267184257507324 + ], + [ + "▁telephone", + -11.267251968383789 + ], + [ + "▁staying", + -11.267267227172852 + ], + [ + "▁Mess", + -11.267353057861328 + ], + [ + "▁patio", + -11.267382621765137 + ], + [ + "▁afla", + -11.267890930175781 + ], + [ + "▁administrative", + -11.267910957336426 + ], + [ + "▁gemeinsam", + -11.268129348754883 + ], + [ + "▁suppliers", + -11.268136024475098 + ], + [ + "ark", + -11.268181800842285 + ], + [ + "▁rice", + -11.268397331237793 + ], + [ + "▁stretch", + -11.268439292907715 + ], + [ + "▁compact", + 
-11.268651008605957 + ], + [ + "fire", + -11.268756866455078 + ], + [ + "в", + -11.268963813781738 + ], + [ + "vision", + -11.269035339355469 + ], + [ + "▁Mag", + -11.269368171691895 + ], + [ + "▁dreams", + -11.269472122192383 + ], + [ + "▁funny", + -11.26968765258789 + ], + [ + "▁lässt", + -11.270216941833496 + ], + [ + "cade", + -11.270448684692383 + ], + [ + "▁drama", + -11.270484924316406 + ], + [ + "▁schimb", + -11.270767211914062 + ], + [ + "PO", + -11.270785331726074 + ], + [ + "▁Sim", + -11.270806312561035 + ], + [ + "▁motivation", + -11.271045684814453 + ], + [ + "▁presents", + -11.27138614654541 + ], + [ + "▁1997", + -11.271828651428223 + ], + [ + "agi", + -11.271883010864258 + ], + [ + "▁optimal", + -11.27198314666748 + ], + [ + "▁folder", + -11.271995544433594 + ], + [ + "stro", + -11.272034645080566 + ], + [ + "▁Han", + -11.272072792053223 + ], + [ + "▁Ei", + -11.27220344543457 + ], + [ + "▁pus", + -11.272356986999512 + ], + [ + "▁Learning", + -11.272531509399414 + ], + [ + "oop", + -11.272603034973145 + ], + [ + "▁Type", + -11.272658348083496 + ], + [ + "space", + -11.272665023803711 + ], + [ + "▁define", + -11.273098945617676 + ], + [ + "▁plug", + -11.273098945617676 + ], + [ + "yard", + -11.273188591003418 + ], + [ + "▁utility", + -11.273297309875488 + ], + [ + "über", + -11.273561477661133 + ], + [ + "▁commun", + -11.273627281188965 + ], + [ + "▁directed", + -11.273842811584473 + ], + [ + "▁consent", + -11.273893356323242 + ], + [ + "▁DNA", + -11.274068832397461 + ], + [ + "▁statements", + -11.274130821228027 + ], + [ + "real", + -11.274298667907715 + ], + [ + "active", + -11.274430274963379 + ], + [ + "school", + -11.274965286254883 + ], + [ + "▁mic", + -11.275360107421875 + ], + [ + "▁acestui", + -11.275467872619629 + ], + [ + "scale", + -11.27550220489502 + ], + [ + "▁Mid", + -11.275628089904785 + ], + [ + "▁Chair", + -11.275874137878418 + ], + [ + "к", + -11.275936126708984 + ], + [ + "▁Bas", + -11.27630615234375 + ], + [ + "▁38", + -11.276379585266113 + ], + [ + "erin", + -11.276461601257324 + ], + [ + "▁Everyone", + -11.27686882019043 + ], + [ + "COM", + -11.276907920837402 + ], + [ + "▁chronic", + -11.277079582214355 + ], + [ + "▁doctors", + -11.277222633361816 + ], + [ + "▁sh", + -11.277276039123535 + ], + [ + "sport", + -11.27740478515625 + ], + [ + "▁volunteer", + -11.277512550354004 + ], + [ + "▁drinking", + -11.277839660644531 + ], + [ + "▁Mas", + -11.277868270874023 + ], + [ + "▁pursue", + -11.2780122756958 + ], + [ + "▁exposed", + -11.278536796569824 + ], + [ + "exe", + -11.278660774230957 + ], + [ + "hung", + -11.278841972351074 + ], + [ + "▁Tier", + -11.278921127319336 + ], + [ + "▁plac", + -11.279121398925781 + ], + [ + "▁proiect", + -11.279136657714844 + ], + [ + "▁literally", + -11.279288291931152 + ], + [ + "▁acolo", + -11.279412269592285 + ], + [ + "▁User", + -11.279485702514648 + ], + [ + "UT", + -11.279598236083984 + ], + [ + "▁hyper", + -11.279623985290527 + ], + [ + "▁seed", + -11.279794692993164 + ], + [ + "▁literature", + -11.2802734375 + ], + [ + "▁Holy", + -11.280373573303223 + ], + [ + "▁jeu", + -11.280396461486816 + ], + [ + "▁licensed", + -11.280896186828613 + ], + [ + "station", + -11.280900955200195 + ], + [ + "▁criteria", + -11.281292915344238 + ], + [ + "▁sufficient", + -11.281292915344238 + ], + [ + "▁gestion", + -11.281512260437012 + ], + [ + "▁pic", + -11.281549453735352 + ], + [ + "▁64", + -11.28170108795166 + ], + [ + "▁facts", + -11.281905174255371 + ], + [ + "▁Bild", + -11.282098770141602 + ], + [ + "obi", + -11.28212833404541 + 
], + [ + "▁nie", + -11.282362937927246 + ], + [ + "▁Jewish", + -11.282756805419922 + ], + [ + "bor", + -11.28281307220459 + ], + [ + "▁1980", + -11.28286361694336 + ], + [ + "▁Fach", + -11.282917976379395 + ], + [ + "craft", + -11.283047676086426 + ], + [ + "▁Pakistan", + -11.283408164978027 + ], + [ + "▁Mos", + -11.283621788024902 + ], + [ + "▁toilet", + -11.283844947814941 + ], + [ + "partea", + -11.28391170501709 + ], + [ + "case", + -11.284221649169922 + ], + [ + "▁clock", + -11.28430461883545 + ], + [ + "▁parc", + -11.284602165222168 + ], + [ + "▁legislation", + -11.284692764282227 + ], + [ + "▁icon", + -11.284933090209961 + ], + [ + "etz", + -11.285178184509277 + ], + [ + "ept", + -11.285270690917969 + ], + [ + "▁Corporation", + -11.28585433959961 + ], + [ + "▁requested", + -11.285983085632324 + ], + [ + "▁column", + -11.286088943481445 + ], + [ + "rier", + -11.286120414733887 + ], + [ + "uß", + -11.2861967086792 + ], + [ + "▁wohl", + -11.286418914794922 + ], + [ + "tell", + -11.286569595336914 + ], + [ + "gno", + -11.286608695983887 + ], + [ + "▁diseases", + -11.286726951599121 + ], + [ + "Sch", + -11.286762237548828 + ], + [ + "▁colon", + -11.287075996398926 + ], + [ + "▁Based", + -11.28709602355957 + ], + [ + "▁flu", + -11.28725528717041 + ], + [ + "▁vocal", + -11.287408828735352 + ], + [ + "▁virus", + -11.287693977355957 + ], + [ + "▁traveling", + -11.287750244140625 + ], + [ + "bul", + -11.287837982177734 + ], + [ + "т", + -11.28794002532959 + ], + [ + "city", + -11.287961959838867 + ], + [ + "AU", + -11.287991523742676 + ], + [ + "wide", + -11.288037300109863 + ], + [ + "▁solo", + -11.288061141967773 + ], + [ + "▁functionality", + -11.288214683532715 + ], + [ + "▁reveal", + -11.28831672668457 + ], + [ + "sign", + -11.288952827453613 + ], + [ + "▁closing", + -11.288971900939941 + ], + [ + "▁peak", + -11.289087295532227 + ], + [ + "▁practic", + -11.289398193359375 + ], + [ + "than", + -11.289473533630371 + ], + [ + "▁driven", + -11.289484977722168 + ], + [ + "êtes", + -11.289548873901367 + ], + [ + "high", + -11.290016174316406 + ], + [ + "power", + -11.290226936340332 + ], + [ + "▁Lin", + -11.29028606414795 + ], + [ + "▁dose", + -11.29034423828125 + ], + [ + "▁pocket", + -11.290650367736816 + ], + [ + "▁Classic", + -11.29067611694336 + ], + [ + "▁packaging", + -11.290792465209961 + ], + [ + "▁distinct", + -11.290800094604492 + ], + [ + "▁côté", + -11.291094779968262 + ], + [ + "▁breast", + -11.29127025604248 + ], + [ + "▁folosit", + -11.29133129119873 + ], + [ + "▁drinks", + -11.291353225708008 + ], + [ + "▁Dog", + -11.291529655456543 + ], + [ + "ailleurs", + -11.291658401489258 + ], + [ + "▁caz", + -11.291804313659668 + ], + [ + "▁escape", + -11.29188346862793 + ], + [ + "▁warranty", + -11.291902542114258 + ], + [ + "▁pulled", + -11.291996955871582 + ], + [ + "data", + -11.292088508605957 + ], + [ + "▁facilitate", + -11.292213439941406 + ], + [ + "É", + -11.292335510253906 + ], + [ + "▁SP", + -11.292403221130371 + ], + [ + "lant", + -11.292557716369629 + ], + [ + "AD", + -11.29256534576416 + ], + [ + "▁Print", + -11.292802810668945 + ], + [ + "mond", + -11.292863845825195 + ], + [ + "▁strange", + -11.292875289916992 + ], + [ + "▁Hor", + -11.293227195739746 + ], + [ + "▁Collection", + -11.293328285217285 + ], + [ + "arm", + -11.29346752166748 + ], + [ + "cas", + -11.293691635131836 + ], + [ + "arrow", + -11.29379940032959 + ], + [ + "▁carrying", + -11.293927192687988 + ], + [ + "▁wave", + -11.294661521911621 + ], + [ + "setzt", + -11.294907569885254 + ], + [ + "▁construct", + 
-11.29514217376709 + ], + [ + "▁acts", + -11.295269966125488 + ], + [ + "▁Action", + -11.295342445373535 + ], + [ + "▁Kim", + -11.295354843139648 + ], + [ + "oxid", + -11.295459747314453 + ], + [ + "fish", + -11.295519828796387 + ], + [ + "▁damaged", + -11.295660018920898 + ], + [ + "▁Greek", + -11.295747756958008 + ], + [ + "▁belt", + -11.295772552490234 + ], + [ + "▁Prior", + -11.295778274536133 + ], + [ + "▁marks", + -11.295936584472656 + ], + [ + "▁lumea", + -11.296183586120605 + ], + [ + "▁twenty", + -11.296196937561035 + ], + [ + "▁locul", + -11.296360969543457 + ], + [ + "▁Army", + -11.296524047851562 + ], + [ + "apt", + -11.296602249145508 + ], + [ + "▁limits", + -11.296733856201172 + ], + [ + "▁cruise", + -11.296966552734375 + ], + [ + "▁List", + -11.296998023986816 + ], + [ + "utilisation", + -11.29753589630127 + ], + [ + "▁personality", + -11.297622680664062 + ], + [ + "▁sections", + -11.297759056091309 + ], + [ + "▁drawn", + -11.29797649383545 + ], + [ + "▁mold", + -11.298277854919434 + ], + [ + "▁Think", + -11.298333168029785 + ], + [ + "▁holidays", + -11.298355102539062 + ], + [ + "▁critic", + -11.298545837402344 + ], + [ + "grade", + -11.298660278320312 + ], + [ + "▁sick", + -11.299074172973633 + ], + [ + "▁characteristics", + -11.299237251281738 + ], + [ + "▁echipa", + -11.299272537231445 + ], + [ + "▁Fast", + -11.29929256439209 + ], + [ + "▁Br", + -11.299600601196289 + ], + [ + "▁Reise", + -11.299734115600586 + ], + [ + "teen", + -11.299749374389648 + ], + [ + "uci", + -11.299949645996094 + ], + [ + "!”", + -11.300180435180664 + ], + [ + "ppe", + -11.300532341003418 + ], + [ + "▁talked", + -11.301164627075195 + ], + [ + "▁gap", + -11.301473617553711 + ], + [ + "homme", + -11.301778793334961 + ], + [ + "▁interact", + -11.301934242248535 + ], + [ + "▁dollar", + -11.302276611328125 + ], + [ + "▁bone", + -11.302309036254883 + ], + [ + "▁Einsatz", + -11.302343368530273 + ], + [ + "▁sad", + -11.302434921264648 + ], + [ + "any", + -11.302445411682129 + ], + [ + "tation", + -11.302666664123535 + ], + [ + "▁Haupt", + -11.302748680114746 + ], + [ + "iva", + -11.302781105041504 + ], + [ + "▁Schu", + -11.302916526794434 + ], + [ + "▁evaluate", + -11.3036470413208 + ], + [ + "▁variant", + -11.303807258605957 + ], + [ + "▁IS", + -11.303879737854004 + ], + [ + "▁PRO", + -11.303947448730469 + ], + [ + "▁vine", + -11.303959846496582 + ], + [ + "rut", + -11.304062843322754 + ], + [ + "▁existence", + -11.30443286895752 + ], + [ + "-7", + -11.304525375366211 + ], + [ + "ancy", + -11.304702758789062 + ], + [ + "▁Want", + -11.305023193359375 + ], + [ + "alism", + -11.305127143859863 + ], + [ + "ranging", + -11.30550765991211 + ], + [ + "preis", + -11.305551528930664 + ], + [ + "All", + -11.305620193481445 + ], + [ + "▁reception", + -11.30565071105957 + ], + [ + "mai", + -11.305730819702148 + ], + [ + "▁lease", + -11.30577278137207 + ], + [ + "▁finest", + -11.30578899383545 + ], + [ + "▁evident", + -11.305874824523926 + ], + [ + "▁Easy", + -11.306075096130371 + ], + [ + "▁gilt", + -11.306085586547852 + ], + [ + "▁trips", + -11.306344985961914 + ], + [ + "▁skilled", + -11.306368827819824 + ], + [ + "consists", + -11.306456565856934 + ], + [ + "front", + -11.306635856628418 + ], + [ + "rati", + -11.306652069091797 + ], + [ + "▁Following", + -11.30678653717041 + ], + [ + "▁Medicine", + -11.307161331176758 + ], + [ + "▁pune", + -11.30729866027832 + ], + [ + "▁errors", + -11.307354927062988 + ], + [ + "arian", + -11.307613372802734 + ], + [ + "lib", + -11.30811882019043 + ], + [ + "SR", + 
-11.308351516723633 + ], + [ + "ML", + -11.308568000793457 + ], + [ + "▁Safety", + -11.308823585510254 + ], + [ + "▁clar", + -11.309355735778809 + ], + [ + "New", + -11.309764862060547 + ], + [ + "▁37", + -11.309773445129395 + ], + [ + "▁Administration", + -11.309823036193848 + ], + [ + "▁2.0", + -11.310120582580566 + ], + [ + "▁obviously", + -11.310196876525879 + ], + [ + "▁Mitarbeiter", + -11.310254096984863 + ], + [ + "▁improvements", + -11.31043529510498 + ], + [ + "▁Cut", + -11.310630798339844 + ], + [ + "▁Natural", + -11.310672760009766 + ], + [ + "▁arrival", + -11.311182975769043 + ], + [ + "▁pizza", + -11.311339378356934 + ], + [ + "eşti", + -11.311570167541504 + ], + [ + "cept", + -11.311654090881348 + ], + [ + "▁livre", + -11.311686515808105 + ], + [ + "▁nombreux", + -11.312195777893066 + ], + [ + "▁authentic", + -11.312231063842773 + ], + [ + "▁gemacht", + -11.312472343444824 + ], + [ + "▁broadcast", + -11.312478065490723 + ], + [ + "▁stronger", + -11.312545776367188 + ], + [ + "UP", + -11.31257152557373 + ], + [ + "▁centers", + -11.312614440917969 + ], + [ + "▁petite", + -11.312617301940918 + ], + [ + "▁spots", + -11.312626838684082 + ], + [ + "▁crystal", + -11.312756538391113 + ], + [ + "▁salon", + -11.313044548034668 + ], + [ + "▁gained", + -11.313098907470703 + ], + [ + "▁Mus", + -11.313215255737305 + ], + [ + "▁lens", + -11.313223838806152 + ], + [ + "▁ihm", + -11.313231468200684 + ], + [ + "minute", + -11.313573837280273 + ], + [ + "▁greatly", + -11.313587188720703 + ], + [ + "LP", + -11.31361198425293 + ], + [ + "rait", + -11.314027786254883 + ], + [ + "▁bid", + -11.314154624938965 + ], + [ + "▁cit", + -11.314203262329102 + ], + [ + "entreprise", + -11.31435775756836 + ], + [ + "▁55", + -11.314533233642578 + ], + [ + "▁respectively", + -11.314536094665527 + ], + [ + "▁lo", + -11.314638137817383 + ], + [ + "▁cons", + -11.314743995666504 + ], + [ + "▁Energie", + -11.315169334411621 + ], + [ + "▁OK", + -11.31521224975586 + ], + [ + "▁grill", + -11.315338134765625 + ], + [ + "▁heading", + -11.31549072265625 + ], + [ + "▁sollten", + -11.315491676330566 + ], + [ + "▁Fragen", + -11.315528869628906 + ], + [ + "▁Poli", + -11.315556526184082 + ], + [ + "▁studying", + -11.315723419189453 + ], + [ + "▁développement", + -11.315882682800293 + ], + [ + "▁foam", + -11.316035270690918 + ], + [ + "▁1996", + -11.316511154174805 + ], + [ + "▁disaster", + -11.31662654876709 + ], + [ + "▁cafe", + -11.317262649536133 + ], + [ + "▁moves", + -11.317267417907715 + ], + [ + "focuses", + -11.317712783813477 + ], + [ + "▁Avenue", + -11.317834854125977 + ], + [ + "▁humans", + -11.31784439086914 + ], + [ + "▁(3", + -11.318021774291992 + ], + [ + "▁région", + -11.318347930908203 + ], + [ + "▁DJ", + -11.318608283996582 + ], + [ + "shop", + -11.318819046020508 + ], + [ + "▁acting", + -11.318843841552734 + ], + [ + "▁Justice", + -11.318967819213867 + ], + [ + "▁trouve", + -11.319010734558105 + ], + [ + "▁Estate", + -11.319040298461914 + ], + [ + "▁strict", + -11.319231986999512 + ], + [ + "▁talks", + -11.319283485412598 + ], + [ + "▁mat", + -11.319290161132812 + ], + [ + "▁completion", + -11.319327354431152 + ], + [ + "delivering", + -11.31943416595459 + ], + [ + "CD", + -11.31973934173584 + ], + [ + "0%", + -11.319960594177246 + ], + [ + "▁creativity", + -11.320253372192383 + ], + [ + "BR", + -11.320272445678711 + ], + [ + "▁occurred", + -11.320357322692871 + ], + [ + "Car", + -11.320590019226074 + ], + [ + "▁rising", + -11.320761680603027 + ], + [ + "gger", + -11.32086181640625 + ], + [ + "▁Gene", + 
-11.320901870727539 + ], + [ + "▁workplace", + -11.320914268493652 + ], + [ + "phy", + -11.321065902709961 + ], + [ + "▁Bla", + -11.32107162475586 + ], + [ + "▁trailer", + -11.32120418548584 + ], + [ + "▁Forest", + -11.321205139160156 + ], + [ + "▁profession", + -11.321246147155762 + ], + [ + "▁Father", + -11.32137680053711 + ], + [ + "flu", + -11.321487426757812 + ], + [ + "tone", + -11.321489334106445 + ], + [ + "▁sexual", + -11.321736335754395 + ], + [ + "▁Map", + -11.321805953979492 + ], + [ + "OT", + -11.3218412399292 + ], + [ + "▁Us", + -11.321878433227539 + ], + [ + "tôt", + -11.321892738342285 + ], + [ + "▁Wert", + -11.321901321411133 + ], + [ + "preparing", + -11.322121620178223 + ], + [ + "isé", + -11.322243690490723 + ], + [ + "▁lake", + -11.322461128234863 + ], + [ + "eed", + -11.32270336151123 + ], + [ + "jun", + -11.322888374328613 + ], + [ + "▁implemented", + -11.323014259338379 + ], + [ + "vid", + -11.323116302490234 + ], + [ + "igne", + -11.323201179504395 + ], + [ + "▁follows", + -11.323214530944824 + ], + [ + "▁Eric", + -11.323430061340332 + ], + [ + "body", + -11.323530197143555 + ], + [ + "▁contained", + -11.323585510253906 + ], + [ + "▁massage", + -11.323715209960938 + ], + [ + "AV", + -11.323725700378418 + ], + [ + "▁insa", + -11.323850631713867 + ], + [ + "▁observed", + -11.323892593383789 + ], + [ + "▁marque", + -11.324137687683105 + ], + [ + "lines", + -11.324451446533203 + ], + [ + "▁Frage", + -11.324482917785645 + ], + [ + "largely", + -11.324647903442383 + ], + [ + "gegeben", + -11.32473087310791 + ], + [ + "▁colleagues", + -11.324762344360352 + ], + [ + "pha", + -11.32494068145752 + ], + [ + "▁representative", + -11.325217247009277 + ], + [ + "▁shut", + -11.325650215148926 + ], + [ + "▁secondary", + -11.325779914855957 + ], + [ + "▁exhibit", + -11.325927734375 + ], + [ + "1)", + -11.325932502746582 + ], + [ + "mid", + -11.326109886169434 + ], + [ + "▁Due", + -11.326229095458984 + ], + [ + "▁initiatives", + -11.326457023620605 + ], + [ + "▁occurs", + -11.326458930969238 + ], + [ + "lent", + -11.326478958129883 + ], + [ + "▁façon", + -11.326778411865234 + ], + [ + "▁iOS", + -11.326803207397461 + ], + [ + "▁exploring", + -11.327000617980957 + ], + [ + "▁stations", + -11.327103614807129 + ], + [ + "nton", + -11.327234268188477 + ], + [ + "▁Country", + -11.32729721069336 + ], + [ + "▁shouldn", + -11.327406883239746 + ], + [ + "▁casual", + -11.327611923217773 + ], + [ + "-18", + -11.32769775390625 + ], + [ + "▁maintained", + -11.32772445678711 + ], + [ + "▁cart", + -11.327790260314941 + ], + [ + "▁propre", + -11.327836036682129 + ], + [ + "▁asset", + -11.327948570251465 + ], + [ + "firm", + -11.32803726196289 + ], + [ + "gla", + -11.328231811523438 + ], + [ + "viv", + -11.3282470703125 + ], + [ + "▁scientists", + -11.328873634338379 + ], + [ + "▁Nor", + -11.328936576843262 + ], + [ + "ites", + -11.329320907592773 + ], + [ + "▁engaging", + -11.329933166503906 + ], + [ + "My", + -11.330178260803223 + ], + [ + "▁workshops", + -11.330282211303711 + ], + [ + "ffer", + -11.3303804397583 + ], + [ + "activité", + -11.33047103881836 + ], + [ + "▁tension", + -11.330567359924316 + ], + [ + "▁dual", + -11.330668449401855 + ], + [ + "uer", + -11.33084774017334 + ], + [ + "900", + -11.330941200256348 + ], + [ + "SF", + -11.33108139038086 + ], + [ + "▁kannst", + -11.331146240234375 + ], + [ + "▁bur", + -11.33115291595459 + ], + [ + "▁visitor", + -11.331156730651855 + ], + [ + "▁granted", + -11.331178665161133 + ], + [ + "▁union", + -11.331355094909668 + ], + [ + "▁tablet", + 
-11.331461906433105 + ], + [ + "▁Choose", + -11.33146858215332 + ], + [ + "ibil", + -11.331551551818848 + ], + [ + "▁settlement", + -11.331830978393555 + ], + [ + "genommen", + -11.331892967224121 + ], + [ + "▁marked", + -11.332956314086914 + ], + [ + "▁diagnostic", + -11.333370208740234 + ], + [ + "▁prayer", + -11.333529472351074 + ], + [ + "▁Toronto", + -11.334035873413086 + ], + [ + "trans", + -11.334146499633789 + ], + [ + "▁respectiv", + -11.334160804748535 + ], + [ + "▁2012.", + -11.334207534790039 + ], + [ + "icul", + -11.334394454956055 + ], + [ + "▁satisfied", + -11.334527969360352 + ], + [ + "▁Fla", + -11.334596633911133 + ], + [ + "▁estimate", + -11.334638595581055 + ], + [ + "▁Agency", + -11.33466911315918 + ], + [ + "OD", + -11.334708213806152 + ], + [ + "▁McC", + -11.334746360778809 + ], + [ + "bert", + -11.334748268127441 + ], + [ + "▁seal", + -11.334771156311035 + ], + [ + "aine", + -11.334839820861816 + ], + [ + "▁cauza", + -11.334848403930664 + ], + [ + "▁wallpaper", + -11.335081100463867 + ], + [ + "▁alb", + -11.33536434173584 + ], + [ + "▁Sound", + -11.335681915283203 + ], + [ + "worth", + -11.33572769165039 + ], + [ + "chten", + -11.335858345031738 + ], + [ + "programm", + -11.335896492004395 + ], + [ + "▁pounds", + -11.336215019226074 + ], + [ + "▁coaching", + -11.336278915405273 + ], + [ + "▁Furthermore", + -11.336454391479492 + ], + [ + "▁Korea", + -11.336471557617188 + ], + [ + "▁flour", + -11.336530685424805 + ], + [ + "▁sommes", + -11.33657169342041 + ], + [ + "▁Repair", + -11.33661937713623 + ], + [ + "”)", + -11.336642265319824 + ], + [ + "itch", + -11.336675643920898 + ], + [ + "blu", + -11.336786270141602 + ], + [ + "zar", + -11.336882591247559 + ], + [ + "▁diferite", + -11.33745002746582 + ], + [ + "▁Golf", + -11.337685585021973 + ], + [ + "arch", + -11.33772087097168 + ], + [ + "▁panels", + -11.337799072265625 + ], + [ + "jan", + -11.337956428527832 + ], + [ + "“.", + -11.338240623474121 + ], + [ + "izarea", + -11.338324546813965 + ], + [ + "▁golden", + -11.33854866027832 + ], + [ + "▁flying", + -11.338550567626953 + ], + [ + "▁museum", + -11.338700294494629 + ], + [ + "▁equivalent", + -11.338759422302246 + ], + [ + "▁Lang", + -11.339032173156738 + ], + [ + "schi", + -11.339539527893066 + ], + [ + "MI", + -11.339595794677734 + ], + [ + "▁faci", + -11.339838027954102 + ], + [ + "▁Rahmen", + -11.339988708496094 + ], + [ + "▁attending", + -11.340130805969238 + ], + [ + "′′", + -11.340483665466309 + ], + [ + "▁Tro", + -11.341070175170898 + ], + [ + "▁gaming", + -11.341447830200195 + ], + [ + "▁aujourd", + -11.341479301452637 + ], + [ + "▁Wochen", + -11.341526985168457 + ], + [ + "▁entering", + -11.341535568237305 + ], + [ + "its", + -11.34155559539795 + ], + [ + "▁Private", + -11.341866493225098 + ], + [ + "▁Ocean", + -11.34188175201416 + ], + [ + "▁01", + -11.342098236083984 + ], + [ + "▁coloring", + -11.342188835144043 + ], + [ + "ător", + -11.34253215789795 + ], + [ + "▁flooring", + -11.342548370361328 + ], + [ + "▁downtown", + -11.34276294708252 + ], + [ + "rab", + -11.342998504638672 + ], + [ + "HI", + -11.343221664428711 + ], + [ + "▁illness", + -11.343234062194824 + ], + [ + "▁whil", + -11.343307495117188 + ], + [ + "▁diamond", + -11.34333324432373 + ], + [ + "Mail", + -11.343419075012207 + ], + [ + "▁Dream", + -11.34344482421875 + ], + [ + "▁Golden", + -11.344099044799805 + ], + [ + "▁rein", + -11.344220161437988 + ], + [ + "▁hi", + -11.344283103942871 + ], + [ + "▁expressed", + -11.344489097595215 + ], + [ + "▁luat", + -11.344511985778809 + ], + [ + 
"▁Share", + -11.34453010559082 + ], + [ + "▁Programm", + -11.344706535339355 + ], + [ + "▁Sales", + -11.344707489013672 + ], + [ + "▁prof", + -11.344890594482422 + ], + [ + "▁MO", + -11.34505844116211 + ], + [ + "▁Short", + -11.345088958740234 + ], + [ + "▁charm", + -11.345290184020996 + ], + [ + "▁Cer", + -11.345373153686523 + ], + [ + "▁Run", + -11.34553337097168 + ], + [ + "▁tutorial", + -11.345589637756348 + ], + [ + "oul", + -11.34561824798584 + ], + [ + "▁Fest", + -11.345794677734375 + ], + [ + "▁uniform", + -11.345929145812988 + ], + [ + "aß", + -11.346014976501465 + ], + [ + "▁pipe", + -11.346076965332031 + ], + [ + "▁Square", + -11.346283912658691 + ], + [ + "▁Kosten", + -11.346365928649902 + ], + [ + "▁checked", + -11.346590042114258 + ], + [ + "▁65", + -11.346626281738281 + ], + [ + "▁Adam", + -11.346686363220215 + ], + [ + "cel", + -11.346700668334961 + ], + [ + "ello", + -11.346965789794922 + ], + [ + "▁Res", + -11.347023963928223 + ], + [ + "▁drain", + -11.34708309173584 + ], + [ + "ză", + -11.347129821777344 + ], + [ + "▁Tech", + -11.34739875793457 + ], + [ + "▁strive", + -11.34749698638916 + ], + [ + "cycl", + -11.347506523132324 + ], + [ + "▁stark", + -11.347541809082031 + ], + [ + "load", + -11.34754753112793 + ], + [ + "▁Stat", + -11.347589492797852 + ], + [ + "▁Rec", + -11.347622871398926 + ], + [ + "ians", + -11.347716331481934 + ], + [ + "▁Tin", + -11.347738265991211 + ], + [ + "▁Agreement", + -11.347840309143066 + ], + [ + "▁pret", + -11.348027229309082 + ], + [ + "-9", + -11.348326683044434 + ], + [ + "▁sentence", + -11.348380088806152 + ], + [ + "▁Direct", + -11.348426818847656 + ], + [ + "▁Rep", + -11.348465919494629 + ], + [ + "▁Prozent", + -11.348799705505371 + ], + [ + "▁invitation", + -11.34882640838623 + ], + [ + "▁refund", + -11.349113464355469 + ], + [ + "▁Kids", + -11.349287986755371 + ], + [ + "stock", + -11.349383354187012 + ], + [ + "TP", + -11.349400520324707 + ], + [ + "▁tau", + -11.34941291809082 + ], + [ + "from", + -11.349421501159668 + ], + [ + "▁Ash", + -11.349451065063477 + ], + [ + "store", + -11.349535942077637 + ], + [ + "▁Common", + -11.34958553314209 + ], + [ + "▁Qualität", + -11.34968376159668 + ], + [ + "▁strongly", + -11.349727630615234 + ], + [ + "▁importante", + -11.34979248046875 + ], + [ + "ome", + -11.349912643432617 + ], + [ + "▁surtout", + -11.349946022033691 + ], + [ + "enables", + -11.35020637512207 + ], + [ + "▁decent", + -11.350221633911133 + ], + [ + "▁neutral", + -11.350237846374512 + ], + [ + "▁produs", + -11.350356101989746 + ], + [ + "bury", + -11.350451469421387 + ], + [ + "▁Level", + -11.350618362426758 + ], + [ + "▁interes", + -11.350699424743652 + ], + [ + "mov", + -11.350797653198242 + ], + [ + "▁backup", + -11.350939750671387 + ], + [ + "même", + -11.351094245910645 + ], + [ + "doc", + -11.351119041442871 + ], + [ + "▁#1", + -11.35130786895752 + ], + [ + "▁specified", + -11.351495742797852 + ], + [ + "▁founder", + -11.351655960083008 + ], + [ + "And", + -11.352090835571289 + ], + [ + "isten", + -11.352149963378906 + ], + [ + "▁lecture", + -11.352729797363281 + ], + [ + "▁wake", + -11.352895736694336 + ], + [ + "▁vraiment", + -11.352980613708496 + ], + [ + "▁swing", + -11.353188514709473 + ], + [ + "▁addresses", + -11.353275299072266 + ], + [ + "▁Verfügung", + -11.353504180908203 + ], + [ + "▁deadline", + -11.353761672973633 + ], + [ + "н", + -11.353791236877441 + ], + [ + "▁Content", + -11.353970527648926 + ], + [ + "▁Gre", + -11.354111671447754 + ], + [ + "▁Experience", + -11.354378700256348 + ], + [ + "tura", + 
-11.354458808898926 + ], + [ + "▁exit", + -11.354642868041992 + ], + [ + "▁Britain", + -11.354652404785156 + ], + [ + "▁Sunt", + -11.354684829711914 + ], + [ + "▁documentation", + -11.354690551757812 + ], + [ + "▁showcase", + -11.3547945022583 + ], + [ + "▁photographs", + -11.354822158813477 + ], + [ + "qué", + -11.35483169555664 + ], + [ + "zin", + -11.354909896850586 + ], + [ + "pres", + -11.354933738708496 + ], + [ + "▁decline", + -11.354955673217773 + ], + [ + "▁Large", + -11.355030059814453 + ], + [ + "▁bills", + -11.355141639709473 + ], + [ + "▁entitled", + -11.355222702026367 + ], + [ + "▁passionate", + -11.355393409729004 + ], + [ + "▁workout", + -11.355413436889648 + ], + [ + "▁Again", + -11.35560417175293 + ], + [ + "▁Haut", + -11.35582160949707 + ], + [ + "▁guaranteed", + -11.35599136352539 + ], + [ + "▁vue", + -11.35600471496582 + ], + [ + "▁farmers", + -11.356224060058594 + ], + [ + "▁admission", + -11.356500625610352 + ], + [ + "▁manière", + -11.357080459594727 + ], + [ + "▁reverse", + -11.357121467590332 + ], + [ + "▁FL", + -11.357142448425293 + ], + [ + "▁terminal", + -11.357206344604492 + ], + [ + "GI", + -11.35731029510498 + ], + [ + "▁speakers", + -11.35739803314209 + ], + [ + "▁responses", + -11.357398986816406 + ], + [ + "▁Doch", + -11.357457160949707 + ], + [ + "▁2013,", + -11.357717514038086 + ], + [ + "▁phones", + -11.357789993286133 + ], + [ + "ential", + -11.357851028442383 + ], + [ + "▁operator", + -11.357916831970215 + ], + [ + "▁steam", + -11.358036994934082 + ], + [ + "burn", + -11.358091354370117 + ], + [ + "▁seul", + -11.35815715789795 + ], + [ + "▁unusual", + -11.358322143554688 + ], + [ + "▁educate", + -11.358403205871582 + ], + [ + "▁Que", + -11.358680725097656 + ], + [ + "▁believes", + -11.359137535095215 + ], + [ + "▁succeed", + -11.359344482421875 + ], + [ + "▁delay", + -11.359533309936523 + ], + [ + "▁deeper", + -11.359633445739746 + ], + [ + "▁reaching", + -11.359890937805176 + ], + [ + "▁objectives", + -11.360086441040039 + ], + [ + "▁temporary", + -11.36028003692627 + ], + [ + "▁artistic", + -11.360421180725098 + ], + [ + "▁sou", + -11.360471725463867 + ], + [ + "▁transparent", + -11.36062240600586 + ], + [ + "There", + -11.360798835754395 + ], + [ + "ception", + -11.360836029052734 + ], + [ + "▁excess", + -11.360939979553223 + ], + [ + "▁gathering", + -11.361008644104004 + ], + [ + "▁Save", + -11.361095428466797 + ], + [ + "ază", + -11.361166000366211 + ], + [ + "▁français", + -11.361197471618652 + ], + [ + "▁laid", + -11.361210823059082 + ], + [ + "▁modul", + -11.361394882202148 + ], + [ + "avoir", + -11.361465454101562 + ], + [ + "under", + -11.362113952636719 + ], + [ + "dding", + -11.362226486206055 + ], + [ + "▁falls", + -11.362232208251953 + ], + [ + "▁Möglichkeit", + -11.362369537353516 + ], + [ + "▁ceremony", + -11.362370491027832 + ], + [ + "rai", + -11.36237621307373 + ], + [ + "▁Bor", + -11.362709045410156 + ], + [ + "▁Below", + -11.362750053405762 + ], + [ + "4)", + -11.362759590148926 + ], + [ + "▁Field", + -11.362833023071289 + ], + [ + "wear", + -11.362935066223145 + ], + [ + "motion", + -11.362948417663574 + ], + [ + "print", + -11.363311767578125 + ], + [ + "game", + -11.363360404968262 + ], + [ + "▁Irish", + -11.363458633422852 + ], + [ + "▁Las", + -11.363458633422852 + ], + [ + "Among", + -11.363570213317871 + ], + [ + "atori", + -11.363580703735352 + ], + [ + "▁ajuns", + -11.363837242126465 + ], + [ + "▁alive", + -11.363860130310059 + ], + [ + "▁retour", + -11.363900184631348 + ], + [ + "▁smoke", + -11.3640775680542 + ], + [ + 
"▁math", + -11.364285469055176 + ], + [ + "▁Ye", + -11.364337921142578 + ], + [ + "▁Denn", + -11.36436653137207 + ], + [ + "▁1995", + -11.364412307739258 + ], + [ + "▁bani", + -11.364644050598145 + ], + [ + "raz", + -11.364998817443848 + ], + [ + "world", + -11.365026473999023 + ], + [ + "▁engines", + -11.365140914916992 + ], + [ + "nehmen", + -11.365192413330078 + ], + [ + "stor", + -11.365328788757324 + ], + [ + "▁interpret", + -11.365403175354004 + ], + [ + "▁Ven", + -11.365489959716797 + ], + [ + "▁cotton", + -11.365622520446777 + ], + [ + "▁represented", + -11.366004943847656 + ], + [ + "▁fabulous", + -11.366166114807129 + ], + [ + "▁gender", + -11.366301536560059 + ], + [ + "Mar", + -11.366668701171875 + ], + [ + "vic", + -11.366991996765137 + ], + [ + "▁newsletter", + -11.367432594299316 + ], + [ + "sburg", + -11.367574691772461 + ], + [ + "pond", + -11.36838436126709 + ], + [ + "▁Carl", + -11.368454933166504 + ], + [ + "▁bunch", + -11.368714332580566 + ], + [ + "▁tower", + -11.368847846984863 + ], + [ + "▁trigger", + -11.368976593017578 + ], + [ + "▁explanation", + -11.369091033935547 + ], + [ + "Man", + -11.369114875793457 + ], + [ + "iunea", + -11.369168281555176 + ], + [ + "▁announcement", + -11.369492530822754 + ], + [ + "▁seeds", + -11.36952018737793 + ], + [ + "▁shell", + -11.369865417480469 + ], + [ + "▁Working", + -11.36989688873291 + ], + [ + "viz", + -11.370267868041992 + ], + [ + "▁Simply", + -11.370329856872559 + ], + [ + "sub", + -11.37037181854248 + ], + [ + "▁Village", + -11.37060832977295 + ], + [ + "▁falling", + -11.370742797851562 + ], + [ + "▁fits", + -11.37084674835205 + ], + [ + "▁wichtig", + -11.37088394165039 + ], + [ + "▁Down", + -11.37108039855957 + ], + [ + "bble", + -11.371573448181152 + ], + [ + "▁Orange", + -11.37165641784668 + ], + [ + "promoting", + -11.371932029724121 + ], + [ + "▁rapidly", + -11.37217903137207 + ], + [ + "▁translation", + -11.372330665588379 + ], + [ + "nig", + -11.3723726272583 + ], + [ + "fusion", + -11.37240982055664 + ], + [ + "kosten", + -11.372611045837402 + ], + [ + "2)", + -11.372783660888672 + ], + [ + "▁Express", + -11.372958183288574 + ], + [ + "▁Sw", + -11.373003959655762 + ], + [ + "▁frequency", + -11.373086929321289 + ], + [ + "▁diversity", + -11.373348236083984 + ], + [ + "MT", + -11.373452186584473 + ], + [ + "▁bekannt", + -11.373530387878418 + ], + [ + "lion", + -11.373871803283691 + ], + [ + "▁cop", + -11.37393856048584 + ], + [ + "▁Customer", + -11.374072074890137 + ], + [ + "▁demands", + -11.374427795410156 + ], + [ + "▁corn", + -11.374516487121582 + ], + [ + "▁Hamburg", + -11.374551773071289 + ], + [ + "SD", + -11.374628067016602 + ], + [ + "▁Rome", + -11.374677658081055 + ], + [ + "▁Pur", + -11.374750137329102 + ], + [ + "▁stamp", + -11.374885559082031 + ], + [ + "▁grateful", + -11.374967575073242 + ], + [ + "RM", + -11.37511157989502 + ], + [ + "▁Pl", + -11.37511920928955 + ], + [ + "▁Tele", + -11.375154495239258 + ], + [ + "▁plugin", + -11.375492095947266 + ], + [ + "▁maxim", + -11.375675201416016 + ], + [ + "▁Hoch", + -11.37574577331543 + ], + [ + "igung", + -11.375823020935059 + ], + [ + "▁Entwicklung", + -11.375858306884766 + ], + [ + "▁File", + -11.375931739807129 + ], + [ + "▁Eastern", + -11.376070022583008 + ], + [ + "▁scrap", + -11.376331329345703 + ], + [ + "▁acquired", + -11.376338958740234 + ], + [ + "sau", + -11.376364707946777 + ], + [ + "▁Klein", + -11.376452445983887 + ], + [ + "▁milioane", + -11.376492500305176 + ], + [ + "▁Stand", + -11.376693725585938 + ], + [ + "▁childhood", + 
-11.37671184539795 + ], + [ + "▁artificial", + -11.376752853393555 + ], + [ + "▁substantial", + -11.376851081848145 + ], + [ + "druck", + -11.377315521240234 + ], + [ + "▁Kra", + -11.377562522888184 + ], + [ + "▁performances", + -11.377645492553711 + ], + [ + "▁row", + -11.377824783325195 + ], + [ + "NT", + -11.377899169921875 + ], + [ + "mod", + -11.377904891967773 + ], + [ + "remained", + -11.378399848937988 + ], + [ + "▁nimic", + -11.378462791442871 + ], + [ + "▁Limited", + -11.378555297851562 + ], + [ + "▁cookie", + -11.378718376159668 + ], + [ + "▁retain", + -11.378816604614258 + ], + [ + "▁600", + -11.379144668579102 + ], + [ + "▁eigene", + -11.379158020019531 + ], + [ + "▁tune", + -11.379209518432617 + ], + [ + "NS", + -11.379256248474121 + ], + [ + "▁dad", + -11.379284858703613 + ], + [ + "Moreover", + -11.379415512084961 + ], + [ + "ès", + -11.379434585571289 + ], + [ + "▁worship", + -11.379439353942871 + ], + [ + "▁Material", + -11.3794584274292 + ], + [ + "▁verb", + -11.379528045654297 + ], + [ + "ziehen", + -11.37957763671875 + ], + [ + "lton", + -11.379645347595215 + ], + [ + "▁boot", + -11.379982948303223 + ], + [ + "plo", + -11.380118370056152 + ], + [ + "CF", + -11.380212783813477 + ], + [ + "GM", + -11.380215644836426 + ], + [ + "▁Mix", + -11.38046932220459 + ], + [ + "▁Front", + -11.380474090576172 + ], + [ + "▁repairs", + -11.380655288696289 + ], + [ + "▁proportion", + -11.381068229675293 + ], + [ + "▁habit", + -11.381132125854492 + ], + [ + "▁hide", + -11.38156509399414 + ], + [ + "focusing", + -11.381707191467285 + ], + [ + "▁Annual", + -11.381717681884766 + ], + [ + "▁twin", + -11.3817777633667 + ], + [ + "▁acord", + -11.381780624389648 + ], + [ + "ehr", + -11.381814956665039 + ], + [ + "month", + -11.382303237915039 + ], + [ + "venir", + -11.382535934448242 + ], + [ + "Or", + -11.38254165649414 + ], + [ + "awa", + -11.382600784301758 + ], + [ + "lass", + -11.382735252380371 + ], + [ + "ffe", + -11.383048057556152 + ], + [ + "iți", + -11.383074760437012 + ], + [ + "NO", + -11.3831148147583 + ], + [ + "▁scope", + -11.383295059204102 + ], + [ + "▁lowest", + -11.383527755737305 + ], + [ + "▁afraid", + -11.383572578430176 + ], + [ + "▁subjects", + -11.383578300476074 + ], + [ + "▁templates", + -11.383586883544922 + ], + [ + "▁jos", + -11.383604049682617 + ], + [ + "DM", + -11.383687973022461 + ], + [ + "ensemble", + -11.383792877197266 + ], + [ + "▁Ski", + -11.383941650390625 + ], + [ + "DP", + -11.384099960327148 + ], + [ + "▁grip", + -11.384171485900879 + ], + [ + "2-", + -11.38436222076416 + ], + [ + "▁sécurité", + -11.384743690490723 + ], + [ + "▁mono", + -11.384749412536621 + ], + [ + "▁controls", + -11.384854316711426 + ], + [ + "SV", + -11.384879112243652 + ], + [ + "install", + -11.384970664978027 + ], + [ + "berry", + -11.385042190551758 + ], + [ + "nial", + -11.385120391845703 + ], + [ + "shed", + -11.385462760925293 + ], + [ + "▁celle", + -11.385830879211426 + ], + [ + "FR", + -11.385936737060547 + ], + [ + "äng", + -11.385950088500977 + ], + [ + "▁gaz", + -11.385984420776367 + ], + [ + "êt", + -11.386184692382812 + ], + [ + "▁viewing", + -11.386412620544434 + ], + [ + "▁asigura", + -11.386524200439453 + ], + [ + "bling", + -11.3865327835083 + ], + [ + "master", + -11.386919975280762 + ], + [ + "▁Fin", + -11.387160301208496 + ], + [ + "VC", + -11.387365341186523 + ], + [ + "▁patent", + -11.387715339660645 + ], + [ + "▁Clean", + -11.38773250579834 + ], + [ + "▁1970", + -11.387789726257324 + ], + [ + "▁Char", + -11.387971878051758 + ], + [ + "thi", + 
-11.388010025024414 + ], + [ + "bli", + -11.388141632080078 + ], + [ + "▁haut", + -11.388307571411133 + ], + [ + "tica", + -11.38836669921875 + ], + [ + "▁venit", + -11.388578414916992 + ], + [ + "▁compatible", + -11.388678550720215 + ], + [ + "▁hanging", + -11.388690948486328 + ], + [ + "UN", + -11.388842582702637 + ], + [ + "▁forth", + -11.388911247253418 + ], + [ + "▁painted", + -11.388912200927734 + ], + [ + "lip", + -11.389031410217285 + ], + [ + "▁deeply", + -11.389089584350586 + ], + [ + "▁participating", + -11.389242172241211 + ], + [ + "▁Iran", + -11.38968276977539 + ], + [ + "▁conventional", + -11.389769554138184 + ], + [ + "ARE", + -11.38985824584961 + ], + [ + "▁accuracy", + -11.389896392822266 + ], + [ + "▁Familie", + -11.389955520629883 + ], + [ + "▁Dir", + -11.39001178741455 + ], + [ + "▁gehen", + -11.390127182006836 + ], + [ + "▁moderne", + -11.39022159576416 + ], + [ + "▁Iraq", + -11.39050579071045 + ], + [ + "▁vente", + -11.390582084655762 + ], + [ + "▁Donald", + -11.390998840332031 + ], + [ + "▁passer", + -11.391051292419434 + ], + [ + "▁mehrere", + -11.391267776489258 + ], + [ + "▁Everything", + -11.391291618347168 + ], + [ + "▁studied", + -11.391307830810547 + ], + [ + "▁acquire", + -11.391312599182129 + ], + [ + "für", + -11.391477584838867 + ], + [ + "▁gal", + -11.391502380371094 + ], + [ + "▁headed", + -11.391809463500977 + ], + [ + "▁screening", + -11.391865730285645 + ], + [ + "▁findings", + -11.392303466796875 + ], + [ + "▁nutrition", + -11.392305374145508 + ], + [ + "▁Secretary", + -11.392308235168457 + ], + [ + "duct", + -11.392431259155273 + ], + [ + "born", + -11.392436027526855 + ], + [ + "«", + -11.39261531829834 + ], + [ + "▁statistics", + -11.392616271972656 + ], + [ + "▁Sydney", + -11.392800331115723 + ], + [ + "▁Prof", + -11.392829895019531 + ], + [ + "▁dialogue", + -11.39327621459961 + ], + [ + "▁gather", + -11.393425941467285 + ], + [ + "valu", + -11.393746376037598 + ], + [ + "▁currency", + -11.394073486328125 + ], + [ + "▁Kat", + -11.394092559814453 + ], + [ + "gotten", + -11.394189834594727 + ], + [ + "main", + -11.39432144165039 + ], + [ + "▁coin", + -11.394340515136719 + ], + [ + "▁Nick", + -11.394380569458008 + ], + [ + "vă", + -11.394658088684082 + ], + [ + "▁Victoria", + -11.394832611083984 + ], + [ + "▁conclusion", + -11.3949613571167 + ], + [ + "▁lemon", + -11.394998550415039 + ], + [ + "▁Article", + -11.39516830444336 + ], + [ + "▁necesar", + -11.39516830444336 + ], + [ + "mag", + -11.395180702209473 + ], + [ + "▁riding", + -11.39537239074707 + ], + [ + "▁Eli", + -11.395599365234375 + ], + [ + "▁cord", + -11.395635604858398 + ], + [ + "wä", + -11.39572811126709 + ], + [ + "ußerdem", + -11.395737648010254 + ], + [ + "▁Bed", + -11.395759582519531 + ], + [ + "▁layers", + -11.395833015441895 + ], + [ + "▁harder", + -11.395975112915039 + ], + [ + "▁processor", + -11.396040916442871 + ], + [ + "▁Ils", + -11.39613151550293 + ], + [ + "▁Edition", + -11.39615535736084 + ], + [ + "▁Link", + -11.396393775939941 + ], + [ + "éré", + -11.396461486816406 + ], + [ + "▁nume", + -11.396576881408691 + ], + [ + "▁Boy", + -11.39659595489502 + ], + [ + "▁equally", + -11.396646499633789 + ], + [ + "▁Regel", + -11.397119522094727 + ], + [ + "▁hopes", + -11.397185325622559 + ], + [ + "odor", + -11.397311210632324 + ], + [ + "▁initially", + -11.397430419921875 + ], + [ + "▁$4", + -11.3974609375 + ], + [ + "▁exemplu", + -11.397537231445312 + ], + [ + "▁vari", + -11.397565841674805 + ], + [ + "schl", + -11.397698402404785 + ], + [ + "▁southern", + -11.39809799194336 + 
], + [ + "▁mein", + -11.39818000793457 + ], + [ + "▁1994", + -11.398300170898438 + ], + [ + "▁importantly", + -11.398401260375977 + ], + [ + "▁succes", + -11.398526191711426 + ], + [ + "▁developer", + -11.398598670959473 + ], + [ + "▁lips", + -11.39889144897461 + ], + [ + "▁attitude", + -11.39900016784668 + ], + [ + "▁Age", + -11.399541854858398 + ], + [ + "▁corps", + -11.399713516235352 + ], + [ + "▁clicking", + -11.39976978302002 + ], + [ + "▁putem", + -11.399832725524902 + ], + [ + "▁journée", + -11.40003776550293 + ], + [ + "boy", + -11.4002103805542 + ], + [ + "▁injured", + -11.40028190612793 + ], + [ + "▁watched", + -11.400433540344238 + ], + [ + "▁flights", + -11.40079116821289 + ], + [ + "turn", + -11.400980949401855 + ], + [ + "▁stainless", + -11.401562690734863 + ], + [ + "▁besondere", + -11.40156364440918 + ], + [ + "▁Tur", + -11.401596069335938 + ], + [ + "▁hiring", + -11.401650428771973 + ], + [ + "▁roads", + -11.401727676391602 + ], + [ + "ificat", + -11.401785850524902 + ], + [ + "▁Flor", + -11.402045249938965 + ], + [ + "▁puternic", + -11.402215003967285 + ], + [ + "▁unexpected", + -11.40223503112793 + ], + [ + "▁Est", + -11.40238094329834 + ], + [ + "▁adopted", + -11.40253734588623 + ], + [ + "▁Fox", + -11.402647972106934 + ], + [ + "▁contributions", + -11.402870178222656 + ], + [ + "sec", + -11.402968406677246 + ], + [ + "IO", + -11.403059959411621 + ], + [ + "▁santé", + -11.403432846069336 + ], + [ + "▁Tree", + -11.403763771057129 + ], + [ + "▁scurt", + -11.40381908416748 + ], + [ + "▁Products", + -11.403848648071289 + ], + [ + "▁forecast", + -11.403998374938965 + ], + [ + "▁actor", + -11.404143333435059 + ], + [ + "▁Gallery", + -11.404149055480957 + ], + [ + "▁continuous", + -11.404163360595703 + ], + [ + "▁Hat", + -11.404291152954102 + ], + [ + "▁slip", + -11.404501914978027 + ], + [ + "9%", + -11.404960632324219 + ], + [ + "▁depression", + -11.405043601989746 + ], + [ + "UI", + -11.405229568481445 + ], + [ + "abile", + -11.405648231506348 + ], + [ + "▁merit", + -11.405671119689941 + ], + [ + "▁Fer", + -11.405805587768555 + ], + [ + "▁robot", + -11.405888557434082 + ], + [ + "▁gel", + -11.40589427947998 + ], + [ + "▁gentle", + -11.406017303466797 + ], + [ + "▁wanting", + -11.406071662902832 + ], + [ + "▁understood", + -11.406157493591309 + ], + [ + "▁terrain", + -11.406161308288574 + ], + [ + "▁associate", + -11.406176567077637 + ], + [ + "▁discussions", + -11.40632152557373 + ], + [ + "▁Job", + -11.406365394592285 + ], + [ + "spec", + -11.406440734863281 + ], + [ + "Dabei", + -11.406475067138672 + ], + [ + "etic", + -11.406517028808594 + ], + [ + "gol", + -11.40654468536377 + ], + [ + "▁20%", + -11.406584739685059 + ], + [ + "▁grup", + -11.406606674194336 + ], + [ + "▁Doctor", + -11.406813621520996 + ], + [ + "verse", + -11.407246589660645 + ], + [ + "▁victim", + -11.407258033752441 + ], + [ + "ță", + -11.407302856445312 + ], + [ + "▁scores", + -11.407544136047363 + ], + [ + "▁Policy", + -11.407634735107422 + ], + [ + "▁Anna", + -11.407736778259277 + ], + [ + "IV", + -11.407804489135742 + ], + [ + "▁mineral", + -11.408202171325684 + ], + [ + "live", + -11.40821647644043 + ], + [ + "▁grey", + -11.408368110656738 + ], + [ + "struct", + -11.40852165222168 + ], + [ + "▁emails", + -11.408738136291504 + ], + [ + "▁anymore", + -11.409114837646484 + ], + [ + "▁productivity", + -11.409387588500977 + ], + [ + "▁Dark", + -11.409463882446289 + ], + [ + "▁neither", + -11.409481048583984 + ], + [ + "▁quotes", + -11.409611701965332 + ], + [ + "LS", + -11.410368919372559 + ], + [ + 
"▁Arizona", + -11.41040325164795 + ], + [ + "night", + -11.410497665405273 + ], + [ + "élé", + -11.411019325256348 + ], + [ + "▁assigned", + -11.411153793334961 + ], + [ + "▁satellite", + -11.411328315734863 + ], + [ + "▁stability", + -11.411665916442871 + ], + [ + "▁networking", + -11.41172981262207 + ], + [ + "▁Transport", + -11.411847114562988 + ], + [ + "▁persons", + -11.411856651306152 + ], + [ + "fund", + -11.412043571472168 + ], + [ + "▁pratique", + -11.41213321685791 + ], + [ + "▁inca", + -11.412134170532227 + ], + [ + "iller", + -11.412349700927734 + ], + [ + "▁packed", + -11.41239070892334 + ], + [ + "▁Vegas", + -11.412484169006348 + ], + [ + "▁offre", + -11.412493705749512 + ], + [ + "▁Bin", + -11.412518501281738 + ], + [ + "stop", + -11.412609100341797 + ], + [ + "mini", + -11.412860870361328 + ], + [ + "▁jam", + -11.412877082824707 + ], + [ + "cord", + -11.41289234161377 + ], + [ + "▁Beautiful", + -11.412996292114258 + ], + [ + "▁trash", + -11.413012504577637 + ], + [ + "▁wise", + -11.413092613220215 + ], + [ + "▁accounting", + -11.413178443908691 + ], + [ + "▁différents", + -11.413182258605957 + ], + [ + "▁stil", + -11.413214683532715 + ], + [ + "suit", + -11.413951873779297 + ], + [ + "▁vier", + -11.414209365844727 + ], + [ + "▁permis", + -11.414224624633789 + ], + [ + "flow", + -11.414238929748535 + ], + [ + "▁col", + -11.414749145507812 + ], + [ + "ected", + -11.414960861206055 + ], + [ + "▁singer", + -11.414999008178711 + ], + [ + "▁GmbH", + -11.415038108825684 + ], + [ + "tics", + -11.415094375610352 + ], + [ + "▁ser", + -11.415159225463867 + ], + [ + "On", + -11.415315628051758 + ], + [ + "▁insights", + -11.415605545043945 + ], + [ + "BB", + -11.415946960449219 + ], + [ + "▁differ", + -11.415959358215332 + ], + [ + "▁Glass", + -11.416131973266602 + ], + [ + "▁Six", + -11.416482925415039 + ], + [ + "▁subscription", + -11.416584968566895 + ], + [ + "BC", + -11.416606903076172 + ], + [ + "▁returning", + -11.416664123535156 + ], + [ + "kleinen", + -11.416693687438965 + ], + [ + "▁advantages", + -11.416747093200684 + ], + [ + "omme", + -11.416852951049805 + ], + [ + "lus", + -11.417071342468262 + ], + [ + "now", + -11.417141914367676 + ], + [ + "▁Pack", + -11.417253494262695 + ], + [ + "▁leak", + -11.417333602905273 + ], + [ + "▁muscles", + -11.41748332977295 + ], + [ + "▁davon", + -11.417492866516113 + ], + [ + "mph", + -11.417858123779297 + ], + [ + "▁temple", + -11.417868614196777 + ], + [ + "▁Après", + -11.417901039123535 + ], + [ + "▁Illinois", + -11.41801643371582 + ], + [ + "▁variable", + -11.418065071105957 + ], + [ + "▁judgment", + -11.418389320373535 + ], + [ + "gran", + -11.41861629486084 + ], + [ + "▁pose", + -11.418621063232422 + ], + [ + "das", + -11.418647766113281 + ], + [ + "ures", + -11.418673515319824 + ], + [ + "▁Championship", + -11.418689727783203 + ], + [ + "ebenfalls", + -11.41872501373291 + ], + [ + "▁hydro", + -11.418753623962402 + ], + [ + "▁angle", + -11.419268608093262 + ], + [ + "▁5-", + -11.41940975189209 + ], + [ + "▁gest", + -11.419547080993652 + ], + [ + "▁Frau", + -11.420233726501465 + ], + [ + "▁knock", + -11.420275688171387 + ], + [ + "FS", + -11.420442581176758 + ], + [ + "spi", + -11.420577049255371 + ], + [ + "▁Regional", + -11.420717239379883 + ], + [ + "lets", + -11.421098709106445 + ], + [ + "▁Date", + -11.42115592956543 + ], + [ + "▁Finance", + -11.421211242675781 + ], + [ + "▁Dann", + -11.421320915222168 + ], + [ + "Star", + -11.421380043029785 + ], + [ + "▁Creek", + -11.421393394470215 + ], + [ + "▁fu", + -11.421648979187012 + ], 
+ [ + "wohn", + -11.422141075134277 + ], + [ + "▁anniversary", + -11.422219276428223 + ], + [ + "▁investments", + -11.422292709350586 + ], + [ + "▁universal", + -11.422601699829102 + ], + [ + "▁pit", + -11.422745704650879 + ], + [ + "ște", + -11.422784805297852 + ], + [ + "▁lab", + -11.422822952270508 + ], + [ + "dienst", + -11.422884941101074 + ], + [ + "▁pal", + -11.422889709472656 + ], + [ + "▁graphic", + -11.42289924621582 + ], + [ + "▁bearing", + -11.422900199890137 + ], + [ + "▁stylish", + -11.423087120056152 + ], + [ + "▁mé", + -11.42319393157959 + ], + [ + "▁există", + -11.42326545715332 + ], + [ + "▁découvrir", + -11.423477172851562 + ], + [ + "comp", + -11.423606872558594 + ], + [ + "ridge", + -11.423667907714844 + ], + [ + "▁heads", + -11.423765182495117 + ], + [ + "▁consequences", + -11.423835754394531 + ], + [ + "self", + -11.423842430114746 + ], + [ + "fried", + -11.423870086669922 + ], + [ + "▁inventory", + -11.424199104309082 + ], + [ + "▁strip", + -11.42422866821289 + ], + [ + "▁Civil", + -11.42424488067627 + ], + [ + "bell", + -11.424307823181152 + ], + [ + "▁neben", + -11.424444198608398 + ], + [ + "▁Perfect", + -11.424470901489258 + ], + [ + "▁Notre", + -11.424478530883789 + ], + [ + "▁fraud", + -11.424630165100098 + ], + [ + "▁employers", + -11.424656867980957 + ], + [ + "▁Jackson", + -11.42470645904541 + ], + [ + "▁probleme", + -11.424915313720703 + ], + [ + "▁richtig", + -11.424957275390625 + ], + [ + "▁Method", + -11.425009727478027 + ], + [ + "▁tired", + -11.425010681152344 + ], + [ + "dies", + -11.425031661987305 + ], + [ + "▁Number", + -11.425315856933594 + ], + [ + "rland", + -11.425652503967285 + ], + [ + "▁latter", + -11.426031112670898 + ], + [ + "rendre", + -11.426064491271973 + ], + [ + "▁cameras", + -11.426095962524414 + ], + [ + "▁euch", + -11.426630020141602 + ], + [ + "▁Description", + -11.427038192749023 + ], + [ + "Spec", + -11.427061080932617 + ], + [ + "▁mile", + -11.427437782287598 + ], + [ + "▁Challenge", + -11.427474021911621 + ], + [ + "▁Solutions", + -11.427504539489746 + ], + [ + "▁trusted", + -11.427509307861328 + ], + [ + "▁einge", + -11.427515029907227 + ], + [ + "rück", + -11.427528381347656 + ], + [ + "▁Ober", + -11.427635192871094 + ], + [ + "kes", + -11.42764949798584 + ], + [ + "▁Log", + -11.427684783935547 + ], + [ + "▁dessert", + -11.427776336669922 + ], + [ + "▁murder", + -11.428033828735352 + ], + [ + "▁1/2", + -11.428311347961426 + ], + [ + "▁Provide", + -11.42872142791748 + ], + [ + "nivelul", + -11.428800582885742 + ], + [ + "nici", + -11.428818702697754 + ], + [ + "▁observe", + -11.42889404296875 + ], + [ + "▁prescription", + -11.429162979125977 + ], + [ + "▁Sau", + -11.429170608520508 + ], + [ + "▁genuine", + -11.42919635772705 + ], + [ + "▁operated", + -11.429231643676758 + ], + [ + "▁generous", + -11.429267883300781 + ], + [ + "▁weapons", + -11.429458618164062 + ], + [ + "▁belief", + -11.4295015335083 + ], + [ + "▁consum", + -11.429584503173828 + ], + [ + "▁unknown", + -11.430116653442383 + ], + [ + "deoarece", + -11.430135726928711 + ], + [ + "Art", + -11.430147171020508 + ], + [ + "▁kurz", + -11.430183410644531 + ], + [ + "▁Gut", + -11.430258750915527 + ], + [ + "▁medication", + -11.430522918701172 + ], + [ + "▁Mau", + -11.43058967590332 + ], + [ + "▁divorce", + -11.430678367614746 + ], + [ + "▁claimed", + -11.430811882019043 + ], + [ + "halten", + -11.430848121643066 + ], + [ + "▁Cons", + -11.43089485168457 + ], + [ + "▁operational", + -11.430975914001465 + ], + [ + "▁Hong", + -11.431081771850586 + ], + [ + "VI", + 
-11.431143760681152 + ], + [ + "▁Blick", + -11.431485176086426 + ], + [ + "▁lamp", + -11.431706428527832 + ], + [ + "pati", + -11.431853294372559 + ], + [ + "▁4-", + -11.43192195892334 + ], + [ + "▁interven", + -11.431964874267578 + ], + [ + "ques", + -11.43201732635498 + ], + [ + "▁Talk", + -11.432096481323242 + ], + [ + "▁zeigt", + -11.432318687438965 + ], + [ + "▁targeted", + -11.432390213012695 + ], + [ + "round", + -11.432640075683594 + ], + [ + "enfant", + -11.432748794555664 + ], + [ + "▁Reg", + -11.432836532592773 + ], + [ + "▁instruments", + -11.432872772216797 + ], + [ + "▁calcul", + -11.433363914489746 + ], + [ + "▁Henry", + -11.4335298538208 + ], + [ + "▁Cla", + -11.433616638183594 + ], + [ + "▁rack", + -11.433661460876465 + ], + [ + "sehen", + -11.43375301361084 + ], + [ + "▁ending", + -11.433754920959473 + ], + [ + "▁resolve", + -11.434130668640137 + ], + [ + "▁advise", + -11.434178352355957 + ], + [ + "▁sociale", + -11.434386253356934 + ], + [ + "▁cabin", + -11.434536933898926 + ], + [ + "▁involve", + -11.43480396270752 + ], + [ + "gă", + -11.434889793395996 + ], + [ + "▁automat", + -11.435132026672363 + ], + [ + "▁consultant", + -11.435258865356445 + ], + [ + "Bu", + -11.435370445251465 + ], + [ + "▁safely", + -11.435466766357422 + ], + [ + "état", + -11.435478210449219 + ], + [ + "▁pros", + -11.435657501220703 + ], + [ + "▁lies", + -11.435659408569336 + ], + [ + "▁Brian", + -11.435914993286133 + ], + [ + "▁talented", + -11.435954093933105 + ], + [ + "pus", + -11.43599796295166 + ], + [ + "▁hub", + -11.436060905456543 + ], + [ + "▁Ji", + -11.436066627502441 + ], + [ + "▁sought", + -11.436102867126465 + ], + [ + "▁energie", + -11.436210632324219 + ], + [ + "▁möchten", + -11.43634033203125 + ], + [ + "▁11.", + -11.436558723449707 + ], + [ + "▁Kong", + -11.436662673950195 + ], + [ + "▁grave", + -11.43666934967041 + ], + [ + "▁lists", + -11.436800956726074 + ], + [ + "tati", + -11.436809539794922 + ], + [ + "verschiedenen", + -11.43692398071289 + ], + [ + "dam", + -11.437061309814453 + ], + [ + "▁charity", + -11.437249183654785 + ], + [ + "▁breaking", + -11.43735122680664 + ], + [ + "kins", + -11.43747329711914 + ], + [ + "▁könnte", + -11.437517166137695 + ], + [ + "▁appointed", + -11.437532424926758 + ], + [ + "roc", + -11.4376859664917 + ], + [ + "▁Senate", + -11.437979698181152 + ], + [ + "wit", + -11.438002586364746 + ], + [ + "▁emerging", + -11.438162803649902 + ], + [ + "▁année", + -11.438288688659668 + ], + [ + "▁Cool", + -11.438365936279297 + ], + [ + "▁sensor", + -11.43842887878418 + ], + [ + "How", + -11.438488960266113 + ], + [ + "▁Ryan", + -11.438626289367676 + ], + [ + "▁computers", + -11.43871784210205 + ], + [ + "▁fault", + -11.4388427734375 + ], + [ + "▁présent", + -11.438843727111816 + ], + [ + "ulation", + -11.439149856567383 + ], + [ + "▁stir", + -11.439348220825195 + ], + [ + "lauf", + -11.439703941345215 + ], + [ + "▁AI", + -11.440389633178711 + ], + [ + "▁Bri", + -11.440438270568848 + ], + [ + "▁bain", + -11.441011428833008 + ], + [ + "▁5,", + -11.441287994384766 + ], + [ + "schein", + -11.44157886505127 + ], + [ + "▁weiß", + -11.441596031188965 + ], + [ + "▁possibilities", + -11.44235610961914 + ], + [ + "gur", + -11.442413330078125 + ], + [ + "▁hinter", + -11.442647933959961 + ], + [ + "Innen", + -11.442755699157715 + ], + [ + "▁vorba", + -11.442992210388184 + ], + [ + "fahren", + -11.443008422851562 + ], + [ + "▁Cell", + -11.443072319030762 + ], + [ + "univers", + -11.443137168884277 + ], + [ + "▁Follow", + -11.443424224853516 + ], + [ + "▁emotions", + 
-11.44360637664795 + ], + [ + "▁Ministry", + -11.443694114685059 + ], + [ + "▁curriculum", + -11.443694114685059 + ], + [ + "Je", + -11.443764686584473 + ], + [ + "▁gab", + -11.444080352783203 + ], + [ + "▁sigur", + -11.444270133972168 + ], + [ + "rise", + -11.444416999816895 + ], + [ + "Pri", + -11.44466495513916 + ], + [ + "▁stabil", + -11.444781303405762 + ], + [ + "▁superb", + -11.445100784301758 + ], + [ + "▁Oak", + -11.44510269165039 + ], + [ + "▁rubber", + -11.445286750793457 + ], + [ + "▁tag", + -11.445306777954102 + ], + [ + "PG", + -11.445361137390137 + ], + [ + "▁Heat", + -11.445477485656738 + ], + [ + "▁thousand", + -11.445504188537598 + ], + [ + "▁meets", + -11.445521354675293 + ], + [ + "▁faced", + -11.445578575134277 + ], + [ + "▁reserve", + -11.445640563964844 + ], + [ + "cateva", + -11.445767402648926 + ], + [ + "▁gym", + -11.445771217346191 + ], + [ + "▁vitamin", + -11.445960998535156 + ], + [ + "▁Rest", + -11.446457862854004 + ], + [ + "▁Single", + -11.446535110473633 + ], + [ + "▁Stephen", + -11.446623802185059 + ], + [ + "▁trick", + -11.446824073791504 + ], + [ + "DU", + -11.44694709777832 + ], + [ + "▁telefon", + -11.44711685180664 + ], + [ + "▁gând", + -11.447120666503906 + ], + [ + "▁primit", + -11.447345733642578 + ], + [ + "▁Connect", + -11.447351455688477 + ], + [ + "▁führt", + -11.447440147399902 + ], + [ + "▁Info", + -11.447500228881836 + ], + [ + "▁recall", + -11.447848320007324 + ], + [ + "▁restore", + -11.447885513305664 + ], + [ + "lege", + -11.44792652130127 + ], + [ + "▁franchise", + -11.448189735412598 + ], + [ + "▁seulement", + -11.44856071472168 + ], + [ + "reci", + -11.448598861694336 + ], + [ + "▁2019,", + -11.44864273071289 + ], + [ + "▁Ring", + -11.448663711547852 + ], + [ + "▁assembly", + -11.448678970336914 + ], + [ + "intérieur", + -11.448775291442871 + ], + [ + "▁shade", + -11.44887924194336 + ], + [ + "▁meaningful", + -11.448881149291992 + ], + [ + "bag", + -11.448989868164062 + ], + [ + "ONE", + -11.449249267578125 + ], + [ + "▁globe", + -11.449287414550781 + ], + [ + "▁WA", + -11.449406623840332 + ], + [ + "▁intervention", + -11.449495315551758 + ], + [ + "öl", + -11.449531555175781 + ], + [ + "▁Marine", + -11.45029067993164 + ], + [ + "▁Angebot", + -11.450512886047363 + ], + [ + "▁align", + -11.450618743896484 + ], + [ + "▁temperatures", + -11.450634956359863 + ], + [ + "ifier", + -11.45091724395752 + ], + [ + "▁Nigeria", + -11.451189041137695 + ], + [ + "▁survive", + -11.451216697692871 + ], + [ + "ounce", + -11.451275825500488 + ], + [ + "▁placement", + -11.451416969299316 + ], + [ + "▁deci", + -11.451528549194336 + ], + [ + "▁Taylor", + -11.451759338378906 + ], + [ + "step", + -11.45190715789795 + ], + [ + "▁Geschichte", + -11.452054023742676 + ], + [ + "▁Bet", + -11.452169418334961 + ], + [ + "▁Nature", + -11.45224380493164 + ], + [ + "▁FC", + -11.452256202697754 + ], + [ + "▁ownership", + -11.452286720275879 + ], + [ + "▁behaviour", + -11.452474594116211 + ], + [ + "▁deutlich", + -11.452532768249512 + ], + [ + "▁wondering", + -11.452798843383789 + ], + [ + "▁cleaner", + -11.453295707702637 + ], + [ + "uring", + -11.4534912109375 + ], + [ + "rä", + -11.453496932983398 + ], + [ + "▁ga", + -11.454296112060547 + ], + [ + "ador", + -11.454482078552246 + ], + [ + "▁artwork", + -11.454564094543457 + ], + [ + "ologic", + -11.45457649230957 + ], + [ + "▁eigentlich", + -11.454848289489746 + ], + [ + "▁hell", + -11.45522403717041 + ], + [ + "source", + -11.455251693725586 + ], + [ + "▁gem", + -11.455265045166016 + ], + [ + "▁boss", + 
-11.455307006835938 + ], + [ + "▁arise", + -11.455460548400879 + ], + [ + "about", + -11.455711364746094 + ], + [ + "▁SI", + -11.455951690673828 + ], + [ + "▁ME", + -11.45610237121582 + ], + [ + "akt", + -11.456191062927246 + ], + [ + "▁Style", + -11.456259727478027 + ], + [ + "▁Körper", + -11.456493377685547 + ], + [ + "gui", + -11.456799507141113 + ], + [ + "▁navigate", + -11.456819534301758 + ], + [ + "▁Meanwhile", + -11.456977844238281 + ], + [ + "▁așa", + -11.457111358642578 + ], + [ + "▁bulk", + -11.457298278808594 + ], + [ + "▁directions", + -11.457310676574707 + ], + [ + "▁brick", + -11.457747459411621 + ], + [ + "▁Poly", + -11.457752227783203 + ], + [ + "▁politique", + -11.457772254943848 + ], + [ + "▁patch", + -11.457777976989746 + ], + [ + "ра", + -11.457816123962402 + ], + [ + "commerce", + -11.457844734191895 + ], + [ + "▁înainte", + -11.457884788513184 + ], + [ + "▁intelligent", + -11.45823860168457 + ], + [ + "▁infection", + -11.458426475524902 + ], + [ + "▁Tru", + -11.458494186401367 + ], + [ + "▁raising", + -11.458504676818848 + ], + [ + "tragen", + -11.458539009094238 + ], + [ + "▁portrait", + -11.45858383178711 + ], + [ + "▁meisten", + -11.458783149719238 + ], + [ + "▁organize", + -11.45893669128418 + ], + [ + "metric", + -11.458962440490723 + ], + [ + "▁Season", + -11.459036827087402 + ], + [ + "▁enforcement", + -11.459259033203125 + ], + [ + "origine", + -11.459836959838867 + ], + [ + "▁Ros", + -11.460065841674805 + ], + [ + "▁Mount", + -11.460083961486816 + ], + [ + "have", + -11.460237503051758 + ], + [ + "▁romantic", + -11.460258483886719 + ], + [ + "▁comic", + -11.460810661315918 + ], + [ + "▁greu", + -11.461116790771484 + ], + [ + "ET", + -11.46133041381836 + ], + [ + "▁hook", + -11.461407661437988 + ], + [ + "▁mort", + -11.461411476135254 + ], + [ + "▁indicated", + -11.461583137512207 + ], + [ + "▁7,", + -11.461982727050781 + ], + [ + "▁Neben", + -11.46204662322998 + ], + [ + "yer", + -11.46214485168457 + ], + [ + "▁momentul", + -11.46214771270752 + ], + [ + "note", + -11.462313652038574 + ], + [ + "▁baz", + -11.46231460571289 + ], + [ + "▁abroad", + -11.462320327758789 + ], + [ + "nite", + -11.462464332580566 + ], + [ + "▁bass", + -11.462701797485352 + ], + [ + "▁norm", + -11.462714195251465 + ], + [ + "▁É", + -11.462788581848145 + ], + [ + "4.", + -11.462881088256836 + ], + [ + "▁province", + -11.463004112243652 + ], + [ + "▁merge", + -11.463419914245605 + ], + [ + "arbeiten", + -11.463438987731934 + ], + [ + "-20", + -11.463574409484863 + ], + [ + "▁Nicht", + -11.463674545288086 + ], + [ + "spo", + -11.463783264160156 + ], + [ + "size", + -11.463815689086914 + ], + [ + "▁assure", + -11.463849067687988 + ], + [ + "charge", + -11.463987350463867 + ], + [ + "▁olive", + -11.464017868041992 + ], + [ + "▁Pot", + -11.46408462524414 + ], + [ + "▁Figure", + -11.4642333984375 + ], + [ + "clair", + -11.464336395263672 + ], + [ + "▁discipline", + -11.464600563049316 + ], + [ + "elli", + -11.464639663696289 + ], + [ + "▁tackle", + -11.465169906616211 + ], + [ + "▁buyer", + -11.465237617492676 + ], + [ + "▁loud", + -11.465479850769043 + ], + [ + "▁180", + -11.465534210205078 + ], + [ + "▁căt", + -11.465587615966797 + ], + [ + "▁Palm", + -11.465738296508789 + ], + [ + "away", + -11.46593189239502 + ], + [ + "▁Mother", + -11.46607494354248 + ], + [ + "onia", + -11.466240882873535 + ], + [ + "▁Protection", + -11.466416358947754 + ], + [ + "auto", + -11.466547966003418 + ], + [ + "▁Version", + -11.466583251953125 + ], + [ + "▁Nice", + -11.466714859008789 + ], + [ + "▁12.", + 
-11.46682071685791 + ], + [ + "▁0,", + -11.466835021972656 + ], + [ + "ATION", + -11.466911315917969 + ], + [ + "▁Produkte", + -11.466955184936523 + ], + [ + "▁tube", + -11.467084884643555 + ], + [ + "▁Houston", + -11.467106819152832 + ], + [ + "chu", + -11.467500686645508 + ], + [ + "pas", + -11.467717170715332 + ], + [ + "▁Ele", + -11.467801094055176 + ], + [ + "▁mountains", + -11.467835426330566 + ], + [ + "PH", + -11.467937469482422 + ], + [ + "▁languages", + -11.468672752380371 + ], + [ + "▁servicii", + -11.468722343444824 + ], + [ + "▁Stay", + -11.468999862670898 + ], + [ + "fil", + -11.469138145446777 + ], + [ + "▁propos", + -11.469801902770996 + ], + [ + "▁coll", + -11.469825744628906 + ], + [ + "▁mor", + -11.470197677612305 + ], + [ + "▁arrange", + -11.470410346984863 + ], + [ + "▁sorry", + -11.470475196838379 + ], + [ + "▁instruction", + -11.470723152160645 + ], + [ + "▁holes", + -11.47077465057373 + ], + [ + "letting", + -11.471046447753906 + ], + [ + "▁wa", + -11.471074104309082 + ], + [ + "▁Feb", + -11.471227645874023 + ], + [ + "omb", + -11.471232414245605 + ], + [ + "▁prise", + -11.471290588378906 + ], + [ + "VO", + -11.471305847167969 + ], + [ + "week", + -11.471349716186523 + ], + [ + "▁Event", + -11.471427917480469 + ], + [ + "▁AT", + -11.471485137939453 + ], + [ + "ket", + -11.471492767333984 + ], + [ + "haft", + -11.471579551696777 + ], + [ + "▁hits", + -11.47159194946289 + ], + [ + "foli", + -11.471681594848633 + ], + [ + "this", + -11.471948623657227 + ], + [ + "GP", + -11.471970558166504 + ], + [ + "▁Pin", + -11.472332954406738 + ], + [ + "▁Stein", + -11.472503662109375 + ], + [ + "thing", + -11.472512245178223 + ], + [ + "▁emphasis", + -11.472556114196777 + ], + [ + "▁Mur", + -11.472631454467773 + ], + [ + "▁Bag", + -11.472647666931152 + ], + [ + "cons", + -11.47273063659668 + ], + [ + "tons", + -11.472835540771484 + ], + [ + "lash", + -11.472987174987793 + ], + [ + "▁Grant", + -11.473104476928711 + ], + [ + "▁pris", + -11.473175048828125 + ], + [ + "▁bună", + -11.47323989868164 + ], + [ + "▁buc", + -11.473699569702148 + ], + [ + "▁passe", + -11.473746299743652 + ], + [ + "▁jewelry", + -11.474213600158691 + ], + [ + "iens", + -11.474342346191406 + ], + [ + "▁forma", + -11.47453784942627 + ], + [ + "▁Med", + -11.474651336669922 + ], + [ + "laufen", + -11.474778175354004 + ], + [ + "▁hunt", + -11.474977493286133 + ], + [ + "stayed", + -11.475086212158203 + ], + [ + "party", + -11.475152015686035 + ], + [ + "▁fra", + -11.47529411315918 + ], + [ + "▁scenes", + -11.475305557250977 + ], + [ + "▁absorb", + -11.47535228729248 + ], + [ + "▁abilities", + -11.475377082824707 + ], + [ + "lug", + -11.475507736206055 + ], + [ + "▁Sarah", + -11.475693702697754 + ], + [ + "mpf", + -11.47570514678955 + ], + [ + "▁fle", + -11.4757080078125 + ], + [ + "accès", + -11.475872993469238 + ], + [ + "▁solicit", + -11.475926399230957 + ], + [ + "pie", + -11.476278305053711 + ], + [ + "▁Zum", + -11.476296424865723 + ], + [ + "▁universe", + -11.476390838623047 + ], + [ + "▁exists", + -11.476449012756348 + ], + [ + "oane", + -11.476597785949707 + ], + [ + "IVE", + -11.47668743133545 + ], + [ + "▁2011.", + -11.476906776428223 + ], + [ + "▁specialists", + -11.477072715759277 + ], + [ + "▁mess", + -11.477309226989746 + ], + [ + "fach", + -11.477402687072754 + ], + [ + "▁Recht", + -11.477404594421387 + ], + [ + "▁hack", + -11.47755241394043 + ], + [ + "▁jacket", + -11.477564811706543 + ], + [ + "HC", + -11.47769832611084 + ], + [ + "▁substance", + -11.477728843688965 + ], + [ + "▁signing", + 
-11.477775573730469 + ], + [ + "▁allerdings", + -11.478032112121582 + ], + [ + "▁publish", + -11.478139877319336 + ], + [ + "▁Lab", + -11.478157043457031 + ], + [ + "▁agenda", + -11.478249549865723 + ], + [ + "lane", + -11.478299140930176 + ], + [ + "stream", + -11.478620529174805 + ], + [ + "schau", + -11.47879409790039 + ], + [ + "▁realizat", + -11.478971481323242 + ], + [ + "▁supplier", + -11.479019165039062 + ], + [ + "▁moderate", + -11.47902774810791 + ], + [ + "▁tours", + -11.479212760925293 + ], + [ + "▁narrative", + -11.479220390319824 + ], + [ + "ația", + -11.479279518127441 + ], + [ + "▁maps", + -11.479423522949219 + ], + [ + "treten", + -11.479447364807129 + ], + [ + "▁mars", + -11.479706764221191 + ], + [ + "▁moon", + -11.479745864868164 + ], + [ + "rose", + -11.479751586914062 + ], + [ + "▁exp", + -11.479766845703125 + ], + [ + "zahl", + -11.480154037475586 + ], + [ + "psych", + -11.480195999145508 + ], + [ + "▁gehört", + -11.48024845123291 + ], + [ + "▁bound", + -11.4803466796875 + ], + [ + "▁submission", + -11.480451583862305 + ], + [ + "▁clubs", + -11.480722427368164 + ], + [ + "Am", + -11.480755805969238 + ], + [ + "tenir", + -11.480782508850098 + ], + [ + "▁boast", + -11.480851173400879 + ], + [ + "▁boards", + -11.4810791015625 + ], + [ + "▁Geschäfts", + -11.481216430664062 + ], + [ + "zing", + -11.48126220703125 + ], + [ + "wort", + -11.48137092590332 + ], + [ + "lid", + -11.481417655944824 + ], + [ + "▁contractor", + -11.481528282165527 + ], + [ + "▁donner", + -11.481672286987305 + ], + [ + "▁coupon", + -11.481974601745605 + ], + [ + "adresse", + -11.482004165649414 + ], + [ + "colo", + -11.48210334777832 + ], + [ + "▁perception", + -11.482124328613281 + ], + [ + "NC", + -11.48222541809082 + ], + [ + "▁abge", + -11.482245445251465 + ], + [ + "▁cheaper", + -11.482268333435059 + ], + [ + "▁grace", + -11.482312202453613 + ], + [ + "▁resident", + -11.482718467712402 + ], + [ + "kla", + -11.4828462600708 + ], + [ + "▁bug", + -11.4828462600708 + ], + [ + "▁Available", + -11.482893943786621 + ], + [ + "▁BA", + -11.483323097229004 + ], + [ + "▁Met", + -11.483601570129395 + ], + [ + "▁climb", + -11.48365592956543 + ], + [ + "▁expanded", + -11.484349250793457 + ], + [ + "ying", + -11.484426498413086 + ], + [ + "▁matching", + -11.484469413757324 + ], + [ + "▁suffered", + -11.484733581542969 + ], + [ + "▁employed", + -11.484755516052246 + ], + [ + "pper", + -11.484843254089355 + ], + [ + "▁experiencing", + -11.484884262084961 + ], + [ + "ddy", + -11.484953880310059 + ], + [ + "▁philosophy", + -11.484955787658691 + ], + [ + "▁utilisé", + -11.485008239746094 + ], + [ + "▁Jane", + -11.485079765319824 + ], + [ + "LI", + -11.485087394714355 + ], + [ + "▁elected", + -11.485185623168945 + ], + [ + "▁MI", + -11.485264778137207 + ], + [ + "▁ISO", + -11.485340118408203 + ], + [ + "winning", + -11.48537540435791 + ], + [ + "▁vot", + -11.485424041748047 + ], + [ + "▁generic", + -11.485519409179688 + ], + [ + "▁Bol", + -11.485650062561035 + ], + [ + "▁copies", + -11.48568058013916 + ], + [ + "▁mechanical", + -11.48568058013916 + ], + [ + "günstig", + -11.485682487487793 + ], + [ + "roy", + -11.485770225524902 + ], + [ + "Astfel", + -11.485808372497559 + ], + [ + "media", + -11.485868453979492 + ], + [ + "▁shoulder", + -11.4859037399292 + ], + [ + "▁directory", + -11.486000061035156 + ], + [ + "▁banking", + -11.486016273498535 + ], + [ + "▁mistakes", + -11.486040115356445 + ], + [ + "▁Fran", + -11.486425399780273 + ], + [ + "▁Jon", + -11.486544609069824 + ], + [ + "▁spare", + -11.486579895019531 
+ ], + [ + "metri", + -11.486668586730957 + ], + [ + "▁mask", + -11.486879348754883 + ], + [ + "▁consistently", + -11.48695182800293 + ], + [ + "▁Columbia", + -11.487278938293457 + ], + [ + "roid", + -11.48774242401123 + ], + [ + "essen", + -11.487935066223145 + ], + [ + "▁(“", + -11.48798656463623 + ], + [ + "▁série", + -11.488212585449219 + ], + [ + "▁Phil", + -11.488249778747559 + ], + [ + "▁usor", + -11.488249778747559 + ], + [ + "▁stood", + -11.488279342651367 + ], + [ + "▁racing", + -11.488335609436035 + ], + [ + "▁Comme", + -11.488555908203125 + ], + [ + "▁exceed", + -11.488565444946289 + ], + [ + "на", + -11.488618850708008 + ], + [ + "▁activate", + -11.48873233795166 + ], + [ + "▁circle", + -11.488836288452148 + ], + [ + "▁bold", + -11.488956451416016 + ], + [ + "▁handy", + -11.48909854888916 + ], + [ + "merely", + -11.489114761352539 + ], + [ + "▁Edward", + -11.489147186279297 + ], + [ + "▁contracts", + -11.489530563354492 + ], + [ + "ê", + -11.489595413208008 + ], + [ + "▁campaigns", + -11.489673614501953 + ], + [ + "▁ought", + -11.489733695983887 + ], + [ + "▁nursing", + -11.489781379699707 + ], + [ + "▁Jr", + -11.489917755126953 + ], + [ + "▁rarely", + -11.490032196044922 + ], + [ + "▁Mir", + -11.490050315856934 + ], + [ + "▁diagnosis", + -11.490379333496094 + ], + [ + "▁Theatre", + -11.490394592285156 + ], + [ + "▁producer", + -11.490407943725586 + ], + [ + "Currently", + -11.490492820739746 + ], + [ + "▁fitting", + -11.490580558776855 + ], + [ + "▁ajunge", + -11.490618705749512 + ], + [ + "minte", + -11.490754127502441 + ], + [ + "▁termen", + -11.490838050842285 + ], + [ + "▁Linux", + -11.491013526916504 + ], + [ + "▁1-", + -11.491068840026855 + ], + [ + "▁hätte", + -11.491202354431152 + ], + [ + "▁Resort", + -11.49129867553711 + ], + [ + "image", + -11.491527557373047 + ], + [ + "▁Rod", + -11.49189281463623 + ], + [ + "▁Fly", + -11.491924285888672 + ], + [ + "try", + -11.492317199707031 + ], + [ + "▁expense", + -11.49245834350586 + ], + [ + "▁Interior", + -11.492799758911133 + ], + [ + "▁fence", + -11.492920875549316 + ], + [ + "▁Kontakt", + -11.493063926696777 + ], + [ + "▁ALL", + -11.493142127990723 + ], + [ + "VA", + -11.493229866027832 + ], + [ + "▁Exchange", + -11.493316650390625 + ], + [ + "ranked", + -11.493558883666992 + ], + [ + "▁Performance", + -11.493621826171875 + ], + [ + "prim", + -11.493635177612305 + ], + [ + "▁basket", + -11.493694305419922 + ], + [ + "▁Vice", + -11.493703842163086 + ], + [ + "phan", + -11.4937105178833 + ], + [ + "▁broke", + -11.494003295898438 + ], + [ + "voir", + -11.49431324005127 + ], + [ + "arg", + -11.494512557983398 + ], + [ + "ART", + -11.494529724121094 + ], + [ + "▁floors", + -11.494856834411621 + ], + [ + "pression", + -11.495025634765625 + ], + [ + "▁possession", + -11.49507999420166 + ], + [ + "▁domaine", + -11.49510669708252 + ], + [ + "▁valeur", + -11.495132446289062 + ], + [ + "▁suddenly", + -11.495282173156738 + ], + [ + "▁mild", + -11.495304107666016 + ], + [ + "▁aflat", + -11.495431900024414 + ], + [ + "▁Tea", + -11.495731353759766 + ], + [ + "tritt", + -11.495767593383789 + ], + [ + "▁Mittel", + -11.495773315429688 + ], + [ + "▁regulatory", + -11.49580192565918 + ], + [ + "▁spectacular", + -11.495905876159668 + ], + [ + "fahrt", + -11.495949745178223 + ], + [ + "GS", + -11.496026039123535 + ], + [ + "MM", + -11.4961576461792 + ], + [ + "▁environments", + -11.496203422546387 + ], + [ + "▁Raum", + -11.496381759643555 + ], + [ + "▁lay", + -11.496664047241211 + ], + [ + "▁cré", + -11.496713638305664 + ], + [ + "▁Selbst", + 
-11.496726989746094 + ], + [ + "▁opposition", + -11.496821403503418 + ], + [ + "two", + -11.49729061126709 + ], + [ + "▁Clark", + -11.497822761535645 + ], + [ + "▁Netz", + -11.497845649719238 + ], + [ + "bald", + -11.497983932495117 + ], + [ + "▁Innovation", + -11.4982271194458 + ], + [ + "▁overcome", + -11.49825382232666 + ], + [ + "quot", + -11.499013900756836 + ], + [ + "▁Sin", + -11.499106407165527 + ], + [ + "▁Sto", + -11.499320983886719 + ], + [ + "▁grain", + -11.499560356140137 + ], + [ + "▁collections", + -11.499724388122559 + ], + [ + "▁applies", + -11.49986743927002 + ], + [ + "mach", + -11.499934196472168 + ], + [ + "▁wheels", + -11.499958992004395 + ], + [ + "▁universities", + -11.500049591064453 + ], + [ + "▁Ray", + -11.500182151794434 + ], + [ + "lina", + -11.500238418579102 + ], + [ + "▁arrangements", + -11.500393867492676 + ], + [ + "▁western", + -11.500728607177734 + ], + [ + "rous", + -11.500768661499023 + ], + [ + "aise", + -11.500784873962402 + ], + [ + "▁highlights", + -11.50112533569336 + ], + [ + "▁intend", + -11.501265525817871 + ], + [ + "aimed", + -11.501358032226562 + ], + [ + "▁Scotland", + -11.501360893249512 + ], + [ + "▁acestei", + -11.501466751098633 + ], + [ + "graf", + -11.50150203704834 + ], + [ + "duction", + -11.501517295837402 + ], + [ + "path", + -11.50156021118164 + ], + [ + "▁evil", + -11.501633644104004 + ], + [ + "▁scris", + -11.501791000366211 + ], + [ + "▁disposition", + -11.501927375793457 + ], + [ + "▁designing", + -11.5020751953125 + ], + [ + "zwar", + -11.502172470092773 + ], + [ + "▁Retrieve", + -11.50217342376709 + ], + [ + "▁aggressive", + -11.502374649047852 + ], + [ + "▁Glen", + -11.502411842346191 + ], + [ + "▁daher", + -11.502473831176758 + ], + [ + "▁Quick", + -11.502494812011719 + ], + [ + "▁recover", + -11.502632141113281 + ], + [ + "▁prominent", + -11.50288200378418 + ], + [ + "▁visits", + -11.503198623657227 + ], + [ + "▁Mis", + -11.503376960754395 + ], + [ + "▁edited", + -11.503456115722656 + ], + [ + "▁distributed", + -11.503564834594727 + ], + [ + "▁dés", + -11.503580093383789 + ], + [ + "▁alter", + -11.5035982131958 + ], + [ + "▁cooked", + -11.503697395324707 + ], + [ + "embl", + -11.503706932067871 + ], + [ + "Univers", + -11.503715515136719 + ], + [ + "▁Minuten", + -11.504156112670898 + ], + [ + "▁compris", + -11.504179954528809 + ], + [ + "rais", + -11.504182815551758 + ], + [ + "essentially", + -11.504199028015137 + ], + [ + "▁rel", + -11.504340171813965 + ], + [ + "▁appel", + -11.504570007324219 + ], + [ + "▁trace", + -11.504788398742676 + ], + [ + "relating", + -11.504830360412598 + ], + [ + "dès", + -11.504937171936035 + ], + [ + "aste", + -11.504961013793945 + ], + [ + "▁raison", + -11.504963874816895 + ], + [ + "▁frequent", + -11.505281448364258 + ], + [ + "▁beds", + -11.505316734313965 + ], + [ + "▁Miami", + -11.505511283874512 + ], + [ + "▁vibrant", + -11.50564193725586 + ], + [ + "▁Kam", + -11.505721092224121 + ], + [ + "▁klar", + -11.505861282348633 + ], + [ + "▁Tan", + -11.50598430633545 + ], + [ + "▁vidéo", + -11.506032943725586 + ], + [ + "▁Kur", + -11.506115913391113 + ], + [ + "▁themes", + -11.506134033203125 + ], + [ + "▁struggling", + -11.506440162658691 + ], + [ + "▁Magazine", + -11.506444931030273 + ], + [ + "maker", + -11.506476402282715 + ], + [ + "veni", + -11.506564140319824 + ], + [ + "▁Groß", + -11.506732940673828 + ], + [ + "▁streaming", + -11.506772994995117 + ], + [ + "▁analyze", + -11.506876945495605 + ], + [ + "▁titles", + -11.506982803344727 + ], + [ + "pier", + -11.507316589355469 + ], + [ + 
"▁participant", + -11.507347106933594 + ], + [ + "aims", + -11.507607460021973 + ], + [ + "▁convention", + -11.507638931274414 + ], + [ + "▁flood", + -11.507780075073242 + ], + [ + "▁nights", + -11.507842063903809 + ], + [ + "▁titre", + -11.50792407989502 + ], + [ + "▁voul", + -11.508010864257812 + ], + [ + "weit", + -11.50816822052002 + ], + [ + "where", + -11.508213996887207 + ], + [ + "▁Seiten", + -11.508286476135254 + ], + [ + "▁relaxing", + -11.508628845214844 + ], + [ + "▁piano", + -11.50883674621582 + ], + [ + "▁Pick", + -11.508842468261719 + ], + [ + "▁Sony", + -11.508955001831055 + ], + [ + "▁enhanced", + -11.509017944335938 + ], + [ + "▁visa", + -11.50915241241455 + ], + [ + "CH", + -11.50930118560791 + ], + [ + "▁instantly", + -11.50930404663086 + ], + [ + "▁Fan", + -11.509721755981445 + ], + [ + "▁diabetes", + -11.509988784790039 + ], + [ + "▁popul", + -11.50999641418457 + ], + [ + "Ang", + -11.510232925415039 + ], + [ + "▁Ask", + -11.510295867919922 + ], + [ + "cate", + -11.510650634765625 + ], + [ + "▁simplu", + -11.510666847229004 + ], + [ + "nahme", + -11.510685920715332 + ], + [ + "▁dentist", + -11.510842323303223 + ], + [ + "ubi", + -11.510920524597168 + ], + [ + "article", + -11.511030197143555 + ], + [ + "▁graph", + -11.511094093322754 + ], + [ + "▁rival", + -11.51121711730957 + ], + [ + "jahr", + -11.5113525390625 + ], + [ + "▁bloc", + -11.511370658874512 + ], + [ + "fern", + -11.511427879333496 + ], + [ + "▁dispar", + -11.511516571044922 + ], + [ + "▁servers", + -11.511582374572754 + ], + [ + "▁patru", + -11.511610984802246 + ], + [ + "▁Within", + -11.511634826660156 + ], + [ + "▁situated", + -11.511896133422852 + ], + [ + "▁HR", + -11.511981964111328 + ], + [ + "▁leaf", + -11.511981964111328 + ], + [ + "▁curs", + -11.512049674987793 + ], + [ + "antes", + -11.512325286865234 + ], + [ + "lux", + -11.512406349182129 + ], + [ + "▁1993", + -11.512463569641113 + ], + [ + "stance", + -11.512650489807129 + ], + [ + "▁northern", + -11.512683868408203 + ], + [ + "lves", + -11.512718200683594 + ], + [ + "▁contractors", + -11.512882232666016 + ], + [ + "▁dimensions", + -11.512920379638672 + ], + [ + "▁rolling", + -11.513068199157715 + ], + [ + "▁automobile", + -11.513211250305176 + ], + [ + "▁cru", + -11.51342487335205 + ], + [ + "▁displays", + -11.513570785522461 + ], + [ + "web", + -11.513812065124512 + ], + [ + "had", + -11.513850212097168 + ], + [ + "▁Never", + -11.513893127441406 + ], + [ + "▁2-", + -11.513932228088379 + ], + [ + "vine", + -11.51393985748291 + ], + [ + "▁Wahl", + -11.513975143432617 + ], + [ + "▁Markt", + -11.514166831970215 + ], + [ + "▁Double", + -11.514227867126465 + ], + [ + "▁acknowledge", + -11.514229774475098 + ], + [ + "stal", + -11.514288902282715 + ], + [ + "▁equity", + -11.514620780944824 + ], + [ + "▁ministry", + -11.514823913574219 + ], + [ + "▁Lor", + -11.514875411987305 + ], + [ + "▁sud", + -11.514968872070312 + ], + [ + "idée", + -11.515044212341309 + ], + [ + "▁measured", + -11.515448570251465 + ], + [ + "▁editing", + -11.515609741210938 + ], + [ + "▁singur", + -11.515620231628418 + ], + [ + "▁coal", + -11.515623092651367 + ], + [ + "▁dramatic", + -11.516212463378906 + ], + [ + "AG", + -11.516251564025879 + ], + [ + "asca", + -11.516280174255371 + ], + [ + "▁crash", + -11.516321182250977 + ], + [ + "ischer", + -11.516597747802734 + ], + [ + "▁Pla", + -11.516871452331543 + ], + [ + "▁psycho", + -11.517054557800293 + ], + [ + "piece", + -11.517118453979492 + ], + [ + "▁finger", + -11.517121315002441 + ], + [ + "▁Hollywood", + 
-11.517123222351074 + ], + [ + "▁Cr", + -11.517345428466797 + ], + [ + "▁locally", + -11.517622947692871 + ], + [ + "▁mouse", + -11.517792701721191 + ], + [ + "▁Base", + -11.517867088317871 + ], + [ + "uite", + -11.518095016479492 + ], + [ + "▁detect", + -11.518099784851074 + ], + [ + "cea", + -11.518150329589844 + ], + [ + "▁bull", + -11.518194198608398 + ], + [ + "▁curve", + -11.518208503723145 + ], + [ + "été", + -11.518218994140625 + ], + [ + "ddle", + -11.51839542388916 + ], + [ + "▁span", + -11.518523216247559 + ], + [ + "WS", + -11.518878936767578 + ], + [ + "CL", + -11.519017219543457 + ], + [ + "▁officially", + -11.519042015075684 + ], + [ + "▁corect", + -11.519168853759766 + ], + [ + "▁Artikel", + -11.5193510055542 + ], + [ + "▁customized", + -11.520099639892578 + ], + [ + "▁intellectual", + -11.52018928527832 + ], + [ + "▁heures", + -11.520334243774414 + ], + [ + "schule", + -11.520444869995117 + ], + [ + "▁investing", + -11.520585060119629 + ], + [ + "▁parallel", + -11.521227836608887 + ], + [ + "▁loi", + -11.521263122558594 + ], + [ + "ările", + -11.521566390991211 + ], + [ + "р", + -11.521679878234863 + ], + [ + "▁bench", + -11.521724700927734 + ], + [ + "▁principle", + -11.521756172180176 + ], + [ + "▁Galaxy", + -11.521829605102539 + ], + [ + "ța", + -11.522237777709961 + ], + [ + "▁(4", + -11.522418975830078 + ], + [ + "▁bedrooms", + -11.522578239440918 + ], + [ + "née", + -11.52273941040039 + ], + [ + "▁surely", + -11.52275276184082 + ], + [ + "very", + -11.522927284240723 + ], + [ + "stelle", + -11.523200988769531 + ], + [ + "activ", + -11.523216247558594 + ], + [ + "cite", + -11.523551940917969 + ], + [ + "▁Original", + -11.523553848266602 + ], + [ + "▁palm", + -11.523665428161621 + ], + [ + "▁losses", + -11.523934364318848 + ], + [ + "▁newspaper", + -11.524153709411621 + ], + [ + "ciu", + -11.52436351776123 + ], + [ + "▁Hold", + -11.524392127990723 + ], + [ + "BO", + -11.524422645568848 + ], + [ + "▁CON", + -11.524598121643066 + ], + [ + "▁modified", + -11.524624824523926 + ], + [ + "▁stake", + -11.524735450744629 + ], + [ + "▁Ton", + -11.524798393249512 + ], + [ + "▁luna", + -11.524968147277832 + ], + [ + "▁Mind", + -11.525094985961914 + ], + [ + "lap", + -11.525150299072266 + ], + [ + "▁opinions", + -11.525247573852539 + ], + [ + "▁Jordan", + -11.525351524353027 + ], + [ + "div", + -11.52537727355957 + ], + [ + "indi", + -11.525418281555176 + ], + [ + "▁Story", + -11.525476455688477 + ], + [ + "▁affiliate", + -11.52585506439209 + ], + [ + "▁matière", + -11.525918960571289 + ], + [ + "▁fifth", + -11.526399612426758 + ], + [ + "▁sheets", + -11.52645492553711 + ], + [ + "▁puțin", + -11.526909828186035 + ], + [ + "ush", + -11.526947021484375 + ], + [ + "geführt", + -11.526993751525879 + ], + [ + "▁Falls", + -11.527168273925781 + ], + [ + "legi", + -11.527295112609863 + ], + [ + "▁auction", + -11.527326583862305 + ], + [ + "▁cooperation", + -11.52735424041748 + ], + [ + "▁Fee", + -11.527474403381348 + ], + [ + "▁Daily", + -11.52774715423584 + ], + [ + "pies", + -11.527853965759277 + ], + [ + "▁basketball", + -11.527976036071777 + ], + [ + "removing", + -11.528056144714355 + ], + [ + "Besides", + -11.528294563293457 + ], + [ + "▁Body", + -11.528355598449707 + ], + [ + "▁AD", + -11.528369903564453 + ], + [ + "RU", + -11.528435707092285 + ], + [ + "ţia", + -11.52894401550293 + ], + [ + "▁Extra", + -11.528986930847168 + ], + [ + "▁Practice", + -11.52900218963623 + ], + [ + "▁Jeff", + -11.529017448425293 + ], + [ + "▁început", + -11.529253005981445 + ], + [ + "ching", + 
-11.529269218444824 + ], + [ + "▁Gift", + -11.529281616210938 + ], + [ + "kk", + -11.529295921325684 + ], + [ + "\")", + -11.529349327087402 + ], + [ + "▁Austin", + -11.529651641845703 + ], + [ + "thro", + -11.529766082763672 + ], + [ + "▁camping", + -11.529810905456543 + ], + [ + "▁theatre", + -11.529850959777832 + ], + [ + "école", + -11.529916763305664 + ], + [ + "vient", + -11.530159950256348 + ], + [ + "▁faces", + -11.530226707458496 + ], + [ + "▁constructed", + -11.530437469482422 + ], + [ + "▁overnight", + -11.530472755432129 + ], + [ + "▁locale", + -11.530574798583984 + ], + [ + "▁roots", + -11.530611038208008 + ], + [ + "▁bu", + -11.530662536621094 + ], + [ + "4,", + -11.530683517456055 + ], + [ + "▁Enterprise", + -11.530865669250488 + ], + [ + "screen", + -11.530935287475586 + ], + [ + "▁Chef", + -11.53096866607666 + ], + [ + "▁Along", + -11.531298637390137 + ], + [ + "▁MD", + -11.531431198120117 + ], + [ + "▁Supreme", + -11.531597137451172 + ], + [ + "En", + -11.531655311584473 + ], + [ + "▁verwendet", + -11.532015800476074 + ], + [ + "▁processed", + -11.532425880432129 + ], + [ + "▁vendors", + -11.532549858093262 + ], + [ + "▁FA", + -11.532651901245117 + ], + [ + "▁44", + -11.532716751098633 + ], + [ + "▁beautifully", + -11.532933235168457 + ], + [ + "▁eficient", + -11.533092498779297 + ], + [ + "▁Wil", + -11.533117294311523 + ], + [ + "▁Member", + -11.533121109008789 + ], + [ + "▁damages", + -11.5332670211792 + ], + [ + "▁mutual", + -11.533288955688477 + ], + [ + "SN", + -11.533506393432617 + ], + [ + "▁Dave", + -11.533665657043457 + ], + [ + "??", + -11.533998489379883 + ], + [ + "stat", + -11.534090995788574 + ], + [ + "▁tourist", + -11.534374237060547 + ], + [ + "fie", + -11.534425735473633 + ], + [ + "şte", + -11.534754753112793 + ], + [ + "▁donne", + -11.534764289855957 + ], + [ + "▁shadow", + -11.53493881225586 + ], + [ + "▁dough", + -11.534993171691895 + ], + [ + "▁Gro", + -11.535002708435059 + ], + [ + "▁Mah", + -11.535066604614258 + ], + [ + "RF", + -11.535126686096191 + ], + [ + "▁mechanism", + -11.535163879394531 + ], + [ + "▁2011,", + -11.535179138183594 + ], + [ + "▁Alter", + -11.53530502319336 + ], + [ + "▁opposed", + -11.53538990020752 + ], + [ + "▁Fri", + -11.535501480102539 + ], + [ + "▁remarkable", + -11.535572052001953 + ], + [ + "oral", + -11.535635948181152 + ], + [ + "▁verschiedene", + -11.535653114318848 + ], + [ + "▁difficulty", + -11.535691261291504 + ], + [ + "▁Application", + -11.535840034484863 + ], + [ + "▁Hay", + -11.535888671875 + ], + [ + "▁continua", + -11.535935401916504 + ], + [ + "EP", + -11.53609848022461 + ], + [ + "▁Pr", + -11.53617000579834 + ], + [ + "▁Lady", + -11.53631591796875 + ], + [ + "▁interval", + -11.536457061767578 + ], + [ + "▁Mil", + -11.536504745483398 + ], + [ + "▁2010.", + -11.537042617797852 + ], + [ + "VE", + -11.537074089050293 + ], + [ + "integr", + -11.537360191345215 + ], + [ + "▁création", + -11.537415504455566 + ], + [ + "weed", + -11.537456512451172 + ], + [ + "EG", + -11.53760051727295 + ], + [ + "▁6,", + -11.537784576416016 + ], + [ + "▁god", + -11.537866592407227 + ], + [ + "▁accomplish", + -11.537947654724121 + ], + [ + "▁thoroughly", + -11.538019180297852 + ], + [ + "2019", + -11.538228988647461 + ], + [ + "izer", + -11.538246154785156 + ], + [ + "▁Wal", + -11.538300514221191 + ], + [ + "ifying", + -11.538701057434082 + ], + [ + "▁Wohn", + -11.539227485656738 + ], + [ + "▁Holz", + -11.539474487304688 + ], + [ + "▁Advanced", + -11.539528846740723 + ], + [ + "▁honey", + -11.539626121520996 + ], + [ + "proof", 
+ -11.539634704589844 + ], + [ + "▁saison", + -11.540029525756836 + ], + [ + "ându", + -11.540035247802734 + ], + [ + "▁Kevin", + -11.540116310119629 + ], + [ + "▁shelter", + -11.540199279785156 + ], + [ + "▁discut", + -11.540257453918457 + ], + [ + "▁hike", + -11.540257453918457 + ], + [ + "ités", + -11.540461540222168 + ], + [ + "▁boutique", + -11.540672302246094 + ], + [ + "▁Email", + -11.54067611694336 + ], + [ + "▁cosmetic", + -11.540830612182617 + ], + [ + "dian", + -11.540916442871094 + ], + [ + "▁hohe", + -11.540940284729004 + ], + [ + "▁absence", + -11.541071891784668 + ], + [ + "axi", + -11.541136741638184 + ], + [ + "nah", + -11.541178703308105 + ], + [ + "▁Frauen", + -11.541236877441406 + ], + [ + "▁actively", + -11.541278839111328 + ], + [ + "bind", + -11.541468620300293 + ], + [ + "▁everybody", + -11.541740417480469 + ], + [ + "▁controller", + -11.541802406311035 + ], + [ + "▁1.5", + -11.5418062210083 + ], + [ + "erau", + -11.541842460632324 + ], + [ + "gehen", + -11.541988372802734 + ], + [ + "▁scenario", + -11.542038917541504 + ], + [ + "▁odd", + -11.542083740234375 + ], + [ + "▁Ultra", + -11.542089462280273 + ], + [ + "▁finishing", + -11.542366981506348 + ], + [ + "▁cuts", + -11.542383193969727 + ], + [ + "▁financing", + -11.542515754699707 + ], + [ + "▁Chance", + -11.542579650878906 + ], + [ + "surrounded", + -11.542818069458008 + ], + [ + "▁joc", + -11.542903900146484 + ], + [ + "▁shelf", + -11.543004035949707 + ], + [ + "tief", + -11.54308032989502 + ], + [ + "▁Sir", + -11.543146133422852 + ], + [ + "▁Agent", + -11.543197631835938 + ], + [ + "▁scratch", + -11.543560981750488 + ], + [ + "2,000", + -11.54360294342041 + ], + [ + "nutri", + -11.54365348815918 + ], + [ + "nier", + -11.544063568115234 + ], + [ + "▁Dur", + -11.544175148010254 + ], + [ + "▁grid", + -11.544268608093262 + ], + [ + "road", + -11.544413566589355 + ], + [ + "▁pets", + -11.544429779052734 + ], + [ + "stud", + -11.54448127746582 + ], + [ + "OM", + -11.544569969177246 + ], + [ + "Die", + -11.544877052307129 + ], + [ + "▁800", + -11.54496955871582 + ], + [ + "▁arrangement", + -11.545088768005371 + ], + [ + "▁Sri", + -11.545185089111328 + ], + [ + "▁Patrick", + -11.545187950134277 + ], + [ + "ava", + -11.545212745666504 + ], + [ + "▁pension", + -11.54523754119873 + ], + [ + "dung", + -11.545353889465332 + ], + [ + "▁Chapter", + -11.545475006103516 + ], + [ + "▁Property", + -11.545475006103516 + ], + [ + "▁structural", + -11.545571327209473 + ], + [ + "▁overview", + -11.545731544494629 + ], + [ + "2015", + -11.545917510986328 + ], + [ + "▁lawn", + -11.545924186706543 + ], + [ + "▁Vin", + -11.546219825744629 + ], + [ + "lik", + -11.546402931213379 + ], + [ + "dus", + -11.546418190002441 + ], + [ + "Several", + -11.54654598236084 + ], + [ + "▁Bou", + -11.546670913696289 + ], + [ + "▁copper", + -11.546703338623047 + ], + [ + "▁duration", + -11.546867370605469 + ], + [ + "inate", + -11.546982765197754 + ], + [ + "▁podcast", + -11.547204971313477 + ], + [ + "▁Self", + -11.547208786010742 + ], + [ + "▁Construction", + -11.547491073608398 + ], + [ + "achat", + -11.54768180847168 + ], + [ + "???", + -11.547683715820312 + ], + [ + "▁Electric", + -11.547974586486816 + ], + [ + "▁Mrs", + -11.54799747467041 + ], + [ + "▁CT", + -11.548019409179688 + ], + [ + "▁proceed", + -11.548324584960938 + ], + [ + "▁Course", + -11.548333168029785 + ], + [ + "▁Frei", + -11.548699378967285 + ], + [ + "▁heavily", + -11.548868179321289 + ], + [ + "rique", + -11.548872947692871 + ], + [ + "version", + -11.549016952514648 + ], + [ + 
"▁representatives", + -11.549118041992188 + ], + [ + "▁tourism", + -11.549182891845703 + ], + [ + "▁shirt", + -11.5494966506958 + ], + [ + "▁rough", + -11.549507141113281 + ], + [ + "▁weniger", + -11.549735069274902 + ], + [ + "▁keyboard", + -11.550058364868164 + ], + [ + "▁heritage", + -11.550149917602539 + ], + [ + "kat", + -11.550535202026367 + ], + [ + "assez", + -11.550567626953125 + ], + [ + "▁cabinets", + -11.550591468811035 + ], + [ + "▁Komm", + -11.550762176513672 + ], + [ + "▁impressed", + -11.55078411102295 + ], + [ + "▁Oregon", + -11.550788879394531 + ], + [ + "▁Davis", + -11.55081558227539 + ], + [ + "specialized", + -11.55097770690918 + ], + [ + "▁gross", + -11.550999641418457 + ], + [ + "Located", + -11.551044464111328 + ], + [ + "ttle", + -11.551044464111328 + ], + [ + "▁2010,", + -11.551224708557129 + ], + [ + "chan", + -11.551253318786621 + ], + [ + "mine", + -11.551305770874023 + ], + [ + "▁aduce", + -11.551637649536133 + ], + [ + "▁subsequent", + -11.551729202270508 + ], + [ + "▁demo", + -11.551851272583008 + ], + [ + "aba", + -11.552209854125977 + ], + [ + "▁shock", + -11.552389144897461 + ], + [ + "▁theater", + -11.552854537963867 + ], + [ + "▁engineers", + -11.55294418334961 + ], + [ + "▁feu", + -11.553037643432617 + ], + [ + "▁Rot", + -11.553058624267578 + ], + [ + "▁addressed", + -11.553155899047852 + ], + [ + "▁Letter", + -11.553431510925293 + ], + [ + "gré", + -11.553448677062988 + ], + [ + "▁quantity", + -11.553449630737305 + ], + [ + "▁Seit", + -11.553640365600586 + ], + [ + "▁bacteria", + -11.553681373596191 + ], + [ + "kg", + -11.55408000946045 + ], + [ + "▁conservation", + -11.554191589355469 + ], + [ + "▁entreprises", + -11.55420207977295 + ], + [ + "▁pleasant", + -11.554207801818848 + ], + [ + "armed", + -11.554228782653809 + ], + [ + "dorf", + -11.554286003112793 + ], + [ + "fact", + -11.554320335388184 + ], + [ + "▁Much", + -11.554388046264648 + ], + [ + "▁laugh", + -11.55482006072998 + ], + [ + "▁blade", + -11.554835319519043 + ], + [ + "amine", + -11.554838180541992 + ], + [ + "▁insert", + -11.55493450164795 + ], + [ + "▁toys", + -11.555326461791992 + ], + [ + "▁в", + -11.555726051330566 + ], + [ + "cell", + -11.555747985839844 + ], + [ + "▁strengthen", + -11.555864334106445 + ], + [ + "GR", + -11.555882453918457 + ], + [ + "▁autor", + -11.556114196777344 + ], + [ + "▁LI", + -11.556147575378418 + ], + [ + "▁oamenii", + -11.556184768676758 + ], + [ + "▁Modell", + -11.556222915649414 + ], + [ + "▁sophisticated", + -11.556225776672363 + ], + [ + "▁Write", + -11.556283950805664 + ], + [ + "eți", + -11.556295394897461 + ], + [ + "say", + -11.556641578674316 + ], + [ + "▁nutzen", + -11.556783676147461 + ], + [ + "▁amenities", + -11.556979179382324 + ], + [ + "chel", + -11.557068824768066 + ], + [ + "Unlike", + -11.55720043182373 + ], + [ + "▁Bilder", + -11.557208061218262 + ], + [ + "fertig", + -11.55722713470459 + ], + [ + "PER", + -11.557244300842285 + ], + [ + "▁apparently", + -11.557282447814941 + ], + [ + "▁pointed", + -11.557332992553711 + ], + [ + "lop", + -11.557435989379883 + ], + [ + "▁commande", + -11.557848930358887 + ], + [ + "▁NEW", + -11.557923316955566 + ], + [ + "▁primi", + -11.55798625946045 + ], + [ + "▁aluminum", + -11.558046340942383 + ], + [ + "ificare", + -11.558063507080078 + ], + [ + "open", + -11.55815315246582 + ], + [ + "▁establishment", + -11.558305740356445 + ], + [ + "▁blanc", + -11.558349609375 + ], + [ + "▁1960", + -11.558454513549805 + ], + [ + "▁parameters", + -11.55856990814209 + ], + [ + "schluss", + -11.558685302734375 + 
], + [ + "▁jet", + -11.55879020690918 + ], + [ + "gam", + -11.55902099609375 + ], + [ + "▁oral", + -11.559290885925293 + ], + [ + "▁tons", + -11.559348106384277 + ], + [ + "▁AL", + -11.55935001373291 + ], + [ + "▁intention", + -11.55947494506836 + ], + [ + "ives", + -11.55974292755127 + ], + [ + "▁BMW", + -11.559837341308594 + ], + [ + "gun", + -11.559967041015625 + ], + [ + "leben", + -11.560046195983887 + ], + [ + "▁Fresh", + -11.56010913848877 + ], + [ + "▁tuturor", + -11.560193061828613 + ], + [ + "▁marine", + -11.560208320617676 + ], + [ + "mile", + -11.560260772705078 + ], + [ + "▁alta", + -11.560271263122559 + ], + [ + "nnen", + -11.56050968170166 + ], + [ + "▁courts", + -11.560530662536621 + ], + [ + "▁Hello", + -11.560791015625 + ], + [ + "BL", + -11.560895919799805 + ], + [ + "▁reply", + -11.560962677001953 + ], + [ + "environnement", + -11.560975074768066 + ], + [ + "American", + -11.560995101928711 + ], + [ + "▁Tell", + -11.561040878295898 + ], + [ + "▁chic", + -11.56148624420166 + ], + [ + "bir", + -11.561542510986328 + ], + [ + "▁singing", + -11.561788558959961 + ], + [ + "▁earnings", + -11.561819076538086 + ], + [ + "▁ensemble", + -11.562082290649414 + ], + [ + "▁($", + -11.562169075012207 + ], + [ + "▁Tout", + -11.562192916870117 + ], + [ + "▁Abs", + -11.562264442443848 + ], + [ + "▁describes", + -11.562322616577148 + ], + [ + "▁navigation", + -11.5625 + ], + [ + "▁destul", + -11.562532424926758 + ], + [ + "legate", + -11.562586784362793 + ], + [ + "tral", + -11.562599182128906 + ], + [ + "aţie", + -11.562753677368164 + ], + [ + "▁supplied", + -11.562775611877441 + ], + [ + "▁paar", + -11.562911987304688 + ], + [ + "ionat", + -11.563241958618164 + ], + [ + "9.", + -11.563263893127441 + ], + [ + "▁41", + -11.563348770141602 + ], + [ + "▁Track", + -11.563451766967773 + ], + [ + "▁happiness", + -11.563636779785156 + ], + [ + "▁Personen", + -11.563680648803711 + ], + [ + "▁sac", + -11.56373119354248 + ], + [ + "▁shapes", + -11.563774108886719 + ], + [ + "eld", + -11.56393051147461 + ], + [ + "bett", + -11.563963890075684 + ], + [ + "tile", + -11.56400203704834 + ], + [ + "▁divided", + -11.564035415649414 + ], + [ + "▁13.", + -11.56403923034668 + ], + [ + "market", + -11.564109802246094 + ], + [ + "crafted", + -11.564115524291992 + ], + [ + "▁periods", + -11.564120292663574 + ], + [ + "uş", + -11.564568519592285 + ], + [ + "▁trainer", + -11.56460952758789 + ], + [ + "▁Licht", + -11.564871788024902 + ], + [ + "▁advisor", + -11.564948081970215 + ], + [ + "▁Herr", + -11.564980506896973 + ], + [ + "▁Halloween", + -11.565147399902344 + ], + [ + "alter", + -11.565154075622559 + ], + [ + "▁radical", + -11.565155029296875 + ], + [ + "▁nose", + -11.56527042388916 + ], + [ + "▁Sat", + -11.565323829650879 + ], + [ + "▁Mom", + -11.565372467041016 + ], + [ + "moni", + -11.565377235412598 + ], + [ + "▁semn", + -11.565397262573242 + ], + [ + "vé", + -11.565672874450684 + ], + [ + "identifie", + -11.56570053100586 + ], + [ + "▁hatten", + -11.565957069396973 + ], + [ + "completing", + -11.565959930419922 + ], + [ + "▁gust", + -11.565963745117188 + ], + [ + "▁creat", + -11.56601333618164 + ], + [ + "ché", + -11.566075325012207 + ], + [ + "pay", + -11.566216468811035 + ], + [ + "▁Money", + -11.566229820251465 + ], + [ + "IG", + -11.566243171691895 + ], + [ + "▁Cash", + -11.566327095031738 + ], + [ + "altă", + -11.566420555114746 + ], + [ + "▁bekommen", + -11.566620826721191 + ], + [ + "▁43", + -11.56662654876709 + ], + [ + "▁supplement", + -11.566637992858887 + ], + [ + "▁Early", + 
-11.566754341125488 + ], + [ + "▁mattress", + -11.56692123413086 + ], + [ + "▁worn", + -11.567182540893555 + ], + [ + "rov", + -11.567197799682617 + ], + [ + "▁pray", + -11.56733226776123 + ], + [ + "▁beans", + -11.567673683166504 + ], + [ + "▁passé", + -11.567782402038574 + ], + [ + "▁facilit", + -11.56782054901123 + ], + [ + "▁meters", + -11.56784439086914 + ], + [ + "cke", + -11.568163871765137 + ], + [ + "▁Villa", + -11.568199157714844 + ], + [ + "▁Diego", + -11.568217277526855 + ], + [ + "▁chips", + -11.568244934082031 + ], + [ + "▁mes", + -11.568349838256836 + ], + [ + "▁Seattle", + -11.568421363830566 + ], + [ + "BU", + -11.568621635437012 + ], + [ + "▁nevoi", + -11.568714141845703 + ], + [ + "▁lets", + -11.568737030029297 + ], + [ + "▁hopefully", + -11.56894302368164 + ], + [ + "▁AG", + -11.568954467773438 + ], + [ + "liable", + -11.568999290466309 + ], + [ + "pound", + -11.569067001342773 + ], + [ + "près", + -11.569085121154785 + ], + [ + "arul", + -11.56920337677002 + ], + [ + "isiert", + -11.569281578063965 + ], + [ + "▁Expert", + -11.569297790527344 + ], + [ + "▁particulier", + -11.569367408752441 + ], + [ + "stoff", + -11.569952964782715 + ], + [ + "▁interpretation", + -11.56999397277832 + ], + [ + "După", + -11.57007884979248 + ], + [ + "sait", + -11.57011604309082 + ], + [ + "▁nouvelles", + -11.570173263549805 + ], + [ + "▁Ok", + -11.570175170898438 + ], + [ + "tap", + -11.570301055908203 + ], + [ + "▁targets", + -11.570327758789062 + ], + [ + "rung", + -11.57052230834961 + ], + [ + "▁stare", + -11.570576667785645 + ], + [ + "▁efficiently", + -11.570908546447754 + ], + [ + "EV", + -11.571003913879395 + ], + [ + "évit", + -11.571310997009277 + ], + [ + "▁Moldova", + -11.571542739868164 + ], + [ + "▁Face", + -11.571663856506348 + ], + [ + "▁flo", + -11.57168960571289 + ], + [ + "▁acestora", + -11.5717134475708 + ], + [ + "▁Victor", + -11.57183837890625 + ], + [ + "▁breed", + -11.57198429107666 + ], + [ + "morph", + -11.572230339050293 + ], + [ + "sley", + -11.572274208068848 + ], + [ + "mot", + -11.57234001159668 + ], + [ + "▁URL", + -11.572395324707031 + ], + [ + "ellen", + -11.572502136230469 + ], + [ + "▁resist", + -11.572781562805176 + ], + [ + "zon", + -11.57282829284668 + ], + [ + "ndel", + -11.572967529296875 + ], + [ + "will", + -11.572989463806152 + ], + [ + "▁alege", + -11.573076248168945 + ], + [ + "▁Easter", + -11.573114395141602 + ], + [ + "▁Bat", + -11.573190689086914 + ], + [ + "▁Höhe", + -11.573223114013672 + ], + [ + "▁fascinating", + -11.573387145996094 + ], + [ + "▁Know", + -11.5735445022583 + ], + [ + "illon", + -11.573602676391602 + ], + [ + "flex", + -11.57363224029541 + ], + [ + "who", + -11.573701858520508 + ], + [ + "▁Always", + -11.573729515075684 + ], + [ + "▁Bush", + -11.573777198791504 + ], + [ + "ICE", + -11.574009895324707 + ], + [ + "verein", + -11.57448673248291 + ], + [ + "▁später", + -11.57448959350586 + ], + [ + "▁cherch", + -11.574575424194336 + ], + [ + "makers", + -11.574753761291504 + ], + [ + "versus", + -11.574790954589844 + ], + [ + "▁Clear", + -11.574846267700195 + ], + [ + "▁Pennsylvania", + -11.574912071228027 + ], + [ + "Dieser", + -11.575041770935059 + ], + [ + "▁picking", + -11.575072288513184 + ], + [ + "▁restoration", + -11.57513427734375 + ], + [ + "▁interviews", + -11.575201988220215 + ], + [ + "pressed", + -11.575210571289062 + ], + [ + "nnerhalb", + -11.575674057006836 + ], + [ + "▁connecting", + -11.575834274291992 + ], + [ + "jou", + -11.575943946838379 + ], + [ + "▁react", + -11.576189041137695 + ], + [ + "▁Merci", + 
-11.576223373413086 + ], + [ + "▁Phone", + -11.576356887817383 + ], + [ + "▁1)", + -11.57652473449707 + ], + [ + "▁victims", + -11.576618194580078 + ], + [ + "▁Spo", + -11.576685905456543 + ], + [ + "atului", + -11.576735496520996 + ], + [ + "▁Harry", + -11.576837539672852 + ], + [ + "▁Sala", + -11.576875686645508 + ], + [ + "Pol", + -11.577075958251953 + ], + [ + "▁Clo", + -11.577167510986328 + ], + [ + "▁Erfolg", + -11.577211380004883 + ], + [ + "autour", + -11.577308654785156 + ], + [ + "▁Template", + -11.577314376831055 + ], + [ + "▁invention", + -11.57754898071289 + ], + [ + "▁schwer", + -11.57761287689209 + ], + [ + "vac", + -11.577625274658203 + ], + [ + "▁Trail", + -11.577627182006836 + ], + [ + "▁Vietnam", + -11.577638626098633 + ], + [ + "▁Size", + -11.577689170837402 + ], + [ + "▁Bern", + -11.577783584594727 + ], + [ + "▁emp", + -11.577845573425293 + ], + [ + "▁shake", + -11.57787799835205 + ], + [ + "▁Ave", + -11.57794189453125 + ], + [ + "▁productive", + -11.578009605407715 + ], + [ + "▁apple", + -11.578015327453613 + ], + [ + "▁portal", + -11.578052520751953 + ], + [ + "▁ceramic", + -11.578082084655762 + ], + [ + "▁pad", + -11.578110694885254 + ], + [ + "▁Syn", + -11.578316688537598 + ], + [ + "Ab", + -11.57845401763916 + ], + [ + "▁syn", + -11.578761100769043 + ], + [ + "find", + -11.578888893127441 + ], + [ + "▁settle", + -11.578909873962402 + ], + [ + "▁général", + -11.578965187072754 + ], + [ + "▁okay", + -11.579032897949219 + ], + [ + "▁receipt", + -11.57906436920166 + ], + [ + "orii", + -11.579117774963379 + ], + [ + "▁Mission", + -11.579122543334961 + ], + [ + "entrée", + -11.579304695129395 + ], + [ + "▁besteht", + -11.579394340515137 + ], + [ + "▁wisdom", + -11.57950210571289 + ], + [ + "▁heraus", + -11.579645156860352 + ], + [ + "▁balanced", + -11.579753875732422 + ], + [ + "▁habits", + -11.579773902893066 + ], + [ + "tang", + -11.579888343811035 + ], + [ + "ură", + -11.580151557922363 + ], + [ + "▁winners", + -11.580182075500488 + ], + [ + "ç", + -11.580215454101562 + ], + [ + "▁folosi", + -11.580242156982422 + ], + [ + "aliment", + -11.5802583694458 + ], + [ + "▁fiction", + -11.580373764038086 + ], + [ + "▁Spe", + -11.580534934997559 + ], + [ + "▁elsewhere", + -11.580663681030273 + ], + [ + "▁dependent", + -11.580808639526367 + ], + [ + "▁Anne", + -11.581167221069336 + ], + [ + "▁excellence", + -11.581695556640625 + ], + [ + "▁Feel", + -11.581753730773926 + ], + [ + "lieb", + -11.581811904907227 + ], + [ + "▁sectors", + -11.581865310668945 + ], + [ + "▁expir", + -11.581886291503906 + ], + [ + "▁surfaces", + -11.58191204071045 + ], + [ + "▁minim", + -11.581937789916992 + ], + [ + "▁tumor", + -11.58204460144043 + ], + [ + "▁paragraph", + -11.582289695739746 + ], + [ + "▁disk", + -11.58232307434082 + ], + [ + "▁tonight", + -11.582379341125488 + ], + [ + "▁precious", + -11.582794189453125 + ], + [ + "▁console", + -11.58288288116455 + ], + [ + "Th", + -11.582939147949219 + ], + [ + "neu", + -11.583020210266113 + ], + [ + "effective", + -11.5839262008667 + ], + [ + "▁Republican", + -11.583944320678711 + ], + [ + "format", + -11.584297180175781 + ], + [ + "▁preserve", + -11.58436107635498 + ], + [ + "▁wiring", + -11.584599494934082 + ], + [ + "▁exercises", + -11.584757804870605 + ], + [ + "▁pregnancy", + -11.584774017333984 + ], + [ + "tries", + -11.58481502532959 + ], + [ + "▁jeunes", + -11.584883689880371 + ], + [ + "▁publishing", + -11.584932327270508 + ], + [ + "▁nehmen", + -11.584935188293457 + ], + [ + "▁capability", + -11.5849609375 + ], + [ + "▁prompt", + 
-11.584965705871582 + ], + [ + "▁Further", + -11.58497428894043 + ], + [ + "▁semaine", + -11.585173606872559 + ], + [ + "abo", + -11.585216522216797 + ], + [ + "▁evolution", + -11.585319519042969 + ], + [ + "▁Sud", + -11.585403442382812 + ], + [ + "▁frais", + -11.585525512695312 + ], + [ + "LT", + -11.585619926452637 + ], + [ + "▁stack", + -11.58581829071045 + ], + [ + "▁Inside", + -11.585854530334473 + ], + [ + "▁programmes", + -11.585997581481934 + ], + [ + "▁passes", + -11.586196899414062 + ], + [ + "mü", + -11.586474418640137 + ], + [ + "▁progressive", + -11.586518287658691 + ], + [ + "▁calculator", + -11.58658218383789 + ], + [ + "▁Core", + -11.586655616760254 + ], + [ + "BT", + -11.586956977844238 + ], + [ + "core", + -11.586996078491211 + ], + [ + "▁Moon", + -11.587004661560059 + ], + [ + "▁tender", + -11.587040901184082 + ], + [ + "durch", + -11.58721923828125 + ], + [ + "▁commune", + -11.587453842163086 + ], + [ + "▁Prince", + -11.587594032287598 + ], + [ + "▁demonstrated", + -11.587693214416504 + ], + [ + "▁conversations", + -11.587890625 + ], + [ + "▁fri", + -11.587984085083008 + ], + [ + "igh", + -11.587992668151855 + ], + [ + "being", + -11.588334083557129 + ], + [ + "pause", + -11.58853530883789 + ], + [ + "▁Bear", + -11.58871841430664 + ], + [ + "ayant", + -11.588875770568848 + ], + [ + "▁Industry", + -11.588967323303223 + ], + [ + "▁sponsor", + -11.589012145996094 + ], + [ + "▁numele", + -11.589098930358887 + ], + [ + "▁VA", + -11.589167594909668 + ], + [ + "▁Sommer", + -11.589366912841797 + ], + [ + "TB", + -11.589380264282227 + ], + [ + "▁optional", + -11.589505195617676 + ], + [ + "▁Landes", + -11.589812278747559 + ], + [ + "coli", + -11.589963912963867 + ], + [ + "empt", + -11.59018325805664 + ], + [ + "▁Iron", + -11.590620040893555 + ], + [ + "▁1992", + -11.59090518951416 + ], + [ + "▁attempts", + -11.59090518951416 + ], + [ + "halb", + -11.590960502624512 + ], + [ + "▁photographer", + -11.59097671508789 + ], + [ + "▁witness", + -11.59097957611084 + ], + [ + "bru", + -11.591073989868164 + ], + [ + "▁Ras", + -11.59107780456543 + ], + [ + "▁burden", + -11.591142654418945 + ], + [ + "▁kaufen", + -11.591256141662598 + ], + [ + "▁vu", + -11.591362953186035 + ], + [ + "▁Wedding", + -11.591601371765137 + ], + [ + "▁Kla", + -11.591604232788086 + ], + [ + "occasion", + -11.591915130615234 + ], + [ + "▁keys", + -11.592131614685059 + ], + [ + "▁oferi", + -11.592279434204102 + ], + [ + "▁puzzle", + -11.592302322387695 + ], + [ + "eaux", + -11.59254264831543 + ], + [ + "▁Eco", + -11.592805862426758 + ], + [ + "▁52", + -11.592817306518555 + ], + [ + "▁Elizabeth", + -11.59284496307373 + ], + [ + "▁dispose", + -11.593144416809082 + ], + [ + "▁cluster", + -11.59326171875 + ], + [ + "iki", + -11.593283653259277 + ], + [ + "▁Guys", + -11.593595504760742 + ], + [ + "▁Economic", + -11.593632698059082 + ], + [ + "▁apar", + -11.593677520751953 + ], + [ + "▁ziua", + -11.593688011169434 + ], + [ + "▁integral", + -11.593740463256836 + ], + [ + "▁tac", + -11.59376335144043 + ], + [ + "▁restrictions", + -11.593778610229492 + ], + [ + "▁nerve", + -11.593794822692871 + ], + [ + "▁Stop", + -11.59386157989502 + ], + [ + "burger", + -11.593897819519043 + ], + [ + "explo", + -11.593944549560547 + ], + [ + "lö", + -11.593958854675293 + ], + [ + "NP", + -11.594077110290527 + ], + [ + "▁Brook", + -11.59418773651123 + ], + [ + "▁Close", + -11.594278335571289 + ], + [ + "▁representing", + -11.59446907043457 + ], + [ + "▁certaine", + -11.594767570495605 + ], + [ + "▁discovery", + -11.594836235046387 + ], + [ 
+ "▁rece", + -11.594964981079102 + ], + [ + "FF", + -11.594970703125 + ], + [ + "▁salary", + -11.595069885253906 + ], + [ + "▁Wolf", + -11.595137596130371 + ], + [ + "▁deserve", + -11.595166206359863 + ], + [ + "ţele", + -11.595417976379395 + ], + [ + "gathered", + -11.595934867858887 + ], + [ + "▁comply", + -11.59599494934082 + ], + [ + "lagen", + -11.596034049987793 + ], + [ + "ătoare", + -11.596192359924316 + ], + [ + "▁relate", + -11.596410751342773 + ], + [ + "▁Roger", + -11.59656810760498 + ], + [ + "▁blame", + -11.596575736999512 + ], + [ + "▁Jen", + -11.596914291381836 + ], + [ + "▁army", + -11.596936225891113 + ], + [ + "▁$10", + -11.597129821777344 + ], + [ + "▁Cabinet", + -11.597185134887695 + ], + [ + "Gu", + -11.597367286682129 + ], + [ + "▁wildlife", + -11.597452163696289 + ], + [ + "▁Memorial", + -11.597643852233887 + ], + [ + "▁Holiday", + -11.597742080688477 + ], + [ + "▁curat", + -11.598291397094727 + ], + [ + "iilor", + -11.598299026489258 + ], + [ + "▁fleet", + -11.598408699035645 + ], + [ + "▁reviewed", + -11.59843635559082 + ], + [ + "cet", + -11.598450660705566 + ], + [ + "▁virtually", + -11.598487854003906 + ], + [ + "▁Crusher", + -11.59852409362793 + ], + [ + "▁slide", + -11.59858226776123 + ], + [ + "▁générale", + -11.598604202270508 + ], + [ + "▁sensation", + -11.598630905151367 + ], + [ + "▁garlic", + -11.598638534545898 + ], + [ + "5)", + -11.598657608032227 + ], + [ + "▁batteries", + -11.598756790161133 + ], + [ + "SH", + -11.59876823425293 + ], + [ + "▁seller", + -11.59882926940918 + ], + [ + "design", + -11.598871231079102 + ], + [ + "5.", + -11.598944664001465 + ], + [ + "▁Overall", + -11.598969459533691 + ], + [ + "▁investigate", + -11.599058151245117 + ], + [ + "max", + -11.599064826965332 + ], + [ + "▁attach", + -11.599166870117188 + ], + [ + "▁Future", + -11.599209785461426 + ], + [ + "OUR", + -11.599284172058105 + ], + [ + "▁LE", + -11.59968090057373 + ], + [ + "▁bite", + -11.599811553955078 + ], + [ + "tige", + -11.599874496459961 + ], + [ + "▁twist", + -11.59987735748291 + ], + [ + "hole", + -11.600180625915527 + ], + [ + "▁Tony", + -11.600510597229004 + ], + [ + "LU", + -11.600598335266113 + ], + [ + "▁Organization", + -11.600617408752441 + ], + [ + "▁invit", + -11.600632667541504 + ], + [ + "▁Ant", + -11.600739479064941 + ], + [ + "NR", + -11.600788116455078 + ], + [ + "sorgt", + -11.600854873657227 + ], + [ + "▁Lan", + -11.600860595703125 + ], + [ + "▁Manchester", + -11.60091495513916 + ], + [ + "schrift", + -11.601066589355469 + ], + [ + "▁kg", + -11.601150512695312 + ], + [ + "▁aroma", + -11.60132884979248 + ], + [ + "▁Source", + -11.601388931274414 + ], + [ + "▁permite", + -11.601445198059082 + ], + [ + "▁Consider", + -11.601457595825195 + ], + [ + "▁Artist", + -11.601627349853516 + ], + [ + "▁transmit", + -11.601783752441406 + ], + [ + "oasa", + -11.601834297180176 + ], + [ + "▁Zen", + -11.60198974609375 + ], + [ + "ANT", + -11.602235794067383 + ], + [ + "▁consulting", + -11.602404594421387 + ], + [ + "▁commence", + -11.6025390625 + ], + [ + "▁quilt", + -11.60261058807373 + ], + [ + "owned", + -11.602642059326172 + ], + [ + "▁bro", + -11.602689743041992 + ], + [ + "▁integrate", + -11.602715492248535 + ], + [ + "▁Ontario", + -11.602775573730469 + ], + [ + "TF", + -11.602832794189453 + ], + [ + "▁Study", + -11.602887153625488 + ], + [ + "▁ensuite", + -11.603155136108398 + ], + [ + "itatii", + -11.603180885314941 + ], + [ + "Mon", + -11.603235244750977 + ], + [ + "-11", + -11.603299140930176 + ], + [ + "what", + -11.603384017944336 + ], + [ + 
"▁Things", + -11.60361385345459 + ], + [ + "▁Eye", + -11.603819847106934 + ], + [ + "▁présente", + -11.603828430175781 + ], + [ + "tention", + -11.603915214538574 + ], + [ + "|", + -11.603957176208496 + ], + [ + "stall", + -11.603963851928711 + ], + [ + "▁beef", + -11.603992462158203 + ], + [ + "figur", + -11.604005813598633 + ], + [ + "▁cancel", + -11.604146003723145 + ], + [ + "▁domeniul", + -11.604252815246582 + ], + [ + "▁360", + -11.604290008544922 + ], + [ + "▁sleeping", + -11.6045560836792 + ], + [ + "▁traitement", + -11.604580879211426 + ], + [ + "ühl", + -11.604769706726074 + ], + [ + "▁Environmental", + -11.604835510253906 + ], + [ + "cier", + -11.604894638061523 + ], + [ + "▁NC", + -11.604907035827637 + ], + [ + "pub", + -11.604925155639648 + ], + [ + "▁addiction", + -11.605071067810059 + ], + [ + "▁nest", + -11.605128288269043 + ], + [ + "▁ON", + -11.605395317077637 + ], + [ + "▁discrimin", + -11.605396270751953 + ], + [ + "▁proved", + -11.605517387390137 + ], + [ + "▁occasions", + -11.605864524841309 + ], + [ + "OH", + -11.606184959411621 + ], + [ + "▁lawyers", + -11.606203079223633 + ], + [ + "own", + -11.606290817260742 + ], + [ + "▁Meeting", + -11.606596946716309 + ], + [ + "▁Industrial", + -11.606704711914062 + ], + [ + "owed", + -11.606736183166504 + ], + [ + "▁Cel", + -11.606793403625488 + ], + [ + "legt", + -11.60706615447998 + ], + [ + "ily", + -11.607085227966309 + ], + [ + "▁wins", + -11.607155799865723 + ], + [ + "▁strap", + -11.607367515563965 + ], + [ + "digit", + -11.607441902160645 + ], + [ + "▁hinaus", + -11.607504844665527 + ], + [ + "mple", + -11.607712745666504 + ], + [ + "▁(5", + -11.607797622680664 + ], + [ + "▁pdf", + -11.607894897460938 + ], + [ + "▁eco", + -11.607915878295898 + ], + [ + "▁junior", + -11.608172416687012 + ], + [ + "DB", + -11.608556747436523 + ], + [ + "gelegt", + -11.608636856079102 + ], + [ + "ION", + -11.608678817749023 + ], + [ + "▁competitors", + -11.60880184173584 + ], + [ + "▁Arab", + -11.60898208618164 + ], + [ + "▁Secret", + -11.609148979187012 + ], + [ + "▁Kunst", + -11.609283447265625 + ], + [ + "▁worried", + -11.609297752380371 + ], + [ + "meiner", + -11.609378814697266 + ], + [ + "▁Magic", + -11.609450340270996 + ], + [ + "▁groß", + -11.609537124633789 + ], + [ + "▁travaux", + -11.609748840332031 + ], + [ + "▁sollen", + -11.609772682189941 + ], + [ + "▁Sciences", + -11.609850883483887 + ], + [ + "▁athletes", + -11.610055923461914 + ], + [ + "▁discounts", + -11.610079765319824 + ], + [ + "kit", + -11.610211372375488 + ], + [ + "lind", + -11.610305786132812 + ], + [ + "▁enjoyable", + -11.610421180725098 + ], + [ + "ground", + -11.610489845275879 + ], + [ + "▁Tat", + -11.610529899597168 + ], + [ + "▁passengers", + -11.610576629638672 + ], + [ + "▁Dami", + -11.610677719116211 + ], + [ + "▁Major", + -11.61070728302002 + ], + [ + "watch", + -11.610796928405762 + ], + [ + "working", + -11.610908508300781 + ], + [ + "arrêt", + -11.610923767089844 + ], + [ + "▁subtle", + -11.611069679260254 + ], + [ + "▁epi", + -11.611197471618652 + ], + [ + "▁Jahres", + -11.61128044128418 + ], + [ + "▁cooling", + -11.61141586303711 + ], + [ + "▁makeup", + -11.611427307128906 + ], + [ + "jet", + -11.611495018005371 + ], + [ + "▁Given", + -11.611519813537598 + ], + [ + "plex", + -11.61158275604248 + ], + [ + "▁exploit", + -11.611590385437012 + ], + [ + "rine", + -11.611604690551758 + ], + [ + "▁delivers", + -11.612122535705566 + ], + [ + "▁summary", + -11.612236022949219 + ], + [ + "▁beaches", + -11.612459182739258 + ], + [ + "lift", + 
-11.612550735473633 + ], + [ + "▁Suite", + -11.612554550170898 + ], + [ + "▁Assistant", + -11.612688064575195 + ], + [ + "▁taxi", + -11.61273193359375 + ], + [ + "▁peaceful", + -11.612805366516113 + ], + [ + "▁Mode", + -11.612980842590332 + ], + [ + "▁Fun", + -11.613059043884277 + ], + [ + "▁diameter", + -11.613142967224121 + ], + [ + "▁phrase", + -11.613150596618652 + ], + [ + "ACT", + -11.613265037536621 + ], + [ + "▁différentes", + -11.613322257995605 + ], + [ + "▁14.", + -11.613417625427246 + ], + [ + "▁CE", + -11.61352825164795 + ], + [ + "▁2)", + -11.613739013671875 + ], + [ + "▁Nat", + -11.613785743713379 + ], + [ + "▁delete", + -11.61388111114502 + ], + [ + "other", + -11.613930702209473 + ], + [ + "hang", + -11.613985061645508 + ], + [ + "▁sujet", + -11.614117622375488 + ], + [ + "▁precise", + -11.614212989807129 + ], + [ + "▁Total", + -11.614290237426758 + ], + [ + "▁chambre", + -11.614483833312988 + ], + [ + "sati", + -11.614666938781738 + ], + [ + "▁Metal", + -11.614995956420898 + ], + [ + "rust", + -11.615038871765137 + ], + [ + "▁Brazil", + -11.615508079528809 + ], + [ + "▁hybrid", + -11.615636825561523 + ], + [ + "ops", + -11.615691184997559 + ], + [ + "▁electro", + -11.615789413452148 + ], + [ + "utz", + -11.61608600616455 + ], + [ + "▁quoi", + -11.616246223449707 + ], + [ + "▁adoption", + -11.616331100463867 + ], + [ + "3.5", + -11.616518020629883 + ], + [ + "50,000", + -11.616599082946777 + ], + [ + "veti", + -11.616630554199219 + ], + [ + "hir", + -11.616957664489746 + ], + [ + "▁adequate", + -11.617067337036133 + ], + [ + "ologist", + -11.617109298706055 + ], + [ + "torii", + -11.617295265197754 + ], + [ + "wasser", + -11.617355346679688 + ], + [ + "▁Authority", + -11.617362976074219 + ], + [ + "▁donation", + -11.617364883422852 + ], + [ + "700", + -11.617375373840332 + ], + [ + "▁somehow", + -11.617375373840332 + ], + [ + "▁kostenlos", + -11.617425918579102 + ], + [ + "▁generations", + -11.617537498474121 + ], + [ + "▁Turkey", + -11.617711067199707 + ], + [ + "rata", + -11.617819786071777 + ], + [ + "▁animation", + -11.618206024169922 + ], + [ + "▁CH", + -11.618281364440918 + ], + [ + "ending", + -11.618317604064941 + ], + [ + "welt", + -11.618376731872559 + ], + [ + "bac", + -11.618380546569824 + ], + [ + "MG", + -11.618460655212402 + ], + [ + "▁parks", + -11.618468284606934 + ], + [ + "▁placing", + -11.618870735168457 + ], + [ + "sort", + -11.61915111541748 + ], + [ + "▁Bitcoin", + -11.619163513183594 + ], + [ + "▁disorder", + -11.619282722473145 + ], + [ + "MAN", + -11.619302749633789 + ], + [ + "aught", + -11.619412422180176 + ], + [ + "▁guides", + -11.61956787109375 + ], + [ + "▁circul", + -11.619651794433594 + ], + [ + "▁Steven", + -11.619954109191895 + ], + [ + "rrière", + -11.619976997375488 + ], + [ + "▁Arch", + -11.61999225616455 + ], + [ + "▁plates", + -11.620091438293457 + ], + [ + "MR", + -11.620118141174316 + ], + [ + "▁cow", + -11.620142936706543 + ], + [ + "▁integrity", + -11.620210647583008 + ], + [ + "▁(18", + -11.620217323303223 + ], + [ + "▁totul", + -11.62024211883545 + ], + [ + "jack", + -11.620373725891113 + ], + [ + "▁privire", + -11.620588302612305 + ], + [ + "▁terme", + -11.620752334594727 + ], + [ + "▁execution", + -11.620781898498535 + ], + [ + "▁organism", + -11.620838165283203 + ], + [ + "▁führen", + -11.620853424072266 + ], + [ + "▁patron", + -11.620940208435059 + ], + [ + "▁appreciated", + -11.62096881866455 + ], + [ + "liant", + -11.62100601196289 + ], + [ + "▁Solar", + -11.621055603027344 + ], + [ + "▁vinyl", + -11.621134757995605 + ], 
+ [ + "▁treasure", + -11.621137619018555 + ], + [ + "▁retro", + -11.621167182922363 + ], + [ + "▁bout", + -11.621174812316895 + ], + [ + "lab", + -11.621183395385742 + ], + [ + "▁dimension", + -11.621394157409668 + ], + [ + "called", + -11.62146282196045 + ], + [ + "▁intern", + -11.621479034423828 + ], + [ + "issement", + -11.62173843383789 + ], + [ + "▁Erst", + -11.621837615966797 + ], + [ + "▁stellen", + -11.621920585632324 + ], + [ + "▁familia", + -11.622069358825684 + ], + [ + "▁notion", + -11.622176170349121 + ], + [ + "▁Could", + -11.622322082519531 + ], + [ + "Getting", + -11.622323036193848 + ], + [ + "▁drives", + -11.622397422790527 + ], + [ + "▁Israeli", + -11.622520446777344 + ], + [ + "▁nations", + -11.622546195983887 + ], + [ + "▁duties", + -11.622700691223145 + ], + [ + "▁personalized", + -11.622788429260254 + ], + [ + "▁weren", + -11.62282657623291 + ], + [ + "▁chemicals", + -11.622847557067871 + ], + [ + "▁killing", + -11.622913360595703 + ], + [ + "▁masa", + -11.622994422912598 + ], + [ + "▁parce", + -11.623026847839355 + ], + [ + "▁lady", + -11.623178482055664 + ], + [ + "ides", + -11.623221397399902 + ], + [ + "▁execut", + -11.62340259552002 + ], + [ + "▁floral", + -11.62341594696045 + ], + [ + "▁Child", + -11.623428344726562 + ], + [ + "▁medal", + -11.623503684997559 + ], + [ + "▁casa", + -11.623603820800781 + ], + [ + "▁enabled", + -11.623650550842285 + ], + [ + "12.", + -11.624239921569824 + ], + [ + "nger", + -11.624266624450684 + ], + [ + "▁vent", + -11.624297142028809 + ], + [ + "▁urmă", + -11.624727249145508 + ], + [ + "▁Herz", + -11.624835968017578 + ], + [ + "▁Jay", + -11.624916076660156 + ], + [ + ".....", + -11.624942779541016 + ], + [ + "▁Kris", + -11.62499713897705 + ], + [ + "kenn", + -11.625001907348633 + ], + [ + "ress", + -11.625027656555176 + ], + [ + "weight", + -11.62519359588623 + ], + [ + "▁indicates", + -11.625198364257812 + ], + [ + "▁mentor", + -11.625328063964844 + ], + [ + "using", + -11.625386238098145 + ], + [ + "▁femmes", + -11.625460624694824 + ], + [ + "▁Jung", + -11.625528335571289 + ], + [ + "▁Send", + -11.625574111938477 + ], + [ + "▁seasons", + -11.625906944274902 + ], + [ + "▁aesthetic", + -11.625964164733887 + ], + [ + "▁Block", + -11.626086235046387 + ], + [ + "▁babies", + -11.626150131225586 + ], + [ + "zig", + -11.626242637634277 + ], + [ + "edge", + -11.626428604125977 + ], + [ + "▁alike", + -11.626458168029785 + ], + [ + "▁immune", + -11.626609802246094 + ], + [ + "▁magical", + -11.626710891723633 + ], + [ + "▁Snow", + -11.626748085021973 + ], + [ + "▁spacious", + -11.627058982849121 + ], + [ + "▁Melbourne", + -11.62706184387207 + ], + [ + "order", + -11.627081871032715 + ], + [ + "▁timing", + -11.627176284790039 + ], + [ + "▁inainte", + -11.627220153808594 + ], + [ + "▁width", + -11.627327919006348 + ], + [ + "bild", + -11.627386093139648 + ], + [ + "Tra", + -11.627429008483887 + ], + [ + "▁appliances", + -11.627449989318848 + ], + [ + "▁dirt", + -11.627498626708984 + ], + [ + "▁Rent", + -11.627689361572266 + ], + [ + "responsibilities", + -11.627747535705566 + ], + [ + "▁blogs", + -11.62778377532959 + ], + [ + "nächsten", + -11.627799034118652 + ], + [ + "▁argue", + -11.627928733825684 + ], + [ + "▁Resume", + -11.627985954284668 + ], + [ + "▁Michel", + -11.628044128417969 + ], + [ + "▁terrible", + -11.628092765808105 + ], + [ + "graph", + -11.628151893615723 + ], + [ + "bird", + -11.628202438354492 + ], + [ + "▁Simple", + -11.628457069396973 + ], + [ + "nning", + -11.628658294677734 + ], + [ + "▁coconut", + -11.628683090209961 
+ ], + [ + "▁comprise", + -11.628787994384766 + ], + [ + "heure", + -11.628918647766113 + ], + [ + "▁nichts", + -11.628921508789062 + ], + [ + "▁manufacture", + -11.628966331481934 + ], + [ + "▁Sar", + -11.629011154174805 + ], + [ + "green", + -11.629014015197754 + ], + [ + "lining", + -11.62910270690918 + ], + [ + "▁tremendous", + -11.629128456115723 + ], + [ + "▁Wine", + -11.629164695739746 + ], + [ + "gir", + -11.629290580749512 + ], + [ + "▁Nothing", + -11.629562377929688 + ], + [ + "▁Miller", + -11.62957763671875 + ], + [ + "▁Schwe", + -11.629712104797363 + ], + [ + "zone", + -11.629942893981934 + ], + [ + "▁cunoscut", + -11.629964828491211 + ], + [ + "rupt", + -11.630166053771973 + ], + [ + "kle", + -11.630187034606934 + ], + [ + "▁Bucuresti", + -11.630510330200195 + ], + [ + "▁Abend", + -11.630574226379395 + ], + [ + "▁aura", + -11.630583763122559 + ], + [ + "▁Dance", + -11.63073444366455 + ], + [ + "▁Wilson", + -11.63086986541748 + ], + [ + "icide", + -11.630901336669922 + ], + [ + "bai", + -11.630910873413086 + ], + [ + "oriented", + -11.63103199005127 + ], + [ + "▁celebrated", + -11.631421089172363 + ], + [ + "schlag", + -11.631531715393066 + ], + [ + "▁10-", + -11.631600379943848 + ], + [ + "Unsere", + -11.63167667388916 + ], + [ + "énergie", + -11.632009506225586 + ], + [ + "▁qualify", + -11.63205623626709 + ], + [ + "▁contenu", + -11.632177352905273 + ], + [ + "▁Lauf", + -11.63220500946045 + ], + [ + "▁einzelne", + -11.632360458374023 + ], + [ + "▁Youth", + -11.632415771484375 + ], + [ + "explains", + -11.632601737976074 + ], + [ + "grat", + -11.632782936096191 + ], + [ + "▁72", + -11.632804870605469 + ], + [ + "labor", + -11.632885932922363 + ], + [ + "2018", + -11.632940292358398 + ], + [ + "▁Dank", + -11.633149147033691 + ], + [ + "▁Hey", + -11.633523941040039 + ], + [ + "▁refuse", + -11.633536338806152 + ], + [ + "▁graduated", + -11.633599281311035 + ], + [ + "▁României", + -11.633627891540527 + ], + [ + "punkt", + -11.633807182312012 + ], + [ + "▁regulation", + -11.633834838867188 + ], + [ + "Bru", + -11.633842468261719 + ], + [ + "▁Side", + -11.633891105651855 + ], + [ + "▁sol", + -11.633970260620117 + ], + [ + "▁extraordinary", + -11.634182929992676 + ], + [ + "▁ging", + -11.634247779846191 + ], + [ + "▁Creative", + -11.634299278259277 + ], + [ + "▁expanding", + -11.634349822998047 + ], + [ + "▁problème", + -11.63444995880127 + ], + [ + "▁Reserve", + -11.63459300994873 + ], + [ + "auteur", + -11.634642601013184 + ], + [ + "sphere", + -11.634657859802246 + ], + [ + "season", + -11.634716987609863 + ], + [ + "frei", + -11.634756088256836 + ], + [ + "▁8,", + -11.634765625 + ], + [ + "▁filing", + -11.634810447692871 + ], + [ + "▁Complete", + -11.635017395019531 + ], + [ + "▁revolution", + -11.635035514831543 + ], + [ + "▁unele", + -11.63520622253418 + ], + [ + "/8", + -11.635272979736328 + ], + [ + "istes", + -11.635310173034668 + ], + [ + "backed", + -11.635400772094727 + ], + [ + "shirt", + -11.635554313659668 + ], + [ + "▁Details", + -11.635673522949219 + ], + [ + "rod", + -11.635695457458496 + ], + [ + "▁pod", + -11.63582992553711 + ], + [ + "▁operators", + -11.635921478271484 + ], + [ + "was", + -11.635930061340332 + ], + [ + "hou", + -11.63594913482666 + ], + [ + "▁Coach", + -11.636075019836426 + ], + [ + "irii", + -11.636138916015625 + ], + [ + "▁ordinary", + -11.636186599731445 + ], + [ + "Institut", + -11.63620662689209 + ], + [ + "▁Flash", + -11.63633918762207 + ], + [ + "0-", + -11.636537551879883 + ], + [ + "▁flavour", + -11.6367769241333 + ], + [ + "specific", 
+ -11.636906623840332 + ], + [ + "▁landing", + -11.636930465698242 + ], + [ + "▁geo", + -11.636935234069824 + ], + [ + "▁legend", + -11.636983871459961 + ], + [ + "vari", + -11.63703441619873 + ], + [ + "rop", + -11.637084007263184 + ], + [ + "▁Excel", + -11.6370849609375 + ], + [ + "▁Flu", + -11.637203216552734 + ], + [ + "▁intent", + -11.637582778930664 + ], + [ + "▁Deep", + -11.637594223022461 + ], + [ + "▁Kor", + -11.63763427734375 + ], + [ + "▁Philadelphia", + -11.637914657592773 + ], + [ + "▁rând", + -11.63800048828125 + ], + [ + "▁USD", + -11.638033866882324 + ], + [ + "laden", + -11.63803482055664 + ], + [ + "▁Hin", + -11.638047218322754 + ], + [ + "hap", + -11.638197898864746 + ], + [ + "▁thorough", + -11.638227462768555 + ], + [ + "▁oferit", + -11.63826847076416 + ], + [ + "kind", + -11.63831615447998 + ], + [ + "▁Cancer", + -11.638428688049316 + ], + [ + "apo", + -11.638596534729004 + ], + [ + "▁valve", + -11.638650894165039 + ], + [ + "▁encouraging", + -11.63884449005127 + ], + [ + "▁sûr", + -11.638904571533203 + ], + [ + "shing", + -11.638981819152832 + ], + [ + "▁49", + -11.639132499694824 + ], + [ + "gov", + -11.639142990112305 + ], + [ + "▁Five", + -11.63933277130127 + ], + [ + "▁stroke", + -11.639344215393066 + ], + [ + "▁apă", + -11.639398574829102 + ], + [ + "▁gambling", + -11.639543533325195 + ], + [ + "▁nord", + -11.63963508605957 + ], + [ + "onal", + -11.639691352844238 + ], + [ + "▁captured", + -11.63979721069336 + ], + [ + "▁lucruri", + -11.640068054199219 + ], + [ + "serait", + -11.640192985534668 + ], + [ + "▁Members", + -11.640265464782715 + ], + [ + "ital", + -11.640275955200195 + ], + [ + "▁mounted", + -11.640475273132324 + ], + [ + "▁opens", + -11.640792846679688 + ], + [ + "▁Marie", + -11.640861511230469 + ], + [ + "Tech", + -11.640902519226074 + ], + [ + "▁wishes", + -11.641016006469727 + ], + [ + "▁regards", + -11.641073226928711 + ], + [ + "going", + -11.641156196594238 + ], + [ + "Opti", + -11.641250610351562 + ], + [ + "▁femei", + -11.641331672668457 + ], + [ + "▁Fish", + -11.64142894744873 + ], + [ + "▁mount", + -11.641800880432129 + ], + [ + "▁Hunt", + -11.641887664794922 + ], + [ + "▁probabil", + -11.64205265045166 + ], + [ + "▁assured", + -11.642191886901855 + ], + [ + "pho", + -11.642230033874512 + ], + [ + "▁manufactured", + -11.642313003540039 + ], + [ + "▁realistic", + -11.642437934875488 + ], + [ + "ații", + -11.642580032348633 + ], + [ + "▁Planning", + -11.642598152160645 + ], + [ + "▁român", + -11.642645835876465 + ], + [ + "ggy", + -11.642669677734375 + ], + [ + "▁produces", + -11.642696380615234 + ], + [ + "▁reminder", + -11.64284896850586 + ], + [ + "TION", + -11.642868041992188 + ], + [ + "▁brake", + -11.642909049987793 + ], + [ + "▁pla", + -11.643172264099121 + ], + [ + "▁Premium", + -11.643270492553711 + ], + [ + "▁carb", + -11.643310546875 + ], + [ + "▁shine", + -11.643390655517578 + ], + [ + "▁carrier", + -11.643492698669434 + ], + [ + "▁poverty", + -11.64350414276123 + ], + [ + "▁effectiveness", + -11.6436128616333 + ], + [ + "administr", + -11.643655776977539 + ], + [ + "▁Chamber", + -11.643658638000488 + ], + [ + "▁suntem", + -11.64376163482666 + ], + [ + "▁noastră", + -11.643855094909668 + ], + [ + "▁sofort", + -11.643877983093262 + ], + [ + "▁moisture", + -11.644058227539062 + ], + [ + "limb", + -11.6441011428833 + ], + [ + "entre", + -11.644328117370605 + ], + [ + "▁SD", + -11.644330978393555 + ], + [ + "▁BC", + -11.644539833068848 + ], + [ + "▁selecting", + -11.6445951461792 + ], + [ + "achieving", + -11.644673347473145 + ], + [ 
+ "info", + -11.644735336303711 + ], + [ + "▁membres", + -11.644983291625977 + ], + [ + "▁shoe", + -11.645014762878418 + ], + [ + "▁locate", + -11.645065307617188 + ], + [ + "▁assignment", + -11.645085334777832 + ], + [ + "lern", + -11.645283699035645 + ], + [ + "▁defeat", + -11.645406723022461 + ], + [ + "▁endless", + -11.645458221435547 + ], + [ + "▁Stunden", + -11.645523071289062 + ], + [ + "то", + -11.645561218261719 + ], + [ + "▁mur", + -11.645586013793945 + ], + [ + "▁wissen", + -11.645844459533691 + ], + [ + "aime", + -11.645915031433105 + ], + [ + "1-2", + -11.646056175231934 + ], + [ + "▁femme", + -11.646212577819824 + ], + [ + "robe", + -11.646468162536621 + ], + [ + "▁embrace", + -11.64647102355957 + ], + [ + "▁baseball", + -11.646614074707031 + ], + [ + "▁hunting", + -11.64663314819336 + ], + [ + "betrieb", + -11.646790504455566 + ], + [ + "▁gardens", + -11.647045135498047 + ], + [ + "▁risc", + -11.647096633911133 + ], + [ + "▁Cri", + -11.647263526916504 + ], + [ + "best", + -11.647506713867188 + ], + [ + "▁Audio", + -11.647621154785156 + ], + [ + "▁intens", + -11.647659301757812 + ], + [ + "▁Round", + -11.647744178771973 + ], + [ + "▁fireplace", + -11.6478271484375 + ], + [ + "▁dozen", + -11.647912979125977 + ], + [ + "▁hospitals", + -11.64802360534668 + ], + [ + "▁profits", + -11.648076057434082 + ], + [ + "▁Mail", + -11.64811897277832 + ], + [ + "obtenir", + -11.648191452026367 + ], + [ + "▁Ross", + -11.648241996765137 + ], + [ + "bun", + -11.648573875427246 + ], + [ + "polar", + -11.648688316345215 + ], + [ + "▁reflection", + -11.648873329162598 + ], + [ + "▁fut", + -11.648992538452148 + ], + [ + "phon", + -11.649017333984375 + ], + [ + "deck", + -11.649094581604004 + ], + [ + "renowned", + -11.649188041687012 + ], + [ + "▁cate", + -11.649308204650879 + ], + [ + "▁decorative", + -11.6494722366333 + ], + [ + "ieri", + -11.64957332611084 + ], + [ + "▁Tap", + -11.64958381652832 + ], + [ + "▁Dallas", + -11.649600982666016 + ], + [ + "rik", + -11.649665832519531 + ], + [ + "▁pied", + -11.649727821350098 + ], + [ + "rés", + -11.649821281433105 + ], + [ + "ppy", + -11.650137901306152 + ], + [ + "▁bitte", + -11.650188446044922 + ], + [ + "▁cave", + -11.650257110595703 + ], + [ + "▁rescue", + -11.650559425354004 + ], + [ + "▁Hilfe", + -11.650714874267578 + ], + [ + "▁Jason", + -11.650786399841309 + ], + [ + "▁Nations", + -11.650838851928711 + ], + [ + "▁profil", + -11.650938987731934 + ], + [ + "▁Atlantic", + -11.651105880737305 + ], + [ + "▁rub", + -11.651126861572266 + ], + [ + "▁collaborative", + -11.65113353729248 + ], + [ + "étude", + -11.651150703430176 + ], + [ + "▁Workshop", + -11.651389122009277 + ], + [ + "nez", + -11.651628494262695 + ], + [ + "▁chacun", + -11.651714324951172 + ], + [ + "▁Too", + -11.65211296081543 + ], + [ + "App", + -11.652313232421875 + ], + [ + "▁conseil", + -11.652399063110352 + ], + [ + "▁signals", + -11.652474403381348 + ], + [ + "▁Dead", + -11.652497291564941 + ], + [ + "▁Austria", + -11.652522087097168 + ], + [ + "▁slots", + -11.652579307556152 + ], + [ + "▁Dies", + -11.652623176574707 + ], + [ + "raj", + -11.652629852294922 + ], + [ + "stick", + -11.652833938598633 + ], + [ + "▁jaw", + -11.653030395507812 + ], + [ + "▁lounge", + -11.653059005737305 + ], + [ + "curi", + -11.653359413146973 + ], + [ + "nem", + -11.653456687927246 + ], + [ + "▁Cluj", + -11.653512954711914 + ], + [ + "▁rapide", + -11.653584480285645 + ], + [ + "▁companion", + -11.653716087341309 + ], + [ + "▁WE", + -11.653879165649414 + ], + [ + "▁bord", + -11.65389347076416 + ], + 
[ + "ody", + -11.654045104980469 + ], + [ + "gru", + -11.654057502746582 + ], + [ + "▁46", + -11.654410362243652 + ], + [ + "kra", + -11.654717445373535 + ], + [ + "eller", + -11.65477180480957 + ], + [ + "naire", + -11.65511703491211 + ], + [ + "hose", + -11.655253410339355 + ], + [ + "▁Atlanta", + -11.655254364013672 + ], + [ + "▁violent", + -11.65530776977539 + ], + [ + "▁imagination", + -11.655352592468262 + ], + [ + "▁reward", + -11.655389785766602 + ], + [ + "▁Korean", + -11.655441284179688 + ], + [ + "▁branches", + -11.655501365661621 + ], + [ + "▁GPS", + -11.655625343322754 + ], + [ + "glo", + -11.655633926391602 + ], + [ + "▁condo", + -11.655705451965332 + ], + [ + "▁Investment", + -11.655765533447266 + ], + [ + "▁involvement", + -11.655813217163086 + ], + [ + "▁trap", + -11.655829429626465 + ], + [ + "▁schön", + -11.655872344970703 + ], + [ + "▁ofera", + -11.655933380126953 + ], + [ + "▁unterschiedlich", + -11.65596866607666 + ], + [ + "Net", + -11.655987739562988 + ], + [ + "▁predict", + -11.656113624572754 + ], + [ + "identifying", + -11.656309127807617 + ], + [ + "▁noir", + -11.6566162109375 + ], + [ + "kos", + -11.656816482543945 + ], + [ + "poz", + -11.656816482543945 + ], + [ + "▁11,", + -11.65698528289795 + ], + [ + "▁fitted", + -11.657384872436523 + ], + [ + "MU", + -11.657469749450684 + ], + [ + "TT", + -11.657645225524902 + ], + [ + "▁vrea", + -11.657846450805664 + ], + [ + "▁wound", + -11.657864570617676 + ], + [ + "lac", + -11.657971382141113 + ], + [ + "▁purchases", + -11.658409118652344 + ], + [ + "▁Cape", + -11.65843677520752 + ], + [ + "▁Foto", + -11.658537864685059 + ], + [ + "▁acres", + -11.65865707397461 + ], + [ + "▁nec", + -11.658677101135254 + ], + [ + "▁burning", + -11.659050941467285 + ], + [ + "conf", + -11.659457206726074 + ], + [ + "▁browse", + -11.659486770629883 + ], + [ + "ural", + -11.659762382507324 + ], + [ + "▁Ah", + -11.659841537475586 + ], + [ + "▁stellt", + -11.65992259979248 + ], + [ + "▁ratings", + -11.660012245178223 + ], + [ + "▁Bowl", + -11.660027503967285 + ], + [ + "▁grav", + -11.660289764404297 + ], + [ + "titi", + -11.66048526763916 + ], + [ + "▁prêt", + -11.66075325012207 + ], + [ + "▁fallen", + -11.660818099975586 + ], + [ + "▁nombreuses", + -11.660940170288086 + ], + [ + "train", + -11.660953521728516 + ], + [ + "ène", + -11.661009788513184 + ], + [ + "Aceasta", + -11.661091804504395 + ], + [ + "▁drill", + -11.661421775817871 + ], + [ + "▁Exam", + -11.661477088928223 + ], + [ + "▁Furniture", + -11.661651611328125 + ], + [ + "eanu", + -11.661919593811035 + ], + [ + "étant", + -11.66230297088623 + ], + [ + "sville", + -11.662391662597656 + ], + [ + "▁swim", + -11.662796020507812 + ], + [ + "▁routes", + -11.662826538085938 + ], + [ + "INE", + -11.662860870361328 + ], + [ + "▁Por", + -11.662976264953613 + ], + [ + "ither", + -11.663168907165527 + ], + [ + "▁optim", + -11.663180351257324 + ], + [ + "▁lua", + -11.66331958770752 + ], + [ + "▁myth", + -11.663491249084473 + ], + [ + "▁Bett", + -11.6635103225708 + ], + [ + "chim", + -11.66355037689209 + ], + [ + "▁cyber", + -11.663553237915039 + ], + [ + "▁engineer", + -11.663825035095215 + ], + [ + "▁exploration", + -11.663918495178223 + ], + [ + "arranged", + -11.663973808288574 + ], + [ + "▁aged", + -11.663993835449219 + ], + [ + "▁beau", + -11.664024353027344 + ], + [ + "OUT", + -11.66402530670166 + ], + [ + "▁Minnesota", + -11.664031982421875 + ], + [ + "tress", + -11.664407730102539 + ], + [ + "▁Commercial", + -11.664509773254395 + ], + [ + "▁inspiring", + -11.66462516784668 + ], + [ + 
"▁Mare", + -11.664725303649902 + ], + [ + "apa", + -11.665140151977539 + ], + [ + "▁ignore", + -11.6651611328125 + ], + [ + "▁gros", + -11.665186882019043 + ], + [ + "▁measurement", + -11.66531753540039 + ], + [ + "ager", + -11.665395736694336 + ], + [ + "intele", + -11.665966987609863 + ], + [ + "▁suspension", + -11.666180610656738 + ], + [ + "▁cultures", + -11.666211128234863 + ], + [ + "▁Wow", + -11.666231155395508 + ], + [ + "▁pushing", + -11.666363716125488 + ], + [ + "▁bands", + -11.666438102722168 + ], + [ + "nage", + -11.666450500488281 + ], + [ + "▁Math", + -11.666515350341797 + ], + [ + "comb", + -11.66658878326416 + ], + [ + "▁créer", + -11.66658878326416 + ], + [ + "▁Lewis", + -11.666685104370117 + ], + [ + "▁VI", + -11.66678524017334 + ], + [ + "emploi", + -11.666791915893555 + ], + [ + "▁elections", + -11.666890144348145 + ], + [ + "▁logic", + -11.666982650756836 + ], + [ + "▁unlike", + -11.667122840881348 + ], + [ + "▁Matthew", + -11.66743278503418 + ], + [ + "▁pă", + -11.667486190795898 + ], + [ + "oxy", + -11.667620658874512 + ], + [ + "équipe", + -11.667717933654785 + ], + [ + "▁worden", + -11.668088912963867 + ], + [ + "dev", + -11.668258666992188 + ], + [ + "▁Massachusetts", + -11.668691635131836 + ], + [ + "▁Return", + -11.668695449829102 + ], + [ + "▁Friends", + -11.66891098022461 + ], + [ + "▁movements", + -11.66894245147705 + ], + [ + "chie", + -11.668964385986328 + ], + [ + "rak", + -11.669017791748047 + ], + [ + "▁Fit", + -11.66904354095459 + ], + [ + "▁copil", + -11.669113159179688 + ], + [ + "iunii", + -11.669188499450684 + ], + [ + "▁intensive", + -11.669234275817871 + ], + [ + "▁rug", + -11.669452667236328 + ], + [ + "lichkeit", + -11.669686317443848 + ], + [ + "kov", + -11.669724464416504 + ], + [ + "▁pense", + -11.66978645324707 + ], + [ + "pop", + -11.66978931427002 + ], + [ + "▁closet", + -11.669865608215332 + ], + [ + "▁prevention", + -11.669920921325684 + ], + [ + "▁Deb", + -11.670256614685059 + ], + [ + "▁devant", + -11.670430183410645 + ], + [ + "▁construit", + -11.670440673828125 + ], + [ + "▁breaks", + -11.67082405090332 + ], + [ + "otic", + -11.670886993408203 + ], + [ + "▁dig", + -11.67088794708252 + ], + [ + "▁près", + -11.670930862426758 + ], + [ + "chte", + -11.671029090881348 + ], + [ + "▁Chat", + -11.671029090881348 + ], + [ + "wel", + -11.671219825744629 + ], + [ + "▁edges", + -11.671272277832031 + ], + [ + "▁keen", + -11.671419143676758 + ], + [ + "▁infant", + -11.671716690063477 + ], + [ + "▁Hills", + -11.6719388961792 + ], + [ + "▁grounds", + -11.671969413757324 + ], + [ + "▁hab", + -11.672039031982422 + ], + [ + "▁Mun", + -11.67215347290039 + ], + [ + "▁references", + -11.672215461730957 + ], + [ + "▁hearts", + -11.672446250915527 + ], + [ + "exprim", + -11.672487258911133 + ], + [ + "▁tratament", + -11.672553062438965 + ], + [ + "LD", + -11.67258358001709 + ], + [ + "ssel", + -11.67275333404541 + ], + [ + "cover", + -11.672782897949219 + ], + [ + "bridge", + -11.672837257385254 + ], + [ + "▁Wein", + -11.672924995422363 + ], + [ + "▁voiture", + -11.673035621643066 + ], + [ + "▁Gemeinde", + -11.67313289642334 + ], + [ + "AI", + -11.673169136047363 + ], + [ + "▁renovation", + -11.673264503479004 + ], + [ + "bid", + -11.673285484313965 + ], + [ + "▁Reading", + -11.673481941223145 + ], + [ + "▁Gor", + -11.673490524291992 + ], + [ + "fur", + -11.673527717590332 + ], + [ + "▁Yoga", + -11.673544883728027 + ], + [ + "▁exclusively", + -11.673630714416504 + ], + [ + "▁emissions", + -11.67385482788086 + ], + [ + "ète", + -11.673905372619629 + ], + [ 
+ "▁glasses", + -11.674055099487305 + ], + [ + "▁organizat", + -11.674135208129883 + ], + [ + "▁washing", + -11.67415714263916 + ], + [ + "▁Audi", + -11.674173355102539 + ], + [ + "▁Labor", + -11.674331665039062 + ], + [ + "▁legacy", + -11.674381256103516 + ], + [ + "▁abstract", + -11.674519538879395 + ], + [ + "▁knowledgeable", + -11.674601554870605 + ], + [ + "▁Glo", + -11.674795150756836 + ], + [ + "▁pregnant", + -11.67481803894043 + ], + [ + "liter", + -11.674851417541504 + ], + [ + "▁paintings", + -11.67522144317627 + ], + [ + "▁tête", + -11.675244331359863 + ], + [ + "voy", + -11.675626754760742 + ], + [ + "▁Jacob", + -11.675667762756348 + ], + [ + "▁dressing", + -11.675679206848145 + ], + [ + "▁provisions", + -11.675768852233887 + ], + [ + "bahn", + -11.675870895385742 + ], + [ + "▁depict", + -11.675875663757324 + ], + [ + "AW", + -11.676068305969238 + ], + [ + "▁bleibt", + -11.676163673400879 + ], + [ + "AND", + -11.676292419433594 + ], + [ + "▁fünf", + -11.676386833190918 + ], + [ + "▁hosts", + -11.676426887512207 + ], + [ + "vas", + -11.676708221435547 + ], + [ + "DO", + -11.67674732208252 + ], + [ + "▁max", + -11.676753997802734 + ], + [ + "▁contributed", + -11.676774978637695 + ], + [ + "roz", + -11.676796913146973 + ], + [ + "▁deschis", + -11.676800727844238 + ], + [ + "itaire", + -11.676809310913086 + ], + [ + "tube", + -11.676959991455078 + ], + [ + "▁Beck", + -11.676959991455078 + ], + [ + "▁curious", + -11.677130699157715 + ], + [ + "▁waves", + -11.677178382873535 + ], + [ + "▁regret", + -11.677248001098633 + ], + [ + "FO", + -11.677326202392578 + ], + [ + "droit", + -11.67734146118164 + ], + [ + "rö", + -11.677565574645996 + ], + [ + "▁Panel", + -11.677624702453613 + ], + [ + "▁pile", + -11.677660942077637 + ], + [ + "▁installing", + -11.677674293518066 + ], + [ + "▁Intr", + -11.677797317504883 + ], + [ + "nung", + -11.677823066711426 + ], + [ + "▁Outdoor", + -11.677855491638184 + ], + [ + "▁generator", + -11.67786693572998 + ], + [ + "▁zahlreiche", + -11.677868843078613 + ], + [ + "▁Third", + -11.67813491821289 + ], + [ + "frac", + -11.678180694580078 + ], + [ + "ovi", + -11.678236961364746 + ], + [ + "▁Casa", + -11.678374290466309 + ], + [ + "▁stomach", + -11.678393363952637 + ], + [ + "▁Lincoln", + -11.67844009399414 + ], + [ + "▁Electronic", + -11.678584098815918 + ], + [ + "coding", + -11.67895221710205 + ], + [ + "2017", + -11.67900276184082 + ], + [ + "▁friendship", + -11.679238319396973 + ], + [ + "ried", + -11.679250717163086 + ], + [ + "но", + -11.679265022277832 + ], + [ + "▁tail", + -11.679267883300781 + ], + [ + "▁petits", + -11.679308891296387 + ], + [ + "▁réseau", + -11.679696083068848 + ], + [ + "▁churches", + -11.679999351501465 + ], + [ + "▁marketplace", + -11.680062294006348 + ], + [ + "▁Pool", + -11.680318832397461 + ], + [ + "▁popularity", + -11.680455207824707 + ], + [ + "▁sprijin", + -11.680496215820312 + ], + [ + "▁Od", + -11.680527687072754 + ], + [ + "▁Transfer", + -11.680562973022461 + ], + [ + "▁fake", + -11.680791854858398 + ], + [ + "▁9,", + -11.681007385253906 + ], + [ + "▁weit", + -11.681264877319336 + ], + [ + "▁relaxed", + -11.681415557861328 + ], + [ + "pig", + -11.68161678314209 + ], + [ + "▁Lauren", + -11.68166732788086 + ], + [ + "gesetzt", + -11.681669235229492 + ], + [ + "▁Clar", + -11.681694984436035 + ], + [ + "▁unlikely", + -11.681731224060059 + ], + [ + "color", + -11.681832313537598 + ], + [ + "▁spouse", + -11.681843757629395 + ], + [ + "▁facile", + -11.681859970092773 + ], + [ + "▁Speed", + -11.681872367858887 + ], + [ + "KE", 
+ -11.682230949401855 + ], + [ + "▁PO", + -11.68231201171875 + ], + [ + "▁Channel", + -11.682321548461914 + ], + [ + "argent", + -11.682356834411621 + ], + [ + "▁Making", + -11.682430267333984 + ], + [ + "▁Coll", + -11.682585716247559 + ], + [ + "cci", + -11.682721138000488 + ], + [ + "corresponding", + -11.68300724029541 + ], + [ + "▁heaven", + -11.683160781860352 + ], + [ + "ţă", + -11.68319320678711 + ], + [ + "▁darüber", + -11.683236122131348 + ], + [ + "acted", + -11.683420181274414 + ], + [ + "only", + -11.683460235595703 + ], + [ + "▁slight", + -11.683465003967285 + ], + [ + "lian", + -11.68348503112793 + ], + [ + "flă", + -11.683510780334473 + ], + [ + "▁vulnerable", + -11.683530807495117 + ], + [ + "▁creator", + -11.68356704711914 + ], + [ + "▁protecting", + -11.68360424041748 + ], + [ + "writing", + -11.68360710144043 + ], + [ + "▁Ter", + -11.68387222290039 + ], + [ + "▁barb", + -11.683987617492676 + ], + [ + "▁dată", + -11.683995246887207 + ], + [ + "▁Screen", + -11.684052467346191 + ], + [ + "▁BBC", + -11.684082984924316 + ], + [ + "Col", + -11.684206008911133 + ], + [ + "fung", + -11.684453964233398 + ], + [ + "▁dreptul", + -11.684494972229004 + ], + [ + "derived", + -11.684538841247559 + ], + [ + "▁designated", + -11.684553146362305 + ], + [ + "▁interactions", + -11.684617042541504 + ], + [ + "SG", + -11.684621810913086 + ], + [ + "▁häufig", + -11.684625625610352 + ], + [ + "▁Mega", + -11.684638023376465 + ], + [ + "▁jazz", + -11.684660911560059 + ], + [ + "lbs", + -11.684797286987305 + ], + [ + "▁Manual", + -11.68484115600586 + ], + [ + "pushed", + -11.685017585754395 + ], + [ + "▁analytics", + -11.685234069824219 + ], + [ + "▁lawsuit", + -11.68533706665039 + ], + [ + "▁gray", + -11.685364723205566 + ], + [ + "shirts", + -11.685401916503906 + ], + [ + "▁hill", + -11.685508728027344 + ], + [ + "▁1991", + -11.68550968170166 + ], + [ + "▁obligations", + -11.685568809509277 + ], + [ + "▁Dubai", + -11.68580436706543 + ], + [ + "()", + -11.685808181762695 + ], + [ + "▁acceptable", + -11.685810089111328 + ], + [ + "therapist", + -11.685877799987793 + ], + [ + "inger", + -11.6860990524292 + ], + [ + "▁territory", + -11.686208724975586 + ], + [ + "▁sang", + -11.6862211227417 + ], + [ + "ät", + -11.686224937438965 + ], + [ + "▁Zukunft", + -11.686238288879395 + ], + [ + "TU", + -11.68657398223877 + ], + [ + "▁horizontal", + -11.68665599822998 + ], + [ + "▁entrepreneurs", + -11.686710357666016 + ], + [ + "▁Eltern", + -11.687017440795898 + ], + [ + "▁presentations", + -11.687129974365234 + ], + [ + "▁confirmation", + -11.687173843383789 + ], + [ + "▁technological", + -11.687432289123535 + ], + [ + "▁1989", + -11.687530517578125 + ], + [ + "EF", + -11.687640190124512 + ], + [ + "ponent", + -11.687663078308105 + ], + [ + "NET", + -11.687699317932129 + ], + [ + "750", + -11.687772750854492 + ], + [ + "▁desert", + -11.687891960144043 + ], + [ + "▁contribu", + -11.687932968139648 + ], + [ + "▁Gun", + -11.687944412231445 + ], + [ + "▁Juli", + -11.688091278076172 + ], + [ + "ERS", + -11.688261985778809 + ], + [ + "▁inceput", + -11.688261985778809 + ], + [ + "▁answered", + -11.688369750976562 + ], + [ + "▁basement", + -11.688410758972168 + ], + [ + "film", + -11.688434600830078 + ], + [ + "▁taille", + -11.688593864440918 + ], + [ + "▁survival", + -11.688655853271484 + ], + [ + "ihnen", + -11.68869400024414 + ], + [ + "▁Bird", + -11.688840866088867 + ], + [ + "speed", + -11.689336776733398 + ], + [ + "▁journalist", + -11.68941879272461 + ], + [ + "▁Indonesia", + -11.689626693725586 + ], + [ + 
"▁15.", + -11.689973831176758 + ], + [ + "▁19.", + -11.690025329589844 + ], + [ + "étaient", + -11.690114974975586 + ], + [ + "▁tennis", + -11.69024658203125 + ], + [ + "▁aproximativ", + -11.69039249420166 + ], + [ + "▁Hans", + -11.690650939941406 + ], + [ + "▁Remove", + -11.69067096710205 + ], + [ + "▁cats", + -11.691022872924805 + ], + [ + "▁calories", + -11.691052436828613 + ], + [ + "▁limitations", + -11.69119644165039 + ], + [ + "▁subscribe", + -11.691198348999023 + ], + [ + "▁Dem", + -11.691339492797852 + ], + [ + "lust", + -11.691370010375977 + ], + [ + "▁adresa", + -11.691394805908203 + ], + [ + "▁sais", + -11.69140911102295 + ], + [ + "...\"", + -11.691473960876465 + ], + [ + "▁Luft", + -11.691485404968262 + ], + [ + "DL", + -11.691597938537598 + ], + [ + "▁estimates", + -11.691600799560547 + ], + [ + "▁protocol", + -11.691603660583496 + ], + [ + "▁Namen", + -11.691776275634766 + ], + [ + "▁grands", + -11.691901206970215 + ], + [ + "▁voter", + -11.691970825195312 + ], + [ + "▁vacuum", + -11.692075729370117 + ], + [ + "▁versch", + -11.692103385925293 + ], + [ + "▁Democratic", + -11.692107200622559 + ], + [ + "▁Books", + -11.692170143127441 + ], + [ + "▁frames", + -11.692727088928223 + ], + [ + "▁Bee", + -11.692864418029785 + ], + [ + "▁helfen", + -11.692934036254883 + ], + [ + "▁dive", + -11.692963600158691 + ], + [ + "▁physician", + -11.693037033081055 + ], + [ + "▁powered", + -11.693131446838379 + ], + [ + "▁zones", + -11.693337440490723 + ], + [ + "▁regime", + -11.69345474243164 + ], + [ + "check", + -11.693578720092773 + ], + [ + "11.", + -11.693793296813965 + ], + [ + "▁plaisir", + -11.693793296813965 + ], + [ + "▁physically", + -11.693811416625977 + ], + [ + "▁Pul", + -11.694245338439941 + ], + [ + "▁jardin", + -11.694294929504395 + ], + [ + "▁Nur", + -11.694417953491211 + ], + [ + "WC", + -11.694425582885742 + ], + [ + "▁Lock", + -11.694506645202637 + ], + [ + "▁économique", + -11.694530487060547 + ], + [ + "user", + -11.694536209106445 + ], + [ + "▁commit", + -11.694731712341309 + ], + [ + "▁oldest", + -11.694764137268066 + ], + [ + "▁fulfill", + -11.694780349731445 + ], + [ + "▁nervous", + -11.69482135772705 + ], + [ + "▁SH", + -11.695014953613281 + ], + [ + "SK", + -11.695150375366211 + ], + [ + "▁plein", + -11.695291519165039 + ], + [ + "show", + -11.695354461669922 + ], + [ + "▁disability", + -11.695356369018555 + ], + [ + "papier", + -11.69544506072998 + ], + [ + "▁Corp", + -11.695611000061035 + ], + [ + "ători", + -11.695676803588867 + ], + [ + "nţă", + -11.695813179016113 + ], + [ + "▁overseas", + -11.696009635925293 + ], + [ + "▁struck", + -11.69603157043457 + ], + [ + "astic", + -11.69607162475586 + ], + [ + "▁advised", + -11.696088790893555 + ], + [ + "BE", + -11.696161270141602 + ], + [ + "▁UV", + -11.696218490600586 + ], + [ + "patient", + -11.69626235961914 + ], + [ + "▁texte", + -11.696344375610352 + ], + [ + "▁timely", + -11.696444511413574 + ], + [ + "used", + -11.696471214294434 + ], + [ + "▁occasionally", + -11.696524620056152 + ], + [ + "▁entries", + -11.696550369262695 + ], + [ + "underlying", + -11.6967191696167 + ], + [ + "01.", + -11.696748733520508 + ], + [ + "▁automated", + -11.696791648864746 + ], + [ + "yes", + -11.696828842163086 + ], + [ + "▁Staff", + -11.697057723999023 + ], + [ + "▁Einzel", + -11.697546005249023 + ], + [ + "quit", + -11.697687149047852 + ], + [ + "▁Cela", + -11.697951316833496 + ], + [ + "▁snap", + -11.698298454284668 + ], + [ + "▁followers", + -11.698330879211426 + ], + [ + "CN", + -11.698709487915039 + ], + [ + "▁Cooper", + 
-11.698892593383789 + ], + [ + "ô", + -11.698921203613281 + ], + [ + "▁memorable", + -11.698965072631836 + ], + [ + "▁jur", + -11.698996543884277 + ], + [ + "▁ajutorul", + -11.69905948638916 + ], + [ + "▁Enter", + -11.6991548538208 + ], + [ + "Often", + -11.699294090270996 + ], + [ + "▁dintr", + -11.699341773986816 + ], + [ + "-30", + -11.699419975280762 + ], + [ + "ESS", + -11.699454307556152 + ], + [ + "▁weird", + -11.699462890625 + ], + [ + "▁Animal", + -11.699706077575684 + ], + [ + "▁complement", + -11.699719429016113 + ], + [ + "▁Bot", + -11.699756622314453 + ], + [ + "▁darf", + -11.699764251708984 + ], + [ + "yed", + -11.699808120727539 + ], + [ + "▁Mul", + -11.699872016906738 + ], + [ + "lick", + -11.700080871582031 + ], + [ + "▁Cambridge", + -11.700216293334961 + ], + [ + "adore", + -11.700407981872559 + ], + [ + "▁Dutch", + -11.700420379638672 + ], + [ + "▁Castle", + -11.700431823730469 + ], + [ + "igi", + -11.700563430786133 + ], + [ + "▁enemy", + -11.70071029663086 + ], + [ + "accompanied", + -11.700725555419922 + ], + [ + "▁teren", + -11.701102256774902 + ], + [ + "▁ET", + -11.701498985290527 + ], + [ + "ffle", + -11.701557159423828 + ], + [ + "-15", + -11.701651573181152 + ], + [ + "▁Geo", + -11.701680183410645 + ], + [ + "▁attractions", + -11.701730728149414 + ], + [ + "iker", + -11.70185661315918 + ], + [ + "▁bă", + -11.701990127563477 + ], + [ + "▁heal", + -11.701995849609375 + ], + [ + "weisen", + -11.702144622802734 + ], + [ + "▁spectrum", + -11.702186584472656 + ], + [ + "meld", + -11.702394485473633 + ], + [ + "▁eveniment", + -11.70247745513916 + ], + [ + "arra", + -11.702478408813477 + ], + [ + "rete", + -11.70250129699707 + ], + [ + "▁Had", + -11.70250415802002 + ], + [ + "looking", + -11.702692031860352 + ], + [ + "isierung", + -11.702805519104004 + ], + [ + "▁moyen", + -11.703129768371582 + ], + [ + "▁gesamte", + -11.703202247619629 + ], + [ + "▁destroy", + -11.703407287597656 + ], + [ + "125", + -11.703518867492676 + ], + [ + "▁suivant", + -11.703913688659668 + ], + [ + "▁declared", + -11.703925132751465 + ], + [ + "▁Urban", + -11.704131126403809 + ], + [ + "▁16.", + -11.704168319702148 + ], + [ + "▁Beg", + -11.704168319702148 + ], + [ + "▁canal", + -11.704225540161133 + ], + [ + "▁Pres", + -11.70431137084961 + ], + [ + "▁geeignet", + -11.704339981079102 + ], + [ + "▁strat", + -11.704365730285645 + ], + [ + "UB", + -11.704395294189453 + ], + [ + "▁Alexander", + -11.704424858093262 + ], + [ + "cycle", + -11.704666137695312 + ], + [ + "▁Var", + -11.704802513122559 + ], + [ + "▁domin", + -11.704805374145508 + ], + [ + "▁lasting", + -11.704939842224121 + ], + [ + "terio", + -11.705262184143066 + ], + [ + "▁Battle", + -11.705339431762695 + ], + [ + "▁publications", + -11.705647468566895 + ], + [ + "▁implica", + -11.705886840820312 + ], + [ + "▁NA", + -11.705963134765625 + ], + [ + "▁stocks", + -11.706036567687988 + ], + [ + "Plat", + -11.70611572265625 + ], + [ + "▁excitement", + -11.706149101257324 + ], + [ + "▁Muslim", + -11.706524848937988 + ], + [ + "▁Mari", + -11.706530570983887 + ], + [ + "▁Ul", + -11.706647872924805 + ], + [ + "nächst", + -11.706757545471191 + ], + [ + "▁trait", + -11.706833839416504 + ], + [ + "▁(3)", + -11.706852912902832 + ], + [ + "▁Attorney", + -11.706894874572754 + ], + [ + "▁Malaysia", + -11.70689582824707 + ], + [ + "▁slab", + -11.706960678100586 + ], + [ + "▁dam", + -11.707113265991211 + ], + [ + "▁Bir", + -11.707226753234863 + ], + [ + "▁sing", + -11.70738410949707 + ], + [ + "▁Culture", + -11.7073974609375 + ], + [ + "UD", + 
-11.707417488098145 + ], + [ + "▁Mes", + -11.707443237304688 + ], + [ + "ități", + -11.707615852355957 + ], + [ + "▁possess", + -11.708173751831055 + ], + [ + "enabling", + -11.70820426940918 + ], + [ + "▁settled", + -11.708335876464844 + ], + [ + "▁sagen", + -11.708492279052734 + ], + [ + "▁erfolgt", + -11.708564758300781 + ], + [ + "dog", + -11.708600997924805 + ], + [ + "ndu", + -11.708732604980469 + ], + [ + "ității", + -11.708745002746582 + ], + [ + "▁Islam", + -11.708930015563965 + ], + [ + "▁catalog", + -11.708931922912598 + ], + [ + "▁simt", + -11.709102630615234 + ], + [ + "tische", + -11.709150314331055 + ], + [ + "▁Mach", + -11.709334373474121 + ], + [ + "▁EP", + -11.709359169006348 + ], + [ + "▁Certified", + -11.709386825561523 + ], + [ + "▁Resources", + -11.70945930480957 + ], + [ + "▁Past", + -11.709607124328613 + ], + [ + "▁Termin", + -11.709755897521973 + ], + [ + "▁lightweight", + -11.709755897521973 + ], + [ + "▁championship", + -11.70994758605957 + ], + [ + "gebiet", + -11.710122108459473 + ], + [ + "▁jurisdiction", + -11.710135459899902 + ], + [ + "▁euros", + -11.710169792175293 + ], + [ + "▁Familien", + -11.710554122924805 + ], + [ + "▁GT", + -11.710677146911621 + ], + [ + "▁dvs", + -11.71081256866455 + ], + [ + "▁nouveaux", + -11.710838317871094 + ], + [ + "▁chill", + -11.710916519165039 + ], + [ + "▁ridicat", + -11.710920333862305 + ], + [ + "his", + -11.711079597473145 + ], + [ + "▁Indi", + -11.711159706115723 + ], + [ + "▁arrested", + -11.71116828918457 + ], + [ + "ităţii", + -11.711170196533203 + ], + [ + "onul", + -11.711274147033691 + ], + [ + "appar", + -11.711296081542969 + ], + [ + "▁Bachelor", + -11.711297988891602 + ], + [ + "▁erfolgreich", + -11.711426734924316 + ], + [ + "▁versatile", + -11.71163558959961 + ], + [ + "▁nécessaire", + -11.711761474609375 + ], + [ + "▁facial", + -11.712160110473633 + ], + [ + "▁Bull", + -11.712226867675781 + ], + [ + "Comm", + -11.712237358093262 + ], + [ + "atte", + -11.712307929992676 + ], + [ + "hom", + -11.7123384475708 + ], + [ + "start", + -11.712576866149902 + ], + [ + "▁roughly", + -11.712936401367188 + ], + [ + "▁bay", + -11.712984085083008 + ], + [ + "▁american", + -11.712986946105957 + ], + [ + "▁Wisconsin", + -11.713135719299316 + ], + [ + "▁Clinton", + -11.713142395019531 + ], + [ + "appareil", + -11.713153839111328 + ], + [ + "▁liberal", + -11.713455200195312 + ], + [ + "▁dau", + -11.713519096374512 + ], + [ + "ech", + -11.713521957397461 + ], + [ + "2014", + -11.713624000549316 + ], + [ + "▁lip", + -11.713645935058594 + ], + [ + "▁maintenant", + -11.713762283325195 + ], + [ + "▁Sil", + -11.713805198669434 + ], + [ + "rben", + -11.713891983032227 + ], + [ + "▁contents", + -11.713980674743652 + ], + [ + "▁magnetic", + -11.714111328125 + ], + [ + "▁terre", + -11.714151382446289 + ], + [ + "▁Rights", + -11.714475631713867 + ], + [ + "lose", + -11.714570045471191 + ], + [ + "▁crown", + -11.71468448638916 + ], + [ + "▁oils", + -11.7147216796875 + ], + [ + "▁entertaining", + -11.714841842651367 + ], + [ + "▁Option", + -11.714848518371582 + ], + [ + "▁Previous", + -11.714916229248047 + ], + [ + "▁vrai", + -11.714930534362793 + ], + [ + "▁Auswahl", + -11.715056419372559 + ], + [ + "▁horses", + -11.715106010437012 + ], + [ + "▁Author", + -11.71533489227295 + ], + [ + "▁Writing", + -11.715461730957031 + ], + [ + "▁travelling", + -11.715522766113281 + ], + [ + "▁350", + -11.715567588806152 + ], + [ + "daten", + -11.71560287475586 + ], + [ + "zan", + -11.715765953063965 + ], + [ + "▁sweat", + -11.715924263000488 + ], + [ + 
"▁Junior", + -11.715970993041992 + ], + [ + "markt", + -11.71609878540039 + ], + [ + "after", + -11.716105461120605 + ], + [ + "▁admitted", + -11.716262817382812 + ], + [ + "▁1950", + -11.716347694396973 + ], + [ + "▁Sche", + -11.71648120880127 + ], + [ + "▁dorit", + -11.716818809509277 + ], + [ + "▁transferred", + -11.716958045959473 + ], + [ + "utilise", + -11.717194557189941 + ], + [ + "sitz", + -11.717301368713379 + ], + [ + "gio", + -11.717320442199707 + ], + [ + "▁bisher", + -11.717473983764648 + ], + [ + "RD", + -11.717491149902344 + ], + [ + "▁Wales", + -11.717747688293457 + ], + [ + "▁smoking", + -11.717904090881348 + ], + [ + "dire", + -11.717939376831055 + ], + [ + "▁seating", + -11.717979431152344 + ], + [ + "▁constat", + -11.718056678771973 + ], + [ + "▁Hub", + -11.718324661254883 + ], + [ + "▁sieht", + -11.718345642089844 + ], + [ + "▁prospect", + -11.718378067016602 + ], + [ + "▁RO", + -11.718413352966309 + ], + [ + "▁Wars", + -11.718423843383789 + ], + [ + "eek", + -11.718496322631836 + ], + [ + "▁Bring", + -11.718646049499512 + ], + [ + "▁bleiben", + -11.718696594238281 + ], + [ + "arri", + -11.718826293945312 + ], + [ + "inal", + -11.718904495239258 + ], + [ + "▁Maryland", + -11.718932151794434 + ], + [ + "▁Process", + -11.719145774841309 + ], + [ + "They", + -11.719154357910156 + ], + [ + "▁Oxford", + -11.719176292419434 + ], + [ + "▁neat", + -11.719330787658691 + ], + [ + "▁cinema", + -11.719597816467285 + ], + [ + "▁Ist", + -11.719620704650879 + ], + [ + "▁vegan", + -11.719682693481445 + ], + [ + "wall", + -11.719708442687988 + ], + [ + "▁motive", + -11.72010612487793 + ], + [ + "▁mature", + -11.720544815063477 + ], + [ + "▁Dragon", + -11.720653533935547 + ], + [ + "▁google", + -11.720677375793457 + ], + [ + "blick", + -11.72110652923584 + ], + [ + "▁Cod", + -11.721220970153809 + ], + [ + "▁suffi", + -11.721319198608398 + ], + [ + "▁terrorist", + -11.721478462219238 + ], + [ + "Posted", + -11.721484184265137 + ], + [ + "▁Schi", + -11.72157096862793 + ], + [ + "▁Marc", + -11.721597671508789 + ], + [ + "▁operates", + -11.721661567687988 + ], + [ + "gress", + -11.721805572509766 + ], + [ + "has", + -11.721899032592773 + ], + [ + "sole", + -11.722108840942383 + ], + [ + "▁Buck", + -11.722122192382812 + ], + [ + "impl", + -11.722160339355469 + ], + [ + "▁Ron", + -11.722172737121582 + ], + [ + "▁handled", + -11.722346305847168 + ], + [ + "▁Apr", + -11.722347259521484 + ], + [ + "▁Storage", + -11.722467422485352 + ], + [ + "▁temp", + -11.722512245178223 + ], + [ + "▁differently", + -11.722614288330078 + ], + [ + "▁wherever", + -11.722670555114746 + ], + [ + "matched", + -11.722695350646973 + ], + [ + "rios", + -11.72276496887207 + ], + [ + "▁surprising", + -11.722846031188965 + ], + [ + "teilen", + -11.722867965698242 + ], + [ + "▁difficulties", + -11.72294807434082 + ], + [ + "tab", + -11.723064422607422 + ], + [ + "▁Leader", + -11.723128318786621 + ], + [ + "implementing", + -11.723372459411621 + ], + [ + "▁workforce", + -11.723384857177734 + ], + [ + "▁bereit", + -11.723503112792969 + ], + [ + "vig", + -11.72352123260498 + ], + [ + "▁LOVE", + -11.723580360412598 + ], + [ + "▁instances", + -11.723954200744629 + ], + [ + "▁frumos", + -11.723960876464844 + ], + [ + "▁Java", + -11.723974227905273 + ], + [ + "▁arrest", + -11.723977088928223 + ], + [ + "▁apparent", + -11.724152565002441 + ], + [ + "▁hence", + -11.724200248718262 + ], + [ + "▁entwickelt", + -11.72437572479248 + ], + [ + "▁Fra", + -11.724471092224121 + ], + [ + "▁prend", + -11.724486351013184 + ], + [ + "ließ", + 
-11.724522590637207 + ], + [ + "▁drawer", + -11.724671363830566 + ], + [ + "ARD", + -11.724926948547363 + ], + [ + "▁caring", + -11.72499942779541 + ], + [ + "▁wollte", + -11.725024223327637 + ], + [ + "▁vielleicht", + -11.72511100769043 + ], + [ + "▁iconic", + -11.725324630737305 + ], + [ + "äch", + -11.72552490234375 + ], + [ + "abel", + -11.725639343261719 + ], + [ + "▁génér", + -11.72570514678955 + ], + [ + "ault", + -11.725727081298828 + ], + [ + "▁alternatives", + -11.725909233093262 + ], + [ + "think", + -11.726025581359863 + ], + [ + "ро", + -11.726055145263672 + ], + [ + "whereas", + -11.726058006286621 + ], + [ + "erei", + -11.726366996765137 + ], + [ + "▁Eagle", + -11.726766586303711 + ], + [ + "situé", + -11.72704792022705 + ], + [ + "▁laboratory", + -11.727157592773438 + ], + [ + "▁Nutzung", + -11.727256774902344 + ], + [ + "▁Bathroom", + -11.72728157043457 + ], + [ + "▁loaded", + -11.727293968200684 + ], + [ + "niste", + -11.727408409118652 + ], + [ + "som", + -11.727429389953613 + ], + [ + "▁aucun", + -11.727666854858398 + ], + [ + "gebracht", + -11.727676391601562 + ], + [ + "▁tomb", + -11.727771759033203 + ], + [ + "▁Ty", + -11.727785110473633 + ], + [ + "▁afaceri", + -11.727971076965332 + ], + [ + "tex", + -11.72803783416748 + ], + [ + "ality", + -11.728147506713867 + ], + [ + "▁identification", + -11.728150367736816 + ], + [ + "▁cultiv", + -11.728255271911621 + ], + [ + "Not", + -11.728326797485352 + ], + [ + "▁acestor", + -11.72846508026123 + ], + [ + "▁PhD", + -11.728466033935547 + ], + [ + "nell", + -11.728470802307129 + ], + [ + "▁dial", + -11.728594779968262 + ], + [ + "chro", + -11.728673934936523 + ], + [ + "▁specifications", + -11.728682518005371 + ], + [ + "anii", + -11.72877025604248 + ], + [ + "▁cloth", + -11.728836059570312 + ], + [ + "▁highway", + -11.728914260864258 + ], + [ + "▁Vitamin", + -11.729118347167969 + ], + [ + "▁indication", + -11.729349136352539 + ], + [ + "80%", + -11.72959041595459 + ], + [ + "▁Lion", + -11.729681015014648 + ], + [ + "▁10,", + -11.729693412780762 + ], + [ + "▁Werk", + -11.72974967956543 + ], + [ + "▁combin", + -11.729803085327148 + ], + [ + "▁releases", + -11.7298583984375 + ], + [ + "LL", + -11.730006217956543 + ], + [ + "ktor", + -11.730186462402344 + ], + [ + "ufgrund", + -11.73018741607666 + ], + [ + "calc", + -11.73034381866455 + ], + [ + "▁accomplished", + -11.730606079101562 + ], + [ + "▁los", + -11.730619430541992 + ], + [ + "▁distant", + -11.730688095092773 + ], + [ + "▁secteur", + -11.73068904876709 + ], + [ + "logue", + -11.730781555175781 + ], + [ + "▁betting", + -11.730792999267578 + ], + [ + "elf", + -11.731180191040039 + ], + [ + "puteti", + -11.73123550415039 + ], + [ + "▁Moment", + -11.731236457824707 + ], + [ + "▁scoring", + -11.731548309326172 + ], + [ + "▁freuen", + -11.731572151184082 + ], + [ + "▁fastest", + -11.731873512268066 + ], + [ + "▁directors", + -11.732080459594727 + ], + [ + "▁fame", + -11.732234954833984 + ], + [ + "▁complaint", + -11.732239723205566 + ], + [ + "▁Ep", + -11.732314109802246 + ], + [ + "▁delicate", + -11.732329368591309 + ], + [ + "annonce", + -11.73240852355957 + ], + [ + "ext", + -11.732454299926758 + ], + [ + "▁quit", + -11.732473373413086 + ], + [ + "▁Cop", + -11.73253345489502 + ], + [ + "prop", + -11.732565879821777 + ], + [ + "365", + -11.732742309570312 + ], + [ + "▁Say", + -11.732879638671875 + ], + [ + "▁internationale", + -11.733064651489258 + ], + [ + "cott", + -11.733213424682617 + ], + [ + "▁Whatever", + -11.733261108398438 + ], + [ + "▁admir", + -11.733261108398438 
+ ], + [ + "▁bucur", + -11.733549118041992 + ], + [ + "▁entity", + -11.733779907226562 + ], + [ + "▁dancing", + -11.733837127685547 + ], + [ + "▁printre", + -11.733892440795898 + ], + [ + "▁meditation", + -11.734396934509277 + ], + [ + "▁avis", + -11.734416961669922 + ], + [ + "▁1988", + -11.73447036743164 + ], + [ + "10.", + -11.734506607055664 + ], + [ + "▁worker", + -11.734638214111328 + ], + [ + "▁$100", + -11.734784126281738 + ], + [ + "▁contrôle", + -11.7349853515625 + ], + [ + "▁insist", + -11.734997749328613 + ], + [ + "ements", + -11.73505973815918 + ], + [ + "izate", + -11.735163688659668 + ], + [ + "▁tied", + -11.735332489013672 + ], + [ + "▁correspond", + -11.735396385192871 + ], + [ + "▁apartments", + -11.735547065734863 + ], + [ + "▁2009.", + -11.735599517822266 + ], + [ + "▁tiles", + -11.735624313354492 + ], + [ + "▁boots", + -11.735639572143555 + ], + [ + "▁laundry", + -11.735673904418945 + ], + [ + "▁Coffee", + -11.735674858093262 + ], + [ + "▁CV", + -11.735727310180664 + ], + [ + "▁composed", + -11.736035346984863 + ], + [ + "atom", + -11.73622989654541 + ], + [ + "▁shore", + -11.736270904541016 + ], + [ + "▁marijuana", + -11.736312866210938 + ], + [ + "plic", + -11.73648452758789 + ], + [ + "▁Zahl", + -11.736649513244629 + ], + [ + "depth", + -11.73682689666748 + ], + [ + "▁Egypt", + -11.736854553222656 + ], + [ + "▁NFL", + -11.736906051635742 + ], + [ + "▁12,", + -11.736922264099121 + ], + [ + "▁pollution", + -11.736964225769043 + ], + [ + "▁Vergleich", + -11.73704719543457 + ], + [ + "û", + -11.737109184265137 + ], + [ + "▁nurse", + -11.737153053283691 + ], + [ + "▁Susan", + -11.737173080444336 + ], + [ + "▁verify", + -11.737393379211426 + ], + [ + "▁kon", + -11.737504959106445 + ], + [ + "▁ulei", + -11.7376127243042 + ], + [ + "▁Sept", + -11.737699508666992 + ], + [ + "▁Location", + -11.737908363342285 + ], + [ + "▁frozen", + -11.737991333007812 + ], + [ + "good", + -11.73802661895752 + ], + [ + "▁cine", + -11.738066673278809 + ], + [ + "forming", + -11.738181114196777 + ], + [ + "▁Near", + -11.738391876220703 + ], + [ + "▁Tab", + -11.738545417785645 + ], + [ + "▁Alexandr", + -11.738600730895996 + ], + [ + "ст", + -11.73863697052002 + ], + [ + "CK", + -11.738656044006348 + ], + [ + "▁loads", + -11.738948822021484 + ], + [ + "▁disorders", + -11.738957405090332 + ], + [ + "hip", + -11.739596366882324 + ], + [ + "▁blessing", + -11.73987102508545 + ], + [ + "▁vechi", + -11.73997688293457 + ], + [ + "▁Bookmark", + -11.740296363830566 + ], + [ + "SON", + -11.74036979675293 + ], + [ + "books", + -11.740428924560547 + ], + [ + "▁tropical", + -11.740438461303711 + ], + [ + "▁Garten", + -11.740447044372559 + ], + [ + "ôt", + -11.740760803222656 + ], + [ + "tures", + -11.740827560424805 + ], + [ + "▁obligation", + -11.741010665893555 + ], + [ + "▁admin", + -11.741011619567871 + ], + [ + "▁sélection", + -11.741106986999512 + ], + [ + "disp", + -11.741172790527344 + ], + [ + "▁Anyone", + -11.741225242614746 + ], + [ + "keeper", + -11.74138355255127 + ], + [ + "▁konnten", + -11.741521835327148 + ], + [ + "▁existe", + -11.741615295410156 + ], + [ + "▁Rund", + -11.741798400878906 + ], + [ + "▁retailers", + -11.74184799194336 + ], + [ + "folg", + -11.741948127746582 + ], + [ + "▁urmare", + -11.742019653320312 + ], + [ + "▁Liebe", + -11.742321014404297 + ], + [ + "▁actors", + -11.742422103881836 + ], + [ + "▁Druck", + -11.742618560791016 + ], + [ + "lien", + -11.742752075195312 + ], + [ + "sian", + -11.742847442626953 + ], + [ + "▁partid", + -11.74304485321045 + ], + [ + "▁loin", + 
-11.743114471435547 + ], + [ + "AZ", + -11.743119239807129 + ], + [ + "oasă", + -11.743501663208008 + ], + [ + "▁inclusiv", + -11.743656158447266 + ], + [ + "TD", + -11.743680953979492 + ], + [ + "▁anului", + -11.743766784667969 + ], + [ + "poc", + -11.743844985961914 + ], + [ + "▁musique", + -11.743972778320312 + ], + [ + "▁Hart", + -11.743997573852539 + ], + [ + "Sh", + -11.744283676147461 + ], + [ + "html", + -11.744290351867676 + ], + [ + "▁serial", + -11.744318008422852 + ], + [ + "țele", + -11.744369506835938 + ], + [ + "inning", + -11.744544982910156 + ], + [ + "▁Bureau", + -11.744555473327637 + ], + [ + "▁rush", + -11.744626998901367 + ], + [ + "▁deosebit", + -11.744637489318848 + ], + [ + "▁Wort", + -11.744648933410645 + ], + [ + "▁Thailand", + -11.744688987731934 + ], + [ + "▁Language", + -11.745193481445312 + ], + [ + "▁Governor", + -11.745213508605957 + ], + [ + "▁Later", + -11.74525260925293 + ], + [ + "rilor", + -11.745282173156738 + ], + [ + "▁activités", + -11.745372772216797 + ], + [ + "schaffen", + -11.745598793029785 + ], + [ + "▁harvest", + -11.74567985534668 + ], + [ + "▁municipal", + -11.745783805847168 + ], + [ + "einander", + -11.74600601196289 + ], + [ + "▁fingers", + -11.746383666992188 + ], + [ + "▁sculpture", + -11.74638843536377 + ], + [ + "▁Bien", + -11.746390342712402 + ], + [ + "▁departments", + -11.746562957763672 + ], + [ + "▁période", + -11.746746063232422 + ], + [ + "▁jeune", + -11.746960639953613 + ], + [ + "▁governments", + -11.74710750579834 + ], + [ + "uter", + -11.747179985046387 + ], + [ + "Aceste", + -11.747220039367676 + ], + [ + "▁Deal", + -11.747243881225586 + ], + [ + "▁Equipment", + -11.74726390838623 + ], + [ + "nous", + -11.747300148010254 + ], + [ + "▁gate", + -11.747315406799316 + ], + [ + "▁meta", + -11.747447967529297 + ], + [ + "▁stiu", + -11.747474670410156 + ], + [ + "fold", + -11.747486114501953 + ], + [ + "▁seule", + -11.747523307800293 + ], + [ + "▁varied", + -11.747541427612305 + ], + [ + "hit", + -11.747635841369629 + ], + [ + "▁DIY", + -11.74768352508545 + ], + [ + "▁lemn", + -11.747685432434082 + ], + [ + "OB", + -11.747865676879883 + ], + [ + "▁colorful", + -11.748095512390137 + ], + [ + "▁câ", + -11.74826431274414 + ], + [ + "▁semester", + -11.74830150604248 + ], + [ + "▁dealer", + -11.748575210571289 + ], + [ + "nett", + -11.748788833618164 + ], + [ + "▁shortly", + -11.748932838439941 + ], + [ + "▁Driver", + -11.748983383178711 + ], + [ + "culture", + -11.749052047729492 + ], + [ + "▁permitted", + -11.749072074890137 + ], + [ + "▁sorts", + -11.749432563781738 + ], + [ + "▁crop", + -11.74999713897705 + ], + [ + "▁valoare", + -11.75046157836914 + ], + [ + "▁analog", + -11.750576972961426 + ], + [ + "▁excuse", + -11.750588417053223 + ], + [ + "▁modèle", + -11.750657081604004 + ], + [ + "When", + -11.75068473815918 + ], + [ + "▁march", + -11.750744819641113 + ], + [ + "haz", + -11.750978469848633 + ], + [ + "▁minimize", + -11.750992774963379 + ], + [ + "traction", + -11.751028060913086 + ], + [ + "▁caracter", + -11.752382278442383 + ], + [ + "▁modules", + -11.7523832321167 + ], + [ + "clu", + -11.75244426727295 + ], + [ + "ţional", + -11.752482414245605 + ], + [ + "▁breach", + -11.752562522888184 + ], + [ + "▁priced", + -11.752614974975586 + ], + [ + "▁attorneys", + -11.752644538879395 + ], + [ + "▁implant", + -11.752645492553711 + ], + [ + "▁ANY", + -11.752655029296875 + ], + [ + "dition", + -11.752707481384277 + ], + [ + "▁trials", + -11.752838134765625 + ], + [ + "▁Nas", + -11.75293254852295 + ], + [ + "Pre", + 
-11.752970695495605 + ], + [ + "lorsque", + -11.752979278564453 + ], + [ + "plin", + -11.753050804138184 + ], + [ + "Er", + -11.753056526184082 + ], + [ + "▁Dom", + -11.753067970275879 + ], + [ + "▁tire", + -11.753190040588379 + ], + [ + "sili", + -11.753233909606934 + ], + [ + "▁coins", + -11.753350257873535 + ], + [ + "▁rend", + -11.753470420837402 + ], + [ + "▁reliability", + -11.753503799438477 + ], + [ + "▁Analysis", + -11.753508567810059 + ], + [ + "▁trails", + -11.753692626953125 + ], + [ + "trägt", + -11.753762245178223 + ], + [ + "▁Kansas", + -11.753908157348633 + ], + [ + "▁responsive", + -11.75390911102295 + ], + [ + "▁disappear", + -11.753988265991211 + ], + [ + "▁stakeholders", + -11.754022598266602 + ], + [ + "▁aplica", + -11.754164695739746 + ], + [ + "▁imi", + -11.754180908203125 + ], + [ + "▁Laura", + -11.754369735717773 + ], + [ + "▁Terms", + -11.75440788269043 + ], + [ + "450", + -11.754460334777832 + ], + [ + "▁voltage", + -11.754483222961426 + ], + [ + "▁Gel", + -11.754544258117676 + ], + [ + "▁qualities", + -11.754549026489258 + ], + [ + "▁qualifi", + -11.754603385925293 + ], + [ + "▁Mé", + -11.754735946655273 + ], + [ + "bereit", + -11.754829406738281 + ], + [ + "gleich", + -11.754875183105469 + ], + [ + "▁voting", + -11.754961013793945 + ], + [ + "▁trademark", + -11.755128860473633 + ], + [ + "▁2.5", + -11.75515079498291 + ], + [ + "ND", + -11.755438804626465 + ], + [ + "▁Kelly", + -11.755470275878906 + ], + [ + "▁weiteren", + -11.755559921264648 + ], + [ + "▁filters", + -11.75562572479248 + ], + [ + "▁coût", + -11.75562858581543 + ], + [ + "jur", + -11.755765914916992 + ], + [ + "acre", + -11.755804061889648 + ], + [ + "▁retired", + -11.756022453308105 + ], + [ + "▁Engine", + -11.756205558776855 + ], + [ + "▁président", + -11.756264686584473 + ], + [ + "ajul", + -11.756307601928711 + ], + [ + "▁GA", + -11.756425857543945 + ], + [ + "rät", + -11.75666332244873 + ], + [ + "▁instructor", + -11.756669998168945 + ], + [ + "▁Allen", + -11.75668716430664 + ], + [ + "▁Delhi", + -11.756771087646484 + ], + [ + "▁cure", + -11.756844520568848 + ], + [ + "seite", + -11.756898880004883 + ], + [ + "coming", + -11.756914138793945 + ], + [ + "▁mixing", + -11.756963729858398 + ], + [ + "▁Kno", + -11.757041931152344 + ], + [ + "▁Sure", + -11.757079124450684 + ], + [ + "▁hired", + -11.757102012634277 + ], + [ + "▁participated", + -11.757196426391602 + ], + [ + "Count", + -11.757320404052734 + ], + [ + "treffen", + -11.757355690002441 + ], + [ + "▁54", + -11.75735855102539 + ], + [ + "▁rings", + -11.75735855102539 + ], + [ + "▁Thor", + -11.757359504699707 + ], + [ + "éro", + -11.75744915008545 + ], + [ + "▁buttons", + -11.757488250732422 + ], + [ + "▁47", + -11.757539749145508 + ], + [ + "▁Tel", + -11.757694244384766 + ], + [ + "▁suport", + -11.757776260375977 + ], + [ + "▁rhythm", + -11.75782585144043 + ], + [ + "▁Theater", + -11.758113861083984 + ], + [ + "▁informatii", + -11.758121490478516 + ], + [ + "hält", + -11.758201599121094 + ], + [ + "▁ouvert", + -11.758238792419434 + ], + [ + "fewer", + -11.75828742980957 + ], + [ + "▁alumni", + -11.758466720581055 + ], + [ + "▁valley", + -11.758508682250977 + ], + [ + "tial", + -11.75860595703125 + ], + [ + "***", + -11.758782386779785 + ], + [ + "kri", + -11.75905704498291 + ], + [ + "▁accidents", + -11.759113311767578 + ], + [ + "▁barrel", + -11.759170532226562 + ], + [ + "mobil", + -11.759310722351074 + ], + [ + "etti", + -11.759437561035156 + ], + [ + "▁immigration", + -11.759515762329102 + ], + [ + "▁poveste", + -11.759528160095215 + 
], + [ + "hren", + -11.759669303894043 + ], + [ + "hydr", + -11.759719848632812 + ], + [ + "▁tweet", + -11.759744644165039 + ], + [ + "▁zip", + -11.759872436523438 + ], + [ + "▁Bonus", + -11.760189056396484 + ], + [ + "ordnung", + -11.760287284851074 + ], + [ + "liber", + -11.76046085357666 + ], + [ + "▁Navy", + -11.760591506958008 + ], + [ + "▁agreements", + -11.760612487792969 + ], + [ + "▁detection", + -11.7607421875 + ], + [ + "DF", + -11.760762214660645 + ], + [ + "hur", + -11.760774612426758 + ], + [ + "0.00", + -11.760798454284668 + ], + [ + "▁07", + -11.760866165161133 + ], + [ + "etta", + -11.760884284973145 + ], + [ + "▁13,", + -11.760887145996094 + ], + [ + "rolled", + -11.760970115661621 + ], + [ + "▁injection", + -11.761002540588379 + ], + [ + "mig", + -11.761017799377441 + ], + [ + "wach", + -11.761107444763184 + ], + [ + "▁choisir", + -11.761515617370605 + ], + [ + "▁professionnels", + -11.76159954071045 + ], + [ + "▁Tower", + -11.76169490814209 + ], + [ + "▁neighbor", + -11.76170539855957 + ], + [ + "deutschen", + -11.76187801361084 + ], + [ + "▁luxurious", + -11.76201057434082 + ], + [ + "▁walks", + -11.762033462524414 + ], + [ + "reti", + -11.762046813964844 + ], + [ + "▁Pad", + -11.762085914611816 + ], + [ + "wise", + -11.762297630310059 + ], + [ + "▁exhaust", + -11.762307167053223 + ], + [ + "▁demonstration", + -11.762582778930664 + ], + [ + "▁agricultural", + -11.762667655944824 + ], + [ + "Upon", + -11.762885093688965 + ], + [ + "▁Blu", + -11.76292610168457 + ], + [ + "atorul", + -11.762967109680176 + ], + [ + "amour", + -11.762984275817871 + ], + [ + "issant", + -11.763004302978516 + ], + [ + "▁delighted", + -11.763031959533691 + ], + [ + "rita", + -11.763113021850586 + ], + [ + "requiring", + -11.763195037841797 + ], + [ + "ivity", + -11.763216972351074 + ], + [ + "▁Unser", + -11.763306617736816 + ], + [ + "FP", + -11.763379096984863 + ], + [ + "fait", + -11.763533592224121 + ], + [ + "dite", + -11.763562202453613 + ], + [ + "kul", + -11.763716697692871 + ], + [ + "arth", + -11.76376724243164 + ], + [ + "▁Ker", + -11.763815879821777 + ], + [ + "torilor", + -11.763816833496094 + ], + [ + "stage", + -11.763866424560547 + ], + [ + "▁HTML", + -11.76398754119873 + ], + [ + "▁Wheel", + -11.764005661010742 + ], + [ + "▁quelque", + -11.76414680480957 + ], + [ + "▁Ou", + -11.764196395874023 + ], + [ + "▁considerable", + -11.764277458190918 + ], + [ + "▁Sco", + -11.76458740234375 + ], + [ + "▁donations", + -11.76481819152832 + ], + [ + "dessen", + -11.765002250671387 + ], + [ + "▁pourquoi", + -11.765039443969727 + ], + [ + "▁Bow", + -11.765189170837402 + ], + [ + "▁Dupa", + -11.76522445678711 + ], + [ + "ska", + -11.765707015991211 + ], + [ + "hot", + -11.765732765197754 + ], + [ + "▁drove", + -11.765849113464355 + ], + [ + "▁oppos", + -11.766018867492676 + ], + [ + "▁hiking", + -11.766035079956055 + ], + [ + "▁Boot", + -11.766081809997559 + ], + [ + "One", + -11.766087532043457 + ], + [ + "▁guvern", + -11.766094207763672 + ], + [ + "▁15,", + -11.766400337219238 + ], + [ + "scheid", + -11.766437530517578 + ], + [ + "▁Miet", + -11.766458511352539 + ], + [ + "▁Technical", + -11.766767501831055 + ], + [ + "▁Dal", + -11.7669038772583 + ], + [ + "▁Metro", + -11.766966819763184 + ], + [ + "▁Baker", + -11.767215728759766 + ], + [ + "▁trece", + -11.767252922058105 + ], + [ + "tained", + -11.767302513122559 + ], + [ + "block", + -11.76738452911377 + ], + [ + "▁wander", + -11.767401695251465 + ], + [ + "▁penalty", + -11.76742172241211 + ], + [ + "▁shipped", + -11.767509460449219 + ], + 
[ + "▁30%", + -11.767518043518066 + ], + [ + "group", + -11.767541885375977 + ], + [ + "▁brothers", + -11.767701148986816 + ], + [ + "▁comanda", + -11.767777442932129 + ], + [ + "▁retreat", + -11.767789840698242 + ], + [ + "▁Movie", + -11.767802238464355 + ], + [ + "PU", + -11.76787281036377 + ], + [ + "▁Jun", + -11.767885208129883 + ], + [ + "▁$6", + -11.767969131469727 + ], + [ + "▁Fal", + -11.768054962158203 + ], + [ + "▁Palestinian", + -11.768075942993164 + ], + [ + "▁soccer", + -11.768217086791992 + ], + [ + "▁Autor", + -11.768254280090332 + ], + [ + "▁chamber", + -11.768266677856445 + ], + [ + "nement", + -11.768463134765625 + ], + [ + "▁offense", + -11.768610954284668 + ], + [ + "▁gig", + -11.768631935119629 + ], + [ + "▁abandon", + -11.768691062927246 + ], + [ + "▁Kraft", + -11.768783569335938 + ], + [ + "▁Medicare", + -11.768784523010254 + ], + [ + "▁soap", + -11.768835067749023 + ], + [ + "▁Fur", + -11.768990516662598 + ], + [ + "▁conditioning", + -11.769103050231934 + ], + [ + "rained", + -11.769132614135742 + ], + [ + "▁puts", + -11.769134521484375 + ], + [ + "▁cod", + -11.76930046081543 + ], + [ + "lassen", + -11.76941967010498 + ], + [ + "FL", + -11.769600868225098 + ], + [ + "▁komplett", + -11.769664764404297 + ], + [ + "▁entscheiden", + -11.769665718078613 + ], + [ + "▁Hour", + -11.769691467285156 + ], + [ + "?!", + -11.770040512084961 + ], + [ + "Stream", + -11.770145416259766 + ], + [ + "▁Grad", + -11.770209312438965 + ], + [ + "▁gently", + -11.770231246948242 + ], + [ + "▁poetry", + -11.770429611206055 + ], + [ + "▁secured", + -11.770438194274902 + ], + [ + "oph", + -11.770466804504395 + ], + [ + "hop", + -11.770561218261719 + ], + [ + "handel", + -11.770634651184082 + ], + [ + "▁besoins", + -11.770658493041992 + ], + [ + "got", + -11.770824432373047 + ], + [ + "▁Chrome", + -11.77088737487793 + ], + [ + "ILL", + -11.770930290222168 + ], + [ + "▁Schritt", + -11.771014213562012 + ], + [ + "▁spell", + -11.771063804626465 + ], + [ + "▁grinding", + -11.771334648132324 + ], + [ + "▁ramp", + -11.77144718170166 + ], + [ + "▁mama", + -11.7716064453125 + ], + [ + "▁bottles", + -11.77180290222168 + ], + [ + "▁canvas", + -11.771906852722168 + ], + [ + "▁ecosystem", + -11.77194595336914 + ], + [ + "aţii", + -11.771967887878418 + ], + [ + "cellular", + -11.772085189819336 + ], + [ + "▁Spin", + -11.772164344787598 + ], + [ + "▁Discover", + -11.772217750549316 + ], + [ + "-17", + -11.772322654724121 + ], + [ + "▁feeding", + -11.77246379852295 + ], + [ + "▁stops", + -11.7725191116333 + ], + [ + "▁haute", + -11.772552490234375 + ], + [ + "▁Entscheidung", + -11.7725830078125 + ], + [ + "▁semble", + -11.772590637207031 + ], + [ + "▁acele", + -11.772857666015625 + ], + [ + "▁Walk", + -11.773154258728027 + ], + [ + "▁joke", + -11.773180961608887 + ], + [ + "▁Fed", + -11.773294448852539 + ], + [ + "climat", + -11.773306846618652 + ], + [ + "▁Lot", + -11.773460388183594 + ], + [ + "runner", + -11.773551940917969 + ], + [ + "▁flip", + -11.773786544799805 + ], + [ + "▁werde", + -11.773818016052246 + ], + [ + "▁Deck", + -11.77417278289795 + ], + [ + "bala", + -11.774296760559082 + ], + [ + "▁sacrifice", + -11.774375915527344 + ], + [ + "cid", + -11.774388313293457 + ], + [ + "him", + -11.774569511413574 + ], + [ + "zahlen", + -11.774587631225586 + ], + [ + "▁heater", + -11.774596214294434 + ], + [ + "formed", + -11.774619102478027 + ], + [ + "plus", + -11.774711608886719 + ], + [ + "▁util", + -11.774742126464844 + ], + [ + "rama", + -11.775019645690918 + ], + [ + "(4)", + -11.7750244140625 + ], + 
[ + "▁knife", + -11.775111198425293 + ], + [ + "▁traditions", + -11.77520751953125 + ], + [ + "▁dip", + -11.775357246398926 + ], + [ + "kill", + -11.775405883789062 + ], + [ + "▁Rich", + -11.775418281555176 + ], + [ + "▁DI", + -11.775555610656738 + ], + [ + "▁containers", + -11.775677680969238 + ], + [ + "▁locuri", + -11.775728225708008 + ], + [ + "▁continent", + -11.775797843933105 + ], + [ + "teilung", + -11.776005744934082 + ], + [ + "▁vreme", + -11.776028633117676 + ], + [ + "organisation", + -11.776126861572266 + ], + [ + "serie", + -11.776135444641113 + ], + [ + "▁Diamond", + -11.776204109191895 + ], + [ + "magazin", + -11.77627944946289 + ], + [ + "▁poster", + -11.776455879211426 + ], + [ + "▁passenger", + -11.7765474319458 + ], + [ + "▁soldiers", + -11.776552200317383 + ], + [ + "▁urgent", + -11.776616096496582 + ], + [ + "▁Lip", + -11.77680778503418 + ], + [ + "▁aşa", + -11.776972770690918 + ], + [ + "▁BO", + -11.777024269104004 + ], + [ + "▁somebody", + -11.777076721191406 + ], + [ + "▁silence", + -11.777132034301758 + ], + [ + "cop", + -11.777359962463379 + ], + [ + "▁Burn", + -11.77749252319336 + ], + [ + "▁stopping", + -11.777544021606445 + ], + [ + "▁essence", + -11.777568817138672 + ], + [ + "▁hitting", + -11.777762413024902 + ], + [ + "▁producers", + -11.777801513671875 + ], + [ + "▁fibre", + -11.777894020080566 + ], + [ + "▁seasonal", + -11.777960777282715 + ], + [ + "▁tara", + -11.778096199035645 + ], + [ + "▁Jose", + -11.778099060058594 + ], + [ + "▁Better", + -11.77825927734375 + ], + [ + "▁steep", + -11.778295516967773 + ], + [ + "Alors", + -11.778353691101074 + ], + [ + "▁collecting", + -11.778507232666016 + ], + [ + "vre", + -11.778635025024414 + ], + [ + "▁disabled", + -11.77863883972168 + ], + [ + "▁voters", + -11.778679847717285 + ], + [ + "consuming", + -11.779092788696289 + ], + [ + "deemed", + -11.779115676879883 + ], + [ + "éra", + -11.779227256774902 + ], + [ + "opération", + -11.779273986816406 + ], + [ + "▁roller", + -11.779305458068848 + ], + [ + "Rather", + -11.779321670532227 + ], + [ + "▁leider", + -11.779370307922363 + ], + [ + "▁IV", + -11.779434204101562 + ], + [ + "▁erreichen", + -11.779473304748535 + ], + [ + "▁charging", + -11.779657363891602 + ], + [ + "tions", + -11.77973747253418 + ], + [ + "tiques", + -11.779861450195312 + ], + [ + "▁formats", + -11.779876708984375 + ], + [ + "▁painful", + -11.78000545501709 + ], + [ + "▁eager", + -11.780061721801758 + ], + [ + "generation", + -11.780137062072754 + ], + [ + "anna", + -11.780235290527344 + ], + [ + "▁races", + -11.780323028564453 + ], + [ + "force", + -11.780357360839844 + ], + [ + "▁ferm", + -11.780522346496582 + ], + [ + "▁breathing", + -11.780618667602539 + ], + [ + "▁offen", + -11.780648231506348 + ], + [ + "▁minds", + -11.780805587768555 + ], + [ + "▁musste", + -11.780832290649414 + ], + [ + "▁Vision", + -11.780888557434082 + ], + [ + "▁Installation", + -11.780988693237305 + ], + [ + "▁hesitate", + -11.781002044677734 + ], + [ + "▁somit", + -11.781023979187012 + ], + [ + "hôtel", + -11.781044006347656 + ], + [ + "cab", + -11.781235694885254 + ], + [ + "-16", + -11.781312942504883 + ], + [ + "▁Visual", + -11.781418800354004 + ], + [ + "intérêt", + -11.781524658203125 + ], + [ + "▁apel", + -11.781831741333008 + ], + [ + "therapy", + -11.782089233398438 + ], + [ + "volt", + -11.78225040435791 + ], + [ + "▁Rou", + -11.782439231872559 + ], + [ + "▁efficace", + -11.782464027404785 + ], + [ + "▁architectural", + -11.782605171203613 + ], + [ + "▁privilege", + -11.782670974731445 + ], + [ + 
"▁treating", + -11.782711029052734 + ], + [ + "▁Tam", + -11.782722473144531 + ], + [ + "tsch", + -11.782744407653809 + ], + [ + "building", + -11.782750129699707 + ], + [ + "▁associations", + -11.782929420471191 + ], + [ + "▁Consumer", + -11.783424377441406 + ], + [ + "▁Lim", + -11.783496856689453 + ], + [ + "newest", + -11.7835054397583 + ], + [ + "▁față", + -11.783675193786621 + ], + [ + "▁ships", + -11.783732414245605 + ], + [ + "lev", + -11.78373908996582 + ], + [ + "raft", + -11.783817291259766 + ], + [ + "▁variations", + -11.783845901489258 + ], + [ + "▁noua", + -11.78386402130127 + ], + [ + "▁Cab", + -11.784063339233398 + ], + [ + "1.2", + -11.78409481048584 + ], + [ + "▁ocazi", + -11.784347534179688 + ], + [ + "▁recommendation", + -11.784449577331543 + ], + [ + "titled", + -11.78445053100586 + ], + [ + "▁invoice", + -11.78459644317627 + ], + [ + "▁noastra", + -11.784647941589355 + ], + [ + "kur", + -11.784700393676758 + ], + [ + "issent", + -11.784758567810059 + ], + [ + "base", + -11.784778594970703 + ], + [ + "hä", + -11.7848482131958 + ], + [ + "888", + -11.784914016723633 + ], + [ + "▁declar", + -11.784941673278809 + ], + [ + "▁Football", + -11.7850341796875 + ], + [ + "▁Indeed", + -11.785293579101562 + ], + [ + "▁weapon", + -11.785333633422852 + ], + [ + "▁destroyed", + -11.785457611083984 + ], + [ + "▁enormous", + -11.785594940185547 + ], + [ + "▁blanket", + -11.7857084274292 + ], + [ + "▁aktiv", + -11.785759925842285 + ], + [ + "raw", + -11.785791397094727 + ], + [ + "▁computing", + -11.785823822021484 + ], + [ + "6)", + -11.785955429077148 + ], + [ + "▁Dam", + -11.786152839660645 + ], + [ + "▁confort", + -11.786174774169922 + ], + [ + "▁Gla", + -11.786198616027832 + ], + [ + "hardly", + -11.786242485046387 + ], + [ + "▁annually", + -11.786269187927246 + ], + [ + "▁destinations", + -11.786401748657227 + ], + [ + "▁guilty", + -11.786404609680176 + ], + [ + "▁scholarship", + -11.786439895629883 + ], + [ + "▁harmful", + -11.786453247070312 + ], + [ + "▁2-3", + -11.786616325378418 + ], + [ + "▁Race", + -11.786638259887695 + ], + [ + "▁hypo", + -11.78671646118164 + ], + [ + "▁shorter", + -11.786733627319336 + ], + [ + "quest", + -11.78675651550293 + ], + [ + "uze", + -11.786812782287598 + ], + [ + "izi", + -11.787005424499512 + ], + [ + "OO", + -11.787095069885254 + ], + [ + "▁Schutz", + -11.787097930908203 + ], + [ + "▁Teilnehmer", + -11.787185668945312 + ], + [ + "▁profiles", + -11.787199020385742 + ], + [ + "▁sustainability", + -11.78747272491455 + ], + [ + "▁emb", + -11.787489891052246 + ], + [ + "▁Augen", + -11.787516593933105 + ], + [ + "▁outdoors", + -11.787542343139648 + ], + [ + "▁Individual", + -11.787548065185547 + ], + [ + "▁pou", + -11.78757095336914 + ], + [ + "▁Together", + -11.787575721740723 + ], + [ + "HT", + -11.787674903869629 + ], + [ + "suited", + -11.787755012512207 + ], + [ + "▁tro", + -11.787782669067383 + ], + [ + "▁Strom", + -11.787805557250977 + ], + [ + "▁achievement", + -11.78799819946289 + ], + [ + "▁Range", + -11.78815746307373 + ], + [ + "tory", + -11.78817081451416 + ], + [ + "▁distribute", + -11.788250923156738 + ], + [ + "▁letzte", + -11.788276672363281 + ], + [ + "incorporated", + -11.788287162780762 + ], + [ + "▁Kir", + -11.788325309753418 + ], + [ + "ruf", + -11.78839111328125 + ], + [ + "▁disappointed", + -11.788543701171875 + ], + [ + "▁referral", + -11.788602828979492 + ], + [ + "flam", + -11.788687705993652 + ], + [ + "▁excessive", + -11.7886962890625 + ], + [ + "▁rapidement", + -11.788743019104004 + ], + [ + "▁Rio", + -11.78875732421875 
+ ], + [ + "aţia", + -11.788951873779297 + ], + [ + "▁meuble", + -11.78912353515625 + ], + [ + "▁2008.", + -11.789135932922363 + ], + [ + "▁Gall", + -11.78915023803711 + ], + [ + "▁française", + -11.789369583129883 + ], + [ + "▁ladies", + -11.789695739746094 + ], + [ + "ailed", + -11.789746284484863 + ], + [ + "El", + -11.789834976196289 + ], + [ + "▁wines", + -11.789868354797363 + ], + [ + "▁beispielsweise", + -11.789876937866211 + ], + [ + "▁gamme", + -11.790193557739258 + ], + [ + "▁guided", + -11.79028034210205 + ], + [ + "▁plin", + -11.790339469909668 + ], + [ + "Î", + -11.790390968322754 + ], + [ + "▁True", + -11.790498733520508 + ], + [ + "▁Temple", + -11.790507316589355 + ], + [ + "▁Pic", + -11.790520668029785 + ], + [ + "permalink", + -11.790547370910645 + ], + [ + "▁vedea", + -11.790656089782715 + ], + [ + "▁rank", + -11.790922164916992 + ], + [ + "▁Grill", + -11.791025161743164 + ], + [ + "clin", + -11.791070938110352 + ], + [ + "▁Hab", + -11.791089057922363 + ], + [ + "▁odds", + -11.791125297546387 + ], + [ + "▁anytime", + -11.791146278381348 + ], + [ + "▁Thanksgiving", + -11.791265487670898 + ], + [ + "guard", + -11.791300773620605 + ], + [ + "▁essays", + -11.791389465332031 + ], + [ + "▁PE", + -11.79139518737793 + ], + [ + "▁Rechts", + -11.791494369506836 + ], + [ + "mals", + -11.791751861572266 + ], + [ + "achi", + -11.791762351989746 + ], + [ + "▁Anthony", + -11.791765213012695 + ], + [ + "▁réponse", + -11.792036056518555 + ], + [ + "standing", + -11.79227352142334 + ], + [ + "▁Mol", + -11.792427062988281 + ], + [ + "▁Canon", + -11.792474746704102 + ], + [ + "▁silk", + -11.792515754699707 + ], + [ + "▁pourrait", + -11.79278564453125 + ], + [ + "▁raport", + -11.79280948638916 + ], + [ + "▁Woche", + -11.792889595031738 + ], + [ + "fallen", + -11.79293155670166 + ], + [ + "sting", + -11.79310131072998 + ], + [ + "▁circulation", + -11.793102264404297 + ], + [ + "▁skirt", + -11.7931547164917 + ], + [ + "▁Title", + -11.793187141418457 + ], + [ + "▁17.", + -11.79331111907959 + ], + [ + "▁Touch", + -11.793486595153809 + ], + [ + "▁utilizat", + -11.79352855682373 + ], + [ + "▁Organisation", + -11.793569564819336 + ], + [ + "▁mereu", + -11.793848991394043 + ], + [ + "▁oxygen", + -11.793953895568848 + ], + [ + "lique", + -11.793985366821289 + ], + [ + "▁consume", + -11.794100761413574 + ], + [ + "▁Barb", + -11.794102668762207 + ], + [ + "1.1", + -11.794105529785156 + ], + [ + "▁nicely", + -11.79419231414795 + ], + [ + "▁psychological", + -11.794227600097656 + ], + [ + "▁refrigerator", + -11.794478416442871 + ], + [ + "▁fantasy", + -11.79481029510498 + ], + [ + "▁dispute", + -11.79494571685791 + ], + [ + "▁IBM", + -11.794954299926758 + ], + [ + "▁Nation", + -11.794971466064453 + ], + [ + "▁mobil", + -11.795063972473145 + ], + [ + "▁density", + -11.795201301574707 + ], + [ + "ske", + -11.795230865478516 + ], + [ + "▁intimate", + -11.795313835144043 + ], + [ + "▁tailored", + -11.795319557189941 + ], + [ + "▁outline", + -11.795472145080566 + ], + [ + "TN", + -11.79554557800293 + ], + [ + "mur", + -11.795634269714355 + ], + [ + "GC", + -11.795662879943848 + ], + [ + "they", + -11.795992851257324 + ], + [ + "pag", + -11.796161651611328 + ], + [ + "▁Kultur", + -11.796246528625488 + ], + [ + "grün", + -11.796281814575195 + ], + [ + "voted", + -11.796529769897461 + ], + [ + "▁donné", + -11.796546936035156 + ], + [ + "▁Să", + -11.796629905700684 + ], + [ + "enberg", + -11.796648979187012 + ], + [ + "▁wi", + -11.79686450958252 + ], + [ + "▁Francis", + -11.797057151794434 + ], + [ + "▁Rick", + 
-11.797157287597656 + ], + [ + "accord", + -11.797403335571289 + ], + [ + "▁Zusammen", + -11.797415733337402 + ], + [ + "▁nonprofit", + -11.797456741333008 + ], + [ + "▁listings", + -11.797615051269531 + ], + [ + "6,", + -11.797908782958984 + ], + [ + "▁maximize", + -11.798253059387207 + ], + [ + "bud", + -11.798345565795898 + ], + [ + "▁promotional", + -11.798486709594727 + ], + [ + "cina", + -11.798646926879883 + ], + [ + "▁potatoes", + -11.79869556427002 + ], + [ + "▁mot", + -11.798871040344238 + ], + [ + "carries", + -11.799384117126465 + ], + [ + "▁stabilit", + -11.799458503723145 + ], + [ + "▁Door", + -11.799574851989746 + ], + [ + "▁downloaded", + -11.799574851989746 + ], + [ + "▁experimental", + -11.799724578857422 + ], + [ + "HD", + -11.7997407913208 + ], + [ + "▁parfois", + -11.79980182647705 + ], + [ + "▁zeigen", + -11.800092697143555 + ], + [ + "▁proposé", + -11.80030632019043 + ], + [ + "▁Verein", + -11.800636291503906 + ], + [ + "▁amestec", + -11.800676345825195 + ], + [ + "▁entreprise", + -11.800718307495117 + ], + [ + "▁PSD", + -11.800841331481934 + ], + [ + "▁bake", + -11.800897598266602 + ], + [ + "▁Rh", + -11.800904273986816 + ], + [ + "▁Mehr", + -11.800922393798828 + ], + [ + "▁purple", + -11.801074028015137 + ], + [ + "▁recipient", + -11.80109691619873 + ], + [ + "rare", + -11.801166534423828 + ], + [ + "egi", + -11.80117130279541 + ], + [ + "ancien", + -11.801176071166992 + ], + [ + "▁risque", + -11.80118465423584 + ], + [ + "▁mystery", + -11.80157470703125 + ], + [ + "mac", + -11.801697731018066 + ], + [ + "ibility", + -11.80182933807373 + ], + [ + "▁Moore", + -11.801881790161133 + ], + [ + "▁flavors", + -11.801911354064941 + ], + [ + "▁trauma", + -11.801966667175293 + ], + [ + "▁automotive", + -11.802112579345703 + ], + [ + "▁Anyway", + -11.802197456359863 + ], + [ + "▁simulation", + -11.802253723144531 + ], + [ + "▁crafts", + -11.802525520324707 + ], + [ + "▁measurements", + -11.80257511138916 + ], + [ + "▁cour", + -11.80257797241211 + ], + [ + "▁tard", + -11.802600860595703 + ], + [ + "nnie", + -11.802881240844727 + ], + [ + "▁Production", + -11.803388595581055 + ], + [ + "▁Cleaning", + -11.803567886352539 + ], + [ + "5,", + -11.803644180297852 + ], + [ + "▁Islamic", + -11.803766250610352 + ], + [ + "▁Gate", + -11.80378532409668 + ], + [ + "bay", + -11.803814888000488 + ], + [ + "HR", + -11.803990364074707 + ], + [ + "▁Offer", + -11.80399227142334 + ], + [ + "▁acceptance", + -11.804107666015625 + ], + [ + "▁Erfahrung", + -11.80412769317627 + ], + [ + "▁environ", + -11.804193496704102 + ], + [ + "▁fancy", + -11.804218292236328 + ], + [ + "▁bullet", + -11.80437183380127 + ], + [ + "organ", + -11.804466247558594 + ], + [ + "▁Peace", + -11.804520606994629 + ], + [ + "▁detalii", + -11.80461597442627 + ], + [ + "▁promised", + -11.804715156555176 + ], + [ + "▁wellness", + -11.804746627807617 + ], + [ + "▁satisfy", + -11.80481243133545 + ], + [ + "▁grants", + -11.805212020874023 + ], + [ + "accueil", + -11.80522346496582 + ], + [ + "▁oben", + -11.805412292480469 + ], + [ + "▁prospects", + -11.80543327331543 + ], + [ + "▁Events", + -11.805513381958008 + ], + [ + "2013", + -11.805569648742676 + ], + [ + "gesehen", + -11.805685997009277 + ], + [ + "▁£1", + -11.805727005004883 + ], + [ + "▁handelt", + -11.805798530578613 + ], + [ + "▁Spieler", + -11.805876731872559 + ], + [ + "▁Virtual", + -11.806145668029785 + ], + [ + "▁bubble", + -11.806239128112793 + ], + [ + "▁Trend", + -11.806254386901855 + ], + [ + "▁sistemul", + -11.806315422058105 + ], + [ + "▁Morgan", + 
-11.806320190429688 + ], + [ + "▁pole", + -11.806503295898438 + ], + [ + "▁spielen", + -11.806533813476562 + ], + [ + "tür", + -11.806571006774902 + ], + [ + "SCO", + -11.806572914123535 + ], + [ + "▁informative", + -11.806678771972656 + ], + [ + "▁affirm", + -11.806755065917969 + ], + [ + "▁Aqua", + -11.806818008422852 + ], + [ + "▁AR", + -11.806888580322266 + ], + [ + "richten", + -11.807071685791016 + ], + [ + "▁rewards", + -11.807122230529785 + ], + [ + "lub", + -11.807235717773438 + ], + [ + "shot", + -11.807236671447754 + ], + [ + "LM", + -11.807540893554688 + ], + [ + "Up", + -11.807586669921875 + ], + [ + "▁absolut", + -11.807737350463867 + ], + [ + "▁Mart", + -11.807806968688965 + ], + [ + "erweise", + -11.807812690734863 + ], + [ + "BP", + -11.807977676391602 + ], + [ + "▁difficile", + -11.808152198791504 + ], + [ + "▁Document", + -11.808159828186035 + ], + [ + "▁Sweet", + -11.8082914352417 + ], + [ + "▁indicator", + -11.808338165283203 + ], + [ + "▁Boden", + -11.808389663696289 + ], + [ + "mates", + -11.808477401733398 + ], + [ + "▁supporters", + -11.808504104614258 + ], + [ + "▁begun", + -11.808600425720215 + ], + [ + "▁blogging", + -11.808611869812012 + ], + [ + "▁CL", + -11.808663368225098 + ], + [ + "gres", + -11.808692932128906 + ], + [ + "▁preferences", + -11.808738708496094 + ], + [ + "▁screw", + -11.808756828308105 + ], + [ + "▁tutor", + -11.808858871459961 + ], + [ + "▁Additional", + -11.80891227722168 + ], + [ + "▁Bitte", + -11.808976173400879 + ], + [ + "utilizing", + -11.808998107910156 + ], + [ + "▁expérience", + -11.809073448181152 + ], + [ + "▁dur", + -11.809146881103516 + ], + [ + "▁precisely", + -11.809178352355957 + ], + [ + "▁janvier", + -11.809394836425781 + ], + [ + "AGE", + -11.80987548828125 + ], + [ + "moto", + -11.810007095336914 + ], + [ + "▁counsel", + -11.810195922851562 + ], + [ + "▁110", + -11.810226440429688 + ], + [ + "nick", + -11.810245513916016 + ], + [ + "licit", + -11.810540199279785 + ], + [ + "technik", + -11.810659408569336 + ], + [ + "▁collaborate", + -11.810736656188965 + ], + [ + "▁neighbors", + -11.810794830322266 + ], + [ + "tered", + -11.810922622680664 + ], + [ + "▁excel", + -11.811025619506836 + ], + [ + "▁Route", + -11.811059951782227 + ], + [ + "steuer", + -11.81109619140625 + ], + [ + "▁pioneer", + -11.811607360839844 + ], + [ + "nuit", + -11.81169319152832 + ], + [ + "▁skip", + -11.811963081359863 + ], + [ + "▁destruction", + -11.811997413635254 + ], + [ + "▁thesis", + -11.812249183654785 + ], + [ + "▁libre", + -11.812317848205566 + ], + [ + "▁petition", + -11.81234073638916 + ], + [ + "▁steady", + -11.812456130981445 + ], + [ + "▁medications", + -11.812458992004395 + ], + [ + "▁audiences", + -11.812623023986816 + ], + [ + "▁coaches", + -11.812689781188965 + ], + [ + "aller", + -11.812704086303711 + ], + [ + "3,000", + -11.812705993652344 + ], + [ + "▁anger", + -11.812785148620605 + ], + [ + "▁striking", + -11.812844276428223 + ], + [ + "▁shades", + -11.81291675567627 + ], + [ + "▁Sitz", + -11.812994956970215 + ], + [ + "▁gluten", + -11.813162803649902 + ], + [ + "▁egal", + -11.813222885131836 + ], + [ + "ania", + -11.813223838806152 + ], + [ + "▁defend", + -11.813241004943848 + ], + [ + "gut", + -11.81382942199707 + ], + [ + "▁reserves", + -11.813895225524902 + ], + [ + "▁advocate", + -11.814053535461426 + ], + [ + "▁Cit", + -11.814082145690918 + ], + [ + "▁technicians", + -11.814105033874512 + ], + [ + "▁cater", + -11.814138412475586 + ], + [ + "leitung", + -11.814190864562988 + ], + [ + "▁towns", + -11.814335823059082 + ], + 
[ + "▁Costa", + -11.814364433288574 + ], + [ + "▁confront", + -11.814567565917969 + ], + [ + "mount", + -11.814652442932129 + ], + [ + "▁nationale", + -11.814706802368164 + ], + [ + "▁adverse", + -11.814932823181152 + ], + [ + "▁couleur", + -11.815112113952637 + ], + [ + "▁delight", + -11.815169334411621 + ], + [ + "▁promises", + -11.815224647521973 + ], + [ + "▁silent", + -11.81550121307373 + ], + [ + "richtet", + -11.815556526184082 + ], + [ + "▁Companies", + -11.815614700317383 + ], + [ + "▁Charlotte", + -11.815620422363281 + ], + [ + "▁labels", + -11.815652847290039 + ], + [ + "▁Süd", + -11.815656661987305 + ], + [ + "▁Honor", + -11.81567096710205 + ], + [ + "▁complaints", + -11.815710067749023 + ], + [ + "▁siècle", + -11.815752029418945 + ], + [ + "▁suits", + -11.815792083740234 + ], + [ + "▁Bath", + -11.815827369689941 + ], + [ + "mise", + -11.815926551818848 + ], + [ + "▁acela", + -11.8159818649292 + ], + [ + "▁candidat", + -11.816011428833008 + ], + [ + "Flo", + -11.816207885742188 + ], + [ + "▁conservative", + -11.816215515136719 + ], + [ + "DD", + -11.816314697265625 + ], + [ + "▁changement", + -11.816414833068848 + ], + [ + "▁login", + -11.816492080688477 + ], + [ + "▁Fashion", + -11.816585540771484 + ], + [ + "reichen", + -11.816672325134277 + ], + [ + "through", + -11.816751480102539 + ], + [ + "aki", + -11.817240715026855 + ], + [ + "gna", + -11.817547798156738 + ], + [ + "▁verse", + -11.817551612854004 + ], + [ + "▁threats", + -11.817622184753418 + ], + [ + "▁Song", + -11.817770004272461 + ], + [ + "▁funded", + -11.81792163848877 + ], + [ + "langen", + -11.818023681640625 + ], + [ + "▁distribu", + -11.818195343017578 + ], + [ + "édition", + -11.818316459655762 + ], + [ + "▁royal", + -11.818562507629395 + ], + [ + "▁bevor", + -11.818829536437988 + ], + [ + "▁02", + -11.818854331970215 + ], + [ + "straße", + -11.818938255310059 + ], + [ + "edit", + -11.81904125213623 + ], + [ + "▁energetic", + -11.81922721862793 + ], + [ + "▁Carr", + -11.819757461547852 + ], + [ + "viol", + -11.819937705993652 + ], + [ + "▁niche", + -11.820054054260254 + ], + [ + "avais", + -11.820099830627441 + ], + [ + "▁backyard", + -11.82010269165039 + ], + [ + "▁Saudi", + -11.820158958435059 + ], + [ + "▁Zwei", + -11.820207595825195 + ], + [ + "▁Legal", + -11.82027530670166 + ], + [ + "accessed", + -11.820277214050293 + ], + [ + "▁choisi", + -11.820340156555176 + ], + [ + "▁GDP", + -11.820343971252441 + ], + [ + "oferă", + -11.820352554321289 + ], + [ + "hlen", + -11.820490837097168 + ], + [ + "▁Wor", + -11.820520401000977 + ], + [ + "▁cheer", + -11.820586204528809 + ], + [ + "▁barely", + -11.820625305175781 + ], + [ + "cost", + -11.820646286010742 + ], + [ + "▁Really", + -11.820661544799805 + ], + [ + "kol", + -11.820721626281738 + ], + [ + "▁binding", + -11.821045875549316 + ], + [ + "euer", + -11.821136474609375 + ], + [ + "▁optimization", + -11.821158409118652 + ], + [ + "▁Designer", + -11.8211669921875 + ], + [ + "▁measuring", + -11.82117748260498 + ], + [ + "ncy", + -11.821516036987305 + ], + [ + "weise", + -11.821520805358887 + ], + [ + "DER", + -11.821850776672363 + ], + [ + "▁$7", + -11.821949005126953 + ], + [ + "▁Anfang", + -11.821954727172852 + ], + [ + "material", + -11.821967124938965 + ], + [ + "▁antique", + -11.822281837463379 + ], + [ + "▁Certificate", + -11.822294235229492 + ], + [ + "▁modest", + -11.822370529174805 + ], + [ + "ției", + -11.822427749633789 + ], + [ + "▁praise", + -11.82245922088623 + ], + [ + "▁Springs", + -11.822660446166992 + ], + [ + "▁organiza", + -11.823041915893555 
+ ], + [ + "jurul", + -11.823047637939453 + ], + [ + "▁plumbing", + -11.82341194152832 + ], + [ + "▁foster", + -11.823490142822266 + ], + [ + "▁Wy", + -11.823491096496582 + ], + [ + "▁Sab", + -11.823503494262695 + ], + [ + "▁overwhelming", + -11.823677062988281 + ], + [ + "▁matin", + -11.823812484741211 + ], + [ + "▁responded", + -11.82408332824707 + ], + [ + "▁confused", + -11.824150085449219 + ], + [ + "▁blessed", + -11.824280738830566 + ], + [ + "▁160", + -11.824295997619629 + ], + [ + "▁ingredient", + -11.824360847473145 + ], + [ + "▁confer", + -11.82448673248291 + ], + [ + "▁Gesundheit", + -11.824530601501465 + ], + [ + "▁bucket", + -11.824555397033691 + ], + [ + "kraft", + -11.824565887451172 + ], + [ + "lange", + -11.824630737304688 + ], + [ + "▁Kopf", + -11.824678421020508 + ], + [ + "▁Prize", + -11.824678421020508 + ], + [ + "▁authorized", + -11.824779510498047 + ], + [ + "▁tick", + -11.824803352355957 + ], + [ + "▁steal", + -11.824910163879395 + ], + [ + "Depending", + -11.824918746948242 + ], + [ + "Depuis", + -11.824952125549316 + ], + [ + "▁functie", + -11.82499885559082 + ], + [ + "▁developments", + -11.825053215026855 + ], + [ + "▁Christians", + -11.825311660766602 + ], + [ + "▁calculated", + -11.8256254196167 + ], + [ + "▁Leave", + -11.825672149658203 + ], + [ + "▁Jam", + -11.82573413848877 + ], + [ + "▁habitat", + -11.825760841369629 + ], + [ + "▁Sorry", + -11.825801849365234 + ], + [ + "▁oficial", + -11.825944900512695 + ], + [ + "▁allein", + -11.826079368591309 + ], + [ + "▁concentrate", + -11.82608413696289 + ], + [ + "dica", + -11.826302528381348 + ], + [ + "▁Convention", + -11.826476097106934 + ], + [ + "illes", + -11.826550483703613 + ], + [ + "▁fum", + -11.82664680480957 + ], + [ + "▁Tal", + -11.826651573181152 + ], + [ + "Europe", + -11.826899528503418 + ], + [ + "▁attachment", + -11.826949119567871 + ], + [ + "▁sensibil", + -11.826995849609375 + ], + [ + "▁clue", + -11.82715892791748 + ], + [ + "▁specialty", + -11.827203750610352 + ], + [ + "▁Cou", + -11.827229499816895 + ], + [ + "▁liste", + -11.827278137207031 + ], + [ + "▁Penn", + -11.827465057373047 + ], + [ + "TRA", + -11.827559471130371 + ], + [ + "▁Themen", + -11.827561378479004 + ], + [ + "▁motivated", + -11.827906608581543 + ], + [ + "▁camere", + -11.828017234802246 + ], + [ + "▁14,", + -11.828393936157227 + ], + [ + "▁attendance", + -11.828557968139648 + ], + [ + "atorii", + -11.828581809997559 + ], + [ + "chemistry", + -11.82873821258545 + ], + [ + "▁roofing", + -11.828959465026855 + ], + [ + "▁Links", + -11.829048156738281 + ], + [ + "▁trou", + -11.829103469848633 + ], + [ + "▁trucks", + -11.829136848449707 + ], + [ + "hilfe", + -11.829557418823242 + ], + [ + "▁(6", + -11.829599380493164 + ], + [ + "vapor", + -11.82964038848877 + ], + [ + "mad", + -11.829668045043945 + ], + [ + "▁Albert", + -11.829877853393555 + ], + [ + "▁FIG", + -11.830073356628418 + ], + [ + "▁Rand", + -11.830187797546387 + ], + [ + "▁Constitution", + -11.830219268798828 + ], + [ + "ambi", + -11.830294609069824 + ], + [ + "▁Syria", + -11.830307006835938 + ], + [ + "▁Fond", + -11.830477714538574 + ], + [ + "▁gouvernement", + -11.830594062805176 + ], + [ + "▁Active", + -11.830705642700195 + ], + [ + "▁prints", + -11.830801963806152 + ], + [ + "▁weigh", + -11.8308687210083 + ], + [ + "▁Craft", + -11.831069946289062 + ], + [ + "▁projets", + -11.831247329711914 + ], + [ + "▁paste", + -11.831377029418945 + ], + [ + "anci", + -11.83139705657959 + ], + [ + "kie", + -11.831411361694336 + ], + [ + "▁gains", + -11.83165168762207 + ], + [ + 
"▁Record", + -11.831942558288574 + ], + [ + "▁beliefs", + -11.831954956054688 + ], + [ + "countless", + -11.831957817077637 + ], + [ + "▁tomatoes", + -11.831997871398926 + ], + [ + "arie", + -11.832082748413086 + ], + [ + "▁140", + -11.83211612701416 + ], + [ + "▁ethical", + -11.832229614257812 + ], + [ + "objectif", + -11.832279205322266 + ], + [ + "▁acestuia", + -11.832283973693848 + ], + [ + "▁Bluetooth", + -11.832398414611816 + ], + [ + "▁agriculture", + -11.832746505737305 + ], + [ + "uré", + -11.833027839660645 + ], + [ + "▁cale", + -11.833072662353516 + ], + [ + "▁articol", + -11.833073616027832 + ], + [ + "▁gum", + -11.833319664001465 + ], + [ + "▁vendor", + -11.833490371704102 + ], + [ + "ifié", + -11.833527565002441 + ], + [ + "▁peer", + -11.833662033081055 + ], + [ + "pod", + -11.834036827087402 + ], + [ + "▁utilized", + -11.834113121032715 + ], + [ + "▁Mü", + -11.834207534790039 + ], + [ + "owohl", + -11.834208488464355 + ], + [ + "hilst", + -11.834233283996582 + ], + [ + "frame", + -11.834260940551758 + ], + [ + "▁fridge", + -11.834822654724121 + ], + [ + "▁query", + -11.835108757019043 + ], + [ + "▁Survey", + -11.835227012634277 + ], + [ + "▁Hell", + -11.835247993469238 + ], + [ + "▁notification", + -11.83530044555664 + ], + [ + "TR", + -11.83538818359375 + ], + [ + "▁ultima", + -11.835505485534668 + ], + [ + "▁radiation", + -11.835631370544434 + ], + [ + "▁musicians", + -11.835821151733398 + ], + [ + "CAN", + -11.83595085144043 + ], + [ + "▁grocery", + -11.83607292175293 + ], + [ + "▁Sicherheit", + -11.83611011505127 + ], + [ + "▁Highway", + -11.836276054382324 + ], + [ + "▁Break", + -11.836285591125488 + ], + [ + "TED", + -11.836345672607422 + ], + [ + "ön", + -11.836352348327637 + ], + [ + "▁biological", + -11.836352348327637 + ], + [ + "qual", + -11.836397171020508 + ], + [ + "250", + -11.83641242980957 + ], + [ + "▁modify", + -11.836651802062988 + ], + [ + "▁Hit", + -11.836698532104492 + ], + [ + "▁Iar", + -11.836838722229004 + ], + [ + "aged", + -11.836884498596191 + ], + [ + "...)", + -11.83688735961914 + ], + [ + "▁contrat", + -11.836928367614746 + ], + [ + "▁centres", + -11.836956977844238 + ], + [ + "griff", + -11.836987495422363 + ], + [ + "Our", + -11.837233543395996 + ], + [ + "▁determination", + -11.837300300598145 + ], + [ + "▁variables", + -11.83742904663086 + ], + [ + "▁nuts", + -11.837472915649414 + ], + [ + "échange", + -11.837577819824219 + ], + [ + "extérieur", + -11.837631225585938 + ], + [ + "▁suflet", + -11.83764362335205 + ], + [ + "▁Scha", + -11.837752342224121 + ], + [ + "stück", + -11.837774276733398 + ], + [ + "▁Tau", + -11.837821960449219 + ], + [ + "▁participa", + -11.838008880615234 + ], + [ + "▁mad", + -11.838034629821777 + ], + [ + "▁relie", + -11.838051795959473 + ], + [ + "▁Fine", + -11.83808422088623 + ], + [ + "▁grape", + -11.838118553161621 + ], + [ + "▁wage", + -11.838141441345215 + ], + [ + "▁startup", + -11.838193893432617 + ], + [ + "▁blank", + -11.838194847106934 + ], + [ + "▁physique", + -11.838199615478516 + ], + [ + "▁punch", + -11.838233947753906 + ], + [ + "▁contacts", + -11.838321685791016 + ], + [ + "▁dezvolt", + -11.83835220336914 + ], + [ + "cross", + -11.838639259338379 + ], + [ + "▁TR", + -11.838652610778809 + ], + [ + "▁gener", + -11.838754653930664 + ], + [ + "▁indem", + -11.838823318481445 + ], + [ + "▁Stan", + -11.838839530944824 + ], + [ + "▁azi", + -11.838930130004883 + ], + [ + "▁Sel", + -11.838958740234375 + ], + [ + "▁Tot", + -11.83924674987793 + ], + [ + "vra", + -11.839341163635254 + ], + [ + "▁recruit", + 
-11.839482307434082 + ], + [ + "▁Yeah", + -11.839494705200195 + ], + [ + "/10", + -11.839507102966309 + ], + [ + "▁nail", + -11.83956241607666 + ], + [ + "▁Ky", + -11.839611053466797 + ], + [ + "▁beloved", + -11.839760780334473 + ], + [ + "operative", + -11.839823722839355 + ], + [ + "▁Tickets", + -11.83983325958252 + ], + [ + "▁tear", + -11.840229988098145 + ], + [ + "▁amp", + -11.840352058410645 + ], + [ + "▁04", + -11.840361595153809 + ], + [ + "▁illustrate", + -11.840361595153809 + ], + [ + "▁mac", + -11.840400695800781 + ], + [ + "▁receiver", + -11.840482711791992 + ], + [ + "atrice", + -11.840508460998535 + ], + [ + "▁souhait", + -11.840572357177734 + ], + [ + "▁Gewinn", + -11.840619087219238 + ], + [ + "▁Vit", + -11.840808868408203 + ], + [ + "roch", + -11.841202735900879 + ], + [ + "▁arata", + -11.841262817382812 + ], + [ + "▁Indiana", + -11.841364860534668 + ], + [ + "child", + -11.841516494750977 + ], + [ + "▁invested", + -11.84157657623291 + ], + [ + "▁Excellent", + -11.841625213623047 + ], + [ + "gori", + -11.841769218444824 + ], + [ + "▁thermal", + -11.841813087463379 + ], + [ + "Str", + -11.841973304748535 + ], + [ + "▁liver", + -11.84201717376709 + ], + [ + "miss", + -11.842035293579102 + ], + [ + "▁utiliser", + -11.842120170593262 + ], + [ + "▁prest", + -11.842445373535156 + ], + [ + "2016", + -11.842506408691406 + ], + [ + "isée", + -11.842508316040039 + ], + [ + "▁Index", + -11.842559814453125 + ], + [ + "▁arch", + -11.842639923095703 + ], + [ + "▁Toyota", + -11.842748641967773 + ], + [ + "▁YOUR", + -11.842782020568848 + ], + [ + "▁Mexican", + -11.842891693115234 + ], + [ + "▁gegenüber", + -11.842940330505371 + ], + [ + "▁cannabis", + -11.843033790588379 + ], + [ + "bis", + -11.843077659606934 + ], + [ + "vage", + -11.843083381652832 + ], + [ + "hall", + -11.843091011047363 + ], + [ + "fax", + -11.843137741088867 + ], + [ + "▁spoken", + -11.843232154846191 + ], + [ + "▁Zimmer", + -11.843544960021973 + ], + [ + "kauf", + -11.8436279296875 + ], + [ + "▁couleurs", + -11.843705177307129 + ], + [ + "▁NJ", + -11.844026565551758 + ], + [ + "▁Heritage", + -11.844318389892578 + ], + [ + "▁Pflege", + -11.844321250915527 + ], + [ + "luc", + -11.844361305236816 + ], + [ + "▁56", + -11.844489097595215 + ], + [ + "VP", + -11.844542503356934 + ], + [ + "▁cuvinte", + -11.844594955444336 + ], + [ + "▁Alliance", + -11.844614028930664 + ], + [ + "▁coco", + -11.844615936279297 + ], + [ + "▁leverage", + -11.844762802124023 + ], + [ + "auch", + -11.844844818115234 + ], + [ + "▁Cart", + -11.84506607055664 + ], + [ + "taux", + -11.84532642364502 + ], + [ + "east", + -11.84560775756836 + ], + [ + "▁decorating", + -11.84565258026123 + ], + [ + "tip", + -11.84565544128418 + ], + [ + "▁Communications", + -11.845780372619629 + ], + [ + "ACE", + -11.84580135345459 + ], + [ + "▁Consul", + -11.845993041992188 + ], + [ + "▁Swiss", + -11.846197128295898 + ], + [ + "inci", + -11.846230506896973 + ], + [ + "▁Fact", + -11.846312522888184 + ], + [ + "▁ajung", + -11.846321105957031 + ], + [ + "▁airline", + -11.846325874328613 + ], + [ + "▁kidney", + -11.846379280090332 + ], + [ + "▁Records", + -11.84642505645752 + ], + [ + "▁Olympic", + -11.846747398376465 + ], + [ + "▁dried", + -11.84719467163086 + ], + [ + "oivent", + -11.847333908081055 + ], + [ + "▁Adobe", + -11.847467422485352 + ], + [ + "▁powers", + -11.847748756408691 + ], + [ + "lande", + -11.847834587097168 + ], + [ + "▁relieve", + -11.847858428955078 + ], + [ + "ţine", + -11.847898483276367 + ], + [ + "▁gradually", + -11.847945213317871 + ], + [ + 
"mud", + -11.84811019897461 + ], + [ + "▁30,", + -11.848116874694824 + ], + [ + "▁plante", + -11.848133087158203 + ], + [ + "▁Hug", + -11.848225593566895 + ], + [ + "▁Focus", + -11.84853458404541 + ], + [ + "▁distinctive", + -11.848594665527344 + ], + [ + "▁Bab", + -11.848662376403809 + ], + [ + "tata", + -11.848679542541504 + ], + [ + "▁Nun", + -11.848797798156738 + ], + [ + "▁Eve", + -11.848811149597168 + ], + [ + "▁déc", + -11.848881721496582 + ], + [ + "▁Beitrag", + -11.84900951385498 + ], + [ + "▁devenit", + -11.849042892456055 + ], + [ + "driven", + -11.849250793457031 + ], + [ + "▁offerings", + -11.84933853149414 + ], + [ + "▁exc", + -11.84941577911377 + ], + [ + "encies", + -11.849576950073242 + ], + [ + "▁Neuro", + -11.849588394165039 + ], + [ + "scher", + -11.849604606628418 + ], + [ + "map", + -11.849703788757324 + ], + [ + "pending", + -11.849783897399902 + ], + [ + "▁courage", + -11.849799156188965 + ], + [ + "axe", + -11.849894523620605 + ], + [ + "▁Gesellschaft", + -11.849900245666504 + ], + [ + "▁ears", + -11.85000991821289 + ], + [ + "▁aider", + -11.850403785705566 + ], + [ + "▁Cast", + -11.85042667388916 + ], + [ + "fast", + -11.850442886352539 + ], + [ + "▁departe", + -11.850502014160156 + ], + [ + "▁oak", + -11.850507736206055 + ], + [ + "▁batch", + -11.850730895996094 + ], + [ + "▁Corporate", + -11.850762367248535 + ], + [ + "▁Ost", + -11.850895881652832 + ], + [ + "-14", + -11.850897789001465 + ], + [ + "▁Pie", + -11.85115909576416 + ], + [ + "▁ranking", + -11.851273536682129 + ], + [ + "clusion", + -11.851316452026367 + ], + [ + "▁costume", + -11.851347923278809 + ], + [ + "▁Knight", + -11.851449966430664 + ], + [ + "▁privat", + -11.851577758789062 + ], + [ + "▁Engineer", + -11.851593971252441 + ], + [ + "▁gens", + -11.8517427444458 + ], + [ + "physics", + -11.85176944732666 + ], + [ + "generating", + -11.851773262023926 + ], + [ + "directement", + -11.851786613464355 + ], + [ + "▁confidential", + -11.851810455322266 + ], + [ + "▁poet", + -11.851937294006348 + ], + [ + "▁monster", + -11.851944923400879 + ], + [ + "▁suppose", + -11.851984977722168 + ], + [ + "său", + -11.851996421813965 + ], + [ + "▁balls", + -11.852103233337402 + ], + [ + "▁substitute", + -11.852137565612793 + ], + [ + "▁simultaneously", + -11.852238655090332 + ], + [ + "▁specify", + -11.852272033691406 + ], + [ + "wald", + -11.852287292480469 + ], + [ + "▁collapse", + -11.852352142333984 + ], + [ + "dessus", + -11.852458953857422 + ], + [ + "▁vitr", + -11.852516174316406 + ], + [ + "▁recruitment", + -11.852607727050781 + ], + [ + "denken", + -11.852632522583008 + ], + [ + "▁candy", + -11.852691650390625 + ], + [ + "▁tourists", + -11.852721214294434 + ], + [ + "dimensional", + -11.852782249450684 + ], + [ + "conce", + -11.852814674377441 + ], + [ + "wechsel", + -11.852822303771973 + ], + [ + "▁passende", + -11.852971076965332 + ], + [ + "industrie", + -11.85299301147461 + ], + [ + "agne", + -11.853127479553223 + ], + [ + "▁warehouse", + -11.853233337402344 + ], + [ + "▁Jugend", + -11.853277206420898 + ], + [ + "▁Weise", + -11.853357315063477 + ], + [ + "▁Zone", + -11.853528022766113 + ], + [ + "▁licence", + -11.853550910949707 + ], + [ + "▁broker", + -11.853630065917969 + ], + [ + "▁Rolle", + -11.85365104675293 + ], + [ + "pton", + -11.853789329528809 + ], + [ + "▁preference", + -11.853846549987793 + ], + [ + "▁homeowners", + -11.853861808776855 + ], + [ + "▁Lum", + -11.85387134552002 + ], + [ + "▁Chairman", + -11.853879928588867 + ], + [ + "▁Pages", + -11.853998184204102 + ], + [ + "▁beam", + 
-11.854005813598633 + ], + [ + "▁coordinate", + -11.854158401489258 + ], + [ + "▁Tool", + -11.854212760925293 + ], + [ + "▁complexity", + -11.854272842407227 + ], + [ + "▁checks", + -11.854339599609375 + ], + [ + "▁Bedroom", + -11.854405403137207 + ], + [ + "minded", + -11.854538917541504 + ], + [ + "▁copiii", + -11.854694366455078 + ], + [ + "▁celebrating", + -11.85470199584961 + ], + [ + "zimmer", + -11.854759216308594 + ], + [ + "▁Imagine", + -11.854759216308594 + ], + [ + "▁decoration", + -11.854830741882324 + ], + [ + "team", + -11.855354309082031 + ], + [ + "▁împreună", + -11.855369567871094 + ], + [ + "▁publicly", + -11.855391502380371 + ], + [ + "▁centuries", + -11.855514526367188 + ], + [ + "▁Islands", + -11.855644226074219 + ], + [ + "▁ethnic", + -11.855663299560547 + ], + [ + "still", + -11.85576057434082 + ], + [ + "stieg", + -11.855823516845703 + ], + [ + "emia", + -11.855904579162598 + ], + [ + "tags", + -11.856026649475098 + ], + [ + "▁marche", + -11.856062889099121 + ], + [ + "▁migration", + -11.856096267700195 + ], + [ + "▁banner", + -11.85616683959961 + ], + [ + "▁macro", + -11.856378555297852 + ], + [ + "▁Edit", + -11.856379508972168 + ], + [ + "tran", + -11.85656452178955 + ], + [ + "ça", + -11.856597900390625 + ], + [ + "▁recycling", + -11.856670379638672 + ], + [ + "▁1,000", + -11.856673240661621 + ], + [ + "▁Quelle", + -11.856891632080078 + ], + [ + "▁Vel", + -11.85700511932373 + ], + [ + "▁Rit", + -11.857025146484375 + ], + [ + "▁Spaß", + -11.857046127319336 + ], + [ + "▁Corn", + -11.857074737548828 + ], + [ + "tracted", + -11.857177734375 + ], + [ + "cited", + -11.857185363769531 + ], + [ + "▁tablets", + -11.857202529907227 + ], + [ + "▁Display", + -11.857337951660156 + ], + [ + "▁persoana", + -11.857392311096191 + ], + [ + "Term", + -11.857410430908203 + ], + [ + "▁Vancouver", + -11.857537269592285 + ], + [ + "▁Gäste", + -11.857550621032715 + ], + [ + "determining", + -11.857608795166016 + ], + [ + "▁populations", + -11.85778522491455 + ], + [ + "aison", + -11.857873916625977 + ], + [ + "▁surgical", + -11.858072280883789 + ], + [ + "tale", + -11.858160018920898 + ], + [ + "ivi", + -11.858283042907715 + ], + [ + "▁Zur", + -11.858388900756836 + ], + [ + "esprit", + -11.858574867248535 + ], + [ + "▁Edge", + -11.858665466308594 + ], + [ + "dach", + -11.858760833740234 + ], + [ + "phi", + -11.858773231506348 + ], + [ + "▁suc", + -11.858841896057129 + ], + [ + "▁scrie", + -11.858848571777344 + ], + [ + "▁Ausbildung", + -11.858885765075684 + ], + [ + "▁51", + -11.85892391204834 + ], + [ + "ologi", + -11.858938217163086 + ], + [ + "▁correction", + -11.859049797058105 + ], + [ + "▁Wald", + -11.859078407287598 + ], + [ + "▁additionally", + -11.859131813049316 + ], + [ + "▁proche", + -11.859353065490723 + ], + [ + "▁classical", + -11.859477996826172 + ], + [ + "▁bringen", + -11.859490394592285 + ], + [ + "▁(10", + -11.859611511230469 + ], + [ + "▁Mile", + -11.859809875488281 + ], + [ + "lace", + -11.859885215759277 + ], + [ + "▁premi", + -11.85988712310791 + ], + [ + "▁constitute", + -11.860029220581055 + ], + [ + "▁bitter", + -11.860078811645508 + ], + [ + "▁Inform", + -11.860295295715332 + ], + [ + "▁corporations", + -11.860334396362305 + ], + [ + "▁Lisa", + -11.860494613647461 + ], + [ + "▁obligat", + -11.860685348510742 + ], + [ + "Throughout", + -11.860738754272461 + ], + [ + "▁Rs", + -11.860769271850586 + ], + [ + "▁Hair", + -11.860916137695312 + ], + [ + "▁supplements", + -11.86099624633789 + ], + [ + "▁motorcycle", + -11.861054420471191 + ], + [ + "escent", + 
-11.861132621765137 + ], + [ + "▁investi", + -11.861222267150879 + ], + [ + "▁continuously", + -11.861265182495117 + ], + [ + "▁Essen", + -11.861334800720215 + ], + [ + "▁precision", + -11.8613862991333 + ], + [ + "▁deficit", + -11.861461639404297 + ], + [ + "▁wallet", + -11.861481666564941 + ], + [ + "▁Bürger", + -11.861531257629395 + ], + [ + "chir", + -11.861574172973633 + ], + [ + "9)", + -11.86161994934082 + ], + [ + "▁Programme", + -11.861716270446777 + ], + [ + "▁simplement", + -11.86193561553955 + ], + [ + "MD", + -11.862093925476074 + ], + [ + "▁rouge", + -11.862096786499023 + ], + [ + "usion", + -11.862133979797363 + ], + [ + "▁stove", + -11.862208366394043 + ], + [ + "▁prospective", + -11.862224578857422 + ], + [ + "▁corp", + -11.86234188079834 + ], + [ + "▁impacts", + -11.862401008605957 + ], + [ + "▁bride", + -11.86266803741455 + ], + [ + "0.0", + -11.862788200378418 + ], + [ + "hid", + -11.862833976745605 + ], + [ + "▁warrant", + -11.862930297851562 + ], + [ + "▁Ice", + -11.8631010055542 + ], + [ + "▁sensible", + -11.863151550292969 + ], + [ + "▁vreo", + -11.863166809082031 + ], + [ + "spekt", + -11.863249778747559 + ], + [ + "▁appreciation", + -11.8633394241333 + ], + [ + "▁automation", + -11.863377571105957 + ], + [ + "Luc", + -11.86341381072998 + ], + [ + "teaches", + -11.863471031188965 + ], + [ + "▁fold", + -11.863506317138672 + ], + [ + "deutsche", + -11.863523483276367 + ], + [ + "▁assisted", + -11.86380386352539 + ], + [ + "▁straightforward", + -11.863932609558105 + ], + [ + "▁mechanic", + -11.864068031311035 + ], + [ + "observ", + -11.864169120788574 + ], + [ + "▁Schau", + -11.864195823669434 + ], + [ + "▁Recently", + -11.864301681518555 + ], + [ + "kers", + -11.86435604095459 + ], + [ + "▁Soft", + -11.864455223083496 + ], + [ + "muni", + -11.864537239074707 + ], + [ + "▁lie", + -11.864617347717285 + ], + [ + "▁Fat", + -11.864728927612305 + ], + [ + "cream", + -11.86476993560791 + ], + [ + "▁snack", + -11.864909172058105 + ], + [ + "▁juin", + -11.865068435668945 + ], + [ + "▁competent", + -11.865134239196777 + ], + [ + "▁Drug", + -11.865141868591309 + ], + [ + "▁Row", + -11.865302085876465 + ], + [ + "▁needle", + -11.865852355957031 + ], + [ + "▁convey", + -11.865900039672852 + ], + [ + "▁voie", + -11.86600399017334 + ], + [ + "▁Hon", + -11.866190910339355 + ], + [ + "▁ebook", + -11.866194725036621 + ], + [ + "▁veteran", + -11.866209030151367 + ], + [ + "▁statistical", + -11.866217613220215 + ], + [ + "190", + -11.866312980651855 + ], + [ + "▁munca", + -11.866402626037598 + ], + [ + "▁venues", + -11.866438865661621 + ], + [ + "▁Viel", + -11.866604804992676 + ], + [ + "▁décor", + -11.866799354553223 + ], + [ + "▁répond", + -11.8670015335083 + ], + [ + "▁produsele", + -11.86700439453125 + ], + [ + "ruc", + -11.867009162902832 + ], + [ + "▁drops", + -11.867011070251465 + ], + [ + "▁autant", + -11.867311477661133 + ], + [ + "▁Fahrzeug", + -11.867313385009766 + ], + [ + "▁hills", + -11.86735725402832 + ], + [ + "ference", + -11.867414474487305 + ], + [ + "▁Glück", + -11.86742115020752 + ], + [ + "▁Pac", + -11.867480278015137 + ], + [ + "▁permettr", + -11.867568969726562 + ], + [ + "▁mouvement", + -11.867713928222656 + ], + [ + "établissement", + -11.867859840393066 + ], + [ + "▁Parc", + -11.867874145507812 + ], + [ + "▁solving", + -11.867900848388672 + ], + [ + "▁jail", + -11.867972373962402 + ], + [ + "▁junk", + -11.867980003356934 + ], + [ + "▁jeux", + -11.868091583251953 + ], + [ + "▁rôle", + -11.868107795715332 + ], + [ + "▁cache", + -11.868124961853027 + ], + [ + 
"▁Answer", + -11.86832046508789 + ], + [ + "wir", + -11.868706703186035 + ], + [ + "option", + -11.868732452392578 + ], + [ + "▁Tiger", + -11.868739128112793 + ], + [ + "▁Ble", + -11.868793487548828 + ], + [ + "Mitglied", + -11.868797302246094 + ], + [ + "▁partial", + -11.868819236755371 + ], + [ + "▁Mercedes", + -11.86888313293457 + ], + [ + "tire", + -11.869001388549805 + ], + [ + "MENT", + -11.869091987609863 + ], + [ + "▁transit", + -11.869230270385742 + ], + [ + "▁cineva", + -11.869285583496094 + ], + [ + "▁Andrea", + -11.869294166564941 + ], + [ + "▁boundaries", + -11.869497299194336 + ], + [ + "script", + -11.870061874389648 + ], + [ + "▁Medi", + -11.870123863220215 + ], + [ + "schreiben", + -11.870203018188477 + ], + [ + "▁lobby", + -11.87035846710205 + ], + [ + "▁defendant", + -11.870406150817871 + ], + [ + "▁sq", + -11.870467185974121 + ], + [ + "▁forgotten", + -11.870569229125977 + ], + [ + "stimmung", + -11.870651245117188 + ], + [ + "hus", + -11.870665550231934 + ], + [ + "RY", + -11.870728492736816 + ], + [ + "▁Anderson", + -11.870748519897461 + ], + [ + "▁Dental", + -11.870828628540039 + ], + [ + "ject", + -11.87110710144043 + ], + [ + "▁Nutzer", + -11.871377944946289 + ], + [ + "▁Portland", + -11.871540069580078 + ], + [ + "scription", + -11.871636390686035 + ], + [ + "▁angel", + -11.871695518493652 + ], + [ + "▁monument", + -11.871748924255371 + ], + [ + "▁număr", + -11.871784210205078 + ], + [ + "▁Lane", + -11.871800422668457 + ], + [ + "▁Bai", + -11.871894836425781 + ], + [ + "But", + -11.871909141540527 + ], + [ + "▁calculate", + -11.872315406799316 + ], + [ + "▁provoca", + -11.87247371673584 + ], + [ + "▁votes", + -11.872493743896484 + ], + [ + "RNA", + -11.872503280639648 + ], + [ + "though", + -11.87259292602539 + ], + [ + "spor", + -11.872631072998047 + ], + [ + "▁connaissance", + -11.872695922851562 + ], + [ + "▁Anwendung", + -11.872932434082031 + ], + [ + "▁Kate", + -11.873123168945312 + ], + [ + "lob", + -11.87315845489502 + ], + [ + "▁Conf", + -11.873180389404297 + ], + [ + "bung", + -11.873212814331055 + ], + [ + "ander", + -11.873282432556152 + ], + [ + "▁functioning", + -11.873297691345215 + ], + [ + "▁sponsored", + -11.873324394226074 + ], + [ + "rav", + -11.873734474182129 + ], + [ + "▁resistant", + -11.873797416687012 + ], + [ + "tră", + -11.873916625976562 + ], + [ + "▁costly", + -11.873923301696777 + ], + [ + "▁Mars", + -11.873991012573242 + ], + [ + "▁tir", + -11.874075889587402 + ], + [ + "▁writes", + -11.874134063720703 + ], + [ + "▁Greg", + -11.874267578125 + ], + [ + "▁Question", + -11.874714851379395 + ], + [ + "▁corporation", + -11.87485408782959 + ], + [ + "▁lire", + -11.874991416931152 + ], + [ + "locked", + -11.875048637390137 + ], + [ + "8,", + -11.875092506408691 + ], + [ + "▁sagt", + -11.875301361083984 + ], + [ + "gaining", + -11.87536907196045 + ], + [ + "▁Pierre", + -11.875688552856445 + ], + [ + "verb", + -11.875725746154785 + ], + [ + "▁Barcelona", + -11.87578296661377 + ], + [ + "werte", + -11.876474380493164 + ], + [ + "▁disponible", + -11.87651538848877 + ], + [ + "▁urge", + -11.876521110534668 + ], + [ + "▁expecting", + -11.876572608947754 + ], + [ + "▁Girl", + -11.87662124633789 + ], + [ + "▁unlimited", + -11.876761436462402 + ], + [ + "watt", + -11.876788139343262 + ], + [ + "▁Möglichkeiten", + -11.876813888549805 + ], + [ + "▁schöne", + -11.876847267150879 + ], + [ + "rium", + -11.877076148986816 + ], + [ + "That", + -11.877272605895996 + ], + [ + "▁socio", + -11.877296447753906 + ], + [ + "▁Democrats", + -11.877351760864258 + ], 
+ [ + "guten", + -11.877422332763672 + ], + [ + "▁Lou", + -11.877425193786621 + ], + [ + "ităţi", + -11.877559661865234 + ], + [ + "▁possibilité", + -11.877717018127441 + ], + [ + "▁adjustable", + -11.877938270568848 + ], + [ + "▁Salt", + -11.877967834472656 + ], + [ + "Thr", + -11.878021240234375 + ], + [ + "▁biseric", + -11.878056526184082 + ], + [ + "ieux", + -11.87808895111084 + ], + [ + "▁procur", + -11.8782377243042 + ], + [ + "▁credits", + -11.878250122070312 + ], + [ + "▁Netflix", + -11.878585815429688 + ], + [ + "doi", + -11.878605842590332 + ], + [ + "▁Jews", + -11.878663063049316 + ], + [ + "▁Ukraine", + -11.87873363494873 + ], + [ + "▁adevărat", + -11.878785133361816 + ], + [ + "▁Apply", + -11.878813743591309 + ], + [ + "▁coupons", + -11.878859519958496 + ], + [ + "▁Detroit", + -11.878881454467773 + ], + [ + "▁rue", + -11.878889083862305 + ], + [ + "anumite", + -11.878926277160645 + ], + [ + "ished", + -11.878973960876465 + ], + [ + "▁withdrawal", + -11.87915325164795 + ], + [ + "▁replacing", + -11.87917709350586 + ], + [ + "catching", + -11.879385948181152 + ], + [ + "▁climbing", + -11.879612922668457 + ], + [ + "▁Basic", + -11.879770278930664 + ], + [ + "▁inclus", + -11.879783630371094 + ], + [ + "scope", + -11.879887580871582 + ], + [ + "▁facem", + -11.879892349243164 + ], + [ + "▁plec", + -11.879904747009277 + ], + [ + "mäßig", + -11.879980087280273 + ], + [ + "▁tasty", + -11.880064010620117 + ], + [ + "▁tunnel", + -11.880074501037598 + ], + [ + "figured", + -11.88032341003418 + ], + [ + "gged", + -11.880390167236328 + ], + [ + "▁conditii", + -11.880599975585938 + ], + [ + "▁homework", + -11.880631446838379 + ], + [ + "volle", + -11.88063907623291 + ], + [ + "▁Gott", + -11.880807876586914 + ], + [ + "▁95", + -11.880969047546387 + ], + [ + "▁elect", + -11.881020545959473 + ], + [ + "▁blast", + -11.881043434143066 + ], + [ + "▁easiest", + -11.881248474121094 + ], + [ + "USE", + -11.881462097167969 + ], + [ + "concentr", + -11.881475448608398 + ], + [ + "orial", + -11.881596565246582 + ], + [ + "▁scroll", + -11.881638526916504 + ], + [ + "stead", + -11.881691932678223 + ], + [ + "▁hormone", + -11.881710052490234 + ], + [ + "▁starter", + -11.88179874420166 + ], + [ + "▁cald", + -11.881878852844238 + ], + [ + "▁wax", + -11.881895065307617 + ], + [ + "▁ridic", + -11.881900787353516 + ], + [ + "ously", + -11.881982803344727 + ], + [ + "maschine", + -11.882101058959961 + ], + [ + "licher", + -11.882399559020996 + ], + [ + "▁16,", + -11.882452964782715 + ], + [ + "▁hassle", + -11.882469177246094 + ], + [ + "semnat", + -11.882535934448242 + ], + [ + "▁pub", + -11.88260555267334 + ], + [ + "240", + -11.882800102233887 + ], + [ + "▁kits", + -11.882871627807617 + ], + [ + "▁Generation", + -11.88293743133545 + ], + [ + "▁merchant", + -11.883052825927734 + ], + [ + "▁Erd", + -11.883068084716797 + ], + [ + "▁café", + -11.883077621459961 + ], + [ + "hoff", + -11.88314151763916 + ], + [ + "▁WITH", + -11.883376121520996 + ], + [ + "▁gesch", + -11.883515357971191 + ], + [ + "▁Editor", + -11.883557319641113 + ], + [ + "▁treats", + -11.883609771728516 + ], + [ + "▁harsh", + -11.883711814880371 + ], + [ + "rome", + -11.883729934692383 + ], + [ + "▁Foreign", + -11.883928298950195 + ], + [ + "▁denied", + -11.883968353271484 + ], + [ + "▁Valentine", + -11.884014129638672 + ], + [ + "▁healthier", + -11.88408088684082 + ], + [ + "▁readily", + -11.884138107299805 + ], + [ + "nac", + -11.884190559387207 + ], + [ + "▁intake", + -11.884191513061523 + ], + [ + "▁puncte", + -11.884230613708496 + ], + [ + 
"erne", + -11.884431838989258 + ], + [ + "file", + -11.884668350219727 + ], + [ + "▁continually", + -11.884688377380371 + ], + [ + "door", + -11.884699821472168 + ], + [ + "▁imediat", + -11.884822845458984 + ], + [ + "▁accused", + -11.884833335876465 + ], + [ + "chy", + -11.884854316711426 + ], + [ + "▁wrapped", + -11.884861946105957 + ], + [ + "IES", + -11.884878158569336 + ], + [ + "▁terrace", + -11.884883880615234 + ], + [ + "mouth", + -11.884897232055664 + ], + [ + "▁defensive", + -11.884991645812988 + ], + [ + "▁Luci", + -11.88508129119873 + ], + [ + "▁significance", + -11.885107040405273 + ], + [ + "▁2007,", + -11.885213851928711 + ], + [ + "▁inclusion", + -11.885221481323242 + ], + [ + "▁rotation", + -11.885248184204102 + ], + [ + "hos", + -11.885283470153809 + ], + [ + "▁crea", + -11.885357856750488 + ], + [ + "üß", + -11.885903358459473 + ], + [ + "▁Install", + -11.885988235473633 + ], + [ + "▁dump", + -11.885998725891113 + ], + [ + "▁informations", + -11.886114120483398 + ], + [ + "▁Thi", + -11.886117935180664 + ], + [ + "▁85", + -11.886252403259277 + ], + [ + "dox", + -11.886283874511719 + ], + [ + "track", + -11.886436462402344 + ], + [ + "▁couples", + -11.886571884155273 + ], + [ + "▁Assembly", + -11.886594772338867 + ], + [ + "wagen", + -11.88672161102295 + ], + [ + "▁Hil", + -11.886723518371582 + ], + [ + "ières", + -11.886833190917969 + ], + [ + "▁Gabriel", + -11.886903762817383 + ], + [ + "▁patience", + -11.887053489685059 + ], + [ + "▁colored", + -11.887147903442383 + ], + [ + "▁separately", + -11.88715934753418 + ], + [ + "▁deployment", + -11.887166023254395 + ], + [ + "scape", + -11.887306213378906 + ], + [ + "▁Acum", + -11.8875150680542 + ], + [ + "▁länger", + -11.887518882751465 + ], + [ + "▁screens", + -11.887598991394043 + ], + [ + "▁prezenta", + -11.887630462646484 + ], + [ + "▁obicei", + -11.887638092041016 + ], + [ + "▁crisp", + -11.887758255004883 + ], + [ + "▁mechanisms", + -11.887771606445312 + ], + [ + "▁thirty", + -11.887786865234375 + ], + [ + "▁individually", + -11.887989044189453 + ], + [ + "▁internationally", + -11.887991905212402 + ], + [ + "lling", + -11.888050079345703 + ], + [ + "▁bureau", + -11.88843059539795 + ], + [ + "▁erfahren", + -11.88844108581543 + ], + [ + "TY", + -11.888553619384766 + ], + [ + "PF", + -11.888607025146484 + ], + [ + "wid", + -11.888752937316895 + ], + [ + "sell", + -11.888835906982422 + ], + [ + "▁Luke", + -11.888879776000977 + ], + [ + "▁Must", + -11.888916969299316 + ], + [ + "▁identical", + -11.888927459716797 + ], + [ + "▁Netherlands", + -11.888980865478516 + ], + [ + "▁investor", + -11.88905143737793 + ], + [ + "▁squad", + -11.889073371887207 + ], + [ + "▁21,", + -11.889143943786621 + ], + [ + "iko", + -11.889230728149414 + ], + [ + "▁departure", + -11.88937759399414 + ], + [ + "ega", + -11.889384269714355 + ], + [ + "uzi", + -11.889408111572266 + ], + [ + "▁lasa", + -11.889458656311035 + ], + [ + "bian", + -11.889525413513184 + ], + [ + "▁Madrid", + -11.889623641967773 + ], + [ + "▁Iowa", + -11.889806747436523 + ], + [ + "▁Yellow", + -11.890026092529297 + ], + [ + "conom", + -11.89004898071289 + ], + [ + "▁hint", + -11.890098571777344 + ], + [ + "NOW", + -11.890111923217773 + ], + [ + "dress", + -11.890204429626465 + ], + [ + "▁Stück", + -11.890267372131348 + ], + [ + "echt", + -11.890424728393555 + ], + [ + "rial", + -11.89045238494873 + ], + [ + "▁Initiative", + -11.890474319458008 + ], + [ + "▁magnificent", + -11.890474319458008 + ], + [ + "▁pipeline", + -11.890543937683105 + ], + [ + "▁08", + -11.890806198120117 + 
], + [ + "▁écrit", + -11.890889167785645 + ], + [ + "KA", + -11.891085624694824 + ], + [ + "arile", + -11.891151428222656 + ], + [ + "▁unfortunately", + -11.891352653503418 + ], + [ + "dose", + -11.891355514526367 + ], + [ + "▁counts", + -11.891427993774414 + ], + [ + "deciding", + -11.891549110412598 + ], + [ + "WA", + -11.89167308807373 + ], + [ + "▁doresc", + -11.891685485839844 + ], + [ + "NY", + -11.892008781433105 + ], + [ + "olin", + -11.892112731933594 + ], + [ + "▁Urlaub", + -11.892133712768555 + ], + [ + "▁alătur", + -11.892317771911621 + ], + [ + "▁Vic", + -11.892515182495117 + ], + [ + "▁fier", + -11.89269733428955 + ], + [ + "EU", + -11.892772674560547 + ], + [ + "▁triple", + -11.892871856689453 + ], + [ + "▁compliment", + -11.89310359954834 + ], + [ + "▁vegetable", + -11.89334487915039 + ], + [ + "member", + -11.893743515014648 + ], + [ + "atiei", + -11.893793106079102 + ], + [ + "▁toxic", + -11.893835067749023 + ], + [ + "▁converted", + -11.893888473510742 + ], + [ + "▁Pink", + -11.893999099731445 + ], + [ + "▁fragment", + -11.894020080566406 + ], + [ + "presenting", + -11.894027709960938 + ], + [ + "▁garantie", + -11.894031524658203 + ], + [ + "▁31,", + -11.894052505493164 + ], + [ + "▁puisqu", + -11.894105911254883 + ], + [ + "aching", + -11.894107818603516 + ], + [ + "▁Shan", + -11.894119262695312 + ], + [ + "▁Affairs", + -11.894368171691895 + ], + [ + "üsse", + -11.894405364990234 + ], + [ + "▁CBD", + -11.894428253173828 + ], + [ + "▁quatre", + -11.894588470458984 + ], + [ + "▁horror", + -11.894651412963867 + ], + [ + "▁culoare", + -11.894661903381348 + ], + [ + "▁welcoming", + -11.894673347473145 + ], + [ + "▁headache", + -11.894808769226074 + ], + [ + "▁septembre", + -11.894820213317871 + ], + [ + "▁Tür", + -11.894862174987793 + ], + [ + "lateral", + -11.89507007598877 + ], + [ + "▁termin", + -11.895228385925293 + ], + [ + "▁Aid", + -11.895291328430176 + ], + [ + "second", + -11.895308494567871 + ], + [ + "▁Philip", + -11.895310401916504 + ], + [ + "berries", + -11.895347595214844 + ], + [ + "▁Slot", + -11.895431518554688 + ], + [ + "ка", + -11.895442962646484 + ], + [ + "▁consecutive", + -11.895590782165527 + ], + [ + "value", + -11.895705223083496 + ], + [ + "▁islands", + -11.8958101272583 + ], + [ + "▁posibilitatea", + -11.895928382873535 + ], + [ + "0.5", + -11.896341323852539 + ], + [ + "▁Dumpster", + -11.896471977233887 + ], + [ + "▁Gran", + -11.89647388458252 + ], + [ + "▁restricted", + -11.8967924118042 + ], + [ + "▁discussing", + -11.896921157836914 + ], + [ + "cock", + -11.896966934204102 + ], + [ + "Serie", + -11.896989822387695 + ], + [ + "▁crushing", + -11.896998405456543 + ], + [ + "RB", + -11.897034645080566 + ], + [ + "▁Gy", + -11.897068977355957 + ], + [ + "normal", + -11.897098541259766 + ], + [ + "DT", + -11.897180557250977 + ], + [ + "▁concurs", + -11.897181510925293 + ], + [ + "▁Beratung", + -11.897231101989746 + ], + [ + "▁handful", + -11.897235870361328 + ], + [ + "▁loading", + -11.897237777709961 + ], + [ + "▁WI", + -11.897269248962402 + ], + [ + "▁Fitness", + -11.897283554077148 + ], + [ + "▁RAM", + -11.897302627563477 + ], + [ + "▁Twi", + -11.89730453491211 + ], + [ + "adurch", + -11.897345542907715 + ], + [ + "▁obiectiv", + -11.897366523742676 + ], + [ + "BM", + -11.897635459899902 + ], + [ + "▁amendment", + -11.8976469039917 + ], + [ + "whi", + -11.897652626037598 + ], + [ + "▁Besonder", + -11.897871017456055 + ], + [ + "ALL", + -11.898003578186035 + ], + [ + "▁earning", + -11.898090362548828 + ], + [ + "▁nutrients", + -11.898580551147461 + 
], + [ + "pru", + -11.898633003234863 + ], + [ + "▁offensive", + -11.898696899414062 + ], + [ + "▁shelves", + -11.898711204528809 + ], + [ + "▁încâ", + -11.898726463317871 + ], + [ + "▁execute", + -11.898923873901367 + ], + [ + "▁cauz", + -11.898966789245605 + ], + [ + "exist", + -11.899179458618164 + ], + [ + "▁Meter", + -11.899191856384277 + ], + [ + "there", + -11.899201393127441 + ], + [ + "▁réaliser", + -11.899249076843262 + ], + [ + "blog", + -11.899362564086914 + ], + [ + "▁résultats", + -11.89937973022461 + ], + [ + "baren", + -11.899391174316406 + ], + [ + "▁lang", + -11.899425506591797 + ], + [ + "▁mere", + -11.899870872497559 + ], + [ + "▁toti", + -11.900079727172852 + ], + [ + "DN", + -11.90017032623291 + ], + [ + "Hi", + -11.900310516357422 + ], + [ + "▁merg", + -11.900359153747559 + ], + [ + "▁Camera", + -11.90054988861084 + ], + [ + "▁parfum", + -11.900697708129883 + ], + [ + "CG", + -11.900701522827148 + ], + [ + "posed", + -11.900713920593262 + ], + [ + "▁proposals", + -11.900732040405273 + ], + [ + "▁incorrect", + -11.900811195373535 + ], + [ + "▁Denver", + -11.901168823242188 + ], + [ + "▁noapte", + -11.901397705078125 + ], + [ + "▁VPN", + -11.901436805725098 + ], + [ + "▁Oklahoma", + -11.90159797668457 + ], + [ + "horizon", + -11.901647567749023 + ], + [ + "▁villa", + -11.901668548583984 + ], + [ + "duce", + -11.901812553405762 + ], + [ + "Dienst", + -11.902042388916016 + ], + [ + "▁oversee", + -11.902511596679688 + ], + [ + "astr", + -11.902548789978027 + ], + [ + "brand", + -11.902713775634766 + ], + [ + "▁Safe", + -11.902746200561523 + ], + [ + "▁competing", + -11.902812004089355 + ], + [ + "▁subiect", + -11.902812004089355 + ], + [ + "▁équipe", + -11.903091430664062 + ], + [ + "▁Dress", + -11.903095245361328 + ], + [ + "▁Juni", + -11.903139114379883 + ], + [ + "▁repeated", + -11.90317153930664 + ], + [ + "2012", + -11.903226852416992 + ], + [ + "▁départ", + -11.903234481811523 + ], + [ + "immer", + -11.903335571289062 + ], + [ + "▁mondial", + -11.903374671936035 + ], + [ + "▁datelor", + -11.903703689575195 + ], + [ + "▁surgeon", + -11.903782844543457 + ], + [ + "▁demanding", + -11.903812408447266 + ], + [ + "▁concluded", + -11.903878211975098 + ], + [ + "țiile", + -11.903950691223145 + ], + [ + "marin", + -11.903999328613281 + ], + [ + "▁estim", + -11.904206275939941 + ], + [ + "▁Loan", + -11.904361724853516 + ], + [ + "sculpt", + -11.904373168945312 + ], + [ + "▁99", + -11.904391288757324 + ], + [ + "void", + -11.904400825500488 + ], + [ + "▁Empire", + -11.904499053955078 + ], + [ + "▁Brit", + -11.90450382232666 + ], + [ + "▁véhicule", + -11.904777526855469 + ], + [ + "▁dividend", + -11.905069351196289 + ], + [ + "▁refused", + -11.905077934265137 + ], + [ + "▁speaks", + -11.905156135559082 + ], + [ + "▁Morris", + -11.905282020568848 + ], + [ + "dict", + -11.905349731445312 + ], + [ + "▁funeral", + -11.905556678771973 + ], + [ + "▁Behandlung", + -11.905763626098633 + ], + [ + "▁Revolution", + -11.905905723571777 + ], + [ + "▁Sum", + -11.905935287475586 + ], + [ + "einigen", + -11.906030654907227 + ], + [ + "RES", + -11.906070709228516 + ], + [ + "▁vite", + -11.906071662902832 + ], + [ + "▁Captain", + -11.906190872192383 + ], + [ + "▁assurance", + -11.9061918258667 + ], + [ + "uga", + -11.906500816345215 + ], + [ + "▁conserv", + -11.906583786010742 + ], + [ + "▁therapeutic", + -11.906641006469727 + ], + [ + "▁Sweden", + -11.906753540039062 + ], + [ + "▁Lead", + -11.906888961791992 + ], + [ + "ément", + -11.907071113586426 + ], + [ + "▁53", + -11.90709114074707 + ], + 
[ + "▁fraction", + -11.9071683883667 + ], + [ + "▁magnet", + -11.907170295715332 + ], + [ + "assurer", + -11.907184600830078 + ], + [ + "▁Steuer", + -11.90733814239502 + ], + [ + "▁flori", + -11.90735149383545 + ], + [ + "▁charming", + -11.907588958740234 + ], + [ + "▁athletic", + -11.907621383666992 + ], + [ + "▁membri", + -11.907706260681152 + ], + [ + "▁Sep", + -11.907726287841797 + ], + [ + "ogue", + -11.907800674438477 + ], + [ + "▁familie", + -11.907800674438477 + ], + [ + "▁SW", + -11.90796947479248 + ], + [ + "▁diagnosed", + -11.908023834228516 + ], + [ + "RR", + -11.908143997192383 + ], + [ + "▁Fern", + -11.908233642578125 + ], + [ + "▁rational", + -11.908281326293945 + ], + [ + "▁talents", + -11.90828800201416 + ], + [ + "ziert", + -11.908317565917969 + ], + [ + "▁chemin", + -11.908459663391113 + ], + [ + "sheet", + -11.908562660217285 + ], + [ + "▁outer", + -11.908565521240234 + ], + [ + "▁Kap", + -11.908591270446777 + ], + [ + "▁HERE", + -11.908656120300293 + ], + [ + "▁uman", + -11.908824920654297 + ], + [ + "▁accompany", + -11.908880233764648 + ], + [ + "▁varieties", + -11.908881187438965 + ], + [ + "▁sensors", + -11.908957481384277 + ], + [ + "▁25%", + -11.90919017791748 + ], + [ + "▁tray", + -11.909354209899902 + ], + [ + "▁critique", + -11.909459114074707 + ], + [ + "▁puţin", + -11.909515380859375 + ], + [ + "▁Schüler", + -11.90953540802002 + ], + [ + "▁repar", + -11.909744262695312 + ], + [ + "▁overlook", + -11.909931182861328 + ], + [ + "▁surf", + -11.910048484802246 + ], + [ + "▁tasting", + -11.910118103027344 + ], + [ + "bog", + -11.91027545928955 + ], + [ + "▁Payment", + -11.910289764404297 + ], + [ + "▁Helen", + -11.91049575805664 + ], + [ + "▁Refer", + -11.910694122314453 + ], + [ + "application", + -11.910698890686035 + ], + [ + "lection", + -11.910856246948242 + ], + [ + "▁avril", + -11.911042213439941 + ], + [ + "▁Grace", + -11.911109924316406 + ], + [ + "▁kau", + -11.911274909973145 + ], + [ + "▁libraries", + -11.911319732666016 + ], + [ + "▁closest", + -11.911347389221191 + ], + [ + "▁coating", + -11.911351203918457 + ], + [ + "▁suicide", + -11.911364555358887 + ], + [ + "▁undergraduate", + -11.911449432373047 + ], + [ + "▁stitch", + -11.91149616241455 + ], + [ + "▁reset", + -11.911593437194824 + ], + [ + "▁Greece", + -11.911626815795898 + ], + [ + "▁Fred", + -11.91197681427002 + ], + [ + "▁18.", + -11.912047386169434 + ], + [ + "▁nuit", + -11.912087440490723 + ], + [ + "▁lying", + -11.912199974060059 + ], + [ + "▁cottage", + -11.91232681274414 + ], + [ + "bone", + -11.912477493286133 + ], + [ + "▁milieu", + -11.912480354309082 + ], + [ + "management", + -11.912623405456543 + ], + [ + "▁Freund", + -11.912724494934082 + ], + [ + "▁specially", + -11.912841796875 + ], + [ + "veut", + -11.912961959838867 + ], + [ + "▁necesare", + -11.912999153137207 + ], + [ + "▁cert", + -11.913081169128418 + ], + [ + "articul", + -11.913151741027832 + ], + [ + "150", + -11.913174629211426 + ], + [ + "rounded", + -11.913180351257324 + ], + [ + "▁longue", + -11.913193702697754 + ], + [ + "▁Quel", + -11.913240432739258 + ], + [ + "Until", + -11.913322448730469 + ], + [ + "▁700", + -11.913398742675781 + ], + [ + "▁installations", + -11.913423538208008 + ], + [ + "▁boats", + -11.913467407226562 + ], + [ + "Fig", + -11.913609504699707 + ], + [ + "▁cocktail", + -11.913613319396973 + ], + [ + "▁rocks", + -11.91366958618164 + ], + [ + "meinen", + -11.91374683380127 + ], + [ + "entrepreneur", + -11.913780212402344 + ], + [ + "schwarz", + -11.913924217224121 + ], + [ + "▁diesel", + 
-11.91392993927002 + ], + [ + "▁villages", + -11.913969039916992 + ], + [ + "▁cups", + -11.914076805114746 + ], + [ + "▁stairs", + -11.914241790771484 + ], + [ + "▁Match", + -11.914350509643555 + ], + [ + "Taking", + -11.914437294006348 + ], + [ + "prin", + -11.914469718933105 + ], + [ + "▁penal", + -11.91472053527832 + ], + [ + "partner", + -11.914867401123047 + ], + [ + "wave", + -11.91497802734375 + ], + [ + "▁baie", + -11.91515064239502 + ], + [ + "LAN", + -11.915151596069336 + ], + [ + "fix", + -11.915202140808105 + ], + [ + "▁surveillance", + -11.915295600891113 + ], + [ + "▁Register", + -11.915343284606934 + ], + [ + "oara", + -11.915536880493164 + ], + [ + "▁Phoenix", + -11.915602684020996 + ], + [ + "aktuellen", + -11.915613174438477 + ], + [ + "▁livres", + -11.915618896484375 + ], + [ + "▁entities", + -11.916102409362793 + ], + [ + "▁Regard", + -11.916112899780273 + ], + [ + "▁Jazz", + -11.91614055633545 + ], + [ + "▁flame", + -11.91616153717041 + ], + [ + "▁independence", + -11.916215896606445 + ], + [ + "▁Adventure", + -11.916341781616211 + ], + [ + "▁assign", + -11.916399955749512 + ], + [ + "▁Adult", + -11.916579246520996 + ], + [ + "kehr", + -11.916666984558105 + ], + [ + "▁ordering", + -11.916850090026855 + ], + [ + "▁charts", + -11.91687297821045 + ], + [ + "▁Român", + -11.916936874389648 + ], + [ + "bauen", + -11.916982650756836 + ], + [ + "▁Floor", + -11.917065620422363 + ], + [ + "▁Meet", + -11.917101860046387 + ], + [ + "▁compromise", + -11.917158126831055 + ], + [ + "regarded", + -11.917171478271484 + ], + [ + "02.", + -11.917215347290039 + ], + [ + "▁granite", + -11.917299270629883 + ], + [ + "▁Judge", + -11.917314529418945 + ], + [ + "opti", + -11.917373657226562 + ], + [ + "liste", + -11.917379379272461 + ], + [ + "▁capacité", + -11.917427062988281 + ], + [ + "▁criticism", + -11.917450904846191 + ], + [ + "LES", + -11.918198585510254 + ], + [ + "▁Century", + -11.918211936950684 + ], + [ + "▁mobility", + -11.918252944946289 + ], + [ + "▁variation", + -11.918622016906738 + ], + [ + "▁Utah", + -11.91867446899414 + ], + [ + "▁seminar", + -11.918678283691406 + ], + [ + "▁experiments", + -11.918803215026855 + ], + [ + "midst", + -11.918943405151367 + ], + [ + "▁Psycho", + -11.919002532958984 + ], + [ + "▁choses", + -11.919121742248535 + ], + [ + "▁Karl", + -11.919175148010254 + ], + [ + "▁ruling", + -11.919286727905273 + ], + [ + "▁Voice", + -11.919404983520508 + ], + [ + "▁împotriv", + -11.919442176818848 + ], + [ + "▁mesaj", + -11.919500350952148 + ], + [ + "▁vrei", + -11.919594764709473 + ], + [ + "fan", + -11.919601440429688 + ], + [ + "parent", + -11.919648170471191 + ], + [ + "▁oraș", + -11.919770240783691 + ], + [ + "▁printable", + -11.919777870178223 + ], + [ + "▁diver", + -11.919859886169434 + ], + [ + "▁ochi", + -11.919949531555176 + ], + [ + "▁teenager", + -11.920125961303711 + ], + [ + "▁Death", + -11.920150756835938 + ], + [ + "▁manque", + -11.920289993286133 + ], + [ + "ască", + -11.920345306396484 + ], + [ + "▁prob", + -11.9203519821167 + ], + [ + "▁télé", + -11.920354843139648 + ], + [ + "cursul", + -11.920378684997559 + ], + [ + "pion", + -11.92052173614502 + ], + [ + "▁dedication", + -11.920644760131836 + ], + [ + "▁opr", + -11.920687675476074 + ], + [ + "führung", + -11.920761108398438 + ], + [ + "▁cognitive", + -11.920827865600586 + ], + [ + "soft", + -11.920868873596191 + ], + [ + "▁19,", + -11.9209623336792 + ], + [ + "▁24-", + -11.921197891235352 + ], + [ + "▁legitimate", + -11.921220779418945 + ], + [ + "▁comedy", + -11.921277046203613 + ], + [ + 
"▁violation", + -11.921327590942383 + ], + [ + "▁disposal", + -11.921472549438477 + ], + [ + "▁liegen", + -11.921605110168457 + ], + [ + "ко", + -11.921878814697266 + ], + [ + "▁martie", + -11.921931266784668 + ], + [ + "▁Vas", + -11.92212200164795 + ], + [ + "rash", + -11.922134399414062 + ], + [ + "▁hadn", + -11.922174453735352 + ], + [ + "▁connu", + -11.922204971313477 + ], + [ + "▁regelmäßig", + -11.922216415405273 + ], + [ + "▁Webseite", + -11.922224998474121 + ], + [ + "▁failing", + -11.922273635864258 + ], + [ + "explique", + -11.922449111938477 + ], + [ + "▁Player", + -11.922513961791992 + ], + [ + "vul", + -11.922560691833496 + ], + [ + "camp", + -11.922992706298828 + ], + [ + "▁erreicht", + -11.922996520996094 + ], + [ + "▁tags", + -11.922998428344727 + ], + [ + "▁headline", + -11.923210144042969 + ], + [ + "▁banc", + -11.923253059387207 + ], + [ + "▁Mayor", + -11.923309326171875 + ], + [ + "trop", + -11.923395156860352 + ], + [ + "AK", + -11.9235258102417 + ], + [ + "▁lighter", + -11.923602104187012 + ], + [ + "▁syndrome", + -11.923604965209961 + ], + [ + "▁Adrian", + -11.92365550994873 + ], + [ + "▁EUR", + -11.923759460449219 + ], + [ + "▁Missouri", + -11.923916816711426 + ], + [ + "▁Chan", + -11.924108505249023 + ], + [ + "topped", + -11.924233436584473 + ], + [ + "▁nationwide", + -11.924276351928711 + ], + [ + "▁6-", + -11.924302101135254 + ], + [ + "final", + -11.924408912658691 + ], + [ + "ttes", + -11.924485206604004 + ], + [ + "▁FO", + -11.924537658691406 + ], + [ + "▁legi", + -11.924556732177734 + ], + [ + "▁Hum", + -11.924575805664062 + ], + [ + "vita", + -11.924662590026855 + ], + [ + "▁Regen", + -11.924695014953613 + ], + [ + "▁confusion", + -11.92498779296875 + ], + [ + "▁valori", + -11.925142288208008 + ], + [ + "mill", + -11.92516803741455 + ], + [ + "did", + -11.925237655639648 + ], + [ + "pid", + -11.925253868103027 + ], + [ + "▁implications", + -11.925284385681152 + ], + [ + "▁Value", + -11.92552375793457 + ], + [ + "lângă", + -11.925666809082031 + ], + [ + "▁véritable", + -11.92577075958252 + ], + [ + "▁Stick", + -11.925814628601074 + ], + [ + "zol", + -11.925835609436035 + ], + [ + "▁ebenso", + -11.925863265991211 + ], + [ + "west", + -11.925895690917969 + ], + [ + "▁auszu", + -11.92600154876709 + ], + [ + "▁adorable", + -11.926016807556152 + ], + [ + "▁clarity", + -11.92605209350586 + ], + [ + "▁Wash", + -11.926335334777832 + ], + [ + "▁alien", + -11.926423072814941 + ], + [ + "usement", + -11.926626205444336 + ], + [ + "▁bones", + -11.9266357421875 + ], + [ + "▁Beau", + -11.926726341247559 + ], + [ + "▁Jet", + -11.926727294921875 + ], + [ + "▁visibility", + -11.927034378051758 + ], + [ + "impose", + -11.927063941955566 + ], + [ + "food", + -11.927133560180664 + ], + [ + "▁duce", + -11.927361488342285 + ], + [ + "▁Format", + -11.927386283874512 + ], + [ + "▁durability", + -11.927424430847168 + ], + [ + "▁Prim", + -11.927614212036133 + ], + [ + "▁mele", + -11.927629470825195 + ], + [ + "▁dürfen", + -11.927631378173828 + ], + [ + "▁Angebote", + -11.92765998840332 + ], + [ + "▁discharge", + -11.927745819091797 + ], + [ + "▁Justin", + -11.928055763244629 + ], + [ + "▁shame", + -11.928228378295898 + ], + [ + "▁heated", + -11.928282737731934 + ], + [ + "ères", + -11.92856216430664 + ], + [ + "human", + -11.928810119628906 + ], + [ + "4.5", + -11.928831100463867 + ], + [ + "▁lien", + -11.928955078125 + ], + [ + "▁Alan", + -11.92896556854248 + ], + [ + "▁transmis", + -11.929130554199219 + ], + [ + "▁Bul", + -11.929137229919434 + ], + [ + "plu", + -11.929169654846191 
+ ], + [ + "acul", + -11.929337501525879 + ], + [ + "merk", + -11.929434776306152 + ], + [ + "▁altfel", + -11.929566383361816 + ], + [ + "deli", + -11.929689407348633 + ], + [ + "▁Cru", + -11.930001258850098 + ], + [ + "▁hommes", + -11.930127143859863 + ], + [ + "aurait", + -11.930137634277344 + ], + [ + "cca", + -11.930187225341797 + ], + [ + "▁Path", + -11.930208206176758 + ], + [ + "astronom", + -11.930241584777832 + ], + [ + "▁détail", + -11.930276870727539 + ], + [ + "▁blocked", + -11.930394172668457 + ], + [ + "iding", + -11.93044376373291 + ], + [ + "schä", + -11.930500030517578 + ], + [ + "▁30-", + -11.930624008178711 + ], + [ + "diction", + -11.930813789367676 + ], + [ + "▁pulling", + -11.930868148803711 + ], + [ + "▁Sample", + -11.930924415588379 + ], + [ + "▁renewable", + -11.930997848510742 + ], + [ + "▁Pinterest", + -11.93106746673584 + ], + [ + "▁Tages", + -11.93106746673584 + ], + [ + "▁shed", + -11.931171417236328 + ], + [ + "▁hart", + -11.931188583374023 + ], + [ + "▁serie", + -11.931200981140137 + ], + [ + "▁documentary", + -11.931208610534668 + ], + [ + "gebaut", + -11.931220054626465 + ], + [ + "▁Hause", + -11.931272506713867 + ], + [ + "share", + -11.931303977966309 + ], + [ + "▁inflation", + -11.93138599395752 + ], + [ + "▁gall", + -11.931504249572754 + ], + [ + "▁adjacent", + -11.931673049926758 + ], + [ + "jer", + -11.93173885345459 + ], + [ + "▁Universal", + -11.931946754455566 + ], + [ + "▁disabilities", + -11.931984901428223 + ], + [ + "▁proposition", + -11.93204116821289 + ], + [ + "Work", + -11.932293891906738 + ], + [ + "▁closure", + -11.932306289672852 + ], + [ + "▁separated", + -11.932496070861816 + ], + [ + "▁soda", + -11.932549476623535 + ], + [ + "▁elite", + -11.93263053894043 + ], + [ + "appro", + -11.93265438079834 + ], + [ + "▁acute", + -11.93266487121582 + ], + [ + "utton", + -11.932938575744629 + ], + [ + "▁facă", + -11.933053016662598 + ], + [ + "▁collector", + -11.933121681213379 + ], + [ + "▁unlock", + -11.933249473571777 + ], + [ + "▁Alpha", + -11.933267593383789 + ], + [ + "▁Used", + -11.933267593383789 + ], + [ + "▁applicants", + -11.933302879333496 + ], + [ + "▁înseamn", + -11.933387756347656 + ], + [ + "▁inclu", + -11.933414459228516 + ], + [ + "▁disclosure", + -11.933544158935547 + ], + [ + "▁Fahr", + -11.933995246887207 + ], + [ + "AST", + -11.934061050415039 + ], + [ + "▁vivre", + -11.934069633483887 + ], + [ + "»,", + -11.934167861938477 + ], + [ + "laud", + -11.93430233001709 + ], + [ + "▁soir", + -11.934365272521973 + ], + [ + "▁barrier", + -11.934405326843262 + ], + [ + "înd", + -11.934470176696777 + ], + [ + "▁ambition", + -11.93451976776123 + ], + [ + "asta", + -11.934550285339355 + ], + [ + "occupied", + -11.934747695922852 + ], + [ + "▁Gau", + -11.934774398803711 + ], + [ + "four", + -11.93481159210205 + ], + [ + "▁nap", + -11.934887886047363 + ], + [ + "iez", + -11.934922218322754 + ], + [ + "endra", + -11.935242652893066 + ], + [ + "gaben", + -11.935464859008789 + ], + [ + "▁Carol", + -11.935481071472168 + ], + [ + "▁Switzerland", + -11.935575485229492 + ], + [ + "▁Bond", + -11.935617446899414 + ], + [ + "▁crossing", + -11.935630798339844 + ], + [ + "▁Palace", + -11.9359769821167 + ], + [ + "NG", + -11.935986518859863 + ], + [ + "▁Budget", + -11.93622875213623 + ], + [ + "▁lid", + -11.936372756958008 + ], + [ + "bab", + -11.936393737792969 + ], + [ + "▁polish", + -11.936416625976562 + ], + [ + "▁herbs", + -11.93673038482666 + ], + [ + "▁dear", + -11.936747550964355 + ], + [ + "▁devrai", + -11.936846733093262 + ], + [ + "walk", + 
-11.936864852905273 + ], + [ + "▁humanity", + -11.936897277832031 + ], + [ + "▁tires", + -11.936978340148926 + ], + [ + "égal", + -11.936994552612305 + ], + [ + "▁bow", + -11.937032699584961 + ], + [ + "▁debris", + -11.937201499938965 + ], + [ + "▁keywords", + -11.937273025512695 + ], + [ + "irk", + -11.937345504760742 + ], + [ + "▁suspend", + -11.937360763549805 + ], + [ + "▁pourra", + -11.93738079071045 + ], + [ + "migran", + -11.937454223632812 + ], + [ + "thereby", + -11.937570571899414 + ], + [ + "▁Harris", + -11.937943458557129 + ], + [ + "ateurs", + -11.937956809997559 + ], + [ + "▁fal", + -11.938271522521973 + ], + [ + "alleged", + -11.938355445861816 + ], + [ + "noch", + -11.938494682312012 + ], + [ + "▁observation", + -11.938506126403809 + ], + [ + "▁București", + -11.93855094909668 + ], + [ + "▁SQL", + -11.938624382019043 + ], + [ + "▁Phase", + -11.938760757446289 + ], + [ + "▁adventures", + -11.93881607055664 + ], + [ + "▁Kol", + -11.938885688781738 + ], + [ + "▁professionnel", + -11.938916206359863 + ], + [ + "crit", + -11.939026832580566 + ], + [ + "LR", + -11.939313888549805 + ], + [ + "▁preview", + -11.939464569091797 + ], + [ + "▁highlighted", + -11.939942359924316 + ], + [ + "▁Stud", + -11.939949035644531 + ], + [ + "▁labour", + -11.939956665039062 + ], + [ + "MV", + -11.9399995803833 + ], + [ + "click", + -11.940049171447754 + ], + [ + "approche", + -11.94016170501709 + ], + [ + "tian", + -11.940183639526367 + ], + [ + "cité", + -11.940192222595215 + ], + [ + "▁Rain", + -11.94028377532959 + ], + [ + "typ", + -11.94032096862793 + ], + [ + "Usually", + -11.940435409545898 + ], + [ + "▁outlet", + -11.940513610839844 + ], + [ + "logging", + -11.940814018249512 + ], + [ + "▁Temperatur", + -11.940906524658203 + ], + [ + "▁Scottish", + -11.94090747833252 + ], + [ + "iga", + -11.940942764282227 + ], + [ + "▁glory", + -11.941086769104004 + ], + [ + "▁Rom", + -11.941242218017578 + ], + [ + "zeug", + -11.941337585449219 + ], + [ + "establishing", + -11.941339492797852 + ], + [ + "▁imaging", + -11.941926002502441 + ], + [ + "▁Beauty", + -11.942015647888184 + ], + [ + "igan", + -11.942042350769043 + ], + [ + "après", + -11.94224739074707 + ], + [ + "Adresse", + -11.942267417907715 + ], + [ + "cliff", + -11.942349433898926 + ], + [ + "▁unnecessary", + -11.943267822265625 + ], + [ + "▁slim", + -11.943324089050293 + ], + [ + "dir", + -11.943490982055664 + ], + [ + "▁leisure", + -11.943660736083984 + ], + [ + "▁principale", + -11.94368839263916 + ], + [ + "▁Viele", + -11.943770408630371 + ], + [ + "▁2007.", + -11.943802833557129 + ], + [ + "Hopefully", + -11.943829536437988 + ], + [ + "cola", + -11.943851470947266 + ], + [ + "▁Planet", + -11.943927764892578 + ], + [ + "▁orientation", + -11.943933486938477 + ], + [ + "▁angry", + -11.94419002532959 + ], + [ + "MIT", + -11.944234848022461 + ], + [ + "▁Kenya", + -11.944265365600586 + ], + [ + "▁bless", + -11.94435977935791 + ], + [ + "▁Fill", + -11.944524765014648 + ], + [ + "▁compar", + -11.944664001464844 + ], + [ + "▁curtain", + -11.94473934173584 + ], + [ + "ţei", + -11.944754600524902 + ], + [ + "▁Az", + -11.94482421875 + ], + [ + "▁Rang", + -11.944908142089844 + ], + [ + "▁dominant", + -11.944974899291992 + ], + [ + "race", + -11.944985389709473 + ], + [ + "▁Target", + -11.944987297058105 + ], + [ + "▁manually", + -11.944987297058105 + ], + [ + "objet", + -11.945024490356445 + ], + [ + "thrown", + -11.945131301879883 + ], + [ + "NF", + -11.945149421691895 + ], + [ + "durant", + -11.945185661315918 + ], + [ + "rect", + 
-11.945302963256836 + ], + [ + "▁Größe", + -11.945320129394531 + ], + [ + "VM", + -11.9453763961792 + ], + [ + "▁aprilie", + -11.945476531982422 + ], + [ + "▁Welche", + -11.945639610290527 + ], + [ + "▁verde", + -11.946157455444336 + ], + [ + "▁Portugal", + -11.946266174316406 + ], + [ + "▁algorithm", + -11.94627571105957 + ], + [ + "ăț", + -11.946328163146973 + ], + [ + "▁Grey", + -11.946371078491211 + ], + [ + "▁cleaned", + -11.94644832611084 + ], + [ + "▁modes", + -11.946463584899902 + ], + [ + "▁relaxation", + -11.946599006652832 + ], + [ + "mbr", + -11.946786880493164 + ], + [ + "étique", + -11.946821212768555 + ], + [ + "Her", + -11.946904182434082 + ], + [ + "▁beta", + -11.946952819824219 + ], + [ + "▁nobody", + -11.94699764251709 + ], + [ + "▁aplic", + -11.947060585021973 + ], + [ + "present", + -11.947080612182617 + ], + [ + "emis", + -11.947197914123535 + ], + [ + "éléments", + -11.947257995605469 + ], + [ + "▁lately", + -11.947303771972656 + ], + [ + "fab", + -11.94732666015625 + ], + [ + "▁aluminiu", + -11.947373390197754 + ], + [ + "▁vest", + -11.947524070739746 + ], + [ + "▁statue", + -11.947558403015137 + ], + [ + "▁publice", + -11.947586059570312 + ], + [ + "▁merchandise", + -11.9476900100708 + ], + [ + "▁relat", + -11.947810173034668 + ], + [ + "git", + -11.94796371459961 + ], + [ + "▁interne", + -11.948281288146973 + ], + [ + "▁Tokyo", + -11.948325157165527 + ], + [ + "chal", + -11.948348045349121 + ], + [ + "contacted", + -11.948430061340332 + ], + [ + "▁tras", + -11.948455810546875 + ], + [ + "▁Clinic", + -11.948626518249512 + ], + [ + "▁unbe", + -11.948633193969727 + ], + [ + "▁dumneavoastra", + -11.948798179626465 + ], + [ + "float", + -11.949078559875488 + ], + [ + "isson", + -11.94909381866455 + ], + [ + "▁vessel", + -11.949126243591309 + ], + [ + "attempting", + -11.949161529541016 + ], + [ + "▁doute", + -11.94918441772461 + ], + [ + "▁Leadership", + -11.949322700500488 + ], + [ + "▁sustain", + -11.94947338104248 + ], + [ + "▁textile", + -11.949666023254395 + ], + [ + "auer", + -11.949702262878418 + ], + [ + "▁90%", + -11.949899673461914 + ], + [ + "garten", + -11.949911117553711 + ], + [ + "▁adauga", + -11.949991226196289 + ], + [ + "▁Kil", + -11.950061798095703 + ], + [ + "▁troops", + -11.950420379638672 + ], + [ + "▁pale", + -11.950568199157715 + ], + [ + "host", + -11.950743675231934 + ], + [ + "▁cry", + -11.950757026672363 + ], + [ + "▁Alb", + -11.950793266296387 + ], + [ + "▁Brad", + -11.95089340209961 + ], + [ + "▁bicycle", + -11.951054573059082 + ], + [ + "▁24/7", + -11.951217651367188 + ], + [ + "▁с", + -11.951228141784668 + ], + [ + "▁stimul", + -11.951401710510254 + ], + [ + "gler", + -11.951445579528809 + ], + [ + "▁notwendig", + -11.951496124267578 + ], + [ + "▁cousin", + -11.95158863067627 + ], + [ + "cheie", + -11.951600074768066 + ], + [ + "hay", + -11.951751708984375 + ], + [ + "▁rezolv", + -11.952134132385254 + ], + [ + "▁THIS", + -11.952143669128418 + ], + [ + "ordre", + -11.952157974243164 + ], + [ + "iști", + -11.952173233032227 + ], + [ + "▁conclude", + -11.952310562133789 + ], + [ + "▁Lage", + -11.952327728271484 + ], + [ + "▁Entertainment", + -11.952454566955566 + ], + [ + "▁valued", + -11.952478408813477 + ], + [ + "ktion", + -11.95253849029541 + ], + [ + "▁priorities", + -11.95268440246582 + ], + [ + "▁1986", + -11.952770233154297 + ], + [ + "▁fatal", + -11.952934265136719 + ], + [ + "▁accurately", + -11.952988624572754 + ], + [ + "▁1987", + -11.953022956848145 + ], + [ + "▁folk", + -11.953073501586914 + ], + [ + "7)", + 
-11.953163146972656 + ], + [ + "führer", + -11.95360279083252 + ], + [ + "▁knot", + -11.953612327575684 + ], + [ + "haltung", + -11.953720092773438 + ], + [ + "▁Charlie", + -11.953733444213867 + ], + [ + "âge", + -11.95376205444336 + ], + [ + "▁threshold", + -11.954041481018066 + ], + [ + "▁assault", + -11.954130172729492 + ], + [ + "▁meist", + -11.954141616821289 + ], + [ + "bine", + -11.954155921936035 + ], + [ + "surprisingly", + -11.954171180725098 + ], + [ + "▁Protect", + -11.954180717468262 + ], + [ + "▁Hack", + -11.954258918762207 + ], + [ + "▁Quant", + -11.954537391662598 + ], + [ + "▁Cet", + -11.954782485961914 + ], + [ + "▁convinced", + -11.95481014251709 + ], + [ + "▁muncă", + -11.955033302307129 + ], + [ + "dging", + -11.955066680908203 + ], + [ + "▁Millionen", + -11.955129623413086 + ], + [ + "zahlung", + -11.955148696899414 + ], + [ + "▁anticipated", + -11.955192565917969 + ], + [ + "▁brass", + -11.9552001953125 + ], + [ + "KO", + -11.955244064331055 + ], + [ + "▁culori", + -11.955286979675293 + ], + [ + "▁Aero", + -11.955326080322266 + ], + [ + "▁intermediu", + -11.955373764038086 + ], + [ + "▁Philippines", + -11.955381393432617 + ], + [ + "▁jury", + -11.955387115478516 + ], + [ + "▁Funktion", + -11.95569896697998 + ], + [ + "▁probe", + -11.955704689025879 + ], + [ + "TL", + -11.955748558044434 + ], + [ + "1.0", + -11.955804824829102 + ], + [ + "ELL", + -11.95581340789795 + ], + [ + "She", + -11.956001281738281 + ], + [ + "▁Blood", + -11.956073760986328 + ], + [ + "▁Dean", + -11.956111907958984 + ], + [ + "▁scène", + -11.9561185836792 + ], + [ + "volu", + -11.95621395111084 + ], + [ + "▁Epi", + -11.95621395111084 + ], + [ + "▁séjour", + -11.95627498626709 + ], + [ + "▁Smartphone", + -11.956306457519531 + ], + [ + "▁fired", + -11.956357955932617 + ], + [ + "beat", + -11.95650577545166 + ], + [ + "▁pockets", + -11.956506729125977 + ], + [ + "▁serviciu", + -11.956624031066895 + ], + [ + "▁affairs", + -11.95678424835205 + ], + [ + "▁Ry", + -11.956842422485352 + ], + [ + "▁Stadium", + -11.956954956054688 + ], + [ + "▁snacks", + -11.957182884216309 + ], + [ + "▁efectu", + -11.957221031188965 + ], + [ + "▁Richtung", + -11.957273483276367 + ], + [ + "▁dresses", + -11.957352638244629 + ], + [ + "▁Medien", + -11.95744800567627 + ], + [ + "writer", + -11.95759105682373 + ], + [ + "changing", + -11.957655906677246 + ], + [ + "▁supportive", + -11.957849502563477 + ], + [ + "▁beneath", + -11.957873344421387 + ], + [ + "paid", + -11.958078384399414 + ], + [ + "▁customize", + -11.958155632019043 + ], + [ + "▁Ferr", + -11.958187103271484 + ], + [ + "reaches", + -11.958338737487793 + ], + [ + "arma", + -11.958401679992676 + ], + [ + "ción", + -11.958598136901855 + ], + [ + "▁elderly", + -11.959243774414062 + ], + [ + "▁modification", + -11.95934009552002 + ], + [ + "▁perfection", + -11.959381103515625 + ], + [ + "▁Allow", + -11.959492683410645 + ], + [ + "▁belonging", + -11.959542274475098 + ], + [ + "▁compound", + -11.959589004516602 + ], + [ + "▁Results", + -11.959681510925293 + ], + [ + "▁astăzi", + -11.959793090820312 + ], + [ + "▁Liber", + -11.959818840026855 + ], + [ + "jor", + -11.959850311279297 + ], + [ + "▁Nin", + -11.959980964660645 + ], + [ + "▁lumina", + -11.959992408752441 + ], + [ + "▁130", + -11.960073471069336 + ], + [ + "▁Platform", + -11.960121154785156 + ], + [ + "▁SMS", + -11.960221290588379 + ], + [ + "▁medic", + -11.96024227142334 + ], + [ + "hör", + -11.960315704345703 + ], + [ + "▁Kas", + -11.96038818359375 + ], + [ + "▁tomato", + -11.960403442382812 + ], + [ + 
"▁logiciel", + -11.960505485534668 + ], + [ + "php", + -11.960654258728027 + ], + [ + "▁premises", + -11.96071720123291 + ], + [ + "▁Communication", + -11.96072769165039 + ], + [ + "▁reprezintă", + -11.960762023925781 + ], + [ + "▁Partners", + -11.960866928100586 + ], + [ + "▁RV", + -11.961090087890625 + ], + [ + "▁pants", + -11.961197853088379 + ], + [ + "▁envie", + -11.961256980895996 + ], + [ + "▁commerce", + -11.961263656616211 + ], + [ + "▁tears", + -11.961298942565918 + ], + [ + "▁cooler", + -11.961494445800781 + ], + [ + "strand", + -11.961556434631348 + ], + [ + "▁Gil", + -11.961588859558105 + ], + [ + "▁référence", + -11.961641311645508 + ], + [ + "▁electronics", + -11.961681365966797 + ], + [ + "exposition", + -11.961700439453125 + ], + [ + "▁Caribbean", + -11.96171760559082 + ], + [ + "▁compelling", + -11.96171760559082 + ], + [ + "luci", + -11.961723327636719 + ], + [ + "▁Brooklyn", + -11.961892127990723 + ], + [ + "▁Thai", + -11.961950302124023 + ], + [ + "dler", + -11.96198844909668 + ], + [ + "▁supra", + -11.962016105651855 + ], + [ + "centered", + -11.962026596069336 + ], + [ + "▁metro", + -11.962081909179688 + ], + [ + "▁03", + -11.962299346923828 + ], + [ + "▁enrich", + -11.962437629699707 + ], + [ + "▁adevarat", + -11.962594985961914 + ], + [ + "5000", + -11.962961196899414 + ], + [ + "▁bell", + -11.96297550201416 + ], + [ + "▁sine", + -11.962996482849121 + ], + [ + "▁appealing", + -11.963088989257812 + ], + [ + "clam", + -11.963116645812988 + ], + [ + "▁vorhanden", + -11.963165283203125 + ], + [ + "▁pickup", + -11.963268280029297 + ], + [ + "▁Alaska", + -11.963269233703613 + ], + [ + "▁Nacht", + -11.963300704956055 + ], + [ + "borough", + -11.9633207321167 + ], + [ + "▁Blanc", + -11.96340274810791 + ], + [ + "▁apare", + -11.963616371154785 + ], + [ + "▁Works", + -11.963798522949219 + ], + [ + "mettent", + -11.963801383972168 + ], + [ + "atter", + -11.96389389038086 + ], + [ + "terra", + -11.963946342468262 + ], + [ + "▁Bit", + -11.964105606079102 + ], + [ + "RL", + -11.964131355285645 + ], + [ + "▁Wander", + -11.964262962341309 + ], + [ + "▁Hawk", + -11.964595794677734 + ], + [ + "▁Probleme", + -11.964665412902832 + ], + [ + "regel", + -11.964729309082031 + ], + [ + "hne", + -11.964739799499512 + ], + [ + "fass", + -11.96486759185791 + ], + [ + "▁Andy", + -11.965014457702637 + ], + [ + "▁befinde", + -11.965179443359375 + ], + [ + "boo", + -11.965265274047852 + ], + [ + "▁connectivity", + -11.965304374694824 + ], + [ + "▁spielt", + -11.965418815612793 + ], + [ + "zweiten", + -11.96547794342041 + ], + [ + "ţilor", + -11.965526580810547 + ], + [ + "▁confi", + -11.96561336517334 + ], + [ + "▁schlecht", + -11.965773582458496 + ], + [ + "▁Beginn", + -11.96581745147705 + ], + [ + "▁floating", + -11.965903282165527 + ], + [ + "nimmt", + -11.966071128845215 + ], + [ + "▁arbeiten", + -11.96611213684082 + ], + [ + "pillar", + -11.966131210327148 + ], + [ + "sterreich", + -11.966347694396973 + ], + [ + "▁Schule", + -11.966446876525879 + ], + [ + "▁durée", + -11.966521263122559 + ], + [ + "▁honestly", + -11.96653938293457 + ], + [ + "▁acel", + -11.9666166305542 + ], + [ + "▁Prozess", + -11.96662425994873 + ], + [ + "Min", + -11.966629028320312 + ], + [ + "enii", + -11.966632843017578 + ], + [ + "DAY", + -11.966758728027344 + ], + [ + "▁Blo", + -11.966806411743164 + ], + [ + "▁bolt", + -11.966946601867676 + ], + [ + "sicher", + -11.967070579528809 + ], + [ + "▁17,", + -11.967122077941895 + ], + [ + "▁anchor", + -11.967215538024902 + ], + [ + "▁consistency", + -11.967241287231445 + ], 
+ [ + "▁relatives", + -11.967263221740723 + ], + [ + "▁lac", + -11.967385292053223 + ], + [ + "105", + -11.967432975769043 + ], + [ + "▁Craig", + -11.967534065246582 + ], + [ + "▁mandate", + -11.967598915100098 + ], + [ + "▁bedeutet", + -11.967674255371094 + ], + [ + "▁Soviet", + -11.967680931091309 + ], + [ + "▁arguments", + -11.967938423156738 + ], + [ + "▁Gebäude", + -11.967997550964355 + ], + [ + "▁Parliament", + -11.968005180358887 + ], + [ + "▁Kha", + -11.968087196350098 + ], + [ + "nica", + -11.968130111694336 + ], + [ + "▁Amazing", + -11.968162536621094 + ], + [ + "gründe", + -11.968179702758789 + ], + [ + "▁Ott", + -11.968269348144531 + ], + [ + "Exp", + -11.968314170837402 + ], + [ + "▁ianuarie", + -11.96848201751709 + ], + [ + "riot", + -11.968571662902832 + ], + [ + "▁futur", + -11.968626976013184 + ], + [ + "▁Honda", + -11.968647956848145 + ], + [ + "!!!!", + -11.96865177154541 + ], + [ + "▁citit", + -11.968689918518066 + ], + [ + "▁22,", + -11.968708992004395 + ], + [ + "țional", + -11.968711853027344 + ], + [ + "▁lovers", + -11.968732833862305 + ], + [ + "▁Current", + -11.968835830688477 + ], + [ + "▁drone", + -11.96927261352539 + ], + [ + "▁promising", + -11.969335556030273 + ], + [ + "devoted", + -11.969443321228027 + ], + [ + "▁Born", + -11.969520568847656 + ], + [ + "▁viitor", + -11.969589233398438 + ], + [ + "▁ritual", + -11.969614028930664 + ], + [ + "▁Guard", + -11.969681739807129 + ], + [ + "09.", + -11.969828605651855 + ], + [ + "▁Py", + -11.970260620117188 + ], + [ + "▁finds", + -11.970380783081055 + ], + [ + "▁boli", + -11.970394134521484 + ], + [ + "▁Mitglieder", + -11.970697402954102 + ], + [ + "ogni", + -11.97107982635498 + ], + [ + "▁stones", + -11.97118854522705 + ], + [ + "rox", + -11.971210479736328 + ], + [ + "▁dock", + -11.971390724182129 + ], + [ + "▁onion", + -11.97144889831543 + ], + [ + "▁classified", + -11.971538543701172 + ], + [ + "big", + -11.971833229064941 + ], + [ + "RG", + -11.971857070922852 + ], + [ + "influenced", + -11.971955299377441 + ], + [ + "▁sudden", + -11.971988677978516 + ], + [ + "▁ample", + -11.97204303741455 + ], + [ + "án", + -11.972095489501953 + ], + [ + "▁ornament", + -11.972122192382812 + ], + [ + "datele", + -11.972227096557617 + ], + [ + "▁Dad", + -11.97225284576416 + ], + [ + "BER", + -11.972278594970703 + ], + [ + "gerecht", + -11.972380638122559 + ], + [ + "kett", + -11.972536087036133 + ], + [ + "▁Antonio", + -11.972572326660156 + ], + [ + "Nu", + -11.972834587097168 + ], + [ + "dium", + -11.97284984588623 + ], + [ + "CAD", + -11.972850799560547 + ], + [ + "▁bundle", + -11.972916603088379 + ], + [ + "▁Vari", + -11.97301197052002 + ], + [ + "▁thrive", + -11.973020553588867 + ], + [ + "▁Seminar", + -11.973071098327637 + ], + [ + "wire", + -11.973084449768066 + ], + [ + "▁contributing", + -11.973114967346191 + ], + [ + "▁Bour", + -11.97320556640625 + ], + [ + "▁dori", + -11.973206520080566 + ], + [ + "▁packing", + -11.97343921661377 + ], + [ + "▁colleges", + -11.973459243774414 + ], + [ + "▁garbage", + -11.97366714477539 + ], + [ + "▁vector", + -11.973837852478027 + ], + [ + "▁suggestion", + -11.973897933959961 + ], + [ + "borne", + -11.973904609680176 + ], + [ + "▁Listen", + -11.973938941955566 + ], + [ + "▁Prix", + -11.973957061767578 + ], + [ + "viennent", + -11.974162101745605 + ], + [ + "insbesondere", + -11.97426700592041 + ], + [ + "▁fonctionne", + -11.974435806274414 + ], + [ + "▁mainstream", + -11.974485397338867 + ], + [ + "▁merci", + -11.974574089050293 + ], + [ + "oko", + -11.97460651397705 + ], + [ + 
"▁Commerce", + -11.97493839263916 + ], + [ + "▁droits", + -11.975115776062012 + ], + [ + "▁muzica", + -11.975141525268555 + ], + [ + "▁profesor", + -11.9751558303833 + ], + [ + "▁epic", + -11.97518253326416 + ], + [ + "▁intuitive", + -11.975186347961426 + ], + [ + "▁aggregate", + -11.975223541259766 + ], + [ + "▁vaccine", + -11.97529411315918 + ], + [ + "▁dank", + -11.975459098815918 + ], + [ + "▁situ", + -11.975578308105469 + ], + [ + "▁Cand", + -11.975593566894531 + ], + [ + "▁Ganz", + -11.97562313079834 + ], + [ + "▁Crystal", + -11.97578239440918 + ], + [ + "▁discretion", + -11.975825309753418 + ], + [ + "mug", + -11.975997924804688 + ], + [ + "▁anzu", + -11.976144790649414 + ], + [ + "▁cement", + -11.97616958618164 + ], + [ + "▁priest", + -11.97625732421875 + ], + [ + "▁rejected", + -11.976298332214355 + ], + [ + "▁Summit", + -11.976325988769531 + ], + [ + "▁Sara", + -11.976424217224121 + ], + [ + "▁palette", + -11.976527214050293 + ], + [ + "▁continuare", + -11.976569175720215 + ], + [ + "uge", + -11.976676940917969 + ], + [ + "ryl", + -11.976844787597656 + ], + [ + "▁Solid", + -11.977142333984375 + ], + [ + "▁meilleure", + -11.977177619934082 + ], + [ + "▁Tennessee", + -11.977248191833496 + ], + [ + "rail", + -11.977326393127441 + ], + [ + "▁attributes", + -11.9773530960083 + ], + [ + "▁vessels", + -11.977840423583984 + ], + [ + "cylinder", + -11.977900505065918 + ], + [ + "▁parfait", + -11.977916717529297 + ], + [ + "abb", + -11.97801399230957 + ], + [ + "▁Julie", + -11.97806167602539 + ], + [ + "▁pièces", + -11.978120803833008 + ], + [ + "▁proiecte", + -11.978142738342285 + ], + [ + "médi", + -11.978273391723633 + ], + [ + "▁décembre", + -11.9783935546875 + ], + [ + "Per", + -11.97841739654541 + ], + [ + "1/", + -11.978520393371582 + ], + [ + "regulated", + -11.978601455688477 + ], + [ + "▁Dy", + -11.978633880615234 + ], + [ + "▁23,", + -11.978694915771484 + ], + [ + "beck", + -11.978763580322266 + ], + [ + "tură", + -11.97885513305664 + ], + [ + "▁Chiar", + -11.978931427001953 + ], + [ + "▁isolated", + -11.979012489318848 + ], + [ + "▁kennen", + -11.979259490966797 + ], + [ + "Du", + -11.979260444641113 + ], + [ + "reflected", + -11.979482650756836 + ], + [ + "▁belong", + -11.979571342468262 + ], + [ + "▁welcomed", + -11.97969913482666 + ], + [ + "▁Rate", + -11.979776382446289 + ], + [ + "prestigious", + -11.979859352111816 + ], + [ + "▁1/4", + -11.979930877685547 + ], + [ + "▁distinction", + -11.979966163635254 + ], + [ + "▁boring", + -11.980001449584961 + ], + [ + "▁booked", + -11.980369567871094 + ], + [ + "▁citizen", + -11.980441093444824 + ], + [ + "▁comprises", + -11.980498313903809 + ], + [ + "▁aufge", + -11.98051929473877 + ], + [ + "GL", + -11.980566024780273 + ], + [ + "▁nearest", + -11.980616569519043 + ], + [ + "▁printr", + -11.980692863464355 + ], + [ + "▁département", + -11.981318473815918 + ], + [ + "▁planner", + -11.981510162353516 + ], + [ + "▁Rai", + -11.981817245483398 + ], + [ + "▁Broad", + -11.981934547424316 + ], + [ + "▁pastor", + -11.981947898864746 + ], + [ + "▁reservation", + -11.982243537902832 + ], + [ + "▁decembrie", + -11.982315063476562 + ], + [ + "▁suficient", + -11.982501983642578 + ], + [ + "geld", + -11.982560157775879 + ], + [ + "training", + -11.982620239257812 + ], + [ + "deshalb", + -11.982634544372559 + ], + [ + "▁chaud", + -11.982651710510254 + ], + [ + "Cor", + -11.982662200927734 + ], + [ + "▁Grade", + -11.982769966125488 + ], + [ + "▁faţă", + -11.982809066772461 + ], + [ + "story", + -11.982839584350586 + ], + [ + "gericht", + 
-11.98286247253418 + ], + [ + "▁Got", + -11.982954025268555 + ], + [ + "particulièrement", + -11.982976913452148 + ], + [ + "▁bump", + -11.983051300048828 + ], + [ + "▁fatigue", + -11.983160018920898 + ], + [ + "Activ", + -11.983250617980957 + ], + [ + "▁numéro", + -11.983302116394043 + ], + [ + "▁stranger", + -11.983312606811523 + ], + [ + "▁Skin", + -11.983327865600586 + ], + [ + "add", + -11.98344898223877 + ], + [ + "Ainsi", + -11.98357105255127 + ], + [ + "▁assists", + -11.983684539794922 + ], + [ + "▁zusätzlich", + -11.983943939208984 + ], + [ + "▁vede", + -11.983979225158691 + ], + [ + "RON", + -11.984108924865723 + ], + [ + "▁seemingly", + -11.984126091003418 + ], + [ + "▁NU", + -11.98417854309082 + ], + [ + "geb", + -11.984273910522461 + ], + [ + "▁Release", + -11.984353065490723 + ], + [ + "▁throwing", + -11.984427452087402 + ], + [ + "▁Alabama", + -11.984447479248047 + ], + [ + "▁Something", + -11.984590530395508 + ], + [ + "▁Cuba", + -11.98464584350586 + ], + [ + "▁Verbindung", + -11.984649658203125 + ], + [ + "▁Cir", + -11.984654426574707 + ], + [ + "your", + -11.984713554382324 + ], + [ + "-13", + -11.984748840332031 + ], + [ + "▁Delta", + -11.984801292419434 + ], + [ + "▁Twin", + -11.98504638671875 + ], + [ + "▁governance", + -11.985156059265137 + ], + [ + "▁groom", + -11.985310554504395 + ], + [ + "▁conception", + -11.98533821105957 + ], + [ + "▁governor", + -11.985383033752441 + ], + [ + "▁Spar", + -11.985416412353516 + ], + [ + "▁coastal", + -11.985652923583984 + ], + [ + "▁Seven", + -11.985856056213379 + ], + [ + "▁inclusive", + -11.986002922058105 + ], + [ + "cili", + -11.986035346984863 + ], + [ + "▁Ridge", + -11.986100196838379 + ], + [ + "teller", + -11.986224174499512 + ], + [ + "▁Kin", + -11.986247062683105 + ], + [ + "leiter", + -11.986279487609863 + ], + [ + "stern", + -11.986364364624023 + ], + [ + "change", + -11.986404418945312 + ], + [ + "▁presidential", + -11.986433982849121 + ], + [ + "▁composer", + -11.986544609069824 + ], + [ + "Stu", + -11.986560821533203 + ], + [ + "▁Frankfurt", + -11.986584663391113 + ], + [ + "prä", + -11.986639976501465 + ], + [ + "▁Ideal", + -11.986644744873047 + ], + [ + "▁linear", + -11.986857414245605 + ], + [ + "▁bloom", + -11.986879348754883 + ], + [ + "▁grades", + -11.986881256103516 + ], + [ + "mettant", + -11.98692512512207 + ], + [ + "▁finishes", + -11.986952781677246 + ], + [ + "holz", + -11.987086296081543 + ], + [ + "▁dirty", + -11.987317085266113 + ], + [ + "▁Roh", + -11.987386703491211 + ], + [ + "▁Praxis", + -11.987408638000488 + ], + [ + "tempo", + -11.987433433532715 + ], + [ + "▁attempted", + -11.987433433532715 + ], + [ + "▁primar", + -11.987434387207031 + ], + [ + "▁pomp", + -11.987528800964355 + ], + [ + "▁tolle", + -11.987614631652832 + ], + [ + "▁adres", + -11.988011360168457 + ], + [ + "▁Between", + -11.988066673278809 + ], + [ + "▁ruin", + -11.988432884216309 + ], + [ + "▁matériel", + -11.988561630249023 + ], + [ + "MER", + -11.988913536071777 + ], + [ + "Nevertheless", + -11.989055633544922 + ], + [ + "▁corruption", + -11.989119529724121 + ], + [ + "spire", + -11.989180564880371 + ], + [ + "▁mou", + -11.989208221435547 + ], + [ + "ROM", + -11.989278793334961 + ], + [ + "▁underground", + -11.98935604095459 + ], + [ + "▁relativ", + -11.989389419555664 + ], + [ + "waited", + -11.989462852478027 + ], + [ + "▁speeds", + -11.989468574523926 + ], + [ + "▁adjusted", + -11.989486694335938 + ], + [ + "▁Flat", + -11.989514350891113 + ], + [ + "UND", + -11.98965835571289 + ], + [ + "▁individuelle", + 
-11.989744186401367 + ], + [ + "▁anybody", + -11.98978042602539 + ], + [ + "EO", + -11.989790916442871 + ], + [ + "->", + -11.989791870117188 + ], + [ + "▁Spend", + -11.989876747131348 + ], + [ + "aktion", + -11.990011215209961 + ], + [ + "édit", + -11.99006462097168 + ], + [ + "▁quest", + -11.990078926086426 + ], + [ + "rind", + -11.990541458129883 + ], + [ + "▁mediu", + -11.99057388305664 + ], + [ + "▁barriers", + -11.99062442779541 + ], + [ + "▁répondre", + -11.990633010864258 + ], + [ + "▁novembre", + -11.990708351135254 + ], + [ + "▁champ", + -11.990736961364746 + ], + [ + "saw", + -11.990757942199707 + ], + [ + "▁fed", + -11.990804672241211 + ], + [ + "▁favorites", + -11.990939140319824 + ], + [ + "▁shield", + -11.991055488586426 + ], + [ + "▁Wide", + -11.991146087646484 + ], + [ + "▁problema", + -11.991445541381836 + ], + [ + "▁Asta", + -11.991525650024414 + ], + [ + "▁refreshing", + -11.99168872833252 + ], + [ + "hey", + -11.991692543029785 + ], + [ + "obtaining", + -11.991788864135742 + ], + [ + "▁parler", + -11.992072105407715 + ], + [ + "▁Cele", + -11.992134094238281 + ], + [ + "frage", + -11.992136001586914 + ], + [ + "écran", + -11.992324829101562 + ], + [ + "▁cleared", + -11.992448806762695 + ], + [ + "zehn", + -11.992594718933105 + ], + [ + "parmi", + -11.992647171020508 + ], + [ + "änder", + -11.992691993713379 + ], + [ + "▁Defense", + -11.992693901062012 + ], + [ + "tatea", + -11.992696762084961 + ], + [ + "▁reasonably", + -11.992939949035645 + ], + [ + "▁Idee", + -11.992985725402832 + ], + [ + "nehm", + -11.993000030517578 + ], + [ + "technologie", + -11.993020057678223 + ], + [ + "atura", + -11.993048667907715 + ], + [ + "▁slope", + -11.993332862854004 + ], + [ + "Hence", + -11.993351936340332 + ], + [ + "▁40%", + -11.993391990661621 + ], + [ + "▁jewe", + -11.993448257446289 + ], + [ + "▁queries", + -11.993470191955566 + ], + [ + "▁$8", + -11.994096755981445 + ], + [ + "▁Parker", + -11.994107246398926 + ], + [ + "▁publique", + -11.994488716125488 + ], + [ + "quant", + -11.994529724121094 + ], + [ + "issue", + -11.994690895080566 + ], + [ + "▁Cleveland", + -11.994847297668457 + ], + [ + "4,000", + -11.995071411132812 + ], + [ + "IDE", + -11.995145797729492 + ], + [ + "▁Barbara", + -11.995233535766602 + ], + [ + "udge", + -11.995477676391602 + ], + [ + "corn", + -11.99554443359375 + ], + [ + "veți", + -11.995588302612305 + ], + [ + "▁proteins", + -11.995707511901855 + ], + [ + "▁trăi", + -11.995793342590332 + ], + [ + "▁mijloc", + -11.995842933654785 + ], + [ + "logie", + -11.995884895324707 + ], + [ + "▁Walter", + -11.995884895324707 + ], + [ + "heißt", + -11.99593448638916 + ], + [ + "search", + -11.995946884155273 + ], + [ + "▁hochwertige", + -11.996010780334473 + ], + [ + "▁încerc", + -11.996014595031738 + ], + [ + "▁administrator", + -11.99608039855957 + ], + [ + "tension", + -11.996133804321289 + ], + [ + "▁homemade", + -11.996438026428223 + ], + [ + "▁$20", + -11.99651050567627 + ], + [ + "▁leben", + -11.996662139892578 + ], + [ + "netz", + -11.996665954589844 + ], + [ + "▁intensity", + -11.996882438659668 + ], + [ + "▁clever", + -11.996891975402832 + ], + [ + "▁installer", + -11.996999740600586 + ], + [ + "▁Wand", + -11.997087478637695 + ], + [ + "meister", + -11.997130393981934 + ], + [ + "ziel", + -11.99744701385498 + ], + [ + "▁architect", + -11.99748706817627 + ], + [ + "▁crede", + -11.997512817382812 + ], + [ + "▁Sleep", + -11.997675895690918 + ], + [ + "▁demonstr", + -11.997745513916016 + ], + [ + "cake", + -11.997781753540039 + ], + [ + "▁Cheap", + 
-11.997783660888672 + ], + [ + "pool", + -11.9979829788208 + ], + [ + "▁gadget", + -11.998004913330078 + ], + [ + "▁Anbieter", + -11.998005867004395 + ], + [ + "▁Jonathan", + -11.998170852661133 + ], + [ + "ül", + -11.998492240905762 + ], + [ + "▁Harvard", + -11.998503684997559 + ], + [ + "▁1985", + -11.998773574829102 + ], + [ + "HP", + -11.998839378356934 + ], + [ + "▁afara", + -11.99893569946289 + ], + [ + "▁halten", + -11.999008178710938 + ], + [ + "▁Technik", + -11.999042510986328 + ], + [ + "▁dressed", + -11.999149322509766 + ], + [ + "weis", + -11.999165534973145 + ], + [ + "▁donated", + -11.9993314743042 + ], + [ + "also", + -11.99938678741455 + ], + [ + "▁EN", + -11.999405860900879 + ], + [ + "▁imprim", + -11.99942398071289 + ], + [ + "▁onions", + -11.999458312988281 + ], + [ + "Par", + -11.99950122833252 + ], + [ + "▁donate", + -11.99958324432373 + ], + [ + "▁mice", + -11.999610900878906 + ], + [ + "referring", + -11.999897956848145 + ], + [ + "▁restored", + -12.00003433227539 + ], + [ + "▁amateur", + -12.0000581741333 + ], + [ + "▁Switch", + -12.000075340270996 + ], + [ + "appel", + -12.00013542175293 + ], + [ + "▁idéal", + -12.0001859664917 + ], + [ + "▁wheat", + -12.000199317932129 + ], + [ + "▁lime", + -12.000240325927734 + ], + [ + "REA", + -12.00027084350586 + ], + [ + "riti", + -12.000357627868652 + ], + [ + "ţiile", + -12.00058364868164 + ], + [ + "▁machinery", + -12.00064754486084 + ], + [ + "UNE", + -12.00089168548584 + ], + [ + "▁Cont", + -12.000971794128418 + ], + [ + "▁attendees", + -12.001014709472656 + ], + [ + "▁aparat", + -12.001080513000488 + ], + [ + "freundlich", + -12.00117301940918 + ], + [ + "▁zilnic", + -12.001175880432129 + ], + [ + "▁spark", + -12.001421928405762 + ], + [ + "▁Gast", + -12.001459121704102 + ], + [ + "▁Issue", + -12.00147533416748 + ], + [ + "▁scam", + -12.001566886901855 + ], + [ + "▁bonds", + -12.001618385314941 + ], + [ + "owner", + -12.001641273498535 + ], + [ + "▁empfehlen", + -12.001673698425293 + ], + [ + "elia", + -12.001749992370605 + ], + [ + "cic", + -12.001757621765137 + ], + [ + "▁honored", + -12.001800537109375 + ], + [ + "▁castle", + -12.001846313476562 + ], + [ + "avand", + -12.002058982849121 + ], + [ + "rough", + -12.002108573913574 + ], + [ + "▁Address", + -12.002116203308105 + ], + [ + "angle", + -12.00217342376709 + ], + [ + "leton", + -12.002259254455566 + ], + [ + "▁locked", + -12.002392768859863 + ], + [ + "▁consolid", + -12.00248908996582 + ], + [ + "▁voucher", + -12.003011703491211 + ], + [ + "ației", + -12.003201484680176 + ], + [ + "wachsen", + -12.003211975097656 + ], + [ + "▁magazines", + -12.003287315368652 + ], + [ + "▁Schools", + -12.003318786621094 + ], + [ + "▁voices", + -12.003362655639648 + ], + [ + "▁Dry", + -12.003479957580566 + ], + [ + "▁tricks", + -12.00349235534668 + ], + [ + "schließlich", + -12.003546714782715 + ], + [ + "▁loyalty", + -12.003687858581543 + ], + [ + "risk", + -12.003764152526855 + ], + [ + "▁Vers", + -12.003786087036133 + ], + [ + "chester", + -12.003802299499512 + ], + [ + "▁decorated", + -12.003830909729004 + ], + [ + "▁copiilor", + -12.003969192504883 + ], + [ + "riz", + -12.003994941711426 + ], + [ + "03.", + -12.004013061523438 + ], + [ + "▁Hur", + -12.004016876220703 + ], + [ + "▁archive", + -12.004021644592285 + ], + [ + "▁Continue", + -12.004042625427246 + ], + [ + "▁Nähe", + -12.004043579101562 + ], + [ + "jit", + -12.004090309143066 + ], + [ + "gekommen", + -12.004301071166992 + ], + [ + "▁conjunction", + -12.004349708557129 + ], + [ + "combining", + -12.004404067993164 
+ ], + [ + "▁Unterstützung", + -12.004517555236816 + ], + [ + "oza", + -12.004593849182129 + ], + [ + "▁sketch", + -12.004720687866211 + ], + [ + "▁arată", + -12.004731178283691 + ], + [ + "▁Mining", + -12.004765510559082 + ], + [ + "uous", + -12.004791259765625 + ], + [ + "▁devis", + -12.004834175109863 + ], + [ + "Almost", + -12.004862785339355 + ], + [ + "Hu", + -12.005037307739258 + ], + [ + "▁Om", + -12.005366325378418 + ], + [ + "MF", + -12.00544548034668 + ], + [ + "liz", + -12.005451202392578 + ], + [ + "▁fails", + -12.005456924438477 + ], + [ + "▁comparable", + -12.005459785461426 + ], + [ + "▁vein", + -12.005547523498535 + ], + [ + "▁Vis", + -12.00561809539795 + ], + [ + "▁viagra", + -12.005654335021973 + ], + [ + "▁farming", + -12.005678176879883 + ], + [ + "▁Late", + -12.005765914916992 + ], + [ + "geschrieben", + -12.006033897399902 + ], + [ + "hrew", + -12.006103515625 + ], + [ + "▁melt", + -12.006120681762695 + ], + [ + "lager", + -12.006168365478516 + ], + [ + "halte", + -12.006240844726562 + ], + [ + "▁Hotels", + -12.006266593933105 + ], + [ + "▁facebook", + -12.0064058303833 + ], + [ + "▁défi", + -12.006550788879395 + ], + [ + "shore", + -12.006802558898926 + ], + [ + "▁membrane", + -12.006866455078125 + ], + [ + "▁sixth", + -12.006903648376465 + ], + [ + "api", + -12.007003784179688 + ], + [ + "▁Owner", + -12.007222175598145 + ], + [ + "▁(\"", + -12.007234573364258 + ], + [ + "▁$50", + -12.007280349731445 + ], + [ + "▁protective", + -12.007420539855957 + ], + [ + "/2", + -12.007548332214355 + ], + [ + "▁Girls", + -12.007562637329102 + ], + [ + "Gri", + -12.00769329071045 + ], + [ + "▁nouă", + -12.007708549499512 + ], + [ + "▁infections", + -12.007813453674316 + ], + [ + "rân", + -12.007868766784668 + ], + [ + "▁Geb", + -12.0078763961792 + ], + [ + "▁Conseil", + -12.007905006408691 + ], + [ + "▁imagini", + -12.007909774780273 + ], + [ + "▁promotions", + -12.00794792175293 + ], + [ + "▁enforce", + -12.00795841217041 + ], + [ + "▁applicant", + -12.007965087890625 + ], + [ + "▁Apart", + -12.008087158203125 + ], + [ + "▁progression", + -12.008151054382324 + ], + [ + "▁careers", + -12.008511543273926 + ], + [ + "▁litigation", + -12.008533477783203 + ], + [ + "▁Menge", + -12.00866413116455 + ], + [ + "▁Contract", + -12.00871753692627 + ], + [ + "▁Kel", + -12.0087308883667 + ], + [ + "▁réserve", + -12.008769035339355 + ], + [ + "▁Cold", + -12.008870124816895 + ], + [ + "▁larg", + -12.009040832519531 + ], + [ + "▁microwave", + -12.009090423583984 + ], + [ + "▁Whit", + -12.009212493896484 + ], + [ + "▁Technologies", + -12.009381294250488 + ], + [ + "OU", + -12.00949478149414 + ], + [ + "itudine", + -12.00959587097168 + ], + [ + "▁handles", + -12.009895324707031 + ], + [ + "▁proceedings", + -12.009982109069824 + ], + [ + "▁prizes", + -12.010043144226074 + ], + [ + "▁unterstützen", + -12.010062217712402 + ], + [ + "▁piele", + -12.010090827941895 + ], + [ + "▁profound", + -12.010153770446777 + ], + [ + "schließen", + -12.0101957321167 + ], + [ + "▁trafic", + -12.01025104522705 + ], + [ + "▁Nar", + -12.010441780090332 + ], + [ + "▁Gesamt", + -12.0106201171875 + ], + [ + "▁bugs", + -12.010720252990723 + ], + [ + "▁Amy", + -12.010764122009277 + ], + [ + "▁eastern", + -12.010775566101074 + ], + [ + "nice", + -12.010784149169922 + ], + [ + "▁Besuch", + -12.010835647583008 + ], + [ + "▁synth", + -12.010892868041992 + ], + [ + "▁clasa", + -12.011194229125977 + ], + [ + "Book", + -12.01134204864502 + ], + [ + "▁ribbon", + -12.011415481567383 + ], + [ + "▁neues", + -12.011431694030762 + ], + 
[ + "ZE", + -12.011504173278809 + ], + [ + "▁peers", + -12.011613845825195 + ], + [ + "leistung", + -12.011730194091797 + ], + [ + "▁internship", + -12.011808395385742 + ], + [ + "count", + -12.011850357055664 + ], + [ + "nam", + -12.01193618774414 + ], + [ + "▁12-", + -12.012072563171387 + ], + [ + "acked", + -12.012146949768066 + ], + [ + "gonna", + -12.012146949768066 + ], + [ + "▁Dinge", + -12.01215648651123 + ], + [ + "Time", + -12.012299537658691 + ], + [ + "▁twelve", + -12.01242446899414 + ], + [ + "eye", + -12.012432098388672 + ], + [ + "▁avantaj", + -12.01253604888916 + ], + [ + "▁Glas", + -12.012731552124023 + ], + [ + "aucune", + -12.0127534866333 + ], + [ + "▁boil", + -12.012763977050781 + ], + [ + "▁Gray", + -12.012773513793945 + ], + [ + "adapt", + -12.01288890838623 + ], + [ + "occ", + -12.012895584106445 + ], + [ + "▁prieten", + -12.012897491455078 + ], + [ + "▁trai", + -12.01296615600586 + ], + [ + "▁Scal", + -12.013009071350098 + ], + [ + "▁conscious", + -12.013057708740234 + ], + [ + "▁charter", + -12.013093948364258 + ], + [ + "KS", + -12.013242721557617 + ], + [ + "▁Barr", + -12.013404846191406 + ], + [ + "▁summit", + -12.013411521911621 + ], + [ + "▁inflammation", + -12.013439178466797 + ], + [ + "tungs", + -12.013440132141113 + ], + [ + "ovic", + -12.013449668884277 + ], + [ + "▁conduit", + -12.013465881347656 + ], + [ + "▁Alice", + -12.013702392578125 + ], + [ + "▁veterans", + -12.013850212097168 + ], + [ + "Während", + -12.013944625854492 + ], + [ + "▁maximal", + -12.014013290405273 + ], + [ + "▁Hawaii", + -12.014037132263184 + ], + [ + "▁Pine", + -12.01432991027832 + ], + [ + "acelasi", + -12.014391899108887 + ], + [ + "hyp", + -12.014424324035645 + ], + [ + "sensitivity", + -12.01445198059082 + ], + [ + "pour", + -12.014481544494629 + ], + [ + "ре", + -12.014493942260742 + ], + [ + "▁Kentucky", + -12.015129089355469 + ], + [ + "▁badge", + -12.015276908874512 + ], + [ + "affecting", + -12.015310287475586 + ], + [ + "▁chairman", + -12.015311241149902 + ], + [ + "▁München", + -12.015467643737793 + ], + [ + "▁Hersteller", + -12.015469551086426 + ], + [ + "▁urmat", + -12.015615463256836 + ], + [ + "tels", + -12.015654563903809 + ], + [ + "▁FM", + -12.015701293945312 + ], + [ + "▁Basis", + -12.015732765197754 + ], + [ + "▁erklärt", + -12.015809059143066 + ], + [ + "▁changer", + -12.015859603881836 + ], + [ + "tischen", + -12.0159330368042 + ], + [ + "▁brave", + -12.015960693359375 + ], + [ + "▁siguranta", + -12.015986442565918 + ], + [ + "▁partnerships", + -12.015989303588867 + ], + [ + "ților", + -12.015999794006348 + ], + [ + "▁breathe", + -12.016141891479492 + ], + [ + "rink", + -12.016551971435547 + ], + [ + "▁footage", + -12.016654014587402 + ], + [ + "▁transformed", + -12.016658782958984 + ], + [ + "▁prep", + -12.016866683959961 + ], + [ + "▁upset", + -12.016901969909668 + ], + [ + "▁Native", + -12.017059326171875 + ], + [ + "▁Prima", + -12.017154693603516 + ], + [ + "▁jersey", + -12.017163276672363 + ], + [ + "230", + -12.017182350158691 + ], + [ + "▁lucrurile", + -12.017393112182617 + ], + [ + "▁divine", + -12.017502784729004 + ], + [ + "▁Pit", + -12.017593383789062 + ], + [ + "RIS", + -12.01765251159668 + ], + [ + "▁Cultural", + -12.017672538757324 + ], + [ + "▁exotic", + -12.017786979675293 + ], + [ + "▁tastes", + -12.017881393432617 + ], + [ + "▁bargain", + -12.017913818359375 + ], + [ + "▁optimize", + -12.017985343933105 + ], + [ + "▁électrique", + -12.018012046813965 + ], + [ + "deuxième", + -12.018030166625977 + ], + [ + "▁Gary", + -12.018085479736328 + 
], + [ + "▁projection", + -12.018122673034668 + ], + [ + "▁sliding", + -12.018195152282715 + ], + [ + "club", + -12.018216133117676 + ], + [ + "association", + -12.01823902130127 + ], + [ + "▁LG", + -12.018259048461914 + ], + [ + "▁capsule", + -12.018291473388672 + ], + [ + "▁politicians", + -12.018397331237793 + ], + [ + "▁thumb", + -12.018423080444336 + ], + [ + "▁globally", + -12.018743515014648 + ], + [ + "positioned", + -12.018796920776367 + ], + [ + "▁Hamilton", + -12.018861770629883 + ], + [ + "arme", + -12.018881797790527 + ], + [ + "▁efectuat", + -12.018881797790527 + ], + [ + "zip", + -12.019111633300781 + ], + [ + "▁welfare", + -12.019201278686523 + ], + [ + "Leistung", + -12.019230842590332 + ], + [ + "▁Bac", + -12.019316673278809 + ], + [ + "▁fizic", + -12.019338607788086 + ], + [ + "OK", + -12.019454002380371 + ], + [ + "▁limba", + -12.019545555114746 + ], + [ + "▁wardrobe", + -12.019549369812012 + ], + [ + "▁offline", + -12.019627571105957 + ], + [ + "▁fortune", + -12.019665718078613 + ], + [ + "▁dialog", + -12.019681930541992 + ], + [ + "▁dramatically", + -12.01997184753418 + ], + [ + "▁NYC", + -12.020045280456543 + ], + [ + "▁Rem", + -12.02017593383789 + ], + [ + "▁bronze", + -12.020455360412598 + ], + [ + "▁pulse", + -12.02053451538086 + ], + [ + "Fortunately", + -12.020562171936035 + ], + [ + "▁glue", + -12.020596504211426 + ], + [ + "▁Expo", + -12.020720481872559 + ], + [ + "▁profitable", + -12.020776748657227 + ], + [ + "▁distributor", + -12.020845413208008 + ], + [ + "abilité", + -12.020869255065918 + ], + [ + "▁lyrics", + -12.020913124084473 + ], + [ + "▁mesh", + -12.02114486694336 + ], + [ + "▁organizational", + -12.021157264709473 + ], + [ + "▁vanilla", + -12.021249771118164 + ], + [ + "▁foc", + -12.021355628967285 + ], + [ + "▁1984", + -12.02147388458252 + ], + [ + "▁créé", + -12.02172565460205 + ], + [ + "▁servi", + -12.022027969360352 + ], + [ + "▁underneath", + -12.022095680236816 + ], + [ + "▁surveys", + -12.022143363952637 + ], + [ + "▁genes", + -12.022238731384277 + ], + [ + "▁limite", + -12.02224349975586 + ], + [ + "oder", + -12.022247314453125 + ], + [ + "▁mandatory", + -12.022269248962402 + ], + [ + "▁hospitality", + -12.022303581237793 + ], + [ + "▁bikes", + -12.022309303283691 + ], + [ + "▁Quote", + -12.022358894348145 + ], + [ + "glu", + -12.02241039276123 + ], + [ + "▁activitatea", + -12.022513389587402 + ], + [ + "preventing", + -12.022584915161133 + ], + [ + "▁Kh", + -12.02259635925293 + ], + [ + "économie", + -12.022616386413574 + ], + [ + "▁visite", + -12.022757530212402 + ], + [ + "▁spectacle", + -12.022778511047363 + ], + [ + "▁tract", + -12.022860527038574 + ], + [ + "▁quant", + -12.022862434387207 + ], + [ + "▁evolu", + -12.022866249084473 + ], + [ + "▁invata", + -12.023070335388184 + ], + [ + "▁homo", + -12.02311897277832 + ], + [ + "▁Users", + -12.02344799041748 + ], + [ + "introducing", + -12.023632049560547 + ], + [ + "hibi", + -12.023661613464355 + ], + [ + "▁Instrument", + -12.023805618286133 + ], + [ + "▁ép", + -12.023839950561523 + ], + [ + "▁Raj", + -12.023869514465332 + ], + [ + "▁executives", + -12.023881912231445 + ], + [ + "atoire", + -12.023885726928711 + ], + [ + "▁erforderlich", + -12.02397346496582 + ], + [ + "male", + -12.024211883544922 + ], + [ + "umble", + -12.024271011352539 + ], + [ + "erson", + -12.024277687072754 + ], + [ + "▁Treatment", + -12.024286270141602 + ], + [ + "▁Representative", + -12.024314880371094 + ], + [ + "▁corners", + -12.024409294128418 + ], + [ + "▁Petit", + -12.024599075317383 + ], + [ + "8)", + 
-12.02464771270752 + ], + [ + "▁Walker", + -12.024714469909668 + ], + [ + "▁Stir", + -12.02476692199707 + ], + [ + "/19", + -12.024767875671387 + ], + [ + "▁Stelle", + -12.024979591369629 + ], + [ + "ără", + -12.025009155273438 + ], + [ + "osse", + -12.025166511535645 + ], + [ + "2000", + -12.025189399719238 + ], + [ + "▁McG", + -12.025580406188965 + ], + [ + "DV", + -12.025773048400879 + ], + [ + "▁Firm", + -12.025862693786621 + ], + [ + "▁packet", + -12.025904655456543 + ], + [ + "Toate", + -12.02640438079834 + ], + [ + "▁institutional", + -12.026479721069336 + ], + [ + "rug", + -12.026663780212402 + ], + [ + "DG", + -12.026837348937988 + ], + [ + "fine", + -12.026837348937988 + ], + [ + "bringen", + -12.026856422424316 + ], + [ + "▁Horse", + -12.026921272277832 + ], + [ + "▁premiere", + -12.026937484741211 + ], + [ + "▁Că", + -12.027026176452637 + ], + [ + "acheter", + -12.02703857421875 + ], + [ + "▁Afghanistan", + -12.027053833007812 + ], + [ + "▁Prop", + -12.027085304260254 + ], + [ + "ühr", + -12.02715015411377 + ], + [ + "▁braucht", + -12.027398109436035 + ], + [ + "▁sunny", + -12.027424812316895 + ], + [ + "▁Sach", + -12.027461051940918 + ], + [ + "▁volumes", + -12.02753734588623 + ], + [ + "tinut", + -12.02759838104248 + ], + [ + "▁Sho", + -12.027722358703613 + ], + [ + "▁winds", + -12.027735710144043 + ], + [ + "▁Mall", + -12.027873992919922 + ], + [ + "ledge", + -12.027937889099121 + ], + [ + "▁sciences", + -12.027997016906738 + ], + [ + "plication", + -12.028024673461914 + ], + [ + "VR", + -12.028068542480469 + ], + [ + "destin", + -12.028234481811523 + ], + [ + "▁früh", + -12.02833366394043 + ], + [ + "▁tongue", + -12.028359413146973 + ], + [ + "▁Jennifer", + -12.028425216674805 + ], + [ + "▁bracket", + -12.028427124023438 + ], + [ + "▁episodes", + -12.02845287322998 + ], + [ + "breite", + -12.028461456298828 + ], + [ + "▁stoc", + -12.028635025024414 + ], + [ + "ilia", + -12.028728485107422 + ], + [ + "▁Gulf", + -12.02874755859375 + ], + [ + "▁transparency", + -12.028768539428711 + ], + [ + "Industrie", + -12.028853416442871 + ], + [ + "▁viewers", + -12.028916358947754 + ], + [ + "AIN", + -12.029129981994629 + ], + [ + "▁Registration", + -12.029149055480957 + ], + [ + "/4", + -12.029309272766113 + ], + [ + "▁fera", + -12.029337882995605 + ], + [ + "▁06", + -12.029351234436035 + ], + [ + "▁einzu", + -12.029391288757324 + ], + [ + "enburg", + -12.02944278717041 + ], + [ + "▁eff", + -12.029449462890625 + ], + [ + "▁Stage", + -12.029558181762695 + ], + [ + "▁Cour", + -12.029685020446777 + ], + [ + "indu", + -12.029836654663086 + ], + [ + "▁Tools", + -12.029909133911133 + ], + [ + "IST", + -12.029921531677246 + ], + [ + "grund", + -12.030105590820312 + ], + [ + "seitig", + -12.030153274536133 + ], + [ + "pai", + -12.030250549316406 + ], + [ + "▁waist", + -12.030350685119629 + ], + [ + "▁Therapy", + -12.03049373626709 + ], + [ + "▁nomination", + -12.030599594116211 + ], + [ + "▁seama", + -12.030790328979492 + ], + [ + "▁analyse", + -12.030975341796875 + ], + [ + "▁emerge", + -12.031044006347656 + ], + [ + "▁adjustment", + -12.031106948852539 + ], + [ + "▁stroll", + -12.031106948852539 + ], + [ + "▁Beyond", + -12.031174659729004 + ], + [ + "▁legally", + -12.03122615814209 + ], + [ + "▁gauge", + -12.03123664855957 + ], + [ + "▁26,", + -12.031360626220703 + ], + [ + "Tex", + -12.031390190124512 + ], + [ + "economic", + -12.031488418579102 + ], + [ + "stoffe", + -12.031532287597656 + ], + [ + "Wir", + -12.031559944152832 + ], + [ + "ffen", + -12.031601905822754 + ], + [ + "▁acoperi", + 
-12.031609535217285 + ], + [ + "▁finale", + -12.031792640686035 + ], + [ + "▁theoretical", + -12.031864166259766 + ], + [ + "1.3", + -12.031875610351562 + ], + [ + "anim", + -12.031888008117676 + ], + [ + "▁separation", + -12.031928062438965 + ], + [ + "agence", + -12.031937599182129 + ], + [ + "▁réalisé", + -12.032069206237793 + ], + [ + "sprech", + -12.03215503692627 + ], + [ + "▁embedded", + -12.032208442687988 + ], + [ + "▁defence", + -12.032242774963379 + ], + [ + "éni", + -12.032569885253906 + ], + [ + "▁Norman", + -12.032613754272461 + ], + [ + "▁insgesamt", + -12.032621383666992 + ], + [ + "▁reminde", + -12.032631874084473 + ], + [ + "▁timeline", + -12.032703399658203 + ], + [ + "▁symbols", + -12.032770156860352 + ], + [ + "▁booth", + -12.032783508300781 + ], + [ + "▁Window", + -12.032788276672363 + ], + [ + "▁Titan", + -12.032910346984863 + ], + [ + "înt", + -12.033021926879883 + ], + [ + "▁langa", + -12.033021926879883 + ], + [ + "isant", + -12.03303337097168 + ], + [ + "hart", + -12.033113479614258 + ], + [ + "broader", + -12.033266067504883 + ], + [ + "▁stays", + -12.033288955688477 + ], + [ + "dur", + -12.033488273620605 + ], + [ + "▁Actually", + -12.033514022827148 + ], + [ + "works", + -12.03351879119873 + ], + [ + "▁réussi", + -12.03357219696045 + ], + [ + "▁performant", + -12.033658981323242 + ], + [ + "▁banana", + -12.033788681030273 + ], + [ + "▁baked", + -12.033870697021484 + ], + [ + "▁Parlament", + -12.033931732177734 + ], + [ + "▁Legend", + -12.033967018127441 + ], + [ + "toata", + -12.034172058105469 + ], + [ + "platte", + -12.03419017791748 + ], + [ + "▁Mou", + -12.034192085266113 + ], + [ + "HL", + -12.034235000610352 + ], + [ + "▁(8", + -12.034290313720703 + ], + [ + "▁accepting", + -12.034313201904297 + ], + [ + "▁Senator", + -12.034340858459473 + ], + [ + "▁consciousness", + -12.034396171569824 + ], + [ + "▁conducting", + -12.0344820022583 + ], + [ + "▁panic", + -12.034833908081055 + ], + [ + "▁FDA", + -12.035112380981445 + ], + [ + "▁(7", + -12.035163879394531 + ], + [ + "tool", + -12.035300254821777 + ], + [ + "▁Shipping", + -12.03538703918457 + ], + [ + "▁hop", + -12.035545349121094 + ], + [ + "▁conferences", + -12.03564167022705 + ], + [ + "▁pork", + -12.035661697387695 + ], + [ + "▁spam", + -12.035730361938477 + ], + [ + "▁interesant", + -12.035815238952637 + ], + [ + "▁Tagen", + -12.03581714630127 + ], + [ + "sig", + -12.035886764526367 + ], + [ + "étro", + -12.036044120788574 + ], + [ + "▁legendary", + -12.036449432373047 + ], + [ + "▁Alternative", + -12.036643981933594 + ], + [ + "iana", + -12.036704063415527 + ], + [ + "▁responsable", + -12.036888122558594 + ], + [ + "▁Mihai", + -12.037237167358398 + ], + [ + "▁decreased", + -12.037345886230469 + ], + [ + "▁organised", + -12.037485122680664 + ], + [ + "▁Lamp", + -12.037589073181152 + ], + [ + "litz", + -12.037622451782227 + ], + [ + "ohn", + -12.037622451782227 + ], + [ + "▁moteur", + -12.0376615524292 + ], + [ + "III", + -12.03768539428711 + ], + [ + "▁Montag", + -12.037755012512207 + ], + [ + "▁naturel", + -12.037814140319824 + ], + [ + "▁Hus", + -12.037842750549316 + ], + [ + "▁Schl", + -12.037884712219238 + ], + [ + "ains", + -12.037968635559082 + ], + [ + "▁dying", + -12.0380859375 + ], + [ + "▁HIV", + -12.038115501403809 + ], + [ + "],", + -12.038164138793945 + ], + [ + "alität", + -12.03818416595459 + ], + [ + "▁institute", + -12.038249015808105 + ], + [ + "mix", + -12.038433074951172 + ], + [ + "▁Regulation", + -12.038453102111816 + ], + [ + "▁pagina", + -12.03857707977295 + ], + [ + 
"▁Awesome", + -12.03860092163086 + ], + [ + "▁Official", + -12.03860092163086 + ], + [ + "▁Minute", + -12.038601875305176 + ], + [ + "▁dairy", + -12.038787841796875 + ], + [ + "▁carti", + -12.038881301879883 + ], + [ + "isk", + -12.039091110229492 + ], + [ + "▁thrilled", + -12.039138793945312 + ], + [ + "▁german", + -12.039172172546387 + ], + [ + "▁frustration", + -12.039228439331055 + ], + [ + "▁forums", + -12.03927230834961 + ], + [ + "command", + -12.039361000061035 + ], + [ + "▁router", + -12.039399147033691 + ], + [ + "▁Lösung", + -12.039423942565918 + ], + [ + "white", + -12.039470672607422 + ], + [ + "▁synthetic", + -12.039487838745117 + ], + [ + "▁retrouver", + -12.039554595947266 + ], + [ + "alle", + -12.039621353149414 + ], + [ + "daran", + -12.039653778076172 + ], + [ + "▁wahr", + -12.039697647094727 + ], + [ + "▁paths", + -12.039875984191895 + ], + [ + "▁unver", + -12.039962768554688 + ], + [ + "▁Environment", + -12.0400972366333 + ], + [ + "▁médecin", + -12.040510177612305 + ], + [ + "crypt", + -12.040572166442871 + ], + [ + "▁pursuit", + -12.040595054626465 + ], + [ + "flat", + -12.040611267089844 + ], + [ + "bron", + -12.040698051452637 + ], + [ + "▁Specialist", + -12.040852546691895 + ], + [ + "▁Vent", + -12.041157722473145 + ], + [ + "Gen", + -12.04132080078125 + ], + [ + "▁attraction", + -12.04132080078125 + ], + [ + "▁piese", + -12.041372299194336 + ], + [ + "CHE", + -12.041665077209473 + ], + [ + "fähig", + -12.04172420501709 + ], + [ + "▁28,", + -12.041773796081543 + ], + [ + "defender", + -12.041810989379883 + ], + [ + "▁stupid", + -12.04181957244873 + ], + [ + "enfin", + -12.04185962677002 + ], + [ + "▁composite", + -12.04207706451416 + ], + [ + "fragen", + -12.042202949523926 + ], + [ + "Part", + -12.042232513427734 + ], + [ + "may", + -12.042238235473633 + ], + [ + "▁Bucureşti", + -12.042248725891113 + ], + [ + "▁février", + -12.042248725891113 + ], + [ + "RED", + -12.042417526245117 + ], + [ + "▁makers", + -12.042462348937988 + ], + [ + "▁guns", + -12.042594909667969 + ], + [ + "▁pasta", + -12.042706489562988 + ], + [ + "STR", + -12.04271125793457 + ], + [ + "▁worthy", + -12.042760848999023 + ], + [ + "Poate", + -12.042783737182617 + ], + [ + "▁101", + -12.04286003112793 + ], + [ + "▁souhaitez", + -12.04299545288086 + ], + [ + "GN", + -12.043449401855469 + ], + [ + "drive", + -12.043499946594238 + ], + [ + "▁aveti", + -12.043582916259766 + ], + [ + "▁eventual", + -12.043591499328613 + ], + [ + "▁américain", + -12.043642044067383 + ], + [ + "▁Mine", + -12.043678283691406 + ], + [ + "▁sunset", + -12.043729782104492 + ], + [ + "▁Choice", + -12.043844223022461 + ], + [ + "▁offset", + -12.043944358825684 + ], + [ + "APP", + -12.04410457611084 + ], + [ + "▁suchen", + -12.044130325317383 + ], + [ + "▁aduc", + -12.044228553771973 + ], + [ + "▁Unternehmens", + -12.044342041015625 + ], + [ + "▁//", + -12.044651985168457 + ], + [ + "▁astept", + -12.044678688049316 + ], + [ + "▁Birthday", + -12.045061111450195 + ], + [ + "▁barn", + -12.045083999633789 + ], + [ + "apport", + -12.045105934143066 + ], + [ + "▁collar", + -12.045212745666504 + ], + [ + "▁gefunden", + -12.045294761657715 + ], + [ + "▁Hai", + -12.045429229736328 + ], + [ + "▁Soul", + -12.045441627502441 + ], + [ + "ismus", + -12.045654296875 + ], + [ + "letzt", + -12.045754432678223 + ], + [ + "▁maker", + -12.045841217041016 + ], + [ + "▁executed", + -12.045857429504395 + ], + [ + "▁Forschung", + -12.045915603637695 + ], + [ + "▁täglich", + -12.045958518981934 + ], + [ + "▁tailor", + -12.045960426330566 + ], + [ + 
"▁headquarters", + -12.0460844039917 + ], + [ + "▁physicians", + -12.046112060546875 + ], + [ + "▁Scout", + -12.046126365661621 + ], + [ + "folgen", + -12.046175003051758 + ], + [ + "▁cycling", + -12.046184539794922 + ], + [ + "mindestens", + -12.04620361328125 + ], + [ + "▁joli", + -12.046216011047363 + ], + [ + "▁classification", + -12.046225547790527 + ], + [ + "▁Führung", + -12.046258926391602 + ], + [ + "▁peau", + -12.04629135131836 + ], + [ + "INT", + -12.046502113342285 + ], + [ + "▁Garage", + -12.046664237976074 + ], + [ + "teile", + -12.046714782714844 + ], + [ + "util", + -12.046716690063477 + ], + [ + "▁petrec", + -12.046751022338867 + ], + [ + "▁Nevada", + -12.046826362609863 + ], + [ + "▁laisser", + -12.04706859588623 + ], + [ + "▁territoire", + -12.047131538391113 + ], + [ + "▁fichier", + -12.047154426574707 + ], + [ + "▁Formula", + -12.047343254089355 + ], + [ + "scopul", + -12.047379493713379 + ], + [ + "▁Tee", + -12.047486305236816 + ], + [ + "▁Monte", + -12.047529220581055 + ], + [ + "▁pumpkin", + -12.04757022857666 + ], + [ + "▁picnic", + -12.047589302062988 + ], + [ + "▁occupation", + -12.047652244567871 + ], + [ + "▁numérique", + -12.047831535339355 + ], + [ + "linie", + -12.04786491394043 + ], + [ + "▁masina", + -12.048117637634277 + ], + [ + "▁Prä", + -12.048173904418945 + ], + [ + "▁dezvoltare", + -12.048177719116211 + ], + [ + "▁vient", + -12.048291206359863 + ], + [ + "▁ranks", + -12.048295021057129 + ], + [ + "▁Bruce", + -12.048420906066895 + ], + [ + "▁seara", + -12.048433303833008 + ], + [ + "▁hungry", + -12.048563003540039 + ], + [ + "▁resolved", + -12.048650741577148 + ], + [ + "paired", + -12.048735618591309 + ], + [ + "▁Congratulations", + -12.048881530761719 + ], + [ + "▁religi", + -12.048918724060059 + ], + [ + "sätze", + -12.04897689819336 + ], + [ + "▁Eat", + -12.049172401428223 + ], + [ + "▁dense", + -12.049442291259766 + ], + [ + "▁slice", + -12.049447059631348 + ], + [ + "▁mulți", + -12.049463272094727 + ], + [ + "▁vorbe", + -12.049517631530762 + ], + [ + "▁terminate", + -12.049779891967773 + ], + [ + "worm", + -12.049880981445312 + ], + [ + "ignon", + -12.0499267578125 + ], + [ + "▁Howard", + -12.049992561340332 + ], + [ + "▁toddler", + -12.050017356872559 + ], + [ + "▁waters", + -12.050033569335938 + ], + [ + "▁graduates", + -12.0501708984375 + ], + [ + "▁fundraising", + -12.050298690795898 + ], + [ + "06.", + -12.05031967163086 + ], + [ + "▁scent", + -12.050346374511719 + ], + [ + "▁CPU", + -12.050406455993652 + ], + [ + "▁Kid", + -12.05045223236084 + ], + [ + "▁Years", + -12.050460815429688 + ], + [ + "▁Oktober", + -12.05063533782959 + ], + [ + "filled", + -12.050726890563965 + ], + [ + "▁Laser", + -12.05079460144043 + ], + [ + "▁tut", + -12.051032066345215 + ], + [ + "ively", + -12.051101684570312 + ], + [ + "▁WiFi", + -12.051161766052246 + ], + [ + "standen", + -12.051176071166992 + ], + [ + "▁publié", + -12.051243782043457 + ], + [ + "▁explaining", + -12.051279067993164 + ], + [ + "trieb", + -12.051288604736328 + ], + [ + "▁Rapid", + -12.0513334274292 + ], + [ + "▁unterstützt", + -12.051352500915527 + ], + [ + "▁Sonnen", + -12.051401138305664 + ], + [ + "▁lenses", + -12.05141544342041 + ], + [ + "▁pressing", + -12.051477432250977 + ], + [ + "▁respected", + -12.051657676696777 + ], + [ + "adapted", + -12.051706314086914 + ], + [ + "Don", + -12.051726341247559 + ], + [ + "▁mun", + -12.051733016967773 + ], + [ + "MAR", + -12.05180835723877 + ], + [ + "▁seam", + -12.051852226257324 + ], + [ + "chev", + -12.052140235900879 + ], + [ + "▁Sozial", + 
-12.052424430847168 + ], + [ + "▁Arabia", + -12.052485466003418 + ], + [ + "▁equation", + -12.05257511138916 + ], + [ + "▁elevi", + -12.052780151367188 + ], + [ + "▁piata", + -12.052868843078613 + ], + [ + "JA", + -12.052873611450195 + ], + [ + "▁wholesale", + -12.052887916564941 + ], + [ + "▁faithful", + -12.05296516418457 + ], + [ + "legal", + -12.053092002868652 + ], + [ + "▁Brexit", + -12.053095817565918 + ], + [ + "vention", + -12.053120613098145 + ], + [ + "▁adhere", + -12.053221702575684 + ], + [ + "▁Associate", + -12.053257942199707 + ], + [ + "▁decorations", + -12.053272247314453 + ], + [ + "▁crois", + -12.053359985351562 + ], + [ + "buck", + -12.053370475769043 + ], + [ + "▁smartphones", + -12.053421020507812 + ], + [ + "Regardless", + -12.053427696228027 + ], + [ + "center", + -12.053434371948242 + ], + [ + "eiß", + -12.053481101989746 + ], + [ + "▁emotion", + -12.053584098815918 + ], + [ + "▁Gespräch", + -12.053797721862793 + ], + [ + "▁Avi", + -12.053963661193848 + ], + [ + "▁loft", + -12.054059982299805 + ], + [ + "▁Wissen", + -12.054391860961914 + ], + [ + "▁orchestra", + -12.05439567565918 + ], + [ + "▁gehören", + -12.054421424865723 + ], + [ + "▁Reich", + -12.054532051086426 + ], + [ + "▁abandoned", + -12.054548263549805 + ], + [ + "▁Lanka", + -12.054586410522461 + ], + [ + "pala", + -12.054832458496094 + ], + [ + "▁Stell", + -12.054838180541992 + ], + [ + "logged", + -12.054924964904785 + ], + [ + "terie", + -12.054935455322266 + ], + [ + "▁educa", + -12.054954528808594 + ], + [ + "1).", + -12.055097579956055 + ], + [ + "▁disponibil", + -12.055119514465332 + ], + [ + "IND", + -12.055197715759277 + ], + [ + "▁Pont", + -12.055288314819336 + ], + [ + "▁téléphone", + -12.055398941040039 + ], + [ + "▁rope", + -12.055595397949219 + ], + [ + "ève", + -12.055622100830078 + ], + [ + "▁Trainer", + -12.056062698364258 + ], + [ + "▁présence", + -12.0560941696167 + ], + [ + "▁Oscar", + -12.056121826171875 + ], + [ + "▁VR", + -12.056342124938965 + ], + [ + "▁Besucher", + -12.056357383728027 + ], + [ + "▁disponibles", + -12.056447982788086 + ], + [ + "▁gelten", + -12.056604385375977 + ], + [ + "▁ports", + -12.056645393371582 + ], + [ + "Invest", + -12.056693077087402 + ], + [ + "ésormais", + -12.056795120239258 + ], + [ + "schauen", + -12.056880950927734 + ], + [ + "▁Command", + -12.056958198547363 + ], + [ + "▁alternate", + -12.05709171295166 + ], + [ + "citation", + -12.05713939666748 + ], + [ + "évolution", + -12.05714225769043 + ], + [ + "▁Maine", + -12.057145118713379 + ], + [ + "pflege", + -12.057174682617188 + ], + [ + "2011", + -12.057343482971191 + ], + [ + "▁Ground", + -12.057364463806152 + ], + [ + "▁ghost", + -12.057418823242188 + ], + [ + "lebt", + -12.057530403137207 + ], + [ + "▁scenarios", + -12.057595252990723 + ], + [ + "▁mall", + -12.057634353637695 + ], + [ + "▁Kings", + -12.057653427124023 + ], + [ + "▁15%", + -12.057848930358887 + ], + [ + "▁Paint", + -12.057848930358887 + ], + [ + "FD", + -12.057849884033203 + ], + [ + "ugg", + -12.058011054992676 + ], + [ + "▁Leon", + -12.058023452758789 + ], + [ + "▁grows", + -12.058135032653809 + ], + [ + "▁pharmacy", + -12.058384895324707 + ], + [ + "▁situat", + -12.0584135055542 + ], + [ + "20,000", + -12.05855941772461 + ], + [ + "▁10,000", + -12.058760643005371 + ], + [ + "▁membre", + -12.058771133422852 + ], + [ + "▁facilement", + -12.058806419372559 + ], + [ + "▁Analytics", + -12.058915138244629 + ], + [ + "▁Marvel", + -12.058930397033691 + ], + [ + "▁survived", + -12.059097290039062 + ], + [ + "▁conviction", + 
-12.059124946594238 + ], + [ + "▁Produktion", + -12.059260368347168 + ], + [ + "▁professionally", + -12.059293746948242 + ], + [ + "▁contributor", + -12.059486389160156 + ], + [ + "▁Kurs", + -12.059503555297852 + ], + [ + "▁humor", + -12.059549331665039 + ], + [ + "▁cinci", + -12.059609413146973 + ], + [ + "▁Different", + -12.059670448303223 + ], + [ + "▁Verarbeitung", + -12.059800148010254 + ], + [ + "▁inexpensive", + -12.059800148010254 + ], + [ + "▁sortie", + -12.05980110168457 + ], + [ + "▁thankful", + -12.059951782226562 + ], + [ + "▁vacances", + -12.059978485107422 + ], + [ + "▁vergangen", + -12.059979438781738 + ], + [ + "▁wings", + -12.05998420715332 + ], + [ + "▁nano", + -12.06003475189209 + ], + [ + "▁touches", + -12.060088157653809 + ], + [ + "▁Notice", + -12.060348510742188 + ], + [ + "▁reprezinta", + -12.060466766357422 + ], + [ + "▁rewarding", + -12.060555458068848 + ], + [ + "▁Kurz", + -12.060580253601074 + ], + [ + "▁mega", + -12.060611724853516 + ], + [ + "▁secrets", + -12.060646057128906 + ], + [ + "▁vorher", + -12.060667037963867 + ], + [ + "▁crescut", + -12.06074333190918 + ], + [ + "▁coordination", + -12.060754776000977 + ], + [ + "▁dissertation", + -12.060863494873047 + ], + [ + "▁header", + -12.060873985290527 + ], + [ + "existent", + -12.061070442199707 + ], + [ + "thal", + -12.061185836791992 + ], + [ + "▁translate", + -12.061214447021484 + ], + [ + "vertrag", + -12.06124210357666 + ], + [ + "GU", + -12.06126594543457 + ], + [ + "▁Arthur", + -12.061315536499023 + ], + [ + "wahl", + -12.061534881591797 + ], + [ + "▁octobre", + -12.061573028564453 + ], + [ + "▁bother", + -12.06157398223877 + ], + [ + "▁pencil", + -12.061580657958984 + ], + [ + "▁Dyna", + -12.061604499816895 + ], + [ + "▁complimentary", + -12.061651229858398 + ], + [ + "écoute", + -12.061676979064941 + ], + [ + "PB", + -12.061722755432129 + ], + [ + "▁independently", + -12.061759948730469 + ], + [ + "▁targeting", + -12.061840057373047 + ], + [ + "fought", + -12.061944961547852 + ], + [ + "mental", + -12.062112808227539 + ], + [ + "▁Veranstaltung", + -12.062300682067871 + ], + [ + "▁tatsächlich", + -12.062314987182617 + ], + [ + "▁Features", + -12.0625 + ], + [ + "▁1920", + -12.062554359436035 + ], + [ + "▁Domain", + -12.062885284423828 + ], + [ + "▁rally", + -12.062901496887207 + ], + [ + "▁iunie", + -12.063036918640137 + ], + [ + "▁fabrics", + -12.063070297241211 + ], + [ + "▁mint", + -12.063331604003906 + ], + [ + "▁antioxidant", + -12.063347816467285 + ], + [ + "hut", + -12.063432693481445 + ], + [ + "EPA", + -12.063496589660645 + ], + [ + "▁rigid", + -12.063498497009277 + ], + [ + "▁evit", + -12.063549995422363 + ], + [ + "▁personnage", + -12.063977241516113 + ], + [ + "▁garanti", + -12.0640287399292 + ], + [ + "▁Hä", + -12.064042091369629 + ], + [ + "▁Days", + -12.064048767089844 + ], + [ + "boarding", + -12.064050674438477 + ], + [ + "jemand", + -12.064166069030762 + ], + [ + "▁Pos", + -12.064262390136719 + ], + [ + "▁wool", + -12.064288139343262 + ], + [ + "▁boom", + -12.064349174499512 + ], + [ + "▁wichtige", + -12.06447982788086 + ], + [ + "▁emerged", + -12.064517974853516 + ], + [ + "▁smoothly", + -12.064802169799805 + ], + [ + "▁Interview", + -12.064942359924316 + ], + [ + "gemäß", + -12.06505012512207 + ], + [ + "▁suivi", + -12.065064430236816 + ], + [ + "▁missions", + -12.065129280090332 + ], + [ + "▁Kreis", + -12.065328598022461 + ], + [ + "century", + -12.065348625183105 + ], + [ + "▁tuned", + -12.065370559692383 + ], + [ + "isieren", + -12.065407752990723 + ], + [ + "▁Branch", + 
-12.065427780151367 + ], + [ + "▁Russell", + -12.065483093261719 + ], + [ + "▁**", + -12.065519332885742 + ], + [ + "▁Lehr", + -12.065617561340332 + ], + [ + "▁perspectives", + -12.065690040588379 + ], + [ + "▁handed", + -12.06570816040039 + ], + [ + "▁apporte", + -12.065743446350098 + ], + [ + "unta", + -12.065959930419922 + ], + [ + "▁contemplat", + -12.066255569458008 + ], + [ + "riel", + -12.06633472442627 + ], + [ + "▁freely", + -12.066341400146484 + ], + [ + "▁loyal", + -12.066451072692871 + ], + [ + "▁evolved", + -12.066518783569336 + ], + [ + "▁Cafe", + -12.066548347473145 + ], + [ + "▁assignments", + -12.066598892211914 + ], + [ + "▁Cream", + -12.066718101501465 + ], + [ + "▁Build", + -12.066731452941895 + ], + [ + "▁exams", + -12.066746711730957 + ], + [ + "▁graduation", + -12.066765785217285 + ], + [ + "▁Dining", + -12.066773414611816 + ], + [ + "inne", + -12.06684398651123 + ], + [ + "▁propriu", + -12.067055702209473 + ], + [ + "▁accordingly", + -12.067241668701172 + ], + [ + "▁seniors", + -12.067484855651855 + ], + [ + "▁sisters", + -12.067505836486816 + ], + [ + "formerly", + -12.067658424377441 + ], + [ + "▁fleur", + -12.067702293395996 + ], + [ + "▁alten", + -12.067802429199219 + ], + [ + "▁Gefühl", + -12.06797981262207 + ], + [ + "▁freeze", + -12.068222045898438 + ], + [ + "▁structured", + -12.068312644958496 + ], + [ + "▁reserved", + -12.068367004394531 + ], + [ + "stellt", + -12.068638801574707 + ], + [ + "▁foto", + -12.068668365478516 + ], + [ + "linger", + -12.06871223449707 + ], + [ + "▁profiter", + -12.068737030029297 + ], + [ + "▁trup", + -12.068862915039062 + ], + [ + "▁Hunter", + -12.068974494934082 + ], + [ + "▁widespread", + -12.069050788879395 + ], + [ + "entretien", + -12.069242477416992 + ], + [ + "▁Truck", + -12.06958293914795 + ], + [ + "Can", + -12.069656372070312 + ], + [ + "péri", + -12.06976318359375 + ], + [ + "▁>>", + -12.069926261901855 + ], + [ + "▁trains", + -12.070141792297363 + ], + [ + "▁faca", + -12.070149421691895 + ], + [ + "▁Patienten", + -12.070170402526855 + ], + [ + "▁scor", + -12.070361137390137 + ], + [ + "▁perceived", + -12.070384979248047 + ], + [ + "setzung", + -12.070393562316895 + ], + [ + "▁Robin", + -12.070558547973633 + ], + [ + "▁geboren", + -12.07060718536377 + ], + [ + "lons", + -12.070687294006348 + ], + [ + "inţa", + -12.070836067199707 + ], + [ + "glob", + -12.070887565612793 + ], + [ + "subsequently", + -12.07111930847168 + ], + [ + "▁vet", + -12.071170806884766 + ], + [ + "▁Holland", + -12.071328163146973 + ], + [ + "▁Clinical", + -12.071370124816895 + ], + [ + "▁uncertainty", + -12.071381568908691 + ], + [ + "hohen", + -12.071386337280273 + ], + [ + "uza", + -12.071431159973145 + ], + [ + "▁kleiner", + -12.071518898010254 + ], + [ + "▁substances", + -12.07155704498291 + ], + [ + "ados", + -12.071627616882324 + ], + [ + "wheel", + -12.07178020477295 + ], + [ + "▁cone", + -12.071990966796875 + ], + [ + "▁castig", + -12.072218894958496 + ], + [ + "▁Conditions", + -12.072242736816406 + ], + [ + "minus", + -12.072643280029297 + ], + [ + "▁permits", + -12.07265853881836 + ], + [ + "fond", + -12.072784423828125 + ], + [ + "▁reactions", + -12.07278823852539 + ], + [ + "▁Mario", + -12.072819709777832 + ], + [ + "▁materiale", + -12.07291030883789 + ], + [ + "AH", + -12.072924613952637 + ], + [ + "▁juillet", + -12.073172569274902 + ], + [ + "▁juridic", + -12.073182106018066 + ], + [ + "▁dropping", + -12.073200225830078 + ], + [ + "expérience", + -12.073225021362305 + ], + [ + "▁depot", + -12.073345184326172 + ], + [ + "▁plea", + 
-12.073490142822266 + ], + [ + "dezvoltarea", + -12.073512077331543 + ], + [ + "▁Independent", + -12.07363224029541 + ], + [ + "▁Homes", + -12.073674201965332 + ], + [ + "▁crust", + -12.073808670043945 + ], + [ + "▁pillow", + -12.073899269104004 + ], + [ + "kreis", + -12.073920249938965 + ], + [ + "▁boiler", + -12.073928833007812 + ], + [ + "latin", + -12.073978424072266 + ], + [ + "▁stet", + -12.074131965637207 + ], + [ + "GH", + -12.074143409729004 + ], + [ + "▁absent", + -12.074334144592285 + ], + [ + "▁Directors", + -12.074501037597656 + ], + [ + "zwischen", + -12.07462215423584 + ], + [ + "▁comprendre", + -12.07465648651123 + ], + [ + "▁25,", + -12.074832916259766 + ], + [ + "▁pharmaceutical", + -12.075145721435547 + ], + [ + "▁placeholder", + -12.075174331665039 + ], + [ + "KI", + -12.075176239013672 + ], + [ + "▁români", + -12.07540225982666 + ], + [ + "▁Dollar", + -12.075509071350098 + ], + [ + "▁Operations", + -12.075525283813477 + ], + [ + "▁Dublin", + -12.075550079345703 + ], + [ + "▁drawings", + -12.0756196975708 + ], + [ + "▁respir", + -12.075769424438477 + ], + [ + "▁haul", + -12.0758056640625 + ], + [ + "Obviously", + -12.075864791870117 + ], + [ + "▁Beat", + -12.075864791870117 + ], + [ + "▁jeans", + -12.07590103149414 + ], + [ + "▁Masters", + -12.075927734375 + ], + [ + "▁bits", + -12.076213836669922 + ], + [ + "poți", + -12.076226234436035 + ], + [ + "▁asigur", + -12.076228141784668 + ], + [ + "▁intampla", + -12.076228141784668 + ], + [ + "▁marc", + -12.076282501220703 + ], + [ + "......", + -12.076404571533203 + ], + [ + "▁districts", + -12.076437950134277 + ], + [ + "cru", + -12.076457023620605 + ], + [ + "nav", + -12.076608657836914 + ], + [ + "huile", + -12.076644897460938 + ], + [ + "▁limitation", + -12.076647758483887 + ], + [ + "boat", + -12.076712608337402 + ], + [ + "IRE", + -12.076720237731934 + ], + [ + "Unis", + -12.07675838470459 + ], + [ + "dated", + -12.0769624710083 + ], + [ + "▁consultants", + -12.07699203491211 + ], + [ + "▁Josh", + -12.077007293701172 + ], + [ + "tanz", + -12.077184677124023 + ], + [ + "launching", + -12.0772066116333 + ], + [ + "▁browsing", + -12.077310562133789 + ], + [ + "▁incerc", + -12.077314376831055 + ], + [ + "▁27,", + -12.077375411987305 + ], + [ + "не", + -12.077398300170898 + ], + [ + "wig", + -12.077415466308594 + ], + [ + "▁spar", + -12.077458381652832 + ], + [ + "▁token", + -12.077547073364258 + ], + [ + "▁09", + -12.077548027038574 + ], + [ + "spa", + -12.07766056060791 + ], + [ + "ometer", + -12.07772159576416 + ], + [ + "▁riders", + -12.077869415283203 + ], + [ + "▁Drop", + -12.077898979187012 + ], + [ + "RN", + -12.078103065490723 + ], + [ + "▁pairs", + -12.07815933227539 + ], + [ + "▁psychology", + -12.078420639038086 + ], + [ + "▁Douglas", + -12.078437805175781 + ], + [ + "▁verwenden", + -12.078516960144043 + ], + [ + "▁(9", + -12.07857894897461 + ], + [ + "▁Rental", + -12.078728675842285 + ], + [ + "▁délai", + -12.078847885131836 + ], + [ + "▁sooner", + -12.078882217407227 + ], + [ + "▁bankruptcy", + -12.079109191894531 + ], + [ + "04.", + -12.079110145568848 + ], + [ + "abend", + -12.079194068908691 + ], + [ + "çon", + -12.079237937927246 + ], + [ + "▁Ple", + -12.079243659973145 + ], + [ + "fug", + -12.079337120056152 + ], + [ + "▁Wohnung", + -12.079410552978516 + ], + [ + "▁Preise", + -12.079424858093262 + ], + [ + "▁Kay", + -12.079427719116211 + ], + [ + "▁notify", + -12.079474449157715 + ], + [ + "▁Brain", + -12.079534530639648 + ], + [ + "▁optical", + -12.079580307006836 + ], + [ + "▁modifications", + 
-12.079727172851562 + ], + [ + "▁repos", + -12.07999324798584 + ], + [ + "▁worksheet", + -12.0800142288208 + ], + [ + "continu", + -12.08005428314209 + ], + [ + "▁assumed", + -12.08059024810791 + ], + [ + "varying", + -12.080626487731934 + ], + [ + "feier", + -12.080643653869629 + ], + [ + "▁Freedom", + -12.080717086791992 + ], + [ + "▁Inhalte", + -12.080740928649902 + ], + [ + "▁observations", + -12.080755233764648 + ], + [ + "▁Gruppe", + -12.080791473388672 + ], + [ + "▁Cyber", + -12.080883979797363 + ], + [ + "hort", + -12.080889701843262 + ], + [ + "▁langue", + -12.080915451049805 + ], + [ + "führen", + -12.08110523223877 + ], + [ + "ganze", + -12.081254005432129 + ], + [ + "▁forte", + -12.081327438354492 + ], + [ + "▁Stefan", + -12.081376075744629 + ], + [ + "▁Jetzt", + -12.081463813781738 + ], + [ + "mehr", + -12.081489562988281 + ], + [ + "trip", + -12.081549644470215 + ], + [ + "▁poem", + -12.081583976745605 + ], + [ + "▁practitioners", + -12.081720352172852 + ], + [ + "▁connector", + -12.08177661895752 + ], + [ + "ECT", + -12.081794738769531 + ], + [ + "▁inseamna", + -12.081820487976074 + ], + [ + "addressing", + -12.081867218017578 + ], + [ + "▁beliebt", + -12.081908226013184 + ], + [ + "▁Mama", + -12.082002639770508 + ], + [ + "▁fade", + -12.08204460144043 + ], + [ + "messen", + -12.08205509185791 + ], + [ + "▁Visa", + -12.082080841064453 + ], + [ + "▁Meta", + -12.082154273986816 + ], + [ + "lene", + -12.082188606262207 + ], + [ + "▁remembered", + -12.082334518432617 + ], + [ + "/3", + -12.082337379455566 + ], + [ + "apte", + -12.082347869873047 + ], + [ + "▁uncomfortable", + -12.082364082336426 + ], + [ + "▁romance", + -12.08253002166748 + ], + [ + "▁réalis", + -12.082601547241211 + ], + [ + "▁Vincent", + -12.082706451416016 + ], + [ + "▁ABC", + -12.08275318145752 + ], + [ + "▁handicap", + -12.082756042480469 + ], + [ + "▁Shin", + -12.082801818847656 + ], + [ + "▁Hunde", + -12.082847595214844 + ], + [ + "▁Ach", + -12.083131790161133 + ], + [ + "▁Questions", + -12.083136558532715 + ], + [ + "▁particles", + -12.083226203918457 + ], + [ + "usch", + -12.083230018615723 + ], + [ + "▁SUV", + -12.083279609680176 + ], + [ + "▁Tous", + -12.083301544189453 + ], + [ + "▁empower", + -12.08336067199707 + ], + [ + "▁Yi", + -12.083446502685547 + ], + [ + "▁LinkedIn", + -12.083453178405762 + ], + [ + "▁Profile", + -12.083507537841797 + ], + [ + "▁surround", + -12.083553314208984 + ], + [ + "▁wh", + -12.083560943603516 + ], + [ + "▁Weiter", + -12.083577156066895 + ], + [ + "▁Weight", + -12.083672523498535 + ], + [ + "▁creatures", + -12.083807945251465 + ], + [ + "Especially", + -12.08381462097168 + ], + [ + "▁repede", + -12.08383560180664 + ], + [ + "▁albums", + -12.083885192871094 + ], + [ + "▁compatibil", + -12.0839204788208 + ], + [ + "▁Interesse", + -12.083929061889648 + ], + [ + "abili", + -12.084062576293945 + ], + [ + "▁roast", + -12.084310531616211 + ], + [ + "▁unii", + -12.084310531616211 + ], + [ + "▁Glad", + -12.084421157836914 + ], + [ + "▁enthusiasm", + -12.084539413452148 + ], + [ + "▁whisk", + -12.084547996520996 + ], + [ + "▁freezer", + -12.084712982177734 + ], + [ + "▁stolen", + -12.084715843200684 + ], + [ + "▁neighbour", + -12.084883689880371 + ], + [ + "▁sake", + -12.084967613220215 + ], + [ + "▁Effect", + -12.0850191116333 + ], + [ + "▁fighter", + -12.085044860839844 + ], + [ + "▁tranquil", + -12.085084915161133 + ], + [ + "▁organizer", + -12.085199356079102 + ], + [ + "pixel", + -12.085306167602539 + ], + [ + "▁Guest", + -12.085338592529297 + ], + [ + "▁Philipp", + 
-12.085369110107422 + ], + [ + "kunft", + -12.085382461547852 + ], + [ + "▁Meer", + -12.085409164428711 + ], + [ + "▁inviting", + -12.085432052612305 + ], + [ + "gänge", + -12.085450172424316 + ], + [ + "▁Position", + -12.085627555847168 + ], + [ + "giving", + -12.085693359375 + ], + [ + "▁marble", + -12.085807800292969 + ], + [ + "▁neg", + -12.085813522338867 + ], + [ + "▁Haar", + -12.085914611816406 + ], + [ + "Ein", + -12.086039543151855 + ], + [ + "▁buses", + -12.086187362670898 + ], + [ + "▁Lodge", + -12.086188316345215 + ], + [ + "soare", + -12.086319923400879 + ], + [ + "▁Barn", + -12.086409568786621 + ], + [ + "▁captain", + -12.086527824401855 + ], + [ + "▁Fix", + -12.08657169342041 + ], + [ + "ulate", + -12.086629867553711 + ], + [ + "ență", + -12.086709022521973 + ], + [ + "▁finances", + -12.086770057678223 + ], + [ + "▁VIP", + -12.086800575256348 + ], + [ + "▁Adams", + -12.086801528930664 + ], + [ + "▁spécialisé", + -12.086960792541504 + ], + [ + "▁fortunate", + -12.087236404418945 + ], + [ + "ility", + -12.087345123291016 + ], + [ + "▁democracy", + -12.08749771118164 + ], + [ + "shu", + -12.087580680847168 + ], + [ + "▁consiste", + -12.087624549865723 + ], + [ + "▁tort", + -12.087692260742188 + ], + [ + "▁branding", + -12.087793350219727 + ], + [ + "▁porch", + -12.08780288696289 + ], + [ + "UNI", + -12.087867736816406 + ], + [ + "▁placut", + -12.087915420532227 + ], + [ + "▁coupled", + -12.088058471679688 + ], + [ + "▁ministre", + -12.088187217712402 + ], + [ + "▁minerals", + -12.088335037231445 + ], + [ + "▁safer", + -12.088335990905762 + ], + [ + "▁outlets", + -12.088438034057617 + ], + [ + "▁caution", + -12.08864688873291 + ], + [ + "▁lightly", + -12.0886869430542 + ], + [ + "▁utilizator", + -12.088700294494629 + ], + [ + "▁Pala", + -12.088959693908691 + ], + [ + "▁doll", + -12.088961601257324 + ], + [ + "(1)", + -12.089065551757812 + ], + [ + "chol", + -12.089120864868164 + ], + [ + "▁Left", + -12.08919620513916 + ], + [ + "▁roulant", + -12.089277267456055 + ], + [ + "▁propune", + -12.089301109313965 + ], + [ + "▁Cred", + -12.089339256286621 + ], + [ + "▁negotiations", + -12.089362144470215 + ], + [ + "amba", + -12.089393615722656 + ], + [ + "▁grasp", + -12.089420318603516 + ], + [ + "▁Amsterdam", + -12.089451789855957 + ], + [ + "▁Zweck", + -12.08945369720459 + ], + [ + "▁conven", + -12.089563369750977 + ], + [ + "▁organizing", + -12.089574813842773 + ], + [ + "section", + -12.089618682861328 + ], + [ + "▁endeavor", + -12.089634895324707 + ], + [ + "▁basics", + -12.089722633361816 + ], + [ + "jud", + -12.089874267578125 + ], + [ + "▁yarn", + -12.090049743652344 + ], + [ + "▁shout", + -12.09009075164795 + ], + [ + "fällt", + -12.090285301208496 + ], + [ + "▁dragoste", + -12.09054946899414 + ], + [ + "▁Rein", + -12.090594291687012 + ], + [ + "Cal", + -12.090688705444336 + ], + [ + "▁deaths", + -12.090729713439941 + ], + [ + "▁24,", + -12.0907564163208 + ], + [ + "▁măr", + -12.090773582458496 + ], + [ + "server", + -12.090825080871582 + ], + [ + "▁explic", + -12.09085464477539 + ], + [ + "▁sufer", + -12.090903282165527 + ], + [ + "▁lucrări", + -12.091097831726074 + ], + [ + "▁Disease", + -12.091126441955566 + ], + [ + "▁prescribed", + -12.091194152832031 + ], + [ + "prozess", + -12.091285705566406 + ], + [ + "▁dessin", + -12.091343879699707 + ], + [ + "▁refuge", + -12.091473579406738 + ], + [ + "▁cope", + -12.091631889343262 + ], + [ + "pole", + -12.09196949005127 + ], + [ + "▁vacant", + -12.091984748840332 + ], + [ + "▁sezon", + -12.092035293579102 + ], + [ + "▁Carbon", + 
-12.092227935791016 + ], + [ + "▁goût", + -12.092233657836914 + ], + [ + "Ste", + -12.092320442199707 + ], + [ + "▁surroundings", + -12.092754364013672 + ], + [ + "definite", + -12.09284496307373 + ], + [ + "▁adaptation", + -12.093358993530273 + ], + [ + "cteur", + -12.0933837890625 + ], + [ + "System", + -12.093442916870117 + ], + [ + "▁Burg", + -12.093550682067871 + ], + [ + "▁retention", + -12.093579292297363 + ], + [ + "examen", + -12.093618392944336 + ], + [ + "▁adjustments", + -12.093668937683105 + ], + [ + "nies", + -12.094213485717773 + ], + [ + "▁RSS", + -12.094215393066406 + ], + [ + "▁Umwelt", + -12.094259262084961 + ], + [ + "▁strengths", + -12.094326972961426 + ], + [ + "loom", + -12.094401359558105 + ], + [ + "▁pics", + -12.094404220581055 + ], + [ + "phase", + -12.09443187713623 + ], + [ + "▁Poland", + -12.094472885131836 + ], + [ + "▁practicing", + -12.094558715820312 + ], + [ + "monetary", + -12.094756126403809 + ], + [ + "▁embodiment", + -12.094756126403809 + ], + [ + "▁jocuri", + -12.094846725463867 + ], + [ + "▁impreuna", + -12.094939231872559 + ], + [ + "▁Lyon", + -12.094985961914062 + ], + [ + "keeping", + -12.095157623291016 + ], + [ + "▁Starting", + -12.095202445983887 + ], + [ + "▁începe", + -12.095357894897461 + ], + [ + "▁clay", + -12.095440864562988 + ], + [ + "bildung", + -12.095444679260254 + ], + [ + "Technologie", + -12.095513343811035 + ], + [ + "toxic", + -12.095624923706055 + ], + [ + "▁gasit", + -12.095819473266602 + ], + [ + "rott", + -12.095870018005371 + ], + [ + "brook", + -12.095935821533203 + ], + [ + "▁wann", + -12.096029281616211 + ], + [ + "▁lined", + -12.09610366821289 + ], + [ + "▁Chelsea", + -12.096223831176758 + ], + [ + "▁Orlando", + -12.096224784851074 + ], + [ + "▁Otherwise", + -12.096267700195312 + ], + [ + "▁debit", + -12.096273422241211 + ], + [ + "▁entsprechend", + -12.09648323059082 + ], + [ + "nism", + -12.09654426574707 + ], + [ + "issen", + -12.09664535522461 + ], + [ + "▁rendez", + -12.096646308898926 + ], + [ + "▁processus", + -12.096745491027832 + ], + [ + "mbi", + -12.096890449523926 + ], + [ + "▁Graduate", + -12.096960067749023 + ], + [ + "▁cozy", + -12.097119331359863 + ], + [ + "▁Freunde", + -12.097320556640625 + ], + [ + "▁teme", + -12.097389221191406 + ], + [ + "▁bias", + -12.097548484802246 + ], + [ + "102", + -12.09756851196289 + ], + [ + "terrorism", + -12.09770679473877 + ], + [ + "threatening", + -12.097756385803223 + ], + [ + "ни", + -12.097776412963867 + ], + [ + "▁Sonntag", + -12.098062515258789 + ], + [ + "▁efect", + -12.098116874694824 + ], + [ + "▁prayers", + -12.098134994506836 + ], + [ + "▁backpack", + -12.09841537475586 + ], + [ + "?)", + -12.098489761352539 + ], + [ + "▁searches", + -12.098788261413574 + ], + [ + "ouverture", + -12.09880256652832 + ], + [ + "▁sustained", + -12.098865509033203 + ], + [ + "hawk", + -12.098869323730469 + ], + [ + "messe", + -12.098958969116211 + ], + [ + "▁prototype", + -12.098989486694336 + ], + [ + "▁stră", + -12.09903335571289 + ], + [ + "▁Neo", + -12.099040985107422 + ], + [ + "▁29,", + -12.099109649658203 + ], + [ + "izo", + -12.099306106567383 + ], + [ + "▁Anton", + -12.099333763122559 + ], + [ + "SIS", + -12.099564552307129 + ], + [ + "pendant", + -12.099617958068848 + ], + [ + "▁passive", + -12.099813461303711 + ], + [ + "▁Aaron", + -12.099824905395508 + ], + [ + "▁Karen", + -12.099831581115723 + ], + [ + "▁Bildung", + -12.09994888305664 + ], + [ + "ario", + -12.099949836730957 + ], + [ + "▁regulator", + -12.100006103515625 + ], + [ + "gruppe", + -12.100032806396484 + 
], + [ + "stepped", + -12.100053787231445 + ], + [ + "▁interventions", + -12.10014533996582 + ], + [ + "▁rounds", + -12.100149154663086 + ], + [ + "▁Khan", + -12.10020637512207 + ], + [ + "▁railway", + -12.10028076171875 + ], + [ + "▁souvenir", + -12.100296974182129 + ], + [ + "▁Plans", + -12.100336074829102 + ], + [ + "aille", + -12.100372314453125 + ], + [ + "▁billing", + -12.100473403930664 + ], + [ + "▁Spiele", + -12.100541114807129 + ], + [ + "▁supermarket", + -12.100556373596191 + ], + [ + "▁flows", + -12.100625991821289 + ], + [ + "▁PayPal", + -12.100641250610352 + ], + [ + "▁tribe", + -12.10067081451416 + ], + [ + "anni", + -12.100780487060547 + ], + [ + "▁rides", + -12.100934982299805 + ], + [ + "▁Orleans", + -12.101009368896484 + ], + [ + "▁evaluated", + -12.101021766662598 + ], + [ + "founder", + -12.10106372833252 + ], + [ + "▁Feld", + -12.101212501525879 + ], + [ + "▁altele", + -12.10122299194336 + ], + [ + "▁thermo", + -12.101290702819824 + ], + [ + "ugh", + -12.101330757141113 + ], + [ + "▁adus", + -12.101375579833984 + ], + [ + "▁Taiwan", + -12.101396560668945 + ], + [ + "▁clause", + -12.101409912109375 + ], + [ + "oxi", + -12.101465225219727 + ], + [ + "alcool", + -12.101495742797852 + ], + [ + "▁Noi", + -12.101531982421875 + ], + [ + "rub", + -12.101540565490723 + ], + [ + "▁dosar", + -12.101582527160645 + ], + [ + "▁Nelson", + -12.101751327514648 + ], + [ + "fassung", + -12.102316856384277 + ], + [ + "▁Kill", + -12.102489471435547 + ], + [ + "▁Standards", + -12.102490425109863 + ], + [ + "▁upward", + -12.102653503417969 + ], + [ + "▁Coloring", + -12.102664947509766 + ], + [ + "Designed", + -12.102754592895508 + ], + [ + "▁Nou", + -12.10281753540039 + ], + [ + "▁borrow", + -12.102940559387207 + ], + [ + "▁Poll", + -12.10321044921875 + ], + [ + "▁antibiotic", + -12.103277206420898 + ], + [ + "▁fabrication", + -12.103388786315918 + ], + [ + "quo", + -12.103432655334473 + ], + [ + "▁crimes", + -12.103464126586914 + ], + [ + "▁nahe", + -12.103484153747559 + ], + [ + "▁aplicat", + -12.103565216064453 + ], + [ + "OST", + -12.1035737991333 + ], + [ + "▁Beijing", + -12.103599548339844 + ], + [ + "fight", + -12.103612899780273 + ], + [ + "▁lodge", + -12.103612899780273 + ], + [ + "dreh", + -12.103922843933105 + ], + [ + "▁harness", + -12.104036331176758 + ], + [ + "▁noiembrie", + -12.104151725769043 + ], + [ + "ounded", + -12.104161262512207 + ], + [ + "▁Imp", + -12.1041841506958 + ], + [ + "▁nächste", + -12.104275703430176 + ], + [ + "funktion", + -12.104476928710938 + ], + [ + "exploitation", + -12.104569435119629 + ], + [ + "▁Ready", + -12.10457706451416 + ], + [ + "▁Plate", + -12.104598999023438 + ], + [ + "▁octombrie", + -12.104706764221191 + ], + [ + "▁considerat", + -12.104982376098633 + ], + [ + "▁Xbox", + -12.105067253112793 + ], + [ + "mind", + -12.105107307434082 + ], + [ + "▁Lind", + -12.105111122131348 + ], + [ + "runde", + -12.105352401733398 + ], + [ + "mination", + -12.105374336242676 + ], + [ + "▁memori", + -12.105377197265625 + ], + [ + "▁cere", + -12.105389595031738 + ], + [ + "barkeit", + -12.105517387390137 + ], + [ + "▁găsi", + -12.105761528015137 + ], + [ + "2.1", + -12.105863571166992 + ], + [ + "▁Finding", + -12.105891227722168 + ], + [ + "▁static", + -12.106405258178711 + ], + [ + "court", + -12.106439590454102 + ], + [ + "▁Gem", + -12.106489181518555 + ], + [ + "▁pièce", + -12.106494903564453 + ], + [ + "▁reel", + -12.10651969909668 + ], + [ + "▁manuscript", + -12.106560707092285 + ], + [ + "▁complications", + -12.106578826904297 + ], + [ + 
"▁controlling", + -12.106585502624512 + ], + [ + "▁favour", + -12.106738090515137 + ], + [ + "▁advancement", + -12.106739044189453 + ], + [ + "▁Radi", + -12.106870651245117 + ], + [ + "▁faites", + -12.107076644897461 + ], + [ + "▁ordin", + -12.107131958007812 + ], + [ + "sorted", + -12.107152938842773 + ], + [ + "▁1982", + -12.10715389251709 + ], + [ + "▁brutal", + -12.107154846191406 + ], + [ + "▁Guy", + -12.107226371765137 + ], + [ + "▁accomplishment", + -12.107248306274414 + ], + [ + "▁wer", + -12.107329368591309 + ], + [ + "▁withdraw", + -12.107460975646973 + ], + [ + "abilitate", + -12.1075439453125 + ], + [ + "▁NBA", + -12.107625961303711 + ], + [ + "▁Benefit", + -12.107675552368164 + ], + [ + "▁divide", + -12.107824325561523 + ], + [ + "induced", + -12.107913970947266 + ], + [ + "▁văzut", + -12.108049392700195 + ], + [ + "▁peel", + -12.10807991027832 + ], + [ + "▁joints", + -12.108160972595215 + ], + [ + "▁enthalten", + -12.108301162719727 + ], + [ + "▁spy", + -12.108397483825684 + ], + [ + "▁occasional", + -12.108437538146973 + ], + [ + "warm", + -12.108514785766602 + ], + [ + "ême", + -12.108542442321777 + ], + [ + "▁Betriebs", + -12.108551979064941 + ], + [ + "▁Ioan", + -12.1087064743042 + ], + [ + "▁balloon", + -12.108809471130371 + ], + [ + "▁leap", + -12.108869552612305 + ], + [ + "pelled", + -12.109000205993652 + ], + [ + "▁realise", + -12.109073638916016 + ], + [ + "▁Retail", + -12.109118461608887 + ], + [ + "▁Farben", + -12.109151840209961 + ], + [ + "▁Kennedy", + -12.10916519165039 + ], + [ + "▁Firma", + -12.109196662902832 + ], + [ + "▁tineri", + -12.10934066772461 + ], + [ + "tub", + -12.109354019165039 + ], + [ + "PORT", + -12.109381675720215 + ], + [ + "▁stiff", + -12.109416007995605 + ], + [ + "▁notable", + -12.109476089477539 + ], + [ + "tler", + -12.109498023986816 + ], + [ + "▁utile", + -12.10958480834961 + ], + [ + "▁jouer", + -12.109674453735352 + ], + [ + "▁Primary", + -12.109735488891602 + ], + [ + "▁retailer", + -12.109764099121094 + ], + [ + "▁jederzeit", + -12.109808921813965 + ], + [ + "▁amend", + -12.109817504882812 + ], + [ + "▁sagte", + -12.109845161437988 + ], + [ + "atch", + -12.10995864868164 + ], + [ + "ution", + -12.110008239746094 + ], + [ + "once", + -12.110018730163574 + ], + [ + "ended", + -12.1100435256958 + ], + [ + "▁literary", + -12.11013126373291 + ], + [ + "▁wrist", + -12.110281944274902 + ], + [ + "vii", + -12.11036205291748 + ], + [ + "scriere", + -12.110367774963379 + ], + [ + "▁compassion", + -12.110443115234375 + ], + [ + "▁Milan", + -12.110474586486816 + ], + [ + "▁Dach", + -12.110490798950195 + ], + [ + "▁problèmes", + -12.110630989074707 + ], + [ + "▁Pré", + -12.110687255859375 + ], + [ + "▁Feder", + -12.110759735107422 + ], + [ + "Dr", + -12.110814094543457 + ], + [ + "Spr", + -12.110908508300781 + ], + [ + "▁né", + -12.110969543457031 + ], + [ + "François", + -12.111023902893066 + ], + [ + "▁Shu", + -12.111115455627441 + ], + [ + "▁poison", + -12.111154556274414 + ], + [ + "zier", + -12.111176490783691 + ], + [ + "▁attain", + -12.11124038696289 + ], + [ + "▁switching", + -12.111310958862305 + ], + [ + "▁vibration", + -12.111348152160645 + ], + [ + "▁Tablet", + -12.11136531829834 + ], + [ + "▁Lern", + -12.11148452758789 + ], + [ + "offrir", + -12.111660957336426 + ], + [ + "123", + -12.11168098449707 + ], + [ + "cheapest", + -12.11173152923584 + ], + [ + "▁numărul", + -12.111764907836914 + ], + [ + "break", + -12.11180305480957 + ], + [ + "cyto", + -12.111836433410645 + ], + [ + "▁Mississippi", + -12.111955642700195 + ], + [ + 
"▁dragon", + -12.11207389831543 + ], + [ + "fir", + -12.112176895141602 + ], + [ + "▁fête", + -12.112180709838867 + ], + [ + "▁Wait", + -12.112350463867188 + ], + [ + "buy", + -12.112359046936035 + ], + [ + "având", + -12.112391471862793 + ], + [ + "▁Scar", + -12.112517356872559 + ], + [ + "▁Hund", + -12.112586975097656 + ], + [ + "bug", + -12.112807273864746 + ], + [ + "▁classique", + -12.112811088562012 + ], + [ + "▁tenant", + -12.112860679626465 + ], + [ + "▁Walt", + -12.11296272277832 + ], + [ + "▁timber", + -12.11296272277832 + ], + [ + "inscription", + -12.11300277709961 + ], + [ + "BD", + -12.113016128540039 + ], + [ + "▁Commissioner", + -12.113018989562988 + ], + [ + "▁casinos", + -12.11306095123291 + ], + [ + "▁prochain", + -12.113168716430664 + ], + [ + "▁rustic", + -12.11349868774414 + ], + [ + "▁Kent", + -12.113607406616211 + ], + [ + "▁Deci", + -12.113761901855469 + ], + [ + "ли", + -12.113855361938477 + ], + [ + "▁crossed", + -12.113861083984375 + ], + [ + "▁delightful", + -12.113869667053223 + ], + [ + "▁metres", + -12.113872528076172 + ], + [ + "▁scandal", + -12.113906860351562 + ], + [ + "▁activitate", + -12.113986015319824 + ], + [ + "▁nimeni", + -12.114009857177734 + ], + [ + "ease", + -12.11402416229248 + ], + [ + "▁revenues", + -12.1140775680542 + ], + [ + "▁partially", + -12.114187240600586 + ], + [ + "AE", + -12.114263534545898 + ], + [ + "nique", + -12.114410400390625 + ], + [ + "▁fixtures", + -12.114426612854004 + ], + [ + "▁pupils", + -12.114694595336914 + ], + [ + "Lib", + -12.11471176147461 + ], + [ + "analyse", + -12.114739418029785 + ], + [ + "▁Oracle", + -12.114767074584961 + ], + [ + "troph", + -12.114859580993652 + ], + [ + "▁detected", + -12.114879608154297 + ], + [ + "▁servant", + -12.11507797241211 + ], + [ + "▁badly", + -12.115121841430664 + ], + [ + "comparing", + -12.115150451660156 + ], + [ + "abs", + -12.115238189697266 + ], + [ + "▁fotografi", + -12.115443229675293 + ], + [ + "▁Million", + -12.115541458129883 + ], + [ + "▁Gordon", + -12.11557388305664 + ], + [ + "▁Smok", + -12.115592002868652 + ], + [ + "▁Essay", + -12.11565113067627 + ], + [ + "eptic", + -12.115665435791016 + ], + [ + "▁Transportation", + -12.115728378295898 + ], + [ + "/2019", + -12.115767478942871 + ], + [ + "▁alignment", + -12.115778923034668 + ], + [ + "▁laut", + -12.11578369140625 + ], + [ + "stände", + -12.115791320800781 + ], + [ + "▁concerts", + -12.115811347961426 + ], + [ + "▁weekends", + -12.11589241027832 + ], + [ + "▁obstacles", + -12.115941047668457 + ], + [ + "wür", + -12.115964889526367 + ], + [ + "▁Fisher", + -12.116219520568848 + ], + [ + "▁supervisor", + -12.116242408752441 + ], + [ + "▁traders", + -12.116262435913086 + ], + [ + "▁scary", + -12.116484642028809 + ], + [ + "▁Grove", + -12.116538047790527 + ], + [ + "▁expose", + -12.116583824157715 + ], + [ + "▁enemies", + -12.116630554199219 + ], + [ + "▁Lux", + -12.11667537689209 + ], + [ + "▁Berufs", + -12.11672306060791 + ], + [ + "▁Sheet", + -12.116780281066895 + ], + [ + "▁Natürlich", + -12.116819381713867 + ], + [ + "▁examined", + -12.116886138916016 + ], + [ + "pursuing", + -12.116920471191406 + ], + [ + "▁pools", + -12.116923332214355 + ], + [ + "▁Thompson", + -12.117005348205566 + ], + [ + "▁SAP", + -12.117010116577148 + ], + [ + "claiming", + -12.117053985595703 + ], + [ + "buried", + -12.117055892944336 + ], + [ + "assurance", + -12.117138862609863 + ], + [ + "▁sandwich", + -12.117195129394531 + ], + [ + "uber", + -12.117310523986816 + ], + [ + "▁laisse", + -12.117321968078613 + ], + [ + "peak", + 
-12.117348670959473 + ], + [ + "spring", + -12.1173677444458 + ], + [ + "▁august", + -12.117369651794434 + ], + [ + "▁benötigt", + -12.11738109588623 + ], + [ + "▁achievements", + -12.117470741271973 + ], + [ + "coala", + -12.117478370666504 + ], + [ + "▁scr", + -12.117842674255371 + ], + [ + "gesagt", + -12.118122100830078 + ], + [ + "▁envelope", + -12.118141174316406 + ], + [ + "▁mapping", + -12.118169784545898 + ], + [ + "▁Suche", + -12.118298530578613 + ], + [ + "first", + -12.118329048156738 + ], + [ + "▁Quin", + -12.118447303771973 + ], + [ + "räu", + -12.118561744689941 + ], + [ + "▁răs", + -12.118583679199219 + ], + [ + "chemical", + -12.118597984313965 + ], + [ + "dad", + -12.118927955627441 + ], + [ + "formation", + -12.118983268737793 + ], + [ + "▁cushion", + -12.119026184082031 + ], + [ + "▁Maß", + -12.119046211242676 + ], + [ + "07.", + -12.119184494018555 + ], + [ + "▁perioadă", + -12.119257926940918 + ], + [ + "▁Wunsch", + -12.11925983428955 + ], + [ + "▁joi", + -12.119423866271973 + ], + [ + "▁$25", + -12.119482040405273 + ], + [ + "▁uploaded", + -12.11952018737793 + ], + [ + "▁hobby", + -12.119633674621582 + ], + [ + "▁septembrie", + -12.119633674621582 + ], + [ + "▁Dimension", + -12.119634628295898 + ], + [ + "▁domeniu", + -12.119661331176758 + ], + [ + "▁Tourism", + -12.119747161865234 + ], + [ + "▁fais", + -12.119800567626953 + ], + [ + "aches", + -12.119919776916504 + ], + [ + "neck", + -12.119969367980957 + ], + [ + "▁Chip", + -12.119982719421387 + ], + [ + "▁Tisch", + -12.1199951171875 + ], + [ + "▁Pai", + -12.120006561279297 + ], + [ + "▁Butter", + -12.120083808898926 + ], + [ + "▁altor", + -12.120133399963379 + ], + [ + "cultural", + -12.120182991027832 + ], + [ + "▁bases", + -12.12028980255127 + ], + [ + "▁Christopher", + -12.120396614074707 + ], + [ + "Kindle", + -12.120401382446289 + ], + [ + "▁bathrooms", + -12.12049388885498 + ], + [ + "▁civilian", + -12.12052059173584 + ], + [ + "▁Architecture", + -12.12058162689209 + ], + [ + "heiten", + -12.120641708374023 + ], + [ + "otte", + -12.120763778686523 + ], + [ + "ри", + -12.120784759521484 + ], + [ + "wash", + -12.120792388916016 + ], + [ + "▁evenimente", + -12.12086296081543 + ], + [ + "lade", + -12.121132850646973 + ], + [ + "▁ermöglicht", + -12.121140480041504 + ], + [ + "Port", + -12.121149063110352 + ], + [ + "▁Horn", + -12.12119197845459 + ], + [ + "▁Housing", + -12.121232032775879 + ], + [ + "▁Profit", + -12.121304512023926 + ], + [ + "▁stressed", + -12.12136459350586 + ], + [ + "▁70%", + -12.121431350708008 + ], + [ + "laying", + -12.121458053588867 + ], + [ + "▁specialize", + -12.121490478515625 + ], + [ + "▁Published", + -12.121519088745117 + ], + [ + "corp", + -12.121554374694824 + ], + [ + "▁revision", + -12.121611595153809 + ], + [ + "▁sail", + -12.121804237365723 + ], + [ + "courtesy", + -12.121909141540527 + ], + [ + "tax", + -12.1219482421875 + ], + [ + "▁perfekt", + -12.122018814086914 + ], + [ + "▁Risk", + -12.122088432312012 + ], + [ + "▁chaleur", + -12.122129440307617 + ], + [ + "ych", + -12.122132301330566 + ], + [ + "▁spine", + -12.12218189239502 + ], + [ + "▁holders", + -12.122264862060547 + ], + [ + "▁Speaking", + -12.122271537780762 + ], + [ + "▁Bernard", + -12.122400283813477 + ], + [ + "incarc", + -12.122532844543457 + ], + [ + "shalb", + -12.122639656066895 + ], + [ + "Potrivit", + -12.12264633178711 + ], + [ + "arising", + -12.122654914855957 + ], + [ + "▁kingdom", + -12.122665405273438 + ], + [ + "▁potato", + -12.122766494750977 + ], + [ + "▁promoted", + -12.122814178466797 + ], + [ 
+ "▁judges", + -12.1228609085083 + ], + [ + "▁naturelle", + -12.122992515563965 + ], + [ + "▁Kindern", + -12.123022079467773 + ], + [ + "schicht", + -12.123047828674316 + ], + [ + "▁Drag", + -12.123066902160645 + ], + [ + "atta", + -12.123132705688477 + ], + [ + "soient", + -12.123249053955078 + ], + [ + "INS", + -12.12336540222168 + ], + [ + "▁legislative", + -12.123642921447754 + ], + [ + "▁teens", + -12.123785018920898 + ], + [ + "▁Fotos", + -12.123842239379883 + ], + [ + "▁illustrations", + -12.12392520904541 + ], + [ + "möglichkeiten", + -12.12415599822998 + ], + [ + "Votre", + -12.124194145202637 + ], + [ + "▁tarif", + -12.124195098876953 + ], + [ + "cli", + -12.124488830566406 + ], + [ + "▁landlord", + -12.12473201751709 + ], + [ + "cine", + -12.124743461608887 + ], + [ + "▁bot", + -12.124798774719238 + ], + [ + "enhancing", + -12.12491226196289 + ], + [ + "▁März", + -12.12491226196289 + ], + [ + "▁succès", + -12.125106811523438 + ], + [ + "▁disclose", + -12.125120162963867 + ], + [ + "▁Geräte", + -12.125321388244629 + ], + [ + "▁Magn", + -12.125422477722168 + ], + [ + "dessous", + -12.12580680847168 + ], + [ + "▁miracle", + -12.125862121582031 + ], + [ + "▁travailler", + -12.125933647155762 + ], + [ + "▁herb", + -12.125945091247559 + ], + [ + "-01", + -12.126049041748047 + ], + [ + "litre", + -12.126104354858398 + ], + [ + "▁tău", + -12.126120567321777 + ], + [ + "ACC", + -12.126190185546875 + ], + [ + "▁diminu", + -12.126275062561035 + ], + [ + "itzer", + -12.126317024230957 + ], + [ + "▁personenbezogen", + -12.126395225524902 + ], + [ + "▁Pure", + -12.126436233520508 + ], + [ + "▁influences", + -12.12668228149414 + ], + [ + "ană", + -12.126765251159668 + ], + [ + "▁proposer", + -12.126856803894043 + ], + [ + "▁longest", + -12.12692642211914 + ], + [ + "euses", + -12.127080917358398 + ], + [ + "/1", + -12.127487182617188 + ], + [ + "hafte", + -12.127716064453125 + ], + [ + "▁Dich", + -12.127761840820312 + ], + [ + "▁candle", + -12.128026962280273 + ], + [ + "ouche", + -12.128191947937012 + ], + [ + "installation", + -12.128241539001465 + ], + [ + "▁Includes", + -12.128280639648438 + ], + [ + "▁entfernt", + -12.12831974029541 + ], + [ + "traf", + -12.128499031066895 + ], + [ + "▁None", + -12.128508567810059 + ], + [ + "▁produc", + -12.128510475158691 + ], + [ + "held", + -12.128519058227539 + ], + [ + "graphic", + -12.128531455993652 + ], + [ + "▁demographic", + -12.128584861755371 + ], + [ + "ingham", + -12.1287841796875 + ], + [ + "schul", + -12.128812789916992 + ], + [ + "▁sneak", + -12.128843307495117 + ], + [ + "laub", + -12.128889083862305 + ], + [ + "▁thickness", + -12.12911605834961 + ], + [ + "▁killer", + -12.129297256469727 + ], + [ + "▁entsprechende", + -12.129344940185547 + ], + [ + "▁theft", + -12.129396438598633 + ], + [ + "▁Jerusalem", + -12.129457473754883 + ], + [ + "Adapt", + -12.129495620727539 + ], + [ + "▁updating", + -12.129497528076172 + ], + [ + "tete", + -12.12954330444336 + ], + [ + "▁warming", + -12.129701614379883 + ], + [ + "anlage", + -12.129739761352539 + ], + [ + "▁lenders", + -12.129814147949219 + ], + [ + "mobile", + -12.130008697509766 + ], + [ + "▁Package", + -12.130080223083496 + ], + [ + "▁Volume", + -12.130152702331543 + ], + [ + "---", + -12.130167007446289 + ], + [ + "▁Others", + -12.130173683166504 + ], + [ + "content", + -12.130188941955566 + ], + [ + "tement", + -12.130253791809082 + ], + [ + "bildet", + -12.13027572631836 + ], + [ + "▁washer", + -12.13053035736084 + ], + [ + "▁freelance", + -12.130623817443848 + ], + [ + "▁fein", + 
-12.130753517150879 + ], + [ + "▁catering", + -12.130851745605469 + ], + [ + "▁warmth", + -12.130911827087402 + ], + [ + "▁Month", + -12.131103515625 + ], + [ + "▁Federation", + -12.131134033203125 + ], + [ + "▁editorial", + -12.13121223449707 + ], + [ + "▁Shopping", + -12.131241798400879 + ], + [ + "▁efort", + -12.131296157836914 + ], + [ + "▁damp", + -12.131314277648926 + ], + [ + "▁declined", + -12.131332397460938 + ], + [ + "▁1978", + -12.13135051727295 + ], + [ + "6,000", + -12.131355285644531 + ], + [ + "location", + -12.131551742553711 + ], + [ + "▁blogger", + -12.131572723388672 + ], + [ + "▁goodness", + -12.131826400756836 + ], + [ + "▁Purchase", + -12.132119178771973 + ], + [ + "▁suspended", + -12.132159233093262 + ], + [ + "▁assessed", + -12.132201194763184 + ], + [ + "rada", + -12.132286071777344 + ], + [ + "▁Lac", + -12.132291793823242 + ], + [ + "▁angeboten", + -12.13235092163086 + ], + [ + "▁Wetter", + -12.132370948791504 + ], + [ + "ores", + -12.13243579864502 + ], + [ + "▁fourni", + -12.132476806640625 + ], + [ + "▁retire", + -12.13269329071045 + ], + [ + "▁Baptist", + -12.132741928100586 + ], + [ + "▁Saison", + -12.13277530670166 + ], + [ + "Bar", + -12.132794380187988 + ], + [ + "▁dossier", + -12.132979393005371 + ], + [ + "brow", + -12.133044242858887 + ], + [ + "▁Kaffee", + -12.133071899414062 + ], + [ + "-25", + -12.133463859558105 + ], + [ + "▁festivals", + -12.133599281311035 + ], + [ + "▁sellers", + -12.133716583251953 + ], + [ + "Ü", + -12.13393783569336 + ], + [ + "▁publisher", + -12.133960723876953 + ], + [ + "▁Designs", + -12.133970260620117 + ], + [ + "▁putut", + -12.13400936126709 + ], + [ + "▁Built", + -12.134417533874512 + ], + [ + "▁recreational", + -12.134476661682129 + ], + [ + "▁european", + -12.134514808654785 + ], + [ + "▁binary", + -12.134631156921387 + ], + [ + "▁Nieder", + -12.134764671325684 + ], + [ + "taking", + -12.1348237991333 + ], + [ + "▁Lots", + -12.13494873046875 + ], + [ + "▁recognised", + -12.135031700134277 + ], + [ + "ssant", + -12.135063171386719 + ], + [ + "ITE", + -12.135271072387695 + ], + [ + "oom", + -12.135298728942871 + ], + [ + "▁Kre", + -12.135310173034668 + ], + [ + "▁pipes", + -12.135631561279297 + ], + [ + "▁hinge", + -12.135653495788574 + ], + [ + "▁enterprises", + -12.135664939880371 + ], + [ + "▁texts", + -12.13583755493164 + ], + [ + "Organiz", + -12.136080741882324 + ], + [ + "▁suivre", + -12.136124610900879 + ], + [ + "noc", + -12.136157989501953 + ], + [ + "fair", + -12.136194229125977 + ], + [ + "▁darkness", + -12.136305809020996 + ], + [ + "▁Whi", + -12.13631534576416 + ], + [ + "natural", + -12.136321067810059 + ], + [ + "Bas", + -12.136422157287598 + ], + [ + "▁tribute", + -12.136443138122559 + ], + [ + "▁Naţional", + -12.136573791503906 + ], + [ + "hara", + -12.136622428894043 + ], + [ + "▁catégorie", + -12.136697769165039 + ], + [ + "▁Schedule", + -12.136698722839355 + ], + [ + "▁lernen", + -12.13671875 + ], + [ + "▁Plastic", + -12.136725425720215 + ], + [ + "▁giveaway", + -12.13675594329834 + ], + [ + "▁Ideen", + -12.136906623840332 + ], + [ + "▁circa", + -12.13718032836914 + ], + [ + "▁lice", + -12.137242317199707 + ], + [ + "▁Meinung", + -12.137264251708984 + ], + [ + "▁beside", + -12.137566566467285 + ], + [ + "▁vazut", + -12.137673377990723 + ], + [ + "strom", + -12.137749671936035 + ], + [ + "boro", + -12.137775421142578 + ], + [ + "▁Soon", + -12.137796401977539 + ], + [ + "dozens", + -12.137896537780762 + ], + [ + "▁Arena", + -12.137943267822266 + ], + [ + "▁viața", + -12.137989044189453 + ], + [ + 
"▁Impact", + -12.138082504272461 + ], + [ + "current", + -12.138106346130371 + ], + [ + "FM", + -12.138117790222168 + ], + [ + "▁coil", + -12.138657569885254 + ], + [ + "gold", + -12.138679504394531 + ], + [ + "▁spate", + -12.138679504394531 + ], + [ + "1.4", + -12.13875675201416 + ], + [ + "solution", + -12.138769149780273 + ], + [ + "▁Wayne", + -12.138835906982422 + ], + [ + "▁queen", + -12.138898849487305 + ], + [ + "illion", + -12.139022827148438 + ], + [ + "greifen", + -12.139127731323242 + ], + [ + "▁Bil", + -12.139174461364746 + ], + [ + "rote", + -12.139185905456543 + ], + [ + "END", + -12.13918685913086 + ], + [ + "äl", + -12.139206886291504 + ], + [ + "▁reçu", + -12.139378547668457 + ], + [ + "flower", + -12.139495849609375 + ], + [ + "▁draws", + -12.139519691467285 + ], + [ + "plant", + -12.139605522155762 + ], + [ + "2010", + -12.139702796936035 + ], + [ + "▁oper", + -12.139762878417969 + ], + [ + "▁conserve", + -12.139777183532715 + ], + [ + "▁sprinkle", + -12.13984203338623 + ], + [ + "mode", + -12.139924049377441 + ], + [ + "▁lifting", + -12.139941215515137 + ], + [ + "▁Institution", + -12.139951705932617 + ], + [ + "Când", + -12.14001750946045 + ], + [ + "Aus", + -12.140048027038574 + ], + [ + "▁fears", + -12.140054702758789 + ], + [ + "▁appointments", + -12.140079498291016 + ], + [ + "oarele", + -12.140162467956543 + ], + [ + "▁duck", + -12.140193939208984 + ], + [ + "▁stadium", + -12.140213012695312 + ], + [ + "▁vezi", + -12.140227317810059 + ], + [ + "▁lap", + -12.140315055847168 + ], + [ + "▁proceeds", + -12.140382766723633 + ], + [ + "geschlossen", + -12.140412330627441 + ], + [ + "▁tren", + -12.140478134155273 + ], + [ + "VS", + -12.140536308288574 + ], + [ + "▁vais", + -12.140800476074219 + ], + [ + "ținut", + -12.140859603881836 + ], + [ + "▁Concert", + -12.140928268432617 + ], + [ + "▁planting", + -12.141008377075195 + ], + [ + "▁honour", + -12.141069412231445 + ], + [ + "▁gras", + -12.141071319580078 + ], + [ + "woo", + -12.141092300415039 + ], + [ + "▁Hero", + -12.141282081604004 + ], + [ + "▁stimulate", + -12.14134407043457 + ], + [ + "▁überhaupt", + -12.141426086425781 + ], + [ + "▁bounce", + -12.14148235321045 + ], + [ + "oodle", + -12.14151382446289 + ], + [ + "▁packs", + -12.141576766967773 + ], + [ + "▁Poker", + -12.14158821105957 + ], + [ + "▁acea", + -12.141684532165527 + ], + [ + "▁parish", + -12.141754150390625 + ], + [ + "-24", + -12.141766548156738 + ], + [ + "▁iTunes", + -12.141874313354492 + ], + [ + "▁lumière", + -12.141948699951172 + ], + [ + "third", + -12.142024993896484 + ], + [ + "▁dynamics", + -12.142038345336914 + ], + [ + "Unless", + -12.142162322998047 + ], + [ + "▁immense", + -12.142416000366211 + ], + [ + "▁Sec", + -12.142781257629395 + ], + [ + "lois", + -12.143009185791016 + ], + [ + "époque", + -12.14302921295166 + ], + [ + "NB", + -12.143139839172363 + ], + [ + "written", + -12.143210411071777 + ], + [ + "▁logement", + -12.143226623535156 + ], + [ + "submitting", + -12.143295288085938 + ], + [ + "▁Quand", + -12.14331340789795 + ], + [ + "▁foi", + -12.143322944641113 + ], + [ + "▁catalogue", + -12.143351554870605 + ], + [ + "nova", + -12.14343547821045 + ], + [ + "▁prezentat", + -12.143527030944824 + ], + [ + "▁tart", + -12.143877983093262 + ], + [ + "те", + -12.143912315368652 + ], + [ + "hack", + -12.143916130065918 + ], + [ + "▁Politic", + -12.144003868103027 + ], + [ + "▁18,", + -12.144048690795898 + ], + [ + "▁ignored", + -12.144145965576172 + ], + [ + "▁spoon", + -12.144245147705078 + ], + [ + "▁Joy", + -12.144280433654785 + ], 
+ [ + "▁reside", + -12.144482612609863 + ], + [ + ".99", + -12.144488334655762 + ], + [ + "lytic", + -12.144625663757324 + ], + [ + "▁bogat", + -12.144643783569336 + ], + [ + "▁nurses", + -12.144845008850098 + ], + [ + "▁funcţi", + -12.145029067993164 + ], + [ + "▁produselor", + -12.145038604736328 + ], + [ + "▁Associates", + -12.145069122314453 + ], + [ + "Est", + -12.14511489868164 + ], + [ + "▁peanut", + -12.145187377929688 + ], + [ + "▁résultat", + -12.145257949829102 + ], + [ + "08.", + -12.145424842834473 + ], + [ + "▁Astro", + -12.145439147949219 + ], + [ + "▁personnelle", + -12.145527839660645 + ], + [ + "320", + -12.145668983459473 + ], + [ + "▁Grab", + -12.145748138427734 + ], + [ + "éco", + -12.145801544189453 + ], + [ + "▁clasic", + -12.145857810974121 + ], + [ + "offre", + -12.14588451385498 + ], + [ + "▁idee", + -12.14589786529541 + ], + [ + "▁cheat", + -12.146259307861328 + ], + [ + "▁Flug", + -12.146286964416504 + ], + [ + "▁1500", + -12.146413803100586 + ], + [ + "▁kurze", + -12.14643383026123 + ], + [ + "With", + -12.146512985229492 + ], + [ + "▁Half", + -12.146575927734375 + ], + [ + "▁disciplines", + -12.146642684936523 + ], + [ + "sorption", + -12.14669132232666 + ], + [ + "▁greutate", + -12.146927833557129 + ], + [ + "mä", + -12.146940231323242 + ], + [ + "▁Literatur", + -12.146956443786621 + ], + [ + "3/", + -12.147016525268555 + ], + [ + "4.0", + -12.147095680236816 + ], + [ + "▁déco", + -12.147119522094727 + ], + [ + "▁Fuß", + -12.147233963012695 + ], + [ + "▁Deutsche", + -12.147289276123047 + ], + [ + "▁abundance", + -12.14746379852295 + ], + [ + "▁Luther", + -12.14750862121582 + ], + [ + "▁nutritional", + -12.147562980651855 + ], + [ + "▁Jude", + -12.147687911987305 + ], + [ + "AY", + -12.14786148071289 + ], + [ + "▁chore", + -12.147916793823242 + ], + [ + "▁Kro", + -12.148006439208984 + ], + [ + "▁alin", + -12.14801025390625 + ], + [ + "lösung", + -12.148030281066895 + ], + [ + "▁geworden", + -12.148238182067871 + ], + [ + "▁sociaux", + -12.148255348205566 + ], + [ + "▁Spark", + -12.1486177444458 + ], + [ + "▁phenomenon", + -12.148624420166016 + ], + [ + "ICA", + -12.148805618286133 + ], + [ + "▁Ran", + -12.148836135864258 + ], + [ + "▁Schwarz", + -12.148959159851074 + ], + [ + "▁1983", + -12.148985862731934 + ], + [ + "ет", + -12.148990631103516 + ], + [ + "möglich", + -12.149084091186523 + ], + [ + "vocation", + -12.149087905883789 + ], + [ + "▁Organic", + -12.14926815032959 + ], + [ + "Oh", + -12.149408340454102 + ], + [ + "▁blockchain", + -12.149422645568848 + ], + [ + "▁Bă", + -12.149515151977539 + ], + [ + "▁Bass", + -12.14953899383545 + ], + [ + "enie", + -12.149687767028809 + ], + [ + "▁rêve", + -12.149807929992676 + ], + [ + "▁Rap", + -12.149986267089844 + ], + [ + "▁democratic", + -12.150044441223145 + ], + [ + "▁Chart", + -12.150167465209961 + ], + [ + "▁Voi", + -12.150189399719238 + ], + [ + "process", + -12.150263786315918 + ], + [ + "▁preach", + -12.150389671325684 + ], + [ + "tient", + -12.150456428527832 + ], + [ + "▁Train", + -12.150468826293945 + ], + [ + "▁Reihe", + -12.150472640991211 + ], + [ + "help", + -12.150514602661133 + ], + [ + "1.6", + -12.150547981262207 + ], + [ + "▁cazuri", + -12.150547981262207 + ], + [ + "▁chap", + -12.150559425354004 + ], + [ + "aktiv", + -12.150632858276367 + ], + [ + "▁2006.", + -12.15079116821289 + ], + [ + "iene", + -12.150849342346191 + ], + [ + "▁BBQ", + -12.150969505310059 + ], + [ + "dauer", + -12.151028633117676 + ], + [ + "2).", + -12.151226997375488 + ], + [ + "▁Monat", + -12.151277542114258 + ], + [ 
+ "Generally", + -12.151285171508789 + ], + [ + "▁bracelet", + -12.151336669921875 + ], + [ + "▁cartoon", + -12.151349067687988 + ], + [ + "▁pui", + -12.151488304138184 + ], + [ + "temp", + -12.151506423950195 + ], + [ + "▁Particip", + -12.151555061340332 + ], + [ + "▁dumneavoastră", + -12.151725769042969 + ], + [ + "▁Gin", + -12.151824951171875 + ], + [ + "iunile", + -12.151829719543457 + ], + [ + "reise", + -12.151849746704102 + ], + [ + "▁einzige", + -12.15189266204834 + ], + [ + "ANCE", + -12.15192985534668 + ], + [ + "▁humble", + -12.151951789855957 + ], + [ + "claim", + -12.152093887329102 + ], + [ + "LV", + -12.152143478393555 + ], + [ + "▁confiance", + -12.152270317077637 + ], + [ + "▁Trading", + -12.152535438537598 + ], + [ + "▁Fabric", + -12.152770042419434 + ], + [ + "▁Duke", + -12.152851104736328 + ], + [ + "spieler", + -12.152937889099121 + ], + [ + "▁reject", + -12.152987480163574 + ], + [ + "▁crise", + -12.153170585632324 + ], + [ + "▁borders", + -12.153196334838867 + ], + [ + "▁Vehicle", + -12.153279304504395 + ], + [ + "zeiten", + -12.153481483459473 + ], + [ + "enrolled", + -12.153514862060547 + ], + [ + "venue", + -12.153555870056152 + ], + [ + "▁forests", + -12.153564453125 + ], + [ + "vascular", + -12.15358829498291 + ], + [ + "▁phrases", + -12.153661727905273 + ], + [ + "▁receptor", + -12.15368366241455 + ], + [ + "schied", + -12.153687477111816 + ], + [ + "▁soirée", + -12.153785705566406 + ], + [ + "▁partener", + -12.153987884521484 + ], + [ + "▁Jobs", + -12.15417194366455 + ], + [ + "▁segments", + -12.154216766357422 + ], + [ + "▁violate", + -12.154438972473145 + ], + [ + "▁viable", + -12.154500007629395 + ], + [ + "▁encountered", + -12.154533386230469 + ], + [ + "▁travelers", + -12.154552459716797 + ], + [ + "▁împ", + -12.154679298400879 + ], + [ + "▁convince", + -12.154693603515625 + ], + [ + "▁mailing", + -12.154693603515625 + ], + [ + "▁Zahn", + -12.154698371887207 + ], + [ + "attend", + -12.15477466583252 + ], + [ + "▁eBay", + -12.154836654663086 + ], + [ + "▁Emergency", + -12.154844284057617 + ], + [ + "wirtschaft", + -12.154882431030273 + ], + [ + "▁scholars", + -12.154947280883789 + ], + [ + "▁considerably", + -12.155118942260742 + ], + [ + "▁combo", + -12.1551513671875 + ], + [ + "hiver", + -12.155198097229004 + ], + [ + "▁mysterious", + -12.15522575378418 + ], + [ + "▁Degree", + -12.155234336853027 + ], + [ + "▁fate", + -12.155242919921875 + ], + [ + "▁transplant", + -12.155281066894531 + ], + [ + "▁samedi", + -12.155400276184082 + ], + [ + "unit", + -12.155519485473633 + ], + [ + "▁moyenne", + -12.155611991882324 + ], + [ + "▁Liverpool", + -12.155614852905273 + ], + [ + "▁Champions", + -12.155728340148926 + ], + [ + "zzle", + -12.155824661254883 + ], + [ + "▁arena", + -12.156228065490723 + ], + [ + "▁Pipe", + -12.15633487701416 + ], + [ + "▁waterproof", + -12.156356811523438 + ], + [ + "▁eternal", + -12.156463623046875 + ], + [ + "Whenever", + -12.156503677368164 + ], + [ + "▁Hop", + -12.156535148620605 + ], + [ + "▁Betrieb", + -12.156816482543945 + ], + [ + "gne", + -12.15692138671875 + ], + [ + "▁spe", + -12.156975746154785 + ], + [ + "▁Corner", + -12.157078742980957 + ], + [ + "▁devenir", + -12.157118797302246 + ], + [ + "ambiance", + -12.157144546508789 + ], + [ + "▁Graham", + -12.157200813293457 + ], + [ + "▁desires", + -12.157289505004883 + ], + [ + "▁Applications", + -12.157291412353516 + ], + [ + "▁genutzt", + -12.157477378845215 + ], + [ + "tek", + -12.157612800598145 + ], + [ + "▁Career", + -12.157641410827637 + ], + [ + "▁staple", + 
-12.157695770263672 + ], + [ + "▁Dodge", + -12.157817840576172 + ], + [ + "▁strictly", + -12.157889366149902 + ], + [ + "▁Gruppen", + -12.157952308654785 + ], + [ + "▁Finanz", + -12.157981872558594 + ], + [ + "▁sporting", + -12.15809440612793 + ], + [ + "▁Wieder", + -12.158127784729004 + ], + [ + "anny", + -12.158208847045898 + ], + [ + "▁bucura", + -12.158233642578125 + ], + [ + "▁Pest", + -12.15824031829834 + ], + [ + "▁circles", + -12.158246994018555 + ], + [ + "▁richtige", + -12.158309936523438 + ], + [ + "▁cycles", + -12.158379554748535 + ], + [ + "static", + -12.15845012664795 + ], + [ + "lasting", + -12.15847396850586 + ], + [ + "▁calcium", + -12.158549308776855 + ], + [ + "▁digest", + -12.158697128295898 + ], + [ + "Enfin", + -12.158865928649902 + ], + [ + "▁stressful", + -12.158951759338379 + ], + [ + "▁schemes", + -12.158981323242188 + ], + [ + "▁décision", + -12.158987045288086 + ], + [ + "▁comercial", + -12.15907096862793 + ], + [ + "işti", + -12.159098625183105 + ], + [ + "▁Comic", + -12.15910816192627 + ], + [ + "▁extensions", + -12.159140586853027 + ], + [ + "▁Sieg", + -12.159168243408203 + ], + [ + "▁pine", + -12.15919017791748 + ], + [ + "ieß", + -12.159272193908691 + ], + [ + "▁Images", + -12.159427642822266 + ], + [ + "▁Mensch", + -12.159668922424316 + ], + [ + "Pap", + -12.159773826599121 + ], + [ + "▁crops", + -12.15994930267334 + ], + [ + "▁sheep", + -12.159996032714844 + ], + [ + "▁istoric", + -12.160001754760742 + ], + [ + "▁Assessment", + -12.160035133361816 + ], + [ + "▁mounting", + -12.16035270690918 + ], + [ + "wirken", + -12.160469055175781 + ], + [ + "▁augment", + -12.160469055175781 + ], + [ + "▁picioare", + -12.160542488098145 + ], + [ + "organisme", + -12.160590171813965 + ], + [ + "▁Monitor", + -12.16060733795166 + ], + [ + "▁celles", + -12.160642623901367 + ], + [ + "▁Maison", + -12.160709381103516 + ], + [ + "notified", + -12.160783767700195 + ], + [ + "▁chew", + -12.160831451416016 + ], + [ + "▁bleu", + -12.16083812713623 + ], + [ + "dow", + -12.160844802856445 + ], + [ + "▁Grav", + -12.16097354888916 + ], + [ + "▁curtains", + -12.160975456237793 + ], + [ + "▁Campus", + -12.161076545715332 + ], + [ + "▁controversial", + -12.161087036132812 + ], + [ + "▁soutien", + -12.161189079284668 + ], + [ + "▁Dell", + -12.1613187789917 + ], + [ + "▁instrumental", + -12.161431312561035 + ], + [ + "▁Nan", + -12.161514282226562 + ], + [ + "▁prom", + -12.161520957946777 + ], + [ + "▁spatial", + -12.161523818969727 + ], + [ + "Similarly", + -12.161558151245117 + ], + [ + "▁Gala", + -12.161601066589355 + ], + [ + "ultimul", + -12.16162109375 + ], + [ + "▁Vom", + -12.161761283874512 + ], + [ + "▁Foot", + -12.161784172058105 + ], + [ + "bike", + -12.1618013381958 + ], + [ + "▁acids", + -12.161979675292969 + ], + [ + "entend", + -12.162002563476562 + ], + [ + "ivă", + -12.162040710449219 + ], + [ + "▁Weitere", + -12.162124633789062 + ], + [ + "▁vitamins", + -12.162131309509277 + ], + [ + "▁enhancement", + -12.16234016418457 + ], + [ + "▁Cruise", + -12.162367820739746 + ], + [ + "assemble", + -12.162385940551758 + ], + [ + "▁spécifique", + -12.162459373474121 + ], + [ + "affaires", + -12.16261100769043 + ], + [ + "▁indispensable", + -12.1626558303833 + ], + [ + "▁logistics", + -12.16283130645752 + ], + [ + "▁manche", + -12.162919044494629 + ], + [ + "▁dealt", + -12.16297435760498 + ], + [ + "▁favorable", + -12.163036346435547 + ], + [ + "▁unwanted", + -12.163047790527344 + ], + [ + "▁handmade", + -12.163065910339355 + ], + [ + "▁Regi", + -12.163102149963379 + ], + [ + "safe", 
+ -12.163134574890137 + ], + [ + "persoanele", + -12.163202285766602 + ], + [ + "▁destinat", + -12.163252830505371 + ], + [ + "▁Maxi", + -12.163299560546875 + ], + [ + "▁salmon", + -12.163454055786133 + ], + [ + "wag", + -12.163578033447266 + ], + [ + "210", + -12.163769721984863 + ], + [ + "▁warned", + -12.163865089416504 + ], + [ + "läuft", + -12.16386604309082 + ], + [ + "agging", + -12.163931846618652 + ], + [ + "▁responsabil", + -12.16398811340332 + ], + [ + "▁presse", + -12.164271354675293 + ], + [ + "▁amis", + -12.164305686950684 + ], + [ + "▁rolls", + -12.164377212524414 + ], + [ + "control", + -12.164405822753906 + ], + [ + "▁Manufacturer", + -12.164422988891602 + ], + [ + "hnen", + -12.164449691772461 + ], + [ + "▁buget", + -12.164546012878418 + ], + [ + "OW", + -12.16467571258545 + ], + [ + "etro", + -12.164745330810547 + ], + [ + "▁communauté", + -12.164837837219238 + ], + [ + "unci", + -12.164944648742676 + ], + [ + "▁Chine", + -12.164952278137207 + ], + [ + "combines", + -12.16501235961914 + ], + [ + "▁learners", + -12.165046691894531 + ], + [ + "STE", + -12.165055274963379 + ], + [ + "ckel", + -12.16511344909668 + ], + [ + "Service", + -12.165169715881348 + ], + [ + "▁veröffentlicht", + -12.165209770202637 + ], + [ + "besides", + -12.165266036987305 + ], + [ + "getragen", + -12.165349960327148 + ], + [ + "▁opponent", + -12.165521621704102 + ], + [ + "▁volum", + -12.165533065795898 + ], + [ + "▁confusing", + -12.165802001953125 + ], + [ + "invasive", + -12.165813446044922 + ], + [ + "▁conseils", + -12.165881156921387 + ], + [ + "▁vibe", + -12.165928840637207 + ], + [ + "View", + -12.166062355041504 + ], + [ + "oară", + -12.166086196899414 + ], + [ + "Link", + -12.166261672973633 + ], + [ + "▁holy", + -12.166261672973633 + ], + [ + "▁crema", + -12.16629409790039 + ], + [ + "▁Michelle", + -12.166303634643555 + ], + [ + "▁Wien", + -12.166383743286133 + ], + [ + "▁undertake", + -12.166404724121094 + ], + [ + "▁Photograph", + -12.166421890258789 + ], + [ + "humain", + -12.16645336151123 + ], + [ + "▁Hang", + -12.166545867919922 + ], + [ + "designed", + -12.16657829284668 + ], + [ + "▁analyses", + -12.166614532470703 + ], + [ + "▁compose", + -12.166653633117676 + ], + [ + "▁substantially", + -12.166765213012695 + ], + [ + "▁marking", + -12.166772842407227 + ], + [ + "▁campagne", + -12.166826248168945 + ], + [ + "▁$15", + -12.166828155517578 + ], + [ + "pharma", + -12.166972160339355 + ], + [ + "▁playoff", + -12.1669921875 + ], + [ + "▁momentum", + -12.167091369628906 + ], + [ + "Temp", + -12.16714096069336 + ], + [ + "▁vinegar", + -12.167143821716309 + ], + [ + "▁descriptions", + -12.167581558227539 + ], + [ + "christ", + -12.167656898498535 + ], + [ + "wore", + -12.16773509979248 + ], + [ + "ITY", + -12.167768478393555 + ], + [ + "stehen", + -12.167771339416504 + ], + [ + "▁insulation", + -12.1677827835083 + ], + [ + "grav", + -12.167842864990234 + ], + [ + "2.2", + -12.167887687683105 + ], + [ + "▁Explore", + -12.168028831481934 + ], + [ + "▁dye", + -12.168127059936523 + ], + [ + "stair", + -12.168155670166016 + ], + [ + "artisan", + -12.168207168579102 + ], + [ + "▁zoom", + -12.168285369873047 + ], + [ + "▁turkey", + -12.168573379516602 + ], + [ + "▁locksmith", + -12.168577194213867 + ], + [ + "▁sewing", + -12.168610572814941 + ], + [ + "▁modeling", + -12.168627738952637 + ], + [ + "lied", + -12.16870403289795 + ], + [ + "adel", + -12.168773651123047 + ], + [ + "▁Going", + -12.168785095214844 + ], + [ + "WH", + -12.168798446655273 + ], + [ + "▁deserves", + -12.168919563293457 + 
], + [ + "▁arriving", + -12.168960571289062 + ], + [ + "OFF", + -12.169039726257324 + ], + [ + "torului", + -12.169109344482422 + ], + [ + "ucked", + -12.16921615600586 + ], + [ + "▁approached", + -12.169351577758789 + ], + [ + "▁élevé", + -12.169354438781738 + ], + [ + "▁quotidien", + -12.169416427612305 + ], + [ + "▁derzeit", + -12.16942024230957 + ], + [ + "nutzt", + -12.169656753540039 + ], + [ + "science", + -12.169729232788086 + ], + [ + "▁Emma", + -12.169841766357422 + ], + [ + "▁builds", + -12.169879913330078 + ], + [ + "▁Logo", + -12.169949531555176 + ], + [ + "▁clouds", + -12.170061111450195 + ], + [ + "inflammatory", + -12.170141220092773 + ], + [ + "țiuni", + -12.170199394226074 + ], + [ + "▁Cisco", + -12.17025089263916 + ], + [ + "▁würden", + -12.170254707336426 + ], + [ + "▁Shaw", + -12.170256614685059 + ], + [ + "▁Ell", + -12.170266151428223 + ], + [ + "avance", + -12.1703519821167 + ], + [ + "anglais", + -12.170365333557129 + ], + [ + "weil", + -12.170368194580078 + ], + [ + "▁singura", + -12.170464515686035 + ], + [ + "ACK", + -12.170489311218262 + ], + [ + "likewise", + -12.170522689819336 + ], + [ + "ographie", + -12.170646667480469 + ], + [ + "liegen", + -12.17088508605957 + ], + [ + "▁Crow", + -12.170964241027832 + ], + [ + "▁unic", + -12.171187400817871 + ], + [ + "▁Ale", + -12.171241760253906 + ], + [ + "▁păstr", + -12.17125129699707 + ], + [ + "▁informal", + -12.171337127685547 + ], + [ + "650", + -12.17136287689209 + ], + [ + "Benz", + -12.171489715576172 + ], + [ + "▁antenna", + -12.171540260314941 + ], + [ + "▁pagini", + -12.171552658081055 + ], + [ + "▁lansat", + -12.171561241149902 + ], + [ + "▁Fans", + -12.171576499938965 + ], + [ + "taine", + -12.171822547912598 + ], + [ + "JO", + -12.171853065490723 + ], + [ + "▁Tips", + -12.172091484069824 + ], + [ + "cir", + -12.172130584716797 + ], + [ + "nou", + -12.172384262084961 + ], + [ + "▁planted", + -12.17241382598877 + ], + [ + "▁steering", + -12.172423362731934 + ], + [ + "▁Waren", + -12.172475814819336 + ], + [ + "▁clearance", + -12.172515869140625 + ], + [ + "▁Moscow", + -12.172516822814941 + ], + [ + "▁Faith", + -12.172534942626953 + ], + [ + "▁Pizza", + -12.172572135925293 + ], + [ + "▁Tank", + -12.17273998260498 + ], + [ + "QUE", + -12.172783851623535 + ], + [ + "▁studii", + -12.172804832458496 + ], + [ + "éné", + -12.172829627990723 + ], + [ + "▁guerre", + -12.1728515625 + ], + [ + "▁celebr", + -12.173083305358887 + ], + [ + "▁Factory", + -12.173111915588379 + ], + [ + "▁Browse", + -12.173198699951172 + ], + [ + "▁Request", + -12.17323112487793 + ], + [ + "▁taxpayer", + -12.173311233520508 + ], + [ + "▁assert", + -12.173562049865723 + ], + [ + "unternehmen", + -12.173588752746582 + ], + [ + "▁Ergebnis", + -12.173687934875488 + ], + [ + "▁Antwort", + -12.173727035522461 + ], + [ + "▁Photography", + -12.173808097839355 + ], + [ + "▁plă", + -12.173866271972656 + ], + [ + "IME", + -12.173982620239258 + ], + [ + "▁prochaine", + -12.174074172973633 + ], + [ + "ajouter", + -12.174103736877441 + ], + [ + "▁buffet", + -12.174227714538574 + ], + [ + "▁pixels", + -12.174239158630371 + ], + [ + "▁pledge", + -12.174250602722168 + ], + [ + "▁Inhalt", + -12.17435359954834 + ], + [ + "▁chase", + -12.174384117126465 + ], + [ + "Flow", + -12.174493789672852 + ], + [ + "▁melodi", + -12.174872398376465 + ], + [ + "▁Abu", + -12.174991607666016 + ], + [ + "▁1979", + -12.175042152404785 + ], + [ + "▁Photos", + -12.175042152404785 + ], + [ + "▁qualifications", + -12.175148963928223 + ], + [ + "▁zis", + -12.175213813781738 + ], + 
[ + "IAL", + -12.175354957580566 + ], + [ + "▁lender", + -12.175390243530273 + ], + [ + "▁indiferent", + -12.175494194030762 + ], + [ + "▁behaviors", + -12.175506591796875 + ], + [ + "▁flowing", + -12.175531387329102 + ], + [ + "▁zweite", + -12.1756010055542 + ], + [ + "abl", + -12.175765037536621 + ], + [ + "Schw", + -12.176004409790039 + ], + [ + "opi", + -12.176030158996582 + ], + [ + "ggi", + -12.176164627075195 + ], + [ + "▁depart", + -12.176314353942871 + ], + [ + "▁garde", + -12.17640209197998 + ], + [ + "▁tuition", + -12.176490783691406 + ], + [ + "fälle", + -12.17650032043457 + ], + [ + "▁determina", + -12.17652702331543 + ], + [ + "▁spice", + -12.176627159118652 + ], + [ + "▁petites", + -12.176777839660645 + ], + [ + "kot", + -12.176973342895508 + ], + [ + "▁intersection", + -12.177242279052734 + ], + [ + "hak", + -12.177248001098633 + ], + [ + "▁autumn", + -12.177284240722656 + ], + [ + "▁verbunden", + -12.177284240722656 + ], + [ + "▁ferme", + -12.177287101745605 + ], + [ + "PN", + -12.17733097076416 + ], + [ + "▁insurer", + -12.177390098571777 + ], + [ + "arten", + -12.177401542663574 + ], + [ + "▁Turkish", + -12.177715301513672 + ], + [ + "▁shoulders", + -12.177732467651367 + ], + [ + "=>", + -12.177742004394531 + ], + [ + "▁Nike", + -12.177760124206543 + ], + [ + "uire", + -12.177763938903809 + ], + [ + "▁Chile", + -12.177811622619629 + ], + [ + "jon", + -12.177842140197754 + ], + [ + "▁fragrance", + -12.177884101867676 + ], + [ + "▁bean", + -12.177908897399902 + ], + [ + "ips", + -12.178108215332031 + ], + [ + "assuming", + -12.178191184997559 + ], + [ + "liens", + -12.178215026855469 + ], + [ + "tocmai", + -12.178267478942871 + ], + [ + "▁60%", + -12.178301811218262 + ], + [ + "ipped", + -12.178384780883789 + ], + [ + "DIS", + -12.178473472595215 + ], + [ + "▁predicted", + -12.178537368774414 + ], + [ + "▁Picture", + -12.178555488586426 + ], + [ + "Bahn", + -12.178796768188477 + ], + [ + "104", + -12.178854942321777 + ], + [ + "tended", + -12.178958892822266 + ], + [ + "▁approve", + -12.179031372070312 + ], + [ + "▁magasin", + -12.17908000946045 + ], + [ + "▁mindset", + -12.179208755493164 + ], + [ + "rase", + -12.179363250732422 + ], + [ + "grand", + -12.179469108581543 + ], + [ + "▁Principal", + -12.17947769165039 + ], + [ + "▁informații", + -12.17959976196289 + ], + [ + "▁legătur", + -12.179628372192383 + ], + [ + "▁Farb", + -12.179692268371582 + ], + [ + "▁Dieu", + -12.179710388183594 + ], + [ + "▁alliance", + -12.180378913879395 + ], + [ + "weiligen", + -12.180397987365723 + ], + [ + "▁Câ", + -12.18048095703125 + ], + [ + "▁counseling", + -12.180521011352539 + ], + [ + "▁traveled", + -12.180533409118652 + ], + [ + "▁translated", + -12.180558204650879 + ], + [ + "▁carne", + -12.180679321289062 + ], + [ + "aked", + -12.180707931518555 + ], + [ + "▁LCD", + -12.180868148803711 + ], + [ + "▁Folge", + -12.180909156799316 + ], + [ + "▁Erfahrungen", + -12.18093204498291 + ], + [ + "▁1981", + -12.18106460571289 + ], + [ + "▁răspuns", + -12.181075096130371 + ], + [ + "itori", + -12.18117618560791 + ], + [ + "▁elementary", + -12.181200981140137 + ], + [ + "▁vorbei", + -12.18127727508545 + ], + [ + "▁cargo", + -12.181361198425293 + ], + [ + "disciplinary", + -12.18140983581543 + ], + [ + "WR", + -12.181492805480957 + ], + [ + "▁counterpart", + -12.18162727355957 + ], + [ + "family", + -12.181641578674316 + ], + [ + "▁viață", + -12.181644439697266 + ], + [ + "▁Definition", + -12.18167495727539 + ], + [ + "▁Cow", + -12.18171501159668 + ], + [ + "fällig", + -12.182003021240234 + ], 
+ [ + "▁Sicht", + -12.182025909423828 + ], + [ + "▁mum", + -12.182145118713379 + ], + [ + "▁Mediterranean", + -12.182275772094727 + ], + [ + "nev", + -12.182278633117676 + ], + [ + "bü", + -12.182293891906738 + ], + [ + "▁slave", + -12.182293891906738 + ], + [ + "schnitt", + -12.18233871459961 + ], + [ + "▁firme", + -12.182430267333984 + ], + [ + "▁spill", + -12.182454109191895 + ], + [ + "▁wages", + -12.182592391967773 + ], + [ + "▁refine", + -12.182615280151367 + ], + [ + "▁upgraded", + -12.182632446289062 + ], + [ + "▁gospel", + -12.182698249816895 + ], + [ + "▁quartier", + -12.182744979858398 + ], + [ + "▁#2", + -12.182772636413574 + ], + [ + "▁Situation", + -12.18298625946045 + ], + [ + "▁suggesting", + -12.183075904846191 + ], + [ + "▁acne", + -12.183113098144531 + ], + [ + "▁Murray", + -12.183337211608887 + ], + [ + "▁Ian", + -12.183469772338867 + ], + [ + "hören", + -12.183489799499512 + ], + [ + "bia", + -12.183603286743164 + ], + [ + "▁Bewegung", + -12.183684349060059 + ], + [ + "▁abzu", + -12.18379020690918 + ], + [ + "reveals", + -12.183795928955078 + ], + [ + "friend", + -12.184025764465332 + ], + [ + "▁Connecticut", + -12.18407917022705 + ], + [ + "▁Testament", + -12.184151649475098 + ], + [ + "▁Lit", + -12.184199333190918 + ], + [ + "▁Ship", + -12.184209823608398 + ], + [ + "▁minunat", + -12.184344291687012 + ], + [ + "▁Moving", + -12.184346199035645 + ], + [ + "▁Device", + -12.184486389160156 + ], + [ + "▁Bake", + -12.18453598022461 + ], + [ + "▁qualification", + -12.184633255004883 + ], + [ + "▁challenged", + -12.184640884399414 + ], + [ + "▁Hinweis", + -12.184721946716309 + ], + [ + "▁sechs", + -12.184769630432129 + ], + [ + "та", + -12.184903144836426 + ], + [ + "120", + -12.184904098510742 + ], + [ + "licht", + -12.184940338134766 + ], + [ + "▁supervision", + -12.185022354125977 + ], + [ + "▁milestone", + -12.18503189086914 + ], + [ + "zeig", + -12.185050964355469 + ], + [ + "▁emphasize", + -12.185224533081055 + ], + [ + "▁complain", + -12.185232162475586 + ], + [ + "sack", + -12.185341835021973 + ], + [ + "▁rebuild", + -12.185445785522461 + ], + [ + "projekt", + -12.18548583984375 + ], + [ + "▁saint", + -12.185644149780273 + ], + [ + "lette", + -12.185752868652344 + ], + [ + "rade", + -12.18580150604248 + ], + [ + "▁pacient", + -12.185893058776855 + ], + [ + "signed", + -12.186169624328613 + ], + [ + "▁mil", + -12.186261177062988 + ], + [ + "cali", + -12.186266899108887 + ], + [ + "▁brochure", + -12.186487197875977 + ], + [ + "▁Bulgaria", + -12.186488151550293 + ], + [ + "Har", + -12.186623573303223 + ], + [ + "DH", + -12.186697006225586 + ], + [ + "▁jumping", + -12.186712265014648 + ], + [ + "ären", + -12.186732292175293 + ], + [ + "▁tactics", + -12.186911582946777 + ], + [ + "▁soleil", + -12.187030792236328 + ], + [ + "lessness", + -12.18705940246582 + ], + [ + "steigen", + -12.187085151672363 + ], + [ + "▁Brief", + -12.187117576599121 + ], + [ + "▁Oz", + -12.18718433380127 + ], + [ + "credit", + -12.187239646911621 + ], + [ + "glass", + -12.187241554260254 + ], + [ + "▁Baltimore", + -12.187292098999023 + ], + [ + "varies", + -12.187445640563965 + ], + [ + "sourced", + -12.187575340270996 + ], + [ + "▁documented", + -12.187604904174805 + ], + [ + "▁devine", + -12.187664985656738 + ], + [ + "möglichst", + -12.187732696533203 + ], + [ + "▁früher", + -12.187756538391113 + ], + [ + "outefois", + -12.18790054321289 + ], + [ + "▁Engagement", + -12.187934875488281 + ], + [ + "▁anumit", + -12.18806266784668 + ], + [ + "▁1930", + -12.188186645507812 + ], + [ + "▁Aufgaben", + 
-12.188214302062988 + ], + [ + "▁lineup", + -12.188227653503418 + ], + [ + "▁Cad", + -12.188349723815918 + ], + [ + "améliorer", + -12.188437461853027 + ], + [ + "▁februarie", + -12.188499450683594 + ], + [ + "▁cancellation", + -12.188529968261719 + ], + [ + "▁locks", + -12.188577651977539 + ], + [ + "▁modèles", + -12.188711166381836 + ], + [ + "▁breakdown", + -12.188748359680176 + ], + [ + "Ticket", + -12.188810348510742 + ], + [ + "▁Chen", + -12.188855171203613 + ], + [ + "▁Competition", + -12.188910484313965 + ], + [ + "▁median", + -12.18896770477295 + ], + [ + "rische", + -12.189159393310547 + ], + [ + "▁multipli", + -12.189269065856934 + ], + [ + "▁Belgium", + -12.189305305480957 + ], + [ + "▁Physical", + -12.189308166503906 + ], + [ + "▁parameter", + -12.189432144165039 + ], + [ + "▁carrot", + -12.189435005187988 + ], + [ + "▁mandat", + -12.189617156982422 + ], + [ + "▁towel", + -12.189697265625 + ], + [ + "▁insured", + -12.189825057983398 + ], + [ + "PRI", + -12.189868927001953 + ], + [ + "etter", + -12.189915657043457 + ], + [ + "▁Oder", + -12.190083503723145 + ], + [ + "argued", + -12.190171241760254 + ], + [ + "FB", + -12.190196990966797 + ], + [ + "versicherung", + -12.190197944641113 + ], + [ + "abila", + -12.190251350402832 + ], + [ + "▁Coin", + -12.190324783325195 + ], + [ + "around", + -12.19050121307373 + ], + [ + "▁Lorsqu", + -12.190773963928223 + ], + [ + "valent", + -12.190918922424316 + ], + [ + "▁weltweit", + -12.19092082977295 + ], + [ + "Mod", + -12.191039085388184 + ], + [ + "▁defect", + -12.191044807434082 + ], + [ + "ibly", + -12.191136360168457 + ], + [ + "▁Juan", + -12.191153526306152 + ], + [ + "▁Jur", + -12.191171646118164 + ], + [ + "large", + -12.191307067871094 + ], + [ + "▁indicators", + -12.191461563110352 + ], + [ + "invest", + -12.19168472290039 + ], + [ + "▁rehabilitation", + -12.191705703735352 + ], + [ + "nag", + -12.191823959350586 + ], + [ + "▁Grundlage", + -12.191829681396484 + ], + [ + "▁Strategy", + -12.192131042480469 + ], + [ + "▁supérieur", + -12.192173957824707 + ], + [ + "▁orbit", + -12.192281723022461 + ], + [ + "▁Auftrag", + -12.192360877990723 + ], + [ + "▁Verb", + -12.192441940307617 + ], + [ + "ANA", + -12.19256591796875 + ], + [ + "▁trimis", + -12.192611694335938 + ], + [ + "▁Rub", + -12.192704200744629 + ], + [ + "institu", + -12.192732810974121 + ], + [ + "▁inspect", + -12.1927490234375 + ], + [ + "▁Princess", + -12.192757606506348 + ], + [ + "especially", + -12.192777633666992 + ], + [ + "▁combinations", + -12.192793846130371 + ], + [ + "▁gaze", + -12.192842483520508 + ], + [ + "elemente", + -12.192970275878906 + ], + [ + "deal", + -12.192980766296387 + ], + [ + "polis", + -12.193157196044922 + ], + [ + "shaw", + -12.193168640136719 + ], + [ + "▁Republicans", + -12.193203926086426 + ], + [ + "aded", + -12.193244934082031 + ], + [ + "▁Louisiana", + -12.193364143371582 + ], + [ + "▁Ville", + -12.193368911743164 + ], + [ + "▁afterwards", + -12.193389892578125 + ], + [ + "ONG", + -12.193608283996582 + ], + [ + "▁dryer", + -12.193636894226074 + ], + [ + "▁Manhattan", + -12.19374942779541 + ], + [ + "▁recomanda", + -12.19412612915039 + ], + [ + "▁juca", + -12.194253921508789 + ], + [ + "▁Crown", + -12.194260597229004 + ], + [ + "▁flesh", + -12.194347381591797 + ], + [ + "sichtig", + -12.194358825683594 + ], + [ + "▁rempli", + -12.19437026977539 + ], + [ + "▁deposits", + -12.19438362121582 + ], + [ + "▁Voll", + -12.194599151611328 + ], + [ + "▁analysts", + -12.194672584533691 + ], + [ + "▁Krieg", + -12.19484806060791 + ], + [ + "▁Rosa", + 
-12.19495964050293 + ], + [ + "▁Supply", + -12.194964408874512 + ], + [ + "GF", + -12.19497013092041 + ], + [ + "idad", + -12.195098876953125 + ], + [ + "▁flush", + -12.195103645324707 + ], + [ + "▁circular", + -12.195355415344238 + ], + [ + "▁național", + -12.195379257202148 + ], + [ + "▁lorsqu", + -12.195441246032715 + ], + [ + "▁analyst", + -12.195459365844727 + ], + [ + "▁Jahrhundert", + -12.195586204528809 + ], + [ + "▁biology", + -12.195713996887207 + ], + [ + "copy", + -12.195733070373535 + ], + [ + "▁bringt", + -12.195765495300293 + ], + [ + "▁Gospel", + -12.195780754089355 + ], + [ + "▁sorgen", + -12.195842742919922 + ], + [ + "zeichnung", + -12.196181297302246 + ], + [ + "chair", + -12.196197509765625 + ], + [ + "EB", + -12.19636344909668 + ], + [ + "▁Beth", + -12.1964111328125 + ], + [ + "115", + -12.196416854858398 + ], + [ + "▁Neue", + -12.196479797363281 + ], + [ + "▁faible", + -12.196599960327148 + ], + [ + "▁methodology", + -12.196603775024414 + ], + [ + "spiele", + -12.196647644042969 + ], + [ + "▁cherry", + -12.196727752685547 + ], + [ + "▁Mak", + -12.196802139282227 + ], + [ + "▁volet", + -12.196982383728027 + ], + [ + "funk", + -12.197196006774902 + ], + [ + "▁aktuelle", + -12.197372436523438 + ], + [ + "▁Yahoo", + -12.197408676147461 + ], + [ + "▁Zusammenarbeit", + -12.197669982910156 + ], + [ + "▁Serve", + -12.197754859924316 + ], + [ + "▁simpler", + -12.197978019714355 + ], + [ + "intégr", + -12.197990417480469 + ], + [ + "ndlich", + -12.198083877563477 + ], + [ + "▁actress", + -12.198320388793945 + ], + [ + "▁reuse", + -12.198332786560059 + ], + [ + "▁reviewing", + -12.198405265808105 + ], + [ + "statt", + -12.198457717895508 + ], + [ + "▁diving", + -12.198469161987305 + ], + [ + "▁Național", + -12.198677062988281 + ], + [ + "voi", + -12.19873332977295 + ], + [ + "Disc", + -12.198812484741211 + ], + [ + "▁Mineral", + -12.19886302947998 + ], + [ + "▁emit", + -12.199007034301758 + ], + [ + "witz", + -12.199078559875488 + ], + [ + "▁forgot", + -12.19909954071045 + ], + [ + "▁dim", + -12.199115753173828 + ], + [ + "upper", + -12.19947624206543 + ], + [ + "sichtlich", + -12.19949722290039 + ], + [ + "▁parcours", + -12.199670791625977 + ], + [ + "8:00", + -12.199697494506836 + ], + [ + "▁keyword", + -12.199701309204102 + ], + [ + "▁upgrades", + -12.199763298034668 + ], + [ + "kunden", + -12.200177192687988 + ], + [ + "▁Seg", + -12.200257301330566 + ], + [ + "▁Circle", + -12.200289726257324 + ], + [ + "▁ginger", + -12.200336456298828 + ], + [ + "mment", + -12.200516700744629 + ], + [ + "▁expenditure", + -12.200655937194824 + ], + [ + "▁parle", + -12.200693130493164 + ], + [ + "▁Counsel", + -12.200722694396973 + ], + [ + "▁Gui", + -12.200722694396973 + ], + [ + "resident", + -12.20103645324707 + ], + [ + "▁benchmark", + -12.20103931427002 + ], + [ + "▁Elektro", + -12.201064109802246 + ], + [ + "▁réalité", + -12.201064109802246 + ], + [ + "▁ridiculous", + -12.201067924499512 + ], + [ + "▁necklace", + -12.20108699798584 + ], + [ + "nian", + -12.201117515563965 + ], + [ + "▁Move", + -12.20113468170166 + ], + [ + "▁elevated", + -12.201204299926758 + ], + [ + "WE", + -12.201281547546387 + ], + [ + "▁Drum", + -12.20132064819336 + ], + [ + "▁Delivery", + -12.201350212097168 + ], + [ + "indicating", + -12.201452255249023 + ], + [ + "▁Benjamin", + -12.201472282409668 + ], + [ + "▁Samuel", + -12.2014741897583 + ], + [ + "bene", + -12.201666831970215 + ], + [ + "▁experienta", + -12.201676368713379 + ], + [ + "▁rocket", + -12.201839447021484 + ], + [ + "▁fossil", + -12.201883316040039 
+ ], + [ + "▁festive", + -12.20193099975586 + ], + [ + "▁conscience", + -12.201964378356934 + ], + [ + "▁bacon", + -12.202136993408203 + ], + [ + "▁aero", + -12.202159881591797 + ], + [ + "public", + -12.202187538146973 + ], + [ + "▁zic", + -12.202218055725098 + ], + [ + "ombre", + -12.202356338500977 + ], + [ + "▁Drain", + -12.202550888061523 + ], + [ + "7.5", + -12.202672004699707 + ], + [ + "▁Deutschen", + -12.202703475952148 + ], + [ + "reportedly", + -12.202754974365234 + ], + [ + "▁Français", + -12.203105926513672 + ], + [ + "▁enzyme", + -12.203106880187988 + ], + [ + "▁inquiry", + -12.203117370605469 + ], + [ + "▁presque", + -12.203193664550781 + ], + [ + "▁Airlines", + -12.203228950500488 + ], + [ + "▁Salon", + -12.203237533569336 + ], + [ + "▁Volunteer", + -12.203310012817383 + ], + [ + "▁modular", + -12.203349113464355 + ], + [ + "ón", + -12.203364372253418 + ], + [ + "NH", + -12.203449249267578 + ], + [ + "▁souhaite", + -12.203516960144043 + ], + [ + "social", + -12.203659057617188 + ], + [ + "▁Include", + -12.203729629516602 + ], + [ + "▁Decor", + -12.2037992477417 + ], + [ + "dded", + -12.203965187072754 + ], + [ + "▁Außen", + -12.203969955444336 + ], + [ + "rendu", + -12.20412540435791 + ], + [ + "▁MBA", + -12.204150199890137 + ], + [ + "▁columns", + -12.204155921936035 + ], + [ + "▁Wing", + -12.204436302185059 + ], + [ + "▁landmark", + -12.204442977905273 + ], + [ + "schritt", + -12.204594612121582 + ], + [ + "▁désir", + -12.204630851745605 + ], + [ + "(5)", + -12.204680442810059 + ], + [ + "▁réseaux", + -12.204693794250488 + ], + [ + "income", + -12.204710960388184 + ], + [ + "▁revised", + -12.204819679260254 + ], + [ + "HY", + -12.204863548278809 + ], + [ + "▁Explorer", + -12.204873085021973 + ], + [ + "▁Lam", + -12.204877853393555 + ], + [ + "▁almond", + -12.204910278320312 + ], + [ + "▁faux", + -12.204910278320312 + ], + [ + "opt", + -12.204923629760742 + ], + [ + "Out", + -12.204939842224121 + ], + [ + "▁virtue", + -12.205025672912598 + ], + [ + "▁Chocolate", + -12.205151557922363 + ], + [ + "▁spannend", + -12.205305099487305 + ], + [ + "▁spices", + -12.205327033996582 + ], + [ + "▁Climate", + -12.205560684204102 + ], + [ + "▁Residential", + -12.205560684204102 + ], + [ + "gung", + -12.205700874328613 + ], + [ + "▁filtr", + -12.20606803894043 + ], + [ + "circ", + -12.206123352050781 + ], + [ + "sisted", + -12.206172943115234 + ], + [ + "▁dedicat", + -12.206243515014648 + ], + [ + "▁foil", + -12.206387519836426 + ], + [ + "▁uita", + -12.206392288208008 + ], + [ + "▁lié", + -12.206402778625488 + ], + [ + "▁Demo", + -12.206409454345703 + ], + [ + "▁spoil", + -12.2064208984375 + ], + [ + "Cu", + -12.206448554992676 + ], + [ + "naut", + -12.206525802612305 + ], + [ + "▁configured", + -12.206535339355469 + ], + [ + "UK", + -12.206543922424316 + ], + [ + "▁disagree", + -12.20656967163086 + ], + [ + "Medic", + -12.206767082214355 + ], + [ + "cosm", + -12.207074165344238 + ], + [ + "Toute", + -12.207109451293945 + ], + [ + "▁beneficia", + -12.207170486450195 + ], + [ + "fassen", + -12.207327842712402 + ], + [ + "▁bail", + -12.207337379455566 + ], + [ + "igue", + -12.207439422607422 + ], + [ + "▁Mă", + -12.20744800567627 + ], + [ + "▁strips", + -12.20748519897461 + ], + [ + "▁Dritte", + -12.207537651062012 + ], + [ + "▁putere", + -12.207597732543945 + ], + [ + "Play", + -12.20763111114502 + ], + [ + "▁Samstag", + -12.207632064819336 + ], + [ + "▁households", + -12.207791328430176 + ], + [ + "▁persistent", + -12.207914352416992 + ], + [ + "uben", + -12.207942962646484 + ], + [ + 
"Web", + -12.20809555053711 + ], + [ + "▁scenery", + -12.20820140838623 + ], + [ + "▁défini", + -12.208257675170898 + ], + [ + "news", + -12.208337783813477 + ], + [ + "eira", + -12.208428382873535 + ], + [ + "▁Mumbai", + -12.208438873291016 + ], + [ + "▁Ward", + -12.208558082580566 + ], + [ + "▁ladder", + -12.2086181640625 + ], + [ + "▁plaque", + -12.208623886108398 + ], + [ + "nés", + -12.208639144897461 + ], + [ + "▁condamn", + -12.20864486694336 + ], + [ + "▁attribute", + -12.208687782287598 + ], + [ + "atti", + -12.20873737335205 + ], + [ + "▁Emily", + -12.208953857421875 + ], + [ + "▁pleine", + -12.20896053314209 + ], + [ + "▁automatisch", + -12.209004402160645 + ], + [ + "ifies", + -12.209052085876465 + ], + [ + "onna", + -12.209104537963867 + ], + [ + "▁inject", + -12.209157943725586 + ], + [ + "▁evolve", + -12.209297180175781 + ], + [ + "▁breeze", + -12.209299087524414 + ], + [ + "▁montre", + -12.209415435791016 + ], + [ + "▁memorial", + -12.209425926208496 + ], + [ + "ämlich", + -12.209465026855469 + ], + [ + "NBC", + -12.209589958190918 + ], + [ + "▁1940", + -12.209836959838867 + ], + [ + "▁trouvé", + -12.209892272949219 + ], + [ + "when", + -12.209914207458496 + ], + [ + "▁Büro", + -12.209959983825684 + ], + [ + "▁probability", + -12.209978103637695 + ], + [ + "cute", + -12.21006965637207 + ], + [ + "▁sturdy", + -12.210078239440918 + ], + [ + "AMP", + -12.210165023803711 + ], + [ + "▁Constantin", + -12.210283279418945 + ], + [ + "▁batter", + -12.21037483215332 + ], + [ + "▁bist", + -12.210470199584961 + ], + [ + "▁streams", + -12.210528373718262 + ], + [ + "rushing", + -12.21057415008545 + ], + [ + "▁shaft", + -12.21065902709961 + ], + [ + "▁proprii", + -12.210722923278809 + ], + [ + "émi", + -12.21074390411377 + ], + [ + "online", + -12.210817337036133 + ], + [ + "▁vanity", + -12.210870742797852 + ], + [ + "▁mural", + -12.210878372192383 + ], + [ + "▁distinguish", + -12.210905075073242 + ], + [ + "▁niciun", + -12.211191177368164 + ], + [ + "▁européenne", + -12.211252212524414 + ], + [ + "▁secretary", + -12.211289405822754 + ], + [ + "▁gaps", + -12.211492538452148 + ], + [ + "▁realm", + -12.211499214172363 + ], + [ + "▁elastic", + -12.211504936218262 + ], + [ + "▁Avoid", + -12.211519241333008 + ], + [ + "▁mauvais", + -12.211931228637695 + ], + [ + "▁innovations", + -12.212663650512695 + ], + [ + "▁suprem", + -12.212776184082031 + ], + [ + "▁vederea", + -12.212817192077637 + ], + [ + "wenden", + -12.212892532348633 + ], + [ + "-40", + -12.213075637817383 + ], + [ + "prenant", + -12.213155746459961 + ], + [ + "utilisateur", + -12.213210105895996 + ], + [ + "▁Oliver", + -12.213228225708008 + ], + [ + "111", + -12.21326732635498 + ], + [ + "▁manifestation", + -12.213382720947266 + ], + [ + "▁Rachel", + -12.213458061218262 + ], + [ + "agog", + -12.21348762512207 + ], + [ + "▁seamless", + -12.213534355163574 + ], + [ + "▁Employee", + -12.213576316833496 + ], + [ + "▁dimanche", + -12.213582038879395 + ], + [ + "▁banii", + -12.213631629943848 + ], + [ + "▁Ruth", + -12.213781356811523 + ], + [ + "▁Roy", + -12.21385383605957 + ], + [ + "▁homeless", + -12.2139253616333 + ], + [ + "▁Lower", + -12.213932037353516 + ], + [ + "health", + -12.21393871307373 + ], + [ + "▁atenti", + -12.2140474319458 + ], + [ + "▁touched", + -12.214183807373047 + ], + [ + "May", + -12.214195251464844 + ], + [ + "▁Buc", + -12.214225769042969 + ], + [ + "▁explored", + -12.214393615722656 + ], + [ + "▁declare", + -12.214461326599121 + ], + [ + "▁garment", + -12.214469909667969 + ], + [ + "▁buzz", + 
-12.214483261108398 + ], + [ + "▁rappel", + -12.214662551879883 + ], + [ + "▁uscat", + -12.214903831481934 + ], + [ + "▁Hyper", + -12.214914321899414 + ], + [ + "Etat", + -12.215007781982422 + ], + [ + "▁Titel", + -12.215035438537598 + ], + [ + "product", + -12.215191841125488 + ], + [ + "woman", + -12.215280532836914 + ], + [ + "▁Gab", + -12.215450286865234 + ], + [ + "▁advances", + -12.215615272521973 + ], + [ + "2/", + -12.215753555297852 + ], + [ + "prone", + -12.215770721435547 + ], + [ + "kö", + -12.215986251831055 + ], + [ + "▁counting", + -12.21599292755127 + ], + [ + "Sollte", + -12.216043472290039 + ], + [ + "▁Konzept", + -12.216063499450684 + ], + [ + "▁backgrounds", + -12.216153144836426 + ], + [ + "jährige", + -12.216154098510742 + ], + [ + "▁Alltag", + -12.216187477111816 + ], + [ + "▁metrics", + -12.21619701385498 + ], + [ + "▁illustrated", + -12.216222763061523 + ], + [ + "▁Charge", + -12.21631908416748 + ], + [ + "▁thoughtful", + -12.216423034667969 + ], + [ + "gesetz", + -12.216527938842773 + ], + [ + "pfen", + -12.216611862182617 + ], + [ + "▁déroul", + -12.216713905334473 + ], + [ + "▁checkout", + -12.216876029968262 + ], + [ + "quette", + -12.216936111450195 + ], + [ + "▁pierdut", + -12.2170991897583 + ], + [ + "▁Seat", + -12.217140197753906 + ], + [ + "▁linen", + -12.217193603515625 + ], + [ + "archiv", + -12.217245101928711 + ], + [ + "arna", + -12.217254638671875 + ], + [ + "importe", + -12.21742057800293 + ], + [ + "▁PHP", + -12.217496871948242 + ], + [ + "▁Parents", + -12.217503547668457 + ], + [ + "▁Birmingham", + -12.217513084411621 + ], + [ + "▁Integr", + -12.217588424682617 + ], + [ + "▁Mason", + -12.217607498168945 + ], + [ + "zieht", + -12.217781066894531 + ], + [ + "▁camps", + -12.217803001403809 + ], + [ + "OG", + -12.21786117553711 + ], + [ + "▁syrup", + -12.217927932739258 + ], + [ + "▁Cookies", + -12.217928886413574 + ], + [ + "▁Comfort", + -12.217955589294434 + ], + [ + "ută", + -12.217976570129395 + ], + [ + "abia", + -12.217979431152344 + ], + [ + "zeci", + -12.218003273010254 + ], + [ + "▁Gardens", + -12.218009948730469 + ], + [ + "▁incidents", + -12.218149185180664 + ], + [ + "▁participat", + -12.218235969543457 + ], + [ + "▁glimpse", + -12.218342781066895 + ], + [ + "5.5", + -12.218437194824219 + ], + [ + "▁dealers", + -12.218469619750977 + ], + [ + "▁Grande", + -12.218565940856934 + ], + [ + "▁raid", + -12.218944549560547 + ], + [ + "owing", + -12.21903133392334 + ], + [ + "▁contrary", + -12.219109535217285 + ], + [ + "Earlier", + -12.219138145446777 + ], + [ + "tien", + -12.21916389465332 + ], + [ + "drop", + -12.219169616699219 + ], + [ + "▁angajat", + -12.219359397888184 + ], + [ + "▁procesul", + -12.219515800476074 + ], + [ + "▁focal", + -12.219564437866211 + ], + [ + "▁impart", + -12.219703674316406 + ], + [ + "▁Abschluss", + -12.219749450683594 + ], + [ + "carui", + -12.219830513000488 + ], + [ + "insul", + -12.220277786254883 + ], + [ + "▁creamy", + -12.220283508300781 + ], + [ + "eille", + -12.22032356262207 + ], + [ + "suppl", + -12.220335960388184 + ], + [ + "▁Heaven", + -12.220471382141113 + ], + [ + "éna", + -12.220667839050293 + ], + [ + "▁swap", + -12.220739364624023 + ], + [ + "▁vreau", + -12.220762252807617 + ], + [ + "▁Bryan", + -12.220809936523438 + ], + [ + "▁Zug", + -12.220815658569336 + ], + [ + "▁glance", + -12.220848083496094 + ], + [ + "▁elimin", + -12.220900535583496 + ], + [ + "▁yeux", + -12.221084594726562 + ], + [ + "wehr", + -12.221238136291504 + ], + [ + "2.5", + -12.221287727355957 + ], + [ + "▁poses", + 
-12.221364974975586 + ], + [ + "▁parcel", + -12.221585273742676 + ], + [ + "▁Apartment", + -12.221749305725098 + ], + [ + "▁NASA", + -12.221768379211426 + ], + [ + "▁bénéfici", + -12.22187614440918 + ], + [ + "▁Umgebung", + -12.221890449523926 + ], + [ + "asia", + -12.221946716308594 + ], + [ + "abi", + -12.221967697143555 + ], + [ + "coup", + -12.222002983093262 + ], + [ + "synchron", + -12.222017288208008 + ], + [ + "▁Sicherheits", + -12.222029685974121 + ], + [ + "bic", + -12.222076416015625 + ], + [ + "▁distract", + -12.222148895263672 + ], + [ + "▁rentals", + -12.222163200378418 + ], + [ + "constru", + -12.222290992736816 + ], + [ + "curs", + -12.222345352172852 + ], + [ + "genannten", + -12.222386360168457 + ], + [ + "▁Shanghai", + -12.222501754760742 + ], + [ + "▁vague", + -12.222504615783691 + ], + [ + "▁Leather", + -12.22250747680664 + ], + [ + "▁Vintage", + -12.222532272338867 + ], + [ + "pointing", + -12.22259521484375 + ], + [ + "avant", + -12.22268295288086 + ], + [ + "gues", + -12.222949028015137 + ], + [ + "sweise", + -12.22302532196045 + ], + [ + "▁Greater", + -12.223065376281738 + ], + [ + "fig", + -12.22310733795166 + ], + [ + "▁Blut", + -12.223217964172363 + ], + [ + "▁Stellen", + -12.22326946258545 + ], + [ + "▁isolation", + -12.22337818145752 + ], + [ + "▁overhead", + -12.22338581085205 + ], + [ + "▁wondered", + -12.223508834838867 + ], + [ + "essai", + -12.223609924316406 + ], + [ + "aves", + -12.2236328125 + ], + [ + "▁Shore", + -12.223637580871582 + ], + [ + "▁INC", + -12.223709106445312 + ], + [ + "rufen", + -12.223980903625488 + ], + [ + "▁magnifique", + -12.224069595336914 + ], + [ + "▁intéressant", + -12.224072456359863 + ], + [ + "▁tanks", + -12.224075317382812 + ], + [ + "▁Tun", + -12.224367141723633 + ], + [ + "▁approaching", + -12.224390029907227 + ], + [ + "▁relay", + -12.224479675292969 + ], + [ + "▁Küche", + -12.224529266357422 + ], + [ + "describing", + -12.224587440490723 + ], + [ + "▁Certification", + -12.224588394165039 + ], + [ + "▁Breakfast", + -12.224597930908203 + ], + [ + "▁Frame", + -12.224891662597656 + ], + [ + "▁Stoff", + -12.224909782409668 + ], + [ + "▁victime", + -12.224924087524414 + ], + [ + "Observ", + -12.224943161010742 + ], + [ + "▁gutter", + -12.224989891052246 + ], + [ + "standard", + -12.225220680236816 + ], + [ + "▁Sci", + -12.225244522094727 + ], + [ + "▁sept", + -12.225377082824707 + ], + [ + "▁Potter", + -12.225423812866211 + ], + [ + "letter", + -12.22577953338623 + ], + [ + "▁tobacco", + -12.225852012634277 + ], + [ + "▁threatened", + -12.22591781616211 + ], + [ + "MW", + -12.225936889648438 + ], + [ + "▁Cher", + -12.225944519042969 + ], + [ + "0.1", + -12.225957870483398 + ], + [ + "mitted", + -12.22596263885498 + ], + [ + "zustellen", + -12.225967407226562 + ], + [ + "dominated", + -12.226165771484375 + ], + [ + "/16", + -12.22623348236084 + ], + [ + "POS", + -12.226317405700684 + ], + [ + "▁Zin", + -12.226373672485352 + ], + [ + "▁Okay", + -12.226381301879883 + ], + [ + "▁projected", + -12.226405143737793 + ], + [ + "▁selber", + -12.226548194885254 + ], + [ + "▁proiectului", + -12.2266206741333 + ], + [ + "▁Shell", + -12.226683616638184 + ], + [ + "▁cartridge", + -12.226706504821777 + ], + [ + "Message", + -12.2267484664917 + ], + [ + "haben", + -12.226799964904785 + ], + [ + "▁slides", + -12.226829528808594 + ], + [ + "▁gleichzeitig", + -12.226886749267578 + ], + [ + "▁Racing", + -12.227051734924316 + ], + [ + "▁20,", + -12.227070808410645 + ], + [ + "▁separat", + -12.227094650268555 + ], + [ + "▁repeatedly", + 
-12.227110862731934 + ], + [ + "▁casting", + -12.22728157043457 + ], + [ + "▁sacred", + -12.227283477783203 + ], + [ + "verfahren", + -12.227387428283691 + ], + [ + "▁echilibr", + -12.227514266967773 + ], + [ + "▁rebel", + -12.2277250289917 + ], + [ + "säu", + -12.227794647216797 + ], + [ + "ummy", + -12.227815628051758 + ], + [ + "▁backing", + -12.227889060974121 + ], + [ + "▁sponsors", + -12.227912902832031 + ], + [ + "▁Stress", + -12.22802448272705 + ], + [ + "▁Rules", + -12.228083610534668 + ], + [ + "▁render", + -12.228241920471191 + ], + [ + "▁funktioniert", + -12.228384971618652 + ], + [ + "▁Pearl", + -12.228472709655762 + ], + [ + "▁Scho", + -12.228527069091797 + ], + [ + "schwer", + -12.228595733642578 + ], + [ + "▁descoperit", + -12.228702545166016 + ], + [ + "holen", + -12.228720664978027 + ], + [ + "imposed", + -12.228960990905762 + ], + [ + "▁appearing", + -12.228968620300293 + ], + [ + "▁höher", + -12.229082107543945 + ], + [ + "▁Victorian", + -12.229111671447754 + ], + [ + "▁founding", + -12.229155540466309 + ], + [ + "▁Polish", + -12.229239463806152 + ], + [ + "▁anume", + -12.229248046875 + ], + [ + "Box", + -12.229488372802734 + ], + [ + "▁intrat", + -12.229598999023438 + ], + [ + "▁Inspiration", + -12.229610443115234 + ], + [ + "▁Canyon", + -12.229625701904297 + ], + [ + "▁Franklin", + -12.22974681854248 + ], + [ + "▁susceptible", + -12.22982120513916 + ], + [ + "trap", + -12.229839324951172 + ], + [ + "▁Roma", + -12.23000717163086 + ], + [ + "▁ethics", + -12.230009078979492 + ], + [ + "▁Privat", + -12.230027198791504 + ], + [ + "▁journalists", + -12.230090141296387 + ], + [ + "▁Universität", + -12.230246543884277 + ], + [ + "▁conditioner", + -12.230308532714844 + ], + [ + "folge", + -12.230327606201172 + ], + [ + "kirche", + -12.230416297912598 + ], + [ + "gehalten", + -12.230530738830566 + ], + [ + "midi", + -12.230570793151855 + ], + [ + "▁radar", + -12.230619430541992 + ], + [ + "▁Yard", + -12.230775833129883 + ], + [ + "▁professionnelle", + -12.230863571166992 + ], + [ + "▁Orchestra", + -12.230870246887207 + ], + [ + "▁immigrants", + -12.230870246887207 + ], + [ + "▁refined", + -12.230929374694824 + ], + [ + "▁Bishop", + -12.231036186218262 + ], + [ + "string", + -12.231095314025879 + ], + [ + "▁majoritatea", + -12.231231689453125 + ], + [ + "▁workflow", + -12.23123836517334 + ], + [ + "▁întreg", + -12.231306076049805 + ], + [ + "went", + -12.231563568115234 + ], + [ + "▁trat", + -12.231689453125 + ], + [ + "felul", + -12.23176383972168 + ], + [ + "▁hardwood", + -12.231821060180664 + ], + [ + "▁Task", + -12.231867790222168 + ], + [ + "branded", + -12.231921195983887 + ], + [ + "▁cinq", + -12.231966018676758 + ], + [ + "▁curb", + -12.232041358947754 + ], + [ + "▁Discount", + -12.232043266296387 + ], + [ + "▁Episode", + -12.232131958007812 + ], + [ + "▁Knowledge", + -12.232144355773926 + ], + [ + "▁tricky", + -12.232173919677734 + ], + [ + "▁characteristic", + -12.232233047485352 + ], + [ + "▁plata", + -12.23226261138916 + ], + [ + "▁Labour", + -12.23232650756836 + ], + [ + "▁Tha", + -12.232372283935547 + ], + [ + "▁Liefer", + -12.232430458068848 + ], + [ + "▁Reader", + -12.232471466064453 + ], + [ + "▁Linda", + -12.232521057128906 + ], + [ + "ittlerweile", + -12.232552528381348 + ], + [ + "defining", + -12.232564926147461 + ], + [ + "▁delayed", + -12.232635498046875 + ], + [ + "▁Bewertung", + -12.232674598693848 + ], + [ + "▁Unique", + -12.232791900634766 + ], + [ + "▁Champion", + -12.232866287231445 + ], + [ + "2008", + -12.232897758483887 + ], + [ + "▁conclu", + 
-12.232934951782227 + ], + [ + "▁câștig", + -12.2329740524292 + ], + [ + "▁scheduling", + -12.2329740524292 + ], + [ + "▁sailing", + -12.233116149902344 + ], + [ + "▁Storm", + -12.23318862915039 + ], + [ + "▁Stil", + -12.23320198059082 + ], + [ + "▁Album", + -12.233211517333984 + ], + [ + "▁ultime", + -12.233343124389648 + ], + [ + "url", + -12.233369827270508 + ], + [ + "▁terrific", + -12.23339557647705 + ], + [ + "▁remedy", + -12.233396530151367 + ], + [ + "▁Around", + -12.233592987060547 + ], + [ + "▁Kni", + -12.233756065368652 + ], + [ + "etty", + -12.23376750946045 + ], + [ + "Managing", + -12.233809471130371 + ], + [ + "▁Bedeutung", + -12.233816146850586 + ], + [ + "▁earthquake", + -12.233817100524902 + ], + [ + "▁Telefon", + -12.233818054199219 + ], + [ + "▁Upper", + -12.233869552612305 + ], + [ + "▁validation", + -12.233892440795898 + ], + [ + "-22", + -12.233997344970703 + ], + [ + "▁queue", + -12.23401165008545 + ], + [ + "tinde", + -12.234025001525879 + ], + [ + "built", + -12.234047889709473 + ], + [ + "▁voix", + -12.234125137329102 + ], + [ + "▁Resource", + -12.234126091003418 + ], + [ + "ţiuni", + -12.234143257141113 + ], + [ + "▁satisfying", + -12.234299659729004 + ], + [ + "▁Kohl", + -12.234441757202148 + ], + [ + "▁Materials", + -12.234618186950684 + ], + [ + "▁esp", + -12.234732627868652 + ], + [ + "enseignement", + -12.234773635864258 + ], + [ + "danach", + -12.234883308410645 + ], + [ + "peux", + -12.234932899475098 + ], + [ + "▁deployed", + -12.235113143920898 + ], + [ + "▁1976", + -12.235126495361328 + ], + [ + "ușor", + -12.235334396362305 + ], + [ + "élection", + -12.235380172729492 + ], + [ + "ettes", + -12.235437393188477 + ], + [ + "▁Madison", + -12.235506057739258 + ], + [ + "108", + -12.235685348510742 + ], + [ + "berger", + -12.235696792602539 + ], + [ + "▁pedal", + -12.235702514648438 + ], + [ + "▁quasi", + -12.235820770263672 + ], + [ + "▁lend", + -12.235843658447266 + ], + [ + "VER", + -12.235940933227539 + ], + [ + "▁chapters", + -12.236002922058105 + ], + [ + "▁idei", + -12.23600959777832 + ], + [ + "Deine", + -12.236034393310547 + ], + [ + "▁endure", + -12.236092567443848 + ], + [ + "▁Studios", + -12.236259460449219 + ], + [ + "structure", + -12.236274719238281 + ], + [ + "▁puiss", + -12.236370086669922 + ], + [ + "▁Morning", + -12.236443519592285 + ], + [ + "guide", + -12.236462593078613 + ], + [ + "▁Wave", + -12.236617088317871 + ], + [ + "▁banque", + -12.236879348754883 + ], + [ + "änd", + -12.236912727355957 + ], + [ + "oubli", + -12.237070083618164 + ], + [ + "▁mixer", + -12.237125396728516 + ], + [ + "▁remedi", + -12.237210273742676 + ], + [ + "▁scop", + -12.237421989440918 + ], + [ + "▁Rosen", + -12.237561225891113 + ], + [ + "▁spital", + -12.23773193359375 + ], + [ + "blau", + -12.237811088562012 + ], + [ + "▁financiar", + -12.237865447998047 + ], + [ + "avour", + -12.237871170043945 + ], + [ + "Def", + -12.238025665283203 + ], + [ + "▁socket", + -12.238076210021973 + ], + [ + "▁occurring", + -12.238360404968262 + ], + [ + "▁munci", + -12.238368034362793 + ], + [ + "▁realiza", + -12.238426208496094 + ], + [ + "▁beating", + -12.2384614944458 + ], + [ + "▁Phillip", + -12.238490104675293 + ], + [ + "▁courant", + -12.238509178161621 + ], + [ + "Auto", + -12.238608360290527 + ], + [ + "▁Lager", + -12.238685607910156 + ], + [ + "▁folos", + -12.238696098327637 + ], + [ + "▁moyens", + -12.238770484924316 + ], + [ + "▁Ec", + -12.238780975341797 + ], + [ + "▁Strip", + -12.238788604736328 + ], + [ + "sparen", + -12.238848686218262 + ], + [ + "▁Nintendo", + 
-12.238886833190918 + ], + [ + "▁Murphy", + -12.238912582397461 + ], + [ + "▁flux", + -12.239034652709961 + ], + [ + "▁mots", + -12.239034652709961 + ], + [ + "▁rechts", + -12.239045143127441 + ], + [ + "▁cardio", + -12.239142417907715 + ], + [ + "avoiding", + -12.239343643188477 + ], + [ + "érer", + -12.239453315734863 + ], + [ + "hiel", + -12.239461898803711 + ], + [ + "▁rezistent", + -12.239521980285645 + ], + [ + "close", + -12.23954963684082 + ], + [ + "hésitez", + -12.239596366882324 + ], + [ + "Hz", + -12.239631652832031 + ], + [ + "▁elaborate", + -12.239689826965332 + ], + [ + "▁permanently", + -12.239709854125977 + ], + [ + "▁Pittsburgh", + -12.239734649658203 + ], + [ + "▁counties", + -12.239819526672363 + ], + [ + "▁bookmark", + -12.239919662475586 + ], + [ + "▁Label", + -12.239965438842773 + ], + [ + "▁Freude", + -12.239974021911621 + ], + [ + "▁preferat", + -12.239986419677734 + ], + [ + "▁Mein", + -12.239995002746582 + ], + [ + "▁Crew", + -12.240218162536621 + ], + [ + "▁clips", + -12.240253448486328 + ], + [ + "8,000", + -12.240263938903809 + ], + [ + "▁recognise", + -12.240311622619629 + ], + [ + "ință", + -12.240365028381348 + ], + [ + "▁prieteni", + -12.240447044372559 + ], + [ + "Heute", + -12.240522384643555 + ], + [ + "ancienne", + -12.240534782409668 + ], + [ + "▁annoying", + -12.240583419799805 + ], + [ + "▁awful", + -12.240704536437988 + ], + [ + "▁Comments", + -12.240774154663086 + ], + [ + "▁musician", + -12.240830421447754 + ], + [ + "▁Elite", + -12.241023063659668 + ], + [ + "▁patri", + -12.241024017333984 + ], + [ + "▁Coupon", + -12.241037368774414 + ], + [ + "▁Farbe", + -12.241097450256348 + ], + [ + "▁contribui", + -12.241110801696777 + ], + [ + "hari", + -12.241294860839844 + ], + [ + "▁activitati", + -12.24161148071289 + ], + [ + "▁Traum", + -12.2416410446167 + ], + [ + "1.8", + -12.24170207977295 + ], + [ + "▁Healthcare", + -12.24172306060791 + ], + [ + "▁refresh", + -12.241943359375 + ], + [ + "▁Maha", + -12.242060661315918 + ], + [ + "▁dép", + -12.242082595825195 + ], + [ + "▁Studien", + -12.242314338684082 + ], + [ + "▁spectacol", + -12.242378234863281 + ], + [ + "impro", + -12.24254035949707 + ], + [ + "▁commentaire", + -12.242544174194336 + ], + [ + "ported", + -12.242570877075195 + ], + [ + "▁reclam", + -12.242612838745117 + ], + [ + "▁Verkauf", + -12.242634773254395 + ], + [ + "▁newspapers", + -12.242661476135254 + ], + [ + "▁iubit", + -12.242838859558105 + ], + [ + "▁Kenne", + -12.242844581604004 + ], + [ + "▁Consultant", + -12.242958068847656 + ], + [ + "▁stau", + -12.242986679077148 + ], + [ + "TON", + -12.243057250976562 + ], + [ + "▁Fehler", + -12.243070602416992 + ], + [ + "▁lettre", + -12.243167877197266 + ], + [ + "▁investigator", + -12.243172645568848 + ], + [ + "▁quantities", + -12.243184089660645 + ], + [ + "ogram", + -12.243208885192871 + ], + [ + "avaient", + -12.24323844909668 + ], + [ + "▁reducere", + -12.243265151977539 + ], + [ + "Lite", + -12.243402481079102 + ], + [ + "kurs", + -12.243443489074707 + ], + [ + "pré", + -12.24383544921875 + ], + [ + "pap", + -12.243898391723633 + ], + [ + "▁Männer", + -12.243983268737793 + ], + [ + "▁gauche", + -12.244022369384766 + ], + [ + "▁ähnlich", + -12.244027137756348 + ], + [ + "▁sunlight", + -12.244063377380371 + ], + [ + "▁rester", + -12.24422550201416 + ], + [ + "jumped", + -12.244586944580078 + ], + [ + "▁exclusiv", + -12.24463176727295 + ], + [ + "▁electoral", + -12.244640350341797 + ], + [ + "▁Portal", + -12.244650840759277 + ], + [ + "ulent", + -12.244688987731934 + ], + [ + "▁sonst", 
+ -12.24474048614502 + ], + [ + "entraîne", + -12.24483585357666 + ], + [ + "▁repas", + -12.244837760925293 + ], + [ + "▁redus", + -12.244858741760254 + ], + [ + "aku", + -12.244866371154785 + ], + [ + "▁Graphic", + -12.245251655578613 + ], + [ + "▁geringe", + -12.24539566040039 + ], + [ + "plätze", + -12.245474815368652 + ], + [ + "Trebuie", + -12.245479583740234 + ], + [ + "▁rezultate", + -12.245479583740234 + ], + [ + "▁configure", + -12.245683670043945 + ], + [ + "▁PV", + -12.245834350585938 + ], + [ + "▁insect", + -12.246109962463379 + ], + [ + "▁Reviews", + -12.246129035949707 + ], + [ + "releasing", + -12.246186256408691 + ], + [ + "▁appliance", + -12.246246337890625 + ], + [ + "▁oferte", + -12.246482849121094 + ], + [ + "▁WILL", + -12.246484756469727 + ], + [ + "rion", + -12.246499061584473 + ], + [ + "▁Cole", + -12.246582984924316 + ], + [ + "▁1975", + -12.246650695800781 + ], + [ + "Admin", + -12.24677848815918 + ], + [ + "▁parade", + -12.246800422668457 + ], + [ + "▁mélange", + -12.24692153930664 + ], + [ + "▁shortage", + -12.247007369995117 + ], + [ + "▁Measure", + -12.247400283813477 + ], + [ + "anchmal", + -12.24742603302002 + ], + [ + "▁transfers", + -12.247432708740234 + ], + [ + "▁sistemului", + -12.247573852539062 + ], + [ + "▁deschide", + -12.247819900512695 + ], + [ + "▁Künstler", + -12.247821807861328 + ], + [ + "▁Plain", + -12.247848510742188 + ], + [ + "▁messaging", + -12.247855186462402 + ], + [ + "▁metabolism", + -12.247879981994629 + ], + [ + "fill", + -12.248031616210938 + ], + [ + "▁Bomb", + -12.24814224243164 + ], + [ + "usine", + -12.248208045959473 + ], + [ + "▁restart", + -12.248233795166016 + ], + [ + "▁Discussion", + -12.248336791992188 + ], + [ + "smith", + -12.248472213745117 + ], + [ + "▁Bh", + -12.248607635498047 + ], + [ + "▁sap", + -12.248689651489258 + ], + [ + "Moo", + -12.248714447021484 + ], + [ + "▁indirect", + -12.248785972595215 + ], + [ + "▁eingesetzt", + -12.248863220214844 + ], + [ + "▁Hip", + -12.248870849609375 + ], + [ + "▁iulie", + -12.249113082885742 + ], + [ + "▁atac", + -12.249201774597168 + ], + [ + "▁passport", + -12.2492036819458 + ], + [ + "▁Egyptian", + -12.249290466308594 + ], + [ + "▁soluți", + -12.249349594116211 + ], + [ + "▁cakes", + -12.249356269836426 + ], + [ + "▁Fellow", + -12.24949836730957 + ], + [ + "▁collision", + -12.249533653259277 + ], + [ + "▁abundant", + -12.249961853027344 + ], + [ + "▁Wonder", + -12.24997329711914 + ], + [ + "▁theories", + -12.249991416931152 + ], + [ + "landed", + -12.250046730041504 + ], + [ + "▁meantime", + -12.2500638961792 + ], + [ + "schlüsse", + -12.25022029876709 + ], + [ + "▁helicopter", + -12.25039005279541 + ], + [ + "Voici", + -12.250479698181152 + ], + [ + "▁Honey", + -12.25049877166748 + ], + [ + "▁deleted", + -12.250511169433594 + ], + [ + "▁Projekte", + -12.250523567199707 + ], + [ + "▁gasi", + -12.2506742477417 + ], + [ + "applique", + -12.25068473815918 + ], + [ + "TAL", + -12.250699043273926 + ], + [ + "notch", + -12.250699996948242 + ], + [ + "▁Response", + -12.250818252563477 + ], + [ + "▁deveni", + -12.250818252563477 + ], + [ + "▁regulate", + -12.250829696655273 + ], + [ + "▁vegetarian", + -12.25083065032959 + ], + [ + "▁Pastor", + -12.250880241394043 + ], + [ + "▁Strong", + -12.250940322875977 + ], + [ + "▁élèves", + -12.251055717468262 + ], + [ + "▁alimente", + -12.25113582611084 + ], + [ + "graphy", + -12.251181602478027 + ], + [ + "▁spirits", + -12.251266479492188 + ], + [ + "▁Cau", + -12.251282691955566 + ], + [ + "determin", + -12.251304626464844 + ], + [ + 
"arilor", + -12.251382827758789 + ], + [ + "▁masura", + -12.251470565795898 + ], + [ + "RAN", + -12.251500129699707 + ], + [ + "marked", + -12.251564979553223 + ], + [ + "cuba", + -12.251602172851562 + ], + [ + "omni", + -12.251609802246094 + ], + [ + "▁detox", + -12.251662254333496 + ], + [ + "▁quartz", + -12.251741409301758 + ], + [ + "▁Bug", + -12.25177001953125 + ], + [ + "▁Sugar", + -12.25185775756836 + ], + [ + "▁opponents", + -12.25197982788086 + ], + [ + "▁solved", + -12.25207805633545 + ], + [ + "semn", + -12.252257347106934 + ], + [ + "▁Prepare", + -12.252558708190918 + ], + [ + "ffel", + -12.252586364746094 + ], + [ + "▁Highlight", + -12.252608299255371 + ], + [ + "▁curent", + -12.252618789672852 + ], + [ + "▁praktisch", + -12.252626419067383 + ], + [ + "▁lending", + -12.252676963806152 + ], + [ + "▁minority", + -12.252752304077148 + ], + [ + "Free", + -12.252970695495605 + ], + [ + "business", + -12.252997398376465 + ], + [ + "▁outlook", + -12.253097534179688 + ], + [ + "▁assessments", + -12.253168106079102 + ], + [ + "▁Brother", + -12.253266334533691 + ], + [ + "▁partager", + -12.25326919555664 + ], + [ + "▁Brun", + -12.25329303741455 + ], + [ + "▁pedestrian", + -12.25339412689209 + ], + [ + "anța", + -12.253413200378418 + ], + [ + "▁recycled", + -12.253457069396973 + ], + [ + "▁quicker", + -12.253626823425293 + ], + [ + "▁lamps", + -12.253683090209961 + ], + [ + "▁nationally", + -12.253813743591309 + ], + [ + "▁Supplier", + -12.253823280334473 + ], + [ + "ograph", + -12.253936767578125 + ], + [ + "engage", + -12.253981590270996 + ], + [ + "▁Marg", + -12.254131317138672 + ], + [ + "▁aplicare", + -12.254181861877441 + ], + [ + "▁scared", + -12.254194259643555 + ], + [ + "▁accredited", + -12.254255294799805 + ], + [ + "▁outils", + -12.25436019897461 + ], + [ + "▁bâtiment", + -12.254446029663086 + ], + [ + "▁existed", + -12.254586219787598 + ], + [ + "gegangen", + -12.254619598388672 + ], + [ + "▁elevation", + -12.25463581085205 + ], + [ + "▁Tradition", + -12.254670143127441 + ], + [ + "▁Gericht", + -12.254677772521973 + ], + [ + "hub", + -12.254680633544922 + ], + [ + "strahl", + -12.25473690032959 + ], + [ + "build", + -12.254796981811523 + ], + [ + "▁Customers", + -12.25487232208252 + ], + [ + "klasse", + -12.254890441894531 + ], + [ + "▁pierre", + -12.254895210266113 + ], + [ + "(2)", + -12.255006790161133 + ], + [ + "Life", + -12.255125999450684 + ], + [ + "▁bachelor", + -12.25513744354248 + ], + [ + "▁quad", + -12.255195617675781 + ], + [ + "▁dispozitiv", + -12.25523567199707 + ], + [ + "106", + -12.255266189575195 + ], + [ + "▁suburb", + -12.255495071411133 + ], + [ + "▁1977", + -12.255586624145508 + ], + [ + "▁Alzheimer", + -12.255973815917969 + ], + [ + "▁spicy", + -12.255988121032715 + ], + [ + "▁spreading", + -12.256002426147461 + ], + [ + "nötigen", + -12.256078720092773 + ], + [ + "▁novels", + -12.256104469299316 + ], + [ + "▁responsabilité", + -12.256141662597656 + ], + [ + "▁Bud", + -12.256332397460938 + ], + [ + "▁desirable", + -12.256407737731934 + ], + [ + "TOR", + -12.256444931030273 + ], + [ + "five", + -12.256547927856445 + ], + [ + "▁Firmen", + -12.256860733032227 + ], + [ + "oeuvre", + -12.257075309753418 + ], + [ + "grass", + -12.257233619689941 + ], + [ + "▁practically", + -12.257277488708496 + ], + [ + "▁runners", + -12.257281303405762 + ], + [ + "▁mothers", + -12.257341384887695 + ], + [ + "Shop", + -12.257345199584961 + ], + [ + "▁Chicken", + -12.257408142089844 + ], + [ + "▁License", + -12.257593154907227 + ], + [ + "▁Bach", + -12.25765323638916 + ], 
+ [ + "earliest", + -12.257729530334473 + ], + [ + "▁replica", + -12.25774097442627 + ], + [ + "▁haunt", + -12.257833480834961 + ], + [ + "▁materi", + -12.257854461669922 + ], + [ + "▁Finland", + -12.257893562316895 + ], + [ + "▁europene", + -12.257919311523438 + ], + [ + "abilă", + -12.257944107055664 + ], + [ + "cati", + -12.258007049560547 + ], + [ + "▁cholesterol", + -12.258132934570312 + ], + [ + "...).", + -12.258151054382324 + ], + [ + "cardi", + -12.25838565826416 + ], + [ + "▁(12", + -12.258387565612793 + ], + [ + "analyzed", + -12.258506774902344 + ], + [ + "▁respondents", + -12.258591651916504 + ], + [ + "▁höchste", + -12.258646011352539 + ], + [ + "▁Kern", + -12.258647918701172 + ], + [ + "▁knapp", + -12.258781433105469 + ], + [ + "▁Someone", + -12.258955001831055 + ], + [ + "▁équipé", + -12.258997917175293 + ], + [ + "credited", + -12.259106636047363 + ], + [ + "▁numar", + -12.259163856506348 + ], + [ + "▁Ace", + -12.259185791015625 + ], + [ + "zentrum", + -12.2592191696167 + ], + [ + "nehmer", + -12.259270668029785 + ], + [ + "arrivée", + -12.259282112121582 + ], + [ + "ELE", + -12.259291648864746 + ], + [ + "clean", + -12.259418487548828 + ], + [ + "Boost", + -12.259538650512695 + ], + [ + "call", + -12.259575843811035 + ], + [ + "▁Polizei", + -12.259659767150879 + ], + [ + "▁Januar", + -12.259663581848145 + ], + [ + "▁Tile", + -12.259681701660156 + ], + [ + "▁traduc", + -12.259744644165039 + ], + [ + "▁promptly", + -12.259773254394531 + ], + [ + "limit", + -12.259809494018555 + ], + [ + "▁recharge", + -12.2598237991333 + ], + [ + "▁wipe", + -12.259862899780273 + ], + [ + "▁Norway", + -12.26001262664795 + ], + [ + "▁Municipal", + -12.260077476501465 + ], + [ + "▁medieval", + -12.260117530822754 + ], + [ + "▁Treat", + -12.26021671295166 + ], + [ + "Orient", + -12.260283470153809 + ], + [ + "▁Stewart", + -12.260294914245605 + ], + [ + "▁lol", + -12.26039981842041 + ], + [ + "appartement", + -12.260522842407227 + ], + [ + "▁payer", + -12.260655403137207 + ], + [ + "▁splash", + -12.260723114013672 + ], + [ + "doubtedly", + -12.260726928710938 + ], + [ + "dry", + -12.260846138000488 + ], + [ + "▁Forex", + -12.260939598083496 + ], + [ + "▁Edinburgh", + -12.260943412780762 + ], + [ + "▁Traditional", + -12.261032104492188 + ], + [ + "▁1968", + -12.261134147644043 + ], + [ + "▁glow", + -12.261248588562012 + ], + [ + "Alternatively", + -12.261265754699707 + ], + [ + "▁partly", + -12.261354446411133 + ], + [ + "égi", + -12.261401176452637 + ], + [ + "▁Prices", + -12.261640548706055 + ], + [ + "haupt", + -12.261651992797852 + ], + [ + "▁sentences", + -12.261711120605469 + ], + [ + "ouvre", + -12.261735916137695 + ], + [ + "▁Liter", + -12.261746406555176 + ], + [ + "▁Important", + -12.2620267868042 + ], + [ + "▁Collins", + -12.262077331542969 + ], + [ + "▁reproduce", + -12.262106895446777 + ], + [ + "▁selten", + -12.262124061584473 + ], + [ + "▁Mitte", + -12.262170791625977 + ], + [ + "OA", + -12.262174606323242 + ], + [ + "▁Sister", + -12.262358665466309 + ], + [ + "▁responding", + -12.262385368347168 + ], + [ + "▁ballot", + -12.262455940246582 + ], + [ + "▁Nutrition", + -12.262460708618164 + ], + [ + "occurrence", + -12.26246452331543 + ], + [ + "Atunci", + -12.262604713439941 + ], + [ + "▁hockey", + -12.262680053710938 + ], + [ + "▁undertaking", + -12.262697219848633 + ], + [ + "▁educators", + -12.262885093688965 + ], + [ + "▁Swedish", + -12.262893676757812 + ], + [ + "▁Recovery", + -12.262894630432129 + ], + [ + "▁circum", + -12.262910842895508 + ], + [ + "▁chains", + 
-12.263084411621094 + ], + [ + "▁genug", + -12.263113021850586 + ], + [ + "▁Pil", + -12.263227462768555 + ], + [ + "▁farms", + -12.263265609741211 + ], + [ + "▁simplicity", + -12.263336181640625 + ], + [ + "-21", + -12.263399124145508 + ], + [ + "▁partition", + -12.263493537902832 + ], + [ + "▁Relations", + -12.26360034942627 + ], + [ + "zentrale", + -12.263794898986816 + ], + [ + "lapse", + -12.263855934143066 + ], + [ + "▁toast", + -12.263862609863281 + ], + [ + "▁citi", + -12.263946533203125 + ], + [ + "▁longtemps", + -12.263984680175781 + ], + [ + "maj", + -12.264448165893555 + ], + [ + "▁Cin", + -12.264483451843262 + ], + [ + "zeichen", + -12.264504432678223 + ], + [ + "▁Zoo", + -12.264567375183105 + ], + [ + "▁frisch", + -12.264570236206055 + ], + [ + "▁permettra", + -12.264595031738281 + ], + [ + "▁Liberty", + -12.264642715454102 + ], + [ + "▁playground", + -12.264873504638672 + ], + [ + "▁Mate", + -12.265031814575195 + ], + [ + "▁evolving", + -12.265066146850586 + ], + [ + "national", + -12.265207290649414 + ], + [ + "▁signifie", + -12.265279769897461 + ], + [ + "▁Related", + -12.265292167663574 + ], + [ + "NES", + -12.265337944030762 + ], + [ + "euil", + -12.265473365783691 + ], + [ + "▁struggles", + -12.265542030334473 + ], + [ + "▁instinct", + -12.265628814697266 + ], + [ + "arbre", + -12.26608943939209 + ], + [ + "▁commands", + -12.266222953796387 + ], + [ + "▁frumoase", + -12.26637077331543 + ], + [ + "▁watches", + -12.266779899597168 + ], + [ + "NM", + -12.266804695129395 + ], + [ + "▁influential", + -12.266807556152344 + ], + [ + "▁gewesen", + -12.266901969909668 + ], + [ + "▁Pictures", + -12.267224311828613 + ], + [ + "▁HVAC", + -12.267242431640625 + ], + [ + "▁skate", + -12.26732063293457 + ], + [ + "▁Robot", + -12.267327308654785 + ], + [ + "▁Boys", + -12.267404556274414 + ], + [ + "▁Mutter", + -12.267425537109375 + ], + [ + "▁marques", + -12.267539024353027 + ], + [ + "utiliser", + -12.267793655395508 + ], + [ + "▁amazed", + -12.267799377441406 + ], + [ + "ächtig", + -12.26783275604248 + ], + [ + "▁Success", + -12.267870903015137 + ], + [ + "gramm", + -12.267956733703613 + ], + [ + "▁1972", + -12.267956733703613 + ], + [ + "▁marina", + -12.268269538879395 + ], + [ + "▁lou", + -12.268321990966797 + ], + [ + "▁précis", + -12.268380165100098 + ], + [ + "ographic", + -12.268482208251953 + ], + [ + "people", + -12.26848316192627 + ], + [ + "fahr", + -12.268547058105469 + ], + [ + "▁Contemporary", + -12.268550872802734 + ], + [ + "▁frustrating", + -12.26858139038086 + ], + [ + "chide", + -12.268704414367676 + ], + [ + "1.5", + -12.268807411193848 + ], + [ + "▁ankle", + -12.268850326538086 + ], + [ + "▁proximity", + -12.268986701965332 + ], + [ + "▁Leute", + -12.269006729125977 + ], + [ + "UA", + -12.269031524658203 + ], + [ + "union", + -12.269131660461426 + ], + [ + "▁recovered", + -12.269133567810059 + ], + [ + "▁sword", + -12.269216537475586 + ], + [ + "▁Mut", + -12.26923942565918 + ], + [ + "▁Rin", + -12.269360542297363 + ], + [ + "▁lectures", + -12.26942253112793 + ], + [ + "▁licensing", + -12.269423484802246 + ], + [ + "MAC", + -12.269498825073242 + ], + [ + "▁commute", + -12.269776344299316 + ], + [ + "Acesta", + -12.269858360290527 + ], + [ + "▁Koch", + -12.270088195800781 + ], + [ + "▁depozit", + -12.270119667053223 + ], + [ + "▁erstmal", + -12.270163536071777 + ], + [ + "arhi", + -12.270271301269531 + ], + [ + "▁Normal", + -12.270462036132812 + ], + [ + "EZ", + -12.270464897155762 + ], + [ + "ărilor", + -12.270986557006836 + ], + [ + "▁favoris", + -12.271041870117188 
+ ], + [ + "▁$9", + -12.271050453186035 + ], + [ + "▁Lawrence", + -12.271172523498535 + ], + [ + "▁fixing", + -12.271200180053711 + ], + [ + "▁researching", + -12.271288871765137 + ], + [ + "▁Pant", + -12.271467208862305 + ], + [ + "▁candid", + -12.271490097045898 + ], + [ + "▁Arkansas", + -12.27160930633545 + ], + [ + "▁bitcoin", + -12.271612167358398 + ], + [ + "ва", + -12.271645545959473 + ], + [ + "▁Finger", + -12.271692276000977 + ], + [ + "▁SRL", + -12.271718978881836 + ], + [ + "Arg", + -12.271797180175781 + ], + [ + "trade", + -12.271903991699219 + ], + [ + "▁extraction", + -12.271941184997559 + ], + [ + "▁footprint", + -12.2720308303833 + ], + [ + "▁folosite", + -12.272085189819336 + ], + [ + "▁Flex", + -12.272184371948242 + ], + [ + "▁dys", + -12.272294998168945 + ], + [ + "▁Wright", + -12.272343635559082 + ], + [ + "▁multitude", + -12.272378921508789 + ], + [ + "▁Chu", + -12.272494316101074 + ], + [ + "▁Jerry", + -12.27249526977539 + ], + [ + "▁notebook", + -12.272722244262695 + ], + [ + "▁SIM", + -12.272932052612305 + ], + [ + "dietary", + -12.272963523864746 + ], + [ + "▁polished", + -12.272984504699707 + ], + [ + "▁carriers", + -12.272993087768555 + ], + [ + "▁cardiac", + -12.27299976348877 + ], + [ + "▁burned", + -12.273038864135742 + ], + [ + "▁sealed", + -12.273062705993652 + ], + [ + "▁pumps", + -12.273224830627441 + ], + [ + "▁consumed", + -12.273233413696289 + ], + [ + "▁Teaching", + -12.273446083068848 + ], + [ + "▁daughters", + -12.27348518371582 + ], + [ + "serviciile", + -12.273600578308105 + ], + [ + "▁Teams", + -12.273690223693848 + ], + [ + "▁avoided", + -12.273903846740723 + ], + [ + "▁compagnie", + -12.274019241333008 + ], + [ + "▁mașin", + -12.274024963378906 + ], + [ + "▁Sean", + -12.27418041229248 + ], + [ + "▁arunc", + -12.274208068847656 + ], + [ + "kräfte", + -12.274238586425781 + ], + [ + "vani", + -12.274255752563477 + ], + [ + "Metall", + -12.27437973022461 + ], + [ + "2009", + -12.274449348449707 + ], + [ + "moi", + -12.274688720703125 + ], + [ + "▁THAT", + -12.274700164794922 + ], + [ + "▁Ny", + -12.274809837341309 + ], + [ + "▁countertops", + -12.274860382080078 + ], + [ + "Pod", + -12.274938583374023 + ], + [ + "amente", + -12.274943351745605 + ], + [ + "▁offshore", + -12.275001525878906 + ], + [ + "luti", + -12.275087356567383 + ], + [ + "parked", + -12.275160789489746 + ], + [ + "ajout", + -12.275247573852539 + ], + [ + "Shirt", + -12.275328636169434 + ], + [ + "▁3/4", + -12.275389671325684 + ], + [ + "▁gratuite", + -12.27543830871582 + ], + [ + "mètres", + -12.27557373046875 + ], + [ + "▁Wish", + -12.2755765914917 + ], + [ + "▁holistic", + -12.27558422088623 + ], + [ + "gren", + -12.275607109069824 + ], + [ + "compiled", + -12.275660514831543 + ], + [ + "▁innocent", + -12.275779724121094 + ], + [ + "▁sorte", + -12.275787353515625 + ], + [ + "▁insulin", + -12.275792121887207 + ], + [ + "▁Academic", + -12.275996208190918 + ], + [ + "▁acrylic", + -12.27600383758545 + ], + [ + "▁hinzu", + -12.27616024017334 + ], + [ + "▁compression", + -12.27619457244873 + ], + [ + "▁viral", + -12.276220321655273 + ], + [ + "▁stereo", + -12.2764892578125 + ], + [ + "▁Concept", + -12.276542663574219 + ], + [ + "▁Margaret", + -12.276659965515137 + ], + [ + "▁consolidation", + -12.276875495910645 + ], + [ + "Figure", + -12.277058601379395 + ], + [ + "zzo", + -12.277061462402344 + ], + [ + "▁Egg", + -12.277098655700684 + ], + [ + "weiterhin", + -12.277213096618652 + ], + [ + "▁Vista", + -12.277252197265625 + ], + [ + "▁necessity", + -12.277316093444824 + ], + [ + 
"▁kayak", + -12.277490615844727 + ], + [ + "▁consensus", + -12.277535438537598 + ], + [ + "▁Katz", + -12.277602195739746 + ], + [ + "▁Warren", + -12.277640342712402 + ], + [ + "▁custody", + -12.277755737304688 + ], + [ + "++", + -12.277759552001953 + ], + [ + "▁paiement", + -12.277782440185547 + ], + [ + "▁foul", + -12.277878761291504 + ], + [ + "Chaque", + -12.277934074401855 + ], + [ + "▁Syrian", + -12.277998924255371 + ], + [ + "▁photographers", + -12.278056144714355 + ], + [ + "▁dismiss", + -12.278270721435547 + ], + [ + "▁Gaz", + -12.278526306152344 + ], + [ + "▁développer", + -12.278529167175293 + ], + [ + "▁Dakota", + -12.27863883972168 + ], + [ + "▁cardiovascular", + -12.278642654418945 + ], + [ + "▁tattoo", + -12.278858184814453 + ], + [ + "▁Lighting", + -12.278918266296387 + ], + [ + "▁nowhere", + -12.278940200805664 + ], + [ + "vada", + -12.27895450592041 + ], + [ + "▁Favor", + -12.279084205627441 + ], + [ + "ruled", + -12.2791748046875 + ], + [ + "▁Dating", + -12.2793550491333 + ], + [ + "gain", + -12.279963493347168 + ], + [ + "rism", + -12.28016471862793 + ], + [ + "coloured", + -12.280169486999512 + ], + [ + "▁refugees", + -12.280184745788574 + ], + [ + "▁Schm", + -12.2803955078125 + ], + [ + "▁happily", + -12.280402183532715 + ], + [ + "▁specification", + -12.280607223510742 + ], + [ + "WM", + -12.280736923217773 + ], + [ + "▁intro", + -12.280823707580566 + ], + [ + "rack", + -12.28097915649414 + ], + [ + "characterized", + -12.28107738494873 + ], + [ + "▁externe", + -12.281136512756348 + ], + [ + "▁arrives", + -12.28114128112793 + ], + [ + "WO", + -12.281181335449219 + ], + [ + "bericht", + -12.281233787536621 + ], + [ + "▁delays", + -12.281242370605469 + ], + [ + "▁Flight", + -12.281256675720215 + ], + [ + "1-3", + -12.281524658203125 + ], + [ + "▁Singh", + -12.281548500061035 + ], + [ + "▁shifting", + -12.281651496887207 + ], + [ + "▁dashboard", + -12.281729698181152 + ], + [ + "▁lieux", + -12.281781196594238 + ], + [ + "▁validate", + -12.281901359558105 + ], + [ + "▁uniquement", + -12.281963348388672 + ], + [ + "clip", + -12.28199291229248 + ], + [ + "cov", + -12.282132148742676 + ], + [ + "▁tendance", + -12.282215118408203 + ], + [ + "èle", + -12.282258033752441 + ], + [ + "▁incepe", + -12.282261848449707 + ], + [ + "▁chunk", + -12.282585144042969 + ], + [ + "▁Nr", + -12.28266716003418 + ], + [ + "▁Montana", + -12.282674789428711 + ], + [ + "▁sticks", + -12.28277587890625 + ], + [ + "▁caps", + -12.28309154510498 + ], + [ + "▁Jimmy", + -12.283167839050293 + ], + [ + "▁Levi", + -12.283285140991211 + ], + [ + "▁cables", + -12.28345012664795 + ], + [ + "▁SB", + -12.283550262451172 + ], + [ + "▁thème", + -12.2836275100708 + ], + [ + "ADA", + -12.283672332763672 + ], + [ + "▁garant", + -12.283686637878418 + ], + [ + "▁Joint", + -12.283820152282715 + ], + [ + "▁partage", + -12.28398323059082 + ], + [ + "schreib", + -12.284119606018066 + ], + [ + "ether", + -12.28420352935791 + ], + [ + "▁Klima", + -12.284303665161133 + ], + [ + "▁medicines", + -12.284317016601562 + ], + [ + "▁pH", + -12.284320831298828 + ], + [ + "Architect", + -12.284378051757812 + ], + [ + "știi", + -12.284396171569824 + ], + [ + "▁retrouve", + -12.284700393676758 + ], + [ + "▁posture", + -12.284753799438477 + ], + [ + "Feature", + -12.284773826599121 + ], + [ + "▁drying", + -12.284884452819824 + ], + [ + "trifft", + -12.28488826751709 + ], + [ + "ibi", + -12.285079002380371 + ], + [ + "▁rezerv", + -12.285116195678711 + ], + [ + "▁Vă", + -12.28518009185791 + ], + [ + "▁Speaker", + -12.285282135009766 + ], + 
[ + "▁illustration", + -12.285319328308105 + ], + [ + "oooo", + -12.285419464111328 + ], + [ + "▁initiated", + -12.285518646240234 + ], + [ + "PK", + -12.285545349121094 + ], + [ + "▁algorithms", + -12.285630226135254 + ], + [ + "▁zice", + -12.285757064819336 + ], + [ + "WI", + -12.28581428527832 + ], + [ + "urgence", + -12.285823822021484 + ], + [ + "▁bloggers", + -12.285887718200684 + ], + [ + "▁realitate", + -12.285894393920898 + ], + [ + "eks", + -12.28598690032959 + ], + [ + "▁cushions", + -12.286149024963379 + ], + [ + "▁Kri", + -12.286224365234375 + ], + [ + "▁réalisation", + -12.286396026611328 + ], + [ + "▁Photoshop", + -12.286407470703125 + ], + [ + "cret", + -12.286462783813477 + ], + [ + "faire", + -12.286613464355469 + ], + [ + "▁Cei", + -12.286782264709473 + ], + [ + "ICO", + -12.286789894104004 + ], + [ + "Contin", + -12.28681755065918 + ], + [ + "▁Builder", + -12.286916732788086 + ], + [ + "look", + -12.28698444366455 + ], + [ + "▁tenants", + -12.287023544311523 + ], + [ + "▁gloves", + -12.287113189697266 + ], + [ + "Day", + -12.287169456481934 + ], + [ + "firmly", + -12.28725814819336 + ], + [ + "CIA", + -12.287352561950684 + ], + [ + "▁TVA", + -12.28741455078125 + ], + [ + "▁notifications", + -12.287446975708008 + ], + [ + "▁Higher", + -12.287459373474121 + ], + [ + "▁Weihnachts", + -12.287491798400879 + ], + [ + "▁blur", + -12.287755012512207 + ], + [ + "ов", + -12.288087844848633 + ], + [ + "feder", + -12.288159370422363 + ], + [ + "▁explosion", + -12.288171768188477 + ], + [ + "▁Fenster", + -12.288189888000488 + ], + [ + "▁junge", + -12.288225173950195 + ], + [ + "▁Highland", + -12.288230895996094 + ], + [ + "▁Lü", + -12.288290023803711 + ], + [ + "▁Alba", + -12.28832721710205 + ], + [ + "▁Dort", + -12.288338661193848 + ], + [ + "▁recruiting", + -12.28835391998291 + ], + [ + "▁Multiple", + -12.288549423217773 + ], + [ + "▁animated", + -12.288604736328125 + ], + [ + "▁Virgin", + -12.288637161254883 + ], + [ + "1000", + -12.288676261901855 + ], + [ + "▁resin", + -12.288700103759766 + ], + [ + "▁matrix", + -12.288826942443848 + ], + [ + "irri", + -12.289011001586914 + ], + [ + "▁chiffre", + -12.28904914855957 + ], + [ + "▁Corps", + -12.289252281188965 + ], + [ + "▁advocacy", + -12.28927230834961 + ], + [ + "▁pozitiv", + -12.289274215698242 + ], + [ + "▁pouss", + -12.289451599121094 + ], + [ + "événement", + -12.28950309753418 + ], + [ + "▁pielii", + -12.289717674255371 + ], + [ + "onnais", + -12.289750099182129 + ], + [ + "▁Statement", + -12.289754867553711 + ], + [ + "crimin", + -12.289868354797363 + ], + [ + "hidrat", + -12.289942741394043 + ], + [ + "▁Jugendliche", + -12.290057182312012 + ], + [ + "TRI", + -12.290223121643066 + ], + [ + "erra", + -12.290240287780762 + ], + [ + "chat", + -12.290321350097656 + ], + [ + "▁traits", + -12.290359497070312 + ], + [ + "▁incentives", + -12.29038143157959 + ], + [ + "▁accelerate", + -12.290568351745605 + ], + [ + "woven", + -12.290633201599121 + ], + [ + "UST", + -12.290688514709473 + ], + [ + "▁premiers", + -12.290717124938965 + ], + [ + "▁Ferien", + -12.290755271911621 + ], + [ + "▁mariage", + -12.290796279907227 + ], + [ + "▁financially", + -12.290801048278809 + ], + [ + "gesellschaft", + -12.290863037109375 + ], + [ + "▁situaţi", + -12.290865898132324 + ], + [ + "▁quoted", + -12.291373252868652 + ], + [ + "▁periodic", + -12.291421890258789 + ], + [ + "▁chaos", + -12.291543960571289 + ], + [ + "▁remodel", + -12.29159927368164 + ], + [ + "▁Contractor", + -12.291641235351562 + ], + [ + "▁recuper", + -12.291729927062988 + ], + [ 
+ "▁driveway", + -12.291755676269531 + ], + [ + "▁entertain", + -12.291765213012695 + ], + [ + "▁condus", + -12.291769027709961 + ], + [ + "▁chefs", + -12.29184341430664 + ], + [ + "pak", + -12.291866302490234 + ], + [ + "▁possède", + -12.291948318481445 + ], + [ + "▁outreach", + -12.291984558105469 + ], + [ + "▁navig", + -12.292036056518555 + ], + [ + "▁renewal", + -12.292071342468262 + ], + [ + "▁Rice", + -12.292309761047363 + ], + [ + "▁Czech", + -12.292398452758789 + ], + [ + "▁entstehen", + -12.292445182800293 + ], + [ + "▁droite", + -12.292448997497559 + ], + [ + "▁Investor", + -12.292497634887695 + ], + [ + "▁Soci", + -12.29250431060791 + ], + [ + "▁scalp", + -12.292622566223145 + ], + [ + "▁politiques", + -12.292815208435059 + ], + [ + "▁plaintiff", + -12.292841911315918 + ], + [ + "extending", + -12.29287052154541 + ], + [ + "▁paperwork", + -12.29300594329834 + ], + [ + "vizi", + -12.293142318725586 + ], + [ + "assisting", + -12.29317569732666 + ], + [ + "local", + -12.293272972106934 + ], + [ + "▁Wear", + -12.293323516845703 + ], + [ + "▁descend", + -12.293340682983398 + ], + [ + "▁Wikipedia", + -12.293513298034668 + ], + [ + "▁Consiliului", + -12.293516159057617 + ], + [ + "▁Nokia", + -12.293540000915527 + ], + [ + "▁facult", + -12.293560028076172 + ], + [ + "▁altogether", + -12.293851852416992 + ], + [ + "▁rankings", + -12.29391860961914 + ], + [ + "▁downloading", + -12.293953895568848 + ], + [ + "QU", + -12.294007301330566 + ], + [ + "▁Olive", + -12.294041633605957 + ], + [ + "▁backdrop", + -12.294110298156738 + ], + [ + "▁recomandat", + -12.294116020202637 + ], + [ + "▁Faculty", + -12.294184684753418 + ], + [ + "ANS", + -12.294220924377441 + ], + [ + "▁fracture", + -12.294225692749023 + ], + [ + "job", + -12.29448127746582 + ], + [ + "▁anticipate", + -12.294525146484375 + ], + [ + "▁drift", + -12.294543266296387 + ], + [ + "▁Marco", + -12.294632911682129 + ], + [ + "▁witnessed", + -12.294700622558594 + ], + [ + "▁comprend", + -12.294974327087402 + ], + [ + "▁bulb", + -12.29504680633545 + ], + [ + "▁shallow", + -12.295059204101562 + ], + [ + "stärke", + -12.295063972473145 + ], + [ + "▁Jessica", + -12.295080184936523 + ], + [ + "▁démarche", + -12.29508113861084 + ], + [ + "▁traditionally", + -12.29508113861084 + ], + [ + "Deputy", + -12.295093536376953 + ], + [ + "▁rivers", + -12.295260429382324 + ], + [ + "▁livraison", + -12.29531192779541 + ], + [ + "▁lacking", + -12.295421600341797 + ], + [ + "▁remodeling", + -12.295426368713379 + ], + [ + "▁acesteia", + -12.295514106750488 + ], + [ + "▁grosse", + -12.295669555664062 + ], + [ + "▁propus", + -12.295833587646484 + ], + [ + "lessly", + -12.29587459564209 + ], + [ + "▁Kredit", + -12.295931816101074 + ], + [ + "reputable", + -12.295981407165527 + ], + [ + "▁Sell", + -12.2960205078125 + ], + [ + "▁Crime", + -12.296111106872559 + ], + [ + "Ent", + -12.296310424804688 + ], + [ + "finity", + -12.296422004699707 + ], + [ + "▁Complex", + -12.296500205993652 + ], + [ + "easing", + -12.296638488769531 + ], + [ + "dynamic", + -12.296670913696289 + ], + [ + "▁eaten", + -12.296727180480957 + ], + [ + "gezogen", + -12.296734809875488 + ], + [ + "▁2004,", + -12.296774864196777 + ], + [ + "▁Muslims", + -12.296822547912598 + ], + [ + "▁Sprache", + -12.296883583068848 + ], + [ + "▁Truth", + -12.296927452087402 + ], + [ + "▁guarantees", + -12.296928405761719 + ], + [ + "/5", + -12.29712963104248 + ], + [ + "”).", + -12.297135353088379 + ], + [ + "▁Medium", + -12.2972993850708 + ], + [ + "▁décidé", + -12.297445297241211 + ], + [ + "▁balcony", + 
-12.29747200012207 + ], + [ + "leuchte", + -12.297502517700195 + ], + [ + "hik", + -12.297849655151367 + ], + [ + "▁Agriculture", + -12.298221588134766 + ], + [ + "▁securities", + -12.298221588134766 + ], + [ + "Probably", + -12.298224449157715 + ], + [ + "▁macar", + -12.29824161529541 + ], + [ + "▁Signal", + -12.298399925231934 + ], + [ + "lake", + -12.298677444458008 + ], + [ + "▁compétences", + -12.298726081848145 + ], + [ + "▁proprietary", + -12.298812866210938 + ], + [ + "allons", + -12.298850059509277 + ], + [ + "▁belongs", + -12.298916816711426 + ], + [ + "▁missile", + -12.298958778381348 + ], + [ + "țiune", + -12.298999786376953 + ], + [ + "▁Integration", + -12.299116134643555 + ], + [ + "▁testimony", + -12.299120903015137 + ], + [ + "▁wesentlich", + -12.299142837524414 + ], + [ + "▁donors", + -12.299152374267578 + ], + [ + "▁pivot", + -12.299202919006348 + ], + [ + "▁Uber", + -12.299219131469727 + ], + [ + "▁databases", + -12.299281120300293 + ], + [ + "▁studi", + -12.299317359924316 + ], + [ + "totdeauna", + -12.299351692199707 + ], + [ + "▁briefly", + -12.299449920654297 + ], + [ + "▁livr", + -12.29952335357666 + ], + [ + "▁CRM", + -12.299581527709961 + ], + [ + "gone", + -12.299697875976562 + ], + [ + "10)", + -12.299761772155762 + ], + [ + "▁zilele", + -12.299920082092285 + ], + [ + "Basically", + -12.300008773803711 + ], + [ + "▁medie", + -12.300041198730469 + ], + [ + "spotted", + -12.30006217956543 + ], + [ + "▁troubles", + -12.30009937286377 + ], + [ + "▁acknowledged", + -12.300176620483398 + ], + [ + "350", + -12.300185203552246 + ], + [ + "LB", + -12.300273895263672 + ], + [ + "Phy", + -12.30038833618164 + ], + [ + "natal", + -12.300397872924805 + ], + [ + "illé", + -12.300445556640625 + ], + [ + "bilder", + -12.300625801086426 + ], + [ + "▁apples", + -12.300636291503906 + ], + [ + "graphical", + -12.300889015197754 + ], + [ + "organiser", + -12.301024436950684 + ], + [ + "▁ochii", + -12.301040649414062 + ], + [ + "glas", + -12.301178932189941 + ], + [ + "CAP", + -12.301180839538574 + ], + [ + "▁Doors", + -12.301331520080566 + ], + [ + "▁Eis", + -12.30156135559082 + ], + [ + "tipuri", + -12.301590919494629 + ], + [ + "▁Worth", + -12.301684379577637 + ], + [ + "izează", + -12.301719665527344 + ], + [ + "nunț", + -12.30180549621582 + ], + [ + "▁Trip", + -12.30186653137207 + ], + [ + "ISS", + -12.301976203918457 + ], + [ + "efficient", + -12.30201530456543 + ], + [ + "Luckily", + -12.302099227905273 + ], + [ + "▁vase", + -12.302133560180664 + ], + [ + "▁gay", + -12.302343368530273 + ], + [ + "▁certificates", + -12.302434921264648 + ], + [ + "riad", + -12.302549362182617 + ], + [ + "stab", + -12.302570343017578 + ], + [ + "affiche", + -12.302604675292969 + ], + [ + "▁iPod", + -12.302645683288574 + ], + [ + "▁aștept", + -12.302726745605469 + ], + [ + "▁$500", + -12.302751541137695 + ], + [ + "▁Catherine", + -12.302952766418457 + ], + [ + "▁Circuit", + -12.302957534790039 + ], + [ + "▁ranch", + -12.303045272827148 + ], + [ + "▁consequence", + -12.303118705749512 + ], + [ + "listened", + -12.303131103515625 + ], + [ + "▁Options", + -12.303187370300293 + ], + [ + "feed", + -12.30318832397461 + ], + [ + "▁adviser", + -12.303248405456543 + ], + [ + "▁présenter", + -12.30333423614502 + ], + [ + "substant", + -12.30337905883789 + ], + [ + "▁Flag", + -12.303604125976562 + ], + [ + "▁Keith", + -12.30366325378418 + ], + [ + "▁inima", + -12.303709983825684 + ], + [ + "▁substrate", + -12.30373764038086 + ], + [ + "▁charger", + -12.303803443908691 + ], + [ + "▁reporter", + 
-12.303844451904297 + ], + [ + "ütz", + -12.304068565368652 + ], + [ + "▁unten", + -12.30417537689209 + ], + [ + "▁sympa", + -12.304542541503906 + ], + [ + "▁defeated", + -12.304600715637207 + ], + [ + "ändig", + -12.304644584655762 + ], + [ + "individu", + -12.304747581481934 + ], + [ + "▁Straßen", + -12.304774284362793 + ], + [ + "▁Nepal", + -12.304791450500488 + ], + [ + "million", + -12.304803848266602 + ], + [ + "▁Cake", + -12.30499267578125 + ], + [ + "▁investigations", + -12.30526065826416 + ], + [ + "▁inspector", + -12.3054780960083 + ], + [ + "▁Campbell", + -12.305486679077148 + ], + [ + "▁consommation", + -12.305489540100098 + ], + [ + "▁Ministerul", + -12.305628776550293 + ], + [ + "Advisory", + -12.305749893188477 + ], + [ + "▁Leistungs", + -12.305939674377441 + ], + [ + "▁Pull", + -12.306157112121582 + ], + [ + "▁lover", + -12.306194305419922 + ], + [ + "▁trunk", + -12.306380271911621 + ], + [ + "▁folosesc", + -12.30639934539795 + ], + [ + "pom", + -12.306558609008789 + ], + [ + "wunder", + -12.306794166564941 + ], + [ + "▁happier", + -12.306801795959473 + ], + [ + "▁embark", + -12.30689525604248 + ], + [ + "▁mediul", + -12.3069486618042 + ], + [ + "riff", + -12.306973457336426 + ], + [ + "▁copilul", + -12.307039260864258 + ], + [ + "ommage", + -12.307126998901367 + ], + [ + "rechnung", + -12.307218551635742 + ], + [ + "NU", + -12.307220458984375 + ], + [ + "▁fellowship", + -12.307395935058594 + ], + [ + "▁Mental", + -12.307403564453125 + ], + [ + "▁fever", + -12.3074312210083 + ], + [ + "▁silly", + -12.307547569274902 + ], + [ + "Object", + -12.30756664276123 + ], + [ + "NV", + -12.307591438293457 + ], + [ + "от", + -12.30774974822998 + ], + [ + "▁Strand", + -12.307762145996094 + ], + [ + "▁Exist", + -12.30777359008789 + ], + [ + "warum", + -12.307832717895508 + ], + [ + "CY", + -12.307848930358887 + ], + [ + "kä", + -12.307856559753418 + ], + [ + "!!!!!", + -12.307869911193848 + ], + [ + "▁moarte", + -12.30793571472168 + ], + [ + "▁waterfall", + -12.308024406433105 + ], + [ + "left", + -12.30815601348877 + ], + [ + "▁Nursing", + -12.308225631713867 + ], + [ + "▁invalid", + -12.30826187133789 + ], + [ + "struktur", + -12.308385848999023 + ], + [ + "Allerdings", + -12.30838680267334 + ], + [ + "étranger", + -12.30838680267334 + ], + [ + "▁prost", + -12.308517456054688 + ], + [ + "▁Parent", + -12.308562278747559 + ], + [ + "▁întreag", + -12.308611869812012 + ], + [ + "▁compensate", + -12.308871269226074 + ], + [ + "▁sometime", + -12.308955192565918 + ], + [ + "graduate", + -12.308968544006348 + ], + [ + "▁Carter", + -12.30898380279541 + ], + [ + "▁crap", + -12.308998107910156 + ], + [ + "▁mathematics", + -12.309067726135254 + ], + [ + "resemble", + -12.309069633483887 + ], + [ + "Dame", + -12.309152603149414 + ], + [ + "▁Swa", + -12.309198379516602 + ], + [ + "▁celebrity", + -12.309239387512207 + ], + [ + "▁verified", + -12.309338569641113 + ], + [ + "▁Behind", + -12.309349060058594 + ], + [ + "carbon", + -12.309432983398438 + ], + [ + "▁gateway", + -12.309490203857422 + ], + [ + "▁ambitious", + -12.30952262878418 + ], + [ + "▁Wellness", + -12.30966567993164 + ], + [ + "30,000", + -12.30968189239502 + ], + [ + "defined", + -12.309929847717285 + ], + [ + "specializes", + -12.310121536254883 + ], + [ + "▁Chase", + -12.310199737548828 + ], + [ + "HF", + -12.310233116149902 + ], + [ + "ABLE", + -12.310348510742188 + ], + [ + "▁Ehr", + -12.310467720031738 + ], + [ + "▁régime", + -12.310480117797852 + ], + [ + "▁awake", + -12.310487747192383 + ], + [ + "▁seafood", + 
-12.310487747192383 + ], + [ + "leading", + -12.310554504394531 + ], + [ + "▁Rule", + -12.310602188110352 + ], + [ + "verkehr", + -12.310726165771484 + ], + [ + "erem", + -12.310737609863281 + ], + [ + "▁1973", + -12.310795783996582 + ], + [ + "personal", + -12.311171531677246 + ], + [ + "ența", + -12.311330795288086 + ], + [ + "apprend", + -12.311396598815918 + ], + [ + "faisant", + -12.311420440673828 + ], + [ + "▁Sounds", + -12.31151008605957 + ], + [ + "▁Launch", + -12.31151294708252 + ], + [ + "half", + -12.311636924743652 + ], + [ + "▁verre", + -12.311859130859375 + ], + [ + "▁Regular", + -12.31207275390625 + ], + [ + "▁Nancy", + -12.312142372131348 + ], + [ + "quelles", + -12.312161445617676 + ], + [ + "▁erhält", + -12.312169075012207 + ], + [ + "▁socks", + -12.3121919631958 + ], + [ + "lamp", + -12.312387466430664 + ], + [ + "▁durchgeführt", + -12.312472343444824 + ], + [ + "▁advertise", + -12.31260871887207 + ], + [ + "powered", + -12.312653541564941 + ], + [ + "▁concur", + -12.312699317932129 + ], + [ + "▁ressources", + -12.31293773651123 + ], + [ + "▁allocation", + -12.312986373901367 + ], + [ + "chon", + -12.313041687011719 + ], + [ + "▁Larry", + -12.313177108764648 + ], + [ + "lässig", + -12.313254356384277 + ], + [ + "OLD", + -12.313493728637695 + ], + [ + "itty", + -12.313599586486816 + ], + [ + "▁immuno", + -12.313645362854004 + ], + [ + "▁(+", + -12.313651084899902 + ], + [ + "▁Essential", + -12.313674926757812 + ], + [ + "▁semaines", + -12.313719749450684 + ], + [ + "Ru", + -12.31375503540039 + ], + [ + "▁Gear", + -12.313764572143555 + ], + [ + "völlig", + -12.313850402832031 + ], + [ + "liga", + -12.31391716003418 + ], + [ + "▁Neg", + -12.314082145690918 + ], + [ + "▁gratitude", + -12.31408977508545 + ], + [ + "aventure", + -12.314108848571777 + ], + [ + "▁frustrated", + -12.314115524291992 + ], + [ + "▁retrait", + -12.31422233581543 + ], + [ + "▁statut", + -12.314231872558594 + ], + [ + "550", + -12.31434440612793 + ], + [ + "ла", + -12.314428329467773 + ], + [ + "risto", + -12.314448356628418 + ], + [ + "WAY", + -12.314607620239258 + ], + [ + "▁pigment", + -12.314652442932129 + ], + [ + "Selon", + -12.314715385437012 + ], + [ + "stil", + -12.3148775100708 + ], + [ + "▁Marin", + -12.315055847167969 + ], + [ + "ashi", + -12.315085411071777 + ], + [ + "▁contine", + -12.31519889831543 + ], + [ + "▁Economics", + -12.315200805664062 + ], + [ + "both", + -12.3152437210083 + ], + [ + "▁Dou", + -12.31527328491211 + ], + [ + "Fel", + -12.315373420715332 + ], + [ + "UNT", + -12.315434455871582 + ], + [ + "▁grandmother", + -12.31548023223877 + ], + [ + "▁domicile", + -12.315678596496582 + ], + [ + "▁buffer", + -12.31574535369873 + ], + [ + "▁fuse", + -12.315815925598145 + ], + [ + "▁dosage", + -12.315821647644043 + ], + [ + "▁Nici", + -12.315839767456055 + ], + [ + "▁worries", + -12.315908432006836 + ], + [ + "▁Rail", + -12.3159818649292 + ], + [ + "uneori", + -12.315990447998047 + ], + [ + "▁Sierra", + -12.316030502319336 + ], + [ + "▁porni", + -12.316032409667969 + ], + [ + "▁NOTE", + -12.316056251525879 + ], + [ + "▁tendency", + -12.316065788269043 + ], + [ + "Set", + -12.316256523132324 + ], + [ + "▁Hof", + -12.31629753112793 + ], + [ + "▁Ruhe", + -12.316300392150879 + ], + [ + "harm", + -12.316360473632812 + ], + [ + "▁Developer", + -12.316367149353027 + ], + [ + "suing", + -12.316400527954102 + ], + [ + "persönlichen", + -12.31658935546875 + ], + [ + "▁agréable", + -12.316596031188965 + ], + [ + "commissioned", + -12.316696166992188 + ], + [ + "▁1974", + -12.31672191619873 + 
], + [ + "▁1969", + -12.316758155822754 + ], + [ + "▁regl", + -12.316996574401855 + ], + [ + "▁terror", + -12.317042350769043 + ], + [ + "▁température", + -12.317051887512207 + ], + [ + "▁Archiv", + -12.31706714630127 + ], + [ + "▁Military", + -12.317140579223633 + ], + [ + "▁König", + -12.317290306091309 + ], + [ + "▁forex", + -12.31737232208252 + ], + [ + "wiki", + -12.31745719909668 + ], + [ + "thetic", + -12.317506790161133 + ], + [ + "alaturi", + -12.317974090576172 + ], + [ + "▁montant", + -12.3179931640625 + ], + [ + "▁maladie", + -12.318044662475586 + ], + [ + "gust", + -12.318151473999023 + ], + [ + "▁demander", + -12.318164825439453 + ], + [ + "avocat", + -12.318191528320312 + ], + [ + "▁sci", + -12.318192481994629 + ], + [ + "▁Wireless", + -12.318214416503906 + ], + [ + "▁Dein", + -12.318220138549805 + ], + [ + "▁trio", + -12.3183012008667 + ], + [ + "▁Same", + -12.318395614624023 + ], + [ + "Datei", + -12.318464279174805 + ], + [ + "▁alerg", + -12.318578720092773 + ], + [ + "crowded", + -12.318657875061035 + ], + [ + "▁Punkt", + -12.318853378295898 + ], + [ + "▁sanctions", + -12.318864822387695 + ], + [ + "stating", + -12.318922996520996 + ], + [ + "▁discusse", + -12.318949699401855 + ], + [ + "▁Eigen", + -12.319068908691406 + ], + [ + "▁sănătate", + -12.31911563873291 + ], + [ + "▁correspondence", + -12.319211959838867 + ], + [ + "cred", + -12.319331169128418 + ], + [ + "VG", + -12.319347381591797 + ], + [ + "▁différence", + -12.319347381591797 + ], + [ + "▁Montreal", + -12.319391250610352 + ], + [ + "▁masini", + -12.319398880004883 + ], + [ + "iata", + -12.319487571716309 + ], + [ + "▁sampling", + -12.319574356079102 + ], + [ + "▁Gib", + -12.319831848144531 + ], + [ + "▁sheer", + -12.319944381713867 + ], + [ + "330", + -12.319947242736816 + ], + [ + "CHI", + -12.319990158081055 + ], + [ + "▁damn", + -12.320030212402344 + ], + [ + "▁Advisor", + -12.320201873779297 + ], + [ + "Typically", + -12.320302963256836 + ], + [ + "ssé", + -12.320352554321289 + ], + [ + "quart", + -12.320361137390137 + ], + [ + "chete", + -12.320385932922363 + ], + [ + "▁Puerto", + -12.32049560546875 + ], + [ + "2-1", + -12.32050609588623 + ], + [ + "NN", + -12.320674896240234 + ], + [ + "▁styling", + -12.320707321166992 + ], + [ + "rud", + -12.320777893066406 + ], + [ + "од", + -12.320856094360352 + ], + [ + "▁Hydro", + -12.320941925048828 + ], + [ + "▁Cable", + -12.320961952209473 + ], + [ + "video", + -12.320974349975586 + ], + [ + "▁Wirkung", + -12.321194648742676 + ], + [ + "▁noble", + -12.321270942687988 + ], + [ + "▁Sonder", + -12.32129192352295 + ], + [ + "mati", + -12.321317672729492 + ], + [ + "850", + -12.321395874023438 + ], + [ + "▁Richmond", + -12.32143497467041 + ], + [ + "▁niciodată", + -12.321442604064941 + ], + [ + "AO", + -12.321527481079102 + ], + [ + "▁altered", + -12.321648597717285 + ], + [ + "▁(15", + -12.32168960571289 + ], + [ + "▁Motiv", + -12.322052001953125 + ], + [ + "AKE", + -12.322089195251465 + ], + [ + "▁bestimmte", + -12.322172164916992 + ], + [ + "6.5", + -12.322176933288574 + ], + [ + "hectare", + -12.322333335876465 + ], + [ + "atorită", + -12.322335243225098 + ], + [ + "▁phases", + -12.322447776794434 + ], + [ + "▁Nova", + -12.322566032409668 + ], + [ + "ordinateur", + -12.322579383850098 + ], + [ + "▁corrupt", + -12.322813034057617 + ], + [ + "error", + -12.322895050048828 + ], + [ + "▁attacked", + -12.323005676269531 + ], + [ + "▁Kirche", + -12.323019981384277 + ], + [ + "heir", + -12.323040962219238 + ], + [ + "Das", + -12.323254585266113 + ], + [ + "▁anxious", + 
-12.323258399963379 + ], + [ + "▁Doc", + -12.323386192321777 + ], + [ + "▁Roth", + -12.323415756225586 + ], + [ + "▁Cine", + -12.32388687133789 + ], + [ + "▁auditor", + -12.324418067932129 + ], + [ + "▁beverage", + -12.324586868286133 + ], + [ + "▁précédent", + -12.324637413024902 + ], + [ + "▁deploy", + -12.324837684631348 + ], + [ + "▁accessibility", + -12.324843406677246 + ], + [ + "▁cage", + -12.324885368347168 + ], + [ + "▁Contra", + -12.324934005737305 + ], + [ + "Best", + -12.324952125549316 + ], + [ + "iji", + -12.324972152709961 + ], + [ + "▁père", + -12.325060844421387 + ], + [ + "▁scenic", + -12.32511043548584 + ], + [ + "synthesis", + -12.325165748596191 + ], + [ + "ßen", + -12.32534408569336 + ], + [ + "▁Videos", + -12.325482368469238 + ], + [ + "▁refus", + -12.325484275817871 + ], + [ + "stimmen", + -12.3255615234375 + ], + [ + "▁sleek", + -12.325577735900879 + ], + [ + "artige", + -12.32563591003418 + ], + [ + "mari", + -12.32568359375 + ], + [ + "▁excelent", + -12.325740814208984 + ], + [ + "▁negativ", + -12.325806617736816 + ], + [ + "▁blocking", + -12.32590103149414 + ], + [ + "spricht", + -12.326001167297363 + ], + [ + "▁discomfort", + -12.32602310180664 + ], + [ + "▁stratégie", + -12.32602310180664 + ], + [ + "▁Datenschutz", + -12.326078414916992 + ], + [ + "curg", + -12.326128005981445 + ], + [ + "▁lapte", + -12.326432228088379 + ], + [ + "▁acasă", + -12.326491355895996 + ], + [ + "▁ausschließlich", + -12.32653522491455 + ], + [ + "▁unbedingt", + -12.326802253723145 + ], + [ + "▁Linie", + -12.32689380645752 + ], + [ + "▁subscribers", + -12.327019691467285 + ], + [ + "109", + -12.32702350616455 + ], + [ + "▁Waste", + -12.32712173461914 + ], + [ + "▁Planung", + -12.327231407165527 + ], + [ + "▁visually", + -12.32734489440918 + ], + [ + "utilizarea", + -12.327370643615723 + ], + [ + "uba", + -12.327381134033203 + ], + [ + "▁fifteen", + -12.327411651611328 + ], + [ + "▁légère", + -12.327411651611328 + ], + [ + "ința", + -12.327446937561035 + ], + [ + "▁tolerance", + -12.327460289001465 + ], + [ + "▁piscine", + -12.327536582946777 + ], + [ + "▁nails", + -12.327569007873535 + ], + [ + "▁accus", + -12.327693939208984 + ], + [ + "▁coeur", + -12.327773094177246 + ], + [ + "freie", + -12.327849388122559 + ], + [ + "enţă", + -12.32812213897705 + ], + [ + "▁glucose", + -12.328336715698242 + ], + [ + "▁Jar", + -12.32838249206543 + ], + [ + "▁commencer", + -12.328387260437012 + ], + [ + "▁eliminating", + -12.328414916992188 + ], + [ + "▁mutation", + -12.32844352722168 + ], + [ + "▁afirma", + -12.328444480895996 + ], + [ + "▁Consulting", + -12.328454971313477 + ], + [ + "adia", + -12.328543663024902 + ], + [ + "zog", + -12.328604698181152 + ], + [ + "▁pielea", + -12.328658103942871 + ], + [ + "rton", + -12.328706741333008 + ], + [ + "exercice", + -12.3287935256958 + ], + [ + "namely", + -12.328847885131836 + ], + [ + "▁ajutor", + -12.3289155960083 + ], + [ + "▁markers", + -12.328917503356934 + ], + [ + "▁gardening", + -12.328932762145996 + ], + [ + "Karte", + -12.329038619995117 + ], + [ + "▁Pump", + -12.329142570495605 + ], + [ + "▁Dual", + -12.329169273376465 + ], + [ + "▁pratiques", + -12.329349517822266 + ], + [ + "▁behavioral", + -12.329358100891113 + ], + [ + "▁construire", + -12.329511642456055 + ], + [ + "▁Leonard", + -12.329596519470215 + ], + [ + "ediglich", + -12.329630851745605 + ], + [ + "ubbed", + -12.3297758102417 + ], + [ + "NK", + -12.329792022705078 + ], + [ + "shell", + -12.329912185668945 + ], + [ + "▁persönliche", + -12.329996109008789 + ], + [ + "ecuring", + 
-12.329998970031738 + ], + [ + "beaten", + -12.33000373840332 + ], + [ + "ALE", + -12.330053329467773 + ], + [ + "▁puppy", + -12.33023452758789 + ], + [ + "▁capac", + -12.33027458190918 + ], + [ + "▁seventh", + -12.330394744873047 + ], + [ + "▁nursery", + -12.330400466918945 + ], + [ + "▁Rum", + -12.330419540405273 + ], + [ + "▁exquisite", + -12.330423355102539 + ], + [ + "▁Legi", + -12.330483436584473 + ], + [ + "▁persist", + -12.330497741699219 + ], + [ + "bacterial", + -12.330548286437988 + ], + [ + "▁cereal", + -12.330572128295898 + ], + [ + "▁principe", + -12.330693244934082 + ], + [ + "chip", + -12.330766677856445 + ], + [ + "rush", + -12.330832481384277 + ], + [ + "▁funnel", + -12.330904006958008 + ], + [ + "▁calitatea", + -12.331024169921875 + ], + [ + "ibă", + -12.33104419708252 + ], + [ + "▁reign", + -12.331086158752441 + ], + [ + "▁congregation", + -12.331120491027832 + ], + [ + "▁obtine", + -12.331270217895508 + ], + [ + "▁découverte", + -12.331286430358887 + ], + [ + "▁gama", + -12.331315040588379 + ], + [ + "▁judec", + -12.33132553100586 + ], + [ + "Plan", + -12.331351280212402 + ], + [ + "▁gesture", + -12.331539154052734 + ], + [ + "öffentlichen", + -12.331644058227539 + ], + [ + "▁imported", + -12.331693649291992 + ], + [ + "▁rotate", + -12.331747055053711 + ], + [ + "blown", + -12.331756591796875 + ], + [ + "▁Protein", + -12.331827163696289 + ], + [ + "parfaitement", + -12.331832885742188 + ], + [ + "ondo", + -12.331868171691895 + ], + [ + "ologists", + -12.331890106201172 + ], + [ + "▁neighborhoods", + -12.331989288330078 + ], + [ + "▁Pope", + -12.33202075958252 + ], + [ + "▁museums", + -12.332194328308105 + ], + [ + "▁porter", + -12.332330703735352 + ], + [ + "▁kiss", + -12.332335472106934 + ], + [ + "pdf", + -12.332354545593262 + ], + [ + "sided", + -12.332359313964844 + ], + [ + "▁gern", + -12.332395553588867 + ], + [ + "bedingungen", + -12.332496643066406 + ], + [ + "▁Ride", + -12.332582473754883 + ], + [ + "Apoi", + -12.332584381103516 + ], + [ + "▁bestehen", + -12.332603454589844 + ], + [ + "5\"", + -12.33285903930664 + ], + [ + "bob", + -12.332862854003906 + ], + [ + "ficient", + -12.33303165435791 + ], + [ + "premise", + -12.333086967468262 + ], + [ + "▁Clip", + -12.333112716674805 + ], + [ + "▁concours", + -12.333213806152344 + ], + [ + "olar", + -12.333281517028809 + ], + [ + "▁Centr", + -12.333356857299805 + ], + [ + "outlined", + -12.333429336547852 + ], + [ + "▁observa", + -12.333511352539062 + ], + [ + "▁negotiate", + -12.333537101745605 + ], + [ + "▁Partnership", + -12.33358383178711 + ], + [ + "clock", + -12.333662033081055 + ], + [ + "roasted", + -12.333755493164062 + ], + [ + "Pourquoi", + -12.33391284942627 + ], + [ + "▁Marshall", + -12.334005355834961 + ], + [ + "▁Gerade", + -12.334052085876465 + ], + [ + "▁pachet", + -12.334160804748535 + ], + [ + "▁preliminary", + -12.334162712097168 + ], + [ + "▁tragic", + -12.334200859069824 + ], + [ + "author", + -12.334268569946289 + ], + [ + "▁Gov", + -12.334309577941895 + ], + [ + "▁comunic", + -12.334403991699219 + ], + [ + "▁coordinator", + -12.334410667419434 + ], + [ + "YA", + -12.33445930480957 + ], + [ + "▁Steam", + -12.33476734161377 + ], + [ + "▁Nag", + -12.334796905517578 + ], + [ + "▁Kara", + -12.334851264953613 + ], + [ + "▁Gang", + -12.334858894348145 + ], + [ + "aurez", + -12.334868431091309 + ], + [ + "▁horrible", + -12.334869384765625 + ], + [ + "▁Luxury", + -12.335076332092285 + ], + [ + "▁encouragement", + -12.335169792175293 + ], + [ + "▁conceptual", + -12.335250854492188 + ], + [ + 
"▁constituent", + -12.335431098937988 + ], + [ + "nvelop", + -12.335494041442871 + ], + [ + "ucc", + -12.335500717163086 + ], + [ + "▁conçu", + -12.335542678833008 + ], + [ + "pfel", + -12.33559513092041 + ], + [ + "special", + -12.335700988769531 + ], + [ + "▁Growth", + -12.335834503173828 + ], + [ + "cada", + -12.335916519165039 + ], + [ + "▁oamenilor", + -12.335976600646973 + ], + [ + "▁vendredi", + -12.336021423339844 + ], + [ + "▁coupe", + -12.336055755615234 + ], + [ + "▁Danke", + -12.336134910583496 + ], + [ + "reflects", + -12.336181640625 + ], + [ + "▁girlfriend", + -12.336273193359375 + ], + [ + "▁diffuse", + -12.336325645446777 + ], + [ + "HER", + -12.336328506469727 + ], + [ + "storing", + -12.336464881896973 + ], + [ + "ailing", + -12.336591720581055 + ], + [ + "▁Desi", + -12.336601257324219 + ], + [ + "stitution", + -12.336832046508789 + ], + [ + "▁adun", + -12.336844444274902 + ], + [ + "▁Partie", + -12.336869239807129 + ], + [ + "▁tissues", + -12.336958885192871 + ], + [ + "▁discovering", + -12.337154388427734 + ], + [ + "Jacques", + -12.337178230285645 + ], + [ + "lungs", + -12.33724594116211 + ], + [ + "▁Handy", + -12.337261199951172 + ], + [ + "centric", + -12.337285995483398 + ], + [ + "slav", + -12.337442398071289 + ], + [ + "▁sights", + -12.337560653686523 + ], + [ + "▁Category", + -12.337644577026367 + ], + [ + "▁Einrichtung", + -12.337957382202148 + ], + [ + "▁Robinson", + -12.33804702758789 + ], + [ + "▁Terra", + -12.338150978088379 + ], + [ + "▁creep", + -12.338167190551758 + ], + [ + "▁Lob", + -12.338184356689453 + ], + [ + "001", + -12.33820629119873 + ], + [ + "kop", + -12.338208198547363 + ], + [ + "Emb", + -12.338292121887207 + ], + [ + "▁forgive", + -12.338391304016113 + ], + [ + "▁icons", + -12.33847427368164 + ], + [ + "electric", + -12.3385009765625 + ], + [ + "▁faucet", + -12.338516235351562 + ], + [ + "▁invisible", + -12.3386812210083 + ], + [ + "sprach", + -12.338801383972168 + ], + [ + "▁beachten", + -12.33881664276123 + ], + [ + "rahm", + -12.338833808898926 + ], + [ + "▁Teacher", + -12.338919639587402 + ], + [ + "Fab", + -12.339070320129395 + ], + [ + "▁joue", + -12.339101791381836 + ], + [ + "▁Popular", + -12.339120864868164 + ], + [ + "▁Februar", + -12.339171409606934 + ], + [ + "sound", + -12.339251518249512 + ], + [ + "▁(0", + -12.339317321777344 + ], + [ + "▁Compare", + -12.33938980102539 + ], + [ + "▁pads", + -12.339455604553223 + ], + [ + "270", + -12.339498519897461 + ], + [ + "ousse", + -12.339548110961914 + ], + [ + "▁UAE", + -12.339786529541016 + ], + [ + "izări", + -12.339787483215332 + ], + [ + "▁bonuses", + -12.33993911743164 + ], + [ + "▁switches", + -12.3400239944458 + ], + [ + "▁Brothers", + -12.340166091918945 + ], + [ + "▁environmentally", + -12.340171813964844 + ], + [ + "vista", + -12.340264320373535 + ], + [ + "▁intentions", + -12.3402738571167 + ], + [ + "▁Terri", + -12.340301513671875 + ], + [ + "▁diabet", + -12.34030532836914 + ], + [ + "▁prese", + -12.340333938598633 + ], + [ + "▁parcurs", + -12.340389251708984 + ], + [ + "Warum", + -12.340449333190918 + ], + [ + "▁credentials", + -12.340455055236816 + ], + [ + "▁PLA", + -12.34046459197998 + ], + [ + "▁instruct", + -12.340470314025879 + ], + [ + "▁benefic", + -12.340633392333984 + ], + [ + "write", + -12.340675354003906 + ], + [ + "▁poids", + -12.340773582458496 + ], + [ + "▁Anspruch", + -12.340923309326172 + ], + [ + "▁avocado", + -12.340923309326172 + ], + [ + "▁inevitable", + -12.340923309326172 + ], + [ + "▁poorly", + -12.340950965881348 + ], + [ + "karte", + 
-12.340994834899902 + ], + [ + "▁Publishing", + -12.340999603271484 + ], + [ + "odată", + -12.341140747070312 + ], + [ + "▁scientifique", + -12.341157913208008 + ], + [ + "▁lăsa", + -12.341262817382812 + ], + [ + "▁secol", + -12.34131908416748 + ], + [ + "▁nevertheless", + -12.341392517089844 + ], + [ + "SAT", + -12.341597557067871 + ], + [ + "280", + -12.341651916503906 + ], + [ + "▁prevederi", + -12.341670989990234 + ], + [ + "▁chrome", + -12.342002868652344 + ], + [ + "institut", + -12.342267036437988 + ], + [ + "richtigen", + -12.34228515625 + ], + [ + "▁grief", + -12.342338562011719 + ], + [ + "▁penalties", + -12.342373847961426 + ], + [ + "▁Bayern", + -12.34238052368164 + ], + [ + "▁caramel", + -12.342473983764648 + ], + [ + "Now", + -12.342495918273926 + ], + [ + "Stiftung", + -12.342576026916504 + ], + [ + "country", + -12.342737197875977 + ], + [ + "dication", + -12.34278678894043 + ], + [ + "▁Chor", + -12.342801094055176 + ], + [ + "▁rămâne", + -12.342936515808105 + ], + [ + "▁TOP", + -12.34300708770752 + ], + [ + "▁complète", + -12.34301471710205 + ], + [ + "▁Marian", + -12.34302806854248 + ], + [ + "▁Avant", + -12.343121528625488 + ], + [ + "▁Shower", + -12.343156814575195 + ], + [ + "treu", + -12.34316349029541 + ], + [ + "▁chop", + -12.34321403503418 + ], + [ + "▁comfortably", + -12.343220710754395 + ], + [ + "▁autism", + -12.34323787689209 + ], + [ + "▁Sind", + -12.34328556060791 + ], + [ + "▁(20", + -12.343340873718262 + ], + [ + "▁Cinema", + -12.343414306640625 + ], + [ + "compania", + -12.343606948852539 + ], + [ + "▁Lex", + -12.343622207641602 + ], + [ + "▁Sofa", + -12.343716621398926 + ], + [ + "dru", + -12.343753814697266 + ], + [ + "▁verification", + -12.343770027160645 + ], + [ + "▁Immer", + -12.343825340270996 + ], + [ + "lomb", + -12.343829154968262 + ], + [ + "meric", + -12.34385871887207 + ], + [ + "▁slower", + -12.34398365020752 + ], + [ + "▁propag", + -12.344090461730957 + ], + [ + "Inter", + -12.344097137451172 + ], + [ + "selling", + -12.34418773651123 + ], + [ + "▁Bright", + -12.344269752502441 + ], + [ + "condition", + -12.344280242919922 + ], + [ + "PDF", + -12.344291687011719 + ], + [ + "oyez", + -12.344391822814941 + ], + [ + "▁Fried", + -12.344420433044434 + ], + [ + "▁Nazi", + -12.34443187713623 + ], + [ + "▁Buffalo", + -12.344447135925293 + ], + [ + "▁Sue", + -12.344449043273926 + ], + [ + "▁Rhein", + -12.34468936920166 + ], + [ + "▁Klaus", + -12.344889640808105 + ], + [ + "▁indiqu", + -12.344963073730469 + ], + [ + "echte", + -12.344996452331543 + ], + [ + "▁frecvent", + -12.345165252685547 + ], + [ + "▁conveniently", + -12.345187187194824 + ], + [ + "▁Moi", + -12.345197677612305 + ], + [ + "▁greenhouse", + -12.345220565795898 + ], + [ + "▁rédui", + -12.34524154663086 + ], + [ + "▁lengthy", + -12.34542179107666 + ], + [ + "verband", + -12.345534324645996 + ], + [ + "inţă", + -12.345622062683105 + ], + [ + "▁rigorous", + -12.345625877380371 + ], + [ + "▁Finish", + -12.34580135345459 + ], + [ + "▁FBI", + -12.346052169799805 + ], + [ + "cultura", + -12.346083641052246 + ], + [ + "▁compartment", + -12.346110343933105 + ], + [ + "▁pretend", + -12.346117973327637 + ], + [ + "▁assembled", + -12.346212387084961 + ], + [ + "▁Nie", + -12.34639835357666 + ], + [ + "fession", + -12.34640884399414 + ], + [ + "▁£2", + -12.34642219543457 + ], + [ + "algré", + -12.3468017578125 + ], + [ + "▁anterior", + -12.346817970275879 + ], + [ + "▁Wissenschaft", + -12.34683609008789 + ], + [ + "▁Harbor", + -12.346923828125 + ], + [ + "lix", + -12.346985816955566 + ], + [ + 
"=\"", + -12.347049713134766 + ], + [ + "▁breathtaking", + -12.34705638885498 + ], + [ + "▁Stern", + -12.34708309173584 + ], + [ + "▁Internetseite", + -12.347132682800293 + ], + [ + "▁locker", + -12.347216606140137 + ], + [ + "▁feather", + -12.34726619720459 + ], + [ + "Serv", + -12.347297668457031 + ], + [ + "▁snake", + -12.347332000732422 + ], + [ + "▁Border", + -12.347396850585938 + ], + [ + "▁undergo", + -12.347518920898438 + ], + [ + "▁petrol", + -12.347558975219727 + ], + [ + "▁dealership", + -12.3475923538208 + ], + [ + "▁commander", + -12.347596168518066 + ], + [ + "▁Monate", + -12.347599983215332 + ], + [ + "▁Guardian", + -12.347665786743164 + ], + [ + "▁Todd", + -12.347774505615234 + ], + [ + "Ann", + -12.347825050354004 + ], + [ + "ibilité", + -12.347918510437012 + ], + [ + "▁Quarter", + -12.347987174987793 + ], + [ + "▁portray", + -12.348097801208496 + ], + [ + "▁Tai", + -12.34813404083252 + ], + [ + "▁strikes", + -12.348224639892578 + ], + [ + "illage", + -12.348381042480469 + ], + [ + "▁IRS", + -12.348417282104492 + ], + [ + "▁lupta", + -12.348455429077148 + ], + [ + "▁Sper", + -12.348493576049805 + ], + [ + "PRO", + -12.348530769348145 + ], + [ + "▁Export", + -12.348549842834473 + ], + [ + "▁crypto", + -12.348587989807129 + ], + [ + "▁barbecue", + -12.348692893981934 + ], + [ + "▁portions", + -12.348787307739258 + ], + [ + "▁explicit", + -12.348793983459473 + ], + [ + "▁angenehm", + -12.348834037780762 + ], + [ + "▁marathon", + -12.348946571350098 + ], + [ + "▁apartament", + -12.348982810974121 + ], + [ + "▁Eva", + -12.349079132080078 + ], + [ + "plate", + -12.349181175231934 + ], + [ + "viel", + -12.34925365447998 + ], + [ + "FIN", + -12.34926986694336 + ], + [ + "dependent", + -12.34935188293457 + ], + [ + "▁cercet", + -12.34942626953125 + ], + [ + "▁midnight", + -12.349499702453613 + ], + [ + "copie", + -12.349563598632812 + ], + [ + "▁companii", + -12.349621772766113 + ], + [ + "▁tenu", + -12.349660873413086 + ], + [ + "1/2", + -12.349662780761719 + ], + [ + "2.4", + -12.349693298339844 + ], + [ + "abri", + -12.349699974060059 + ], + [ + "▁warn", + -12.34980297088623 + ], + [ + "▁luggage", + -12.349875450134277 + ], + [ + "numarul", + -12.349968910217285 + ], + [ + "▁contour", + -12.350014686584473 + ], + [ + "▁Ghost", + -12.350016593933105 + ], + [ + "Angaben", + -12.35012435913086 + ], + [ + "▁unemployment", + -12.350296020507812 + ], + [ + "▁rău", + -12.350380897521973 + ], + [ + "▁dispatch", + -12.350445747375488 + ], + [ + "investissement", + -12.350547790527344 + ], + [ + "▁passt", + -12.35057258605957 + ], + [ + "▁Germania", + -12.350578308105469 + ], + [ + "▁webpage", + -12.350651741027832 + ], + [ + "▁reservations", + -12.350688934326172 + ], + [ + "▁Kai", + -12.350743293762207 + ], + [ + "▁Cav", + -12.350890159606934 + ], + [ + "▁Patient", + -12.351109504699707 + ], + [ + "ер", + -12.351213455200195 + ], + [ + "▁Belle", + -12.351236343383789 + ], + [ + "▁Nashville", + -12.351296424865723 + ], + [ + "▁Talent", + -12.351332664489746 + ], + [ + "ouvrage", + -12.351364135742188 + ], + [ + "▁bekommt", + -12.351365089416504 + ], + [ + "USA", + -12.351430892944336 + ], + [ + "CES", + -12.351432800292969 + ], + [ + "▁Peru", + -12.351499557495117 + ], + [ + "▁erkennen", + -12.35153579711914 + ], + [ + "prinde", + -12.351569175720215 + ], + [ + "▁constitution", + -12.351922035217285 + ], + [ + "itatile", + -12.351998329162598 + ], + [ + "bah", + -12.352147102355957 + ], + [ + "▁avail", + -12.352148056030273 + ], + [ + "▁disponibile", + -12.352149963378906 + ], + [ + 
"hér", + -12.352258682250977 + ], + [ + "ол", + -12.352411270141602 + ], + [ + "▁startups", + -12.352435111999512 + ], + [ + "▁carton", + -12.352485656738281 + ], + [ + "▁Newsletter", + -12.35251235961914 + ], + [ + "éti", + -12.352560997009277 + ], + [ + "▁investigating", + -12.352779388427734 + ], + [ + "itul", + -12.352925300598145 + ], + [ + "touch", + -12.352962493896484 + ], + [ + "Sport", + -12.353137016296387 + ], + [ + "AME", + -12.353203773498535 + ], + [ + "MIN", + -12.353222846984863 + ], + [ + "metry", + -12.353371620178223 + ], + [ + "icy", + -12.353492736816406 + ], + [ + "▁Luna", + -12.35351848602295 + ], + [ + "▁asthma", + -12.353614807128906 + ], + [ + "▁conduc", + -12.35365104675293 + ], + [ + "▁Ari", + -12.35369873046875 + ], + [ + "trust", + -12.353832244873047 + ], + [ + "▁defines", + -12.353894233703613 + ], + [ + "▁Blend", + -12.353927612304688 + ], + [ + "azo", + -12.353989601135254 + ], + [ + "▁sweep", + -12.354169845581055 + ], + [ + "lope", + -12.354331016540527 + ], + [ + "ţinut", + -12.35439682006836 + ], + [ + "WD", + -12.354503631591797 + ], + [ + "▁appetite", + -12.354619979858398 + ], + [ + "▁Seed", + -12.354753494262695 + ], + [ + "Friend", + -12.354854583740234 + ], + [ + "▁repet", + -12.354876518249512 + ], + [ + "▁throat", + -12.354936599731445 + ], + [ + "philosoph", + -12.355141639709473 + ], + [ + "▁connaître", + -12.355156898498535 + ], + [ + "▁Counter", + -12.355299949645996 + ], + [ + "▁Anforderungen", + -12.35533332824707 + ], + [ + "▁Polit", + -12.355363845825195 + ], + [ + "▁Weather", + -12.3554048538208 + ], + [ + "bow", + -12.355423927307129 + ], + [ + "▁recreation", + -12.355484008789062 + ], + [ + "▁culinary", + -12.355571746826172 + ], + [ + "▁plage", + -12.355609893798828 + ], + [ + "▁Cruz", + -12.355659484863281 + ], + [ + "▁equip", + -12.355668067932129 + ], + [ + "▁Recent", + -12.355697631835938 + ], + [ + "LED", + -12.355767250061035 + ], + [ + "▁steak", + -12.355772972106934 + ], + [ + "▁belly", + -12.355880737304688 + ], + [ + "photo", + -12.356130599975586 + ], + [ + "▁lakes", + -12.35623836517334 + ], + [ + "▁intact", + -12.356287956237793 + ], + [ + "▁spiral", + -12.356386184692383 + ], + [ + "▁Billy", + -12.356468200683594 + ], + [ + "▁Understanding", + -12.356534957885742 + ], + [ + "▁Lay", + -12.356558799743652 + ], + [ + "▁roster", + -12.356632232666016 + ], + [ + "▁admire", + -12.356647491455078 + ], + [ + "▁android", + -12.356732368469238 + ], + [ + "▁technician", + -12.356734275817871 + ], + [ + "gène", + -12.356818199157715 + ], + [ + "motiv", + -12.356954574584961 + ], + [ + "▁Boat", + -12.356988906860352 + ], + [ + "▁genießen", + -12.357000350952148 + ], + [ + "▁Geschmack", + -12.357001304626465 + ], + [ + "▁heroes", + -12.3570556640625 + ], + [ + "▁1800", + -12.357137680053711 + ], + [ + "numeroase", + -12.35776138305664 + ], + [ + "▁anschließend", + -12.357802391052246 + ], + [ + "▁Spur", + -12.357813835144043 + ], + [ + "▁clarify", + -12.35784912109375 + ], + [ + "▁warmer", + -12.357889175415039 + ], + [ + "▁Ranch", + -12.357955932617188 + ], + [ + "▁simti", + -12.358024597167969 + ], + [ + "Thank", + -12.35838508605957 + ], + [ + "▁freight", + -12.358434677124023 + ], + [ + "▁administrators", + -12.358453750610352 + ], + [ + "Reg", + -12.358588218688965 + ], + [ + "Această", + -12.358670234680176 + ], + [ + "▁legume", + -12.358741760253906 + ], + [ + "▁utilizare", + -12.358786582946777 + ], + [ + "CON", + -12.358904838562012 + ], + [ + "urgi", + -12.358917236328125 + ], + [ + "▁Gesicht", + -12.358920097351074 + ], 
+ [ + "▁counselor", + -12.358954429626465 + ], + [ + "▁mondiale", + -12.359009742736816 + ], + [ + "helm", + -12.359137535095215 + ], + [ + "▁Promo", + -12.359156608581543 + ], + [ + "▁Schweiz", + -12.35917854309082 + ], + [ + "Ich", + -12.35929012298584 + ], + [ + "▁intalni", + -12.359295845031738 + ], + [ + "▁Bloom", + -12.359318733215332 + ], + [ + "▁Score", + -12.359362602233887 + ], + [ + "▁Fruit", + -12.35944652557373 + ], + [ + "▁constraints", + -12.359447479248047 + ], + [ + "▁farmer", + -12.359745979309082 + ], + [ + "▁précise", + -12.359807014465332 + ], + [ + "evaluating", + -12.359868049621582 + ], + [ + "▁Period", + -12.359891891479492 + ], + [ + "byte", + -12.359893798828125 + ], + [ + "wah", + -12.360025405883789 + ], + [ + "Mac", + -12.360123634338379 + ], + [ + "iron", + -12.360197067260742 + ], + [ + "′", + -12.360337257385254 + ], + [ + "▁tehnic", + -12.360539436340332 + ], + [ + "▁legat", + -12.36054515838623 + ], + [ + "▁Pilot", + -12.360574722290039 + ], + [ + "▁Carpet", + -12.36064624786377 + ], + [ + "TEN", + -12.360812187194824 + ], + [ + "▁shareholders", + -12.36082649230957 + ], + [ + "vină", + -12.360880851745605 + ], + [ + "▁parole", + -12.360939979553223 + ], + [ + "ătă", + -12.360984802246094 + ], + [ + "bbing", + -12.361000061035156 + ], + [ + "▁switched", + -12.361002922058105 + ], + [ + "▁Petro", + -12.361010551452637 + ], + [ + "▁Vertrags", + -12.36111831665039 + ], + [ + "cham", + -12.361178398132324 + ], + [ + "wang", + -12.361284255981445 + ], + [ + "▁Bean", + -12.36139965057373 + ], + [ + "minister", + -12.361442565917969 + ], + [ + "▁Wu", + -12.361522674560547 + ], + [ + "▁Olympics", + -12.361539840698242 + ], + [ + "tipul", + -12.361542701721191 + ], + [ + "▁Citi", + -12.36166763305664 + ], + [ + "▁Fold", + -12.361873626708984 + ], + [ + "▁Partei", + -12.361940383911133 + ], + [ + "▁centrale", + -12.361984252929688 + ], + [ + "île", + -12.362032890319824 + ], + [ + "pflicht", + -12.362175941467285 + ], + [ + "heli", + -12.362398147583008 + ], + [ + "▁erwartet", + -12.362414360046387 + ], + [ + "▁oferta", + -12.362458229064941 + ], + [ + "▁NHS", + -12.36246395111084 + ], + [ + "annon", + -12.362570762634277 + ], + [ + "▁Rud", + -12.362701416015625 + ], + [ + "▁Stuttgart", + -12.362737655639648 + ], + [ + "▁rămas", + -12.362746238708496 + ], + [ + "▁eliminated", + -12.36275577545166 + ], + [ + "▁hiding", + -12.362797737121582 + ], + [ + "▁cadeau", + -12.362832069396973 + ], + [ + "▁mock", + -12.363115310668945 + ], + [ + "▁elder", + -12.363333702087402 + ], + [ + "▁Liz", + -12.363364219665527 + ], + [ + "aji", + -12.363544464111328 + ], + [ + "▁endlich", + -12.363653182983398 + ], + [ + "sufficient", + -12.363668441772461 + ], + [ + "▁zusätzliche", + -12.363712310791016 + ], + [ + "scient", + -12.363757133483887 + ], + [ + "▁Adjust", + -12.363883972167969 + ], + [ + "▁incentive", + -12.363945007324219 + ], + [ + "▁Papa", + -12.364012718200684 + ], + [ + "▁Pharma", + -12.364041328430176 + ], + [ + "▁conflicts", + -12.364107131958008 + ], + [ + "zählen", + -12.364113807678223 + ], + [ + "▁chien", + -12.364118576049805 + ], + [ + "KB", + -12.36413288116455 + ], + [ + "ultimi", + -12.364188194274902 + ], + [ + "▁Jul", + -12.36421012878418 + ], + [ + "▁Male", + -12.36422061920166 + ], + [ + "▁viewer", + -12.36427116394043 + ], + [ + "▁Sector", + -12.364328384399414 + ], + [ + "▁REAL", + -12.364344596862793 + ], + [ + "▁arbitr", + -12.36436939239502 + ], + [ + "resistant", + -12.364399909973145 + ], + [ + "▁Bristol", + -12.364423751831055 + ], + [ + "▁shy", 
+ -12.364540100097656 + ], + [ + "SW", + -12.364593505859375 + ], + [ + "▁Kirk", + -12.36460018157959 + ], + [ + "centrul", + -12.364653587341309 + ], + [ + "▁Venezuela", + -12.364657402038574 + ], + [ + "▁communicating", + -12.364657402038574 + ], + [ + "▁Chemical", + -12.364663124084473 + ], + [ + "▁surprises", + -12.364843368530273 + ], + [ + "▁Jamie", + -12.364933967590332 + ], + [ + "▁Heavy", + -12.364965438842773 + ], + [ + "▁turnover", + -12.36498737335205 + ], + [ + "▁étudiants", + -12.365114212036133 + ], + [ + "welcher", + -12.365124702453613 + ], + [ + "▁preturi", + -12.365200996398926 + ], + [ + "▁Mono", + -12.365283966064453 + ], + [ + "▁paddle", + -12.365309715270996 + ], + [ + "▁accountability", + -12.365364074707031 + ], + [ + "OUS", + -12.365592956542969 + ], + [ + "▁marketers", + -12.365762710571289 + ], + [ + "fection", + -12.365900993347168 + ], + [ + "▁Outside", + -12.365921020507812 + ], + [ + "▁Jefferson", + -12.366114616394043 + ], + [ + "oaie", + -12.36617660522461 + ], + [ + "tenue", + -12.366275787353516 + ], + [ + "HU", + -12.366329193115234 + ], + [ + "Très", + -12.36639404296875 + ], + [ + "valoarea", + -12.36642837524414 + ], + [ + "103", + -12.366482734680176 + ], + [ + "▁Privacy", + -12.366580963134766 + ], + [ + "▁Leistungen", + -12.366598129272461 + ], + [ + "(3)", + -12.36662483215332 + ], + [ + "▁études", + -12.366734504699707 + ], + [ + "sko", + -12.366750717163086 + ], + [ + "drum", + -12.366822242736816 + ], + [ + "▁lamb", + -12.366842269897461 + ], + [ + "▁nicio", + -12.367094993591309 + ], + [ + "▁NATO", + -12.367104530334473 + ], + [ + "▁Freitag", + -12.367178916931152 + ], + [ + "▁precedent", + -12.367178916931152 + ], + [ + "▁partenaires", + -12.367202758789062 + ], + [ + "▁companiei", + -12.367234230041504 + ], + [ + "▁Plaza", + -12.367249488830566 + ], + [ + "▁disruption", + -12.367274284362793 + ], + [ + "▁violations", + -12.367338180541992 + ], + [ + "▁Reference", + -12.367446899414062 + ], + [ + "▁habitants", + -12.36770248413086 + ], + [ + "▁compost", + -12.36776351928711 + ], + [ + "▁citoyen", + -12.367785453796387 + ], + [ + "▁Historical", + -12.367857933044434 + ], + [ + "vollen", + -12.36793327331543 + ], + [ + "▁Eck", + -12.36815357208252 + ], + [ + "▁lumii", + -12.368180274963379 + ], + [ + "▁reusit", + -12.368278503417969 + ], + [ + "genic", + -12.368307113647461 + ], + [ + "Why", + -12.368436813354492 + ], + [ + "ASE", + -12.368474006652832 + ], + [ + "▁athlete", + -12.36854076385498 + ], + [ + "▁Spitze", + -12.368559837341309 + ], + [ + "▁schimbat", + -12.368566513061523 + ], + [ + "▁anonymous", + -12.368850708007812 + ], + [ + "jedes", + -12.368856430053711 + ], + [ + "exclu", + -12.368874549865723 + ], + [ + "factor", + -12.369199752807617 + ], + [ + "▁Dezember", + -12.369231224060059 + ], + [ + "▁scientist", + -12.369373321533203 + ], + [ + "▁likelihood", + -12.36947250366211 + ], + [ + "▁Rhode", + -12.369488716125488 + ], + [ + "▁Balance", + -12.369521141052246 + ], + [ + "istoria", + -12.36959457397461 + ], + [ + "▁Neil", + -12.369780540466309 + ], + [ + "▁bush", + -12.369919776916504 + ], + [ + "▁Ergebnisse", + -12.369935989379883 + ], + [ + "▁Sinn", + -12.369956016540527 + ], + [ + "▁spezielle", + -12.370128631591797 + ], + [ + "▁jucat", + -12.37015438079834 + ], + [ + "▁spite", + -12.370179176330566 + ], + [ + "▁Ultimate", + -12.370365142822266 + ], + [ + "▁fructe", + -12.370401382446289 + ], + [ + "▁asleep", + -12.370441436767578 + ], + [ + "▁Goal", + -12.370539665222168 + ], + [ + "▁PAR", + -12.370631217956543 + ], + [ + 
"▁rows", + -12.370705604553223 + ], + [ + "▁Fol", + -12.3709135055542 + ], + [ + "▁durata", + -12.370945930480957 + ], + [ + "▁traditionnel", + -12.37100887298584 + ], + [ + "▁tema", + -12.37122917175293 + ], + [ + "▁crédit", + -12.371232986450195 + ], + [ + "smallest", + -12.371358871459961 + ], + [ + "▁amino", + -12.371358871459961 + ], + [ + "▁elephant", + -12.371405601501465 + ], + [ + "▁tubes", + -12.371685028076172 + ], + [ + "▁Verwendung", + -12.371719360351562 + ], + [ + "▁Excellence", + -12.371889114379883 + ], + [ + "▁utilities", + -12.371962547302246 + ], + [ + "frau", + -12.372111320495605 + ], + [ + "▁poze", + -12.3721342086792 + ], + [ + "août", + -12.372307777404785 + ], + [ + "ango", + -12.372514724731445 + ], + [ + "give", + -12.372532844543457 + ], + [ + "▁appelé", + -12.372576713562012 + ], + [ + "▁yeast", + -12.372671127319336 + ], + [ + "▁enrollment", + -12.372676849365234 + ], + [ + "organiz", + -12.3727445602417 + ], + [ + "▁asociat", + -12.372753143310547 + ], + [ + "▁cattle", + -12.372772216796875 + ], + [ + "▁Solution", + -12.372798919677734 + ], + [ + "evoke", + -12.372807502746582 + ], + [ + "▁Hampshire", + -12.372857093811035 + ], + [ + "▁yeah", + -12.372878074645996 + ], + [ + "▁Argentina", + -12.372928619384766 + ], + [ + "▁abnormal", + -12.373022079467773 + ], + [ + "▁Heights", + -12.373082160949707 + ], + [ + "▁Mitchell", + -12.373099327087402 + ], + [ + "▁Quad", + -12.373350143432617 + ], + [ + "▁textures", + -12.373382568359375 + ], + [ + "▁coalition", + -12.373384475708008 + ], + [ + "▁dataset", + -12.37338924407959 + ], + [ + "World", + -12.373438835144043 + ], + [ + "ständ", + -12.373456001281738 + ], + [ + "▁groove", + -12.373476028442383 + ], + [ + "▁emotionally", + -12.373562812805176 + ], + [ + "▁preciz", + -12.373636245727539 + ], + [ + "kte", + -12.373741149902344 + ], + [ + "berechtigt", + -12.373828887939453 + ], + [ + "▁1971", + -12.373888969421387 + ], + [ + "grandes", + -12.373907089233398 + ], + [ + "▁Broadway", + -12.37391185760498 + ], + [ + "▁comunicat", + -12.373994827270508 + ], + [ + "nui", + -12.37402629852295 + ], + [ + "GER", + -12.374079704284668 + ], + [ + "pick", + -12.374125480651855 + ], + [ + "inscrit", + -12.37414264678955 + ], + [ + "▁Gross", + -12.374258995056152 + ], + [ + "▁McDonald", + -12.374310493469238 + ], + [ + "▁Zero", + -12.374330520629883 + ], + [ + "▁Halb", + -12.374341011047363 + ], + [ + "▁caractère", + -12.374553680419922 + ], + [ + "▁doctrine", + -12.374553680419922 + ], + [ + "▁Sinne", + -12.37458610534668 + ], + [ + "MLS", + -12.374594688415527 + ], + [ + "▁réel", + -12.374759674072266 + ], + [ + "▁Ful", + -12.37476921081543 + ], + [ + "limiting", + -12.37483024597168 + ], + [ + "▁Gan", + -12.374870300292969 + ], + [ + "▁exclude", + -12.37490463256836 + ], + [ + "imba", + -12.374974250793457 + ], + [ + "rolul", + -12.374991416931152 + ], + [ + "▁veggies", + -12.375059127807617 + ], + [ + "▁fasci", + -12.375092506408691 + ], + [ + "▁oval", + -12.375173568725586 + ], + [ + "▁contacter", + -12.375221252441406 + ], + [ + "▁linking", + -12.375279426574707 + ], + [ + "▁knit", + -12.375308990478516 + ], + [ + "▁enroll", + -12.375504493713379 + ], + [ + "▁dédié", + -12.375533103942871 + ], + [ + "▁renting", + -12.375541687011719 + ], + [ + "▁genera", + -12.37567138671875 + ], + [ + "citing", + -12.375691413879395 + ], + [ + "▁bend", + -12.375700950622559 + ], + [ + "guin", + -12.375752449035645 + ], + [ + "▁caregiver", + -12.375768661499023 + ], + [ + "▁könnt", + -12.375791549682617 + ], + [ + "▁Scripture", + 
-12.375795364379883 + ], + [ + "▁Mic", + -12.375899314880371 + ], + [ + "▁Denmark", + -12.37590217590332 + ], + [ + "▁qualifying", + -12.375917434692383 + ], + [ + "▁costumes", + -12.375958442687988 + ], + [ + "▁dwelling", + -12.37601375579834 + ], + [ + "▁recrut", + -12.376099586486816 + ], + [ + "▁bedding", + -12.37618637084961 + ], + [ + "gesprochen", + -12.376253128051758 + ], + [ + "▁editors", + -12.376386642456055 + ], + [ + "/12", + -12.37657642364502 + ], + [ + "▁cumparat", + -12.376583099365234 + ], + [ + "fiction", + -12.376730918884277 + ], + [ + "▁spinal", + -12.376740455627441 + ], + [ + "▁pathway", + -12.376799583435059 + ], + [ + "▁vârst", + -12.37683391571045 + ], + [ + "mba", + -12.376874923706055 + ], + [ + "▁enthusiastic", + -12.37692642211914 + ], + [ + "▁Watt", + -12.37697982788086 + ], + [ + "symptom", + -12.376992225646973 + ], + [ + "▁pup", + -12.37712287902832 + ], + [ + "▁glorious", + -12.377225875854492 + ], + [ + "▁fața", + -12.377228736877441 + ], + [ + "▁prohibited", + -12.377256393432617 + ], + [ + "vergleich", + -12.377286911010742 + ], + [ + "▁suspected", + -12.377334594726562 + ], + [ + "▁Railway", + -12.377381324768066 + ], + [ + "▁Aujourd", + -12.377469062805176 + ], + [ + "▁Patients", + -12.377476692199707 + ], + [ + "▁séance", + -12.377501487731934 + ], + [ + "▁contraire", + -12.377503395080566 + ], + [ + "▁cuvânt", + -12.37771224975586 + ], + [ + "▁trotzdem", + -12.37773609161377 + ], + [ + "émission", + -12.377795219421387 + ], + [ + "▁bore", + -12.37782096862793 + ], + [ + "▁safeguard", + -12.377851486206055 + ], + [ + "▁galleries", + -12.37820053100586 + ], + [ + "cron", + -12.378268241882324 + ], + [ + "▁Rica", + -12.378335952758789 + ], + [ + "fläche", + -12.37839126586914 + ], + [ + "▁Slow", + -12.37842082977295 + ], + [ + "▁vara", + -12.378549575805664 + ], + [ + "▁Swan", + -12.378564834594727 + ], + [ + "▁compounds", + -12.378564834594727 + ], + [ + "▁Slo", + -12.378621101379395 + ], + [ + "▁accommodations", + -12.378621101379395 + ], + [ + "▁Putin", + -12.378708839416504 + ], + [ + "▁undertaken", + -12.378767967224121 + ], + [ + "▁prépar", + -12.37879467010498 + ], + [ + "▁gandi", + -12.37881088256836 + ], + [ + "sediul", + -12.378924369812012 + ], + [ + "▁Nathan", + -12.379143714904785 + ], + [ + "▁fountain", + -12.379173278808594 + ], + [ + "▁mère", + -12.379194259643555 + ], + [ + "fatty", + -12.379201889038086 + ], + [ + "▁concentrated", + -12.379241943359375 + ], + [ + "richtung", + -12.379300117492676 + ], + [ + "▁appropriately", + -12.37955379486084 + ], + [ + "107", + -12.379631996154785 + ], + [ + "▁shark", + -12.379735946655273 + ], + [ + "▁Topic", + -12.379867553710938 + ], + [ + "▁Ausstellung", + -12.379880905151367 + ], + [ + "▁SUA", + -12.380267143249512 + ], + [ + "SER", + -12.380359649658203 + ], + [ + "▁Nicole", + -12.38039779663086 + ], + [ + "▁utilisateurs", + -12.380620956420898 + ], + [ + "▁Brazilian", + -12.380753517150879 + ], + [ + "▁continut", + -12.380865097045898 + ], + [ + "▁sanatate", + -12.380881309509277 + ], + [ + "faudra", + -12.380882263183594 + ], + [ + "nahm", + -12.380938529968262 + ], + [ + "▁Specific", + -12.381153106689453 + ], + [ + "aiba", + -12.381199836730957 + ], + [ + "cepând", + -12.381296157836914 + ], + [ + "▁Beer", + -12.381366729736328 + ], + [ + "roni", + -12.381616592407227 + ], + [ + "kay", + -12.381636619567871 + ], + [ + "▁gravity", + -12.381844520568848 + ], + [ + "▁verfügt", + -12.381856918334961 + ], + [ + "7:30", + -12.381878852844238 + ], + [ + "▁Players", + -12.381945610046387 + ], 
+ [ + "▁Industries", + -12.38198184967041 + ], + [ + "punkte", + -12.382119178771973 + ], + [ + "▁yacht", + -12.382135391235352 + ], + [ + "-04", + -12.382149696350098 + ], + [ + "onné", + -12.382192611694336 + ], + [ + "▁Cards", + -12.382221221923828 + ], + [ + "▁fete", + -12.382420539855957 + ], + [ + "breaking", + -12.38257884979248 + ], + [ + "baum", + -12.382621765136719 + ], + [ + "nada", + -12.382651329040527 + ], + [ + "▁geplant", + -12.382750511169434 + ], + [ + "genuinely", + -12.382766723632812 + ], + [ + "talk", + -12.382871627807617 + ], + [ + "▁disadvantage", + -12.382920265197754 + ], + [ + "▁shutter", + -12.383003234863281 + ], + [ + "virus", + -12.38302230834961 + ], + [ + "▁cricket", + -12.38308048248291 + ], + [ + "▁comenzi", + -12.383102416992188 + ], + [ + "hier", + -12.383170127868652 + ], + [ + "▁aufzu", + -12.383198738098145 + ], + [ + "▁Rez", + -12.38321304321289 + ], + [ + "▁conclusions", + -12.383329391479492 + ], + [ + "▁Wang", + -12.383509635925293 + ], + [ + "Darüber", + -12.383524894714355 + ], + [ + "▁CSS", + -12.383573532104492 + ], + [ + "CW", + -12.383780479431152 + ], + [ + "▁Chr", + -12.383790969848633 + ], + [ + "▁traded", + -12.383843421936035 + ], + [ + "▁Schon", + -12.384265899658203 + ], + [ + "mped", + -12.38429069519043 + ], + [ + "▁alloy", + -12.384385108947754 + ], + [ + "AVE", + -12.38451099395752 + ], + [ + "▁imagery", + -12.384542465209961 + ], + [ + "▁resurse", + -12.38479995727539 + ], + [ + "▁Thunder", + -12.384834289550781 + ], + [ + "▁schimbare", + -12.384860038757324 + ], + [ + "▁Youtube", + -12.38499927520752 + ], + [ + "▁Monster", + -12.385189056396484 + ], + [ + "phil", + -12.385234832763672 + ], + [ + "▁bébé", + -12.385284423828125 + ], + [ + "Creating", + -12.385428428649902 + ], + [ + "ănă", + -12.385466575622559 + ], + [ + "▁Staat", + -12.385504722595215 + ], + [ + "adică", + -12.385531425476074 + ], + [ + "▁boyfriend", + -12.385552406311035 + ], + [ + "▁Winner", + -12.385594367980957 + ], + [ + "▁disputes", + -12.385653495788574 + ], + [ + "▁lush", + -12.3856840133667 + ], + [ + "▁CMS", + -12.385719299316406 + ], + [ + "▁locaux", + -12.385725021362305 + ], + [ + "▁Verfahren", + -12.38576889038086 + ], + [ + "▁Café", + -12.385786056518555 + ], + [ + "▁Vorstand", + -12.385870933532715 + ], + [ + "▁lucrat", + -12.385960578918457 + ], + [ + "▁Root", + -12.38602352142334 + ], + [ + "▁decis", + -12.386059761047363 + ], + [ + "▁Shadow", + -12.386062622070312 + ], + [ + "▁countryside", + -12.386067390441895 + ], + [ + "▁analiza", + -12.386114120483398 + ], + [ + "obos", + -12.38616943359375 + ], + [ + "opera", + -12.386175155639648 + ], + [ + "actu", + -12.386207580566406 + ], + [ + "▁Songs", + -12.3864164352417 + ], + [ + "reifen", + -12.38648509979248 + ], + [ + "▁hilft", + -12.386650085449219 + ], + [ + "region", + -12.386727333068848 + ], + [ + "▁categoria", + -12.387001991271973 + ], + [ + "capturing", + -12.38701343536377 + ], + [ + "▁1967", + -12.387025833129883 + ], + [ + "▁optimized", + -12.387032508850098 + ], + [ + "▁Dim", + -12.387353897094727 + ], + [ + "▁adapté", + -12.387447357177734 + ], + [ + "zeichnet", + -12.387524604797363 + ], + [ + "▁strada", + -12.387625694274902 + ], + [ + "fulness", + -12.38774585723877 + ], + [ + "▁technically", + -12.38774585723877 + ], + [ + "▁marker", + -12.387757301330566 + ], + [ + "▁vizita", + -12.387808799743652 + ], + [ + "▁imperative", + -12.387986183166504 + ], + [ + "▁pensé", + -12.38802719116211 + ], + [ + "▁drilling", + -12.388030052185059 + ], + [ + "ISA", + -12.38818073272705 + 
], + [ + "▁Massage", + -12.388201713562012 + ], + [ + "▁Terry", + -12.388238906860352 + ], + [ + "▁pourtant", + -12.38835334777832 + ], + [ + "▁declaration", + -12.388440132141113 + ], + [ + "▁instructors", + -12.388453483581543 + ], + [ + "Eventually", + -12.38847827911377 + ], + [ + "▁banned", + -12.38847827911377 + ], + [ + "MAT", + -12.388520240783691 + ], + [ + "▁medici", + -12.38856315612793 + ], + [ + "▁Warm", + -12.388615608215332 + ], + [ + "▁trec", + -12.388731002807617 + ], + [ + "▁ecran", + -12.388763427734375 + ], + [ + "▁goat", + -12.388838768005371 + ], + [ + "▁manipulation", + -12.388850212097168 + ], + [ + "▁mayor", + -12.388898849487305 + ], + [ + "▁unterwegs", + -12.388975143432617 + ], + [ + "▁journals", + -12.3890380859375 + ], + [ + "▁hedge", + -12.389239311218262 + ], + [ + "Merc", + -12.389300346374512 + ], + [ + "▁joueurs", + -12.389411926269531 + ], + [ + "▁Religion", + -12.3894624710083 + ], + [ + "▁Mountains", + -12.389477729797363 + ], + [ + "▁renewed", + -12.389497756958008 + ], + [ + "▁Limit", + -12.389543533325195 + ], + [ + "ikea", + -12.389771461486816 + ], + [ + "▁utiliza", + -12.38977336883545 + ], + [ + "sogenannte", + -12.389808654785156 + ], + [ + "0.2", + -12.389836311340332 + ], + [ + "▁Organ", + -12.38987922668457 + ], + [ + "▁Shakespeare", + -12.389952659606934 + ], + [ + "▁Maintenance", + -12.38995361328125 + ], + [ + "▁Wärme", + -12.389954566955566 + ], + [ + "▁Northwest", + -12.390060424804688 + ], + [ + "▁numit", + -12.390106201171875 + ], + [ + "▁mica", + -12.390165328979492 + ], + [ + "turm", + -12.390168190002441 + ], + [ + "▁motivate", + -12.390250205993652 + ], + [ + "▁Staats", + -12.390355110168457 + ], + [ + "optimum", + -12.390487670898438 + ], + [ + "▁sortir", + -12.390546798706055 + ], + [ + "▁Asset", + -12.390555381774902 + ], + [ + "▁hervorragend", + -12.390692710876465 + ], + [ + "▁commentary", + -12.39071273803711 + ], + [ + "▁actuellement", + -12.390732765197754 + ], + [ + "NER", + -12.390765190124512 + ], + [ + "NL", + -12.390789985656738 + ], + [ + "ritt", + -12.390803337097168 + ], + [ + "▁Wirtschafts", + -12.390813827514648 + ], + [ + "träger", + -12.390840530395508 + ], + [ + "▁Versand", + -12.390870094299316 + ], + [ + "▁nostri", + -12.390953063964844 + ], + [ + "▁enorm", + -12.391227722167969 + ], + [ + "▁whale", + -12.391260147094727 + ], + [ + "▁Aufgabe", + -12.391277313232422 + ], + [ + "▁unfair", + -12.391291618347168 + ], + [ + "▁Cord", + -12.391315460205078 + ], + [ + "incorporating", + -12.39134693145752 + ], + [ + "luck", + -12.39157772064209 + ], + [ + "Afrique", + -12.39168643951416 + ], + [ + "▁coated", + -12.391857147216797 + ], + [ + "▁india", + -12.391908645629883 + ], + [ + "▁temporarily", + -12.39193058013916 + ], + [ + "▁ciuda", + -12.392097473144531 + ], + [ + "▁coral", + -12.392184257507324 + ], + [ + "▁wirkt", + -12.392203330993652 + ], + [ + "▁folding", + -12.392309188842773 + ], + [ + "wichtigsten", + -12.392398834228516 + ], + [ + "impacted", + -12.392422676086426 + ], + [ + "▁wählen", + -12.392423629760742 + ], + [ + "▁differentiate", + -12.392492294311523 + ], + [ + "▁froid", + -12.392544746398926 + ], + [ + "▁hug", + -12.39255142211914 + ], + [ + "▁construi", + -12.39255428314209 + ], + [ + "▁membru", + -12.392603874206543 + ], + [ + "▁masculin", + -12.392667770385742 + ], + [ + "partisan", + -12.392711639404297 + ], + [ + "▁schimba", + -12.392725944519043 + ], + [ + "▁economies", + -12.392827987670898 + ], + [ + "▁Abraham", + -12.392914772033691 + ], + [ + "wesen", + -12.393013954162598 + ], + [ 
+ "enia", + -12.393026351928711 + ], + [ + "▁answering", + -12.393080711364746 + ], + [ + "▁activități", + -12.39309024810791 + ], + [ + "▁mémoire", + -12.393160820007324 + ], + [ + "▁versucht", + -12.393305778503418 + ], + [ + "ember", + -12.39333438873291 + ], + [ + "▁instala", + -12.39334774017334 + ], + [ + "▁eligibility", + -12.393407821655273 + ], + [ + "▁enjoyment", + -12.393409729003906 + ], + [ + "▁Arme", + -12.39350414276123 + ], + [ + "although", + -12.393534660339355 + ], + [ + "▁encompass", + -12.393596649169922 + ], + [ + "▁zufrieden", + -12.393658638000488 + ], + [ + "Script", + -12.393691062927246 + ], + [ + "KG", + -12.39385986328125 + ], + [ + "▁adhesive", + -12.393902778625488 + ], + [ + "▁Verkehrs", + -12.393908500671387 + ], + [ + "▁monitored", + -12.394103050231934 + ], + [ + "▁Conservation", + -12.394148826599121 + ], + [ + "hav", + -12.394156455993652 + ], + [ + "▁Above", + -12.394174575805664 + ], + [ + "▁Former", + -12.394241333007812 + ], + [ + "▁Certain", + -12.394250869750977 + ], + [ + "saving", + -12.394311904907227 + ], + [ + "▁Pun", + -12.394390106201172 + ], + [ + "▁awkward", + -12.394397735595703 + ], + [ + "▁Pretty", + -12.394410133361816 + ], + [ + "▁scanning", + -12.394417762756348 + ], + [ + "layer", + -12.394527435302734 + ], + [ + "motor", + -12.39453125 + ], + [ + "▁beginnt", + -12.39455795288086 + ], + [ + "▁affiliated", + -12.394681930541992 + ], + [ + "▁archives", + -12.394686698913574 + ], + [ + "▁sunshine", + -12.394892692565918 + ], + [ + "kha", + -12.394988059997559 + ], + [ + "▁investigated", + -12.395149230957031 + ], + [ + "▁fantas", + -12.395277976989746 + ], + [ + "▁united", + -12.395355224609375 + ], + [ + "allegedly", + -12.395373344421387 + ], + [ + "▁Eugen", + -12.3955078125 + ], + [ + "▁proprie", + -12.395843505859375 + ], + [ + "uca", + -12.396183013916016 + ], + [ + "DES", + -12.396187782287598 + ], + [ + "ştii", + -12.396190643310547 + ], + [ + "▁Running", + -12.39620590209961 + ], + [ + "lbstverständlich", + -12.396248817443848 + ], + [ + "index", + -12.396300315856934 + ], + [ + "▁studiu", + -12.396512031555176 + ], + [ + "URE", + -12.396553039550781 + ], + [ + "gültig", + -12.396627426147461 + ], + [ + "▁lundi", + -12.396649360656738 + ], + [ + "▁Zucker", + -12.396650314331055 + ], + [ + "▁positively", + -12.396721839904785 + ], + [ + "folgenden", + -12.396758079528809 + ], + [ + "anță", + -12.396800994873047 + ], + [ + "▁clan", + -12.396866798400879 + ], + [ + "▁literacy", + -12.396879196166992 + ], + [ + "▁ober", + -12.39699935913086 + ], + [ + "John", + -12.397003173828125 + ], + [ + "greg", + -12.39700984954834 + ], + [ + "▁titlu", + -12.397049903869629 + ], + [ + "▁ţări", + -12.39707088470459 + ], + [ + "Bra", + -12.397100448608398 + ], + [ + "▁Evans", + -12.397164344787598 + ], + [ + "modern", + -12.397172927856445 + ], + [ + "▁hauteur", + -12.397353172302246 + ], + [ + "refers", + -12.397416114807129 + ], + [ + "▁plasma", + -12.397575378417969 + ], + [ + "▁optic", + -12.397595405578613 + ], + [ + "▁shampoo", + -12.397619247436523 + ], + [ + "▁cheek", + -12.397727966308594 + ], + [ + "opted", + -12.397741317749023 + ], + [ + "▁persönlich", + -12.397832870483398 + ], + [ + "▁1945", + -12.398118019104004 + ], + [ + "ICI", + -12.398193359375 + ], + [ + "biotic", + -12.398222923278809 + ], + [ + "▁Beruf", + -12.398372650146484 + ], + [ + "▁trez", + -12.398383140563965 + ], + [ + "▁diploma", + -12.398388862609863 + ], + [ + "nahmen", + -12.398421287536621 + ], + [ + "▁curl", + -12.398625373840332 + ], + [ + "▁agricole", + 
-12.398824691772461 + ], + [ + "▁recomand", + -12.398844718933105 + ], + [ + "▁pediatric", + -12.398862838745117 + ], + [ + "Fiecare", + -12.39887523651123 + ], + [ + "Anlage", + -12.398906707763672 + ], + [ + "weiß", + -12.398974418640137 + ], + [ + "elecommunication", + -12.39898681640625 + ], + [ + "hog", + -12.399184226989746 + ], + [ + "▁Stamp", + -12.399364471435547 + ], + [ + "▁Tipp", + -12.399369239807129 + ], + [ + "▁kindness", + -12.399415969848633 + ], + [ + "▁Marina", + -12.399577140808105 + ], + [ + "▁Gleich", + -12.39963436126709 + ], + [ + "▁grij", + -12.39970588684082 + ], + [ + "▁desperate", + -12.39974594116211 + ], + [ + "▁recordings", + -12.399842262268066 + ], + [ + "▁neglect", + -12.399861335754395 + ], + [ + "▁inherent", + -12.400035858154297 + ], + [ + "▁Rezept", + -12.400138854980469 + ], + [ + "▁soins", + -12.400164604187012 + ], + [ + "▁brut", + -12.400250434875488 + ], + [ + "▁revolutionary", + -12.400495529174805 + ], + [ + "▁liberté", + -12.400530815124512 + ], + [ + "cours", + -12.400945663452148 + ], + [ + "▁Similar", + -12.401247024536133 + ], + [ + "▁cheveux", + -12.40136432647705 + ], + [ + "▁ieftin", + -12.401599884033203 + ], + [ + "▁promovare", + -12.40160846710205 + ], + [ + "▁grains", + -12.401729583740234 + ], + [ + "ти", + -12.401749610900879 + ], + [ + "▁fonctionnement", + -12.401789665222168 + ], + [ + "▁Coming", + -12.401832580566406 + ], + [ + "▁analytical", + -12.401847839355469 + ], + [ + "▁simplify", + -12.401856422424316 + ], + [ + "▁chambres", + -12.401893615722656 + ], + [ + "▁fifty", + -12.401930809020996 + ], + [ + "jour", + -12.402070999145508 + ], + [ + "▁(17", + -12.402194023132324 + ], + [ + "cărui", + -12.402292251586914 + ], + [ + "▁harmony", + -12.402352333068848 + ], + [ + "grin", + -12.402355194091797 + ], + [ + "▁drunk", + -12.402359962463379 + ], + [ + "260", + -12.402374267578125 + ], + [ + "3-5", + -12.40243148803711 + ], + [ + "▁articole", + -12.402442932128906 + ], + [ + "▁flooding", + -12.402482986450195 + ], + [ + "halle", + -12.402580261230469 + ], + [ + "▁defects", + -12.40276050567627 + ], + [ + "▁rifle", + -12.402839660644531 + ], + [ + "▁Boc", + -12.402843475341797 + ], + [ + "▁Athletic", + -12.40284538269043 + ], + [ + "▁acordat", + -12.40292739868164 + ], + [ + "AIR", + -12.402969360351562 + ], + [ + "▁entwickeln", + -12.403104782104492 + ], + [ + "▁Advance", + -12.403188705444336 + ], + [ + "▁Heil", + -12.403216361999512 + ], + [ + "Stainless", + -12.403345108032227 + ], + [ + "▁Psychology", + -12.40337085723877 + ], + [ + "▁omul", + -12.403435707092285 + ], + [ + "▁Arbeiten", + -12.403494834899902 + ], + [ + "▁rabbit", + -12.403495788574219 + ], + [ + "▁méta", + -12.40351390838623 + ], + [ + "ismul", + -12.403534889221191 + ], + [ + "▁Herausforderung", + -12.403594970703125 + ], + [ + "▁Euch", + -12.403654098510742 + ], + [ + "geschichte", + -12.40390682220459 + ], + [ + "▁Milk", + -12.404057502746582 + ], + [ + "▁pregăt", + -12.404065132141113 + ], + [ + "▁Standort", + -12.404141426086426 + ], + [ + "Val", + -12.404180526733398 + ], + [ + "▁Ronald", + -12.404350280761719 + ], + [ + "▁Werbe", + -12.404558181762695 + ], + [ + "▁restrict", + -12.404658317565918 + ], + [ + "▁tablespoon", + -12.404844284057617 + ], + [ + "▁Amendment", + -12.404845237731934 + ], + [ + "▁Johnny", + -12.404914855957031 + ], + [ + "▁lively", + -12.404938697814941 + ], + [ + "ORD", + -12.405147552490234 + ], + [ + "▁mulţi", + -12.40523624420166 + ], + [ + "èrent", + -12.405241012573242 + ], + [ + "Every", + -12.405277252197266 + ], + [ 
+ "eignet", + -12.405296325683594 + ], + [ + "GD", + -12.40546989440918 + ], + [ + "▁Ghana", + -12.405628204345703 + ], + [ + "▁wealthy", + -12.40576171875 + ], + [ + "▁advocates", + -12.405818939208984 + ], + [ + "▁Campaign", + -12.40584659576416 + ], + [ + "▁posters", + -12.405964851379395 + ], + [ + "flug", + -12.406011581420898 + ], + [ + "▁métier", + -12.406139373779297 + ], + [ + "kir", + -12.406148910522461 + ], + [ + "bond", + -12.406176567077637 + ], + [ + "datorita", + -12.406188011169434 + ], + [ + "▁Hochzeit", + -12.406230926513672 + ], + [ + "▁effectué", + -12.406271934509277 + ], + [ + "▁angles", + -12.40654182434082 + ], + [ + "▁Electrical", + -12.406705856323242 + ], + [ + "▁Administrator", + -12.40674114227295 + ], + [ + "▁spur", + -12.407389640808105 + ], + [ + "▁größere", + -12.407444953918457 + ], + [ + "woke", + -12.407515525817871 + ], + [ + "▁gewinnen", + -12.407689094543457 + ], + [ + "▁ajută", + -12.407712936401367 + ], + [ + "▁ventilation", + -12.407853126525879 + ], + [ + "▁viaţa", + -12.407853126525879 + ], + [ + "▁Dinner", + -12.408079147338867 + ], + [ + "respond", + -12.408095359802246 + ], + [ + "▁OEM", + -12.408120155334473 + ], + [ + "▁affair", + -12.4081392288208 + ], + [ + "▁öffentlich", + -12.408143043518066 + ], + [ + "ENS", + -12.408209800720215 + ], + [ + "▁Cent", + -12.408224105834961 + ], + [ + "▁făc", + -12.408267974853516 + ], + [ + "▁Doppel", + -12.408285140991211 + ], + [ + "▁fericit", + -12.408363342285156 + ], + [ + "▁coordon", + -12.40845775604248 + ], + [ + "geht", + -12.408547401428223 + ], + [ + "▁perfekte", + -12.408610343933105 + ], + [ + "▁sportive", + -12.408700942993164 + ], + [ + "▁proiectul", + -12.40870189666748 + ], + [ + "▁deadly", + -12.408804893493652 + ], + [ + "Geschäft", + -12.408822059631348 + ], + [ + "▁inspirational", + -12.408854484558105 + ], + [ + "+1", + -12.409013748168945 + ], + [ + "▁pearl", + -12.409022331237793 + ], + [ + "▁scrub", + -12.409036636352539 + ], + [ + "▁scheint", + -12.409079551696777 + ], + [ + "poo", + -12.409147262573242 + ], + [ + "▁Pier", + -12.409220695495605 + ], + [ + "▁commented", + -12.409285545349121 + ], + [ + "lute", + -12.409302711486816 + ], + [ + "▁cancelled", + -12.409488677978516 + ], + [ + "Win", + -12.409605979919434 + ], + [ + "▁payroll", + -12.409781455993652 + ], + [ + "▁varsta", + -12.409881591796875 + ], + [ + "stuffed", + -12.410097122192383 + ], + [ + "▁beads", + -12.410138130187988 + ], + [ + "▁poems", + -12.410356521606445 + ], + [ + "pokesman", + -12.410399436950684 + ], + [ + "▁checklist", + -12.410523414611816 + ], + [ + "▁Mich", + -12.410636901855469 + ], + [ + "GEN", + -12.410676002502441 + ], + [ + "▁Lau", + -12.410783767700195 + ], + [ + "▁stie", + -12.410965919494629 + ], + [ + "▁Lovely", + -12.4110107421875 + ], + [ + "▁Anschluss", + -12.411062240600586 + ], + [ + "▁personaj", + -12.41108226776123 + ], + [ + "▁ausgestattet", + -12.411121368408203 + ], + [ + "▁beginners", + -12.411163330078125 + ], + [ + "▁noon", + -12.411189079284668 + ], + [ + "▁celule", + -12.41128921508789 + ], + [ + "Trans", + -12.411324501037598 + ], + [ + "boot", + -12.411331176757812 + ], + [ + "▁drumul", + -12.41136646270752 + ], + [ + "gruppen", + -12.41140079498291 + ], + [ + "étend", + -12.41140365600586 + ], + [ + "▁risques", + -12.411405563354492 + ], + [ + "acclaimed", + -12.411447525024414 + ], + [ + "▁celelalte", + -12.411617279052734 + ], + [ + "▁condiţii", + -12.411620140075684 + ], + [ + "▁skiing", + -12.411685943603516 + ], + [ + "▁optimale", + -12.411689758300781 + ], + [ + 
"technology", + -12.411773681640625 + ], + [ + "▁renew", + -12.411784172058105 + ], + [ + "Cloud", + -12.41179084777832 + ], + [ + "▁damaging", + -12.411905288696289 + ], + [ + "GT", + -12.412219047546387 + ], + [ + "▁Reform", + -12.41230583190918 + ], + [ + "vedem", + -12.412349700927734 + ], + [ + "▁indicat", + -12.412461280822754 + ], + [ + "▁Maker", + -12.412467002868652 + ], + [ + "▁lichid", + -12.412582397460938 + ], + [ + "3.1", + -12.412614822387695 + ], + [ + "păt", + -12.412620544433594 + ], + [ + "lumina", + -12.41264820098877 + ], + [ + "▁Situ", + -12.412806510925293 + ], + [ + "▁Archives", + -12.412857055664062 + ], + [ + "▁allergies", + -12.41287899017334 + ], + [ + "▁Cameron", + -12.412883758544922 + ], + [ + "▁Immun", + -12.412899017333984 + ], + [ + "wissenschaftlich", + -12.41301441192627 + ], + [ + "▁supplémentaire", + -12.413128852844238 + ], + [ + "▁puterea", + -12.413261413574219 + ], + [ + "Lab", + -12.413331985473633 + ], + [ + "inspired", + -12.413384437561035 + ], + [ + "▁shrink", + -12.413403511047363 + ], + [ + "▁voit", + -12.413426399230957 + ], + [ + "▁chopped", + -12.413467407226562 + ], + [ + "▁Franz", + -12.413537979125977 + ], + [ + "oku", + -12.413652420043945 + ], + [ + "▁suppress", + -12.413673400878906 + ], + [ + "▁impress", + -12.413751602172852 + ], + [ + "▁Liga", + -12.413755416870117 + ], + [ + "▁Eight", + -12.41378402709961 + ], + [ + "720", + -12.413795471191406 + ], + [ + "▁securely", + -12.413870811462402 + ], + [ + "KU", + -12.413934707641602 + ], + [ + "modell", + -12.413992881774902 + ], + [ + "Ensure", + -12.414154052734375 + ], + [ + "größte", + -12.414204597473145 + ], + [ + "▁réuni", + -12.414215087890625 + ], + [ + "▁Internal", + -12.41423225402832 + ], + [ + "▁Punkte", + -12.414320945739746 + ], + [ + "▁replicate", + -12.414412498474121 + ], + [ + "▁spreadsheet", + -12.414434432983398 + ], + [ + "▁Hindu", + -12.414549827575684 + ], + [ + "▁Cham", + -12.414578437805176 + ], + [ + "nati", + -12.414670944213867 + ], + [ + "imply", + -12.414679527282715 + ], + [ + "funded", + -12.414894104003906 + ], + [ + "▁charitable", + -12.414896011352539 + ], + [ + "▁imagined", + -12.415014266967773 + ], + [ + "hausen", + -12.41517448425293 + ], + [ + "Keeping", + -12.415239334106445 + ], + [ + "▁attitudes", + -12.415287971496582 + ], + [ + "esque", + -12.415365219116211 + ], + [ + "▁Tennis", + -12.415409088134766 + ], + [ + "Jeremy", + -12.415410041809082 + ], + [ + "▁majeur", + -12.415475845336914 + ], + [ + "▁stii", + -12.4155912399292 + ], + [ + "▁herbal", + -12.415790557861328 + ], + [ + "▁cauta", + -12.41580867767334 + ], + [ + "▁voluntary", + -12.415828704833984 + ], + [ + "wohl", + -12.415877342224121 + ], + [ + "▁ideea", + -12.41588306427002 + ], + [ + "▁WW", + -12.415899276733398 + ], + [ + "▁erneut", + -12.416010856628418 + ], + [ + "größten", + -12.416094779968262 + ], + [ + "Grâce", + -12.416159629821777 + ], + [ + "▁Köln", + -12.416193008422852 + ], + [ + "▁mobilier", + -12.416199684143066 + ], + [ + "▁fool", + -12.416254043579102 + ], + [ + "▁Calcul", + -12.416295051574707 + ], + [ + "attaque", + -12.41637897491455 + ], + [ + "▁digestive", + -12.41656494140625 + ], + [ + "performance", + -12.416647911071777 + ], + [ + "▁homeowner", + -12.41675853729248 + ], + [ + "▁hunger", + -12.4169282913208 + ], + [ + "2.3", + -12.41696834564209 + ], + [ + "▁Sort", + -12.417085647583008 + ], + [ + "▁Dennis", + -12.41723918914795 + ], + [ + "▁certificat", + -12.417250633239746 + ], + [ + "▁Canal", + -12.417337417602539 + ], + [ + "▁Yesterday", + 
-12.417424201965332 + ], + [ + "▁sausage", + -12.417499542236328 + ], + [ + "▁perdu", + -12.417736053466797 + ], + [ + "ösen", + -12.417741775512695 + ], + [ + "▁preserved", + -12.417750358581543 + ], + [ + "▁trendy", + -12.4177885055542 + ], + [ + "▁iubire", + -12.417935371398926 + ], + [ + "▁grandfather", + -12.417961120605469 + ], + [ + "▁shoppers", + -12.41820240020752 + ], + [ + "▁verschieden", + -12.418252944946289 + ], + [ + "▁gagner", + -12.41826343536377 + ], + [ + "▁lucra", + -12.418437004089355 + ], + [ + "metru", + -12.418464660644531 + ], + [ + "buz", + -12.418469429016113 + ], + [ + "▁flourish", + -12.418484687805176 + ], + [ + "affin", + -12.418523788452148 + ], + [ + "▁Pflanzen", + -12.41858196258545 + ], + [ + "agh", + -12.418588638305664 + ], + [ + "▁Gill", + -12.418660163879395 + ], + [ + "▁Kä", + -12.418671607971191 + ], + [ + "▁Wege", + -12.41876220703125 + ], + [ + "▁Liberal", + -12.418929100036621 + ], + [ + "▁Glasgow", + -12.418944358825684 + ], + [ + "Objekt", + -12.4189453125 + ], + [ + "▁Huawei", + -12.4189453125 + ], + [ + "appropri", + -12.418986320495605 + ], + [ + "▁genius", + -12.419037818908691 + ], + [ + "▁brokers", + -12.419068336486816 + ], + [ + "▁themed", + -12.41918659210205 + ], + [ + "▁barre", + -12.419210433959961 + ], + [ + "1.7", + -12.419219017028809 + ], + [ + "▁Electro", + -12.419303894042969 + ], + [ + "▁umbrella", + -12.419333457946777 + ], + [ + "▁advisory", + -12.419417381286621 + ], + [ + "▁comport", + -12.419421195983887 + ], + [ + "▁neuer", + -12.419452667236328 + ], + [ + "▁Wick", + -12.419568061828613 + ], + [ + "wak", + -12.419618606567383 + ], + [ + "▁Woman", + -12.419695854187012 + ], + [ + "▁lesser", + -12.419843673706055 + ], + [ + "▁replied", + -12.419987678527832 + ], + [ + "▁représente", + -12.420050621032715 + ], + [ + "▁thé", + -12.420135498046875 + ], + [ + "Deutsch", + -12.420428276062012 + ], + [ + "Cat", + -12.420483589172363 + ], + [ + "▁équipes", + -12.420534133911133 + ], + [ + "▁spider", + -12.420578956604004 + ], + [ + "▁Gaming", + -12.420589447021484 + ], + [ + "▁Liste", + -12.420592308044434 + ], + [ + "▁affection", + -12.420639038085938 + ], + [ + "lipsa", + -12.420982360839844 + ], + [ + "▁Spider", + -12.420987129211426 + ], + [ + "▁Julia", + -12.421034812927246 + ], + [ + "anlagen", + -12.421159744262695 + ], + [ + "Kon", + -12.421363830566406 + ], + [ + "nței", + -12.421368598937988 + ], + [ + "▁Verwaltung", + -12.421483993530273 + ], + [ + "▁raspuns", + -12.421489715576172 + ], + [ + "samt", + -12.421491622924805 + ], + [ + "▁creștere", + -12.421512603759766 + ], + [ + "▁decorate", + -12.421701431274414 + ], + [ + "▁Chain", + -12.422021865844727 + ], + [ + "ów", + -12.422050476074219 + ], + [ + "0-0", + -12.422104835510254 + ], + [ + "▁Cran", + -12.422407150268555 + ], + [ + "▁streak", + -12.42242431640625 + ], + [ + "ор", + -12.422517776489258 + ], + [ + "▁căuta", + -12.422754287719727 + ], + [ + "wende", + -12.422801971435547 + ], + [ + "▁haine", + -12.42280387878418 + ], + [ + "▁landscaping", + -12.423009872436523 + ], + [ + "▁historian", + -12.423016548156738 + ], + [ + "▁grandchildren", + -12.423033714294434 + ], + [ + "▁crawl", + -12.423056602478027 + ], + [ + "▁Cub", + -12.423239707946777 + ], + [ + "▁nécessaires", + -12.423515319824219 + ], + [ + "▁swift", + -12.42352294921875 + ], + [ + "▁calculation", + -12.423656463623047 + ], + [ + "▁acteurs", + -12.423715591430664 + ], + [ + "VT", + -12.423752784729004 + ], + [ + "▁Hristos", + -12.423778533935547 + ], + [ + "▁slices", + -12.423850059509277 + ], + 
[ + "See", + -12.424203872680664 + ], + [ + "▁Bran", + -12.424233436584473 + ], + [ + "Symbol", + -12.424449920654297 + ], + [ + "▁allowance", + -12.424492835998535 + ], + [ + "▁Effective", + -12.424537658691406 + ], + [ + "▁Wünsche", + -12.424539566040039 + ], + [ + "▁shiny", + -12.424569129943848 + ], + [ + "▁professionalism", + -12.424715995788574 + ], + [ + "/6", + -12.424970626831055 + ], + [ + "▁terrasse", + -12.425087928771973 + ], + [ + "▁researcher", + -12.425156593322754 + ], + [ + "▁fragile", + -12.425203323364258 + ], + [ + "▁greeting", + -12.425274848937988 + ], + [ + "freien", + -12.4253511428833 + ], + [ + "▁valuation", + -12.425372123718262 + ], + [ + "▁incur", + -12.425386428833008 + ], + [ + "▁Zwischen", + -12.425559997558594 + ], + [ + "▁comfy", + -12.425569534301758 + ], + [ + "▁méthode", + -12.42569351196289 + ], + [ + "▁Pirate", + -12.425816535949707 + ], + [ + "▁Moto", + -12.425822257995605 + ], + [ + "(6)", + -12.425823211669922 + ], + [ + "▁devin", + -12.42582893371582 + ], + [ + "▁civic", + -12.425837516784668 + ], + [ + "usage", + -12.425889015197754 + ], + [ + "▁istorie", + -12.425945281982422 + ], + [ + "▁piste", + -12.425955772399902 + ], + [ + "▁Rug", + -12.426091194152832 + ], + [ + "pä", + -12.426129341125488 + ], + [ + "▁matur", + -12.426148414611816 + ], + [ + "CAS", + -12.426155090332031 + ], + [ + "TIC", + -12.42618465423584 + ], + [ + "▁Reduce", + -12.426234245300293 + ], + [ + "▁commemorat", + -12.426321983337402 + ], + [ + "▁cease", + -12.42653751373291 + ], + [ + "unterschiedliche", + -12.42656421661377 + ], + [ + "▁cinnamon", + -12.426581382751465 + ], + [ + "▁Font", + -12.426583290100098 + ], + [ + "▁justify", + -12.426751136779785 + ], + [ + "deteriorat", + -12.426797866821289 + ], + [ + "▁Schön", + -12.42684555053711 + ], + [ + "plain", + -12.426993370056152 + ], + [ + "frist", + -12.427002906799316 + ], + [ + "▁helmet", + -12.42712116241455 + ], + [ + "▁statute", + -12.42721939086914 + ], + [ + "accept", + -12.427236557006836 + ], + [ + "▁1,5", + -12.42724323272705 + ], + [ + "▁recon", + -12.42724323272705 + ], + [ + "▁Möbel", + -12.427348136901855 + ], + [ + "▁idées", + -12.427367210388184 + ], + [ + "automat", + -12.427552223205566 + ], + [ + "Team", + -12.42758846282959 + ], + [ + "▁performers", + -12.427688598632812 + ], + [ + "▁microphone", + -12.427722930908203 + ], + [ + "impotriva", + -12.427775382995605 + ], + [ + "▁pillows", + -12.42780876159668 + ], + [ + "▁accountable", + -12.427812576293945 + ], + [ + "▁strings", + -12.42782974243164 + ], + [ + "hydrate", + -12.427835464477539 + ], + [ + "▁Yan", + -12.427865028381348 + ], + [ + "starea", + -12.427918434143066 + ], + [ + "▁présenté", + -12.42793083190918 + ], + [ + "▁extensively", + -12.428048133850098 + ], + [ + "äst", + -12.428114891052246 + ], + [ + "▁correlation", + -12.428115844726562 + ], + [ + "bespoke", + -12.428119659423828 + ], + [ + "▁creste", + -12.428196907043457 + ], + [ + "▁Armenia", + -12.428248405456543 + ], + [ + "nose", + -12.428426742553711 + ], + [ + "▁strengthening", + -12.428604125976562 + ], + [ + "▁Horizon", + -12.428627014160156 + ], + [ + "▁obesity", + -12.428627967834473 + ], + [ + "seasoned", + -12.428686141967773 + ], + [ + "▁screenshot", + -12.428736686706543 + ], + [ + "girl", + -12.42875862121582 + ], + [ + "▁hardest", + -12.428826332092285 + ], + [ + "▁weakness", + -12.428855895996094 + ], + [ + "effectuer", + -12.429012298583984 + ], + [ + "▁Florence", + -12.429034233093262 + ], + [ + "▁Europene", + -12.429062843322754 + ], + [ + "triggered", + 
-12.429333686828613 + ], + [ + "Apparently", + -12.42939567565918 + ], + [ + "▁diagnose", + -12.42943286895752 + ], + [ + "rushed", + -12.429494857788086 + ], + [ + "▁trotz", + -12.429516792297363 + ], + [ + "▁spécial", + -12.429680824279785 + ], + [ + "▁lumi", + -12.429783821105957 + ], + [ + "7:00", + -12.429877281188965 + ], + [ + "▁publicat", + -12.429903984069824 + ], + [ + "ос", + -12.430086135864258 + ], + [ + "▁hue", + -12.430136680603027 + ], + [ + "▁termination", + -12.430139541625977 + ], + [ + "▁Nam", + -12.430240631103516 + ], + [ + "Well", + -12.430376052856445 + ], + [ + "▁Extract", + -12.430441856384277 + ], + [ + "atiile", + -12.43062686920166 + ], + [ + "▁vivid", + -12.43076229095459 + ], + [ + "hrs", + -12.430858612060547 + ], + [ + "▁povesti", + -12.430984497070312 + ], + [ + "stehenden", + -12.430988311767578 + ], + [ + "▁informieren", + -12.431070327758789 + ], + [ + "employed", + -12.431133270263672 + ], + [ + "▁armor", + -12.431180953979492 + ], + [ + "▁Columbus", + -12.431191444396973 + ], + [ + "Registr", + -12.431200981140137 + ], + [ + "▁Kamera", + -12.431203842163086 + ], + [ + "▁ugly", + -12.431203842163086 + ], + [ + "outil", + -12.431234359741211 + ], + [ + "▁evenly", + -12.43134593963623 + ], + [ + "lungul", + -12.431349754333496 + ], + [ + "koch", + -12.431439399719238 + ], + [ + "▁Dig", + -12.431450843811035 + ], + [ + "purely", + -12.431489944458008 + ], + [ + "▁Surf", + -12.431560516357422 + ], + [ + "rilla", + -12.431628227233887 + ], + [ + "▁Watson", + -12.43171215057373 + ], + [ + "trug", + -12.431719779968262 + ], + [ + "figuring", + -12.431784629821777 + ], + [ + "▁competitor", + -12.431807518005371 + ], + [ + "▁humid", + -12.431889533996582 + ], + [ + "▁Lawyer", + -12.43189811706543 + ], + [ + "Added", + -12.43205451965332 + ], + [ + "▁salva", + -12.432056427001953 + ], + [ + "▁drainage", + -12.4321870803833 + ], + [ + "Featuring", + -12.432220458984375 + ], + [ + "▁Pel", + -12.43234634399414 + ], + [ + "▁acasa", + -12.432611465454102 + ], + [ + "▁expectation", + -12.43265438079834 + ], + [ + "gibt", + -12.432663917541504 + ], + [ + "▁marginal", + -12.432831764221191 + ], + [ + "ceni", + -12.433028221130371 + ], + [ + "▁européen", + -12.433065414428711 + ], + [ + "clav", + -12.433090209960938 + ], + [ + "▁Shot", + -12.433167457580566 + ], + [ + "commun", + -12.43322467803955 + ], + [ + "▁Calendar", + -12.433247566223145 + ], + [ + "▁trek", + -12.433348655700684 + ], + [ + "rechtliche", + -12.433406829833984 + ], + [ + "▁Perry", + -12.43342399597168 + ], + [ + "▁surge", + -12.433484077453613 + ], + [ + "geschäft", + -12.433504104614258 + ], + [ + "paced", + -12.433793067932129 + ], + [ + "depend", + -12.433871269226074 + ], + [ + "▁Sache", + -12.433947563171387 + ], + [ + "▁Example", + -12.433998107910156 + ], + [ + "▁lider", + -12.434118270874023 + ], + [ + "▁nochmal", + -12.434240341186523 + ], + [ + "▁Present", + -12.434243202209473 + ], + [ + "KW", + -12.434335708618164 + ], + [ + "prompted", + -12.434350967407227 + ], + [ + "logique", + -12.434444427490234 + ], + [ + "Université", + -12.434466361999512 + ], + [ + "lith", + -12.434489250183105 + ], + [ + "▁Gefahr", + -12.434579849243164 + ], + [ + "▁Acid", + -12.434625625610352 + ], + [ + "objets", + -12.434791564941406 + ], + [ + "▁societies", + -12.434791564941406 + ], + [ + "▁distraction", + -12.434816360473633 + ], + [ + "▁puissance", + -12.434934616088867 + ], + [ + "▁alleviat", + -12.435026168823242 + ], + [ + "▁Capitol", + -12.435050010681152 + ], + [ + "▁Heim", + -12.435129165649414 + 
], + [ + "judicial", + -12.435230255126953 + ], + [ + "▁nowadays", + -12.435309410095215 + ], + [ + "▁Hammer", + -12.435317039489746 + ], + [ + "▁metallic", + -12.435327529907227 + ], + [ + "▁distr", + -12.435388565063477 + ], + [ + "▁dispos", + -12.435397148132324 + ], + [ + "profile", + -12.435408592224121 + ], + [ + "▁Nicolas", + -12.435602188110352 + ], + [ + "▁presa", + -12.435760498046875 + ], + [ + "augh", + -12.43578052520752 + ], + [ + "schuss", + -12.435787200927734 + ], + [ + "▁Diana", + -12.436062812805176 + ], + [ + "4-5", + -12.436097145080566 + ], + [ + "▁Chapel", + -12.43612003326416 + ], + [ + "▁zahar", + -12.436150550842285 + ], + [ + "âmb", + -12.4362154006958 + ], + [ + "▁Tarif", + -12.436264991760254 + ], + [ + "▁devastating", + -12.436339378356934 + ], + [ + "6:00", + -12.4364013671875 + ], + [ + "▁100,000", + -12.43645191192627 + ], + [ + "NIC", + -12.436580657958984 + ], + [ + "▁Lucas", + -12.436612129211426 + ], + [ + "▁bequem", + -12.436662673950195 + ], + [ + "▁Motion", + -12.436698913574219 + ], + [ + "7,000", + -12.436701774597168 + ], + [ + "▁malware", + -12.436708450317383 + ], + [ + "▁avenue", + -12.436723709106445 + ], + [ + "▁manger", + -12.436747550964355 + ], + [ + "▁Queensland", + -12.436857223510742 + ], + [ + "▁Papier", + -12.436861991882324 + ], + [ + "▁Increase", + -12.436880111694336 + ], + [ + "▁implies", + -12.436954498291016 + ], + [ + "▁äußer", + -12.43697452545166 + ], + [ + "▁Meine", + -12.436980247497559 + ], + [ + "Reuters", + -12.437155723571777 + ], + [ + "▁Belt", + -12.437232971191406 + ], + [ + "Educat", + -12.437251091003418 + ], + [ + "▁Aktion", + -12.437355041503906 + ], + [ + "schläge", + -12.437372207641602 + ], + [ + "▁înregistrat", + -12.437426567077637 + ], + [ + "▁Ortho", + -12.43756103515625 + ], + [ + "▁bulbs", + -12.437761306762695 + ], + [ + "kap", + -12.437793731689453 + ], + [ + "▁peinture", + -12.437901496887207 + ], + [ + "▁Lounge", + -12.437907218933105 + ], + [ + "▁Tampa", + -12.438008308410645 + ], + [ + "ifiziert", + -12.438100814819336 + ], + [ + "kinder", + -12.438172340393066 + ], + [ + "▁comparativ", + -12.438281059265137 + ], + [ + "häuser", + -12.438323974609375 + ], + [ + "incarn", + -12.438363075256348 + ], + [ + "▁amazon", + -12.438464164733887 + ], + [ + "▁Southeast", + -12.438505172729492 + ], + [ + "▁economical", + -12.438667297363281 + ], + [ + "▁broth", + -12.438697814941406 + ], + [ + "▁Secure", + -12.438750267028809 + ], + [ + "damals", + -12.438875198364258 + ], + [ + "▁Elementary", + -12.438921928405762 + ], + [ + "▁Wildlife", + -12.438995361328125 + ], + [ + "▁Jewel", + -12.439001083374023 + ], + [ + "▁protocols", + -12.439297676086426 + ], + [ + "▁zbor", + -12.4393892288208 + ], + [ + "▁enthusiasts", + -12.439398765563965 + ], + [ + "▁Mirror", + -12.439444541931152 + ], + [ + "▁soak", + -12.439537048339844 + ], + [ + "▁Sad", + -12.439574241638184 + ], + [ + "▁dishwasher", + -12.439957618713379 + ], + [ + "▁vollständig", + -12.440186500549316 + ], + [ + "▁Vermont", + -12.440407752990723 + ], + [ + "▁caut", + -12.440449714660645 + ], + [ + "▁fournisseur", + -12.440475463867188 + ], + [ + "▁Concrete", + -12.44047737121582 + ], + [ + "▁Instant", + -12.440595626831055 + ], + [ + "▁reveni", + -12.440597534179688 + ], + [ + "▁Surface", + -12.44059944152832 + ], + [ + "zumindest", + -12.440713882446289 + ], + [ + "▁feast", + -12.440725326538086 + ], + [ + "▁stretching", + -12.440803527832031 + ], + [ + "ERA", + -12.440997123718262 + ], + [ + "▁Scholarship", + -12.441020965576172 + ], + [ + "▁vineyard", + 
-12.4410400390625 + ], + [ + "▁régulièrement", + -12.441083908081055 + ], + [ + "▁patches", + -12.441093444824219 + ], + [ + "▁Gamb", + -12.44113540649414 + ], + [ + "▁Vereins", + -12.441152572631836 + ], + [ + "ège", + -12.441372871398926 + ], + [ + "▁constitutional", + -12.441411018371582 + ], + [ + "erreur", + -12.441413879394531 + ], + [ + "▁Colombia", + -12.441514015197754 + ], + [ + "UF", + -12.441618919372559 + ], + [ + "aider", + -12.441665649414062 + ], + [ + "cision", + -12.44180965423584 + ], + [ + "▁publishers", + -12.441913604736328 + ], + [ + "▁prelua", + -12.441967964172363 + ], + [ + "▁keiner", + -12.441990852355957 + ], + [ + "▁amid", + -12.442020416259766 + ], + [ + "▁quantitative", + -12.442031860351562 + ], + [ + "▁decay", + -12.442058563232422 + ], + [ + "▁distinguished", + -12.4420747756958 + ], + [ + "▁Gründe", + -12.442209243774414 + ], + [ + "▁statului", + -12.442362785339355 + ], + [ + "CAT", + -12.442436218261719 + ], + [ + "allow", + -12.442481994628906 + ], + [ + "▁mathematical", + -12.442550659179688 + ], + [ + "▁tragedy", + -12.44255542755127 + ], + [ + "▁heels", + -12.442609786987305 + ], + [ + "opia", + -12.44265365600586 + ], + [ + "▁merger", + -12.4428071975708 + ], + [ + "dispositif", + -12.442813873291016 + ], + [ + "▁pneu", + -12.44283390045166 + ], + [ + "elte", + -12.443058013916016 + ], + [ + "▁Introduction", + -12.443070411682129 + ], + [ + "▁biscuit", + -12.443134307861328 + ], + [ + "▁leftover", + -12.443275451660156 + ], + [ + "▁tester", + -12.443314552307129 + ], + [ + "▁Terre", + -12.443380355834961 + ], + [ + "▁Oui", + -12.44338321685791 + ], + [ + "▁rar", + -12.443520545959473 + ], + [ + "▁beverages", + -12.443666458129883 + ], + [ + "▁parenting", + -12.443892478942871 + ], + [ + "1-0", + -12.444053649902344 + ], + [ + "▁Barry", + -12.44417667388916 + ], + [ + "▁Lynn", + -12.444209098815918 + ], + [ + "▁Tyler", + -12.444262504577637 + ], + [ + "▁fotbal", + -12.44437026977539 + ], + [ + "dron", + -12.444475173950195 + ], + [ + "▁donor", + -12.44455623626709 + ], + [ + "▁drape", + -12.444558143615723 + ], + [ + "▁positioning", + -12.444963455200195 + ], + [ + "▁Tang", + -12.445006370544434 + ], + [ + "▁overwhelmed", + -12.445161819458008 + ], + [ + "▁perte", + -12.445192337036133 + ], + [ + "▁blender", + -12.445302963256836 + ], + [ + "TG", + -12.445467948913574 + ], + [ + "GHz", + -12.445490837097168 + ], + [ + "▁administrat", + -12.445719718933105 + ], + [ + "▁glaube", + -12.445771217346191 + ], + [ + "Char", + -12.445947647094727 + ], + [ + "impression", + -12.44627571105957 + ], + [ + "proving", + -12.446297645568848 + ], + [ + "▁Inner", + -12.446434020996094 + ], + [ + "root", + -12.446501731872559 + ], + [ + "▁Gedanken", + -12.446508407592773 + ], + [ + "▁underway", + -12.446596145629883 + ], + [ + "coat", + -12.44660758972168 + ], + [ + "▁thereof", + -12.446663856506348 + ], + [ + "rius", + -12.446700096130371 + ], + [ + "▁intermediate", + -12.446751594543457 + ], + [ + "gmail", + -12.446869850158691 + ], + [ + "114", + -12.446893692016602 + ], + [ + "▁interfere", + -12.446908950805664 + ], + [ + "▁Found", + -12.446930885314941 + ], + [ + "LF", + -12.447071075439453 + ], + [ + "▁equality", + -12.447099685668945 + ], + [ + "▁concurrent", + -12.44710636138916 + ], + [ + "akh", + -12.447107315063477 + ], + [ + "▁touching", + -12.44715690612793 + ], + [ + "▁curiosity", + -12.447235107421875 + ], + [ + "▁rendering", + -12.447263717651367 + ], + [ + "▁1964", + -12.447442054748535 + ], + [ + "sorge", + -12.447468757629395 + ], + [ + "ARC", + 
-12.447505950927734 + ], + [ + "▁Desktop", + -12.44752311706543 + ], + [ + "▁Tak", + -12.44760799407959 + ], + [ + "filtration", + -12.447651863098145 + ], + [ + "▁gates", + -12.4478759765625 + ], + [ + "Sehr", + -12.44791316986084 + ], + [ + "▁spatiu", + -12.44798755645752 + ], + [ + "▁Leg", + -12.448103904724121 + ], + [ + "▁aviation", + -12.448277473449707 + ], + [ + "wandel", + -12.44827938079834 + ], + [ + "▁Shar", + -12.448323249816895 + ], + [ + "▁Volks", + -12.448409080505371 + ], + [ + "maz", + -12.448698997497559 + ], + [ + "governmental", + -12.44874095916748 + ], + [ + "euros", + -12.448819160461426 + ], + [ + "avantage", + -12.448823928833008 + ], + [ + "sitzt", + -12.448856353759766 + ], + [ + "IER", + -12.448920249938965 + ], + [ + "▁Theory", + -12.44894027709961 + ], + [ + "Cependant", + -12.44907283782959 + ], + [ + "▁Teachers", + -12.449080467224121 + ], + [ + "anspruch", + -12.449095726013184 + ], + [ + "▁afecta", + -12.449139595031738 + ], + [ + "enko", + -12.449193000793457 + ], + [ + "▁breeding", + -12.449198722839355 + ], + [ + "▁Peak", + -12.449457168579102 + ], + [ + "▁găsit", + -12.449516296386719 + ], + [ + "▁măsuri", + -12.4495267868042 + ], + [ + "edia", + -12.449625968933105 + ], + [ + "biz", + -12.449640274047852 + ], + [ + "zum", + -12.449776649475098 + ], + [ + "▁schwierig", + -12.449847221374512 + ], + [ + "Sense", + -12.450050354003906 + ], + [ + "▁Jump", + -12.450081825256348 + ], + [ + "▁cocktails", + -12.450108528137207 + ], + [ + "abhängig", + -12.45012378692627 + ], + [ + "realised", + -12.450140953063965 + ], + [ + "▁programul", + -12.450214385986328 + ], + [ + "▁prévu", + -12.450238227844238 + ], + [ + "▁twitter", + -12.450372695922852 + ], + [ + "Union", + -12.450400352478027 + ], + [ + "▁Marathon", + -12.45040225982666 + ], + [ + "▁Christianity", + -12.450432777404785 + ], + [ + "▁Alberta", + -12.450811386108398 + ], + [ + "einheit", + -12.45097827911377 + ], + [ + "▁wellbeing", + -12.450982093811035 + ], + [ + "phen", + -12.451166152954102 + ], + [ + "▁Charleston", + -12.451180458068848 + ], + [ + "▁uncover", + -12.451323509216309 + ], + [ + "▁humaine", + -12.451464653015137 + ], + [ + "▁bleeding", + -12.451531410217285 + ], + [ + "▁manipul", + -12.451532363891602 + ], + [ + "▁humidity", + -12.451570510864258 + ], + [ + "▁Puis", + -12.451748847961426 + ], + [ + "▁aktuell", + -12.451922416687012 + ], + [ + "▁Nissan", + -12.451943397521973 + ], + [ + "▁Eisen", + -12.45202922821045 + ], + [ + "treiben", + -12.452059745788574 + ], + [ + "cios", + -12.452073097229004 + ], + [ + "ikh", + -12.452381134033203 + ], + [ + "acquiring", + -12.452466011047363 + ], + [ + "▁Wallpaper", + -12.452488899230957 + ], + [ + "▁rond", + -12.452558517456055 + ], + [ + "▁Doug", + -12.45267391204834 + ], + [ + "sourcing", + -12.452696800231934 + ], + [ + "▁1900", + -12.452825546264648 + ], + [ + "▁buni", + -12.452913284301758 + ], + [ + "vest", + -12.452916145324707 + ], + [ + "▁Bangladesh", + -12.452990531921387 + ], + [ + "Home", + -12.453160285949707 + ], + [ + "▁wrinkle", + -12.453252792358398 + ], + [ + "rado", + -12.453290939331055 + ], + [ + "▁Pain", + -12.45334243774414 + ], + [ + "▁herzlich", + -12.453354835510254 + ], + [ + "MRI", + -12.453426361083984 + ], + [ + "UG", + -12.453631401062012 + ], + [ + "▁Desk", + -12.453679084777832 + ], + [ + "▁remarc", + -12.453718185424805 + ], + [ + "▁sodium", + -12.453857421875 + ], + [ + "▁Jede", + -12.453892707824707 + ], + [ + "▁réelle", + -12.453959465026855 + ], + [ + "▁Polar", + -12.454068183898926 + ], + [ + 
"▁activists", + -12.454273223876953 + ], + [ + "lasted", + -12.454300880432129 + ], + [ + "Some", + -12.45432186126709 + ], + [ + "ISE", + -12.454338073730469 + ], + [ + "▁peine", + -12.454671859741211 + ], + [ + "▁crude", + -12.454852104187012 + ], + [ + "Maur", + -12.454916954040527 + ], + [ + "▁forcing", + -12.454933166503906 + ], + [ + "▁politici", + -12.454970359802246 + ], + [ + "▁condiții", + -12.454988479614258 + ], + [ + "▁Saving", + -12.454999923706055 + ], + [ + "▁descoperi", + -12.455020904541016 + ], + [ + "avenir", + -12.455055236816406 + ], + [ + "Akt", + -12.455069541931152 + ], + [ + "▁vocabulary", + -12.45509147644043 + ], + [ + "▁pont", + -12.455168724060059 + ], + [ + "West", + -12.45518970489502 + ], + [ + "lenk", + -12.455278396606445 + ], + [ + "▁Verbraucher", + -12.455367088317871 + ], + [ + "affects", + -12.455448150634766 + ], + [ + "▁Flower", + -12.455543518066406 + ], + [ + "▁Nebraska", + -12.455617904663086 + ], + [ + "▁assortment", + -12.455618858337402 + ], + [ + "hock", + -12.455619812011719 + ], + [ + "▁discounted", + -12.455803871154785 + ], + [ + "▁Sensor", + -12.455840110778809 + ], + [ + "Lie", + -12.45588207244873 + ], + [ + "▁Volkswagen", + -12.455887794494629 + ], + [ + "isseur", + -12.455888748168945 + ], + [ + "indice", + -12.455936431884766 + ], + [ + "▁scanner", + -12.455986022949219 + ], + [ + "fashioned", + -12.456040382385254 + ], + [ + "▁postal", + -12.456141471862793 + ], + [ + "ouvrir", + -12.45615291595459 + ], + [ + "▁seminars", + -12.45622444152832 + ], + [ + "ioase", + -12.456232070922852 + ], + [ + "▁Stanley", + -12.456260681152344 + ], + [ + "Various", + -12.456335067749023 + ], + [ + "essentiel", + -12.45650577545166 + ], + [ + "▁administered", + -12.456693649291992 + ], + [ + "▁concession", + -12.456748008728027 + ], + [ + "▁mould", + -12.456789016723633 + ], + [ + "▁strongest", + -12.456826210021973 + ], + [ + "Erlebnis", + -12.456933975219727 + ], + [ + "▁ehemalige", + -12.456933975219727 + ], + [ + "▁Tale", + -12.457234382629395 + ], + [ + "▁Buyer", + -12.457353591918945 + ], + [ + "ück", + -12.457578659057617 + ], + [ + "▁Kommentar", + -12.457720756530762 + ], + [ + "▁Schrift", + -12.457756996154785 + ], + [ + "Design", + -12.457792282104492 + ], + [ + "▁stirring", + -12.457937240600586 + ], + [ + "▁towels", + -12.457987785339355 + ], + [ + "▁$30", + -12.458101272583008 + ], + [ + "sprache", + -12.458279609680176 + ], + [ + "▁Regierung", + -12.458346366882324 + ], + [ + "▁nachhaltig", + -12.458406448364258 + ], + [ + "▁électronique", + -12.458515167236328 + ], + [ + "▁Andrei", + -12.458587646484375 + ], + [ + "because", + -12.458647727966309 + ], + [ + "informatique", + -12.458650588989258 + ], + [ + "IGHT", + -12.4586820602417 + ], + [ + "stepping", + -12.4586820602417 + ], + [ + "▁gris", + -12.458748817443848 + ], + [ + "vious", + -12.458773612976074 + ], + [ + "▁upside", + -12.4591064453125 + ], + [ + "▁Examples", + -12.459108352661133 + ], + [ + "IU", + -12.459110260009766 + ], + [ + "▁princess", + -12.459111213684082 + ], + [ + "spielen", + -12.45921516418457 + ], + [ + "legung", + -12.45950984954834 + ], + [ + "▁reflecting", + -12.4597806930542 + ], + [ + "▁Processing", + -12.459939002990723 + ], + [ + "▁jungle", + -12.460033416748047 + ], + [ + "▁insects", + -12.46006965637207 + ], + [ + "▁Sibiu", + -12.460220336914062 + ], + [ + "160", + -12.460259437561035 + ], + [ + "▁interessante", + -12.460267066955566 + ], + [ + "▁multimedia", + -12.460455894470215 + ], + [ + "essel", + -12.46049690246582 + ], + [ + "/18", + 
-12.460647583007812 + ], + [ + "nière", + -12.460683822631836 + ], + [ + "ministru", + -12.46072006225586 + ], + [ + "▁implants", + -12.460826873779297 + ], + [ + "▁Settings", + -12.461360931396484 + ], + [ + "▁invaluable", + -12.461432456970215 + ], + [ + "stains", + -12.461448669433594 + ], + [ + "onym", + -12.461518287658691 + ], + [ + "▁searched", + -12.461570739746094 + ], + [ + "▁disappointment", + -12.461628913879395 + ], + [ + "▁Iranian", + -12.461630821228027 + ], + [ + "▁questionnaire", + -12.461630821228027 + ], + [ + "Founder", + -12.46178913116455 + ], + [ + "▁Bericht", + -12.461792945861816 + ], + [ + "▁youngest", + -12.461896896362305 + ], + [ + "▁Automatic", + -12.461956024169922 + ], + [ + "▁plecat", + -12.46203327178955 + ], + [ + "geber", + -12.462119102478027 + ], + [ + "soweit", + -12.462124824523926 + ], + [ + "▁unfold", + -12.462236404418945 + ], + [ + "▁befinden", + -12.462274551391602 + ], + [ + "▁susţin", + -12.462637901306152 + ], + [ + "▁Mack", + -12.462675094604492 + ], + [ + "▁dificil", + -12.462757110595703 + ], + [ + "enseigne", + -12.463038444519043 + ], + [ + "▁vitamine", + -12.463047981262207 + ], + [ + "▁Memory", + -12.463092803955078 + ], + [ + "ripping", + -12.463129043579102 + ], + [ + "drin", + -12.463146209716797 + ], + [ + "3.2", + -12.463278770446777 + ], + [ + "▁verstehen", + -12.463287353515625 + ], + [ + "▁scaun", + -12.46341323852539 + ], + [ + "▁procédure", + -12.46380615234375 + ], + [ + "▁molecules", + -12.463911056518555 + ], + [ + "▁Anzahl", + -12.46391487121582 + ], + [ + "▁yogurt", + -12.464071273803711 + ], + [ + "▁Dominic", + -12.464113235473633 + ], + [ + "▁shocked", + -12.464156150817871 + ], + [ + "▁zilei", + -12.464269638061523 + ], + [ + "▁Heiz", + -12.464412689208984 + ], + [ + "▁Educational", + -12.464571952819824 + ], + [ + "BN", + -12.464577674865723 + ], + [ + "analyzing", + -12.464601516723633 + ], + [ + "hair", + -12.464676856994629 + ], + [ + "spiegel", + -12.464871406555176 + ], + [ + "▁illusion", + -12.464889526367188 + ], + [ + "BG", + -12.46505355834961 + ], + [ + "deductible", + -12.46513557434082 + ], + [ + "▁adj", + -12.4651460647583 + ], + [ + "▁accessory", + -12.465166091918945 + ], + [ + "▁Draw", + -12.465167999267578 + ], + [ + "▁airlines", + -12.46518611907959 + ], + [ + "▁satisfai", + -12.46536636352539 + ], + [ + "▁architects", + -12.465447425842285 + ], + [ + "istische", + -12.465508460998535 + ], + [ + "▁Healthy", + -12.465539932250977 + ], + [ + "großer", + -12.465669631958008 + ], + [ + "▁comunicare", + -12.465764999389648 + ], + [ + "▁Meyer", + -12.46577262878418 + ], + [ + "▁reproduction", + -12.465882301330566 + ], + [ + "▁Manufacturing", + -12.465929985046387 + ], + [ + "immobilier", + -12.465930938720703 + ], + [ + "▁Unterschied", + -12.465958595275879 + ], + [ + "▁cumpara", + -12.466029167175293 + ], + [ + "▁duplicate", + -12.466094017028809 + ], + [ + "▁(16", + -12.466096878051758 + ], + [ + "▁detector", + -12.466279983520508 + ], + [ + "▁observat", + -12.466387748718262 + ], + [ + "▁1965", + -12.466682434082031 + ], + [ + "▁Fantasy", + -12.466728210449219 + ], + [ + "▁brauchen", + -12.466728210449219 + ], + [ + "▁Participants", + -12.466780662536621 + ], + [ + "▁décide", + -12.466817855834961 + ], + [ + "▁kicke", + -12.466819763183594 + ], + [ + "▁SSL", + -12.466885566711426 + ], + [ + "360", + -12.466989517211914 + ], + [ + "Anim", + -12.467019081115723 + ], + [ + "▁cupcake", + -12.467031478881836 + ], + [ + "▁Lamb", + -12.467107772827148 + ], + [ + "▁Sä", + -12.467155456542969 + ], + [ + "ntă", 
+ -12.46738052368164 + ], + [ + "▁Pig", + -12.467421531677246 + ], + [ + "1,000", + -12.467677116394043 + ], + [ + "nhof", + -12.467782020568848 + ], + [ + "▁discret", + -12.467947959899902 + ], + [ + "▁deloc", + -12.467991828918457 + ], + [ + "▁Bücher", + -12.467999458312988 + ], + [ + "chor", + -12.468042373657227 + ], + [ + "course", + -12.468070030212402 + ], + [ + "▁cough", + -12.468076705932617 + ], + [ + "▁erstellt", + -12.468087196350098 + ], + [ + "▁Than", + -12.468097686767578 + ], + [ + "stätte", + -12.46812915802002 + ], + [ + "▁exceptionally", + -12.468162536621094 + ], + [ + "▁semnal", + -12.468186378479004 + ], + [ + "▁Interessen", + -12.468329429626465 + ], + [ + "ле", + -12.468356132507324 + ], + [ + "xx", + -12.468402862548828 + ], + [ + "▁Veterans", + -12.468422889709473 + ], + [ + "▁Kreuz", + -12.468683242797852 + ], + [ + "▁Nachricht", + -12.468701362609863 + ], + [ + "treated", + -12.468894004821777 + ], + [ + "▁tide", + -12.469230651855469 + ], + [ + "▁nonetheless", + -12.469390869140625 + ], + [ + "▁Subject", + -12.469439506530762 + ], + [ + "▁Stau", + -12.469440460205078 + ], + [ + "▁stickers", + -12.469463348388672 + ], + [ + "Alp", + -12.46950912475586 + ], + [ + "▁flagship", + -12.469541549682617 + ], + [ + "▁trimite", + -12.469619750976562 + ], + [ + "▁polyester", + -12.469664573669434 + ], + [ + "▁locui", + -12.469671249389648 + ], + [ + "▁chili", + -12.46968936920166 + ], + [ + "▁Browser", + -12.469808578491211 + ], + [ + "sieg", + -12.469809532165527 + ], + [ + "▁Arabic", + -12.469876289367676 + ], + [ + "blich", + -12.47001838684082 + ], + [ + "▁wunderbar", + -12.470090866088867 + ], + [ + "▁furnishings", + -12.470210075378418 + ], + [ + "rtie", + -12.470243453979492 + ], + [ + "8.5", + -12.470742225646973 + ], + [ + "▁Sponsor", + -12.471016883850098 + ], + [ + "▁glitter", + -12.471280097961426 + ], + [ + "▁piaț", + -12.471402168273926 + ], + [ + "▁interviewed", + -12.471519470214844 + ], + [ + "▁Statistics", + -12.471529006958008 + ], + [ + "▁cerc", + -12.47154712677002 + ], + [ + "augmentation", + -12.47155475616455 + ], + [ + "▁Navi", + -12.471558570861816 + ], + [ + "▁Begriff", + -12.47156047821045 + ], + [ + "▁știu", + -12.471596717834473 + ], + [ + "▁unabhängig", + -12.471778869628906 + ], + [ + "▁könnten", + -12.471978187561035 + ], + [ + "▁travaille", + -12.472000122070312 + ], + [ + "▁companie", + -12.472027778625488 + ], + [ + "▁Scientific", + -12.472061157226562 + ], + [ + "▁Outlook", + -12.472091674804688 + ], + [ + "▁fairy", + -12.472158432006836 + ], + [ + "zam", + -12.472282409667969 + ], + [ + "bak", + -12.472448348999023 + ], + [ + "▁Traffic", + -12.472596168518066 + ], + [ + "gerät", + -12.472671508789062 + ], + [ + "▁freezing", + -12.472701072692871 + ], + [ + "▁broadband", + -12.4727201461792 + ], + [ + "110", + -12.47279167175293 + ], + [ + "▁revenu", + -12.472887992858887 + ], + [ + "listed", + -12.472900390625 + ], + [ + "▁Rico", + -12.472941398620605 + ], + [ + "Laure", + -12.472990036010742 + ], + [ + "ATA", + -12.473112106323242 + ], + [ + "▁participer", + -12.47313117980957 + ], + [ + "▁sponsorship", + -12.473235130310059 + ], + [ + "▁distress", + -12.473286628723145 + ], + [ + "▁Brisbane", + -12.47339916229248 + ], + [ + "schönen", + -12.473437309265137 + ], + [ + "▁fizice", + -12.473465919494629 + ], + [ + "▁Political", + -12.47362232208252 + ], + [ + "uhr", + -12.473657608032227 + ], + [ + "▁procedura", + -12.473713874816895 + ], + [ + "▁hervor", + -12.473770141601562 + ], + [ + "melted", + -12.473776817321777 + ], + [ + 
"▁Emp", + -12.47384262084961 + ], + [ + "▁Ernährung", + -12.4739351272583 + ], + [ + "▁Pendant", + -12.473944664001465 + ], + [ + "▁recipients", + -12.474047660827637 + ], + [ + "Claude", + -12.474133491516113 + ], + [ + "▁regimen", + -12.47415828704834 + ], + [ + "expo", + -12.474346160888672 + ], + [ + "adevăr", + -12.47437858581543 + ], + [ + "▁critically", + -12.474440574645996 + ], + [ + "▁grabbe", + -12.474468231201172 + ], + [ + "▁Kann", + -12.474474906921387 + ], + [ + "▁directeur", + -12.474613189697266 + ], + [ + "gator", + -12.474908828735352 + ], + [ + "problem", + -12.474910736083984 + ], + [ + "scribe", + -12.474913597106934 + ], + [ + "▁exig", + -12.474920272827148 + ], + [ + "Tri", + -12.474969863891602 + ], + [ + "▁aqua", + -12.475631713867188 + ], + [ + "appréci", + -12.47569465637207 + ], + [ + "▁viaţă", + -12.47571849822998 + ], + [ + "▁dominate", + -12.475865364074707 + ], + [ + "disc", + -12.475889205932617 + ], + [ + "▁conseiller", + -12.47603988647461 + ], + [ + "▁shuttle", + -12.476180076599121 + ], + [ + "▁Status", + -12.47623062133789 + ], + [ + "▁ausreichend", + -12.476371765136719 + ], + [ + "▁spät", + -12.476411819458008 + ], + [ + "▁remainder", + -12.476417541503906 + ], + [ + "wett", + -12.476430892944336 + ], + [ + "schlossen", + -12.476491928100586 + ], + [ + "PAC", + -12.476505279541016 + ], + [ + "▁suprafata", + -12.476617813110352 + ], + [ + "5.000", + -12.476673126220703 + ], + [ + "supplying", + -12.47673225402832 + ], + [ + "▁uniquely", + -12.476905822753906 + ], + [ + "▁retard", + -12.476929664611816 + ], + [ + "▁Bang", + -12.477006912231445 + ], + [ + "ieuse", + -12.477087020874023 + ], + [ + "▁Ted", + -12.477248191833496 + ], + [ + "▁ermöglichen", + -12.47732925415039 + ], + [ + "▁builders", + -12.477380752563477 + ], + [ + "▁proximité", + -12.477423667907715 + ], + [ + "▁unforgettable", + -12.477423667907715 + ], + [ + "256", + -12.477446556091309 + ], + [ + "fähigkeit", + -12.477550506591797 + ], + [ + "▁procurement", + -12.477561950683594 + ], + [ + "▁Gewicht", + -12.477693557739258 + ], + [ + "▁potentiel", + -12.47778606414795 + ], + [ + "▁topping", + -12.478300094604492 + ], + [ + "▁canada", + -12.478304862976074 + ], + [ + "▁Destin", + -12.478355407714844 + ], + [ + "▁Knowing", + -12.478411674499512 + ], + [ + "▁retained", + -12.478426933288574 + ], + [ + "▁zinc", + -12.478470802307129 + ], + [ + "▁worrying", + -12.478655815124512 + ], + [ + "faţa", + -12.478676795959473 + ], + [ + "▁initi", + -12.478837966918945 + ], + [ + "ORI", + -12.4788818359375 + ], + [ + "▁refuz", + -12.478921890258789 + ], + [ + "bruch", + -12.479202270507812 + ], + [ + "▁impun", + -12.479233741760254 + ], + [ + "▁persoană", + -12.479308128356934 + ], + [ + "EAR", + -12.479347229003906 + ], + [ + "bedarf", + -12.479368209838867 + ], + [ + "▁Gebiet", + -12.47940731048584 + ], + [ + "▁Roof", + -12.479436874389648 + ], + [ + "▁negligence", + -12.47957706451416 + ], + [ + "security", + -12.479618072509766 + ], + [ + "▁accesorii", + -12.479641914367676 + ], + [ + "▁unclear", + -12.479667663574219 + ], + [ + "▁securitate", + -12.479848861694336 + ], + [ + "▁spotlight", + -12.479896545410156 + ], + [ + "▁speziell", + -12.479923248291016 + ], + [ + "▁mentally", + -12.479942321777344 + ], + [ + "▁preservation", + -12.48011589050293 + ], + [ + "▁Promotion", + -12.480156898498535 + ], + [ + "partnered", + -12.480274200439453 + ], + [ + "▁Hinter", + -12.48031997680664 + ], + [ + "▁punishment", + -12.480359077453613 + ], + [ + "▁grease", + -12.480713844299316 + ], + [ + "▁NW", + 
-12.480714797973633 + ], + [ + "▁curse", + -12.480897903442383 + ], + [ + "ckle", + -12.48101806640625 + ], + [ + "▁Hire", + -12.481043815612793 + ], + [ + "▁Whole", + -12.481088638305664 + ], + [ + "▁basse", + -12.481289863586426 + ], + [ + "▁DNS", + -12.481427192687988 + ], + [ + "flamm", + -12.481560707092285 + ], + [ + "▁scoop", + -12.481574058532715 + ], + [ + "Norm", + -12.481663703918457 + ], + [ + "▁Surgery", + -12.481735229492188 + ], + [ + "▁widget", + -12.481741905212402 + ], + [ + "connected", + -12.481863021850586 + ], + [ + "autorité", + -12.481961250305176 + ], + [ + "▁utilis", + -12.482096672058105 + ], + [ + "▁formă", + -12.482185363769531 + ], + [ + "▁clearing", + -12.482307434082031 + ], + [ + "▁jumătate", + -12.482815742492676 + ], + [ + "größe", + -12.482831954956055 + ], + [ + "▁Tief", + -12.482852935791016 + ], + [ + "épi", + -12.482939720153809 + ], + [ + "zunehmen", + -12.483174324035645 + ], + [ + "▁touchdown", + -12.48318099975586 + ], + [ + "▁scholarships", + -12.483236312866211 + ], + [ + "▁dementia", + -12.483319282531738 + ], + [ + "▁Jeder", + -12.48333740234375 + ], + [ + "▁nightmare", + -12.483379364013672 + ], + [ + "▁Raw", + -12.48342514038086 + ], + [ + "absorbed", + -12.483468055725098 + ], + [ + "lohnt", + -12.483484268188477 + ], + [ + "quent", + -12.483580589294434 + ], + [ + "interest", + -12.483626365661621 + ], + [ + "OSS", + -12.483649253845215 + ], + [ + "▁Leaf", + -12.483667373657227 + ], + [ + "▁timeless", + -12.48381519317627 + ], + [ + "DY", + -12.483865737915039 + ], + [ + "▁Remote", + -12.483907699584961 + ], + [ + "chner", + -12.483938217163086 + ], + [ + "▁Pam", + -12.484014511108398 + ], + [ + "urban", + -12.484060287475586 + ], + [ + "во", + -12.484146118164062 + ], + [ + "▁Kunde", + -12.484166145324707 + ], + [ + "▁Laptop", + -12.484169006347656 + ], + [ + "finder", + -12.484336853027344 + ], + [ + "▁Pole", + -12.484567642211914 + ], + [ + "2.8", + -12.484588623046875 + ], + [ + "finished", + -12.484670639038086 + ], + [ + "▁prophet", + -12.484697341918945 + ], + [ + "mailed", + -12.484758377075195 + ], + [ + "2-0", + -12.4849214553833 + ], + [ + "▁disciples", + -12.484949111938477 + ], + [ + "▁intriguing", + -12.484980583190918 + ], + [ + "IRA", + -12.485033988952637 + ], + [ + "petit", + -12.485077857971191 + ], + [ + "▁Membership", + -12.485097885131836 + ], + [ + "▁provincial", + -12.485177040100098 + ], + [ + "▁Prüfung", + -12.485292434692383 + ], + [ + "-50", + -12.485450744628906 + ], + [ + "▁cryptocurrency", + -12.485522270202637 + ], + [ + "▁journalism", + -12.485536575317383 + ], + [ + "▁Downtown", + -12.485593795776367 + ], + [ + "inserted", + -12.485655784606934 + ], + [ + "▁Direction", + -12.485718727111816 + ], + [ + "lipid", + -12.485732078552246 + ], + [ + "▁Sebastian", + -12.485793113708496 + ], + [ + "fordert", + -12.48591136932373 + ], + [ + "Originally", + -12.485989570617676 + ], + [ + "tipp", + -12.486048698425293 + ], + [ + "verantwortlich", + -12.486064910888672 + ], + [ + "▁wheelchair", + -12.486085891723633 + ], + [ + "▁structura", + -12.48609733581543 + ], + [ + "▁Danny", + -12.486138343811035 + ], + [ + "999", + -12.486284255981445 + ], + [ + "▁Schiff", + -12.486380577087402 + ], + [ + "formally", + -12.486408233642578 + ], + [ + "focused", + -12.486428260803223 + ], + [ + "▁Vater", + -12.486478805541992 + ], + [ + "▁Dear", + -12.486599922180176 + ], + [ + "▁reinforce", + -12.486794471740723 + ], + [ + "proprietar", + -12.48690414428711 + ], + [ + "▁Kyle", + -12.487004280090332 + ], + [ + "În", + 
-12.487015724182129 + ], + [ + "▁servir", + -12.487268447875977 + ], + [ + "length", + -12.48730754852295 + ], + [ + "▁showroom", + -12.48735237121582 + ], + [ + "reli", + -12.487473487854004 + ], + [ + "▁Brü", + -12.487529754638672 + ], + [ + "▁Schle", + -12.487634658813477 + ], + [ + "▁profond", + -12.487773895263672 + ], + [ + "▁Superior", + -12.487826347351074 + ], + [ + "▁lifted", + -12.487844467163086 + ], + [ + "highlighting", + -12.487850189208984 + ], + [ + "▁Connection", + -12.48793888092041 + ], + [ + "▁similarly", + -12.487998962402344 + ], + [ + "▁diferit", + -12.488005638122559 + ], + [ + "▁sweater", + -12.488014221191406 + ], + [ + "État", + -12.48803997039795 + ], + [ + "rooted", + -12.488069534301758 + ], + [ + "▁sleeves", + -12.488236427307129 + ], + [ + "де", + -12.488264083862305 + ], + [ + "▁Laboratory", + -12.488265991210938 + ], + [ + "ündig", + -12.488719940185547 + ], + [ + "▁Viking", + -12.488741874694824 + ], + [ + "▁Origin", + -12.48878002166748 + ], + [ + "▁vibr", + -12.488812446594238 + ], + [ + "199", + -12.488974571228027 + ], + [ + "▁yummy", + -12.489001274108887 + ], + [ + "STAR", + -12.489140510559082 + ], + [ + "▁repro", + -12.489152908325195 + ], + [ + "▁Kirchen", + -12.489229202270508 + ], + [ + "hopper", + -12.48925495147705 + ], + [ + "zza", + -12.489335060119629 + ], + [ + "▁vitesse", + -12.48934555053711 + ], + [ + "▁minimalist", + -12.489412307739258 + ], + [ + "▁Election", + -12.489420890808105 + ], + [ + "draw", + -12.489501953125 + ], + [ + "▁candles", + -12.48959732055664 + ], + [ + "▁Mund", + -12.489615440368652 + ], + [ + "urged", + -12.489901542663574 + ], + [ + "▁cânt", + -12.489917755126953 + ], + [ + "Ultimately", + -12.49002742767334 + ], + [ + "▁Lift", + -12.490124702453613 + ], + [ + "loaded", + -12.490334510803223 + ], + [ + "demand", + -12.490508079528809 + ], + [ + "▁aleg", + -12.490621566772461 + ], + [ + "▁Discovery", + -12.490755081176758 + ], + [ + "▁Vienna", + -12.490960121154785 + ], + [ + "▁Kategorie", + -12.490961074829102 + ], + [ + "▁Cotton", + -12.490962028503418 + ], + [ + "▁$200", + -12.491043090820312 + ], + [ + "▁Drei", + -12.491052627563477 + ], + [ + "▁reicht", + -12.491168975830078 + ], + [ + "speicher", + -12.491231918334961 + ], + [ + "▁Immobilien", + -12.491483688354492 + ], + [ + "gefühl", + -12.491509437561035 + ], + [ + "make", + -12.491525650024414 + ], + [ + "pell", + -12.49155044555664 + ], + [ + "▁dull", + -12.491598129272461 + ], + [ + "▁arbeitet", + -12.491681098937988 + ], + [ + "retaining", + -12.491700172424316 + ], + [ + "losen", + -12.491707801818848 + ], + [ + "match", + -12.491876602172852 + ], + [ + "-60", + -12.491880416870117 + ], + [ + "▁ecological", + -12.492000579833984 + ], + [ + "▁vend", + -12.492051124572754 + ], + [ + "▁grammar", + -12.492061614990234 + ], + [ + "▁1:1", + -12.492225646972656 + ], + [ + "grilled", + -12.492279052734375 + ], + [ + "geordnet", + -12.492321014404297 + ], + [ + "▁Pav", + -12.49236011505127 + ], + [ + "▁Depot", + -12.492368698120117 + ], + [ + "▁Walking", + -12.492372512817383 + ], + [ + "teamed", + -12.492402076721191 + ], + [ + "▁torque", + -12.492537498474121 + ], + [ + "▁Venture", + -12.492659568786621 + ], + [ + "▁beginner", + -12.49269962310791 + ], + [ + "▁Monaten", + -12.492712020874023 + ], + [ + "▁Pune", + -12.493054389953613 + ], + [ + "connect", + -12.493075370788574 + ], + [ + "▁textbook", + -12.493132591247559 + ], + [ + "▁unprecedented", + -12.49314022064209 + ], + [ + "▁implied", + -12.493168830871582 + ], + [ + "▁cubic", + 
-12.493668556213379 + ], + [ + "enthält", + -12.493696212768555 + ], + [ + "▁Brenn", + -12.49388313293457 + ], + [ + "▁Expect", + -12.49394416809082 + ], + [ + "▁lever", + -12.4939603805542 + ], + [ + "veux", + -12.49399185180664 + ], + [ + "▁Claire", + -12.494112968444824 + ], + [ + "Acc", + -12.49432373046875 + ], + [ + "▁Typ", + -12.494478225708008 + ], + [ + "▁smoothie", + -12.494501113891602 + ], + [ + "▁Idaho", + -12.494780540466309 + ], + [ + "▁spati", + -12.494802474975586 + ], + [ + "▁bénéficier", + -12.49488353729248 + ], + [ + "▁Kle", + -12.495161056518555 + ], + [ + "▁serviciilor", + -12.495169639587402 + ], + [ + "▁prohibit", + -12.495267868041992 + ], + [ + "EAD", + -12.495417594909668 + ], + [ + "▁Turner", + -12.495418548583984 + ], + [ + "▁elibera", + -12.49543571472168 + ], + [ + "▁payday", + -12.495464324951172 + ], + [ + "▁prolong", + -12.495466232299805 + ], + [ + "▁sued", + -12.495481491088867 + ], + [ + "▁Devil", + -12.495536804199219 + ], + [ + "▁Skills", + -12.495552062988281 + ], + [ + "▁Marcel", + -12.495553970336914 + ], + [ + "▁silhouette", + -12.495601654052734 + ], + [ + "▁preț", + -12.495742797851562 + ], + [ + "▁Gö", + -12.495747566223145 + ], + [ + "▁Creator", + -12.495774269104004 + ], + [ + "fed", + -12.4959077835083 + ], + [ + "Cap", + -12.495997428894043 + ], + [ + "▁dedicate", + -12.496042251586914 + ], + [ + "0000", + -12.496124267578125 + ], + [ + "▁VAT", + -12.496259689331055 + ], + [ + "▁Firefox", + -12.496443748474121 + ], + [ + "▁therapies", + -12.496477127075195 + ], + [ + "▁screws", + -12.496662139892578 + ], + [ + "▁Province", + -12.496697425842285 + ], + [ + "▁problematic", + -12.496871948242188 + ], + [ + "▁Vid", + -12.496915817260742 + ], + [ + "▁Lost", + -12.496950149536133 + ], + [ + "▁elegance", + -12.497520446777344 + ], + [ + "▁Elegant", + -12.497525215148926 + ], + [ + "ignant", + -12.497573852539062 + ], + [ + "▁darin", + -12.497649192810059 + ], + [ + "▁anonym", + -12.497669219970703 + ], + [ + "▁vegeta", + -12.49767780303955 + ], + [ + "incoming", + -12.497762680053711 + ], + [ + "▁pills", + -12.497846603393555 + ], + [ + "governing", + -12.497893333435059 + ], + [ + "▁Haven", + -12.497920989990234 + ], + [ + "paper", + -12.497947692871094 + ], + [ + "räume", + -12.497979164123535 + ], + [ + "paw", + -12.498099327087402 + ], + [ + "▁spelling", + -12.498283386230469 + ], + [ + "ambele", + -12.498318672180176 + ], + [ + "▁reprezentat", + -12.498371124267578 + ], + [ + "▁mâ", + -12.49853515625 + ], + [ + "wirtschaftliche", + -12.498558044433594 + ], + [ + "▁valabil", + -12.498579025268555 + ], + [ + "▁konkret", + -12.498618125915527 + ], + [ + "▁financier", + -12.498619079589844 + ], + [ + "▁irre", + -12.499135971069336 + ], + [ + "▁Silicon", + -12.499171257019043 + ], + [ + "Viv", + -12.499181747436523 + ], + [ + "▁viruses", + -12.49927043914795 + ], + [ + "▁CNN", + -12.499324798583984 + ], + [ + "▁erleben", + -12.499482154846191 + ], + [ + "gina", + -12.499492645263672 + ], + [ + "punctul", + -12.49951457977295 + ], + [ + "▁Sfânt", + -12.499753952026367 + ], + [ + "▁Manage", + -12.499811172485352 + ], + [ + "▁payable", + -12.499984741210938 + ], + [ + "▁practitioner", + -12.500143051147461 + ], + [ + "▁conférence", + -12.50026798248291 + ], + [ + "▁drought", + -12.50027084350586 + ], + [ + "▁devote", + -12.500361442565918 + ], + [ + "wertung", + -12.500420570373535 + ], + [ + "stabil", + -12.5004301071167 + ], + [ + "▁balcon", + -12.500553131103516 + ], + [ + "▁Lebensmittel", + -12.500603675842285 + ], + [ + "COL", + 
-12.500950813293457 + ], + [ + "▁Domnul", + -12.501093864440918 + ], + [ + "carved", + -12.501359939575195 + ], + [ + "▁preparat", + -12.5014009475708 + ], + [ + "101", + -12.501537322998047 + ], + [ + "▁specimen", + -12.501580238342285 + ], + [ + "urgeon", + -12.501596450805664 + ], + [ + "LIC", + -12.50163459777832 + ], + [ + "Plattform", + -12.501643180847168 + ], + [ + "▁ramas", + -12.501739501953125 + ], + [ + "▁copilului", + -12.501791954040527 + ], + [ + "bacter", + -12.501812934875488 + ], + [ + "körper", + -12.501940727233887 + ], + [ + "▁Kru", + -12.501981735229492 + ], + [ + "▁Employ", + -12.502055168151855 + ], + [ + "office", + -12.502080917358398 + ], + [ + "▁simmer", + -12.502120018005371 + ], + [ + "qualität", + -12.502137184143066 + ], + [ + "▁freshly", + -12.502215385437012 + ], + [ + "▁Nine", + -12.50223159790039 + ], + [ + "▁tonnes", + -12.50223445892334 + ], + [ + "boden", + -12.502236366271973 + ], + [ + "enquête", + -12.50240707397461 + ], + [ + "▁Colour", + -12.502481460571289 + ], + [ + "▁Diagram", + -12.502495765686035 + ], + [ + "▁gewählt", + -12.502516746520996 + ], + [ + "▁viitoare", + -12.502538681030273 + ], + [ + "▁reporters", + -12.502913475036621 + ], + [ + "guer", + -12.502991676330566 + ], + [ + "▁Kombination", + -12.503021240234375 + ], + [ + "▁qualitative", + -12.50302505493164 + ], + [ + "Centrul", + -12.503131866455078 + ], + [ + "avy", + -12.503170013427734 + ], + [ + "▁Eng", + -12.503175735473633 + ], + [ + "▁sufletul", + -12.50327205657959 + ], + [ + "▁germ", + -12.503412246704102 + ], + [ + "▁prevented", + -12.503448486328125 + ], + [ + "appelle", + -12.503533363342285 + ], + [ + "gins", + -12.503556251525879 + ], + [ + "▁Skype", + -12.503585815429688 + ], + [ + "conditioned", + -12.503617286682129 + ], + [ + "▁clutch", + -12.503641128540039 + ], + [ + "environ", + -12.503694534301758 + ], + [ + "3.3", + -12.503774642944336 + ], + [ + "▁webinar", + -12.503866195678711 + ], + [ + "▁forty", + -12.504104614257812 + ], + [ + "▁Medicaid", + -12.504127502441406 + ], + [ + "▁dismissed", + -12.504167556762695 + ], + [ + "▁siblings", + -12.504168510437012 + ], + [ + "▁Jaw", + -12.504196166992188 + ], + [ + "guiding", + -12.504220962524414 + ], + [ + "cigarette", + -12.504374504089355 + ], + [ + "▁Shah", + -12.504681587219238 + ], + [ + "▁Lehrer", + -12.504684448242188 + ], + [ + "▁muscular", + -12.504694938659668 + ], + [ + "spatele", + -12.504796981811523 + ], + [ + "▁réduction", + -12.504836082458496 + ], + [ + "▁fixes", + -12.504851341247559 + ], + [ + "Span", + -12.50511646270752 + ], + [ + "▁Hudson", + -12.505231857299805 + ], + [ + "development", + -12.505250930786133 + ], + [ + "▁excluded", + -12.50525951385498 + ], + [ + "Democrat", + -12.505260467529297 + ], + [ + "▁nominal", + -12.505317687988281 + ], + [ + "purpose", + -12.50540828704834 + ], + [ + "▁bored", + -12.505500793457031 + ], + [ + "espèce", + -12.50550651550293 + ], + [ + "▁(30", + -12.5055570602417 + ], + [ + "Neither", + -12.505608558654785 + ], + [ + "hänge", + -12.505610466003418 + ], + [ + "square", + -12.505728721618652 + ], + [ + "voller", + -12.505736351013184 + ], + [ + "▁pertinent", + -12.505783081054688 + ], + [ + "▁Wool", + -12.50595474243164 + ], + [ + "settling", + -12.50607681274414 + ], + [ + "fangen", + -12.506148338317871 + ], + [ + "▁Testing", + -12.506152153015137 + ], + [ + "distin", + -12.506196022033691 + ], + [ + "▁Marken", + -12.506227493286133 + ], + [ + "▁Beta", + -12.506300926208496 + ], + [ + "▁fulfilling", + -12.506339073181152 + ], + [ + "Leider", + 
-12.506357192993164 + ], + [ + "black", + -12.506389617919922 + ], + [ + "occupe", + -12.50658893585205 + ], + [ + "itățile", + -12.506688117980957 + ], + [ + "Pay", + -12.506887435913086 + ], + [ + "▁bandwidth", + -12.506890296936035 + ], + [ + "▁neighbourhood", + -12.506918907165527 + ], + [ + "▁Gutschein", + -12.506922721862793 + ], + [ + "degree", + -12.507055282592773 + ], + [ + "ivité", + -12.507116317749023 + ], + [ + "4.1", + -12.507169723510742 + ], + [ + "▁tätig", + -12.507170677185059 + ], + [ + "topic", + -12.507242202758789 + ], + [ + "ätz", + -12.507243156433105 + ], + [ + "these", + -12.50733470916748 + ], + [ + "▁propriété", + -12.507438659667969 + ], + [ + "▁innings", + -12.507458686828613 + ], + [ + "▁Prevention", + -12.50754165649414 + ], + [ + "▁Saw", + -12.507585525512695 + ], + [ + "▁opener", + -12.507752418518066 + ], + [ + "entwicklung", + -12.507824897766113 + ], + [ + "▁Johann", + -12.507865905761719 + ], + [ + "▁statistic", + -12.507881164550781 + ], + [ + "oids", + -12.507966995239258 + ], + [ + "▁Delaware", + -12.508000373840332 + ], + [ + "▁Isle", + -12.508001327514648 + ], + [ + "▁accompagn", + -12.508028984069824 + ], + [ + "▁Risiko", + -12.508079528808594 + ], + [ + "▁Conform", + -12.508268356323242 + ], + [ + "zeichnen", + -12.508395195007324 + ], + [ + "▁acuz", + -12.508479118347168 + ], + [ + "▁Mort", + -12.508524894714355 + ], + [ + "Fällen", + -12.50853157043457 + ], + [ + "▁blended", + -12.50871467590332 + ], + [ + "found", + -12.50872802734375 + ], + [ + "▁gestalten", + -12.50874137878418 + ], + [ + "▁Découvrez", + -12.508830070495605 + ], + [ + "▁Wett", + -12.508956909179688 + ], + [ + "▁débat", + -12.508990287780762 + ], + [ + "▁Tire", + -12.509007453918457 + ], + [ + "benz", + -12.509037017822266 + ], + [ + "Yes", + -12.509074211120605 + ], + [ + "▁pierde", + -12.509110450744629 + ], + [ + "▁niciodata", + -12.509121894836426 + ], + [ + "▁precipit", + -12.509145736694336 + ], + [ + "▁lazy", + -12.509334564208984 + ], + [ + "▁creature", + -12.509370803833008 + ], + [ + "Wettbewerb", + -12.509385108947754 + ], + [ + "▁Explo", + -12.509496688842773 + ], + [ + "wolf", + -12.509657859802246 + ], + [ + "▁conséquence", + -12.509662628173828 + ], + [ + "▁jewellery", + -12.509662628173828 + ], + [ + "▁Extension", + -12.509735107421875 + ], + [ + "▁transmitted", + -12.509872436523438 + ], + [ + "▁darker", + -12.509973526000977 + ], + [ + "▁simbol", + -12.510065078735352 + ], + [ + "kim", + -12.510069847106934 + ], + [ + "▁proteja", + -12.510098457336426 + ], + [ + "▁Copper", + -12.510189056396484 + ], + [ + "mitglied", + -12.510218620300293 + ], + [ + "▁explosive", + -12.510222434997559 + ], + [ + "▁Nicolae", + -12.510223388671875 + ], + [ + "▁intricate", + -12.510231971740723 + ], + [ + "lati", + -12.510313034057617 + ], + [ + "Mark", + -12.510334014892578 + ], + [ + "▁Porsche", + -12.510339736938477 + ], + [ + "▁Revenue", + -12.510479927062988 + ], + [ + "4.2", + -12.510613441467285 + ], + [ + "certain", + -12.510836601257324 + ], + [ + "▁Coaching", + -12.510879516601562 + ], + [ + "▁allocated", + -12.510879516601562 + ], + [ + "▁optimiz", + -12.511017799377441 + ], + [ + "▁heel", + -12.511205673217773 + ], + [ + "▁indigenous", + -12.511330604553223 + ], + [ + "▁vineri", + -12.511396408081055 + ], + [ + "▁Inspector", + -12.51145076751709 + ], + [ + "▁colleague", + -12.5115327835083 + ], + [ + "ANG", + -12.511649131774902 + ], + [ + "éducation", + -12.511887550354004 + ], + [ + "▁Geschenk", + -12.51188850402832 + ], + [ + "channel", + -12.511899948120117 + 
], + [ + "▁trapped", + -12.511954307556152 + ], + [ + "BF", + -12.511974334716797 + ], + [ + "▁firing", + -12.512086868286133 + ], + [ + "▁chlor", + -12.512103080749512 + ], + [ + "▁Carlos", + -12.512115478515625 + ], + [ + "▁proxy", + -12.512128829956055 + ], + [ + "▁pinch", + -12.512167930603027 + ], + [ + "▁Pete", + -12.512201309204102 + ], + [ + "phospho", + -12.512458801269531 + ], + [ + "▁waiver", + -12.51246452331543 + ], + [ + "▁Croatia", + -12.512480735778809 + ], + [ + "▁behave", + -12.51258373260498 + ], + [ + "▁frig", + -12.512676239013672 + ], + [ + "▁Vorteil", + -12.51279067993164 + ], + [ + "▁wichtiger", + -12.512837409973145 + ], + [ + "........", + -12.512929916381836 + ], + [ + "▁flick", + -12.513007164001465 + ], + [ + "▁Stanford", + -12.51306438446045 + ], + [ + "öse", + -12.513096809387207 + ], + [ + "▁Fernseh", + -12.513099670410156 + ], + [ + "▁vélo", + -12.51322078704834 + ], + [ + "reisen", + -12.513304710388184 + ], + [ + "residing", + -12.513504981994629 + ], + [ + "▁Taste", + -12.513580322265625 + ], + [ + "▁disappeared", + -12.513630867004395 + ], + [ + "▁Hood", + -12.513776779174805 + ], + [ + "▁fabriqu", + -12.514046669006348 + ], + [ + "▁Jake", + -12.514470100402832 + ], + [ + "Lastly", + -12.51462173461914 + ], + [ + "▁furnace", + -12.514673233032227 + ], + [ + "▁Ottawa", + -12.51473331451416 + ], + [ + "▁dictate", + -12.514742851257324 + ], + [ + "zece", + -12.514817237854004 + ], + [ + "protect", + -12.514932632446289 + ], + [ + "FU", + -12.51495361328125 + ], + [ + "Stack", + -12.514954566955566 + ], + [ + "▁teilweise", + -12.515018463134766 + ], + [ + "▁Publisher", + -12.51506233215332 + ], + [ + "▁lutte", + -12.515159606933594 + ], + [ + "202", + -12.515178680419922 + ], + [ + "psy", + -12.515190124511719 + ], + [ + "▁wünschen", + -12.515238761901855 + ], + [ + "▁pathways", + -12.515356063842773 + ], + [ + "ivitate", + -12.515559196472168 + ], + [ + "▁continuă", + -12.515658378601074 + ], + [ + "ziemlich", + -12.515791893005371 + ], + [ + "verted", + -12.515812873840332 + ], + [ + "▁sequel", + -12.515839576721191 + ], + [ + "tinct", + -12.51599407196045 + ], + [ + "vette", + -12.516020774841309 + ], + [ + "▁exceeding", + -12.516032218933105 + ], + [ + "▁Yorkshire", + -12.51607608795166 + ], + [ + "▁cleanse", + -12.51613998413086 + ], + [ + "Sadly", + -12.516159057617188 + ], + [ + "▁präsentiert", + -12.516164779663086 + ], + [ + "angled", + -12.516311645507812 + ], + [ + "tude", + -12.516339302062988 + ], + [ + "chain", + -12.516371726989746 + ], + [ + "▁Oakland", + -12.51639175415039 + ], + [ + "xia", + -12.516514778137207 + ], + [ + "▁foremost", + -12.51653003692627 + ], + [ + "▁incomplete", + -12.516786575317383 + ], + [ + "▁restriction", + -12.516905784606934 + ], + [ + "▁whatsoever", + -12.516908645629883 + ], + [ + "▁shipment", + -12.517017364501953 + ], + [ + "**", + -12.517059326171875 + ], + [ + "Aici", + -12.517110824584961 + ], + [ + "PART", + -12.517247200012207 + ], + [ + "▁grams", + -12.517251014709473 + ], + [ + "▁Folk", + -12.517457008361816 + ], + [ + "▁encryption", + -12.517467498779297 + ], + [ + "▁Alfred", + -12.517748832702637 + ], + [ + "▁Veränderung", + -12.517749786376953 + ], + [ + "▁privately", + -12.517817497253418 + ], + [ + "£", + -12.517909049987793 + ], + [ + "▁Sonne", + -12.51799201965332 + ], + [ + "kow", + -12.518117904663086 + ], + [ + "▁CBS", + -12.518172264099121 + ], + [ + "▁Feuer", + -12.518198013305664 + ], + [ + "▁crushed", + -12.518230438232422 + ], + [ + "▁cazare", + -12.518270492553711 + ], + [ + "▁beraten", 
+ -12.518401145935059 + ], + [ + "envoi", + -12.518423080444336 + ], + [ + "▁genannt", + -12.51843547821045 + ], + [ + "▁Lok", + -12.518472671508789 + ], + [ + "nox", + -12.518569946289062 + ], + [ + "wishing", + -12.518759727478027 + ], + [ + "▁freak", + -12.518759727478027 + ], + [ + "rasi", + -12.51879596710205 + ], + [ + "▁calculations", + -12.518888473510742 + ], + [ + "▁sprechen", + -12.51890754699707 + ], + [ + "5:00", + -12.519062042236328 + ], + [ + "▁Gam", + -12.519074440002441 + ], + [ + "▁invasion", + -12.519159317016602 + ], + [ + "ZA", + -12.519230842590332 + ], + [ + "aiming", + -12.519327163696289 + ], + [ + "▁näher", + -12.519404411315918 + ], + [ + "▁Maßnahmen", + -12.519433975219727 + ], + [ + "▁măsură", + -12.519490242004395 + ], + [ + "▁Bestellung", + -12.519610404968262 + ], + [ + "▁gown", + -12.519665718078613 + ], + [ + "▁oblige", + -12.519747734069824 + ], + [ + "länder", + -12.51977825164795 + ], + [ + "posi", + -12.519853591918945 + ], + [ + "▁Earn", + -12.51988410949707 + ], + [ + "▁dubl", + -12.51999282836914 + ], + [ + "▁sticky", + -12.520100593566895 + ], + [ + "▁litter", + -12.520181655883789 + ], + [ + "▁Salz", + -12.520257949829102 + ], + [ + "▁Matter", + -12.520272254943848 + ], + [ + "▁Driving", + -12.520275115966797 + ], + [ + "▁pursu", + -12.520285606384277 + ], + [ + "ographer", + -12.520390510559082 + ], + [ + "▁touring", + -12.520400047302246 + ], + [ + "opter", + -12.520444869995117 + ], + [ + "▁fierce", + -12.520475387573242 + ], + [ + "▁Audit", + -12.520480155944824 + ], + [ + "▁imperi", + -12.520755767822266 + ], + [ + "▁positiv", + -12.520780563354492 + ], + [ + "règles", + -12.520849227905273 + ], + [ + "▁bouton", + -12.520990371704102 + ], + [ + "▁victorie", + -12.520990371704102 + ], + [ + "▁manuel", + -12.521015167236328 + ], + [ + "▁await", + -12.52103042602539 + ], + [ + "▁transformer", + -12.521041870117188 + ], + [ + "▁cupboard", + -12.52108383178711 + ], + [ + "▁Hag", + -12.521117210388184 + ], + [ + "naj", + -12.521214485168457 + ], + [ + "▁annoncé", + -12.52139663696289 + ], + [ + "▁scolaire", + -12.521401405334473 + ], + [ + "▁étape", + -12.521482467651367 + ], + [ + "▁pirate", + -12.521761894226074 + ], + [ + "▁Rated", + -12.521794319152832 + ], + [ + "LOT", + -12.521846771240234 + ], + [ + "▁natura", + -12.521944046020508 + ], + [ + "oga", + -12.522336959838867 + ], + [ + "Read", + -12.522388458251953 + ], + [ + "idio", + -12.522444725036621 + ], + [ + "▁recession", + -12.522698402404785 + ], + [ + "veţi", + -12.522761344909668 + ], + [ + "▁blossom", + -12.523082733154297 + ], + [ + "▁lunar", + -12.523141860961914 + ], + [ + "▁inhibit", + -12.52316951751709 + ], + [ + "gemein", + -12.523219108581543 + ], + [ + "▁Historic", + -12.523262023925781 + ], + [ + "▁HTTP", + -12.523370742797852 + ], + [ + "misiune", + -12.5234956741333 + ], + [ + "▁Manda", + -12.523601531982422 + ], + [ + "▁Hurricane", + -12.523643493652344 + ], + [ + "Strat", + -12.523646354675293 + ], + [ + "▁populaire", + -12.523756980895996 + ], + [ + "▁useless", + -12.523762702941895 + ], + [ + "▁Leipzig", + -12.523924827575684 + ], + [ + "▁Krankheit", + -12.52392578125 + ], + [ + "▁Bonne", + -12.52397346496582 + ], + [ + "▁tissu", + -12.52399730682373 + ], + [ + "▁Baum", + -12.523998260498047 + ], + [ + "▁BUT", + -12.524152755737305 + ], + [ + "▁Mondial", + -12.52423095703125 + ], + [ + "▁triangle", + -12.524242401123047 + ], + [ + "▁Tesla", + -12.524250984191895 + ], + [ + "▁pământ", + -12.52430534362793 + ], + [ + "▁aminte", + -12.524726867675781 + ], + [ + 
"▁vehicul", + -12.524770736694336 + ], + [ + "▁cerut", + -12.52482795715332 + ], + [ + "▁respiratory", + -12.524836540222168 + ], + [ + "▁rayon", + -12.524993896484375 + ], + [ + "▁gestaltet", + -12.525067329406738 + ], + [ + "310", + -12.525139808654785 + ], + [ + "pfl", + -12.525239944458008 + ], + [ + "▁shrimp", + -12.525337219238281 + ], + [ + "▁reconnu", + -12.525409698486328 + ], + [ + "ologique", + -12.525476455688477 + ], + [ + "▁unity", + -12.525674819946289 + ], + [ + "Speicher", + -12.52569580078125 + ], + [ + "▁Movement", + -12.525794982910156 + ], + [ + "ddling", + -12.52581787109375 + ], + [ + "OE", + -12.525818824768066 + ], + [ + "▁Resolution", + -12.525863647460938 + ], + [ + "esteem", + -12.525898933410645 + ], + [ + "▁Teen", + -12.526288986206055 + ], + [ + "▁believing", + -12.526463508605957 + ], + [ + "▁Tipps", + -12.526481628417969 + ], + [ + "jpg", + -12.526494026184082 + ], + [ + "▁obs", + -12.526519775390625 + ], + [ + "SHA", + -12.526702880859375 + ], + [ + "▁quietly", + -12.526907920837402 + ], + [ + "setting", + -12.52712345123291 + ], + [ + "▁elevator", + -12.527185440063477 + ], + [ + "phor", + -12.527194023132324 + ], + [ + "Just", + -12.52725887298584 + ], + [ + "▁legatura", + -12.52739143371582 + ], + [ + "elected", + -12.527414321899414 + ], + [ + "▁disclosed", + -12.527419090270996 + ], + [ + "quarter", + -12.52743148803711 + ], + [ + "zzy", + -12.527461051940918 + ], + [ + "▁gata", + -12.527491569519043 + ], + [ + "SAN", + -12.527532577514648 + ], + [ + "▁Cathedral", + -12.527592658996582 + ], + [ + "192", + -12.527656555175781 + ], + [ + "▁RBI", + -12.527726173400879 + ], + [ + "▁Seller", + -12.527798652648926 + ], + [ + "▁urine", + -12.527807235717773 + ], + [ + "▁Hardware", + -12.527966499328613 + ], + [ + "▁steadi", + -12.527993202209473 + ], + [ + "percussion", + -12.528158187866211 + ], + [ + "▁francez", + -12.528172492980957 + ], + [ + "▁rude", + -12.528202056884766 + ], + [ + "bod", + -12.528223037719727 + ], + [ + "cession", + -12.528249740600586 + ], + [ + "▁HTC", + -12.528372764587402 + ], + [ + "HB", + -12.528576850891113 + ], + [ + "▁descent", + -12.528644561767578 + ], + [ + "▁Painting", + -12.528681755065918 + ], + [ + "119", + -12.528684616088867 + ], + [ + "sagen", + -12.52877426147461 + ], + [ + "▁salvation", + -12.52880573272705 + ], + [ + "arro", + -12.528814315795898 + ], + [ + "0.3", + -12.52886962890625 + ], + [ + "▁Duck", + -12.52890396118164 + ], + [ + "Mit", + -12.529052734375 + ], + [ + "да", + -12.52927017211914 + ], + [ + "▁Diesel", + -12.529322624206543 + ], + [ + "▁Medal", + -12.529413223266602 + ], + [ + "▁interim", + -12.529439926147461 + ], + [ + "▁montagne", + -12.529439926147461 + ], + [ + "▁Pixel", + -12.529631614685059 + ], + [ + "LINE", + -12.529806137084961 + ], + [ + "▁dureri", + -12.529938697814941 + ], + [ + "▁Bengal", + -12.529990196228027 + ], + [ + "Legea", + -12.530080795288086 + ], + [ + "▁Strecke", + -12.530094146728516 + ], + [ + "▁schneller", + -12.53012752532959 + ], + [ + "▁Karten", + -12.5301513671875 + ], + [ + "cion", + -12.530241966247559 + ], + [ + "▁Coco", + -12.53037166595459 + ], + [ + "troisième", + -12.53052806854248 + ], + [ + "401", + -12.530616760253906 + ], + [ + "▁sandwiches", + -12.530704498291016 + ], + [ + "▁folosind", + -12.530920028686523 + ], + [ + "▁Folgen", + -12.530953407287598 + ], + [ + "▁triumph", + -12.530991554260254 + ], + [ + "▁Hintergrund", + -12.530996322631836 + ], + [ + "▁revelation", + -12.531084060668945 + ], + [ + "ôme", + -12.531222343444824 + ], + [ + "▁Nex", + 
-12.531245231628418 + ], + [ + "jährigen", + -12.531295776367188 + ], + [ + "▁militant", + -12.531296730041504 + ], + [ + "▁fabricant", + -12.531671524047852 + ], + [ + "iano", + -12.531713485717773 + ], + [ + "▁formulation", + -12.53188705444336 + ], + [ + "integrating", + -12.532050132751465 + ], + [ + "▁Items", + -12.532142639160156 + ], + [ + "▁contractual", + -12.532320976257324 + ], + [ + "AIDS", + -12.532424926757812 + ], + [ + "▁pitcher", + -12.532610893249512 + ], + [ + "▁Snap", + -12.532623291015625 + ], + [ + "▁systematic", + -12.532663345336914 + ], + [ + "▁referendum", + -12.532694816589355 + ], + [ + "gau", + -12.53281021118164 + ], + [ + "administration", + -12.532917022705078 + ], + [ + "▁speci", + -12.532981872558594 + ], + [ + "ieni", + -12.532998085021973 + ], + [ + "prox", + -12.533186912536621 + ], + [ + "▁bouquet", + -12.533241271972656 + ], + [ + "▁sinnvoll", + -12.533270835876465 + ], + [ + "▁Fleisch", + -12.533309936523438 + ], + [ + "ktuell", + -12.533381462097168 + ], + [ + "▁mushrooms", + -12.533408164978027 + ], + [ + "▁Straf", + -12.533470153808594 + ], + [ + "▁cresc", + -12.533491134643555 + ], + [ + "TEM", + -12.533502578735352 + ], + [ + "▁vindec", + -12.53352165222168 + ], + [ + "▁Drama", + -12.533540725708008 + ], + [ + "chief", + -12.533550262451172 + ], + [ + "▁müsst", + -12.533614158630371 + ], + [ + "▁Warner", + -12.533662796020508 + ], + [ + "118", + -12.533761024475098 + ], + [ + "▁saptamana", + -12.533831596374512 + ], + [ + "▁animaux", + -12.53412914276123 + ], + [ + "▁Directory", + -12.534146308898926 + ], + [ + "▁entgegen", + -12.53415584564209 + ], + [ + "▁deduction", + -12.534156799316406 + ], + [ + "▁Strategic", + -12.53426456451416 + ], + [ + "▁rats", + -12.534419059753418 + ], + [ + "▁Moses", + -12.534448623657227 + ], + [ + "eko", + -12.534564971923828 + ], + [ + "strict", + -12.534590721130371 + ], + [ + "▁Ashley", + -12.534603118896484 + ], + [ + "mik", + -12.534622192382812 + ], + [ + "▁relocate", + -12.534668922424316 + ], + [ + "▁whip", + -12.534738540649414 + ], + [ + "central", + -12.534750938415527 + ], + [ + "mack", + -12.534892082214355 + ], + [ + "stufe", + -12.534961700439453 + ], + [ + "▁Metropolitan", + -12.5349702835083 + ], + [ + "▁croissance", + -12.534974098205566 + ], + [ + "▁celebrities", + -12.535021781921387 + ], + [ + "▁Geh", + -12.53507137298584 + ], + [ + "▁verifica", + -12.535196304321289 + ], + [ + "▁satisfac", + -12.535211563110352 + ], + [ + "▁Julian", + -12.535271644592285 + ], + [ + "▁remotely", + -12.535432815551758 + ], + [ + "▁Safari", + -12.535542488098145 + ], + [ + "▁Chic", + -12.53557014465332 + ], + [ + "▁clamp", + -12.535818099975586 + ], + [ + "▁Schnee", + -12.535918235778809 + ], + [ + "grown", + -12.536069869995117 + ], + [ + "▁Character", + -12.536110877990723 + ], + [ + "▁charities", + -12.536137580871582 + ], + [ + "Thankfully", + -12.536625862121582 + ], + [ + "▁țară", + -12.53681468963623 + ], + [ + "IZ", + -12.536816596984863 + ], + [ + "Vielleicht", + -12.536999702453613 + ], + [ + "▁Pon", + -12.537108421325684 + ], + [ + "gegen", + -12.53711986541748 + ], + [ + "chez", + -12.537185668945312 + ], + [ + "Black", + -12.537544250488281 + ], + [ + "▁alimentare", + -12.537555694580078 + ], + [ + "▁verloren", + -12.537562370300293 + ], + [ + "▁predictions", + -12.537657737731934 + ], + [ + "Founded", + -12.53795337677002 + ], + [ + "▁femeie", + -12.538022994995117 + ], + [ + "wahrscheinlich", + -12.538107872009277 + ], + [ + "▁squeeze", + -12.53819465637207 + ], + [ + "▁verfügbar", + 
-12.538259506225586 + ], + [ + "▁hygiene", + -12.538393020629883 + ], + [ + "voire", + -12.538667678833008 + ], + [ + "▁birou", + -12.538901329040527 + ], + [ + "▁initiate", + -12.538921356201172 + ], + [ + "▁Patriot", + -12.539009094238281 + ], + [ + "▁Income", + -12.539159774780273 + ], + [ + "▁marry", + -12.539310455322266 + ], + [ + "lokal", + -12.539336204528809 + ], + [ + "logic", + -12.53940486907959 + ], + [ + "▁Abstract", + -12.53966236114502 + ], + [ + "▁grundsätzlich", + -12.539822578430176 + ], + [ + "▁tariff", + -12.539886474609375 + ], + [ + "▁definitiv", + -12.539892196655273 + ], + [ + "paz", + -12.53989315032959 + ], + [ + "Result", + -12.539921760559082 + ], + [ + "1:30", + -12.54005241394043 + ], + [ + "▁Latest", + -12.540075302124023 + ], + [ + "▁Dauer", + -12.540155410766602 + ], + [ + "Med", + -12.540275573730469 + ], + [ + "gewicht", + -12.540348052978516 + ], + [ + "▁Gaza", + -12.540430068969727 + ], + [ + "▁Newton", + -12.540769577026367 + ], + [ + "Dokument", + -12.540897369384766 + ], + [ + "formular", + -12.540945053100586 + ], + [ + "ILE", + -12.540964126586914 + ], + [ + "▁surse", + -12.541040420532227 + ], + [ + "MH", + -12.54116153717041 + ], + [ + "▁Arctic", + -12.541255950927734 + ], + [ + "▁ISBN", + -12.541274070739746 + ], + [ + "▁quarterback", + -12.541315078735352 + ], + [ + "▁absurd", + -12.541555404663086 + ], + [ + "▁Zusammenhang", + -12.541561126708984 + ], + [ + "▁Module", + -12.54156494140625 + ], + [ + "mented", + -12.541667938232422 + ], + [ + "worthy", + -12.541797637939453 + ], + [ + "▁célèbre", + -12.541828155517578 + ], + [ + "▁maritime", + -12.541836738586426 + ], + [ + "▁Reed", + -12.541938781738281 + ], + [ + "▁threaten", + -12.542037010192871 + ], + [ + "▁Satz", + -12.542095184326172 + ], + [ + "▁sticking", + -12.542203903198242 + ], + [ + "▁transcript", + -12.542372703552246 + ], + [ + "▁Morgen", + -12.542425155639648 + ], + [ + "▁Förder", + -12.542435646057129 + ], + [ + "▁Gottes", + -12.542572021484375 + ], + [ + "▁Coordinator", + -12.542648315429688 + ], + [ + "LOG", + -12.54265022277832 + ], + [ + "EAN", + -12.542677879333496 + ], + [ + "▁préparation", + -12.54273509979248 + ], + [ + "▁Brass", + -12.542799949645996 + ], + [ + "Așa", + -12.542853355407715 + ], + [ + "▁Utiliz", + -12.54294490814209 + ], + [ + "framed", + -12.542973518371582 + ], + [ + "▁asphalt", + -12.543050765991211 + ], + [ + "116", + -12.543061256408691 + ], + [ + "▁historically", + -12.54310417175293 + ], + [ + "▁doamn", + -12.543176651000977 + ], + [ + "Air", + -12.543293952941895 + ], + [ + "▁economist", + -12.543838500976562 + ], + [ + "fresh", + -12.54384994506836 + ], + [ + "engine", + -12.543906211853027 + ], + [ + "▁Rücken", + -12.543919563293457 + ], + [ + "▁worthwhile", + -12.544124603271484 + ], + [ + "▁Therapie", + -12.544140815734863 + ], + [ + "▁Joshua", + -12.544151306152344 + ], + [ + "sicherheit", + -12.544175148010254 + ], + [ + "▁scena", + -12.544254302978516 + ], + [ + "ifiant", + -12.54433822631836 + ], + [ + "/20", + -12.54442024230957 + ], + [ + "fehl", + -12.544469833374023 + ], + [ + "karten", + -12.544515609741211 + ], + [ + "501", + -12.544656753540039 + ], + [ + "▁vide", + -12.544673919677734 + ], + [ + "▁miliarde", + -12.544699668884277 + ], + [ + "▁trillion", + -12.54470157623291 + ], + [ + "oudre", + -12.544761657714844 + ], + [ + "nderung", + -12.544803619384766 + ], + [ + "▁inquiries", + -12.544992446899414 + ], + [ + "▁echipe", + -12.545034408569336 + ], + [ + "▁investiga", + -12.545040130615234 + ], + [ + "▁detailing", + 
-12.545042991638184 + ], + [ + "VIS", + -12.545086860656738 + ], + [ + "▁geographical", + -12.545157432556152 + ], + [ + "▁authentication", + -12.54519271850586 + ], + [ + "▁Schwa", + -12.545201301574707 + ], + [ + "▁Scri", + -12.545230865478516 + ], + [ + "▁discourage", + -12.54527473449707 + ], + [ + "Pass", + -12.54529094696045 + ], + [ + "▁scattered", + -12.54529857635498 + ], + [ + "▁langsam", + -12.545300483703613 + ], + [ + "telles", + -12.545380592346191 + ], + [ + "▁ramane", + -12.5454740524292 + ], + [ + "▁inhibitor", + -12.545486450195312 + ], + [ + "▁Habit", + -12.54556941986084 + ], + [ + "▁10:00", + -12.545577049255371 + ], + [ + "▁rezultat", + -12.545595169067383 + ], + [ + "äck", + -12.545943260192871 + ], + [ + ",000.", + -12.545979499816895 + ], + [ + "▁remedies", + -12.546103477478027 + ], + [ + "▁comportament", + -12.546195983886719 + ], + [ + "namen", + -12.546229362487793 + ], + [ + "▁#3", + -12.546327590942383 + ], + [ + "enstein", + -12.546493530273438 + ], + [ + "▁relevance", + -12.546516418457031 + ], + [ + "▁présentation", + -12.54655933380127 + ], + [ + "MHz", + -12.546648979187012 + ], + [ + "EMA", + -12.546661376953125 + ], + [ + "▁palace", + -12.546709060668945 + ], + [ + "▁vizibil", + -12.546723365783691 + ], + [ + "▁griev", + -12.546820640563965 + ], + [ + "▁severely", + -12.54688549041748 + ], + [ + "expert", + -12.546942710876465 + ], + [ + "▁ravi", + -12.54696273803711 + ], + [ + "▁feasible", + -12.547002792358398 + ], + [ + "▁Wholesale", + -12.547009468078613 + ], + [ + "▁graduat", + -12.547077178955078 + ], + [ + "Kü", + -12.547094345092773 + ], + [ + "▁quotation", + -12.547157287597656 + ], + [ + "/11", + -12.54716968536377 + ], + [ + "lutter", + -12.547415733337402 + ], + [ + "▁dice", + -12.547467231750488 + ], + [ + "modal", + -12.547749519348145 + ], + [ + "ggling", + -12.547819137573242 + ], + [ + "▁considér", + -12.547986030578613 + ], + [ + "▁Insel", + -12.548097610473633 + ], + [ + "▁Database", + -12.5483980178833 + ], + [ + "icism", + -12.548508644104004 + ], + [ + "▁quarterly", + -12.54851245880127 + ], + [ + "▁formule", + -12.548558235168457 + ], + [ + "▁renouvel", + -12.54873275756836 + ], + [ + "▁Treasure", + -12.548737525939941 + ], + [ + "▁1962", + -12.548844337463379 + ], + [ + "▁republic", + -12.549111366271973 + ], + [ + "▁États", + -12.549254417419434 + ], + [ + "▁salut", + -12.549356460571289 + ], + [ + "HK", + -12.54941463470459 + ], + [ + "▁Bali", + -12.549427032470703 + ], + [ + "▁Rechnung", + -12.549447059631348 + ], + [ + "fruit", + -12.54945182800293 + ], + [ + "lays", + -12.549467086791992 + ], + [ + "LAS", + -12.54951000213623 + ], + [ + "inclin", + -12.549708366394043 + ], + [ + "▁Cré", + -12.549813270568848 + ], + [ + "▁compt", + -12.54985237121582 + ], + [ + "țiilor", + -12.550056457519531 + ], + [ + "heft", + -12.550111770629883 + ], + [ + "▁Comisi", + -12.55024242401123 + ], + [ + "▁Nurse", + -12.550516128540039 + ], + [ + "loid", + -12.550540924072266 + ], + [ + "grove", + -12.550761222839355 + ], + [ + "▁Copy", + -12.550867080688477 + ], + [ + "▁Kampf", + -12.550873756408691 + ], + [ + "izată", + -12.550945281982422 + ], + [ + "würdig", + -12.551244735717773 + ], + [ + "-2018", + -12.551305770874023 + ], + [ + "ozo", + -12.551350593566895 + ], + [ + "▁integriert", + -12.551397323608398 + ], + [ + "▁réunion", + -12.551448822021484 + ], + [ + "▁mică", + -12.551520347595215 + ], + [ + "▁Chau", + -12.551595687866211 + ], + [ + "▁allegations", + -12.551626205444336 + ], + [ + "▁shaping", + -12.551640510559082 + ], + [ + 
"▁transcription", + -12.551671981811523 + ], + [ + "▁Monica", + -12.551711082458496 + ], + [ + "▁torture", + -12.551795959472656 + ], + [ + "▁cooperative", + -12.551962852478027 + ], + [ + "▁invité", + -12.551987648010254 + ], + [ + "▁bamboo", + -12.552204132080078 + ], + [ + "▁Thinking", + -12.55232048034668 + ], + [ + "▁gratis", + -12.552392959594727 + ], + [ + "117", + -12.55267333984375 + ], + [ + "renz", + -12.55279541015625 + ], + [ + "▁Fußball", + -12.552823066711426 + ], + [ + "▁Gram", + -12.552873611450195 + ], + [ + "sprung", + -12.55290412902832 + ], + [ + "▁Schluss", + -12.55308723449707 + ], + [ + "▁Diploma", + -12.553345680236816 + ], + [ + "▁apparatus", + -12.553363800048828 + ], + [ + "notably", + -12.553483963012695 + ], + [ + "▁exercit", + -12.553532600402832 + ], + [ + "ământ", + -12.553536415100098 + ], + [ + "▁masses", + -12.553610801696777 + ], + [ + "▁preuve", + -12.553642272949219 + ], + [ + "great", + -12.553754806518555 + ], + [ + "▁Drink", + -12.553792953491211 + ], + [ + "islam", + -12.553828239440918 + ], + [ + "ARM", + -12.553914070129395 + ], + [ + "indre", + -12.554404258728027 + ], + [ + "DW", + -12.554410934448242 + ], + [ + "▁Flowers", + -12.554500579833984 + ], + [ + "▁pill", + -12.554574966430664 + ], + [ + "▁objectifs", + -12.554594039916992 + ], + [ + "▁Bezug", + -12.554659843444824 + ], + [ + "▁assumptions", + -12.55466365814209 + ], + [ + "▁vesti", + -12.554742813110352 + ], + [ + "route", + -12.554783821105957 + ], + [ + "▁Bangkok", + -12.554815292358398 + ], + [ + "▁seamlessly", + -12.55482006072998 + ], + [ + "config", + -12.554882049560547 + ], + [ + "▁username", + -12.554890632629395 + ], + [ + "unsure", + -12.555024147033691 + ], + [ + "▁poser", + -12.555129051208496 + ], + [ + "▁impozit", + -12.555246353149414 + ], + [ + "▁metode", + -12.555333137512207 + ], + [ + "defending", + -12.555347442626953 + ], + [ + "▁Nic", + -12.555431365966797 + ], + [ + "▁Vertrag", + -12.555508613586426 + ], + [ + "▁plăcut", + -12.55552864074707 + ], + [ + "▁Pou", + -12.555675506591797 + ], + [ + "UCH", + -12.555785179138184 + ], + [ + "▁Fein", + -12.555903434753418 + ], + [ + "reading", + -12.555994987487793 + ], + [ + "snip", + -12.55604076385498 + ], + [ + "▁Livre", + -12.556401252746582 + ], + [ + "lander", + -12.556509971618652 + ], + [ + "▁hydraulic", + -12.556559562683105 + ], + [ + "veiled", + -12.556563377380371 + ], + [ + "intr", + -12.556609153747559 + ], + [ + "▁Domnului", + -12.556641578674316 + ], + [ + "▁$0.", + -12.556713104248047 + ], + [ + "▁kilometers", + -12.556753158569336 + ], + [ + "spann", + -12.556870460510254 + ], + [ + "▁credibility", + -12.556892395019531 + ], + [ + "▁eBook", + -12.556953430175781 + ], + [ + "VERY", + -12.556994438171387 + ], + [ + "▁Charm", + -12.557122230529785 + ], + [ + "Evangeli", + -12.557193756103516 + ], + [ + "▁anderer", + -12.557193756103516 + ], + [ + "▁Entry", + -12.557195663452148 + ], + [ + "ffy", + -12.5573148727417 + ], + [ + "▁Exc", + -12.55737018585205 + ], + [ + "▁Omega", + -12.557446479797363 + ], + [ + "▁Funktionen", + -12.557455062866211 + ], + [ + "▁Gay", + -12.55752182006836 + ], + [ + "▁acht", + -12.557608604431152 + ], + [ + "colored", + -12.557615280151367 + ], + [ + "itude", + -12.557634353637695 + ], + [ + "▁accompagné", + -12.557645797729492 + ], + [ + "▁unfortunate", + -12.557981491088867 + ], + [ + "▁DIN", + -12.558091163635254 + ], + [ + "▁installment", + -12.558252334594727 + ], + [ + "▁indépendant", + -12.558307647705078 + ], + [ + "These", + -12.558364868164062 + ], + [ + "mitten", + 
-12.558394432067871 + ], + [ + "thank", + -12.558470726013184 + ], + [ + "▁Trek", + -12.558721542358398 + ], + [ + "üchte", + -12.55874252319336 + ], + [ + "▁cuir", + -12.55875015258789 + ], + [ + "▁turbo", + -12.558802604675293 + ], + [ + "Table", + -12.558847427368164 + ], + [ + "▁Extrem", + -12.558866500854492 + ], + [ + "▁advertisements", + -12.55915355682373 + ], + [ + "▁chaîne", + -12.559206008911133 + ], + [ + "▁corridor", + -12.559473991394043 + ], + [ + "▁râ", + -12.559651374816895 + ], + [ + "▁Opening", + -12.559718132019043 + ], + [ + "Get", + -12.559747695922852 + ], + [ + "▁storytelling", + -12.55976676940918 + ], + [ + "▁severity", + -12.559771537780762 + ], + [ + "4\"", + -12.559956550598145 + ], + [ + "▁parasit", + -12.559967994689941 + ], + [ + "angebot", + -12.56002426147461 + ], + [ + "Data", + -12.56005573272705 + ], + [ + "listen", + -12.560086250305176 + ], + [ + "▁vârstă", + -12.560094833374023 + ], + [ + "▁swallow", + -12.56025505065918 + ], + [ + "TRE", + -12.560321807861328 + ], + [ + "▁daunting", + -12.56035041809082 + ], + [ + "▁Oli", + -12.560481071472168 + ], + [ + "▁definitive", + -12.56066608428955 + ], + [ + "▁rezerva", + -12.560667037963867 + ], + [ + "/15", + -12.560807228088379 + ], + [ + "▁Landschaft", + -12.560887336730957 + ], + [ + "▁Automotive", + -12.560934066772461 + ], + [ + "▁convers", + -12.56113052368164 + ], + [ + "▁thru", + -12.561139106750488 + ], + [ + "▁Township", + -12.561140060424805 + ], + [ + "▁tilt", + -12.56119441986084 + ], + [ + "▁Criminal", + -12.561227798461914 + ], + [ + "riez", + -12.561407089233398 + ], + [ + "▁Parking", + -12.561440467834473 + ], + [ + "▁humanitarian", + -12.561518669128418 + ], + [ + "▁Kilometer", + -12.561529159545898 + ], + [ + "controlled", + -12.56189250946045 + ], + [ + "▁Klick", + -12.561910629272461 + ], + [ + "support", + -12.56199836730957 + ], + [ + "handed", + -12.562005996704102 + ], + [ + "ämtliche", + -12.562104225158691 + ], + [ + "access", + -12.562232971191406 + ], + [ + "▁eleven", + -12.562232971191406 + ], + [ + "▁ferry", + -12.56229305267334 + ], + [ + "zieren", + -12.562620162963867 + ], + [ + "▁Gebrauch", + -12.562688827514648 + ], + [ + "▁vigoare", + -12.562689781188965 + ], + [ + "MON", + -12.562756538391113 + ], + [ + "fox", + -12.562886238098145 + ], + [ + "bestimmten", + -12.562894821166992 + ], + [ + "▁Gur", + -12.563069343566895 + ], + [ + "▁Mannschaft", + -12.563146591186523 + ], + [ + "▁patrol", + -12.563173294067383 + ], + [ + "▁casă", + -12.563376426696777 + ], + [ + "▁Stories", + -12.563380241394043 + ], + [ + "▁robotic", + -12.563425064086914 + ], + [ + "tiri", + -12.563576698303223 + ], + [ + "gewiesen", + -12.5636568069458 + ], + [ + "CV", + -12.563722610473633 + ], + [ + "▁parinti", + -12.563899040222168 + ], + [ + "▁Owen", + -12.563931465148926 + ], + [ + "▁Katie", + -12.564116477966309 + ], + [ + "▁Combine", + -12.56422233581543 + ], + [ + "enfalls", + -12.56442928314209 + ], + [ + "▁financière", + -12.564447402954102 + ], + [ + "▁parliament", + -12.564549446105957 + ], + [ + "▁Weekend", + -12.564616203308105 + ], + [ + "▁Sonic", + -12.564757347106934 + ], + [ + "▁fixture", + -12.56479263305664 + ], + [ + "majorité", + -12.56497573852539 + ], + [ + "▁gravel", + -12.565028190612793 + ], + [ + "realizate", + -12.565109252929688 + ], + [ + "examining", + -12.565113067626953 + ], + [ + "▁grim", + -12.5653657913208 + ], + [ + "▁stabili", + -12.565458297729492 + ], + [ + "▁Wochenende", + -12.56551456451416 + ], + [ + "▁Hebrew", + -12.565597534179688 + ], + [ + "▁Harrison", 
+ -12.565799713134766 + ], + [ + "▁boundary", + -12.565858840942383 + ], + [ + "40,000", + -12.565902709960938 + ], + [ + "▁Ambassador", + -12.566208839416504 + ], + [ + "▁scoate", + -12.566229820251465 + ], + [ + "ffin", + -12.56623363494873 + ], + [ + "▁crème", + -12.566269874572754 + ], + [ + "▁obiecte", + -12.566378593444824 + ], + [ + "enţa", + -12.566763877868652 + ], + [ + "▁subsidiary", + -12.566797256469727 + ], + [ + "▁Franco", + -12.56688404083252 + ], + [ + "▁visuel", + -12.567042350769043 + ], + [ + "▁uitat", + -12.56708812713623 + ], + [ + "▁revisit", + -12.567122459411621 + ], + [ + "▁Camping", + -12.567150115966797 + ], + [ + "▁Divine", + -12.567304611206055 + ], + [ + "4-6", + -12.567323684692383 + ], + [ + "▁Brandon", + -12.567378997802734 + ], + [ + "ма", + -12.567450523376465 + ], + [ + "sofern", + -12.56745433807373 + ], + [ + "ntweder", + -12.56748104095459 + ], + [ + "▁Shoot", + -12.567618370056152 + ], + [ + "étais", + -12.56771183013916 + ], + [ + "SPEC", + -12.567930221557617 + ], + [ + "▁dreapta", + -12.567973136901855 + ], + [ + "▁repaired", + -12.568055152893066 + ], + [ + "pyr", + -12.568136215209961 + ], + [ + "▁warranties", + -12.568175315856934 + ], + [ + "▁représent", + -12.568263053894043 + ], + [ + "ADE", + -12.568293571472168 + ], + [ + "▁selective", + -12.56836223602295 + ], + [ + "▁Banking", + -12.568441390991211 + ], + [ + "▁ergonomic", + -12.568562507629395 + ], + [ + "...”", + -12.568602561950684 + ], + [ + "▁willingness", + -12.56867790222168 + ], + [ + "isser", + -12.568784713745117 + ], + [ + "▁confection", + -12.568961143493652 + ], + [ + "admi", + -12.569009780883789 + ], + [ + "▁Freizeit", + -12.569023132324219 + ], + [ + "▁illuminate", + -12.569151878356934 + ], + [ + "▁Repeat", + -12.569170951843262 + ], + [ + "▁Zeitpunkt", + -12.56933879852295 + ], + [ + "claimed", + -12.569439888000488 + ], + [ + "▁erhältlich", + -12.569480895996094 + ], + [ + "▁paysage", + -12.569537162780762 + ], + [ + "▁Atom", + -12.569890022277832 + ], + [ + "▁Graf", + -12.570086479187012 + ], + [ + "▁firmware", + -12.570093154907227 + ], + [ + "▁Swift", + -12.570180892944336 + ], + [ + "▁cercetare", + -12.57018756866455 + ], + [ + "▁internațional", + -12.570330619812012 + ], + [ + "▁zombie", + -12.570330619812012 + ], + [ + "▁Spread", + -12.57050609588623 + ], + [ + "ECO", + -12.57056999206543 + ], + [ + "▁Gestaltung", + -12.570758819580078 + ], + [ + "rast", + -12.570858001708984 + ], + [ + "▁perfume", + -12.5709228515625 + ], + [ + "▁roulette", + -12.570924758911133 + ], + [ + "▁distill", + -12.57096004486084 + ], + [ + "▁Produkten", + -12.570992469787598 + ], + [ + "225", + -12.571310043334961 + ], + [ + "facing", + -12.571371078491211 + ], + [ + "▁paradigm", + -12.571514129638672 + ], + [ + "▁Rah", + -12.571532249450684 + ], + [ + "▁Renault", + -12.571846961975098 + ], + [ + "willig", + -12.571864128112793 + ], + [ + "▁Vet", + -12.571890830993652 + ], + [ + "▁reprezenta", + -12.572126388549805 + ], + [ + "stoß", + -12.572185516357422 + ], + [ + "▁Weiß", + -12.5722074508667 + ], + [ + "▁Solo", + -12.572210311889648 + ], + [ + "▁Jin", + -12.572646141052246 + ], + [ + "▁Brussels", + -12.572693824768066 + ], + [ + "▁Tournament", + -12.572693824768066 + ], + [ + "▁proced", + -12.572710037231445 + ], + [ + "▁Rabbi", + -12.572835922241211 + ], + [ + "▁gameplay", + -12.572851181030273 + ], + [ + "▁ATM", + -12.572901725769043 + ], + [ + "▁firearm", + -12.572906494140625 + ], + [ + "revealing", + -12.573003768920898 + ], + [ + "schütz", + -12.57310676574707 + ], + [ + 
"▁Absolutely", + -12.573288917541504 + ], + [ + "▁interference", + -12.573433876037598 + ], + [ + "▁Employment", + -12.573558807373047 + ], + [ + "▁chord", + -12.57356071472168 + ], + [ + "▁oportun", + -12.573585510253906 + ], + [ + "▁frontier", + -12.573770523071289 + ], + [ + "▁Lunch", + -12.573891639709473 + ], + [ + "bread", + -12.57397174835205 + ], + [ + "▁rendered", + -12.573976516723633 + ], + [ + "5.1", + -12.573984146118164 + ], + [ + "▁motif", + -12.574066162109375 + ], + [ + "▁Schlag", + -12.574227333068848 + ], + [ + "113", + -12.574264526367188 + ], + [ + "▁Deux", + -12.574288368225098 + ], + [ + "▁surplus", + -12.574309349060059 + ], + [ + "ALS", + -12.574417114257812 + ], + [ + "▁abortion", + -12.574472427368164 + ], + [ + "▁airplane", + -12.574475288391113 + ], + [ + "▁migrants", + -12.574501991271973 + ], + [ + "kli", + -12.574539184570312 + ], + [ + "▁crochet", + -12.57454776763916 + ], + [ + "fahrer", + -12.574671745300293 + ], + [ + "▁reconstruction", + -12.57471752166748 + ], + [ + "▁difer", + -12.574752807617188 + ], + [ + "▁Conserv", + -12.57478141784668 + ], + [ + "▁NSW", + -12.57479476928711 + ], + [ + "▁regim", + -12.574844360351562 + ], + [ + "▁Except", + -12.574904441833496 + ], + [ + "▁trage", + -12.574978828430176 + ], + [ + "▁Consiliul", + -12.575058937072754 + ], + [ + "▁Bedarf", + -12.575064659118652 + ], + [ + "▁additive", + -12.5750732421875 + ], + [ + "know", + -12.5751371383667 + ], + [ + "▁sauna", + -12.57517147064209 + ], + [ + "▁mortality", + -12.575201034545898 + ], + [ + "kräftig", + -12.575358390808105 + ], + [ + "▁Own", + -12.575445175170898 + ], + [ + "nzo", + -12.575519561767578 + ], + [ + "▁villes", + -12.575543403625488 + ], + [ + "▁recette", + -12.575749397277832 + ], + [ + "▁attacking", + -12.575799942016602 + ], + [ + "beruf", + -12.57608699798584 + ], + [ + "▁integrat", + -12.57612419128418 + ], + [ + "realizarea", + -12.576201438903809 + ], + [ + "▁exemption", + -12.57628345489502 + ], + [ + "GW", + -12.576285362243652 + ], + [ + "▁Nano", + -12.576395034790039 + ], + [ + "SCH", + -12.576440811157227 + ], + [ + "▁honesty", + -12.576457023620605 + ], + [ + "▁Arriv", + -12.576515197753906 + ], + [ + "▁gland", + -12.576542854309082 + ], + [ + "▁proactive", + -12.576746940612793 + ], + [ + "▁agile", + -12.576837539672852 + ], + [ + "▁kernel", + -12.576844215393066 + ], + [ + "▁nurture", + -12.576860427856445 + ], + [ + "▁Patent", + -12.576963424682617 + ], + [ + "▁excursi", + -12.577189445495605 + ], + [ + "pulsion", + -12.577326774597168 + ], + [ + "stellte", + -12.577351570129395 + ], + [ + "ständige", + -12.577421188354492 + ], + [ + "▁Rebecca", + -12.577436447143555 + ], + [ + "▁Securities", + -12.577436447143555 + ], + [ + "mètre", + -12.577446937561035 + ], + [ + "LOW", + -12.577469825744629 + ], + [ + "▁consilier", + -12.577537536621094 + ], + [ + "▁Architekt", + -12.577733993530273 + ], + [ + "▁china", + -12.57777214050293 + ], + [ + "älfte", + -12.577778816223145 + ], + [ + "▁Combin", + -12.577795028686523 + ], + [ + "480", + -12.577999114990234 + ], + [ + "liv", + -12.578021049499512 + ], + [ + "▁peur", + -12.578067779541016 + ], + [ + "keep", + -12.57822322845459 + ], + [ + "▁Verhalten", + -12.578324317932129 + ], + [ + "▁peek", + -12.578446388244629 + ], + [ + "▁dient", + -12.578550338745117 + ], + [ + "▁prevazut", + -12.578625679016113 + ], + [ + "Emmanuel", + -12.57862663269043 + ], + [ + "▁incidence", + -12.57862663269043 + ], + [ + "▁Framework", + -12.578715324401855 + ], + [ + "dass", + -12.578816413879395 + ], + [ + 
"artiste", + -12.578874588012695 + ], + [ + "▁Accept", + -12.578971862792969 + ], + [ + "▁plunge", + -12.579073905944824 + ], + [ + "chauff", + -12.579118728637695 + ], + [ + "▁guilt", + -12.579156875610352 + ], + [ + "▁senator", + -12.57945442199707 + ], + [ + "▁disable", + -12.579776763916016 + ], + [ + "▁partout", + -12.579901695251465 + ], + [ + "JC", + -12.580045700073242 + ], + [ + "▁Highly", + -12.580150604248047 + ], + [ + "▁beneficii", + -12.58021068572998 + ], + [ + "fibro", + -12.580347061157227 + ], + [ + "interpreted", + -12.580550193786621 + ], + [ + "▁genauso", + -12.58056354522705 + ], + [ + "▁basil", + -12.580601692199707 + ], + [ + "▁Angst", + -12.580697059631348 + ], + [ + "rzte", + -12.580933570861816 + ], + [ + "Master", + -12.58112907409668 + ], + [ + "▁french", + -12.581324577331543 + ], + [ + "▁Duration", + -12.581343650817871 + ], + [ + "HM", + -12.581402778625488 + ], + [ + "▁Bert", + -12.581518173217773 + ], + [ + "▁1963", + -12.581534385681152 + ], + [ + "▁warrior", + -12.581604957580566 + ], + [ + "2007", + -12.581696510314941 + ], + [ + "▁recycle", + -12.581722259521484 + ], + [ + "▁fertiliz", + -12.581808090209961 + ], + [ + "▁hatch", + -12.581809997558594 + ], + [ + "ISH", + -12.581811904907227 + ], + [ + "luft", + -12.582321166992188 + ], + [ + "▁crying", + -12.582452774047852 + ], + [ + "▁activist", + -12.5824613571167 + ], + [ + "schränkt", + -12.582500457763672 + ], + [ + "▁diff", + -12.582500457763672 + ], + [ + "▁Demand", + -12.58262825012207 + ], + [ + "▁transported", + -12.582669258117676 + ], + [ + "▁Remodel", + -12.582686424255371 + ], + [ + "▁Etats", + -12.582704544067383 + ], + [ + "ANI", + -12.582777976989746 + ], + [ + "▁spéciale", + -12.582804679870605 + ], + [ + "▁Konzert", + -12.582805633544922 + ], + [ + "▁Bedürfnisse", + -12.58281135559082 + ], + [ + "▁overlooked", + -12.582864761352539 + ], + [ + "▁cutter", + -12.582974433898926 + ], + [ + "klär", + -12.58311939239502 + ], + [ + "▁Materialien", + -12.583135604858398 + ], + [ + "▁gewisse", + -12.583388328552246 + ], + [ + "bull", + -12.583499908447266 + ], + [ + "Good", + -12.583513259887695 + ], + [ + "Gig", + -12.583616256713867 + ], + [ + "Logic", + -12.583736419677734 + ], + [ + "▁Schlaf", + -12.583970069885254 + ], + [ + "▁Yankee", + -12.583996772766113 + ], + [ + "▁Batman", + -12.584020614624023 + ], + [ + "▁funcție", + -12.584166526794434 + ], + [ + "▁partenariat", + -12.584294319152832 + ], + [ + "▁Antrag", + -12.584348678588867 + ], + [ + "▁Pill", + -12.584519386291504 + ], + [ + "▁tram", + -12.584637641906738 + ], + [ + "▁Minor", + -12.58465576171875 + ], + [ + "pertaining", + -12.584678649902344 + ], + [ + "▁apropiere", + -12.584843635559082 + ], + [ + "▁Barack", + -12.584965705871582 + ], + [ + "schön", + -12.585174560546875 + ], + [ + "▁Sandy", + -12.585182189941406 + ], + [ + "kilometre", + -12.585192680358887 + ], + [ + "▁diy", + -12.585234642028809 + ], + [ + "▁1966", + -12.585453987121582 + ], + [ + "gelassen", + -12.585485458374023 + ], + [ + "▁Trial", + -12.585592269897461 + ], + [ + "▁Bauer", + -12.585603713989258 + ], + [ + "▁assumption", + -12.585648536682129 + ], + [ + "birth", + -12.585668563842773 + ], + [ + "rechnen", + -12.585861206054688 + ], + [ + "▁meci", + -12.585867881774902 + ], + [ + "▁gloss", + -12.585906982421875 + ], + [ + "▁sewer", + -12.58593463897705 + ], + [ + "▁Stimme", + -12.585955619812012 + ], + [ + "▁Fortune", + -12.585967063903809 + ], + [ + "▁Lösungen", + -12.586007118225098 + ], + [ + "▁impresi", + -12.586074829101562 + ], + [ + "schlaf", + 
-12.586089134216309 + ], + [ + "prüfung", + -12.586097717285156 + ], + [ + "▁instalat", + -12.586198806762695 + ], + [ + "▁picturesque", + -12.586233139038086 + ], + [ + "vait", + -12.586240768432617 + ], + [ + "8.1", + -12.58629035949707 + ], + [ + "▁călători", + -12.586392402648926 + ], + [ + "▁dix", + -12.586400032043457 + ], + [ + "▁furnished", + -12.586411476135254 + ], + [ + "▁dolari", + -12.586445808410645 + ], + [ + "▁regener", + -12.586562156677246 + ], + [ + "▁astazi", + -12.586621284484863 + ], + [ + "▁Sprach", + -12.586750030517578 + ], + [ + "delà", + -12.586846351623535 + ], + [ + "avec", + -12.58694076538086 + ], + [ + "▁Buddhist", + -12.586990356445312 + ], + [ + "▁alphabet", + -12.586990356445312 + ], + [ + "▁berichtet", + -12.587201118469238 + ], + [ + "ideally", + -12.587209701538086 + ], + [ + "▁annuel", + -12.587421417236328 + ], + [ + "▁laughing", + -12.587532997131348 + ], + [ + "▁Zustand", + -12.587639808654785 + ], + [ + "cini", + -12.587692260742188 + ], + [ + "solid", + -12.587724685668945 + ], + [ + "▁Broker", + -12.587868690490723 + ], + [ + "▁developmental", + -12.5879545211792 + ], + [ + "▁Summary", + -12.588191032409668 + ], + [ + "▁Trinity", + -12.58819580078125 + ], + [ + "▁sucre", + -12.58821964263916 + ], + [ + "▁sandal", + -12.588231086730957 + ], + [ + "PEN", + -12.588274955749512 + ], + [ + "gewinn", + -12.588486671447754 + ], + [ + "olé", + -12.588555335998535 + ], + [ + "matric", + -12.58865737915039 + ], + [ + "xton", + -12.588695526123047 + ], + [ + "werten", + -12.588740348815918 + ], + [ + "▁Dust", + -12.588765144348145 + ], + [ + "▁Journey", + -12.588791847229004 + ], + [ + "▁Rush", + -12.588793754577637 + ], + [ + "▁NCAA", + -12.588839530944824 + ], + [ + "▁allgemeine", + -12.588926315307617 + ], + [ + "▁Universe", + -12.589007377624512 + ], + [ + "▁connais", + -12.589099884033203 + ], + [ + "▁quantité", + -12.58912467956543 + ], + [ + "▁Kab", + -12.589150428771973 + ], + [ + "▁purse", + -12.589150428771973 + ], + [ + "Health", + -12.589210510253906 + ], + [ + "▁apărut", + -12.589288711547852 + ], + [ + "▁bypass", + -12.589313507080078 + ], + [ + "pronounced", + -12.58936595916748 + ], + [ + "▁magnitude", + -12.589393615722656 + ], + [ + "▁Walmart", + -12.589394569396973 + ], + [ + "ède", + -12.589409828186035 + ], + [ + "▁serum", + -12.589590072631836 + ], + [ + "▁baseline", + -12.589765548706055 + ], + [ + "STER", + -12.589932441711426 + ], + [ + "▁ONLY", + -12.590052604675293 + ], + [ + "▁individuell", + -12.590086936950684 + ], + [ + "▁Ghi", + -12.590139389038086 + ], + [ + "▁Ruby", + -12.59020709991455 + ], + [ + "▁Chal", + -12.590241432189941 + ], + [ + "▁Vier", + -12.590261459350586 + ], + [ + "5.0", + -12.5903902053833 + ], + [ + "▁fog", + -12.590519905090332 + ], + [ + "esel", + -12.590557098388672 + ], + [ + "▁Python", + -12.590598106384277 + ], + [ + "▁urmează", + -12.590608596801758 + ], + [ + "▁trustworthy", + -12.590639114379883 + ], + [ + "hört", + -12.590729713439941 + ], + [ + "▁tâche", + -12.59078311920166 + ], + [ + "Patri", + -12.590799331665039 + ], + [ + "▁grind", + -12.590928077697754 + ], + [ + "▁Raven", + -12.590934753417969 + ], + [ + "▁poursuiv", + -12.590951919555664 + ], + [ + "▁simpli", + -12.591140747070312 + ], + [ + "▁echo", + -12.591165542602539 + ], + [ + "▁Attention", + -12.591313362121582 + ], + [ + "Against", + -12.591402053833008 + ], + [ + "GET", + -12.59148120880127 + ], + [ + "▁turistic", + -12.591535568237305 + ], + [ + "▁tenure", + -12.59158992767334 + ], + [ + "▁alimentaire", + -12.591651916503906 + 
], + [ + "Who", + -12.59172248840332 + ], + [ + "▁ändern", + -12.591729164123535 + ], + [ + "▁rebound", + -12.591778755187988 + ], + [ + "grenze", + -12.591849327087402 + ], + [ + "▁Fame", + -12.592093467712402 + ], + [ + "▁Kick", + -12.592215538024902 + ], + [ + "▁Detail", + -12.59228801727295 + ], + [ + "▁Push", + -12.592308044433594 + ], + [ + "production", + -12.592430114746094 + ], + [ + "▁Candidates", + -12.59244441986084 + ], + [ + "▁reușit", + -12.592484474182129 + ], + [ + "istischen", + -12.592525482177734 + ], + [ + "lassung", + -12.592649459838867 + ], + [ + "▁Hann", + -12.592713356018066 + ], + [ + "espère", + -12.592965126037598 + ], + [ + "▁vergessen", + -12.593008041381836 + ], + [ + "▁smiling", + -12.593010902404785 + ], + [ + "▁devotion", + -12.593016624450684 + ], + [ + "▁pastry", + -12.593071937561035 + ], + [ + "Add", + -12.593390464782715 + ], + [ + "▁authorization", + -12.593494415283203 + ], + [ + "▁Suisse", + -12.593568801879883 + ], + [ + "▁Berkeley", + -12.593611717224121 + ], + [ + "▁Guild", + -12.593660354614258 + ], + [ + "▁choir", + -12.593748092651367 + ], + [ + "learning", + -12.593802452087402 + ], + [ + "▁Tanz", + -12.593894004821777 + ], + [ + "mardi", + -12.594076156616211 + ], + [ + "▁rezultatele", + -12.594191551208496 + ], + [ + "▁earrings", + -12.594218254089355 + ], + [ + "▁turbine", + -12.594223976135254 + ], + [ + "▁jeudi", + -12.594284057617188 + ], + [ + "terapie", + -12.594576835632324 + ], + [ + "regain", + -12.59461498260498 + ], + [ + "SET", + -12.594643592834473 + ], + [ + "▁Hände", + -12.594681739807129 + ], + [ + "▁Globe", + -12.594683647155762 + ], + [ + "frag", + -12.594775199890137 + ], + [ + "▁Treasury", + -12.594820976257324 + ], + [ + "▁hazardous", + -12.594820976257324 + ], + [ + "▁Fahrt", + -12.594928741455078 + ], + [ + "▁fulfilled", + -12.594966888427734 + ], + [ + "▁manga", + -12.594987869262695 + ], + [ + "▁composé", + -12.595067977905273 + ], + [ + "▁ABS", + -12.595132827758789 + ], + [ + "▁preced", + -12.595197677612305 + ], + [ + "▁beauté", + -12.595233917236328 + ], + [ + "▁interessant", + -12.59526252746582 + ], + [ + "▁lieber", + -12.595324516296387 + ], + [ + "▁Kö", + -12.595378875732422 + ], + [ + "EMS", + -12.595410346984863 + ], + [ + "FER", + -12.595413208007812 + ], + [ + "▁eure", + -12.595427513122559 + ], + [ + "▁plumber", + -12.595427513122559 + ], + [ + "Love", + -12.595463752746582 + ], + [ + "▁Marcus", + -12.595635414123535 + ], + [ + "▁registry", + -12.595637321472168 + ], + [ + "▁uncle", + -12.595696449279785 + ], + [ + "▁neuf", + -12.595728874206543 + ], + [ + "▁Fläche", + -12.59575080871582 + ], + [ + "▁restaur", + -12.595815658569336 + ], + [ + "▁noticeable", + -12.595833778381348 + ], + [ + "▁riches", + -12.595871925354004 + ], + [ + "occupy", + -12.596031188964844 + ], + [ + "▁hurricane", + -12.596031188964844 + ], + [ + "▁gespeichert", + -12.596033096313477 + ], + [ + "▁Bordeaux", + -12.596039772033691 + ], + [ + "▁Maj", + -12.59637451171875 + ], + [ + "Applied", + -12.596439361572266 + ], + [ + "▁compter", + -12.596575736999512 + ], + [ + "impact", + -12.59663200378418 + ], + [ + "▁Improve", + -12.596758842468262 + ], + [ + "▁Calif", + -12.596832275390625 + ], + [ + "▁desfășur", + -12.596939086914062 + ], + [ + "▁packaged", + -12.597001075744629 + ], + [ + "180", + -12.59703540802002 + ], + [ + "devenu", + -12.597042083740234 + ], + [ + "▁Battery", + -12.597243309020996 + ], + [ + "▁objection", + -12.597254753112793 + ], + [ + "▁anual", + -12.597305297851562 + ], + [ + "▁Landscape", + 
-12.59731674194336 + ], + [ + "IQ", + -12.597403526306152 + ], + [ + "grès", + -12.597586631774902 + ], + [ + "▁witnesses", + -12.597750663757324 + ], + [ + "enţial", + -12.597764015197754 + ], + [ + "▁plateau", + -12.597779273986816 + ], + [ + "▁bilete", + -12.59783935546875 + ], + [ + "▁Bronze", + -12.59786605834961 + ], + [ + "▁Kiss", + -12.597946166992188 + ], + [ + "▁Serge", + -12.598093032836914 + ], + [ + "atomic", + -12.598145484924316 + ], + [ + "▁renovated", + -12.59817886352539 + ], + [ + "player", + -12.598212242126465 + ], + [ + "▁dirig", + -12.598291397094727 + ], + [ + "▁Îm", + -12.598296165466309 + ], + [ + "▁plimb", + -12.59843635559082 + ], + [ + "▁ambassador", + -12.598455429077148 + ], + [ + "▁apropiat", + -12.598455429077148 + ], + [ + "▁adaug", + -12.598602294921875 + ], + [ + "ogenic", + -12.59872055053711 + ], + [ + "kämpfe", + -12.598779678344727 + ], + [ + "▁Hillary", + -12.598907470703125 + ], + [ + "yak", + -12.598942756652832 + ], + [ + "General", + -12.59925365447998 + ], + [ + "▁Zugang", + -12.599400520324707 + ], + [ + "▁fertil", + -12.599457740783691 + ], + [ + "incat", + -12.599536895751953 + ], + [ + "assessing", + -12.599587440490723 + ], + [ + "▁Cincinnati", + -12.59967041015625 + ], + [ + "▁convincing", + -12.599685668945312 + ], + [ + "sadly", + -12.59974479675293 + ], + [ + "kunde", + -12.599801063537598 + ], + [ + "ambul", + -12.599913597106934 + ], + [ + "▁familii", + -12.599974632263184 + ], + [ + "juri", + -12.60007095336914 + ], + [ + "ionen", + -12.600102424621582 + ], + [ + "▁Wirtschaft", + -12.600130081176758 + ], + [ + "contract", + -12.600135803222656 + ], + [ + "punem", + -12.600151062011719 + ], + [ + "handlung", + -12.600394248962402 + ], + [ + "▁fournir", + -12.600455284118652 + ], + [ + "▁Ambi", + -12.600663185119629 + ], + [ + "▁Isaac", + -12.600663185119629 + ], + [ + "▁praying", + -12.6007719039917 + ], + [ + "▁Italien", + -12.600848197937012 + ], + [ + "233", + -12.600850105285645 + ], + [ + "spawn", + -12.600913047790527 + ], + [ + "▁legii", + -12.60092544555664 + ], + [ + "▁zuvor", + -12.601018905639648 + ], + [ + "▁comune", + -12.601030349731445 + ], + [ + "official", + -12.601165771484375 + ], + [ + "144", + -12.601290702819824 + ], + [ + "izeaza", + -12.601329803466797 + ], + [ + "▁Keller", + -12.601372718811035 + ], + [ + "ORE", + -12.601378440856934 + ], + [ + "122", + -12.601485252380371 + ], + [ + "incurred", + -12.60150146484375 + ], + [ + "CHA", + -12.601579666137695 + ], + [ + "▁Herzen", + -12.601590156555176 + ], + [ + "▁reasoning", + -12.6016263961792 + ], + [ + "affaire", + -12.601849555969238 + ], + [ + "ooth", + -12.601890563964844 + ], + [ + "155", + -12.601998329162598 + ], + [ + "▁invented", + -12.602113723754883 + ], + [ + "▁Comun", + -12.602140426635742 + ], + [ + "zähl", + -12.602179527282715 + ], + [ + "geliefert", + -12.602212905883789 + ], + [ + "explorer", + -12.602213859558105 + ], + [ + "nect", + -12.602326393127441 + ], + [ + "▁mercredi", + -12.602408409118652 + ], + [ + "▁volonté", + -12.602408409118652 + ], + [ + "easy", + -12.602453231811523 + ], + [ + "▁feat", + -12.602490425109863 + ], + [ + "rented", + -12.602580070495605 + ], + [ + "▁converter", + -12.602592468261719 + ], + [ + "Verhältnis", + -12.602713584899902 + ], + [ + "▁Iceland", + -12.602792739868164 + ], + [ + "▁pretul", + -12.602933883666992 + ], + [ + "▁Vorstellung", + -12.602960586547852 + ], + [ + "▁hydrogen", + -12.603096008300781 + ], + [ + "▁pouvai", + -12.603097915649414 + ], + [ + "▁dawn", + -12.603153228759766 + ], + [ + 
"▁Georg", + -12.603269577026367 + ], + [ + "▁cautious", + -12.603367805480957 + ], + [ + "▁Pattern", + -12.603464126586914 + ], + [ + "▁Ox", + -12.603602409362793 + ], + [ + "▁decizie", + -12.603676795959473 + ], + [ + "REC", + -12.603889465332031 + ], + [ + "▁Mortgage", + -12.60393238067627 + ], + [ + "attributed", + -12.603973388671875 + ], + [ + "floor", + -12.603992462158203 + ], + [ + "▁Wichtig", + -12.604207992553711 + ], + [ + "enseignant", + -12.604265213012695 + ], + [ + "▁civilization", + -12.604302406311035 + ], + [ + "▁dispozitie", + -12.60450553894043 + ], + [ + "▁geographic", + -12.604543685913086 + ], + [ + "▁Kun", + -12.604607582092285 + ], + [ + "LIN", + -12.604679107666016 + ], + [ + "▁auzit", + -12.604707717895508 + ], + [ + "except", + -12.604761123657227 + ], + [ + "▁superbe", + -12.604904174804688 + ], + [ + "▁installé", + -12.605000495910645 + ], + [ + "▁Peninsula", + -12.605154037475586 + ], + [ + "▁norme", + -12.605164527893066 + ], + [ + "elul", + -12.60517406463623 + ], + [ + "▁Experten", + -12.605256080627441 + ], + [ + "expression", + -12.605295181274414 + ], + [ + "Christ", + -12.605320930480957 + ], + [ + "▁Fuel", + -12.605369567871094 + ], + [ + "▁muffin", + -12.605485916137695 + ], + [ + "▁lecteur", + -12.605521202087402 + ], + [ + "▁gifted", + -12.605589866638184 + ], + [ + "▁Japon", + -12.605602264404297 + ], + [ + "▁SSD", + -12.605644226074219 + ], + [ + "▁Calgary", + -12.605765342712402 + ], + [ + "▁hooked", + -12.605876922607422 + ], + [ + "▁Joan", + -12.605896949768066 + ], + [ + "▁tangible", + -12.606083869934082 + ], + [ + "FW", + -12.606225967407227 + ], + [ + "olli", + -12.6062593460083 + ], + [ + "▁Platinum", + -12.606376647949219 + ], + [ + "▁miniature", + -12.606392860412598 + ], + [ + "▁lump", + -12.606608390808105 + ], + [ + "ologische", + -12.60689926147461 + ], + [ + "▁Istanbul", + -12.606987953186035 + ], + [ + "▁Compar", + -12.607060432434082 + ], + [ + "tropic", + -12.607256889343262 + ], + [ + "KING", + -12.607279777526855 + ], + [ + "Präsident", + -12.607297897338867 + ], + [ + "▁fotografii", + -12.607303619384766 + ], + [ + "hoped", + -12.607451438903809 + ], + [ + "▁pâte", + -12.607601165771484 + ], + [ + "▁mercy", + -12.60760498046875 + ], + [ + "▁quiz", + -12.607619285583496 + ], + [ + "demonstrating", + -12.607678413391113 + ], + [ + "▁douce", + -12.607832908630371 + ], + [ + "▁Vest", + -12.607841491699219 + ], + [ + "▁Harvey", + -12.6082181930542 + ], + [ + "▁breit", + -12.608227729797363 + ], + [ + "▁Bereits", + -12.608291625976562 + ], + [ + "▁breakthrough", + -12.608316421508789 + ], + [ + "▁masterpiece", + -12.608320236206055 + ], + [ + "▁Chester", + -12.60838794708252 + ], + [ + "▁indiqué", + -12.608451843261719 + ], + [ + "hook", + -12.60857105255127 + ], + [ + "statutory", + -12.608596801757812 + ], + [ + "▁Direkt", + -12.608617782592773 + ], + [ + "▁specs", + -12.608708381652832 + ], + [ + "Drive", + -12.608725547790527 + ], + [ + "▁survivors", + -12.608826637268066 + ], + [ + "▁jackpot", + -12.608840942382812 + ], + [ + "▁garder", + -12.608872413635254 + ], + [ + "▁Geburtstag", + -12.60887336730957 + ], + [ + "145", + -12.608963966369629 + ], + [ + "▁Clay", + -12.609028816223145 + ], + [ + "▁WHO", + -12.60906982421875 + ], + [ + "▁Ellen", + -12.609393119812012 + ], + [ + "▁bonheur", + -12.609440803527832 + ], + [ + "▁hazards", + -12.609440803527832 + ], + [ + "▁Kaiser", + -12.609488487243652 + ], + [ + "▁tightly", + -12.609506607055664 + ], + [ + "Universitatea", + -12.609529495239258 + ], + [ + "▁rinse", + 
-12.609533309936523 + ], + [ + "▁passant", + -12.609640121459961 + ], + [ + "▁sânge", + -12.609832763671875 + ], + [ + "▁peuple", + -12.60983657836914 + ], + [ + "jungen", + -12.609975814819336 + ], + [ + "▁inappropriate", + -12.610054969787598 + ], + [ + "▁mitigate", + -12.610066413879395 + ], + [ + "MID", + -12.610221862792969 + ], + [ + "▁telecom", + -12.610297203063965 + ], + [ + "▁plaj", + -12.610316276550293 + ], + [ + "▁presupune", + -12.610361099243164 + ], + [ + "acco", + -12.61038875579834 + ], + [ + "expressing", + -12.610654830932617 + ], + [ + "▁Symphony", + -12.61066722869873 + ], + [ + "temperatur", + -12.610710144042969 + ], + [ + "▁activităţi", + -12.610800743103027 + ], + [ + "▁amended", + -12.610847473144531 + ], + [ + "▁rehab", + -12.610909461975098 + ], + [ + "▁sportiv", + -12.611004829406738 + ], + [ + "hotel", + -12.611031532287598 + ], + [ + "branche", + -12.61103630065918 + ], + [ + "▁Noch", + -12.611079216003418 + ], + [ + "▁1961", + -12.611238479614258 + ], + [ + "release", + -12.611359596252441 + ], + [ + "blaze", + -12.611381530761719 + ], + [ + "Adv", + -12.61139965057373 + ], + [ + "Line", + -12.611671447753906 + ], + [ + "▁financiare", + -12.61184310913086 + ], + [ + "▁chauffage", + -12.611919403076172 + ], + [ + "мо", + -12.61192512512207 + ], + [ + "schuhe", + -12.612035751342773 + ], + [ + "blé", + -12.612040519714355 + ], + [ + "▁Echo", + -12.612468719482422 + ], + [ + "▁remarks", + -12.61253547668457 + ], + [ + "scriu", + -12.612629890441895 + ], + [ + "Vir", + -12.612701416015625 + ], + [ + "War", + -12.61271858215332 + ], + [ + "atifs", + -12.613006591796875 + ], + [ + "RING", + -12.613082885742188 + ], + [ + "▁Instruction", + -12.613150596618652 + ], + [ + "▁verlassen", + -12.613155364990234 + ], + [ + "▁ergänz", + -12.613234519958496 + ], + [ + "▁Emil", + -12.613248825073242 + ], + [ + "▁empire", + -12.613263130187988 + ], + [ + "▁Einkauf", + -12.613306999206543 + ], + [ + "utigen", + -12.613329887390137 + ], + [ + "▁audition", + -12.613390922546387 + ], + [ + "travelled", + -12.61347484588623 + ], + [ + "ло", + -12.613579750061035 + ], + [ + "▁infinite", + -12.613720893859863 + ], + [ + "▁Lieblings", + -12.613749504089355 + ], + [ + "▁vân", + -12.613754272460938 + ], + [ + "▁spinning", + -12.613778114318848 + ], + [ + "converting", + -12.614031791687012 + ], + [ + "▁uncertain", + -12.61415958404541 + ], + [ + "restul", + -12.614168167114258 + ], + [ + "▁colourful", + -12.61420726776123 + ], + [ + "▁accountant", + -12.614338874816895 + ], + [ + "bourg", + -12.614532470703125 + ], + [ + "▁structuri", + -12.614538192749023 + ], + [ + "▁Booking", + -12.61465835571289 + ], + [ + "intéresse", + -12.614683151245117 + ], + [ + "▁coordinated", + -12.614753723144531 + ], + [ + "▁precaution", + -12.61497688293457 + ], + [ + "▁Cheese", + -12.615015983581543 + ], + [ + "▁surfing", + -12.615192413330078 + ], + [ + "▁souffr", + -12.61524486541748 + ], + [ + "▁Menu", + -12.615447998046875 + ], + [ + "▁arthritis", + -12.615593910217285 + ], + [ + "▁headphones", + -12.615601539611816 + ], + [ + "▁upgrading", + -12.615602493286133 + ], + [ + "▁apparel", + -12.615653038024902 + ], + [ + "▁Haushalt", + -12.61572551727295 + ], + [ + "▁Personally", + -12.615815162658691 + ], + [ + "▁insane", + -12.615950584411621 + ], + [ + "▁fonduri", + -12.616083145141602 + ], + [ + "▁entier", + -12.616239547729492 + ], + [ + "▁Herbst", + -12.616264343261719 + ], + [ + "▁cyclist", + -12.616331100463867 + ], + [ + "▁filmmaker", + -12.616741180419922 + ], + [ + "▁Portuguese", + 
-12.616829872131348 + ], + [ + "▁nominee", + -12.616851806640625 + ], + [ + "▁Yang", + -12.616857528686523 + ], + [ + "▁slate", + -12.616943359375 + ], + [ + "▁entièrement", + -12.616974830627441 + ], + [ + "▁Umgang", + -12.617049217224121 + ], + [ + "shifted", + -12.617135047912598 + ], + [ + "▁défaut", + -12.617138862609863 + ], + [ + "heiz", + -12.617246627807617 + ], + [ + "▁Seal", + -12.617379188537598 + ], + [ + "▁servicing", + -12.617451667785645 + ], + [ + "marketing", + -12.617562294006348 + ], + [ + "▁demandé", + -12.617755889892578 + ], + [ + "TING", + -12.617841720581055 + ], + [ + "▁modifier", + -12.617907524108887 + ], + [ + "lysis", + -12.617966651916504 + ], + [ + "▁suplimentare", + -12.618117332458496 + ], + [ + "OTHER", + -12.618359565734863 + ], + [ + "Graph", + -12.618379592895508 + ], + [ + "▁coincide", + -12.618448257446289 + ], + [ + "governed", + -12.618598937988281 + ], + [ + "▁locking", + -12.618638038635254 + ], + [ + "▁Properties", + -12.618685722351074 + ], + [ + "▁Panama", + -12.61876392364502 + ], + [ + "▁Coupe", + -12.618846893310547 + ], + [ + "songwriter", + -12.618978500366211 + ], + [ + "exhibited", + -12.618988990783691 + ], + [ + "▁semnificativ", + -12.618995666503906 + ], + [ + "▁purchaser", + -12.619004249572754 + ], + [ + "▁puff", + -12.619097709655762 + ], + [ + "Back", + -12.619105339050293 + ], + [ + "fragt", + -12.61919116973877 + ], + [ + "▁deputy", + -12.619362831115723 + ], + [ + "▁revien", + -12.619556427001953 + ], + [ + "▁Christine", + -12.619558334350586 + ], + [ + "▁Cities", + -12.619573593139648 + ], + [ + "▁Charakter", + -12.61961555480957 + ], + [ + "atteindre", + -12.619625091552734 + ], + [ + "▁fou", + -12.619635581970215 + ], + [ + "▁obligatoire", + -12.619643211364746 + ], + [ + "INA", + -12.619791030883789 + ], + [ + "etc", + -12.6198148727417 + ], + [ + "▁newborn", + -12.620091438293457 + ], + [ + "▁explicitly", + -12.620116233825684 + ], + [ + "simplest", + -12.620203018188477 + ], + [ + "▁plateforme", + -12.62023639678955 + ], + [ + "ordinate", + -12.620291709899902 + ], + [ + "displaying", + -12.620346069335938 + ], + [ + "▁messy", + -12.620464324951172 + ], + [ + "gespielt", + -12.620466232299805 + ], + [ + "▁electron", + -12.62061882019043 + ], + [ + "▁Dreh", + -12.620796203613281 + ], + [ + "▁ambient", + -12.620976448059082 + ], + [ + "340", + -12.620979309082031 + ], + [ + "▁directive", + -12.62109375 + ], + [ + "▁Vall", + -12.621152877807617 + ], + [ + "ookie", + -12.621206283569336 + ], + [ + "▁wasted", + -12.621304512023926 + ], + [ + "CIS", + -12.621367454528809 + ], + [ + "lude", + -12.621378898620605 + ], + [ + "rach", + -12.621472358703613 + ], + [ + "▁gasest", + -12.62150764465332 + ], + [ + "▁miros", + -12.62150764465332 + ], + [ + "transforming", + -12.621536254882812 + ], + [ + "▁Milwaukee", + -12.621787071228027 + ], + [ + "▁uncommon", + -12.621789932250977 + ], + [ + "▁tableau", + -12.621841430664062 + ], + [ + "geräte", + -12.621952056884766 + ], + [ + "ophil", + -12.622139930725098 + ], + [ + "▁Jeep", + -12.62220287322998 + ], + [ + "▁wreck", + -12.622422218322754 + ], + [ + "LAND", + -12.622434616088867 + ], + [ + "attach", + -12.622566223144531 + ], + [ + "▁Panther", + -12.622634887695312 + ], + [ + "9:30", + -12.622777938842773 + ], + [ + "▁induce", + -12.622974395751953 + ], + [ + "▁privest", + -12.623006820678711 + ], + [ + "Ident", + -12.623047828674316 + ], + [ + "▁illnesses", + -12.623076438903809 + ], + [ + "▁inhabitants", + -12.623138427734375 + ], + [ + "▁fehlen", + -12.623357772827148 + ], + [ + 
"obtenu", + -12.623391151428223 + ], + [ + "▁gegründet", + -12.623655319213867 + ], + [ + "ARA", + -12.623711585998535 + ], + [ + "3-2", + -12.623835563659668 + ], + [ + "▁milliards", + -12.623968124389648 + ], + [ + "▁Bü", + -12.624001502990723 + ], + [ + "▁angegeben", + -12.624102592468262 + ], + [ + "TUR", + -12.624143600463867 + ], + [ + "▁arab", + -12.624166488647461 + ], + [ + "▁Scientist", + -12.624275207519531 + ], + [ + "▁minut", + -12.624394416809082 + ], + [ + "▁beast", + -12.624481201171875 + ], + [ + "▁accidentally", + -12.624573707580566 + ], + [ + "WN", + -12.624579429626465 + ], + [ + "▁Ralph", + -12.624588966369629 + ], + [ + "hängt", + -12.62462329864502 + ], + [ + "▁Erik", + -12.624639511108398 + ], + [ + "▁différent", + -12.624711990356445 + ], + [ + "▁conformitate", + -12.624842643737793 + ], + [ + "thriving", + -12.624900817871094 + ], + [ + "▁Piece", + -12.625123023986816 + ], + [ + "plasm", + -12.625152587890625 + ], + [ + "▁erwarten", + -12.62520980834961 + ], + [ + "owski", + -12.62523365020752 + ], + [ + "prayed", + -12.625293731689453 + ], + [ + "three", + -12.625542640686035 + ], + [ + "▁soundtrack", + -12.625651359558105 + ], + [ + "guru", + -12.625709533691406 + ], + [ + "▁cracked", + -12.625710487365723 + ], + [ + "▁adh", + -12.625823020935059 + ], + [ + "▁maître", + -12.625834465026855 + ], + [ + "▁Oberfläche", + -12.62585735321045 + ], + [ + "▁crab", + -12.625886917114258 + ], + [ + "▁Foster", + -12.625944137573242 + ], + [ + "▁gemütlich", + -12.626145362854004 + ], + [ + "SIC", + -12.626226425170898 + ], + [ + "ième", + -12.626298904418945 + ], + [ + "▁Few", + -12.626330375671387 + ], + [ + "gérer", + -12.626360893249512 + ], + [ + "2006", + -12.626456260681152 + ], + [ + "cool", + -12.626498222351074 + ], + [ + "▁dispune", + -12.626523971557617 + ], + [ + "recevoir", + -12.626577377319336 + ], + [ + "▁Bak", + -12.626585960388184 + ], + [ + "▁steer", + -12.62659740447998 + ], + [ + "ICS", + -12.626733779907227 + ], + [ + "▁Brett", + -12.626733779907227 + ], + [ + "▁downside", + -12.626751899719238 + ], + [ + "▁residency", + -12.62678050994873 + ], + [ + "important", + -12.626991271972656 + ], + [ + "ubb", + -12.627073287963867 + ], + [ + "mony", + -12.627259254455566 + ], + [ + "▁leasing", + -12.627341270446777 + ], + [ + "▁Gir", + -12.62735366821289 + ], + [ + "▁Biology", + -12.627364158630371 + ], + [ + "▁Colin", + -12.627463340759277 + ], + [ + "▁complicat", + -12.627775192260742 + ], + [ + "▁regroup", + -12.627899169921875 + ], + [ + "SPA", + -12.627950668334961 + ], + [ + "▁Veranstaltungen", + -12.627986907958984 + ], + [ + "convicted", + -12.628019332885742 + ], + [ + "▁Wonderful", + -12.628636360168457 + ], + [ + "züge", + -12.628799438476562 + ], + [ + "yton", + -12.628813743591309 + ], + [ + "EMENT", + -12.628887176513672 + ], + [ + "▁bent", + -12.62893009185791 + ], + [ + "heben", + -12.629231452941895 + ], + [ + "▁Sustainable", + -12.62926959991455 + ], + [ + "▁Newcastle", + -12.629276275634766 + ], + [ + "mother", + -12.629507064819336 + ], + [ + "▁eighth", + -12.629572868347168 + ], + [ + "▁atmosfer", + -12.629582405090332 + ], + [ + "expériment", + -12.629584312438965 + ], + [ + "▁Interest", + -12.629608154296875 + ], + [ + "▁successes", + -12.62964153289795 + ], + [ + "▁preschool", + -12.629802703857422 + ], + [ + "▁Funeral", + -12.629900932312012 + ], + [ + "blast", + -12.630083084106445 + ], + [ + "▁dimensiuni", + -12.630125999450684 + ], + [ + "▁Dow", + -12.630167007446289 + ], + [ + "▁pulp", + -12.63022518157959 + ], + [ + "▁Heather", + 
-12.630356788635254 + ], + [ + "▁erstellen", + -12.63044261932373 + ], + [ + "locating", + -12.630470275878906 + ], + [ + "direct", + -12.630475997924805 + ], + [ + "▁tractor", + -12.630494117736816 + ], + [ + "growing", + -12.630576133728027 + ], + [ + "▁inventor", + -12.630587577819824 + ], + [ + "ASA", + -12.63060188293457 + ], + [ + "insta", + -12.630732536315918 + ], + [ + "yana", + -12.63082504272461 + ], + [ + "▁squash", + -12.630839347839355 + ], + [ + "▁Basketball", + -12.630853652954102 + ], + [ + "AMA", + -12.631041526794434 + ], + [ + "insel", + -12.631093978881836 + ], + [ + "▁Fisch", + -12.631138801574707 + ], + [ + "▁metaphor", + -12.631221771240234 + ], + [ + "TES", + -12.631304740905762 + ], + [ + "▁conduce", + -12.631308555603027 + ], + [ + "stehende", + -12.631370544433594 + ], + [ + "▁FAQ", + -12.631475448608398 + ], + [ + "▁bezeichnet", + -12.631658554077148 + ], + [ + "wendung", + -12.631706237792969 + ], + [ + "▁Commonwealth", + -12.631776809692383 + ], + [ + "▁bait", + -12.631793975830078 + ], + [ + "▁Umsetzung", + -12.631834030151367 + ], + [ + "▁Equi", + -12.632063865661621 + ], + [ + "▁validity", + -12.632109642028809 + ], + [ + "Off", + -12.63222599029541 + ], + [ + "▁produsul", + -12.632314682006836 + ], + [ + "▁sensory", + -12.632363319396973 + ], + [ + "▁Imperial", + -12.632501602172852 + ], + [ + "▁Dick", + -12.632542610168457 + ], + [ + "kampf", + -12.632596969604492 + ], + [ + "▁Arzt", + -12.63267993927002 + ], + [ + "▁Reason", + -12.63267993927002 + ], + [ + "ITS", + -12.63270092010498 + ], + [ + "URL", + -12.632720947265625 + ], + [ + "demonstrates", + -12.632725715637207 + ], + [ + "▁dépend", + -12.632753372192383 + ], + [ + "NAS", + -12.632970809936523 + ], + [ + "▁funcți", + -12.633031845092773 + ], + [ + "▁vulnerability", + -12.633085250854492 + ], + [ + "2.7", + -12.633143424987793 + ], + [ + "layered", + -12.633152961730957 + ], + [ + "escence", + -12.633206367492676 + ], + [ + "▁République", + -12.633346557617188 + ], + [ + "▁Lust", + -12.633377075195312 + ], + [ + "▁sute", + -12.633381843566895 + ], + [ + "▁autonomous", + -12.633661270141602 + ], + [ + "Biserica", + -12.633662223815918 + ], + [ + "▁Chuck", + -12.633749961853027 + ], + [ + "▁protéger", + -12.6339750289917 + ], + [ + "rrell", + -12.634061813354492 + ], + [ + "▁Schaden", + -12.634062767028809 + ], + [ + "prennent", + -12.634100914001465 + ], + [ + "maß", + -12.6343412399292 + ], + [ + "OV", + -12.634453773498535 + ], + [ + "▁Wake", + -12.63450813293457 + ], + [ + "produire", + -12.634635925292969 + ], + [ + "▁Elder", + -12.634749412536621 + ], + [ + "Max", + -12.634839057922363 + ], + [ + "▁Chemistry", + -12.634918212890625 + ], + [ + "▁gourmet", + -12.634918212890625 + ], + [ + "erri", + -12.634967803955078 + ], + [ + "ени", + -12.635085105895996 + ], + [ + "▁Gru", + -12.635147094726562 + ], + [ + "▁vorbit", + -12.635408401489258 + ], + [ + "▁precede", + -12.635455131530762 + ], + [ + "▁randomly", + -12.635489463806152 + ], + [ + "▁efecte", + -12.63563060760498 + ], + [ + "▁calatori", + -12.635668754577637 + ], + [ + "▁Poor", + -12.635765075683594 + ], + [ + "List", + -12.635781288146973 + ], + [ + "▁regula", + -12.635964393615723 + ], + [ + "▁organisé", + -12.636028289794922 + ], + [ + "Div", + -12.636076927185059 + ], + [ + "▁volunteering", + -12.636423110961914 + ], + [ + "▁horr", + -12.636449813842773 + ], + [ + "9.99", + -12.636487007141113 + ], + [ + "▁UPS", + -12.636513710021973 + ], + [ + "▁englez", + -12.63652229309082 + ], + [ + "▁Eden", + -12.636523246765137 + ], + [ + 
"GG", + -12.63659954071045 + ], + [ + "▁typing", + -12.63664722442627 + ], + [ + "Likewise", + -12.636700630187988 + ], + [ + "▁stabilize", + -12.636737823486328 + ], + [ + "physio", + -12.636747360229492 + ], + [ + "ми", + -12.636785507202148 + ], + [ + "▁protagonist", + -12.636808395385742 + ], + [ + "▁velvet", + -12.636812210083008 + ], + [ + "schrank", + -12.636861801147461 + ], + [ + "▁Allah", + -12.63693618774414 + ], + [ + "▁forefront", + -12.636968612670898 + ], + [ + "▁salaries", + -12.637001037597656 + ], + [ + "▁prediction", + -12.637041091918945 + ], + [ + "▁Advent", + -12.637182235717773 + ], + [ + "politik", + -12.637280464172363 + ], + [ + "▁Heimat", + -12.637350082397461 + ], + [ + "ducted", + -12.637380599975586 + ], + [ + "ASH", + -12.637386322021484 + ], + [ + "▁Mold", + -12.637773513793945 + ], + [ + "▁publi", + -12.63784122467041 + ], + [ + "▁Vil", + -12.637892723083496 + ], + [ + "▁stu", + -12.637925148010254 + ], + [ + "INTE", + -12.638032913208008 + ], + [ + "▁fave", + -12.638151168823242 + ], + [ + "▁grounded", + -12.638175010681152 + ], + [ + "▁Anything", + -12.638184547424316 + ], + [ + "vik", + -12.638481140136719 + ], + [ + "Bank", + -12.63853645324707 + ], + [ + "deserved", + -12.638550758361816 + ], + [ + "machen", + -12.63874626159668 + ], + [ + "▁rugged", + -12.638751029968262 + ], + [ + "▁Nest", + -12.638901710510254 + ], + [ + "▁profund", + -12.639043807983398 + ], + [ + "▁quantum", + -12.639067649841309 + ], + [ + "▁funcționa", + -12.639118194580078 + ], + [ + "klu", + -12.639158248901367 + ], + [ + "▁consulter", + -12.63917350769043 + ], + [ + "MED", + -12.639286994934082 + ], + [ + "▁câştig", + -12.639334678649902 + ], + [ + "▁săptămâni", + -12.639334678649902 + ], + [ + "questioned", + -12.639517784118652 + ], + [ + "▁Trop", + -12.639530181884766 + ], + [ + "▁convo", + -12.639533042907715 + ], + [ + "▁sparkling", + -12.639533996582031 + ], + [ + "▁specialise", + -12.639566421508789 + ], + [ + "▁pancake", + -12.639726638793945 + ], + [ + "habitude", + -12.639727592468262 + ], + [ + "phal", + -12.640009880065918 + ], + [ + "▁Roche", + -12.640158653259277 + ], + [ + "▁personalities", + -12.640250205993652 + ], + [ + "▁Venice", + -12.640308380126953 + ], + [ + "▁comerciale", + -12.640379905700684 + ], + [ + "▁wounded", + -12.64075756072998 + ], + [ + "▁oraş", + -12.640864372253418 + ], + [ + "▁Pepper", + -12.641044616699219 + ], + [ + "▁Tourist", + -12.641094207763672 + ], + [ + "▁Mull", + -12.64116382598877 + ], + [ + "▁dignity", + -12.641234397888184 + ], + [ + "▁Fixed", + -12.641291618347168 + ], + [ + "çant", + -12.64130687713623 + ], + [ + "▁spectator", + -12.641402244567871 + ], + [ + "▁somn", + -12.641685485839844 + ], + [ + "▁ständig", + -12.641820907592773 + ], + [ + "▁resilience", + -12.641866683959961 + ], + [ + "▁Malta", + -12.642251014709473 + ], + [ + "▁problemele", + -12.642253875732422 + ], + [ + "▁Martha", + -12.642254829406738 + ], + [ + "▁extern", + -12.642267227172852 + ], + [ + "embre", + -12.642379760742188 + ], + [ + "▁médical", + -12.642526626586914 + ], + [ + "fordern", + -12.64256477355957 + ], + [ + "nji", + -12.642592430114746 + ], + [ + "▁aboard", + -12.642740249633789 + ], + [ + "▁sidewalk", + -12.642759323120117 + ], + [ + "WIN", + -12.642775535583496 + ], + [ + "▁Bobby", + -12.642842292785645 + ], + [ + "▁umfangreiche", + -12.642876625061035 + ], + [ + "leid", + -12.64292049407959 + ], + [ + "▁compens", + -12.642967224121094 + ], + [ + "▁juge", + -12.64299488067627 + ], + [ + "gerufen", + -12.64311408996582 + ], + [ + 
"▁médicament", + -12.643135070800781 + ], + [ + "▁1918", + -12.643155097961426 + ], + [ + "▁blanche", + -12.643163681030273 + ], + [ + "▁pleasing", + -12.643220901489258 + ], + [ + "▁propria", + -12.643471717834473 + ], + [ + "ergebnisse", + -12.643503189086914 + ], + [ + "▁retrouv", + -12.643571853637695 + ], + [ + "urteil", + -12.643592834472656 + ], + [ + "▁Draft", + -12.64361572265625 + ], + [ + "▁concluzi", + -12.643671035766602 + ], + [ + "centralized", + -12.643789291381836 + ], + [ + "▁Hannah", + -12.64382266998291 + ], + [ + "grija", + -12.64392375946045 + ], + [ + "▁Exercise", + -12.643972396850586 + ], + [ + "RAL", + -12.644001960754395 + ], + [ + "creme", + -12.64408016204834 + ], + [ + "High", + -12.644126892089844 + ], + [ + "clude", + -12.644131660461426 + ], + [ + "Considering", + -12.644208908081055 + ], + [ + "▁Guarantee", + -12.644404411315918 + ], + [ + "▁cuptor", + -12.644436836242676 + ], + [ + "ivität", + -12.64468002319336 + ], + [ + "▁Southwest", + -12.644882202148438 + ], + [ + "▁vivant", + -12.644890785217285 + ], + [ + "Your", + -12.64498519897461 + ], + [ + "▁Stunde", + -12.645003318786621 + ], + [ + "▁Ethernet", + -12.645040512084961 + ], + [ + "angebote", + -12.645078659057617 + ], + [ + "▁Sage", + -12.645271301269531 + ], + [ + "▁Boeing", + -12.645295143127441 + ], + [ + "▁$300", + -12.645381927490234 + ], + [ + "2-4", + -12.64546012878418 + ], + [ + "▁nécessit", + -12.645516395568848 + ], + [ + "▁ferment", + -12.645599365234375 + ], + [ + "▁Anmeldung", + -12.64567756652832 + ], + [ + "▁exhausted", + -12.645758628845215 + ], + [ + "▁Schloss", + -12.645772933959961 + ], + [ + "▁Replacement", + -12.645859718322754 + ], + [ + "▁Aussi", + -12.645933151245117 + ], + [ + "jection", + -12.646127700805664 + ], + [ + "978", + -12.64615535736084 + ], + [ + "▁siège", + -12.646258354187012 + ], + [ + "crest", + -12.646310806274414 + ], + [ + "▁jumatate", + -12.646312713623047 + ], + [ + "effizient", + -12.646317481994629 + ], + [ + "▁colaborare", + -12.6464262008667 + ], + [ + "HQ", + -12.646615028381348 + ], + [ + "130", + -12.646695137023926 + ], + [ + "culaire", + -12.646907806396484 + ], + [ + "▁Jamaica", + -12.646952629089355 + ], + [ + "▁cardboard", + -12.64731216430664 + ], + [ + "▁technische", + -12.64731502532959 + ], + [ + "▁cereri", + -12.647507667541504 + ], + [ + "▁contradict", + -12.647570610046387 + ], + [ + "▁irrigation", + -12.647586822509766 + ], + [ + "Nume", + -12.64765739440918 + ], + [ + "▁Bier", + -12.647714614868164 + ], + [ + "▁livrare", + -12.647903442382812 + ], + [ + "▁reservoir", + -12.647906303405762 + ], + [ + "vâr", + -12.648130416870117 + ], + [ + "▁galben", + -12.648213386535645 + ], + [ + "▁Geneva", + -12.648303985595703 + ], + [ + "▁lightning", + -12.648418426513672 + ], + [ + "wished", + -12.64842414855957 + ], + [ + "▁Blind", + -12.648481369018555 + ], + [ + "Interested", + -12.648499488830566 + ], + [ + "▁Primări", + -12.648627281188965 + ], + [ + "anthropo", + -12.648954391479492 + ], + [ + "▁Transaction", + -12.648961067199707 + ], + [ + "▁marcat", + -12.648971557617188 + ], + [ + "▁gelegen", + -12.649077415466309 + ], + [ + "▁contemporain", + -12.649182319641113 + ], + [ + "▁politică", + -12.649182319641113 + ], + [ + "▁1948", + -12.64928150177002 + ], + [ + "▁Mik", + -12.649287223815918 + ], + [ + "▁preţ", + -12.649310111999512 + ], + [ + "moor", + -12.649312973022461 + ], + [ + "ANN", + -12.649432182312012 + ], + [ + "▁constructive", + -12.649454116821289 + ], + [ + "konzept", + -12.649502754211426 + ], + [ + "▁entendu", + 
-12.649511337280273 + ], + [ + "▁Genesis", + -12.649541854858398 + ], + [ + "arzt", + -12.649581909179688 + ], + [ + "▁Allgemein", + -12.64970874786377 + ], + [ + "▁Derby", + -12.649725914001465 + ], + [ + "Class", + -12.649762153625488 + ], + [ + "▁$12", + -12.649770736694336 + ], + [ + "▁Tube", + -12.6498441696167 + ], + [ + "▁Contribu", + -12.649847030639648 + ], + [ + "▁HAVE", + -12.649860382080078 + ], + [ + "▁oxide", + -12.64986515045166 + ], + [ + "▁producator", + -12.649941444396973 + ], + [ + "▁Bench", + -12.650132179260254 + ], + [ + "▁comprehend", + -12.650139808654785 + ], + [ + "▁Damen", + -12.650494575500488 + ], + [ + "▁Garant", + -12.65056037902832 + ], + [ + "▁disappointing", + -12.650614738464355 + ], + [ + "▁réalisée", + -12.650693893432617 + ], + [ + "▁comportement", + -12.65072250366211 + ], + [ + "▁clash", + -12.650753021240234 + ], + [ + "▁curry", + -12.65076732635498 + ], + [ + "▁Lebanon", + -12.65078067779541 + ], + [ + "▁Romaniei", + -12.650784492492676 + ], + [ + "▁reprise", + -12.650840759277344 + ], + [ + "▁perceive", + -12.65095329284668 + ], + [ + "▁weaknesses", + -12.65101146697998 + ], + [ + "▁aminti", + -12.651057243347168 + ], + [ + "▁Concern", + -12.651103973388672 + ], + [ + "shadow", + -12.651310920715332 + ], + [ + "▁basin", + -12.651311874389648 + ], + [ + "moral", + -12.652063369750977 + ], + [ + "▁Hughes", + -12.652101516723633 + ], + [ + "Psych", + -12.652266502380371 + ], + [ + "▁Lieferung", + -12.65227222442627 + ], + [ + "▁serrurier", + -12.652379035949707 + ], + [ + "ussi", + -12.652386665344238 + ], + [ + "▁timpului", + -12.6524658203125 + ], + [ + "üm", + -12.652629852294922 + ], + [ + "▁Vladimir", + -12.652701377868652 + ], + [ + "▁Jag", + -12.65279483795166 + ], + [ + "▁verific", + -12.652849197387695 + ], + [ + "▁Pru", + -12.652894020080566 + ], + [ + "▁Laut", + -12.653285026550293 + ], + [ + "ITA", + -12.653287887573242 + ], + [ + "usually", + -12.653294563293457 + ], + [ + "▁carrière", + -12.65341854095459 + ], + [ + "▁extracted", + -12.653663635253906 + ], + [ + "kultur", + -12.653679847717285 + ], + [ + "öpfe", + -12.653932571411133 + ], + [ + "▁rejection", + -12.654016494750977 + ], + [ + "▁Hydr", + -12.654062271118164 + ], + [ + "▁informaţii", + -12.654098510742188 + ], + [ + "▁tolerate", + -12.654122352600098 + ], + [ + "▁cinéma", + -12.654302597045898 + ], + [ + "traumatic", + -12.654305458068848 + ], + [ + "produkt", + -12.654450416564941 + ], + [ + "▁Contest", + -12.654560089111328 + ], + [ + "lotte", + -12.654570579528809 + ], + [ + "▁Pension", + -12.65461254119873 + ], + [ + "▁Advertising", + -12.654623985290527 + ], + [ + "▁payout", + -12.654772758483887 + ], + [ + "▁Amanda", + -12.65481185913086 + ], + [ + "Elect", + -12.65485668182373 + ], + [ + "▁interiorul", + -12.654996871948242 + ], + [ + "stay", + -12.655348777770996 + ], + [ + "▁feminine", + -12.655352592468262 + ], + [ + "▁întâmplă", + -12.655437469482422 + ], + [ + "▁insult", + -12.65562915802002 + ], + [ + "▁chocolat", + -12.65567398071289 + ], + [ + "▁noroc", + -12.655750274658203 + ], + [ + "▁centr", + -12.655781745910645 + ], + [ + "▁Bühne", + -12.655858039855957 + ], + [ + "mighty", + -12.6558837890625 + ], + [ + "▁Buddha", + -12.655908584594727 + ], + [ + "▁parental", + -12.655997276306152 + ], + [ + "storm", + -12.656451225280762 + ], + [ + "recurring", + -12.6565523147583 + ], + [ + "▁luxe", + -12.656588554382324 + ], + [ + "niște", + -12.656728744506836 + ], + [ + "cuit", + -12.656839370727539 + ], + [ + "▁ausgewählt", + -12.656880378723145 + ], + [ + 
"▁dumb", + -12.657047271728516 + ], + [ + "IPS", + -12.657127380371094 + ], + [ + "▁Thir", + -12.65717887878418 + ], + [ + "Definitely", + -12.657195091247559 + ], + [ + "▁hilarious", + -12.657195091247559 + ], + [ + "▁rainbow", + -12.657231330871582 + ], + [ + "▁Bravo", + -12.657251358032227 + ], + [ + "▁entstanden", + -12.657259941101074 + ], + [ + "itorul", + -12.657269477844238 + ], + [ + "▁prosperity", + -12.657299041748047 + ], + [ + "▁Bord", + -12.657336235046387 + ], + [ + "▁familiei", + -12.657363891601562 + ], + [ + "▁scade", + -12.657425880432129 + ], + [ + "wöhn", + -12.657426834106445 + ], + [ + "▁ingrediente", + -12.65743637084961 + ], + [ + "RAD", + -12.657441139221191 + ], + [ + "▁tăi", + -12.657472610473633 + ], + [ + "bours", + -12.65747356414795 + ], + [ + "ATI", + -12.657540321350098 + ], + [ + "▁Blake", + -12.65761661529541 + ], + [ + "▁Implement", + -12.657712936401367 + ], + [ + "▁Beziehung", + -12.657838821411133 + ], + [ + "finanz", + -12.657953262329102 + ], + [ + "intestin", + -12.658513069152832 + ], + [ + "ließen", + -12.658535957336426 + ], + [ + "▁récent", + -12.658594131469727 + ], + [ + "▁laminate", + -12.658692359924316 + ], + [ + "▁Hör", + -12.65876579284668 + ], + [ + "▁personnalisé", + -12.658804893493652 + ], + [ + "edel", + -12.65890121459961 + ], + [ + "▁advertisement", + -12.658902168273926 + ], + [ + "▁pinterest", + -12.658921241760254 + ], + [ + "185", + -12.659058570861816 + ], + [ + "identité", + -12.65938949584961 + ], + [ + "▁Brick", + -12.659408569335938 + ], + [ + "Glu", + -12.65941047668457 + ], + [ + "▁attendant", + -12.659571647644043 + ], + [ + "▁Flip", + -12.659614562988281 + ], + [ + "attracting", + -12.659662246704102 + ], + [ + "functional", + -12.659703254699707 + ], + [ + "conceived", + -12.659772872924805 + ], + [ + "▁summarize", + -12.659773826599121 + ], + [ + "adjusting", + -12.659809112548828 + ], + [ + "CAL", + -12.660041809082031 + ], + [ + "▁Operating", + -12.660076141357422 + ], + [ + "zzi", + -12.66008472442627 + ], + [ + "▁Rover", + -12.6603364944458 + ], + [ + "▁versuchen", + -12.6603364944458 + ], + [ + "▁articulate", + -12.660600662231445 + ], + [ + "▁privé", + -12.660614013671875 + ], + [ + "▁consequent", + -12.660663604736328 + ], + [ + "EAT", + -12.660690307617188 + ], + [ + "▁Marsh", + -12.660696983337402 + ], + [ + "▁teenage", + -12.660717964172363 + ], + [ + "▁Renaissance", + -12.660740852355957 + ], + [ + "▁furnizor", + -12.660883903503418 + ], + [ + "▁Desert", + -12.660894393920898 + ], + [ + "unicipiului", + -12.66104793548584 + ], + [ + "▁ulterior", + -12.661065101623535 + ], + [ + "▁Ebene", + -12.661280632019043 + ], + [ + "▁monkey", + -12.661351203918457 + ], + [ + "▁enclosed", + -12.661389350891113 + ], + [ + "▁profitability", + -12.66139030456543 + ], + [ + "▁Evolution", + -12.661628723144531 + ], + [ + "▁adica", + -12.661670684814453 + ], + [ + "▁Structure", + -12.661709785461426 + ], + [ + "▁primer", + -12.661761283874512 + ], + [ + "▁asigură", + -12.662001609802246 + ], + [ + "▁Manuel", + -12.662220001220703 + ], + [ + "polita", + -12.662267684936523 + ], + [ + "▁Portable", + -12.662286758422852 + ], + [ + "fecți", + -12.662413597106934 + ], + [ + "▁obscure", + -12.662424087524414 + ], + [ + "▁Atlas", + -12.662436485290527 + ], + [ + "fährt", + -12.662679672241211 + ], + [ + "▁clinician", + -12.662837982177734 + ], + [ + "fuhr", + -12.66310977935791 + ], + [ + "▁matériaux", + -12.663113594055176 + ], + [ + "écrire", + -12.663142204284668 + ], + [ + "▁suspicious", + -12.6632080078125 + ], + [ + "pore", 
+ -12.663263320922852 + ], + [ + "▁outdated", + -12.663304328918457 + ], + [ + "▁Mädchen", + -12.663328170776367 + ], + [ + "rcis", + -12.663420677185059 + ], + [ + "nicht", + -12.663463592529297 + ], + [ + "holding", + -12.663561820983887 + ], + [ + "▁heavier", + -12.66366195678711 + ], + [ + "ezimal", + -12.663960456848145 + ], + [ + "▁silicone", + -12.66397476196289 + ], + [ + "punerea", + -12.664108276367188 + ], + [ + "▁begeistert", + -12.664237976074219 + ], + [ + "2004", + -12.664283752441406 + ], + [ + "▁predecessor", + -12.664299011230469 + ], + [ + "▁overlap", + -12.664369583129883 + ], + [ + "▁digging", + -12.664376258850098 + ], + [ + "▁Upgrade", + -12.664407730102539 + ], + [ + "▁interesat", + -12.664543151855469 + ], + [ + "▁spinach", + -12.66456127166748 + ], + [ + "▁politice", + -12.664626121520996 + ], + [ + "activity", + -12.664831161499023 + ], + [ + "▁Rating", + -12.66484546661377 + ], + [ + "▁serrure", + -12.664846420288086 + ], + [ + "▁tânăr", + -12.664959907531738 + ], + [ + "▁WHAT", + -12.664970397949219 + ], + [ + "▁railroad", + -12.664989471435547 + ], + [ + "▁avid", + -12.665081024169922 + ], + [ + "▁Sophie", + -12.665084838867188 + ], + [ + "preferably", + -12.665173530578613 + ], + [ + "▁Fourth", + -12.665431022644043 + ], + [ + "kommenden", + -12.665452003479004 + ], + [ + "QUI", + -12.665478706359863 + ], + [ + "lohn", + -12.665505409240723 + ], + [ + "▁promis", + -12.665611267089844 + ], + [ + "▁shrub", + -12.665621757507324 + ], + [ + "nummer", + -12.66579818725586 + ], + [ + "▁dinosaur", + -12.665922164916992 + ], + [ + "▁Lucky", + -12.665937423706055 + ], + [ + "relates", + -12.666038513183594 + ], + [ + "▁FROM", + -12.666049003601074 + ], + [ + "▁racism", + -12.66610336303711 + ], + [ + "physical", + -12.66611385345459 + ], + [ + "alcoholic", + -12.666119575500488 + ], + [ + "▁reef", + -12.666126251220703 + ], + [ + "▁centru", + -12.66618824005127 + ], + [ + "université", + -12.66622257232666 + ], + [ + "▁visage", + -12.666232109069824 + ], + [ + "ităţile", + -12.666253089904785 + ], + [ + "▁Gent", + -12.666345596313477 + ], + [ + "zugeben", + -12.66643238067627 + ], + [ + "▁paradise", + -12.66646957397461 + ], + [ + "fuel", + -12.666505813598633 + ], + [ + "ografie", + -12.666568756103516 + ], + [ + "▁TIP", + -12.666730880737305 + ], + [ + "schreibung", + -12.66683292388916 + ], + [ + "▁bark", + -12.666840553283691 + ], + [ + "accéder", + -12.666895866394043 + ], + [ + "▁contamination", + -12.666937828063965 + ], + [ + "▁swelling", + -12.666950225830078 + ], + [ + "▁optimistic", + -12.666974067687988 + ], + [ + "▁differential", + -12.667015075683594 + ], + [ + "▁Arad", + -12.667030334472656 + ], + [ + "toxins", + -12.667075157165527 + ], + [ + "▁übernehmen", + -12.667091369628906 + ], + [ + "▁anime", + -12.667143821716309 + ], + [ + "actuel", + -12.667462348937988 + ], + [ + "▁bientôt", + -12.667525291442871 + ], + [ + "▁Patio", + -12.66761302947998 + ], + [ + "▁baisse", + -12.667630195617676 + ], + [ + "▁sprint", + -12.66773796081543 + ], + [ + "▁bilden", + -12.66811466217041 + ], + [ + "VAL", + -12.668132781982422 + ], + [ + "▁réflexion", + -12.668220520019531 + ], + [ + "hopping", + -12.668242454528809 + ], + [ + "genesis", + -12.66834545135498 + ], + [ + "achtet", + -12.668435096740723 + ], + [ + "▁chinois", + -12.668525695800781 + ], + [ + "▁dezvoltat", + -12.668795585632324 + ], + [ + "arguably", + -12.66884708404541 + ], + [ + "▁Protocol", + -12.66884708404541 + ], + [ + "▁Sterling", + -12.668862342834473 + ], + [ + "▁Cave", + -12.668975830078125 
+ ], + [ + "▁Condo", + -12.66921615600586 + ], + [ + "▁erhöht", + -12.669235229492188 + ], + [ + "typische", + -12.669416427612305 + ], + [ + "merged", + -12.669439315795898 + ], + [ + "▁accumulation", + -12.669560432434082 + ], + [ + "sicherlich", + -12.669569969177246 + ], + [ + "kW", + -12.669620513916016 + ], + [ + "▁schriftlich", + -12.669757843017578 + ], + [ + "▁Vorteile", + -12.669918060302734 + ], + [ + "▁Northeast", + -12.669922828674316 + ], + [ + "frunt", + -12.669941902160645 + ], + [ + "istik", + -12.670003890991211 + ], + [ + "erster", + -12.670035362243652 + ], + [ + "▁Assistance", + -12.670150756835938 + ], + [ + "▁Fantastic", + -12.670150756835938 + ], + [ + "▁bărbat", + -12.670150756835938 + ], + [ + "▁Grinding", + -12.670151710510254 + ], + [ + "▁diffusion", + -12.670161247253418 + ], + [ + "▁vreun", + -12.670331954956055 + ], + [ + "▁Butler", + -12.670342445373535 + ], + [ + "▁Cherry", + -12.670352935791016 + ], + [ + "▁visualization", + -12.670540809631348 + ], + [ + "Paket", + -12.670572280883789 + ], + [ + "blin", + -12.670619010925293 + ], + [ + "▁cadou", + -12.670705795288086 + ], + [ + "▁Celtic", + -12.670754432678223 + ], + [ + "alegerea", + -12.670894622802734 + ], + [ + "▁Dorf", + -12.671035766601562 + ], + [ + "▁Noir", + -12.671185493469238 + ], + [ + "payment", + -12.67126750946045 + ], + [ + "▁Caroline", + -12.671334266662598 + ], + [ + "▁Berry", + -12.671359062194824 + ], + [ + "▁professeur", + -12.67147445678711 + ], + [ + "▁gratuitement", + -12.671503067016602 + ], + [ + "Suntem", + -12.671523094177246 + ], + [ + "IAN", + -12.671738624572754 + ], + [ + "▁fingerprint", + -12.671780586242676 + ], + [ + "▁controversy", + -12.671781539916992 + ], + [ + "▁fled", + -12.671875 + ], + [ + "▁Pokémon", + -12.67210865020752 + ], + [ + "excluding", + -12.67211627960205 + ], + [ + "▁friction", + -12.672161102294922 + ], + [ + "therapie", + -12.67225456237793 + ], + [ + "/7", + -12.672398567199707 + ], + [ + "▁designation", + -12.672442436218262 + ], + [ + "▁Belgia", + -12.672704696655273 + ], + [ + "▁cursuri", + -12.672836303710938 + ], + [ + "model", + -12.672840118408203 + ], + [ + "super", + -12.672987937927246 + ], + [ + "▁réduit", + -12.673028945922852 + ], + [ + "▁implicit", + -12.673177719116211 + ], + [ + "athlon", + -12.673227310180664 + ], + [ + "anniversaire", + -12.673416137695312 + ], + [ + "▁teaspoon", + -12.673416137695312 + ], + [ + "▁corrosion", + -12.673418998718262 + ], + [ + "▁überzeugt", + -12.673418998718262 + ], + [ + "▁flawless", + -12.673421859741211 + ], + [ + "▁vegetation", + -12.673477172851562 + ], + [ + "▁iarna", + -12.673507690429688 + ], + [ + "▁psychologist", + -12.673591613769531 + ], + [ + "hora", + -12.673625946044922 + ], + [ + "gab", + -12.67387580871582 + ], + [ + "▁soothing", + -12.674084663391113 + ], + [ + "▁stew", + -12.674141883850098 + ], + [ + "▁wager", + -12.674172401428223 + ], + [ + "▁tinere", + -12.674322128295898 + ], + [ + "▁baut", + -12.674323081970215 + ], + [ + "ecunoscut", + -12.674352645874023 + ], + [ + "gearbeitet", + -12.674422264099121 + ], + [ + "▁functi", + -12.674480438232422 + ], + [ + "▁dürfte", + -12.674724578857422 + ], + [ + "▁média", + -12.674724578857422 + ], + [ + "▁campanie", + -12.67475700378418 + ], + [ + "▁Distribu", + -12.674817085266113 + ], + [ + "▁mentoring", + -12.674959182739258 + ], + [ + "▁criz", + -12.675020217895508 + ], + [ + "findest", + -12.675056457519531 + ], + [ + "▁Vasile", + -12.675058364868164 + ], + [ + "▁compassionate", + -12.675115585327148 + ], + [ + "▁Tudor", + 
-12.675140380859375 + ], + [ + "▁flare", + -12.675260543823242 + ], + [ + "intreaga", + -12.675283432006836 + ], + [ + "gaz", + -12.6753511428833 + ], + [ + "▁porcelain", + -12.675379753112793 + ], + [ + "▁expedition", + -12.675520896911621 + ], + [ + "▁Azure", + -12.67553997039795 + ], + [ + "räumen", + -12.675549507141113 + ], + [ + "eiro", + -12.675567626953125 + ], + [ + "variante", + -12.675804138183594 + ], + [ + "▁Lucy", + -12.675825119018555 + ], + [ + "ôle", + -12.675909996032715 + ], + [ + "▁revenir", + -12.67602252960205 + ], + [ + "▁stained", + -12.676040649414062 + ], + [ + "▁falsch", + -12.676166534423828 + ], + [ + "▁incorpor", + -12.676166534423828 + ], + [ + "merkt", + -12.676187515258789 + ], + [ + "▁achten", + -12.6762056350708 + ], + [ + "▁hello", + -12.676290512084961 + ], + [ + "selben", + -12.676422119140625 + ], + [ + "ifty", + -12.676525115966797 + ], + [ + "▁Feier", + -12.67653751373291 + ], + [ + "1.000", + -12.676557540893555 + ], + [ + "▁Patch", + -12.676583290100098 + ], + [ + "peptid", + -12.676846504211426 + ], + [ + "▁recovering", + -12.676898956298828 + ], + [ + "Symptom", + -12.677020072937012 + ], + [ + "▁Auckland", + -12.677020072937012 + ], + [ + "▁retrieve", + -12.677328109741211 + ], + [ + "▁800-", + -12.67733097076416 + ], + [ + "schlagen", + -12.677473068237305 + ], + [ + "▁lourd", + -12.677562713623047 + ], + [ + "▁Purple", + -12.67760181427002 + ], + [ + "▁mittels", + -12.677776336669922 + ], + [ + "▁Düsseldorf", + -12.67800521850586 + ], + [ + "▁getaway", + -12.67803955078125 + ], + [ + "▁Cedar", + -12.678061485290527 + ], + [ + "▁Function", + -12.678241729736328 + ], + [ + "▁bizarre", + -12.67833423614502 + ], + [ + "4.3", + -12.67849063873291 + ], + [ + "▁fundraiser", + -12.67866325378418 + ], + [ + "geared", + -12.678780555725098 + ], + [ + "▁privée", + -12.678781509399414 + ], + [ + "▁Bonjour", + -12.67894458770752 + ], + [ + "Gar", + -12.67895793914795 + ], + [ + "▁Lloyd", + -12.678991317749023 + ], + [ + "▁Reinigung", + -12.6790132522583 + ], + [ + "▁Geno", + -12.679155349731445 + ], + [ + "▁Teilnahme", + -12.67919635772705 + ], + [ + "pian", + -12.679362297058105 + ], + [ + "sammelt", + -12.679368019104004 + ], + [ + "Pad", + -12.679755210876465 + ], + [ + "▁Troy", + -12.67976188659668 + ], + [ + "HG", + -12.679943084716797 + ], + [ + "▁klein", + -12.679962158203125 + ], + [ + "▁lettuce", + -12.679978370666504 + ], + [ + "▁patrimoine", + -12.679978370666504 + ], + [ + "▁cooker", + -12.680055618286133 + ], + [ + "▁accesibil", + -12.680137634277344 + ], + [ + "▁Spray", + -12.680201530456543 + ], + [ + "▁negotiation", + -12.68047046661377 + ], + [ + "▁jewel", + -12.680480003356934 + ], + [ + "▁dynamique", + -12.68063735961914 + ], + [ + "▁plastique", + -12.68067741394043 + ], + [ + "▁Limo", + -12.680682182312012 + ], + [ + "▁Funk", + -12.68069076538086 + ], + [ + "▁omului", + -12.680702209472656 + ], + [ + "title", + -12.680768013000488 + ], + [ + "curved", + -12.68082046508789 + ], + [ + "▁Lemon", + -12.680851936340332 + ], + [ + "förder", + -12.680891990661621 + ], + [ + "▁bewusst", + -12.681112289428711 + ], + [ + "inevitably", + -12.681296348571777 + ], + [ + "▁derivative", + -12.681297302246094 + ], + [ + "2:30", + -12.681300163269043 + ], + [ + "komfort", + -12.681305885314941 + ], + [ + "original", + -12.681480407714844 + ], + [ + "sanct", + -12.681540489196777 + ], + [ + "▁matte", + -12.6815767288208 + ], + [ + "empêche", + -12.681628227233887 + ], + [ + "▁jucător", + -12.681634902954102 + ], + [ + "▁attentive", + -12.681640625 + ], 
+ [ + "▁recunoscut", + -12.681674003601074 + ], + [ + "▁Brush", + -12.68167495727539 + ], + [ + "▁consommateur", + -12.68183422088623 + ], + [ + "érence", + -12.682063102722168 + ], + [ + "typical", + -12.682084083557129 + ], + [ + "strategie", + -12.682205200195312 + ], + [ + "Effekt", + -12.682290077209473 + ], + [ + "▁Alcohol", + -12.682292938232422 + ], + [ + "oji", + -12.682333946228027 + ], + [ + "▁ruler", + -12.682357788085938 + ], + [ + "▁Norwegian", + -12.682615280151367 + ], + [ + "▁PlayStation", + -12.682615280151367 + ], + [ + "▁Hook", + -12.682747840881348 + ], + [ + "▁viewpoint", + -12.682759284973145 + ], + [ + "THER", + -12.682841300964355 + ], + [ + "420", + -12.682888984680176 + ], + [ + "Consequently", + -12.68294620513916 + ], + [ + "▁entschieden", + -12.68294620513916 + ], + [ + "▁Trag", + -12.68295669555664 + ], + [ + "▁Dawn", + -12.683003425598145 + ], + [ + "▁fuss", + -12.68301773071289 + ], + [ + "*****", + -12.683040618896484 + ], + [ + "▁Bullet", + -12.683140754699707 + ], + [ + "CAM", + -12.683155059814453 + ], + [ + "▁wonderfully", + -12.683201789855957 + ], + [ + "▁parlamentar", + -12.683263778686523 + ], + [ + "▁geometric", + -12.683307647705078 + ], + [ + "talement", + -12.683321952819824 + ], + [ + "/2018", + -12.683577537536621 + ], + [ + "▁oversight", + -12.684036254882812 + ], + [ + "kindly", + -12.684080123901367 + ], + [ + "therm", + -12.684305191040039 + ], + [ + "▁treaba", + -12.6846342086792 + ], + [ + "▁Trim", + -12.68471908569336 + ], + [ + "▁intelege", + -12.684842109680176 + ], + [ + "cino", + -12.685032844543457 + ], + [ + "▁straw", + -12.68508529663086 + ], + [ + "Tru", + -12.685251235961914 + ], + [ + "▁Television", + -12.68530559539795 + ], + [ + "Trader", + -12.68538761138916 + ], + [ + "▁Passion", + -12.685394287109375 + ], + [ + "rescu", + -12.685622215270996 + ], + [ + "Nicol", + -12.685635566711426 + ], + [ + "luj", + -12.685805320739746 + ], + [ + "▁mijloace", + -12.685921669006348 + ], + [ + "▁Removal", + -12.685922622680664 + ], + [ + "▁1944", + -12.686034202575684 + ], + [ + "▁shortcut", + -12.686159133911133 + ], + [ + "▁Fett", + -12.686258316040039 + ], + [ + "largement", + -12.686371803283691 + ], + [ + "▁altern", + -12.686446189880371 + ], + [ + "▁cleansing", + -12.686562538146973 + ], + [ + "▁Qatar", + -12.686692237854004 + ], + [ + "▁Ceci", + -12.686826705932617 + ], + [ + "▁weave", + -12.686848640441895 + ], + [ + "schmerz", + -12.686878204345703 + ], + [ + "▁dots", + -12.686888694763184 + ], + [ + "Télécharger", + -12.68691635131836 + ], + [ + "▁Conduct", + -12.686944007873535 + ], + [ + "bekannten", + -12.687325477600098 + ], + [ + "▁lungime", + -12.687344551086426 + ], + [ + "▁Ferrari", + -12.687390327453613 + ], + [ + "▁totusi", + -12.687605857849121 + ], + [ + "▁Anniversary", + -12.687911033630371 + ], + [ + "▁wilderness", + -12.687911987304688 + ], + [ + "▁Christoph", + -12.687939643859863 + ], + [ + "▁Nikon", + -12.688112258911133 + ], + [ + "▁Digi", + -12.68818473815918 + ], + [ + "▁Blumen", + -12.688190460205078 + ], + [ + "▁altul", + -12.688249588012695 + ], + [ + "▁Parish", + -12.688321113586426 + ], + [ + "czy", + -12.688393592834473 + ], + [ + "▁temper", + -12.688401222229004 + ], + [ + "▁Powder", + -12.688576698303223 + ], + [ + "▁Arnold", + -12.688577651977539 + ], + [ + "capacitatea", + -12.688687324523926 + ], + [ + "nderungen", + -12.688787460327148 + ], + [ + "▁utilization", + -12.688859939575195 + ], + [ + "99%", + -12.688942909240723 + ], + [ + "▁Fear", + -12.689099311828613 + ], + [ + "JE", + 
-12.689165115356445 + ], + [ + "▁Simpson", + -12.689239501953125 + ], + [ + "▁Podcast", + -12.68924617767334 + ], + [ + "▁Cardinal", + -12.689290046691895 + ], + [ + "▁Distribution", + -12.689315795898438 + ], + [ + "▁Drawing", + -12.689373970031738 + ], + [ + "▁tint", + -12.689412117004395 + ], + [ + "▁hran", + -12.68945598602295 + ], + [ + "▁Slide", + -12.68960189819336 + ], + [ + "▁Vertrauen", + -12.689654350280762 + ], + [ + "cloth", + -12.68971061706543 + ], + [ + "▁redirect", + -12.689728736877441 + ], + [ + "126", + -12.689842224121094 + ], + [ + "▁constituie", + -12.68985652923584 + ], + [ + "Mai", + -12.690070152282715 + ], + [ + "▁idol", + -12.690088272094727 + ], + [ + "▁tehnice", + -12.690163612365723 + ], + [ + "dip", + -12.690393447875977 + ], + [ + "▁soldier", + -12.690400123596191 + ], + [ + "▁Ordin", + -12.690409660339355 + ], + [ + "wobe", + -12.69050407409668 + ], + [ + "▁Brent", + -12.69058895111084 + ], + [ + "▁Sudan", + -12.690597534179688 + ], + [ + "6000", + -12.690619468688965 + ], + [ + "turism", + -12.690689086914062 + ], + [ + "▁Rocky", + -12.690744400024414 + ], + [ + "naming", + -12.69092082977295 + ], + [ + "▁entrepreneurial", + -12.690925598144531 + ], + [ + "hearted", + -12.690962791442871 + ], + [ + "ayne", + -12.69097900390625 + ], + [ + "▁hover", + -12.691081047058105 + ], + [ + "▁skull", + -12.691279411315918 + ], + [ + "▁tribal", + -12.691407203674316 + ], + [ + "▁crafting", + -12.691543579101562 + ], + [ + "bewertungen", + -12.691569328308105 + ], + [ + "▁decizii", + -12.691625595092773 + ], + [ + "obwohl", + -12.691655158996582 + ], + [ + "▁compromised", + -12.691875457763672 + ], + [ + "▁quelqu", + -12.69195556640625 + ], + [ + "▁Hilton", + -12.692075729370117 + ], + [ + "▁maturity", + -12.692095756530762 + ], + [ + "gelesen", + -12.692100524902344 + ], + [ + "▁harbor", + -12.69210433959961 + ], + [ + "▁maple", + -12.692326545715332 + ], + [ + "▁développ", + -12.6924409866333 + ], + [ + "▁Nobody", + -12.692517280578613 + ], + [ + "équipement", + -12.69255542755127 + ], + [ + "121", + -12.69274616241455 + ], + [ + "140", + -12.692827224731445 + ], + [ + "▁artistes", + -12.692914962768555 + ], + [ + "▁depune", + -12.692941665649414 + ], + [ + "▁erase", + -12.693129539489746 + ], + [ + "▁erzählt", + -12.693197250366211 + ], + [ + "▁Hyundai", + -12.69323444366455 + ], + [ + "▁impairment", + -12.69323444366455 + ], + [ + "▁conving", + -12.693279266357422 + ], + [ + "chasing", + -12.693426132202148 + ], + [ + "▁Claus", + -12.693438529968262 + ], + [ + "▁adaptée", + -12.693687438964844 + ], + [ + "▁Raz", + -12.693740844726562 + ], + [ + "rugs", + -12.693796157836914 + ], + [ + "▁urme", + -12.69387435913086 + ], + [ + "Nonetheless", + -12.693902015686035 + ], + [ + "▁Cemetery", + -12.693902969360352 + ], + [ + "umps", + -12.693906784057617 + ], + [ + "ACA", + -12.694003105163574 + ], + [ + "▁perioade", + -12.694235801696777 + ], + [ + "▁slogan", + -12.694263458251953 + ], + [ + "▁downward", + -12.694441795349121 + ], + [ + "eidig", + -12.694446563720703 + ], + [ + "RAC", + -12.69444751739502 + ], + [ + "▁inaugur", + -12.694496154785156 + ], + [ + "се", + -12.694588661193848 + ], + [ + "▁înțeleg", + -12.694608688354492 + ], + [ + "▁hopeful", + -12.694635391235352 + ], + [ + "▁customization", + -12.6946439743042 + ], + [ + "▁prisoners", + -12.694708824157715 + ], + [ + "▁Rau", + -12.695270538330078 + ], + [ + "▁Pitt", + -12.695389747619629 + ], + [ + "ături", + -12.695542335510254 + ], + [ + "▁metabolic", + -12.695842742919922 + ], + [ + "▁Zach", + 
-12.695868492126465 + ], + [ + "▁umfassende", + -12.695914268493652 + ], + [ + "▁révél", + -12.695950508117676 + ], + [ + "131", + -12.696052551269531 + ], + [ + "ismului", + -12.696062088012695 + ], + [ + "▁Sac", + -12.696076393127441 + ], + [ + "efficacité", + -12.69624137878418 + ], + [ + "cruci", + -12.69625473022461 + ], + [ + "bisschen", + -12.69632339477539 + ], + [ + "▁Oster", + -12.696324348449707 + ], + [ + "lowered", + -12.6964693069458 + ], + [ + "▁Ausland", + -12.69674015045166 + ], + [ + "▁Pub", + -12.696794509887695 + ], + [ + "▁Marseille", + -12.696925163269043 + ], + [ + "▁Charter", + -12.696959495544434 + ], + [ + "howcasing", + -12.697010040283203 + ], + [ + "risti", + -12.6971435546875 + ], + [ + "▁thermostat", + -12.697151184082031 + ], + [ + "▁Clin", + -12.697233200073242 + ], + [ + "▁entsteht", + -12.697246551513672 + ], + [ + "Choosing", + -12.697248458862305 + ], + [ + "▁Schmerz", + -12.697284698486328 + ], + [ + "▁Till", + -12.697307586669922 + ], + [ + "▁Polo", + -12.697399139404297 + ], + [ + "▁proceduri", + -12.697402000427246 + ], + [ + "▁Believe", + -12.697444915771484 + ], + [ + "▁playful", + -12.697514533996582 + ], + [ + "▁verändert", + -12.697588920593262 + ], + [ + "▁pairing", + -12.697654724121094 + ], + [ + "MAG", + -12.69784927368164 + ], + [ + "leiste", + -12.69788932800293 + ], + [ + "▁testimonial", + -12.697916030883789 + ], + [ + "▁Economy", + -12.697916984558105 + ], + [ + "▁Wechsel", + -12.697918891906738 + ], + [ + "wirkung", + -12.69801139831543 + ], + [ + "▁exceeded", + -12.698030471801758 + ], + [ + "South", + -12.698067665100098 + ], + [ + "create", + -12.698221206665039 + ], + [ + "▁davantage", + -12.698270797729492 + ], + [ + "Log", + -12.69831657409668 + ], + [ + "▁irregular", + -12.698587417602539 + ], + [ + "VB", + -12.698691368103027 + ], + [ + "▁Rö", + -12.698741912841797 + ], + [ + "▁intreb", + -12.698881149291992 + ], + [ + "▁penser", + -12.698920249938965 + ], + [ + "▁déclaré", + -12.698923110961914 + ], + [ + "▁Tommy", + -12.699026107788086 + ], + [ + "2,500", + -12.699163436889648 + ], + [ + "▁Uganda", + -12.699260711669922 + ], + [ + "contacting", + -12.699445724487305 + ], + [ + "▁apreciat", + -12.699485778808594 + ], + [ + "▁beginnen", + -12.6995210647583 + ], + [ + "▁Gain", + -12.699580192565918 + ], + [ + "Office", + -12.69969654083252 + ], + [ + "ermittlung", + -12.699710845947266 + ], + [ + "▁Admission", + -12.699727058410645 + ], + [ + "▁Earl", + -12.6997652053833 + ], + [ + "▁Aviation", + -12.699833869934082 + ], + [ + "▁apologize", + -12.699929237365723 + ], + [ + "▁enclosure", + -12.699929237365723 + ], + [ + "▁Lack", + -12.69998836517334 + ], + [ + "wife", + -12.699995994567871 + ], + [ + "▁rotating", + -12.700016975402832 + ], + [ + "▁hergestellt", + -12.700020790100098 + ], + [ + "▁repository", + -12.70002269744873 + ], + [ + "TK", + -12.700149536132812 + ], + [ + "▁lectur", + -12.700190544128418 + ], + [ + "▁reflex", + -12.700286865234375 + ], + [ + "▁Harmon", + -12.700401306152344 + ], + [ + "▁vrem", + -12.700479507446289 + ], + [ + "▁Strange", + -12.70055103302002 + ], + [ + "▁champagne", + -12.700615882873535 + ], + [ + "▁oscil", + -12.700647354125977 + ], + [ + "sensitive", + -12.700677871704102 + ], + [ + "▁Sheriff", + -12.700841903686523 + ], + [ + "PRES", + -12.700956344604492 + ], + [ + "▁vow", + -12.70123291015625 + ], + [ + "▁dioxide", + -12.701276779174805 + ], + [ + "ен", + -12.701374053955078 + ], + [ + "▁corpului", + -12.701376914978027 + ], + [ + "▁prevăzut", + -12.70160961151123 + ], + [ + "India", 
+ -12.701827049255371 + ], + [ + "hausse", + -12.70189094543457 + ], + [ + "▁clienți", + -12.701957702636719 + ], + [ + "▁entour", + -12.70202350616455 + ], + [ + "▁Sharp", + -12.70209789276123 + ], + [ + "▁teatru", + -12.702285766601562 + ], + [ + "▁Grow", + -12.702327728271484 + ], + [ + "▁caravan", + -12.70234203338623 + ], + [ + "▁sieben", + -12.702420234680176 + ], + [ + "▁cunosc", + -12.702502250671387 + ], + [ + "Bereichen", + -12.702527046203613 + ], + [ + "▁Benutzer", + -12.702619552612305 + ], + [ + "▁Ethiopia", + -12.702619552612305 + ], + [ + "▁Physics", + -12.702619552612305 + ], + [ + "preserving", + -12.70263385772705 + ], + [ + "ал", + -12.702712059020996 + ], + [ + "▁aerial", + -12.70272159576416 + ], + [ + "▁nouvel", + -12.702741622924805 + ], + [ + "▁stamped", + -12.702954292297363 + ], + [ + "▁inaugural", + -12.702970504760742 + ], + [ + "▁medicinal", + -12.702999114990234 + ], + [ + "Quite", + -12.703028678894043 + ], + [ + "accumulated", + -12.703165054321289 + ], + [ + "register", + -12.703271865844727 + ], + [ + "▁Falcon", + -12.70327377319336 + ], + [ + "▁boiling", + -12.703301429748535 + ], + [ + "▁advertised", + -12.703339576721191 + ], + [ + "collect", + -12.703362464904785 + ], + [ + "albeit", + -12.703418731689453 + ], + [ + "▁Organis", + -12.703473091125488 + ], + [ + "luate", + -12.703536033630371 + ], + [ + "▁préféré", + -12.70369815826416 + ], + [ + "▁frumoasa", + -12.703968048095703 + ], + [ + "▁truc", + -12.704092979431152 + ], + [ + "▁Fä", + -12.704154968261719 + ], + [ + "▁dome", + -12.704180717468262 + ], + [ + "Mobile", + -12.704191207885742 + ], + [ + "▁redeem", + -12.704198837280273 + ], + [ + "IONS", + -12.70422077178955 + ], + [ + "▁țări", + -12.704235076904297 + ], + [ + "▁singular", + -12.704385757446289 + ], + [ + "▁livestock", + -12.704425811767578 + ], + [ + "▁démont", + -12.704427719116211 + ], + [ + "clés", + -12.704527854919434 + ], + [ + "music", + -12.704561233520508 + ], + [ + "▁explicat", + -12.704602241516113 + ], + [ + "▁Fellowship", + -12.704703330993652 + ], + [ + "▁electrode", + -12.704760551452637 + ], + [ + "129", + -12.704977035522461 + ], + [ + "▁Rescue", + -12.704983711242676 + ], + [ + "▁Rocket", + -12.705159187316895 + ], + [ + "OSE", + -12.705301284790039 + ], + [ + "▁Sacramento", + -12.705317497253418 + ], + [ + "▁Haiti", + -12.705357551574707 + ], + [ + "▁Erwachsene", + -12.705390930175781 + ], + [ + "▁Terminal", + -12.70541000366211 + ], + [ + "URI", + -12.705453872680664 + ], + [ + "▁Rural", + -12.70549201965332 + ], + [ + "▁achizitiona", + -12.70552921295166 + ], + [ + "▁identifiable", + -12.705655097961426 + ], + [ + "▁gekauft", + -12.705659866333008 + ], + [ + "▁improper", + -12.705673217773438 + ], + [ + "lashes", + -12.705751419067383 + ], + [ + "vorbim", + -12.705751419067383 + ], + [ + "▁hinder", + -12.705862045288086 + ], + [ + "▁Grenz", + -12.705878257751465 + ], + [ + "Nav", + -12.705955505371094 + ], + [ + "alimentation", + -12.705972671508789 + ], + [ + "▁Cottage", + -12.7059965133667 + ], + [ + "▁nötig", + -12.706197738647461 + ], + [ + "▁cuprinde", + -12.70622444152832 + ], + [ + "session", + -12.706256866455078 + ], + [ + "▁Separat", + -12.70634651184082 + ], + [ + "▁besuchen", + -12.706672668457031 + ], + [ + "▁noodles", + -12.706684112548828 + ], + [ + "▁ballet", + -12.706696510314941 + ], + [ + "WG", + -12.706731796264648 + ], + [ + "▁Duty", + -12.706871032714844 + ], + [ + "▁porc", + -12.706944465637207 + ], + [ + "▁booster", + -12.70698356628418 + ], + [ + "galerie", + -12.707056045532227 + ], + [ 
+ "▁Lance", + -12.707119941711426 + ], + [ + "▁déplac", + -12.707178115844727 + ], + [ + "▁rugby", + -12.707240104675293 + ], + [ + "▁upholstery", + -12.707345962524414 + ], + [ + "▁bustl", + -12.70736312866211 + ], + [ + "▁Dealer", + -12.70740032196045 + ], + [ + "▁genome", + -12.707414627075195 + ], + [ + "▁citizenship", + -12.707466125488281 + ], + [ + "rora", + -12.707515716552734 + ], + [ + "ARK", + -12.707776069641113 + ], + [ + "▁Semi", + -12.707820892333984 + ], + [ + "▁Improvement", + -12.707892417907715 + ], + [ + "▁negru", + -12.708142280578613 + ], + [ + "▁Bruxelles", + -12.70836067199707 + ], + [ + "flüge", + -12.70837688446045 + ], + [ + "▁Technique", + -12.708392143249512 + ], + [ + "▁Obst", + -12.708413124084473 + ], + [ + "2020", + -12.708560943603516 + ], + [ + "▁gek", + -12.708593368530273 + ], + [ + "▁drepturi", + -12.708600997924805 + ], + [ + "▁Logan", + -12.708605766296387 + ], + [ + "gelöst", + -12.70863151550293 + ], + [ + "▁grandparents", + -12.708702087402344 + ], + [ + "phin", + -12.708950996398926 + ], + [ + "▁dwell", + -12.709037780761719 + ], + [ + "▁Nobel", + -12.709151268005371 + ], + [ + "dial", + -12.70927906036377 + ], + [ + "▁spontan", + -12.709344863891602 + ], + [ + "advancing", + -12.70937728881836 + ], + [ + "starring", + -12.70947551727295 + ], + [ + "▁astea", + -12.709498405456543 + ], + [ + "igueur", + -12.709638595581055 + ], + [ + "▁Ancient", + -12.709700584411621 + ], + [ + "filter", + -12.70971965789795 + ], + [ + "Doar", + -12.709758758544922 + ], + [ + "▁Workers", + -12.709759712219238 + ], + [ + "Certainly", + -12.709906578063965 + ], + [ + "▁commencé", + -12.709914207458496 + ], + [ + "▁zipper", + -12.710001945495605 + ], + [ + "▁Selection", + -12.710070610046387 + ], + [ + "▁succ", + -12.710280418395996 + ], + [ + "headed", + -12.710345268249512 + ], + [ + "RIA", + -12.710350036621094 + ], + [ + "▁papa", + -12.710366249084473 + ], + [ + "▁profesionale", + -12.710394859313965 + ], + [ + "▁Zeichen", + -12.710402488708496 + ], + [ + "▁artisans", + -12.710489273071289 + ], + [ + "▁Geist", + -12.710585594177246 + ], + [ + "practic", + -12.710741996765137 + ], + [ + "▁ministrul", + -12.71076488494873 + ], + [ + "viens", + -12.710912704467773 + ], + [ + "prezintă", + -12.710919380187988 + ], + [ + "Integrated", + -12.710981369018555 + ], + [ + "▁rooftop", + -12.710989952087402 + ], + [ + "▁successor", + -12.710991859436035 + ], + [ + "OTO", + -12.711012840270996 + ], + [ + "liés", + -12.711027145385742 + ], + [ + "▁Diver", + -12.71121597290039 + ], + [ + "Specifically", + -12.711297988891602 + ], + [ + "▁calibr", + -12.711301803588867 + ], + [ + "KK", + -12.711341857910156 + ], + [ + "▁défense", + -12.711414337158203 + ], + [ + "▁english", + -12.711414337158203 + ], + [ + "verbrauch", + -12.711418151855469 + ], + [ + "▁attire", + -12.711433410644531 + ], + [ + "▁Recipe", + -12.711441040039062 + ], + [ + "équilibre", + -12.711457252502441 + ], + [ + "accumul", + -12.71157169342041 + ], + [ + "▁financement", + -12.71169662475586 + ], + [ + "rij", + -12.711962699890137 + ], + [ + "▁prince", + -12.711999893188477 + ], + [ + "▁préparer", + -12.7120361328125 + ], + [ + "surviving", + -12.71211051940918 + ], + [ + "operation", + -12.712233543395996 + ], + [ + "▁judet", + -12.71242904663086 + ], + [ + "▁Verantwortung", + -12.712433815002441 + ], + [ + "▁Vinyl", + -12.712536811828613 + ], + [ + "DEN", + -12.712584495544434 + ], + [ + "▁Tail", + -12.712589263916016 + ], + [ + "yearly", + -12.712590217590332 + ], + [ + "▁comisi", + -12.712613105773926 + 
], + [ + "lava", + -12.71261978149414 + ], + [ + "▁succession", + -12.71264934539795 + ], + [ + "▁Whisk", + -12.713030815124512 + ], + [ + "▁precizat", + -12.713096618652344 + ], + [ + "▁unmittelbar", + -12.713117599487305 + ], + [ + "ICH", + -12.713139533996582 + ], + [ + "▁atteint", + -12.713199615478516 + ], + [ + "▁hometown", + -12.713268280029297 + ], + [ + "▁Zip", + -12.71328353881836 + ], + [ + "▁Weekly", + -12.71336841583252 + ], + [ + "▁crashes", + -12.713401794433594 + ], + [ + "▁Turbo", + -12.713421821594238 + ], + [ + "▁susține", + -12.713468551635742 + ], + [ + "▁Venus", + -12.713587760925293 + ], + [ + "▁finalement", + -12.713595390319824 + ], + [ + "rewarded", + -12.713693618774414 + ], + [ + "▁principau", + -12.713899612426758 + ], + [ + "▁régional", + -12.713979721069336 + ], + [ + "▁1958", + -12.714178085327148 + ], + [ + "▁Musical", + -12.714189529418945 + ], + [ + "▁stylist", + -12.714251518249512 + ], + [ + "cetate", + -12.714282035827637 + ], + [ + "gorge", + -12.71433162689209 + ], + [ + "▁espresso", + -12.714493751525879 + ], + [ + "überall", + -12.714576721191406 + ], + [ + "▁NHL", + -12.714593887329102 + ], + [ + "▁Dock", + -12.71472454071045 + ], + [ + "▁mosquito", + -12.71481704711914 + ], + [ + "▁forthcoming", + -12.714852333068848 + ], + [ + "▁Visitors", + -12.714881896972656 + ], + [ + "kro", + -12.714882850646973 + ], + [ + "_______", + -12.715048789978027 + ], + [ + "▁STEM", + -12.715105056762695 + ], + [ + "9.5", + -12.715141296386719 + ], + [ + "accompagne", + -12.715177536010742 + ], + [ + "▁Trick", + -12.715202331542969 + ], + [ + "▁endorsement", + -12.715400695800781 + ], + [ + "▁amplifier", + -12.715498924255371 + ], + [ + "▁malicious", + -12.715499877929688 + ], + [ + "▁roam", + -12.71552848815918 + ], + [ + "▁kennt", + -12.715635299682617 + ], + [ + "Connor", + -12.715690612792969 + ], + [ + "▁dysfunction", + -12.715828895568848 + ], + [ + "▁zuverlässig", + -12.715840339660645 + ], + [ + "▁corpul", + -12.71595573425293 + ], + [ + "▁boule", + -12.715967178344727 + ], + [ + "otti", + -12.715991973876953 + ], + [ + "440", + -12.716050148010254 + ], + [ + "▁mimic", + -12.716056823730469 + ], + [ + "farben", + -12.716129302978516 + ], + [ + "▁Wagner", + -12.716214179992676 + ], + [ + "Kom", + -12.7162504196167 + ], + [ + "▁miteinander", + -12.716269493103027 + ], + [ + "▁String", + -12.716296195983887 + ], + [ + "▁Ellis", + -12.716313362121582 + ], + [ + "▁Perth", + -12.716337203979492 + ], + [ + "▁temperatura", + -12.716381072998047 + ], + [ + "umbling", + -12.716397285461426 + ], + [ + "▁Medizin", + -12.716554641723633 + ], + [ + "▁KY", + -12.71660327911377 + ], + [ + "apei", + -12.716642379760742 + ], + [ + "counter", + -12.716647148132324 + ], + [ + "strich", + -12.71665096282959 + ], + [ + "▁Între", + -12.716652870178223 + ], + [ + "▁Cliff", + -12.716785430908203 + ], + [ + "▁foreclosure", + -12.716864585876465 + ], + [ + "................", + -12.716878890991211 + ], + [ + "Clearly", + -12.717028617858887 + ], + [ + "AJ", + -12.717057228088379 + ], + [ + "ndro", + -12.717180252075195 + ], + [ + "▁Arsenal", + -12.717206001281738 + ], + [ + "▁Recherche", + -12.717216491699219 + ], + [ + "Guests", + -12.717225074768066 + ], + [ + "▁besucht", + -12.717242240905762 + ], + [ + "wissen", + -12.717266082763672 + ], + [ + "fekt", + -12.717414855957031 + ], + [ + "hottest", + -12.717414855957031 + ], + [ + "▁Tomorrow", + -12.717547416687012 + ], + [ + "▁Signature", + -12.717557907104492 + ], + [ + "127", + -12.717583656311035 + ], + [ + "▁competence", + 
-12.71766471862793 + ], + [ + "Einige", + -12.717686653137207 + ], + [ + "patented", + -12.71782112121582 + ], + [ + "▁Exhibition", + -12.717889785766602 + ], + [ + "▁verbessern", + -12.717889785766602 + ], + [ + "▁Garcia", + -12.718043327331543 + ], + [ + "▁inquire", + -12.718278884887695 + ], + [ + "coping", + -12.718353271484375 + ], + [ + "▁linguri", + -12.71842098236084 + ], + [ + "▁trivia", + -12.718433380126953 + ], + [ + "▁începutul", + -12.718489646911621 + ], + [ + "▁parteneriat", + -12.7186279296875 + ], + [ + "tagen", + -12.718636512756348 + ], + [ + "▁engagé", + -12.718916893005371 + ], + [ + "▁chalk", + -12.718944549560547 + ], + [ + "▁fashionable", + -12.719416618347168 + ], + [ + "0.8", + -12.719635009765625 + ], + [ + "▁sticker", + -12.719751358032227 + ], + [ + "▁desperately", + -12.719765663146973 + ], + [ + "höhe", + -12.719903945922852 + ], + [ + "▁fericire", + -12.71994400024414 + ], + [ + "évaluation", + -12.719948768615723 + ], + [ + "▁Divide", + -12.719959259033203 + ], + [ + "▁indulge", + -12.719979286193848 + ], + [ + "fett", + -12.720014572143555 + ], + [ + "▁communal", + -12.72017765045166 + ], + [ + "▁mindful", + -12.720187187194824 + ], + [ + "dauert", + -12.720192909240723 + ], + [ + "▁veille", + -12.720263481140137 + ], + [ + "▁vér", + -12.720330238342285 + ], + [ + "▁Baseball", + -12.720373153686523 + ], + [ + "▁succeeded", + -12.720418930053711 + ], + [ + "▁Terrasse", + -12.720420837402344 + ], + [ + "irgend", + -12.720500946044922 + ], + [ + "▁Munich", + -12.720556259155273 + ], + [ + "weisung", + -12.72067642211914 + ], + [ + "metre", + -12.720916748046875 + ], + [ + "▁Raymond", + -12.721015930175781 + ], + [ + "▁chute", + -12.72102165222168 + ], + [ + "▁Accounting", + -12.721075057983398 + ], + [ + "▁pantry", + -12.721122741699219 + ], + [ + "▁underwater", + -12.721181869506836 + ], + [ + "ARI", + -12.721222877502441 + ], + [ + "lowed", + -12.721245765686035 + ], + [ + "numbered", + -12.721430778503418 + ], + [ + "REN", + -12.72148609161377 + ], + [ + "▁industriel", + -12.721489906311035 + ], + [ + "wäh", + -12.721531867980957 + ], + [ + "kenntnis", + -12.721631050109863 + ], + [ + "▁govern", + -12.721635818481445 + ], + [ + "strained", + -12.721661567687988 + ], + [ + "▁rythme", + -12.721689224243164 + ], + [ + "ин", + -12.72169303894043 + ], + [ + "▁burner", + -12.721723556518555 + ], + [ + "▁zählt", + -12.721790313720703 + ], + [ + "▁verte", + -12.721883773803711 + ], + [ + "▁Catalog", + -12.721896171569824 + ], + [ + "▁Bruno", + -12.721988677978516 + ], + [ + "0.7", + -12.721997261047363 + ], + [ + "▁litig", + -12.72207260131836 + ], + [ + "▁greet", + -12.722129821777344 + ], + [ + "▁stool", + -12.722393035888672 + ], + [ + "gression", + -12.722457885742188 + ], + [ + "▁Klassen", + -12.722491264343262 + ], + [ + "▁neon", + -12.722661018371582 + ], + [ + "▁Tall", + -12.722734451293945 + ], + [ + "▁satin", + -12.722895622253418 + ], + [ + "▁Bend", + -12.722915649414062 + ], + [ + "▁soluţi", + -12.723077774047852 + ], + [ + "▁styl", + -12.723196983337402 + ], + [ + "▁Siri", + -12.723358154296875 + ], + [ + "▁Sanders", + -12.723464012145996 + ], + [ + "▁spike", + -12.723499298095703 + ], + [ + "pinion", + -12.723854064941406 + ], + [ + "▁purta", + -12.724122047424316 + ], + [ + "CARE", + -12.724224090576172 + ], + [ + "▁creştere", + -12.724311828613281 + ], + [ + "▁fry", + -12.724374771118164 + ], + [ + "▁Schweizer", + -12.724400520324707 + ], + [ + "durchschnittlich", + -12.724411010742188 + ], + [ + "celaşi", + -12.724446296691895 + ], + [ + 
"▁deceased", + -12.724474906921387 + ], + [ + "▁Nerv", + -12.724668502807617 + ], + [ + "2-2", + -12.7247314453125 + ], + [ + "▁Stahl", + -12.724753379821777 + ], + [ + "▁workload", + -12.724834442138672 + ], + [ + "erhielt", + -12.724984169006348 + ], + [ + "▁hypothesis", + -12.725103378295898 + ], + [ + "bib", + -12.725110054016113 + ], + [ + "▁ţară", + -12.725116729736328 + ], + [ + "vaut", + -12.725122451782227 + ], + [ + "prehensi", + -12.725184440612793 + ], + [ + "▁Offering", + -12.725188255310059 + ], + [ + "▁dislike", + -12.725252151489258 + ], + [ + "▁firewall", + -12.725252151489258 + ], + [ + "mania", + -12.725255966186523 + ], + [ + "195", + -12.725278854370117 + ], + [ + "▁Champ", + -12.725324630737305 + ], + [ + "▁philosophical", + -12.725343704223633 + ], + [ + "länge", + -12.72553539276123 + ], + [ + "advisable", + -12.725785255432129 + ], + [ + "negotiating", + -12.725785255432129 + ], + [ + "Providing", + -12.725791931152344 + ], + [ + "▁1959", + -12.725801467895508 + ], + [ + "▁spyware", + -12.725831031799316 + ], + [ + "sharing", + -12.725837707519531 + ], + [ + "▁prévoi", + -12.725905418395996 + ], + [ + "▁jaune", + -12.7260103225708 + ], + [ + "schoss", + -12.726028442382812 + ], + [ + "▁obține", + -12.726129531860352 + ], + [ + "▁attraktiv", + -12.726489067077637 + ], + [ + "gemeinschaft", + -12.7265043258667 + ], + [ + "BV", + -12.726505279541016 + ], + [ + "Top", + -12.726617813110352 + ], + [ + "▁Sharon", + -12.726625442504883 + ], + [ + "bok", + -12.726675033569336 + ], + [ + "▁résist", + -12.726811408996582 + ], + [ + "Napoca", + -12.726822853088379 + ], + [ + "▁Uncategorized", + -12.726898193359375 + ], + [ + "▁trustee", + -12.726936340332031 + ], + [ + "▁remise", + -12.727025985717773 + ], + [ + "▁aştept", + -12.727165222167969 + ], + [ + "▁allergic", + -12.727206230163574 + ], + [ + "èvre", + -12.727211952209473 + ], + [ + "LAR", + -12.72734546661377 + ], + [ + "1.9", + -12.727497100830078 + ], + [ + "▁outbreak", + -12.727520942687988 + ], + [ + "▁trocken", + -12.727568626403809 + ], + [ + "▁laughter", + -12.727724075317383 + ], + [ + "▁Attend", + -12.727785110473633 + ], + [ + "jung", + -12.727822303771973 + ], + [ + "racking", + -12.727934837341309 + ], + [ + "ORS", + -12.728178024291992 + ], + [ + "▁rasp", + -12.728527069091797 + ], + [ + "VF", + -12.728551864624023 + ], + [ + "▁Tamil", + -12.72860050201416 + ], + [ + "124", + -12.728602409362793 + ], + [ + "▁Fiber", + -12.728714942932129 + ], + [ + "▁launches", + -12.728755950927734 + ], + [ + "Post", + -12.728777885437012 + ], + [ + "▁bucks", + -12.729072570800781 + ], + [ + "▁Nicholas", + -12.72923755645752 + ], + [ + "▁cărți", + -12.729255676269531 + ], + [ + "emper", + -12.729681968688965 + ], + [ + "Point", + -12.729689598083496 + ], + [ + "fraction", + -12.729753494262695 + ], + [ + "▁BIG", + -12.729804992675781 + ], + [ + "▁lancer", + -12.729829788208008 + ], + [ + "EVER", + -12.72997760772705 + ], + [ + "trend", + -12.73000431060791 + ], + [ + "▁remerci", + -12.730076789855957 + ], + [ + "▁prevalent", + -12.730168342590332 + ], + [ + "370", + -12.730290412902832 + ], + [ + "▁bestellen", + -12.730327606201172 + ], + [ + "Buying", + -12.730341911315918 + ], + [ + "▁Aufbau", + -12.730416297912598 + ], + [ + "▁opini", + -12.730416297912598 + ], + [ + "▁regiune", + -12.730663299560547 + ], + [ + "▁martial", + -12.73069953918457 + ], + [ + "LK", + -12.730754852294922 + ], + [ + "▁Feuerwehr", + -12.730974197387695 + ], + [ + "screened", + -12.73099422454834 + ], + [ + "Blue", + -12.73120403289795 + ], + 
[ + "▁analize", + -12.731237411499023 + ], + [ + "▁lure", + -12.731247901916504 + ], + [ + "▁internally", + -12.731283187866211 + ], + [ + "father", + -12.731322288513184 + ], + [ + "▁diplomatic", + -12.731343269348145 + ], + [ + "▁Activity", + -12.731464385986328 + ], + [ + "▁cliqu", + -12.73156452178955 + ], + [ + "▁adequately", + -12.731809616088867 + ], + [ + "▁Elena", + -12.73183822631836 + ], + [ + "▁Citizens", + -12.732102394104004 + ], + [ + "▁Länge", + -12.732295989990234 + ], + [ + "▁respectful", + -12.732300758361816 + ], + [ + "▁zuständig", + -12.73248291015625 + ], + [ + "▁réception", + -12.732584953308105 + ], + [ + "▁headset", + -12.732686996459961 + ], + [ + "▁awhile", + -12.732705116271973 + ], + [ + "▁speculation", + -12.732707977294922 + ], + [ + "▁WhatsApp", + -12.732714653015137 + ], + [ + "▁tulbur", + -12.732731819152832 + ], + [ + "▁voluntar", + -12.732758522033691 + ], + [ + "▁Studium", + -12.73277473449707 + ], + [ + "▁protector", + -12.732833862304688 + ], + [ + "▁Wrap", + -12.732840538024902 + ], + [ + "staat", + -12.732951164245605 + ], + [ + "▁judgement", + -12.733396530151367 + ], + [ + "unauthorized", + -12.733397483825684 + ], + [ + "Rank", + -12.733487129211426 + ], + [ + "pră", + -12.733503341674805 + ], + [ + "▁Paw", + -12.733627319335938 + ], + [ + "▁relev", + -12.733664512634277 + ], + [ + "▁arbor", + -12.733830451965332 + ], + [ + "stretches", + -12.733885765075684 + ], + [ + "nook", + -12.733906745910645 + ], + [ + "▁Tunis", + -12.733907699584961 + ], + [ + "▁shocking", + -12.734036445617676 + ], + [ + "▁oppress", + -12.73414421081543 + ], + [ + "10.1", + -12.7341890335083 + ], + [ + "▁ERP", + -12.734310150146484 + ], + [ + "wolle", + -12.7343168258667 + ], + [ + "▁Catch", + -12.734352111816406 + ], + [ + "Plus", + -12.734368324279785 + ], + [ + "Market", + -12.734445571899414 + ], + [ + "scribed", + -12.734536170959473 + ], + [ + "▁décoration", + -12.734594345092773 + ], + [ + "▁chanson", + -12.734607696533203 + ], + [ + "▁Midwest", + -12.734763145446777 + ], + [ + "▁Spencer", + -12.734795570373535 + ], + [ + "▁societate", + -12.734807968139648 + ], + [ + "curated", + -12.735087394714355 + ], + [ + "▁canopy", + -12.735135078430176 + ], + [ + "ат", + -12.735142707824707 + ], + [ + "Sig", + -12.73514461517334 + ], + [ + "▁witch", + -12.735153198242188 + ], + [ + "envoyer", + -12.735175132751465 + ], + [ + "▁$1,000", + -12.735230445861816 + ], + [ + "▁peripheral", + -12.735482215881348 + ], + [ + "nnouncing", + -12.735509872436523 + ], + [ + "perfect", + -12.73559284210205 + ], + [ + "▁warten", + -12.735748291015625 + ], + [ + "ELI", + -12.735822677612305 + ], + [ + "▁recap", + -12.735912322998047 + ], + [ + "dün", + -12.735978126525879 + ], + [ + "▁Spre", + -12.736029624938965 + ], + [ + "2005", + -12.736153602600098 + ], + [ + "▁réparation", + -12.73617935180664 + ], + [ + "▁extraordinar", + -12.736196517944336 + ], + [ + "existence", + -12.736337661743164 + ], + [ + "oanele", + -12.736467361450195 + ], + [ + "▁reprezentant", + -12.736474990844727 + ], + [ + "▁attacker", + -12.736490249633789 + ], + [ + "▁Berliner", + -12.73657512664795 + ], + [ + "experience", + -12.736649513244629 + ], + [ + "▁Monde", + -12.736800193786621 + ], + [ + "intervention", + -12.736956596374512 + ], + [ + "▁Einstellung", + -12.736977577209473 + ], + [ + "▁Valentin", + -12.737011909484863 + ], + [ + "▁zonă", + -12.737200736999512 + ], + [ + "occupant", + -12.737223625183105 + ], + [ + "▁mobilis", + -12.737260818481445 + ], + [ + "metall", + -12.737261772155762 + ], + [ + 
"evangeli", + -12.73729133605957 + ], + [ + "Adding", + -12.737326622009277 + ], + [ + "▁Roland", + -12.73735237121582 + ], + [ + "ENCE", + -12.737462043762207 + ], + [ + "▁Insul", + -12.737478256225586 + ], + [ + "tellement", + -12.737497329711914 + ], + [ + "▁Blogger", + -12.737499237060547 + ], + [ + "▁prote", + -12.737504005432129 + ], + [ + "▁Minimum", + -12.737574577331543 + ], + [ + "▁termic", + -12.737624168395996 + ], + [ + "▁Sachen", + -12.737859725952148 + ], + [ + "▁Maschinen", + -12.737863540649414 + ], + [ + "▁Dragnea", + -12.737926483154297 + ], + [ + "▁overtime", + -12.737967491149902 + ], + [ + "calorie", + -12.737968444824219 + ], + [ + "▁jene", + -12.73814868927002 + ], + [ + "▁Satan", + -12.738153457641602 + ], + [ + "▁currencies", + -12.73827075958252 + ], + [ + "▁echipamente", + -12.738329887390137 + ], + [ + "▁forgiveness", + -12.73843765258789 + ], + [ + "▁Pause", + -12.738479614257812 + ], + [ + "▁Witt", + -12.738529205322266 + ], + [ + "STOR", + -12.738632202148438 + ], + [ + "▁actuelle", + -12.738703727722168 + ], + [ + "▁Ard", + -12.738853454589844 + ], + [ + "▁Constitu", + -12.738880157470703 + ], + [ + "ghan", + -12.7388916015625 + ], + [ + "Make", + -12.738906860351562 + ], + [ + "▁garne", + -12.738947868347168 + ], + [ + "▁Hitler", + -12.738956451416016 + ], + [ + "▁rubbish", + -12.738973617553711 + ], + [ + "6.0", + -12.739025115966797 + ], + [ + "▁Giving", + -12.739177703857422 + ], + [ + "▁persever", + -12.73937702178955 + ], + [ + "wirk", + -12.7394380569458 + ], + [ + "liegenden", + -12.739455223083496 + ], + [ + "▁morceau", + -12.73946762084961 + ], + [ + "atty", + -12.73961067199707 + ], + [ + "▁Quebec", + -12.739669799804688 + ], + [ + "harmonie", + -12.739705085754395 + ], + [ + "Nummer", + -12.739721298217773 + ], + [ + "▁splendid", + -12.739747047424316 + ], + [ + "▁halfway", + -12.739808082580566 + ], + [ + "▁periodically", + -12.740071296691895 + ], + [ + "▁Ländern", + -12.740077018737793 + ], + [ + "▁AAA", + -12.740083694458008 + ], + [ + "▁Frost", + -12.740198135375977 + ], + [ + "▁heroin", + -12.740289688110352 + ], + [ + "▁bucurie", + -12.7403564453125 + ], + [ + "▁Pradesh", + -12.74036693572998 + ], + [ + "zusetzen", + -12.740405082702637 + ], + [ + "raising", + -12.740425109863281 + ], + [ + "▁furniz", + -12.740567207336426 + ], + [ + "▁convi", + -12.740575790405273 + ], + [ + "pictured", + -12.740911483764648 + ], + [ + "▁inadequate", + -12.741065979003906 + ], + [ + "▁aprobat", + -12.741069793701172 + ], + [ + "▁exercising", + -12.741083145141602 + ], + [ + "▁faisai", + -12.741138458251953 + ], + [ + "▁prosecution", + -12.741231918334961 + ], + [ + "380", + -12.741402626037598 + ], + [ + "▁Potential", + -12.74145793914795 + ], + [ + "▁Magi", + -12.741523742675781 + ], + [ + "From", + -12.741752624511719 + ], + [ + "batterie", + -12.74181079864502 + ], + [ + "▁poisson", + -12.74185562133789 + ], + [ + "▁Probe", + -12.741950988769531 + ], + [ + "▁pastel", + -12.741998672485352 + ], + [ + "▁tracked", + -12.742410659790039 + ], + [ + "▁advertisers", + -12.74251937866211 + ], + [ + "adevar", + -12.742537498474121 + ], + [ + "ит", + -12.742776870727539 + ], + [ + "▁Herren", + -12.742815971374512 + ], + [ + "EAM", + -12.742820739746094 + ], + [ + "▁scooter", + -12.742822647094727 + ], + [ + "requesting", + -12.742841720581055 + ], + [ + "dynamis", + -12.742949485778809 + ], + [ + "▁dahin", + -12.742961883544922 + ], + [ + "▁tweak", + -12.743061065673828 + ], + [ + "▁hail", + -12.743101119995117 + ], + [ + "▁întotdeauna", + -12.743160247802734 + 
], + [ + "▁Publikum", + -12.743167877197266 + ], + [ + "▁panoramic", + -12.743167877197266 + ], + [ + "▁PRE", + -12.74331283569336 + ], + [ + "▁thrill", + -12.743361473083496 + ], + [ + "Open", + -12.743366241455078 + ], + [ + "▁Layer", + -12.74345588684082 + ], + [ + "▁Bosch", + -12.743459701538086 + ], + [ + "hull", + -12.743511199951172 + ], + [ + "▁născut", + -12.743518829345703 + ], + [ + "tausch", + -12.743559837341309 + ], + [ + "▁autoturism", + -12.743577003479004 + ], + [ + "▁crank", + -12.743701934814453 + ], + [ + "CLE", + -12.743735313415527 + ], + [ + "▁Frederick", + -12.74386978149414 + ], + [ + "mog", + -12.743887901306152 + ], + [ + "behalten", + -12.74396800994873 + ], + [ + "▁aunt", + -12.744050979614258 + ], + [ + "▁Triple", + -12.744141578674316 + ], + [ + "▁Ark", + -12.744242668151855 + ], + [ + "AUD", + -12.744440078735352 + ], + [ + "▁Candy", + -12.744505882263184 + ], + [ + "tama", + -12.744515419006348 + ], + [ + "▁Evaluation", + -12.744571685791016 + ], + [ + "▁Memphis", + -12.744571685791016 + ], + [ + "▁stellar", + -12.74457836151123 + ], + [ + "▁fabricat", + -12.744632720947266 + ], + [ + "▁terminat", + -12.744868278503418 + ], + [ + "▁domnul", + -12.744913101196289 + ], + [ + "▁keynote", + -12.744925498962402 + ], + [ + "▁dentistry", + -12.744951248168945 + ], + [ + "rift", + -12.745052337646484 + ], + [ + "▁bilan", + -12.745119094848633 + ], + [ + "2.6", + -12.745125770568848 + ], + [ + "undergoing", + -12.745210647583008 + ], + [ + "▁pseudo", + -12.745274543762207 + ], + [ + "▁maşin", + -12.745280265808105 + ], + [ + "▁munte", + -12.74555492401123 + ], + [ + "▁VW", + -12.745932579040527 + ], + [ + "▁Rab", + -12.74593448638916 + ], + [ + "▁sustine", + -12.745972633361816 + ], + [ + "▁Bedingungen", + -12.745977401733398 + ], + [ + "▁învăţ", + -12.745980262756348 + ], + [ + "▁pyramid", + -12.745983123779297 + ], + [ + "HEN", + -12.746020317077637 + ], + [ + "▁citrus", + -12.746058464050293 + ], + [ + "Code", + -12.746064186096191 + ], + [ + "▁Beginning", + -12.746164321899414 + ], + [ + "▁discourse", + -12.746249198913574 + ], + [ + "▁miercuri", + -12.746329307556152 + ], + [ + "▁producător", + -12.74637508392334 + ], + [ + "▁analys", + -12.746397972106934 + ], + [ + "▁Evan", + -12.7467041015625 + ], + [ + "138", + -12.746987342834473 + ], + [ + "▁târziu", + -12.74703311920166 + ], + [ + "▁relocation", + -12.747052192687988 + ], + [ + "decizia", + -12.74708080291748 + ], + [ + "tollen", + -12.74714183807373 + ], + [ + "TRO", + -12.747180938720703 + ], + [ + "▁runway", + -12.74719524383545 + ], + [ + "illet", + -12.747270584106445 + ], + [ + "▁serveur", + -12.747387886047363 + ], + [ + "bezogen", + -12.747427940368652 + ], + [ + "▁believers", + -12.747668266296387 + ], + [ + "determined", + -12.747711181640625 + ], + [ + "▁reinforced", + -12.74791431427002 + ], + [ + "▁wedge", + -12.748006820678711 + ], + [ + "methyl", + -12.74807357788086 + ], + [ + "MES", + -12.748188018798828 + ], + [ + "vpn", + -12.748374938964844 + ], + [ + "▁consta", + -12.74837875366211 + ], + [ + "▁vizitat", + -12.748420715332031 + ], + [ + "modul", + -12.748455047607422 + ], + [ + "▁routing", + -12.748528480529785 + ], + [ + "tempted", + -12.748540878295898 + ], + [ + "URS", + -12.748785018920898 + ], + [ + "apprentissage", + -12.748795509338379 + ], + [ + "▁Hungary", + -12.748796463012695 + ], + [ + "Previously", + -12.74880313873291 + ], + [ + "▁translator", + -12.748804092407227 + ], + [ + "▁resonate", + -12.748830795288086 + ], + [ + "201", + -12.748851776123047 + ], + [ + "3-0", + 
-12.749029159545898 + ], + [ + "▁reunion", + -12.749090194702148 + ], + [ + "▁palate", + -12.749096870422363 + ], + [ + "0.4", + -12.749171257019043 + ], + [ + "reheat", + -12.74924373626709 + ], + [ + "Roo", + -12.749261856079102 + ], + [ + "200,000", + -12.74940013885498 + ], + [ + "Bro", + -12.749431610107422 + ], + [ + "▁estimation", + -12.749468803405762 + ], + [ + "schneiden", + -12.749499320983887 + ], + [ + "▁Inspired", + -12.749506950378418 + ], + [ + "▁lottery", + -12.749539375305176 + ], + [ + "▁Friedrich", + -12.749887466430664 + ], + [ + "FIT", + -12.749913215637207 + ], + [ + "0.6", + -12.7499418258667 + ], + [ + "▁dagegen", + -12.74997615814209 + ], + [ + "▁Reb", + -12.750115394592285 + ], + [ + "▁Eigenschaften", + -12.75020694732666 + ], + [ + "▁molding", + -12.750361442565918 + ], + [ + "▁Harper", + -12.750548362731934 + ], + [ + "verwaltung", + -12.75055980682373 + ], + [ + "▁Schlüssel", + -12.75055980682373 + ], + [ + "▁desfasura", + -12.75055980682373 + ], + [ + "▁rencontrer", + -12.75055980682373 + ], + [ + "▁negoci", + -12.750581741333008 + ], + [ + "▁Leading", + -12.750615119934082 + ], + [ + "▁necesita", + -12.750652313232422 + ], + [ + "▁biking", + -12.750683784484863 + ], + [ + "▁jointly", + -12.75069808959961 + ], + [ + "▁crush", + -12.750702857971191 + ], + [ + "Vol", + -12.750768661499023 + ], + [ + "▁ebay", + -12.750836372375488 + ], + [ + "▁Shri", + -12.750991821289062 + ], + [ + "▁AMD", + -12.751029968261719 + ], + [ + "FG", + -12.751032829284668 + ], + [ + "Argentin", + -12.75120735168457 + ], + [ + "▁incercat", + -12.751431465148926 + ], + [ + "▁tidy", + -12.751628875732422 + ], + [ + "▁provoqu", + -12.751635551452637 + ], + [ + "▁Written", + -12.751649856567383 + ], + [ + "▁Kooperation", + -12.751666069030762 + ], + [ + "▁scripture", + -12.751952171325684 + ], + [ + "▁Pflicht", + -12.751974105834961 + ], + [ + "ficial", + -12.752013206481934 + ], + [ + "vremea", + -12.752013206481934 + ], + [ + "▁Growing", + -12.752115249633789 + ], + [ + "▁redesign", + -12.752119064331055 + ], + [ + "▁obstacle", + -12.752214431762695 + ], + [ + "▁rugam", + -12.752235412597656 + ], + [ + "▁SPD", + -12.752243995666504 + ], + [ + "165", + -12.752270698547363 + ], + [ + "fiz", + -12.752284049987793 + ], + [ + "▁startet", + -12.752326011657715 + ], + [ + "▁Principle", + -12.752327919006348 + ], + [ + "▁abdominal", + -12.752327919006348 + ], + [ + "▁podium", + -12.752528190612793 + ], + [ + "duty", + -12.752616882324219 + ], + [ + "bonne", + -12.752679824829102 + ], + [ + "▁Serbia", + -12.752687454223633 + ], + [ + "▁brunch", + -12.752839088439941 + ], + [ + "▁Personne", + -12.752975463867188 + ], + [ + "▁Idea", + -12.753034591674805 + ], + [ + "forementioned", + -12.753036499023438 + ], + [ + "▁chassis", + -12.753037452697754 + ], + [ + "gebühr", + -12.753050804138184 + ], + [ + "ucun", + -12.753061294555664 + ], + [ + "▁Maz", + -12.7531156539917 + ], + [ + "1-4", + -12.75318431854248 + ], + [ + "kleid", + -12.753273963928223 + ], + [ + "▁Volvo", + -12.753337860107422 + ], + [ + "brechen", + -12.753378868103027 + ], + [ + "▁homepage", + -12.753472328186035 + ], + [ + "fuz", + -12.753509521484375 + ], + [ + "▁abgeschlossen", + -12.753595352172852 + ], + [ + "▁gelungen", + -12.753658294677734 + ], + [ + "▁booklet", + -12.753711700439453 + ], + [ + "▁Ukrainian", + -12.753745079040527 + ], + [ + "▁Melissa", + -12.753746032714844 + ], + [ + "CENT", + -12.75379467010498 + ], + [ + "▁intégré", + -12.753806114196777 + ], + [ + "weighing", + -12.753827095031738 + ], + [ + "▁crumbl", + 
-12.753894805908203 + ], + [ + "▁bunk", + -12.754167556762695 + ], + [ + "krieg", + -12.754207611083984 + ], + [ + "▁freshman", + -12.754307746887207 + ], + [ + "alaya", + -12.754339218139648 + ], + [ + "Avem", + -12.754353523254395 + ], + [ + "▁Kne", + -12.754423141479492 + ], + [ + "▁upstairs", + -12.75448226928711 + ], + [ + "AIL", + -12.754508972167969 + ], + [ + "țul", + -12.75478744506836 + ], + [ + "▁Lecture", + -12.754817962646484 + ], + [ + "▁entdecken", + -12.754843711853027 + ], + [ + "▁GMT", + -12.754912376403809 + ], + [ + "▁Leitung", + -12.754937171936035 + ], + [ + "▁inclined", + -12.755170822143555 + ], + [ + "▁skillet", + -12.75555419921875 + ], + [ + "FN", + -12.755742073059082 + ], + [ + "▁Perform", + -12.755821228027344 + ], + [ + "shift", + -12.75583267211914 + ], + [ + "recognizing", + -12.755873680114746 + ], + [ + "▁concise", + -12.755873680114746 + ], + [ + "▁obsessed", + -12.755873680114746 + ], + [ + "▁removable", + -12.755873680114746 + ], + [ + "▁Relax", + -12.755888938903809 + ], + [ + "delegates", + -12.75605583190918 + ], + [ + "▁expedi", + -12.756074905395508 + ], + [ + "▁Schä", + -12.756138801574707 + ], + [ + "iete", + -12.756211280822754 + ], + [ + "▁reciproc", + -12.756229400634766 + ], + [ + "▁neutr", + -12.75625228881836 + ], + [ + "lactic", + -12.756314277648926 + ], + [ + "▁Nah", + -12.756328582763672 + ], + [ + "scene", + -12.7565279006958 + ], + [ + "▁Helm", + -12.756563186645508 + ], + [ + "▁Bewerbung", + -12.756671905517578 + ], + [ + "▁Cassi", + -12.75667953491211 + ], + [ + "▁Gelegenheit", + -12.756939888000488 + ], + [ + "▁reflective", + -12.757140159606934 + ], + [ + "▁încredere", + -12.757149696350098 + ], + [ + "▁cigarettes", + -12.75717544555664 + ], + [ + "▁Zusätzlich", + -12.757295608520508 + ], + [ + "▁intercept", + -12.75731372833252 + ], + [ + "▁Finn", + -12.757468223571777 + ], + [ + "▁ignor", + -12.757661819458008 + ], + [ + "gian", + -12.75766372680664 + ], + [ + "BRA", + -12.757740020751953 + ], + [ + "leader", + -12.757957458496094 + ], + [ + "nius", + -12.757981300354004 + ], + [ + "▁skies", + -12.757987022399902 + ], + [ + "▁nunta", + -12.758023262023926 + ], + [ + "▁grec", + -12.758041381835938 + ], + [ + "arranging", + -12.75816822052002 + ], + [ + "wartet", + -12.758231163024902 + ], + [ + "▁kostet", + -12.758377075195312 + ], + [ + "▁Entre", + -12.758541107177734 + ], + [ + "Mag", + -12.758575439453125 + ], + [ + "▁radiator", + -12.758598327636719 + ], + [ + "übrigens", + -12.758689880371094 + ], + [ + "Internet", + -12.758706092834473 + ], + [ + "▁connexion", + -12.758718490600586 + ], + [ + "▁prolonged", + -12.758854866027832 + ], + [ + "▁capabil", + -12.75914192199707 + ], + [ + "▁feeder", + -12.759217262268066 + ], + [ + "Initially", + -12.759223937988281 + ], + [ + "Green", + -12.75926685333252 + ], + [ + "▁passiert", + -12.759272575378418 + ], + [ + "▁courtyard", + -12.759299278259277 + ], + [ + "▁judeţ", + -12.759320259094238 + ], + [ + "▁Coalition", + -12.759431838989258 + ], + [ + "▁atmospheric", + -12.759431838989258 + ], + [ + "▁velocity", + -12.759431838989258 + ], + [ + "▁Frühstück", + -12.759432792663574 + ], + [ + "vacancies", + -12.759438514709473 + ], + [ + "unified", + -12.759538650512695 + ], + [ + "▁Ahmed", + -12.759538650512695 + ], + [ + "poured", + -12.759550094604492 + ], + [ + "▁Mikro", + -12.75959587097168 + ], + [ + "▁Klar", + -12.759661674499512 + ], + [ + "kommt", + -12.759681701660156 + ], + [ + "seated", + -12.759744644165039 + ], + [ + "musik", + -12.75976848602295 + ], + [ + "▁stimulation", 
+ -12.759841918945312 + ], + [ + "▁solicitat", + -12.759880065917969 + ], + [ + "▁politically", + -12.760165214538574 + ], + [ + "restoring", + -12.760322570800781 + ], + [ + "▁Rag", + -12.760435104370117 + ], + [ + "▁officielle", + -12.760468482971191 + ], + [ + "▁Annie", + -12.760479927062988 + ], + [ + "▁tourne", + -12.760634422302246 + ], + [ + "▁Joel", + -12.760642051696777 + ], + [ + "blieben", + -12.760666847229004 + ], + [ + "▁repayment", + -12.760736465454102 + ], + [ + "▁Strategi", + -12.760781288146973 + ], + [ + "▁prietenii", + -12.760804176330566 + ], + [ + "▁Montgomery", + -12.760858535766602 + ], + [ + "▁résidence", + -12.760858535766602 + ], + [ + "▁sunglasses", + -12.760858535766602 + ], + [ + "▁1956", + -12.760882377624512 + ], + [ + "MEN", + -12.76093578338623 + ], + [ + "pouvant", + -12.760997772216797 + ], + [ + "375", + -12.761061668395996 + ], + [ + "directed", + -12.761173248291016 + ], + [ + "▁grinder", + -12.76120662689209 + ], + [ + "rträge", + -12.761279106140137 + ], + [ + "▁nickel", + -12.761299133300781 + ], + [ + "▁Maintain", + -12.761313438415527 + ], + [ + "▁Holmes", + -12.761392593383789 + ], + [ + "▁obtinut", + -12.76157283782959 + ], + [ + "▁walnut", + -12.761585235595703 + ], + [ + "▁consultancy", + -12.761640548706055 + ], + [ + "cooled", + -12.761651039123535 + ], + [ + "▁Brig", + -12.761711120605469 + ], + [ + "▁Produc", + -12.761873245239258 + ], + [ + "street", + -12.76187515258789 + ], + [ + "▁Einfach", + -12.761897087097168 + ], + [ + "North", + -12.762149810791016 + ], + [ + "▁PET", + -12.76220989227295 + ], + [ + "▁Président", + -12.762288093566895 + ], + [ + "▁produsului", + -12.762457847595215 + ], + [ + "literatur", + -12.762483596801758 + ], + [ + "133", + -12.762561798095703 + ], + [ + "▁recours", + -12.762591361999512 + ], + [ + "▁verpflichtet", + -12.76264476776123 + ], + [ + "▁Wur", + -12.762733459472656 + ], + [ + "▁psiholog", + -12.762796401977539 + ], + [ + "Veg", + -12.762871742248535 + ], + [ + "▁hype", + -12.762930870056152 + ], + [ + "augmenter", + -12.762974739074707 + ], + [ + "▁Welsh", + -12.763012886047363 + ], + [ + "mounted", + -12.763158798217773 + ], + [ + "▁Wann", + -12.763425827026367 + ], + [ + "▁gezeigt", + -12.763620376586914 + ], + [ + "▁memo", + -12.763631820678711 + ], + [ + "veterinary", + -12.763717651367188 + ], + [ + "▁Olympia", + -12.763717651367188 + ], + [ + "▁handsome", + -12.763871192932129 + ], + [ + "yama", + -12.763911247253418 + ], + [ + "studio", + -12.763912200927734 + ], + [ + "sozial", + -12.764020919799805 + ], + [ + "▁reap", + -12.764104843139648 + ], + [ + "▁didactic", + -12.764111518859863 + ], + [ + "▁Cookie", + -12.764126777648926 + ], + [ + "▁cooper", + -12.764230728149414 + ], + [ + "▁discern", + -12.76441478729248 + ], + [ + "▁Ubuntu", + -12.764433860778809 + ], + [ + "domain", + -12.76443862915039 + ], + [ + "▁plasa", + -12.764460563659668 + ], + [ + "hong", + -12.764585494995117 + ], + [ + "▁Freiheit", + -12.764662742614746 + ], + [ + "▁Gateway", + -12.764678001403809 + ], + [ + "▁poke", + -12.764796257019043 + ], + [ + "▁niedrig", + -12.76484203338623 + ], + [ + "▁corrected", + -12.764899253845215 + ], + [ + "▁predator", + -12.76490306854248 + ], + [ + "QA", + -12.76507568359375 + ], + [ + "Physio", + -12.765101432800293 + ], + [ + "MAS", + -12.765108108520508 + ], + [ + "▁sanctuary", + -12.765151023864746 + ], + [ + "▁aferent", + -12.76523494720459 + ], + [ + "▁perdre", + -12.765268325805664 + ], + [ + "▁recherch", + -12.765397071838379 + ], + [ + "ready", + -12.76559829711914 + ], + 
[ + "without", + -12.76560115814209 + ], + [ + "▁locuitori", + -12.765628814697266 + ], + [ + "▁Memo", + -12.765636444091797 + ], + [ + "▁Laden", + -12.765646934509277 + ], + [ + "danken", + -12.76577377319336 + ], + [ + "▁CNC", + -12.765861511230469 + ], + [ + "▁jealous", + -12.765881538391113 + ], + [ + "▁Background", + -12.765951156616211 + ], + [ + "▁Marx", + -12.765999794006348 + ], + [ + "▁Heli", + -12.766039848327637 + ], + [ + "▁osteo", + -12.766057968139648 + ], + [ + "▁rassembl", + -12.766162872314453 + ], + [ + "▁altceva", + -12.766226768493652 + ], + [ + "▁beschäftigt", + -12.766226768493652 + ], + [ + "▁accru", + -12.766266822814941 + ], + [ + "üft", + -12.766273498535156 + ], + [ + "▁sprout", + -12.766288757324219 + ], + [ + "endorf", + -12.76647663116455 + ], + [ + "▁specialitate", + -12.766483306884766 + ], + [ + "éanmoins", + -12.766586303710938 + ], + [ + "▁poign", + -12.766663551330566 + ], + [ + "▁mânca", + -12.766668319702148 + ], + [ + "▁stretched", + -12.766752243041992 + ], + [ + "fensiv", + -12.76677131652832 + ], + [ + "▁Auction", + -12.76683235168457 + ], + [ + "hints", + -12.766944885253906 + ], + [ + "▁typo", + -12.766983032226562 + ], + [ + "▁Rare", + -12.767003059387207 + ], + [ + "▁interruption", + -12.767043113708496 + ], + [ + "▁Mean", + -12.76709270477295 + ], + [ + "privileged", + -12.767108917236328 + ], + [ + "▁purtat", + -12.767129898071289 + ], + [ + "studie", + -12.767229080200195 + ], + [ + "offres", + -12.767248153686523 + ], + [ + "▁flap", + -12.76729679107666 + ], + [ + "▁rhetoric", + -12.767304420471191 + ], + [ + "▁snapshot", + -12.767325401306152 + ], + [ + "▁Conservative", + -12.767367362976074 + ], + [ + "▁taie", + -12.767416954040527 + ], + [ + "Game", + -12.767499923706055 + ], + [ + "▁naissance", + -12.767663955688477 + ], + [ + "Prof", + -12.767704963684082 + ], + [ + "qualified", + -12.767745971679688 + ], + [ + "▁suppression", + -12.767749786376953 + ], + [ + "▁răspunde", + -12.767765045166016 + ], + [ + "▁1/3", + -12.767803192138672 + ], + [ + "▁lieben", + -12.767858505249023 + ], + [ + "ù", + -12.767898559570312 + ], + [ + "america", + -12.767955780029297 + ], + [ + "▁Mum", + -12.768182754516602 + ], + [ + "▁Researchers", + -12.76827335357666 + ], + [ + "quip", + -12.768308639526367 + ], + [ + "▁fenomen", + -12.768383026123047 + ], + [ + "stools", + -12.768387794494629 + ], + [ + "▁commodity", + -12.768742561340332 + ], + [ + "▁rejuvenat", + -12.768745422363281 + ], + [ + "▁ausgezeichnet", + -12.76876449584961 + ], + [ + "▁păcate", + -12.768784523010254 + ], + [ + "3.6", + -12.76882553100586 + ], + [ + "zwei", + -12.768904685974121 + ], + [ + "accounted", + -12.768982887268066 + ], + [ + "▁Cycle", + -12.76900863647461 + ], + [ + "politischen", + -12.769031524658203 + ], + [ + "Normally", + -12.76904010772705 + ], + [ + "▁transcend", + -12.769158363342285 + ], + [ + "▁Classes", + -12.769268989562988 + ], + [ + "▁vene", + -12.769363403320312 + ], + [ + "protein", + -12.76942253112793 + ], + [ + "formulaire", + -12.76944351196289 + ], + [ + "▁endurance", + -12.769463539123535 + ], + [ + "▁Census", + -12.769464492797852 + ], + [ + "▁census", + -12.7694673538208 + ], + [ + "▁conțin", + -12.76952838897705 + ], + [ + "▁multinational", + -12.769563674926758 + ], + [ + "▁consomm", + -12.769572257995605 + ], + [ + "▁Porter", + -12.769762992858887 + ], + [ + "▁marvel", + -12.769777297973633 + ], + [ + "▁probable", + -12.769824028015137 + ], + [ + "dependable", + -12.770044326782227 + ], + [ + "▁crore", + -12.77015495300293 + ], + [ + "▁6:30", 
+ -12.770224571228027 + ], + [ + "▁Bradley", + -12.77032470703125 + ], + [ + "molecule", + -12.770400047302246 + ], + [ + "inclusiv", + -12.770516395568848 + ], + [ + "▁privilégi", + -12.770543098449707 + ], + [ + "▁cerere", + -12.770611763000488 + ], + [ + "ouille", + -12.770696640014648 + ], + [ + "▁âgé", + -12.770787239074707 + ], + [ + "▁ghid", + -12.770801544189453 + ], + [ + "▁Controller", + -12.77082347869873 + ], + [ + "▁incredere", + -12.770988464355469 + ], + [ + "▁hostel", + -12.771015167236328 + ], + [ + "wissenschaft", + -12.771121978759766 + ], + [ + "▁cooperate", + -12.771183967590332 + ], + [ + "ки", + -12.771202087402344 + ], + [ + "▁Küchen", + -12.771384239196777 + ], + [ + "▁BIO", + -12.771406173706055 + ], + [ + "▁deliveries", + -12.771458625793457 + ], + [ + "▁urmări", + -12.771553993225098 + ], + [ + "▁überzeugen", + -12.771631240844727 + ], + [ + "Roofing", + -12.771703720092773 + ], + [ + "▁Adel", + -12.771737098693848 + ], + [ + "▁navy", + -12.77181339263916 + ], + [ + "▁cider", + -12.772101402282715 + ], + [ + "▁dulce", + -12.772109985351562 + ], + [ + "▁inspirat", + -12.772163391113281 + ], + [ + "allez", + -12.772164344787598 + ], + [ + "HH", + -12.77221965789795 + ], + [ + "▁Danish", + -12.7722749710083 + ], + [ + "CDC", + -12.7722806930542 + ], + [ + "▁Milch", + -12.772303581237793 + ], + [ + "▁Hockey", + -12.772346496582031 + ], + [ + "▁Smooth", + -12.772347450256348 + ], + [ + "▁FIFA", + -12.772361755371094 + ], + [ + "▁Devon", + -12.772364616394043 + ], + [ + "chung", + -12.772379875183105 + ], + [ + "▁villain", + -12.772420883178711 + ], + [ + "▁musée", + -12.772441864013672 + ], + [ + "tiennent", + -12.772557258605957 + ], + [ + "chou", + -12.772732734680176 + ], + [ + "kopf", + -12.772809982299805 + ], + [ + "printed", + -12.77281379699707 + ], + [ + "▁Depression", + -12.773076057434082 + ], + [ + "▁opioid", + -12.773082733154297 + ], + [ + "nomie", + -12.773098945617676 + ], + [ + "▁footwear", + -12.773211479187012 + ], + [ + "▁Cause", + -12.773260116577148 + ], + [ + "SEL", + -12.773515701293945 + ], + [ + "▁Roller", + -12.773523330688477 + ], + [ + "▁einzigartige", + -12.773589134216309 + ], + [ + "desea", + -12.773597717285156 + ], + [ + "▁nasty", + -12.773792266845703 + ], + [ + "formulated", + -12.773877143859863 + ], + [ + "breaker", + -12.773958206176758 + ], + [ + "▁goodies", + -12.773961067199707 + ], + [ + "▁sandy", + -12.774189949035645 + ], + [ + "method", + -12.77425479888916 + ], + [ + "▁Maple", + -12.774308204650879 + ], + [ + "gefragt", + -12.774435997009277 + ], + [ + "▁decreasing", + -12.774515151977539 + ], + [ + "ceşti", + -12.774555206298828 + ], + [ + "▁DUI", + -12.774563789367676 + ], + [ + "▁pierdere", + -12.774574279785156 + ], + [ + "▁brushes", + -12.77466869354248 + ], + [ + "▁Fully", + -12.774712562561035 + ], + [ + "filtered", + -12.774789810180664 + ], + [ + "ruins", + -12.774988174438477 + ], + [ + "Save", + -12.775114059448242 + ], + [ + "sweeping", + -12.7752046585083 + ], + [ + "PCR", + -12.775334358215332 + ], + [ + "▁folded", + -12.775337219238281 + ], + [ + "▁urca", + -12.775444030761719 + ], + [ + "▁clic", + -12.775484085083008 + ], + [ + "▁spécialiste", + -12.775614738464355 + ], + [ + "▁durfte", + -12.775686264038086 + ], + [ + "tuși", + -12.775871276855469 + ], + [ + "▁diligent", + -12.77596378326416 + ], + [ + "▁verdict", + -12.775972366333008 + ], + [ + "▁chaise", + -12.776039123535156 + ], + [ + "▁cleanup", + -12.776068687438965 + ], + [ + "▁Guitar", + -12.776076316833496 + ], + [ + "▁Dip", + 
-12.776142120361328 + ], + [ + "vru", + -12.776260375976562 + ], + [ + "▁cogn", + -12.776373863220215 + ], + [ + "something", + -12.776529312133789 + ], + [ + "hidr", + -12.776535034179688 + ], + [ + "ENG", + -12.776607513427734 + ], + [ + "Paul", + -12.776679039001465 + ], + [ + "▁reboot", + -12.776687622070312 + ], + [ + "savvy", + -12.776688575744629 + ], + [ + "▁Macron", + -12.776710510253906 + ], + [ + "▁Kino", + -12.77682876586914 + ], + [ + "232", + -12.776832580566406 + ], + [ + "▁gravit", + -12.776861190795898 + ], + [ + "ANC", + -12.776883125305176 + ], + [ + "▁petrecut", + -12.776944160461426 + ], + [ + "▁signage", + -12.776959419250488 + ], + [ + "odia", + -12.776987075805664 + ], + [ + "▁GRA", + -12.77712631225586 + ], + [ + "▁alegeril", + -12.777129173278809 + ], + [ + "leger", + -12.77717399597168 + ], + [ + "▁medicamente", + -12.777174949645996 + ], + [ + "pentru", + -12.777249336242676 + ], + [ + "▁collectif", + -12.777251243591309 + ], + [ + "▁Sohn", + -12.777298927307129 + ], + [ + "205", + -12.777313232421875 + ], + [ + "▁Reach", + -12.77733039855957 + ], + [ + "RAM", + -12.777400970458984 + ], + [ + "3.4", + -12.777405738830566 + ], + [ + "▁bleach", + -12.777409553527832 + ], + [ + "▁diligence", + -12.777414321899414 + ], + [ + "▁MORE", + -12.777440071105957 + ], + [ + "▁Critical", + -12.777471542358398 + ], + [ + "▁singură", + -12.77767276763916 + ], + [ + "▁adversar", + -12.777791023254395 + ], + [ + "▁Buzz", + -12.7778902053833 + ], + [ + "▁demeure", + -12.778063774108887 + ], + [ + "▁nephew", + -12.778141021728516 + ], + [ + "▁Boom", + -12.77817440032959 + ], + [ + "▁shining", + -12.77819538116455 + ], + [ + "▁sponge", + -12.778206825256348 + ], + [ + "liest", + -12.77841854095459 + ], + [ + "rseits", + -12.778690338134766 + ], + [ + "▁capita", + -12.778823852539062 + ], + [ + "esthesia", + -12.778867721557617 + ], + [ + "500,000", + -12.77895736694336 + ], + [ + "▁Pressure", + -12.77898120880127 + ], + [ + "ifikation", + -12.779021263122559 + ], + [ + "▁acceleration", + -12.779181480407715 + ], + [ + "▁Pfarr", + -12.779282569885254 + ], + [ + "▁imobil", + -12.779304504394531 + ], + [ + "▁pericol", + -12.779326438903809 + ], + [ + "▁flock", + -12.779454231262207 + ], + [ + "▁Scholar", + -12.77962875366211 + ], + [ + "▁Fusion", + -12.779630661010742 + ], + [ + "▁revolve", + -12.779637336730957 + ], + [ + "Plugin", + -12.779664993286133 + ], + [ + "▁Ruf", + -12.779691696166992 + ], + [ + "▁tehnici", + -12.780024528503418 + ], + [ + "voice", + -12.78005313873291 + ], + [ + "▁anomal", + -12.780203819274902 + ], + [ + "▁gefallen", + -12.780252456665039 + ], + [ + "▁Wyoming", + -12.780322074890137 + ], + [ + "▁9:00", + -12.780354499816895 + ], + [ + "packed", + -12.780461311340332 + ], + [ + "▁Zimbabwe", + -12.780686378479004 + ], + [ + "▁glücklich", + -12.780766487121582 + ], + [ + "ethanol", + -12.78077220916748 + ], + [ + "▁effektiv", + -12.780936241149902 + ], + [ + "▁saptamani", + -12.781049728393555 + ], + [ + "▁umfasst", + -12.781052589416504 + ], + [ + "▁Werbung", + -12.781103134155273 + ], + [ + "▁undermine", + -12.781164169311523 + ], + [ + "▁Lego", + -12.781322479248047 + ], + [ + "▁Rac", + -12.781323432922363 + ], + [ + "educating", + -12.781441688537598 + ], + [ + "leiten", + -12.781451225280762 + ], + [ + "derma", + -12.781518936157227 + ], + [ + "hängen", + -12.781597137451172 + ], + [ + "Lumin", + -12.781846046447754 + ], + [ + "▁PNL", + -12.781913757324219 + ], + [ + "▁volcano", + -12.782064437866211 + ], + [ + "▁Anfrage", + -12.782066345214844 + ], + [ 
+ "▁resp", + -12.782124519348145 + ], + [ + "leigh", + -12.78217601776123 + ], + [ + "▁addict", + -12.782176971435547 + ], + [ + "WORK", + -12.782312393188477 + ], + [ + "▁FY", + -12.782322883605957 + ], + [ + "▁maneuver", + -12.782513618469238 + ], + [ + "flächen", + -12.782525062561035 + ], + [ + "zweck", + -12.782527923583984 + ], + [ + "tolerant", + -12.782609939575195 + ], + [ + "Davidson", + -12.78272533416748 + ], + [ + "▁meteor", + -12.782849311828613 + ], + [ + "▁Stephanie", + -12.78291130065918 + ], + [ + "▁plafon", + -12.783126831054688 + ], + [ + "technischen", + -12.78316879272461 + ], + [ + "unused", + -12.783193588256836 + ], + [ + "▁voulai", + -12.783228874206543 + ], + [ + "▁fehlt", + -12.783447265625 + ], + [ + "möglichen", + -12.783955574035645 + ], + [ + "▁Twenty", + -12.783968925476074 + ], + [ + "composing", + -12.783979415893555 + ], + [ + "▁rebate", + -12.78400707244873 + ], + [ + "Italie", + -12.784036636352539 + ], + [ + "▁goodbye", + -12.784058570861816 + ], + [ + "wild", + -12.784061431884766 + ], + [ + "▁lancé", + -12.784077644348145 + ], + [ + "▁wunderschöne", + -12.784083366394043 + ], + [ + "▁Frontier", + -12.784139633178711 + ], + [ + "▁murit", + -12.784313201904297 + ], + [ + "▁scump", + -12.78464412689209 + ], + [ + "OVER", + -12.784682273864746 + ], + [ + "▁meme", + -12.784709930419922 + ], + [ + "Super", + -12.784733772277832 + ], + [ + "▁Crack", + -12.784849166870117 + ], + [ + "rennen", + -12.784907341003418 + ], + [ + "▁interessiert", + -12.784941673278809 + ], + [ + "▁relaţi", + -12.784942626953125 + ], + [ + "▁factories", + -12.784975051879883 + ], + [ + "▁[...]", + -12.785066604614258 + ], + [ + "▁vizite", + -12.785075187683105 + ], + [ + "▁erfolgen", + -12.785199165344238 + ], + [ + "▁Hosting", + -12.785244941711426 + ], + [ + "▁localitate", + -12.78528118133545 + ], + [ + "▁chasse", + -12.785415649414062 + ], + [ + "▁Meadow", + -12.785465240478516 + ], + [ + "▁expansive", + -12.785513877868652 + ], + [ + "hov", + -12.785874366760254 + ], + [ + "Phil", + -12.785978317260742 + ], + [ + "illian", + -12.786107063293457 + ], + [ + "▁manipulate", + -12.786107063293457 + ], + [ + "informationen", + -12.786130905151367 + ], + [ + "▁profesionist", + -12.786162376403809 + ], + [ + "risen", + -12.786252975463867 + ], + [ + "frem", + -12.786300659179688 + ], + [ + "Act", + -12.78640079498291 + ], + [ + "supervised", + -12.786491394042969 + ], + [ + "▁capul", + -12.786506652832031 + ], + [ + "▁Craiova", + -12.786528587341309 + ], + [ + "▁victoire", + -12.786528587341309 + ], + [ + "▁guitarist", + -12.786680221557617 + ], + [ + "▁identific", + -12.786684036254883 + ], + [ + "democrat", + -12.786864280700684 + ], + [ + "Authentic", + -12.786894798278809 + ], + [ + "▁Autumn", + -12.786894798278809 + ], + [ + "▁bodi", + -12.787014961242676 + ], + [ + "April", + -12.787044525146484 + ], + [ + "▁Burger", + -12.787049293518066 + ], + [ + "▁BEST", + -12.787490844726562 + ], + [ + "▁torrent", + -12.78749942779541 + ], + [ + "UV", + -12.787567138671875 + ], + [ + "▁renal", + -12.787676811218262 + ], + [ + "founded", + -12.787693977355957 + ], + [ + "203", + -12.787956237792969 + ], + [ + "▁Flooring", + -12.78799057006836 + ], + [ + "▁kilogram", + -12.787994384765625 + ], + [ + "▁garantiert", + -12.788139343261719 + ], + [ + "▁fulfil", + -12.788204193115234 + ], + [ + "303", + -12.788330078125 + ], + [ + "▁schafft", + -12.788363456726074 + ], + [ + "▁butterfly", + -12.788365364074707 + ], + [ + "▁Stuart", + -12.788382530212402 + ], + [ + "▁Versuch", + 
-12.788392066955566 + ], + [ + "▁liking", + -12.788412094116211 + ], + [ + "▁chercher", + -12.788508415222168 + ], + [ + "▁wrapping", + -12.788527488708496 + ], + [ + "schrieb", + -12.788652420043945 + ], + [ + "▁abuz", + -12.788718223571777 + ], + [ + "▁maîtrise", + -12.788772583007812 + ], + [ + "EQ", + -12.788887977600098 + ], + [ + "▁Erinnerung", + -12.789095878601074 + ], + [ + "▁bridal", + -12.78909969329834 + ], + [ + "Rock", + -12.789118766784668 + ], + [ + "▁copied", + -12.789193153381348 + ], + [ + "Met", + -12.789206504821777 + ], + [ + "▁incep", + -12.789233207702637 + ], + [ + "▁sinus", + -12.789336204528809 + ], + [ + "▁Felix", + -12.789831161499023 + ], + [ + "▁Deluxe", + -12.789837837219238 + ], + [ + "▁GPU", + -12.789848327636719 + ], + [ + "Sie", + -12.790164947509766 + ], + [ + "lowering", + -12.790262222290039 + ], + [ + "▁Trotz", + -12.790282249450684 + ], + [ + "333", + -12.790417671203613 + ], + [ + "withstand", + -12.79055118560791 + ], + [ + "▁Aufenthalt", + -12.790566444396973 + ], + [ + "▁unhealthy", + -12.790567398071289 + ], + [ + "▁urbain", + -12.790573120117188 + ], + [ + "▁LOL", + -12.790702819824219 + ], + [ + "▁Ballet", + -12.79074478149414 + ], + [ + "▁Decoration", + -12.79083251953125 + ], + [ + "weist", + -12.790839195251465 + ], + [ + "▁Residence", + -12.790932655334473 + ], + [ + "▁Leeds", + -12.791055679321289 + ], + [ + "▁Genau", + -12.791084289550781 + ], + [ + "Imagin", + -12.791136741638184 + ], + [ + "▁suspicion", + -12.791300773620605 + ], + [ + "▁pêche", + -12.791301727294922 + ], + [ + "▁Soccer", + -12.791306495666504 + ], + [ + "▁protectie", + -12.791553497314453 + ], + [ + "ATS", + -12.791796684265137 + ], + [ + "stocked", + -12.791838645935059 + ], + [ + "▁gymnas", + -12.79184627532959 + ], + [ + "ASP", + -12.792027473449707 + ], + [ + "▁Independence", + -12.792037010192871 + ], + [ + "▁Wizard", + -12.792037963867188 + ], + [ + "▁nitrogen", + -12.79204273223877 + ], + [ + "amerikanische", + -12.7920503616333 + ], + [ + "▁Indianapolis", + -12.79205322265625 + ], + [ + "catches", + -12.792131423950195 + ], + [ + "stria", + -12.792275428771973 + ], + [ + "schätze", + -12.79235553741455 + ], + [ + "▁Räume", + -12.792387962341309 + ], + [ + "▁Interesting", + -12.792403221130371 + ], + [ + "bürger", + -12.79240608215332 + ], + [ + "sweet", + -12.792410850524902 + ], + [ + "Identify", + -12.792632102966309 + ], + [ + "EEN", + -12.792651176452637 + ], + [ + "▁£3", + -12.792654991149902 + ], + [ + "interacting", + -12.7926664352417 + ], + [ + "NYSE", + -12.792762756347656 + ], + [ + "▁Dynamics", + -12.79277515411377 + ], + [ + "▁modificări", + -12.792777061462402 + ], + [ + "▁Kumar", + -12.792936325073242 + ], + [ + "chette", + -12.79313850402832 + ], + [ + "▁presiune", + -12.79316234588623 + ], + [ + "arni", + -12.793164253234863 + ], + [ + "▁vielfältig", + -12.793221473693848 + ], + [ + "KC", + -12.793259620666504 + ], + [ + "▁Cuisine", + -12.793513298034668 + ], + [ + "▁australia", + -12.793885231018066 + ], + [ + "▁încet", + -12.794026374816895 + ], + [ + "▁caracteristic", + -12.794257164001465 + ], + [ + "▁cookbook", + -12.794501304626465 + ], + [ + "▁douleur", + -12.79453182220459 + ], + [ + "AVI", + -12.794593811035156 + ], + [ + "artikel", + -12.794740676879883 + ], + [ + "feta", + -12.79493522644043 + ], + [ + "▁fréquent", + -12.794987678527832 + ], + [ + "▁Prophet", + -12.795051574707031 + ], + [ + "▁dépense", + -12.795202255249023 + ], + [ + "▁Smile", + -12.795235633850098 + ], + [ + "▁lawmakers", + -12.79525375366211 + ], + [ + 
"▁Kollegen", + -12.795391082763672 + ], + [ + "▁Pir", + -12.79555606842041 + ], + [ + "serez", + -12.79561710357666 + ], + [ + "▁consumator", + -12.795656204223633 + ], + [ + "▁playlist", + -12.795730590820312 + ], + [ + "▁envisage", + -12.795733451843262 + ], + [ + "swept", + -12.795780181884766 + ], + [ + "▁Grim", + -12.795825004577637 + ], + [ + "▁widow", + -12.795836448669434 + ], + [ + "authorised", + -12.795886039733887 + ], + [ + "▁(...)", + -12.796035766601562 + ], + [ + "▁photographic", + -12.796060562133789 + ], + [ + "▁libertate", + -12.796173095703125 + ], + [ + "▁principalement", + -12.796201705932617 + ], + [ + "umming", + -12.796260833740234 + ], + [ + "▁Montréal", + -12.796465873718262 + ], + [ + "▁compilation", + -12.796468734741211 + ], + [ + "▁erlaubt", + -12.79647159576416 + ], + [ + "▁biblical", + -12.796518325805664 + ], + [ + "volume", + -12.796561241149902 + ], + [ + "5-7", + -12.796809196472168 + ], + [ + "▁Versch", + -12.79689884185791 + ], + [ + "▁Shark", + -12.796957015991211 + ], + [ + "ologne", + -12.796969413757324 + ], + [ + "4.4", + -12.797086715698242 + ], + [ + "decken", + -12.797112464904785 + ], + [ + "▁frequencies", + -12.797205924987793 + ], + [ + "▁inferior", + -12.79720687866211 + ], + [ + "visible", + -12.797321319580078 + ], + [ + "▁educator", + -12.797394752502441 + ], + [ + "▁soziale", + -12.797420501708984 + ], + [ + "▁billet", + -12.797523498535156 + ], + [ + "folosirea", + -12.797574996948242 + ], + [ + "▁aufgenommen", + -12.797590255737305 + ], + [ + "▁Thread", + -12.797649383544922 + ], + [ + "registering", + -12.797694206237793 + ], + [ + "▁Loop", + -12.797747611999512 + ], + [ + "innovation", + -12.79783821105957 + ], + [ + "▁elimination", + -12.797857284545898 + ], + [ + "136", + -12.797883987426758 + ], + [ + "▁fluctu", + -12.797892570495605 + ], + [ + "▁Mercury", + -12.79794692993164 + ], + [ + "▁bouche", + -12.797955513000488 + ], + [ + "▁hurdle", + -12.7979736328125 + ], + [ + "▁Bennett", + -12.798040390014648 + ], + [ + "STI", + -12.79818344116211 + ], + [ + "▁théâtre", + -12.798316955566406 + ], + [ + "▁confortable", + -12.798359870910645 + ], + [ + "▁Automobil", + -12.79838752746582 + ], + [ + "▁Donna", + -12.798399925231934 + ], + [ + "▁foyer", + -12.79841136932373 + ], + [ + "▁hollow", + -12.798465728759766 + ], + [ + "▁règlement", + -12.79861068725586 + ], + [ + "effi", + -12.798616409301758 + ], + [ + "▁sediment", + -12.79869270324707 + ], + [ + "▁Mä", + -12.798774719238281 + ], + [ + "▁faint", + -12.798833847045898 + ], + [ + "feti", + -12.79890251159668 + ], + [ + "▁Concord", + -12.798959732055664 + ], + [ + "▁Ladies", + -12.798990249633789 + ], + [ + "▁pregatit", + -12.799052238464355 + ], + [ + "▁Ensemble", + -12.79905891418457 + ], + [ + "▁Ingredient", + -12.79905891418457 + ], + [ + "▁Respond", + -12.79914379119873 + ], + [ + "▁impaired", + -12.799356460571289 + ], + [ + "▁Feedback", + -12.799430847167969 + ], + [ + "▁ultrasound", + -12.799461364746094 + ], + [ + "▁Guvernului", + -12.799617767333984 + ], + [ + "▁Unterricht", + -12.799654006958008 + ], + [ + "▁prosecut", + -12.799662590026855 + ], + [ + "spend", + -12.799732208251953 + ], + [ + "▁capitol", + -12.799800872802734 + ], + [ + "USD", + -12.799822807312012 + ], + [ + "observing", + -12.799947738647461 + ], + [ + "▁effortlessly", + -12.800045013427734 + ], + [ + "▁Setting", + -12.80010986328125 + ], + [ + "▁spontaneous", + -12.80020809173584 + ], + [ + "▁LEGO", + -12.800238609313965 + ], + [ + "initiative", + -12.800299644470215 + ], + [ + "▁Sak", + 
-12.800299644470215 + ], + [ + "Interestingly", + -12.800326347351074 + ], + [ + "▁Yale", + -12.800352096557617 + ], + [ + "▁größer", + -12.80038070678711 + ], + [ + "RIC", + -12.800406455993652 + ], + [ + "▁distracted", + -12.800436973571777 + ], + [ + "drafted", + -12.800484657287598 + ], + [ + "▁Brenda", + -12.800522804260254 + ], + [ + "monopol", + -12.800551414489746 + ], + [ + "städt", + -12.800580024719238 + ], + [ + "▁altar", + -12.80058765411377 + ], + [ + "▁Hannover", + -12.800596237182617 + ], + [ + "▁Spiritual", + -12.800702095031738 + ], + [ + "▁thriller", + -12.800747871398926 + ], + [ + "▁Schneider", + -12.800760269165039 + ], + [ + "▁accumulate", + -12.800817489624023 + ], + [ + "▁mediului", + -12.800822257995605 + ], + [ + "▁Mathematics", + -12.800914764404297 + ], + [ + "▁paradox", + -12.800986289978027 + ], + [ + "▁Sham", + -12.801230430603027 + ], + [ + "▁SITE", + -12.801375389099121 + ], + [ + "▁echipei", + -12.801508903503418 + ], + [ + "▁staircase", + -12.801660537719727 + ], + [ + "▁întrebări", + -12.801705360412598 + ], + [ + "Commerce", + -12.802020072937012 + ], + [ + "▁selfie", + -12.802353858947754 + ], + [ + "▁Pocket", + -12.802404403686523 + ], + [ + "▁niemand", + -12.80263614654541 + ], + [ + "Tool", + -12.802678108215332 + ], + [ + "igma", + -12.802695274353027 + ], + [ + "utilisant", + -12.802915573120117 + ], + [ + "▁negatively", + -12.80295181274414 + ], + [ + "Secondly", + -12.802955627441406 + ], + [ + "▁ROI", + -12.8030366897583 + ], + [ + "Arch", + -12.803121566772461 + ], + [ + "▁continuity", + -12.80318546295166 + ], + [ + "▁Prayer", + -12.803235054016113 + ], + [ + "inverse", + -12.803241729736328 + ], + [ + "▁Himmel", + -12.803336143493652 + ], + [ + "prinz", + -12.803478240966797 + ], + [ + "wichtigen", + -12.803496360778809 + ], + [ + "étage", + -12.803522109985352 + ], + [ + "summe", + -12.8036527633667 + ], + [ + "▁Zeitung", + -12.80366039276123 + ], + [ + "▁realization", + -12.803897857666016 + ], + [ + "▁influent", + -12.804291725158691 + ], + [ + "▁Valid", + -12.804357528686523 + ], + [ + "▁publicity", + -12.804439544677734 + ], + [ + "▁vertreten", + -12.804447174072266 + ], + [ + "▁Shoes", + -12.804609298706055 + ], + [ + "▁Diabetes", + -12.80463695526123 + ], + [ + "▁anticipation", + -12.804670333862305 + ], + [ + "▁Blank", + -12.8047456741333 + ], + [ + "asked", + -12.804899215698242 + ], + [ + "Power", + -12.804938316345215 + ], + [ + "arrelage", + -12.805140495300293 + ], + [ + "▁appraisal", + -12.80538272857666 + ], + [ + "▁harassment", + -12.805542945861816 + ], + [ + "Anzeige", + -12.805682182312012 + ], + [ + "liners", + -12.80584716796875 + ], + [ + "Firstly", + -12.805851936340332 + ], + [ + "transferring", + -12.805951118469238 + ], + [ + "▁Diane", + -12.806012153625488 + ], + [ + "▁1/2\"", + -12.80606746673584 + ], + [ + "▁adrenal", + -12.806131362915039 + ], + [ + "▁Prague", + -12.806208610534668 + ], + [ + "insertion", + -12.80635929107666 + ], + [ + "▁Fahrer", + -12.806465148925781 + ], + [ + "▁divin", + -12.806585311889648 + ], + [ + "▁douche", + -12.80673885345459 + ], + [ + "▁meticulous", + -12.806879043579102 + ], + [ + "▁IEEE", + -12.806981086730957 + ], + [ + "▁Rabatt", + -12.807259559631348 + ], + [ + "Runner", + -12.807342529296875 + ], + [ + "▁Leder", + -12.807429313659668 + ], + [ + "project", + -12.80745792388916 + ], + [ + "▁Split", + -12.807562828063965 + ], + [ + "Gold", + -12.807600021362305 + ], + [ + "5.00", + -12.807629585266113 + ], + [ + "iola", + -12.807655334472656 + ], + [ + "standardized", + 
-12.807890892028809 + ], + [ + "ordination", + -12.807984352111816 + ], + [ + "▁Egal", + -12.808158874511719 + ], + [ + "▁ruhig", + -12.808241844177246 + ], + [ + "▁judiciar", + -12.80837345123291 + ], + [ + "▁Nowadays", + -12.808374404907227 + ], + [ + "▁whistle", + -12.808374404907227 + ], + [ + "▁superhero", + -12.808379173278809 + ], + [ + "▁PowerPoint", + -12.808408737182617 + ], + [ + "flop", + -12.808420181274414 + ], + [ + "olph", + -12.808460235595703 + ], + [ + "▁pallet", + -12.808916091918945 + ], + [ + "posons", + -12.809005737304688 + ], + [ + "▁Listing", + -12.809032440185547 + ], + [ + "Tag", + -12.809075355529785 + ], + [ + "introductory", + -12.809122085571289 + ], + [ + "▁Profil", + -12.809123992919922 + ], + [ + "symmetric", + -12.809126853942871 + ], + [ + "▁aisle", + -12.809138298034668 + ], + [ + "▁ajouté", + -12.809147834777832 + ], + [ + "opathy", + -12.809149742126465 + ], + [ + "prezentate", + -12.809155464172363 + ], + [ + "▁hurry", + -12.809165000915527 + ], + [ + "Auth", + -12.809310913085938 + ], + [ + "▁Homepage", + -12.809435844421387 + ], + [ + "ashes", + -12.809489250183105 + ], + [ + "▁inklusive", + -12.809496879577637 + ], + [ + "populated", + -12.809502601623535 + ], + [ + "▁nein", + -12.809554100036621 + ], + [ + "▁syndicat", + -12.809690475463867 + ], + [ + "▁développé", + -12.809842109680176 + ], + [ + "▁Domestic", + -12.809877395629883 + ], + [ + "essay", + -12.809967994689941 + ], + [ + "Atelier", + -12.809980392456055 + ], + [ + "▁proceeding", + -12.810006141662598 + ], + [ + "▁SAS", + -12.810038566589355 + ], + [ + "task", + -12.810063362121582 + ], + [ + "▁blackjack", + -12.810114860534668 + ], + [ + "Key", + -12.810186386108398 + ], + [ + "thérapie", + -12.810247421264648 + ], + [ + "▁Cohen", + -12.810397148132324 + ], + [ + "Direct", + -12.810510635375977 + ], + [ + "▁Estimat", + -12.810517311096191 + ], + [ + "élève", + -12.810616493225098 + ], + [ + "cind", + -12.810640335083008 + ], + [ + "▁prezenț", + -12.810701370239258 + ], + [ + "▁notorious", + -12.810725212097168 + ], + [ + "climbed", + -12.810816764831543 + ], + [ + "▁flexibil", + -12.810830116271973 + ], + [ + "▁entlang", + -12.810855865478516 + ], + [ + "longed", + -12.81103515625 + ], + [ + "▁elbow", + -12.811078071594238 + ], + [ + "BH", + -12.811296463012695 + ], + [ + "▁Radu", + -12.811376571655273 + ], + [ + "▁lonely", + -12.811378479003906 + ], + [ + "ALA", + -12.811405181884766 + ], + [ + "Variante", + -12.811639785766602 + ], + [ + "▁Influen", + -12.81169319152832 + ], + [ + "▁Budapest", + -12.811747550964355 + ], + [ + "▁Gemüse", + -12.811747550964355 + ], + [ + "▁continental", + -12.811750411987305 + ], + [ + "ippo", + -12.811771392822266 + ], + [ + "▁Affordable", + -12.81212329864502 + ], + [ + "▁niece", + -12.812187194824219 + ], + [ + "oscopic", + -12.812190055847168 + ], + [ + "▁Grid", + -12.81222152709961 + ], + [ + "sliced", + -12.812270164489746 + ], + [ + "▁voici", + -12.812294006347656 + ], + [ + "aveam", + -12.812471389770508 + ], + [ + "▁Lars", + -12.812612533569336 + ], + [ + "APA", + -12.812657356262207 + ], + [ + "▁particulière", + -12.812858581542969 + ], + [ + "sorb", + -12.8128662109375 + ], + [ + "▁1955", + -12.812887191772461 + ], + [ + "▁solutii", + -12.812942504882812 + ], + [ + "loch", + -12.812960624694824 + ], + [ + "▁summon", + -12.813212394714355 + ], + [ + "wurf", + -12.813271522521973 + ], + [ + "▁protecți", + -12.813288688659668 + ], + [ + "2001", + -12.813499450683594 + ], + [ + "▁sophomore", + -12.813627243041992 + ], + [ + "▁Schwerpunkt", + 
-12.813628196716309 + ], + [ + "▁diplomat", + -12.813687324523926 + ], + [ + "▁artistique", + -12.813726425170898 + ], + [ + "▁accueille", + -12.813739776611328 + ], + [ + "Disp", + -12.813746452331543 + ], + [ + "inherited", + -12.813764572143555 + ], + [ + "▁COMP", + -12.813889503479004 + ], + [ + "▁envoyé", + -12.814046859741211 + ], + [ + "▁tuning", + -12.814056396484375 + ], + [ + "▁entspricht", + -12.814062118530273 + ], + [ + "▁exerc", + -12.81406307220459 + ], + [ + "▁accessoires", + -12.8140869140625 + ], + [ + "▁Automat", + -12.814348220825195 + ], + [ + "importance", + -12.814408302307129 + ], + [ + "▁travellers", + -12.814432144165039 + ], + [ + "seiten", + -12.814474105834961 + ], + [ + "▁slider", + -12.814481735229492 + ], + [ + "effect", + -12.814591407775879 + ], + [ + "▁siding", + -12.814669609069824 + ], + [ + "▁Crit", + -12.814780235290527 + ], + [ + "▁sportif", + -12.814827919006348 + ], + [ + "▁Accessories", + -12.81513500213623 + ], + [ + "▁Anteil", + -12.815184593200684 + ], + [ + "▁limbi", + -12.81519603729248 + ], + [ + "▁vendre", + -12.815269470214844 + ], + [ + "borg", + -12.815435409545898 + ], + [ + "▁Deposit", + -12.815508842468262 + ], + [ + "▁Hö", + -12.815717697143555 + ], + [ + "employé", + -12.8157320022583 + ], + [ + "▁Bangalore", + -12.815887451171875 + ], + [ + "▁itinerary", + -12.815888404846191 + ], + [ + "▁Deliver", + -12.816008567810059 + ], + [ + "dik", + -12.816024780273438 + ], + [ + "▁advent", + -12.816100120544434 + ], + [ + "▁Turk", + -12.81614875793457 + ], + [ + "▁Nico", + -12.816154479980469 + ], + [ + "organizarea", + -12.816161155700684 + ], + [ + "▁remport", + -12.816166877746582 + ], + [ + "▁tribunal", + -12.816266059875488 + ], + [ + "▁Rusia", + -12.8162841796875 + ], + [ + "glazed", + -12.816339492797852 + ], + [ + "▁destiné", + -12.816502571105957 + ], + [ + "304", + -12.816533088684082 + ], + [ + "album", + -12.816650390625 + ], + [ + "▁junction", + -12.81665325164795 + ], + [ + "▁Fleet", + -12.816664695739746 + ], + [ + "venant", + -12.81667423248291 + ], + [ + "▁buddy", + -12.816694259643555 + ], + [ + "▁neglected", + -12.816694259643555 + ], + [ + "▁Mask", + -12.816783905029297 + ], + [ + "▁testament", + -12.816844940185547 + ], + [ + "▁Basil", + -12.81690788269043 + ], + [ + "masă", + -12.816922187805176 + ], + [ + "▁racist", + -12.81692886352539 + ], + [ + "640", + -12.816990852355957 + ], + [ + "▁Standing", + -12.817028045654297 + ], + [ + "▁MUST", + -12.817266464233398 + ], + [ + "situation", + -12.817327499389648 + ], + [ + "▁informiert", + -12.817337036132812 + ], + [ + "ABA", + -12.817353248596191 + ], + [ + "▁Timothy", + -12.817397117614746 + ], + [ + "▁generosity", + -12.817397117614746 + ], + [ + "▁erscheint", + -12.817402839660645 + ], + [ + "▁verarbeitet", + -12.81740665435791 + ], + [ + "▁burial", + -12.817444801330566 + ], + [ + "▁limestone", + -12.817458152770996 + ], + [ + "▁1953", + -12.817480087280273 + ], + [ + "▁Lucr", + -12.817506790161133 + ], + [ + "small", + -12.817633628845215 + ], + [ + "aveau", + -12.81763744354248 + ], + [ + "versiune", + -12.81773567199707 + ], + [ + "▁inkl", + -12.81775951385498 + ], + [ + "▁Minneapolis", + -12.81777572631836 + ], + [ + "Spiel", + -12.81781005859375 + ], + [ + "▁encode", + -12.817895889282227 + ], + [ + "▁beforehand", + -12.818021774291992 + ], + [ + "▁Vital", + -12.818086624145508 + ], + [ + "▁socialist", + -12.818228721618652 + ], + [ + "inho", + -12.81824779510498 + ], + [ + "▁chapel", + -12.81825065612793 + ], + [ + "▁Monitoring", + -12.81838607788086 + ], + [ + 
"▁quotidienne", + -12.818404197692871 + ], + [ + "cloud", + -12.818506240844727 + ], + [ + "▁desfăşur", + -12.818531036376953 + ], + [ + "▁1952", + -12.818638801574707 + ], + [ + "▁Rü", + -12.818690299987793 + ], + [ + "▁Sigma", + -12.818804740905762 + ], + [ + "134", + -12.818835258483887 + ], + [ + "Sullivan", + -12.818909645080566 + ], + [ + "▁Bevölkerung", + -12.818909645080566 + ], + [ + "▁sufficiently", + -12.818953514099121 + ], + [ + "Check", + -12.818992614746094 + ], + [ + "rnie", + -12.8190336227417 + ], + [ + "contamin", + -12.819132804870605 + ], + [ + "▁gewonnen", + -12.81928825378418 + ], + [ + "▁bugetul", + -12.819376945495605 + ], + [ + "▁mustard", + -12.819414138793945 + ], + [ + "132", + -12.819478988647461 + ], + [ + "0.9", + -12.819535255432129 + ], + [ + "▁tratat", + -12.81957721710205 + ], + [ + "▁dilemma", + -12.819666862487793 + ], + [ + "▁versatility", + -12.819666862487793 + ], + [ + "▁clutter", + -12.819670677185059 + ], + [ + "▁Musk", + -12.81973934173584 + ], + [ + "▁Beide", + -12.819750785827637 + ], + [ + "hurst", + -12.819758415222168 + ], + [ + "atsu", + -12.819767951965332 + ], + [ + "absence", + -12.819784164428711 + ], + [ + "rebounds", + -12.819881439208984 + ], + [ + "6.1", + -12.820029258728027 + ], + [ + "Dia", + -12.820046424865723 + ], + [ + "▁siguranță", + -12.820060729980469 + ], + [ + "▁Blade", + -12.820072174072266 + ], + [ + "▁disrupt", + -12.820074081420898 + ], + [ + "▁visiteurs", + -12.820169448852539 + ], + [ + "tested", + -12.820282936096191 + ], + [ + "▁Lup", + -12.820353507995605 + ], + [ + "▁Rouge", + -12.820371627807617 + ], + [ + "▁asbestos", + -12.82042407989502 + ], + [ + "▁moisturize", + -12.820427894592285 + ], + [ + "▁acknowledg", + -12.82045841217041 + ], + [ + "▁procent", + -12.820467948913574 + ], + [ + "▁swear", + -12.82050895690918 + ], + [ + "▁911", + -12.820647239685059 + ], + [ + "präsent", + -12.820724487304688 + ], + [ + "▁cohort", + -12.82072639465332 + ], + [ + "▁intimid", + -12.820830345153809 + ], + [ + "JS", + -12.820849418640137 + ], + [ + "îm", + -12.82096004486084 + ], + [ + "▁Kunststoff", + -12.820963859558105 + ], + [ + "rison", + -12.820972442626953 + ], + [ + "▁praf", + -12.82097339630127 + ], + [ + "▁convient", + -12.821019172668457 + ], + [ + "▁partenaire", + -12.821088790893555 + ], + [ + "▁Verantwortlich", + -12.821182250976562 + ], + [ + "▁semiconductor", + -12.821182250976562 + ], + [ + "▁kürz", + -12.821187019348145 + ], + [ + "▁Bottom", + -12.821187973022461 + ], + [ + "▁tratamentul", + -12.82127571105957 + ], + [ + "Source", + -12.821331024169922 + ], + [ + "authored", + -12.82172679901123 + ], + [ + "robo", + -12.821867942810059 + ], + [ + "▁turf", + -12.82194709777832 + ], + [ + "▁liebe", + -12.821971893310547 + ], + [ + "▁Fotografi", + -12.821995735168457 + ], + [ + "Big", + -12.822064399719238 + ], + [ + "▁fireworks", + -12.822081565856934 + ], + [ + "▁presă", + -12.822135925292969 + ], + [ + "▁conceal", + -12.822269439697266 + ], + [ + "▁originated", + -12.82227897644043 + ], + [ + "▁biciclet", + -12.822319984436035 + ], + [ + "acești", + -12.822577476501465 + ], + [ + "▁mortar", + -12.822585105895996 + ], + [ + "▁Wunder", + -12.822626113891602 + ], + [ + "ionist", + -12.822696685791016 + ], + [ + "KM", + -12.822871208190918 + ], + [ + "▁Marion", + -12.822918891906738 + ], + [ + "produkte", + -12.822933197021484 + ], + [ + "▁Sprint", + -12.822999000549316 + ], + [ + "▁Nachde", + -12.8230619430542 + ], + [ + "▁verfüge", + -12.823100090026855 + ], + [ + "Marea", + -12.823177337646484 + ], + [ + 
"▁compressor", + -12.823253631591797 + ], + [ + "Arm", + -12.823290824890137 + ], + [ + "Auf", + -12.823311805725098 + ], + [ + "▁Polyester", + -12.823461532592773 + ], + [ + "▁Sheffield", + -12.823461532592773 + ], + [ + "illiard", + -12.823494911193848 + ], + [ + "▁misleading", + -12.82353401184082 + ], + [ + "multi", + -12.823749542236328 + ], + [ + "ripped", + -12.82381820678711 + ], + [ + "▁Cosmetic", + -12.82383918762207 + ], + [ + "▁Regal", + -12.823890686035156 + ], + [ + "▁authenticity", + -12.82414436340332 + ], + [ + "▁customizable", + -12.824219703674316 + ], + [ + "▁bathtub", + -12.824275016784668 + ], + [ + "▁Average", + -12.824292182922363 + ], + [ + "▁Muster", + -12.824522018432617 + ], + [ + "290", + -12.824529647827148 + ], + [ + "▁Ersatz", + -12.824570655822754 + ], + [ + "▁Might", + -12.824588775634766 + ], + [ + "published", + -12.82461929321289 + ], + [ + "▁Interpret", + -12.824640274047852 + ], + [ + "▁încep", + -12.82480239868164 + ], + [ + "▁proto", + -12.824851036071777 + ], + [ + "▁disque", + -12.824889183044434 + ], + [ + "▁Palestine", + -12.824980735778809 + ], + [ + "Over", + -12.824981689453125 + ], + [ + "▁verbessert", + -12.824983596801758 + ], + [ + "▁liefern", + -12.825017929077148 + ], + [ + "▁Handlung", + -12.825095176696777 + ], + [ + "▁Handels", + -12.825150489807129 + ], + [ + "▁eater", + -12.825201988220215 + ], + [ + "▁$40", + -12.825251579284668 + ], + [ + "illard", + -12.825334548950195 + ], + [ + "▁apariti", + -12.825413703918457 + ], + [ + "▁gag", + -12.825422286987305 + ], + [ + "▁chimic", + -12.825541496276855 + ], + [ + "▁Guru", + -12.825594902038574 + ], + [ + "▁Toilet", + -12.82571792602539 + ], + [ + "▁Tochter", + -12.825748443603516 + ], + [ + "▁Aurora", + -12.82579231262207 + ], + [ + "contro", + -12.825922966003418 + ], + [ + "▁GOP", + -12.825995445251465 + ], + [ + "Provence", + -12.826130867004395 + ], + [ + "▁Frieden", + -12.82614803314209 + ], + [ + "ăci", + -12.826216697692871 + ], + [ + "portée", + -12.826268196105957 + ], + [ + "▁upright", + -12.826300621032715 + ], + [ + "▁Physician", + -12.82650375366211 + ], + [ + "▁juridique", + -12.82650375366211 + ], + [ + "▁territorial", + -12.82650375366211 + ], + [ + "▁kindergarten", + -12.826505661010742 + ], + [ + "aéroport", + -12.826510429382324 + ], + [ + "▁whisper", + -12.826513290405273 + ], + [ + "▁capacities", + -12.826562881469727 + ], + [ + "dichte", + -12.826641082763672 + ], + [ + "▁Grenzen", + -12.826822280883789 + ], + [ + "▁Riv", + -12.82710075378418 + ], + [ + "épreuve", + -12.827266693115234 + ], + [ + "▁Scheme", + -12.827290534973145 + ], + [ + "mesures", + -12.827330589294434 + ], + [ + "▁Einfluss", + -12.827333450317383 + ], + [ + "appui", + -12.827713966369629 + ], + [ + "▁apuc", + -12.827827453613281 + ], + [ + "▁radiat", + -12.82794189453125 + ], + [ + "▁allergy", + -12.828035354614258 + ], + [ + "▁spear", + -12.828038215637207 + ], + [ + "▁Luxembourg", + -12.828086853027344 + ], + [ + "▁Registered", + -12.828115463256836 + ], + [ + "▁Shape", + -12.828198432922363 + ], + [ + "genie", + -12.828328132629395 + ], + [ + "nsonsten", + -12.828385353088379 + ], + [ + "▁Symposium", + -12.828412055969238 + ], + [ + "forderung", + -12.828474998474121 + ], + [ + "▁personalizat", + -12.82866096496582 + ], + [ + "▁ştiu", + -12.82875919342041 + ], + [ + "blatt", + -12.828804016113281 + ], + [ + "▁geometry", + -12.828807830810547 + ], + [ + "▁8:30", + -12.828831672668457 + ], + [ + "▁Fahrrad", + -12.828861236572266 + ], + [ + "After", + -12.828927040100098 + ], + [ + 
"▁ventilat", + -12.829072952270508 + ], + [ + "▁nylon", + -12.829190254211426 + ], + [ + "▁verkauft", + -12.829304695129395 + ], + [ + "öß", + -12.829345703125 + ], + [ + "▁Kath", + -12.829523086547852 + ], + [ + "▁Nuclear", + -12.829558372497559 + ], + [ + "▁Verizon", + -12.829560279846191 + ], + [ + "▁spokesperson", + -12.829560279846191 + ], + [ + "▁vietii", + -12.829560279846191 + ], + [ + "▁prescri", + -12.829629898071289 + ], + [ + "ру", + -12.829666137695312 + ], + [ + "6.2", + -12.829801559448242 + ], + [ + "▁spațiu", + -12.830018997192383 + ], + [ + "▁solvent", + -12.83006763458252 + ], + [ + ",000,000", + -12.830142974853516 + ], + [ + "reuen", + -12.830185890197754 + ], + [ + "plast", + -12.830245018005371 + ], + [ + "▁Activities", + -12.830334663391113 + ], + [ + "▁domni", + -12.83056926727295 + ], + [ + "▁trophy", + -12.830572128295898 + ], + [ + "▁saddle", + -12.830657958984375 + ], + [ + "▁renovat", + -12.830708503723145 + ], + [ + "▁bumper", + -12.830717086791992 + ], + [ + "▁penny", + -12.830741882324219 + ], + [ + "omato", + -12.830743789672852 + ], + [ + "AQ", + -12.83083438873291 + ], + [ + "kunst", + -12.830843925476074 + ], + [ + "hydrat", + -12.830860137939453 + ], + [ + "minder", + -12.830931663513184 + ], + [ + "trecerea", + -12.830949783325195 + ], + [ + "brush", + -12.831185340881348 + ], + [ + "TEC", + -12.83121395111084 + ], + [ + "Please", + -12.831253051757812 + ], + [ + "hydrated", + -12.831483840942383 + ], + [ + "ICAL", + -12.831636428833008 + ], + [ + "trauen", + -12.831639289855957 + ], + [ + "9,000", + -12.83175277709961 + ], + [ + "▁2030", + -12.831830024719238 + ], + [ + "▁Chennai", + -12.831854820251465 + ], + [ + "▁empirical", + -12.831854820251465 + ], + [ + "▁Subscribe", + -12.83206844329834 + ], + [ + "▁vorgestellt", + -12.832120895385742 + ], + [ + "▁Springfield", + -12.832159996032715 + ], + [ + "▁continuu", + -12.832311630249023 + ], + [ + "208", + -12.832351684570312 + ], + [ + "▁Bearing", + -12.83240795135498 + ], + [ + "2003", + -12.832572937011719 + ], + [ + "cheta", + -12.832608222961426 + ], + [ + "▁empathy", + -12.832623481750488 + ], + [ + "▁Alert", + -12.832817077636719 + ], + [ + "▁recreate", + -12.832879066467285 + ], + [ + "PJ", + -12.833159446716309 + ], + [ + "Name", + -12.83323860168457 + ], + [ + "▁Mouse", + -12.833405494689941 + ], + [ + "▁disturbing", + -12.833443641662598 + ], + [ + "▁leichter", + -12.83344841003418 + ], + [ + "▁cruel", + -12.833507537841797 + ], + [ + "▁detective", + -12.833531379699707 + ], + [ + "▁reimbursement", + -12.833626747131348 + ], + [ + "▁Gemeinschaft", + -12.833772659301758 + ], + [ + "▁adolescents", + -12.833772659301758 + ], + [ + "▁Reality", + -12.833954811096191 + ], + [ + "▁Stockholm", + -12.83415699005127 + ], + [ + "▁Gründen", + -12.834304809570312 + ], + [ + "▁Reflect", + -12.83432388305664 + ], + [ + "▁Palmer", + -12.834336280822754 + ], + [ + "▁treac", + -12.8343505859375 + ], + [ + "▁tentative", + -12.834497451782227 + ], + [ + "▁surrender", + -12.834677696228027 + ], + [ + "▁broadly", + -12.834734916687012 + ], + [ + "▁județ", + -12.834814071655273 + ], + [ + "▁Thu", + -12.834845542907715 + ], + [ + "wärts", + -12.834961891174316 + ], + [ + "▁crește", + -12.835074424743652 + ], + [ + "▁déplacement", + -12.835208892822266 + ], + [ + "blanc", + -12.835268020629883 + ], + [ + "▁£5", + -12.835308074951172 + ], + [ + "▁confidentiality", + -12.835320472717285 + ], + [ + "veraging", + -12.835444450378418 + ], + [ + "unité", + -12.835609436035156 + ], + [ + "clar", + -12.83564567565918 + ], 
+ [ + "rigg", + -12.835693359375 + ], + [ + "honneur", + -12.835694313049316 + ], + [ + "▁adventurous", + -12.835694313049316 + ], + [ + "▁Nutzen", + -12.835758209228516 + ], + [ + "▁Kabel", + -12.835800170898438 + ], + [ + "empowering", + -12.836040496826172 + ], + [ + "verhalten", + -12.836042404174805 + ], + [ + "▁prevail", + -12.8361234664917 + ], + [ + "mashed", + -12.836138725280762 + ], + [ + "▁1947", + -12.83616828918457 + ], + [ + "function", + -12.836292266845703 + ], + [ + "niveaux", + -12.83633041381836 + ], + [ + "▁territories", + -12.836463928222656 + ], + [ + "▁Permanent", + -12.836465835571289 + ], + [ + "▁christmas", + -12.836471557617188 + ], + [ + "arguing", + -12.836490631103516 + ], + [ + "zukünftig", + -12.836654663085938 + ], + [ + "▁Eindruck", + -12.836817741394043 + ], + [ + "personalised", + -12.836854934692383 + ], + [ + "▁vecin", + -12.837211608886719 + ], + [ + "▁Affiliate", + -12.837234497070312 + ], + [ + "▁Silk", + -12.837249755859375 + ], + [ + "▁Tub", + -12.837440490722656 + ], + [ + "▁remont", + -12.837493896484375 + ], + [ + "▁sauber", + -12.837530136108398 + ], + [ + "gehörig", + -12.837562561035156 + ], + [ + "Maritime", + -12.83771800994873 + ], + [ + "▁Bö", + -12.837973594665527 + ], + [ + "▁1957", + -12.83800220489502 + ], + [ + "▁unparalleled", + -12.838005065917969 + ], + [ + "▁fulfillment", + -12.838042259216309 + ], + [ + "▁collage", + -12.838179588317871 + ], + [ + "fenders", + -12.838248252868652 + ], + [ + "▁neige", + -12.838275909423828 + ], + [ + "▁gamers", + -12.838325500488281 + ], + [ + "tefan", + -12.838339805603027 + ], + [ + "▁wifi", + -12.838349342346191 + ], + [ + "▁leisten", + -12.83835506439209 + ], + [ + "▁Verbesserung", + -12.838390350341797 + ], + [ + "▁composant", + -12.838400840759277 + ], + [ + "▁LORD", + -12.8384370803833 + ], + [ + "arrive", + -12.838472366333008 + ], + [ + "▁conquer", + -12.838562965393066 + ], + [ + "▁lentil", + -12.838767051696777 + ], + [ + "▁Sprech", + -12.838995933532715 + ], + [ + "▁substitution", + -12.839015007019043 + ], + [ + ".05.", + -12.839020729064941 + ], + [ + "FORM", + -12.839144706726074 + ], + [ + "cădere", + -12.839154243469238 + ], + [ + "▁canyon", + -12.839430809020996 + ], + [ + "▁capacitate", + -12.839442253112793 + ], + [ + "▁menace", + -12.839461326599121 + ], + [ + "▁Antique", + -12.839519500732422 + ], + [ + "▁dizaine", + -12.839550971984863 + ], + [ + "▁Saturn", + -12.839578628540039 + ], + [ + "▁gastro", + -12.83962631225586 + ], + [ + "▁Vand", + -12.839641571044922 + ], + [ + "▁africa", + -12.839682579040527 + ], + [ + "▁hackers", + -12.839702606201172 + ], + [ + "▁Bailey", + -12.839736938476562 + ], + [ + "ouette", + -12.839822769165039 + ], + [ + "hoch", + -12.839885711669922 + ], + [ + "étudiant", + -12.839973449707031 + ], + [ + "▁1600", + -12.840004920959473 + ], + [ + "utiliz", + -12.840167999267578 + ], + [ + "reinigung", + -12.840263366699219 + ], + [ + "▁mileage", + -12.84029483795166 + ], + [ + "▁consacré", + -12.840309143066406 + ], + [ + "▁Norfolk", + -12.840327262878418 + ], + [ + "stacked", + -12.840659141540527 + ], + [ + "anbieter", + -12.840731620788574 + ], + [ + "▁gewünschte", + -12.84073543548584 + ], + [ + "▁silicon", + -12.840761184692383 + ], + [ + "Ensuite", + -12.840794563293457 + ], + [ + "▁vendu", + -12.840850830078125 + ], + [ + "▁viteza", + -12.840851783752441 + ], + [ + "▁evaluare", + -12.840913772583008 + ], + [ + "▁contient", + -12.841036796569824 + ], + [ + "▁Viagra", + -12.841100692749023 + ], + [ + "▁circumstance", + -12.841283798217773 + 
], + [ + "walker", + -12.841383934020996 + ], + [ + "▁Aluminium", + -12.84148120880127 + ], + [ + "ço", + -12.841556549072266 + ], + [ + "▁Kli", + -12.841643333435059 + ], + [ + "▁deliberately", + -12.841649055480957 + ], + [ + "▁gamble", + -12.841893196105957 + ], + [ + "▁nourri", + -12.841903686523438 + ], + [ + "▁sealing", + -12.84194278717041 + ], + [ + "▁Atmosphäre", + -12.842255592346191 + ], + [ + "▁erschien", + -12.842260360717773 + ], + [ + "▁brightness", + -12.842340469360352 + ], + [ + "autonomie", + -12.84251594543457 + ], + [ + "▁propel", + -12.842525482177734 + ], + [ + "▁Infrastructure", + -12.842642784118652 + ], + [ + "▁război", + -12.842642784118652 + ], + [ + "▁jelly", + -12.842684745788574 + ], + [ + "scalable", + -12.84280776977539 + ], + [ + "regal", + -12.84296703338623 + ], + [ + "▁sarcini", + -12.843031883239746 + ], + [ + "▁Dienstag", + -12.84304428100586 + ], + [ + "▁Receive", + -12.8430814743042 + ], + [ + "▁mango", + -12.843356132507324 + ], + [ + "▁compétition", + -12.84341812133789 + ], + [ + "▁Monument", + -12.843428611755371 + ], + [ + "▁mast", + -12.844159126281738 + ], + [ + "▁instructed", + -12.84425163269043 + ], + [ + "▁aventur", + -12.844277381896973 + ], + [ + "139", + -12.844298362731934 + ], + [ + "▁Parmi", + -12.84435749053955 + ], + [ + "confined", + -12.844416618347168 + ], + [ + "acious", + -12.844441413879395 + ], + [ + "▁simptome", + -12.844581604003906 + ], + [ + "▁Fischer", + -12.844897270202637 + ], + [ + "störung", + -12.844985008239746 + ], + [ + "▁bilateral", + -12.84504508972168 + ], + [ + "preşedintele", + -12.845274925231934 + ], + [ + "accueillir", + -12.845357894897461 + ], + [ + "▁Schmidt", + -12.845359802246094 + ], + [ + "litis", + -12.845373153686523 + ], + [ + "WL", + -12.8454008102417 + ], + [ + "▁Rise", + -12.845436096191406 + ], + [ + "▁streamline", + -12.845556259155273 + ], + [ + "sozialen", + -12.845585823059082 + ], + [ + "▁Emirates", + -12.845746040344238 + ], + [ + "▁encrypted", + -12.845746040344238 + ], + [ + "▁unfamiliar", + -12.845746040344238 + ], + [ + "established", + -12.84577751159668 + ], + [ + "▁Tätigkeit", + -12.845818519592285 + ], + [ + "▁unaware", + -12.845913887023926 + ], + [ + "2:00", + -12.8460054397583 + ], + [ + "macher", + -12.846013069152832 + ], + [ + "NSA", + -12.8461275100708 + ], + [ + "▁rutier", + -12.846177101135254 + ], + [ + "▁Trent", + -12.846212387084961 + ], + [ + "▁sickness", + -12.846277236938477 + ], + [ + "▁advert", + -12.846417427062988 + ], + [ + "▁Kranken", + -12.846426963806152 + ], + [ + "▁Sandra", + -12.846443176269531 + ], + [ + "▁Recreation", + -12.846449851989746 + ], + [ + "▁Evidence", + -12.846524238586426 + ], + [ + "▁Immigration", + -12.846524238586426 + ], + [ + "▁carriage", + -12.846524238586426 + ], + [ + "▁justified", + -12.84655475616455 + ], + [ + "▁veche", + -12.846579551696777 + ], + [ + "PGA", + -12.846604347229004 + ], + [ + "▁Carmen", + -12.846735000610352 + ], + [ + "▁Faites", + -12.846750259399414 + ], + [ + "▁erfüllt", + -12.84691333770752 + ], + [ + "▁voilà", + -12.846931457519531 + ], + [ + "▁împlin", + -12.846959114074707 + ], + [ + "deposited", + -12.84721565246582 + ], + [ + "▁decisiv", + -12.847241401672363 + ], + [ + "CSA", + -12.847249031066895 + ], + [ + "pathy", + -12.84726619720459 + ], + [ + "▁erweitert", + -12.847302436828613 + ], + [ + "▁liquor", + -12.847302436828613 + ], + [ + "▁resilient", + -12.847302436828613 + ], + [ + "▁walmart", + -12.847302436828613 + ], + [ + "▁fencing", + -12.847308158874512 + ], + [ + "▁dépasse", + 
-12.84731388092041 + ], + [ + "KT", + -12.847354888916016 + ], + [ + "▁fries", + -12.847368240356445 + ], + [ + "vadă", + -12.847421646118164 + ], + [ + "▁Spania", + -12.847478866577148 + ], + [ + "▁complètement", + -12.847725868225098 + ], + [ + "▁lucrari", + -12.84777545928955 + ], + [ + "▁Lieb", + -12.847908973693848 + ], + [ + "leistungen", + -12.847943305969238 + ], + [ + "198", + -12.847979545593262 + ], + [ + "▁Schnell", + -12.847997665405273 + ], + [ + "▁radius", + -12.84814453125 + ], + [ + "▁beneficiaries", + -12.848151206970215 + ], + [ + "▁northwest", + -12.848174095153809 + ], + [ + "▁#4", + -12.848223686218262 + ], + [ + "▁embryo", + -12.848492622375488 + ], + [ + "▁ditch", + -12.848791122436523 + ], + [ + "▁Seriously", + -12.848859786987305 + ], + [ + "oppel", + -12.848941802978516 + ], + [ + "▁stalk", + -12.849053382873535 + ], + [ + "écriture", + -12.849066734313965 + ], + [ + "512", + -12.84912109375 + ], + [ + "wiesen", + -12.849271774291992 + ], + [ + "▁Consum", + -12.849321365356445 + ], + [ + "▁lună", + -12.849405288696289 + ], + [ + "▁lantern", + -12.849441528320312 + ], + [ + "▁italian", + -12.849629402160645 + ], + [ + "▁achiziți", + -12.849639892578125 + ], + [ + "▁catalyst", + -12.849639892578125 + ], + [ + "▁Arbeitgeber", + -12.849662780761719 + ], + [ + "▁researched", + -12.8496675491333 + ], + [ + "▁drastically", + -12.849679946899414 + ], + [ + "versammlung", + -12.849735260009766 + ], + [ + "410", + -12.849800109863281 + ], + [ + "▁impus", + -12.850153923034668 + ], + [ + "▁interchange", + -12.850173950195312 + ], + [ + "▁pharmacie", + -12.850215911865234 + ], + [ + "Live", + -12.850354194641113 + ], + [ + "dents", + -12.850384712219238 + ], + [ + "▁charcoal", + -12.850419998168945 + ], + [ + "▁odihn", + -12.850420951843262 + ], + [ + "▁pistol", + -12.850444793701172 + ], + [ + "▁complaining", + -12.850576400756836 + ], + [ + "manager", + -12.850578308105469 + ], + [ + "themed", + -12.850578308105469 + ], + [ + "▁Chang", + -12.850650787353516 + ], + [ + "▁rookie", + -12.85070514678955 + ], + [ + "Great", + -12.850706100463867 + ], + [ + "▁smoker", + -12.850733757019043 + ], + [ + "▁Container", + -12.850812911987305 + ], + [ + "▁bancaire", + -12.850852966308594 + ], + [ + "▁Actual", + -12.850966453552246 + ], + [ + "füllen", + -12.850982666015625 + ], + [ + "forum", + -12.850985527038574 + ], + [ + "bleib", + -12.851073265075684 + ], + [ + "▁combi", + -12.851079940795898 + ], + [ + "smoked", + -12.851137161254883 + ], + [ + "difficultés", + -12.851161003112793 + ], + [ + "▁tactical", + -12.851240158081055 + ], + [ + "▁sichtbar", + -12.851483345031738 + ], + [ + "▁dreptate", + -12.851598739624023 + ], + [ + "ERT", + -12.85168743133545 + ], + [ + "▁Pond", + -12.85177993774414 + ], + [ + "▁Holly", + -12.851844787597656 + ], + [ + "erfolg", + -12.8518705368042 + ], + [ + "▁Nordic", + -12.851896286010742 + ], + [ + "évènement", + -12.851983070373535 + ], + [ + "embracing", + -12.851984024047852 + ], + [ + "▁Maximum", + -12.851984024047852 + ], + [ + "▁défend", + -12.85205078125 + ], + [ + "▁fruct", + -12.852056503295898 + ], + [ + "▁Conditioning", + -12.852099418640137 + ], + [ + "LG", + -12.852127075195312 + ], + [ + "exigence", + -12.852166175842285 + ], + [ + "amide", + -12.852187156677246 + ], + [ + "▁darunter", + -12.852208137512207 + ], + [ + "▁EVERY", + -12.852420806884766 + ], + [ + "▁comparat", + -12.85244083404541 + ], + [ + "boosting", + -12.852452278137207 + ], + [ + "▁Hawaiian", + -12.852553367614746 + ], + [ + "▁Geburt", + -12.852752685546875 + ], + 
[ + "deci", + -12.852782249450684 + ], + [ + "▁Apollo", + -12.852803230285645 + ], + [ + "▁schützen", + -12.852821350097656 + ], + [ + "tragere", + -12.852893829345703 + ], + [ + "Online", + -12.852904319763184 + ], + [ + "▁neural", + -12.852913856506348 + ], + [ + "▁lucrez", + -12.853188514709473 + ], + [ + "▁phenomenal", + -12.853253364562988 + ], + [ + "▁Height", + -12.853368759155273 + ], + [ + "coordinating", + -12.853548049926758 + ], + [ + "geschnitten", + -12.853631019592285 + ], + [ + "auront", + -12.853641510009766 + ], + [ + "▁administer", + -12.853644371032715 + ], + [ + "▁contend", + -12.853707313537598 + ], + [ + "▁crispy", + -12.853784561157227 + ], + [ + "chuck", + -12.854011535644531 + ], + [ + "▁Condition", + -12.8540678024292 + ], + [ + "gestaltung", + -12.854324340820312 + ], + [ + "▁Blvd", + -12.854331970214844 + ], + [ + "▁subjective", + -12.854470252990723 + ], + [ + "▁événements", + -12.854708671569824 + ], + [ + "▁Jenny", + -12.855131149291992 + ], + [ + "▁cumpăra", + -12.85519027709961 + ], + [ + "constructing", + -12.855262756347656 + ], + [ + "▁instructional", + -12.85539436340332 + ], + [ + "▁sterling", + -12.855446815490723 + ], + [ + "scrise", + -12.855470657348633 + ], + [ + "▁Boulevard", + -12.855551719665527 + ], + [ + "pipe", + -12.855620384216309 + ], + [ + "▁Pride", + -12.855748176574707 + ], + [ + "▁Kau", + -12.855751991271973 + ], + [ + "▁overhaul", + -12.855924606323242 + ], + [ + "▁Recruitment", + -12.855925559997559 + ], + [ + "▁thrilling", + -12.856218338012695 + ], + [ + "living", + -12.856302261352539 + ], + [ + "▁rămân", + -12.85645866394043 + ], + [ + "▁MOD", + -12.85661792755127 + ], + [ + "▁Newport", + -12.856675148010254 + ], + [ + "▁infectious", + -12.856688499450684 + ], + [ + "6-3", + -12.856860160827637 + ], + [ + "▁Apache", + -12.856976509094238 + ], + [ + "▁dependence", + -12.85698413848877 + ], + [ + "nutzung", + -12.857199668884277 + ], + [ + "praised", + -12.857211112976074 + ], + [ + "▁craving", + -12.857346534729004 + ], + [ + "▁cramp", + -12.857397079467773 + ], + [ + "▁mancare", + -12.857455253601074 + ], + [ + "▁entdeckt", + -12.857474327087402 + ], + [ + "▁Pioneer", + -12.857484817504883 + ], + [ + "▁Adelaide", + -12.857490539550781 + ], + [ + "2.0", + -12.857503890991211 + ], + [ + "168", + -12.857526779174805 + ], + [ + "▁Decorating", + -12.857611656188965 + ], + [ + "▁unpleasant", + -12.857854843139648 + ], + [ + "▁déclaration", + -12.857865333557129 + ], + [ + "▁Grafik", + -12.857908248901367 + ], + [ + "5-2", + -12.857937812805176 + ], + [ + "căci", + -12.857940673828125 + ], + [ + "▁invade", + -12.858171463012695 + ], + [ + "▁internaţional", + -12.858259201049805 + ], + [ + "▁fraudulent", + -12.858281135559082 + ], + [ + "▁crestere", + -12.858441352844238 + ], + [ + "ografic", + -12.858729362487793 + ], + [ + "plină", + -12.859140396118164 + ], + [ + "sunteti", + -12.859150886535645 + ], + [ + "/04", + -12.859176635742188 + ], + [ + "▁admis", + -12.85935115814209 + ], + [ + "▁mediation", + -12.859403610229492 + ], + [ + "ICC", + -12.859424591064453 + ], + [ + "roș", + -12.859660148620605 + ], + [ + "▁Aroma", + -12.8596773147583 + ], + [ + "1:00", + -12.859792709350586 + ], + [ + "gasesc", + -12.859822273254395 + ], + [ + "▁Defence", + -12.859850883483887 + ], + [ + "▁dictionary", + -12.859856605529785 + ], + [ + "▁Batterie", + -12.859865188598633 + ], + [ + "▁gesunde", + -12.85997486114502 + ], + [ + "146", + -12.860099792480469 + ], + [ + "▁mortal", + -12.860129356384277 + ], + [ + "▁Flughafen", + -12.860230445861816 + 
], + [ + "hhh", + -12.860284805297852 + ], + [ + "▁novice", + -12.860342025756836 + ], + [ + "▁Develop", + -12.86043930053711 + ], + [ + "▁accidental", + -12.860516548156738 + ], + [ + "Muzeul", + -12.86054515838623 + ], + [ + "▁Jupiter", + -12.86062240600586 + ], + [ + "supposedly", + -12.860662460327148 + ], + [ + "energy", + -12.860758781433105 + ], + [ + "▁montrer", + -12.860764503479004 + ], + [ + "recalled", + -12.860795021057129 + ], + [ + "Press", + -12.860801696777344 + ], + [ + "▁postcard", + -12.86080265045166 + ], + [ + "target", + -12.86081600189209 + ], + [ + "▁vêtements", + -12.860881805419922 + ], + [ + "▁particle", + -12.860888481140137 + ], + [ + "professional", + -12.8608980178833 + ], + [ + "▁1949", + -12.860917091369629 + ], + [ + "yah", + -12.860980033874512 + ], + [ + "▁Spiegel", + -12.861017227172852 + ], + [ + "▁Jeffrey", + -12.861023902893066 + ], + [ + "fahrzeug", + -12.861027717590332 + ], + [ + "▁Plug", + -12.861051559448242 + ], + [ + "▁violin", + -12.861150741577148 + ], + [ + "▁condemn", + -12.861381530761719 + ], + [ + "▁conducere", + -12.861398696899414 + ], + [ + "▁Chevrolet", + -12.861412048339844 + ], + [ + "▁conceput", + -12.861461639404297 + ], + [ + "▁Merri", + -12.861493110656738 + ], + [ + "judging", + -12.861559867858887 + ], + [ + "embraced", + -12.86168098449707 + ], + [ + "▁Compact", + -12.861715316772461 + ], + [ + "▁château", + -12.861807823181152 + ], + [ + "etch", + -12.861945152282715 + ], + [ + "bedroom", + -12.861995697021484 + ], + [ + "People", + -12.862038612365723 + ], + [ + "25,000", + -12.86209774017334 + ], + [ + "ocyte", + -12.862146377563477 + ], + [ + "▁Lenovo", + -12.862205505371094 + ], + [ + "▁Hampton", + -12.862241744995117 + ], + [ + "5.2", + -12.862244606018066 + ], + [ + "▁progres", + -12.862266540527344 + ], + [ + "hoc", + -12.862288475036621 + ], + [ + "▁complementary", + -12.86241340637207 + ], + [ + "turned", + -12.862485885620117 + ], + [ + "mangel", + -12.862508773803711 + ], + [ + "▁Drew", + -12.862592697143555 + ], + [ + "épisode", + -12.86259651184082 + ], + [ + "▁Versorgung", + -12.86259651184082 + ], + [ + "▁ausdrücklich", + -12.86259651184082 + ], + [ + "ciune", + -12.862788200378418 + ], + [ + "▁sfârșit", + -12.862990379333496 + ], + [ + "Agricultural", + -12.862991333007812 + ], + [ + "▁caffeine", + -12.862991333007812 + ], + [ + "▁emergencies", + -12.862991333007812 + ], + [ + "▁unhappy", + -12.862991333007812 + ], + [ + "(7)", + -12.863043785095215 + ], + [ + "▁inlocui", + -12.863059043884277 + ], + [ + "▁Rochester", + -12.863153457641602 + ], + [ + "183", + -12.863155364990234 + ], + [ + "niz", + -12.863285064697266 + ], + [ + "tasche", + -12.863462448120117 + ], + [ + "▁Salle", + -12.86347484588623 + ], + [ + "cît", + -12.863478660583496 + ], + [ + "▁Singer", + -12.863489151000977 + ], + [ + "▁economically", + -12.863506317138672 + ], + [ + "▁ieși", + -12.863525390625 + ], + [ + "▁façade", + -12.86378288269043 + ], + [ + "Ohne", + -12.863801956176758 + ], + [ + "▁edible", + -12.863842964172363 + ], + [ + "Rob", + -12.863851547241211 + ], + [ + "▁(2014)", + -12.863859176635742 + ], + [ + "▁Zar", + -12.863919258117676 + ], + [ + "▁obey", + -12.863995552062988 + ], + [ + "Pack", + -12.864087104797363 + ], + [ + "▁Omni", + -12.864198684692383 + ], + [ + "▁Gilbert", + -12.864212036132812 + ], + [ + "▁Vlad", + -12.86429500579834 + ], + [ + "▁pauvre", + -12.864333152770996 + ], + [ + "▁secular", + -12.864383697509766 + ], + [ + "Center", + -12.864415168762207 + ], + [ + "▁Prospect", + -12.864457130432129 + ], 
+ [ + "▁Noah", + -12.86450481414795 + ], + [ + "▁Interactive", + -12.86471176147461 + ], + [ + "▁centaine", + -12.86485767364502 + ], + [ + "▁cerebral", + -12.864971160888672 + ], + [ + "▁Novel", + -12.865013122558594 + ], + [ + "▁Käufer", + -12.865039825439453 + ], + [ + "werfen", + -12.865056991577148 + ], + [ + "▁reluctant", + -12.865143775939941 + ], + [ + "ес", + -12.86520004272461 + ], + [ + "Look", + -12.86521053314209 + ], + [ + "Erkrankung", + -12.86536693572998 + ], + [ + "▁cucumber", + -12.86536693572998 + ], + [ + "/2017", + -12.865399360656738 + ], + [ + "▁flank", + -12.865405082702637 + ], + [ + "opportunité", + -12.865667343139648 + ], + [ + "zugleich", + -12.865766525268555 + ], + [ + "RAT", + -12.865840911865234 + ], + [ + "▁avantages", + -12.865880012512207 + ], + [ + "▁außer", + -12.866008758544922 + ], + [ + "GV", + -12.866090774536133 + ], + [ + "▁Continental", + -12.866159439086914 + ], + [ + "▁affiliation", + -12.866159439086914 + ], + [ + "▁ursprünglich", + -12.86618423461914 + ], + [ + "▁hardship", + -12.866349220275879 + ], + [ + "âme", + -12.86647891998291 + ], + [ + "▁hallway", + -12.866576194763184 + ], + [ + "▁afară", + -12.866578102111816 + ], + [ + "western", + -12.866714477539062 + ], + [ + "▁Jacket", + -12.866802215576172 + ], + [ + "▁culturelle", + -12.866876602172852 + ], + [ + "▁glaci", + -12.866995811462402 + ], + [ + "metoda", + -12.867036819458008 + ], + [ + "▁clerk", + -12.867045402526855 + ], + [ + "▁ordinance", + -12.867185592651367 + ], + [ + "▁Initial", + -12.867197036743164 + ], + [ + "waking", + -12.86722469329834 + ], + [ + "▁Secondary", + -12.867366790771484 + ], + [ + "▁Solomon", + -12.867411613464355 + ], + [ + "glomer", + -12.867488861083984 + ], + [ + "SYS", + -12.867530822753906 + ], + [ + "▁Florin", + -12.867596626281738 + ], + [ + "ffentlich", + -12.867670059204102 + ], + [ + "▁Printer", + -12.867674827575684 + ], + [ + "▁dimineata", + -12.86774730682373 + ], + [ + "▁stripes", + -12.867748260498047 + ], + [ + "plugged", + -12.86776065826416 + ], + [ + "öhl", + -12.867836952209473 + ], + [ + "infused", + -12.867875099182129 + ], + [ + "▁Rubber", + -12.867895126342773 + ], + [ + "paved", + -12.867898941040039 + ], + [ + "▁Devi", + -12.867995262145996 + ], + [ + "▁subway", + -12.8681640625 + ], + [ + "▁gases", + -12.868306159973145 + ], + [ + "▁reguli", + -12.868371963500977 + ], + [ + "▁Rebel", + -12.868413925170898 + ], + [ + "▁destructive", + -12.868546485900879 + ], + [ + "▁oferind", + -12.868664741516113 + ], + [ + "9001", + -12.868876457214355 + ], + [ + "CRA", + -12.868912696838379 + ], + [ + "why", + -12.868932723999023 + ], + [ + "sensul", + -12.869036674499512 + ], + [ + "guter", + -12.869277000427246 + ], + [ + "Empfehlung", + -12.869338035583496 + ], + [ + "▁convertible", + -12.86953353881836 + ], + [ + "▁predominantly", + -12.869637489318848 + ], + [ + "▁Mentor", + -12.869649887084961 + ], + [ + "Practic", + -12.869720458984375 + ], + [ + "▁echipă", + -12.869754791259766 + ], + [ + "onsite", + -12.869853019714355 + ], + [ + "▁zunehmend", + -12.86994743347168 + ], + [ + "▁Harbour", + -12.870016098022461 + ], + [ + "▁pineapple", + -12.870133399963379 + ], + [ + "▁gasoline", + -12.870139122009277 + ], + [ + "▁Jaguar", + -12.870158195495605 + ], + [ + "kno", + -12.870259284973145 + ], + [ + "▁heap", + -12.870448112487793 + ], + [ + "▁fictional", + -12.870481491088867 + ], + [ + "fiinta", + -12.870753288269043 + ], + [ + "▁Amber", + -12.87081241607666 + ], + [ + "▁Exclusive", + -12.870929718017578 + ], + [ + "▁Pharmaceutical", + 
-12.870929718017578 + ], + [ + "▁unterscheide", + -12.871044158935547 + ], + [ + "▁1942", + -12.871116638183594 + ], + [ + "▁Ceiling", + -12.87115478515625 + ], + [ + "developed", + -12.871228218078613 + ], + [ + "▁consacr", + -12.87132453918457 + ], + [ + "▁Membr", + -12.871411323547363 + ], + [ + "erton", + -12.871447563171387 + ], + [ + "habitation", + -12.871685981750488 + ], + [ + "▁longevity", + -12.871726989746094 + ], + [ + "▁Starbucks", + -12.871728897094727 + ], + [ + "▁poat", + -12.871771812438965 + ], + [ + "▁commissioner", + -12.871794700622559 + ], + [ + "pedia", + -12.871938705444336 + ], + [ + "popped", + -12.872468948364258 + ], + [ + "versorgung", + -12.872525215148926 + ], + [ + "▁Aktivitäten", + -12.872525215148926 + ], + [ + "▁Betreuung", + -12.872525215148926 + ], + [ + "▁afacere", + -12.872968673706055 + ], + [ + "▁Mechanical", + -12.873323440551758 + ], + [ + "▁Leiter", + -12.873346328735352 + ], + [ + "▁scaling", + -12.873427391052246 + ], + [ + "▁Slim", + -12.87350082397461 + ], + [ + "▁temperaturi", + -12.873516082763672 + ], + [ + "ACH", + -12.873558044433594 + ], + [ + "▁jährlich", + -12.873682022094727 + ], + [ + "▁photographie", + -12.873722076416016 + ], + [ + "▁préalable", + -12.873725891113281 + ], + [ + "▁părinți", + -12.87372875213623 + ], + [ + "▁Farmers", + -12.873873710632324 + ], + [ + "▁Printable", + -12.873905181884766 + ], + [ + "Früh", + -12.873908996582031 + ], + [ + "approved", + -12.87398624420166 + ], + [ + "otro", + -12.874094009399414 + ], + [ + "▁veneer", + -12.874099731445312 + ], + [ + "▁Warriors", + -12.874122619628906 + ], + [ + "▁Approach", + -12.874149322509766 + ], + [ + "Share", + -12.874238967895508 + ], + [ + "▁buds", + -12.874252319335938 + ], + [ + "▁Într", + -12.874330520629883 + ], + [ + "glichen", + -12.87452507019043 + ], + [ + "▁anbieten", + -12.87452507019043 + ], + [ + "MET", + -12.874539375305176 + ], + [ + "amélioration", + -12.87468147277832 + ], + [ + "ländische", + -12.87468433380127 + ], + [ + "nsgesamt", + -12.874764442443848 + ], + [ + "einiger", + -12.874822616577148 + ], + [ + "▁Förderung", + -12.874876022338867 + ], + [ + "destroying", + -12.874910354614258 + ], + [ + "▁accreditation", + -12.874922752380371 + ], + [ + "reminiscent", + -12.875094413757324 + ], + [ + "▁retriev", + -12.87528133392334 + ], + [ + "▁Flü", + -12.875306129455566 + ], + [ + "▁Monsieur", + -12.875322341918945 + ], + [ + "German", + -12.87536334991455 + ], + [ + "Orice", + -12.875443458557129 + ], + [ + "künftig", + -12.875523567199707 + ], + [ + "▁vorbi", + -12.875639915466309 + ], + [ + "▁intentionally", + -12.875733375549316 + ], + [ + "▁îngrij", + -12.875743865966797 + ], + [ + "▁laughed", + -12.875850677490234 + ], + [ + "▁Fiction", + -12.875913619995117 + ], + [ + "▁inteligent", + -12.875914573669434 + ], + [ + "▁Translation", + -12.875953674316406 + ], + [ + "greete", + -12.875983238220215 + ], + [ + "▁énergétique", + -12.876123428344727 + ], + [ + "uncovered", + -12.876248359680176 + ], + [ + "▁évidemment", + -12.876523971557617 + ], + [ + "▁Vietnamese", + -12.876535415649414 + ], + [ + "▁Libya", + -12.876675605773926 + ], + [ + "▁Trailer", + -12.876734733581543 + ], + [ + "▁Wohl", + -12.876871109008789 + ], + [ + "▁Congo", + -12.87698745727539 + ], + [ + "▁freut", + -12.877002716064453 + ], + [ + "zauber", + -12.877090454101562 + ], + [ + "▁Pân", + -12.877142906188965 + ], + [ + "▁mentine", + -12.877333641052246 + ], + [ + "▁welding", + -12.877335548400879 + ], + [ + "▁Mircea", + -12.8773775100708 + ], + [ + "▁optimism", + 
-12.877455711364746 + ], + [ + "VEL", + -12.877504348754883 + ], + [ + "oilea", + -12.877540588378906 + ], + [ + "▁thereafter", + -12.877612113952637 + ], + [ + "▁André", + -12.877710342407227 + ], + [ + "forschung", + -12.877799987792969 + ], + [ + "running", + -12.878022193908691 + ], + [ + "▁hostile", + -12.878059387207031 + ], + [ + "Homme", + -12.87811279296875 + ], + [ + "▁Satellite", + -12.878129005432129 + ], + [ + "▁collagen", + -12.87841796875 + ], + [ + "▁concedi", + -12.878518104553223 + ], + [ + "▁produziert", + -12.87852954864502 + ], + [ + "▁virgin", + -12.878540992736816 + ], + [ + "frant", + -12.87857723236084 + ], + [ + "▁teammates", + -12.878744125366211 + ], + [ + "▁faceti", + -12.878802299499512 + ], + [ + "▁Restoration", + -12.87893295288086 + ], + [ + "▁detached", + -12.878935813903809 + ], + [ + "▁Instructor", + -12.878950119018555 + ], + [ + "montag", + -12.879227638244629 + ], + [ + "▁borrowing", + -12.879375457763672 + ], + [ + "▁Retro", + -12.879446983337402 + ], + [ + "▁behandelt", + -12.879536628723145 + ], + [ + "▁Aussage", + -12.879715919494629 + ], + [ + "▁snorkel", + -12.879734992980957 + ], + [ + "▁Proceedings", + -12.879754066467285 + ], + [ + "▁Judy", + -12.879776000976562 + ], + [ + "▁Wendy", + -12.879783630371094 + ], + [ + "artă", + -12.879920959472656 + ], + [ + "▁Vergangenheit", + -12.88013744354248 + ], + [ + "▁Gegner", + -12.880139350891113 + ], + [ + "▁ulcer", + -12.880166053771973 + ], + [ + "wirksam", + -12.880553245544434 + ], + [ + "▁închis", + -12.880560874938965 + ], + [ + "▁emission", + -12.88068962097168 + ], + [ + "ulescu", + -12.880754470825195 + ], + [ + "▁bancar", + -12.880819320678711 + ], + [ + "compromising", + -12.880924224853516 + ], + [ + "▁Priest", + -12.881156921386719 + ], + [ + "▁Progress", + -12.881318092346191 + ], + [ + "▁punish", + -12.88144588470459 + ], + [ + "▁Afin", + -12.881450653076172 + ], + [ + "▁Bog", + -12.881514549255371 + ], + [ + "lunii", + -12.881525039672852 + ], + [ + "▁ressembl", + -12.881570816040039 + ], + [ + "▁Creation", + -12.881644248962402 + ], + [ + "effet", + -12.881668090820312 + ], + [ + "Versicherung", + -12.881671905517578 + ], + [ + "médias", + -12.881672859191895 + ], + [ + "▁Kritik", + -12.881793975830078 + ], + [ + "idia", + -12.881896018981934 + ], + [ + "▁Wasch", + -12.881929397583008 + ], + [ + "UAL", + -12.882059097290039 + ], + [ + "Approximately", + -12.882149696350098 + ], + [ + "izari", + -12.882152557373047 + ], + [ + "▁Dortmund", + -12.882152557373047 + ], + [ + "▁contul", + -12.882343292236328 + ], + [ + "▁Airways", + -12.882408142089844 + ], + [ + "sicherung", + -12.882535934448242 + ], + [ + "échelle", + -12.882560729980469 + ], + [ + "ADD", + -12.882582664489746 + ], + [ + "DIA", + -12.88259506225586 + ], + [ + "kabel", + -12.882621765136719 + ], + [ + "Media", + -12.88268756866455 + ], + [ + "ampli", + -12.882894515991211 + ], + [ + "▁quarry", + -12.88295841217041 + ], + [ + "▁acoper", + -12.883072853088379 + ], + [ + "halter", + -12.883326530456543 + ], + [ + "▁solicitor", + -12.883684158325195 + ], + [ + "phosphat", + -12.883763313293457 + ], + [ + "▁drown", + -12.883773803710938 + ], + [ + "congratulat", + -12.884047508239746 + ], + [ + "▁uneven", + -12.884087562561035 + ], + [ + "▁rupe", + -12.884154319763184 + ], + [ + "▁heureux", + -12.88417911529541 + ], + [ + "caractéristiques", + -12.884221076965332 + ], + [ + "60,000", + -12.884283065795898 + ], + [ + "ambigu", + -12.884340286254883 + ], + [ + "224", + -12.884417533874512 + ], + [ + "dov", + -12.88454532623291 + 
], + [ + "▁Naturally", + -12.884629249572754 + ], + [ + "▁Ernst", + -12.884634017944336 + ], + [ + "Camp", + -12.884757995605469 + ], + [ + "▁Worldwide", + -12.884909629821777 + ], + [ + "▁antrenament", + -12.885042190551758 + ], + [ + "▁jocul", + -12.88521671295166 + ], + [ + "▁broccoli", + -12.88537883758545 + ], + [ + "▁fascinated", + -12.88537883758545 + ], + [ + "▁Abbey", + -12.885387420654297 + ], + [ + "▁aquarium", + -12.885390281677246 + ], + [ + "HAN", + -12.885458946228027 + ], + [ + "chaffung", + -12.885480880737305 + ], + [ + "137", + -12.885503768920898 + ], + [ + "rumors", + -12.885515213012695 + ], + [ + "reliance", + -12.885557174682617 + ], + [ + "▁vaccination", + -12.8856782913208 + ], + [ + "responsabilitate", + -12.885777473449707 + ], + [ + "▁legislati", + -12.885782241821289 + ], + [ + "ATT", + -12.885826110839844 + ], + [ + "206", + -12.885896682739258 + ], + [ + "▁miere", + -12.885967254638672 + ], + [ + "▁rezultatul", + -12.885988235473633 + ], + [ + "părea", + -12.88599681854248 + ], + [ + "zuführen", + -12.886159896850586 + ], + [ + "▁Kompetenz", + -12.886187553405762 + ], + [ + "▁nickname", + -12.886195182800293 + ], + [ + "pilot", + -12.88620376586914 + ], + [ + "▁ninth", + -12.886252403259277 + ], + [ + "▁Tyr", + -12.886446952819824 + ], + [ + "▁misuse", + -12.886469841003418 + ], + [ + "▁SUP", + -12.886514663696289 + ], + [ + "▁Attack", + -12.88667106628418 + ], + [ + "Smart", + -12.88669490814209 + ], + [ + "▁Philosoph", + -12.886930465698242 + ], + [ + "▁Alege", + -12.886931419372559 + ], + [ + "▁femeile", + -12.886967658996582 + ], + [ + "▁Heating", + -12.88698673248291 + ], + [ + "▁Cricket", + -12.886999130249023 + ], + [ + "▁scholar", + -12.887049674987793 + ], + [ + "Model", + -12.887073516845703 + ], + [ + "▁stimulating", + -12.887182235717773 + ], + [ + "▁industrielle", + -12.887189865112305 + ], + [ + "▁phenomena", + -12.887303352355957 + ], + [ + "▁Nahrung", + -12.887414932250977 + ], + [ + "▁Conditioner", + -12.887433052062988 + ], + [ + "führ", + -12.887489318847656 + ], + [ + "▁révolution", + -12.88757610321045 + ], + [ + "plastic", + -12.887595176696777 + ], + [ + "▁approximate", + -12.887596130371094 + ], + [ + "▁dienen", + -12.887624740600586 + ], + [ + "▁obsession", + -12.887807846069336 + ], + [ + "▁rectangular", + -12.887807846069336 + ], + [ + "Allemagne", + -12.887808799743652 + ], + [ + "▁Tanzania", + -12.887824058532715 + ], + [ + "border", + -12.887884140014648 + ], + [ + "▁crashed", + -12.887958526611328 + ], + [ + "visor", + -12.887974739074707 + ], + [ + "▁autorizat", + -12.888072967529297 + ], + [ + "▁Champagne", + -12.888222694396973 + ], + [ + "längst", + -12.888238906860352 + ], + [ + "▁realities", + -12.888314247131348 + ], + [ + "▁Keyword", + -12.88831615447998 + ], + [ + "▁GUI", + -12.888495445251465 + ], + [ + "▁simplified", + -12.88865852355957 + ], + [ + "▁Rack", + -12.888681411743164 + ], + [ + "▁Zahlen", + -12.888693809509277 + ], + [ + "growth", + -12.888897895812988 + ], + [ + "▁rehearsal", + -12.888991355895996 + ], + [ + "▁Epic", + -12.888999938964844 + ], + [ + "▁réussite", + -12.889195442199707 + ], + [ + "▁politician", + -12.889263153076172 + ], + [ + "▁emoți", + -12.889378547668457 + ], + [ + "▁delegation", + -12.889449119567871 + ], + [ + "▁со", + -12.889464378356934 + ], + [ + "oversized", + -12.889477729797363 + ], + [ + "▁Motto", + -12.889481544494629 + ], + [ + "1860", + -12.889788627624512 + ], + [ + "▁defective", + -12.889803886413574 + ], + [ + "brewing", + -12.889852523803711 + ], + [ + "linguistic", + 
-12.890243530273438 + ], + [ + "▁Hopkins", + -12.890265464782715 + ], + [ + "▁(2012)", + -12.89030933380127 + ], + [ + "crease", + -12.890436172485352 + ], + [ + "▁Versicherungs", + -12.89052677154541 + ], + [ + "▁Noble", + -12.890752792358398 + ], + [ + "▁Bekannt", + -12.890896797180176 + ], + [ + "▁vorstellen", + -12.89095401763916 + ], + [ + "▁suburban", + -12.890970230102539 + ], + [ + "DAC", + -12.890995025634766 + ], + [ + "▁scatter", + -12.89103889465332 + ], + [ + "▁Artificial", + -12.8910551071167 + ], + [ + "▁reactor", + -12.891073226928711 + ], + [ + "▁modelling", + -12.89108943939209 + ], + [ + "▁Holder", + -12.891148567199707 + ], + [ + "athon", + -12.891149520874023 + ], + [ + "147", + -12.891190528869629 + ], + [ + "▁stagn", + -12.891257286071777 + ], + [ + "ARY", + -12.891261100769043 + ], + [ + "Space", + -12.89126968383789 + ], + [ + "▁Gibson", + -12.891718864440918 + ], + [ + "▁Investigator", + -12.89173698425293 + ], + [ + "▁1914", + -12.891818046569824 + ], + [ + "▁Muhammad", + -12.891868591308594 + ], + [ + "▁shove", + -12.892073631286621 + ], + [ + "▁erklären", + -12.892276763916016 + ], + [ + "▁abdomen", + -12.892277717590332 + ], + [ + "▁Mazda", + -12.892349243164062 + ], + [ + "▁hemo", + -12.892364501953125 + ], + [ + "National", + -12.892455101013184 + ], + [ + "starken", + -12.89267635345459 + ], + [ + "▁Cyprus", + -12.892683982849121 + ], + [ + "▁tread", + -12.892721176147461 + ], + [ + "▁sweetness", + -12.892725944519043 + ], + [ + "stunden", + -12.892790794372559 + ], + [ + "▁couverture", + -12.893059730529785 + ], + [ + "▁Successful", + -12.893060684204102 + ], + [ + "▁oublier", + -12.893171310424805 + ], + [ + "▁esential", + -12.893203735351562 + ], + [ + "estival", + -12.89321231842041 + ], + [ + "gnac", + -12.893280029296875 + ], + [ + "▁Basement", + -12.893457412719727 + ], + [ + "presumably", + -12.893497467041016 + ], + [ + "▁mourn", + -12.893561363220215 + ], + [ + "armée", + -12.893677711486816 + ], + [ + "148", + -12.893845558166504 + ], + [ + "▁residue", + -12.894006729125977 + ], + [ + "▁metalic", + -12.89404296875 + ], + [ + "▁Zell", + -12.89425277709961 + ], + [ + "Build", + -12.894280433654785 + ], + [ + "▁prevalence", + -12.894312858581543 + ], + [ + "▁wrestling", + -12.894312858581543 + ], + [ + "▁ascuns", + -12.894325256347656 + ], + [ + "Sacred", + -12.894340515136719 + ], + [ + "Tec", + -12.89438533782959 + ], + [ + "▁Kindergarten", + -12.894389152526855 + ], + [ + "bindung", + -12.894464492797852 + ], + [ + "▁ritm", + -12.894545555114746 + ], + [ + "▁triste", + -12.894651412963867 + ], + [ + "▁introdus", + -12.894758224487305 + ], + [ + "/2016", + -12.894824028015137 + ], + [ + "▁română", + -12.894899368286133 + ], + [ + "▁bibli", + -12.89490032196045 + ], + [ + "▁cigar", + -12.894913673400879 + ], + [ + "Rie", + -12.894990921020508 + ], + [ + "▁intentional", + -12.894999504089355 + ], + [ + "▁cuprins", + -12.895098686218262 + ], + [ + "remarkably", + -12.895129203796387 + ], + [ + "▁printemps", + -12.895133972167969 + ], + [ + "▁declining", + -12.895171165466309 + ], + [ + "Magazin", + -12.89552116394043 + ], + [ + "▁săptămână", + -12.895537376403809 + ], + [ + "▁vérifier", + -12.895549774169922 + ], + [ + "▁Speise", + -12.895584106445312 + ], + [ + "▁reteta", + -12.8956298828125 + ], + [ + "heed", + -12.895772933959961 + ], + [ + "▁Compliance", + -12.895946502685547 + ], + [ + "▁embroidery", + -12.895946502685547 + ], + [ + "cried", + -12.896025657653809 + ], + [ + "▁(„", + -12.896282196044922 + ], + [ + "▁heck", + -12.89629077911377 + 
], + [ + "▁sadness", + -12.896501541137695 + ], + [ + "▁impulse", + -12.896585464477539 + ], + [ + "ATH", + -12.896740913391113 + ], + [ + "▁lavender", + -12.896773338317871 + ], + [ + "uiesc", + -12.896790504455566 + ], + [ + "▁Disorder", + -12.896876335144043 + ], + [ + "stroke", + -12.896991729736328 + ], + [ + "▁piaţ", + -12.8970365524292 + ], + [ + "ournée", + -12.897049903869629 + ], + [ + "▁Barnes", + -12.8971586227417 + ], + [ + "▁scăzut", + -12.897172927856445 + ], + [ + "▁équipements", + -12.89725112915039 + ], + [ + "OND", + -12.897375106811523 + ], + [ + "▁Compet", + -12.897424697875977 + ], + [ + "▁Bestell", + -12.89748477935791 + ], + [ + "▁immédiatement", + -12.897587776184082 + ], + [ + "aparut", + -12.89759635925293 + ], + [ + "▁rainfall", + -12.897882461547852 + ], + [ + "oreille", + -12.89797306060791 + ], + [ + "▁ministère", + -12.898014068603516 + ], + [ + "iris", + -12.898140907287598 + ], + [ + "dyna", + -12.898279190063477 + ], + [ + "drücken", + -12.898343086242676 + ], + [ + "▁détect", + -12.89834976196289 + ], + [ + "▁fonctionnalité", + -12.89840030670166 + ], + [ + "▁imbalance", + -12.89840030670166 + ], + [ + "▁unpredictable", + -12.89840030670166 + ], + [ + "▁literar", + -12.89846134185791 + ], + [ + "▁Windsor", + -12.898472785949707 + ], + [ + "▁Unlimited", + -12.898481369018555 + ], + [ + "colour", + -12.898674964904785 + ], + [ + "▁Portfolio", + -12.898810386657715 + ], + [ + "149", + -12.898883819580078 + ], + [ + "volution", + -12.898890495300293 + ], + [ + "▁folgende", + -12.899078369140625 + ], + [ + "▁arbitration", + -12.899105072021484 + ], + [ + "kicking", + -12.89913558959961 + ], + [ + "zügig", + -12.89923095703125 + ], + [ + "▁1941", + -12.899311065673828 + ], + [ + "▁Drake", + -12.89955997467041 + ], + [ + "▁ausführlich", + -12.899630546569824 + ], + [ + "▁chaussure", + -12.899630546569824 + ], + [ + "▁intestinal", + -12.89976692199707 + ], + [ + "▁pilgrim", + -12.900040626525879 + ], + [ + "▁Bark", + -12.900142669677734 + ], + [ + "between", + -12.900157928466797 + ], + [ + "disposed", + -12.900175094604492 + ], + [ + "▁Dylan", + -12.900218963623047 + ], + [ + "ств", + -12.900253295898438 + ], + [ + "NOR", + -12.900287628173828 + ], + [ + "traces", + -12.90038776397705 + ], + [ + "▁moindre", + -12.900500297546387 + ], + [ + "▁$10,000", + -12.900552749633789 + ], + [ + "212", + -12.900599479675293 + ], + [ + "wusste", + -12.900659561157227 + ], + [ + "▁predictable", + -12.900671005249023 + ], + [ + "poţi", + -12.900679588317871 + ], + [ + "▁Celsius", + -12.900860786437988 + ], + [ + "gebunden", + -12.90086841583252 + ], + [ + "▁Legacy", + -12.900891304016113 + ], + [ + "movers", + -12.90090274810791 + ], + [ + "▁concret", + -12.90098762512207 + ], + [ + "▁simpla", + -12.901050567626953 + ], + [ + "rechnet", + -12.901103973388672 + ], + [ + "▁certainty", + -12.901144981384277 + ], + [ + "entrepreneurship", + -12.901153564453125 + ], + [ + "kohl", + -12.901289939880371 + ], + [ + "▁curte", + -12.901311874389648 + ], + [ + "▁Forbes", + -12.901411056518555 + ], + [ + "▁Zusatz", + -12.901535987854004 + ], + [ + "blending", + -12.90163803100586 + ], + [ + "▁variat", + -12.901642799377441 + ], + [ + "▁galaxy", + -12.90168285369873 + ], + [ + "▁safari", + -12.90168571472168 + ], + [ + "▁municipalities", + -12.9017972946167 + ], + [ + "▁Drept", + -12.90180778503418 + ], + [ + "aufnahme", + -12.902128219604492 + ], + [ + "▁endorse", + -12.902223587036133 + ], + [ + "einrichtung", + -12.902244567871094 + ], + [ + "Sync", + -12.902270317077637 + ], + [ + 
"abide", + -12.902323722839355 + ], + [ + "brushed", + -12.902350425720215 + ], + [ + "▁actiune", + -12.902410507202148 + ], + [ + "quaint", + -12.902498245239258 + ], + [ + "▁volatility", + -12.902504920959473 + ], + [ + "▁repetitive", + -12.902505874633789 + ], + [ + "▁découvr", + -12.902560234069824 + ], + [ + "Totodat", + -12.902585983276367 + ], + [ + "▁românesc", + -12.902682304382324 + ], + [ + "▁tempting", + -12.902772903442383 + ], + [ + "thesis", + -12.902947425842285 + ], + [ + "secure", + -12.903013229370117 + ], + [ + "delt", + -12.903019905090332 + ], + [ + "▁şef", + -12.903167724609375 + ], + [ + "▁epidemic", + -12.903326988220215 + ], + [ + "▁Appliance", + -12.903327941894531 + ], + [ + "cearcă", + -12.903331756591797 + ], + [ + "▁lodging", + -12.903361320495605 + ], + [ + "▁photographed", + -12.903507232666016 + ], + [ + "geschlagen", + -12.903794288635254 + ], + [ + "▁Methodist", + -12.90380859375 + ], + [ + "▁Transit", + -12.90389347076416 + ], + [ + "▁Länder", + -12.903934478759766 + ], + [ + "villa", + -12.903986930847168 + ], + [ + "▁toilette", + -12.904031753540039 + ], + [ + "anno", + -12.904074668884277 + ], + [ + "▁Aufnahme", + -12.904091835021973 + ], + [ + "▁Coral", + -12.904099464416504 + ], + [ + "pourraient", + -12.904129981994629 + ], + [ + "▁digestion", + -12.904245376586914 + ], + [ + "▁Vacation", + -12.904274940490723 + ], + [ + "▁Rugby", + -12.904275894165039 + ], + [ + "MIC", + -12.904311180114746 + ], + [ + "▁choc", + -12.904417991638184 + ], + [ + "2002", + -12.904492378234863 + ], + [ + "gestion", + -12.904674530029297 + ], + [ + "▁Zoom", + -12.904745101928711 + ], + [ + "essor", + -12.904763221740723 + ], + [ + "weighed", + -12.904793739318848 + ], + [ + "▁dispus", + -12.904987335205078 + ], + [ + "▁redemption", + -12.90502643585205 + ], + [ + "▁plaster", + -12.905071258544922 + ], + [ + "▁Quilt", + -12.90507698059082 + ], + [ + "▁teritoriul", + -12.905088424682617 + ], + [ + "ndern", + -12.905097961425781 + ], + [ + "▁expired", + -12.905105590820312 + ], + [ + "▁Tribunal", + -12.905122756958008 + ], + [ + "occupation", + -12.9052152633667 + ], + [ + "▁woodland", + -12.905248641967773 + ], + [ + "vieux", + -12.905254364013672 + ], + [ + "▁Midland", + -12.905465126037598 + ], + [ + "gât", + -12.90571117401123 + ], + [ + "électricité", + -12.905800819396973 + ], + [ + "▁vanzare", + -12.905811309814453 + ], + [ + "biologi", + -12.905961036682129 + ], + [ + "▁vive", + -12.906060218811035 + ], + [ + "▁Alarm", + -12.906097412109375 + ], + [ + "▁experiență", + -12.9061279296875 + ], + [ + "▁Loch", + -12.906133651733398 + ], + [ + "▁Pedro", + -12.906194686889648 + ], + [ + "▁detergent", + -12.906217575073242 + ], + [ + "language", + -12.906554222106934 + ], + [ + "▁sedan", + -12.906655311584473 + ], + [ + "▁Brady", + -12.906736373901367 + ], + [ + "▁compus", + -12.906976699829102 + ], + [ + "▁landfill", + -12.906982421875 + ], + [ + "giu", + -12.907039642333984 + ], + [ + "beziehung", + -12.9070405960083 + ], + [ + "▁picior", + -12.907184600830078 + ], + [ + "ALI", + -12.907235145568848 + ], + [ + "▁Commander", + -12.907256126403809 + ], + [ + "EPS", + -12.907303810119629 + ], + [ + "▁Textil", + -12.907320022583008 + ], + [ + "▁industria", + -12.907339096069336 + ], + [ + "lox", + -12.907365798950195 + ], + [ + "▁eclectic", + -12.907453536987305 + ], + [ + "▁gracious", + -12.907477378845215 + ], + [ + "Uniunea", + -12.907525062561035 + ], + [ + "bps", + -12.90754222869873 + ], + [ + "▁entertained", + -12.907634735107422 + ], + [ + "depinde", + 
-12.907767295837402 + ], + [ + "▁daylight", + -12.907893180847168 + ], + [ + "▁résistance", + -12.907995223999023 + ], + [ + "ARN", + -12.908194541931152 + ], + [ + "▁unavailable", + -12.908201217651367 + ], + [ + "Curtea", + -12.908390045166016 + ], + [ + "▁pores", + -12.908502578735352 + ], + [ + "▁Tonight", + -12.908649444580078 + ], + [ + "▁datori", + -12.90869426727295 + ], + [ + "▁gezielt", + -12.908703804016113 + ], + [ + "▁rupture", + -12.90875244140625 + ], + [ + "▁disput", + -12.908848762512207 + ], + [ + "▁sonstige", + -12.908895492553711 + ], + [ + "▁Ordnung", + -12.90910816192627 + ], + [ + "▁beschrieben", + -12.909114837646484 + ], + [ + "▁Rainbow", + -12.90911865234375 + ], + [ + "▁Werkzeug", + -12.909136772155762 + ], + [ + "GIN", + -12.909354209899902 + ], + [ + "facilitating", + -12.909490585327148 + ], + [ + "hunt", + -12.90955638885498 + ], + [ + "▁Serving", + -12.909673690795898 + ], + [ + "Writ", + -12.909692764282227 + ], + [ + "requisite", + -12.909798622131348 + ], + [ + "▁Kerry", + -12.90989875793457 + ], + [ + "▁riesig", + -12.909957885742188 + ], + [ + "▁Healing", + -12.91030502319336 + ], + [ + "▁1954", + -12.910365104675293 + ], + [ + "▁mousse", + -12.910428047180176 + ], + [ + "▁Positive", + -12.910764694213867 + ], + [ + "embodie", + -12.910772323608398 + ], + [ + "▁penetrate", + -12.910774230957031 + ], + [ + "endorsed", + -12.910882949829102 + ], + [ + "▁situatia", + -12.910927772521973 + ], + [ + "▁Unity", + -12.911083221435547 + ], + [ + "142", + -12.911102294921875 + ], + [ + "▁farmhouse", + -12.911138534545898 + ], + [ + "▁Handbook", + -12.911368370056152 + ], + [ + "▁symbolic", + -12.911378860473633 + ], + [ + "pristine", + -12.911439895629883 + ], + [ + "moitié", + -12.911595344543457 + ], + [ + "▁Sessions", + -12.912017822265625 + ], + [ + "technisch", + -12.912116050720215 + ], + [ + "▁lesquel", + -12.912148475646973 + ], + [ + "▁electronically", + -12.912208557128906 + ], + [ + "▁modificat", + -12.912240982055664 + ], + [ + "▁adjoin", + -12.912242889404297 + ], + [ + "actualité", + -12.912256240844727 + ], + [ + "vati", + -12.91229248046875 + ], + [ + "VENT", + -12.912299156188965 + ], + [ + "▁salsa", + -12.912333488464355 + ], + [ + "acupunctur", + -12.912424087524414 + ], + [ + "▁Opportunity", + -12.912424087524414 + ], + [ + "▁Inspection", + -12.912425994873047 + ], + [ + "▁vereinbart", + -12.912425994873047 + ], + [ + "▁Residents", + -12.912426948547363 + ], + [ + "▁perennial", + -12.91242790222168 + ], + [ + "CHAN", + -12.912555694580078 + ], + [ + "Search", + -12.912572860717773 + ], + [ + "UTE", + -12.912696838378906 + ], + [ + "▁Lens", + -12.912703514099121 + ], + [ + "▁Banner", + -12.91281509399414 + ], + [ + "aménagement", + -12.912839889526367 + ], + [ + "▁Decision", + -12.91286849975586 + ], + [ + "▁ferr", + -12.912869453430176 + ], + [ + "▁Transformation", + -12.912878036499023 + ], + [ + "▁Stamm", + -12.912955284118652 + ], + [ + "▁Galerie", + -12.913003921508789 + ], + [ + "onny", + -12.913126945495605 + ], + [ + "▁caption", + -12.913195610046387 + ], + [ + "▁viitorul", + -12.91323471069336 + ], + [ + "▁professionelle", + -12.913281440734863 + ], + [ + "drepturile", + -12.913294792175293 + ], + [ + "ylon", + -12.913345336914062 + ], + [ + "Société", + -12.913387298583984 + ], + [ + "AIS", + -12.913456916809082 + ], + [ + "March", + -12.91350269317627 + ], + [ + "▁Rav", + -12.91357707977295 + ], + [ + "▁1946", + -12.913691520690918 + ], + [ + "accompagnement", + -12.913713455200195 + ], + [ + "Liviu", + -12.913716316223145 + ], + [ + 
"▁Appeal", + -12.913826942443848 + ], + [ + "▁sentir", + -12.913952827453613 + ], + [ + "▁Indigenous", + -12.914087295532227 + ], + [ + "▁wizard", + -12.914087295532227 + ], + [ + "▁collateral", + -12.914127349853516 + ], + [ + "▁Proof", + -12.914324760437012 + ], + [ + "▁prze", + -12.914398193359375 + ], + [ + "▁obținut", + -12.91450309753418 + ], + [ + "COP", + -12.914629936218262 + ], + [ + "▁obiect", + -12.914681434631348 + ], + [ + "▁isolate", + -12.914685249328613 + ], + [ + "▁nieder", + -12.914793014526367 + ], + [ + "TECH", + -12.914953231811523 + ], + [ + "▁Sharing", + -12.914998054504395 + ], + [ + "Ideally", + -12.915008544921875 + ], + [ + "▁naked", + -12.915059089660645 + ], + [ + "horaire", + -12.915130615234375 + ], + [ + "▁prelucrare", + -12.915180206298828 + ], + [ + "▁forcément", + -12.915349006652832 + ], + [ + "▁ESPN", + -12.915403366088867 + ], + [ + "▁southwest", + -12.9154634475708 + ], + [ + "▁Timber", + -12.915682792663574 + ], + [ + "kleidung", + -12.915748596191406 + ], + [ + "MJ", + -12.915854454040527 + ], + [ + "Ped", + -12.915889739990234 + ], + [ + "▁lymph", + -12.916181564331055 + ], + [ + "wärme", + -12.916399002075195 + ], + [ + "▁Olivia", + -12.916610717773438 + ], + [ + "Ziua", + -12.916705131530762 + ], + [ + "reihe", + -12.916747093200684 + ], + [ + "▁selfish", + -12.916752815246582 + ], + [ + "▁geography", + -12.916814804077148 + ], + [ + "▁etaj", + -12.916924476623535 + ], + [ + "▁acquis", + -12.91698932647705 + ], + [ + "▁rejoin", + -12.91701602935791 + ], + [ + "7.1", + -12.917097091674805 + ], + [ + "▁paix", + -12.91713809967041 + ], + [ + "tirer", + -12.917284965515137 + ], + [ + "▁clase", + -12.91745662689209 + ], + [ + "▁blink", + -12.917572021484375 + ], + [ + "▁Interface", + -12.917611122131348 + ], + [ + "nado", + -12.917655944824219 + ], + [ + "RIT", + -12.91777515411377 + ], + [ + "ESC", + -12.918120384216309 + ], + [ + "▁carving", + -12.918190002441406 + ], + [ + "▁articolul", + -12.918194770812988 + ], + [ + "▁wreath", + -12.918258666992188 + ], + [ + "▁propaganda", + -12.918266296386719 + ], + [ + "▁Pair", + -12.918267250061035 + ], + [ + "▁pamant", + -12.91831111907959 + ], + [ + "▁venituri", + -12.918357849121094 + ], + [ + "rtz", + -12.91835880279541 + ], + [ + "uddle", + -12.918529510498047 + ], + [ + "uille", + -12.918543815612793 + ], + [ + "▁embed", + -12.918654441833496 + ], + [ + "0.05", + -12.918655395507812 + ], + [ + "▁Brighton", + -12.918718338012695 + ], + [ + "estens", + -12.918742179870605 + ], + [ + "▁occupational", + -12.918862342834473 + ], + [ + "ем", + -12.918890953063965 + ], + [ + "wünsche", + -12.919081687927246 + ], + [ + "▁Poetry", + -12.91909408569336 + ], + [ + "▁visualize", + -12.919109344482422 + ], + [ + "Across", + -12.919121742248535 + ], + [ + "▁essentielle", + -12.919123649597168 + ], + [ + "beratung", + -12.919143676757812 + ], + [ + "▁Guidelines", + -12.91919231414795 + ], + [ + "▁Fehl", + -12.919198036193848 + ], + [ + "▁liberty", + -12.91921329498291 + ], + [ + "▁Investigation", + -12.91922378540039 + ], + [ + "▁sunrise", + -12.919266700744629 + ], + [ + "▁12:00", + -12.919541358947754 + ], + [ + "venind", + -12.919583320617676 + ], + [ + "▁lotion", + -12.919655799865723 + ], + [ + "conscious", + -12.91968822479248 + ], + [ + "logists", + -12.91973876953125 + ], + [ + "▁judecător", + -12.919893264770508 + ], + [ + "▁Ecuador", + -12.919928550720215 + ], + [ + "▁ambulance", + -12.91994857788086 + ], + [ + "▁Already", + -12.920026779174805 + ], + [ + "▁eröffnet", + -12.920090675354004 + ], + [ + 
"▁naval", + -12.92010498046875 + ], + [ + "▁imposibil", + -12.92011547088623 + ], + [ + "▁Merry", + -12.92011833190918 + ], + [ + "▁Duncan", + -12.920272827148438 + ], + [ + "▁léger", + -12.9203519821167 + ], + [ + "▁delta", + -12.920391082763672 + ], + [ + "▁Machinery", + -12.920578002929688 + ], + [ + "▁craftsmanship", + -12.920766830444336 + ], + [ + "▁angezeigt", + -12.9207763671875 + ], + [ + "▁formidable", + -12.9207763671875 + ], + [ + "▁Startup", + -12.920878410339355 + ], + [ + "venus", + -12.920969009399414 + ], + [ + "▁tannin", + -12.921019554138184 + ], + [ + "collaborating", + -12.921128273010254 + ], + [ + "▁abrupt", + -12.921152114868164 + ], + [ + "emergence", + -12.921171188354492 + ], + [ + "Dienstleistungen", + -12.921197891235352 + ], + [ + "▁liefert", + -12.921217918395996 + ], + [ + "engagement", + -12.921222686767578 + ], + [ + "▁maximise", + -12.921304702758789 + ], + [ + "modeled", + -12.9214448928833 + ], + [ + "▁crane", + -12.92148208618164 + ], + [ + "▁effortless", + -12.921540260314941 + ], + [ + "▁Buffet", + -12.92160701751709 + ], + [ + "8000", + -12.921648979187012 + ], + [ + "▁Überblick", + -12.921687126159668 + ], + [ + "micro", + -12.921981811523438 + ], + [ + "▁vergleichen", + -12.92204475402832 + ], + [ + "143", + -12.922080993652344 + ], + [ + "5.6", + -12.922094345092773 + ], + [ + "▁odata", + -12.922131538391113 + ], + [ + "▁interviu", + -12.922162055969238 + ], + [ + "▁poliţi", + -12.922375679016113 + ], + [ + "plated", + -12.922383308410645 + ], + [ + "Roman", + -12.922406196594238 + ], + [ + "▁satisfactory", + -12.922453880310059 + ], + [ + "▁unanimous", + -12.922459602355957 + ], + [ + "▁întâln", + -12.922464370727539 + ], + [ + "nonsense", + -12.922558784484863 + ], + [ + "▁HOW", + -12.922616004943848 + ], + [ + "prezinta", + -12.922639846801758 + ], + [ + "▁măsura", + -12.9226655960083 + ], + [ + "▁Fuji", + -12.92275619506836 + ], + [ + "▁Meaning", + -12.92278003692627 + ], + [ + "aspiring", + -12.922850608825684 + ], + [ + "▁Suceava", + -12.922863006591797 + ], + [ + "arba", + -12.922983169555664 + ], + [ + "pressive", + -12.922988891601562 + ], + [ + "▁creek", + -12.92301082611084 + ], + [ + "trakt", + -12.923023223876953 + ], + [ + "▁fluffy", + -12.923303604125977 + ], + [ + "▁bateau", + -12.923371315002441 + ], + [ + "ме", + -12.923545837402344 + ], + [ + "UNG", + -12.923609733581543 + ], + [ + "motifs", + -12.923907279968262 + ], + [ + "Type", + -12.923958778381348 + ], + [ + "perçu", + -12.924132347106934 + ], + [ + "singurul", + -12.924139022827148 + ], + [ + "▁(2011)", + -12.92418384552002 + ], + [ + "▁hemp", + -12.924263954162598 + ], + [ + "betroffenen", + -12.92431640625 + ], + [ + "▁sermon", + -12.924369812011719 + ], + [ + "AID", + -12.924545288085938 + ], + [ + "3.7", + -12.924627304077148 + ], + [ + "▁heiß", + -12.92463207244873 + ], + [ + "▁bolnav", + -12.924982070922852 + ], + [ + "First", + -12.924995422363281 + ], + [ + "▁interrupt", + -12.925040245056152 + ], + [ + "phag", + -12.925106048583984 + ], + [ + "235", + -12.925201416015625 + ], + [ + "▁discoveries", + -12.925262451171875 + ], + [ + "▁Wellington", + -12.925263404846191 + ], + [ + "▁wechseln", + -12.925298690795898 + ], + [ + "▁strategically", + -12.925379753112793 + ], + [ + "▁iphone", + -12.925440788269043 + ], + [ + "geteilt", + -12.925646781921387 + ], + [ + "generative", + -12.925748825073242 + ], + [ + "▁Monroe", + -12.925806045532227 + ], + [ + "▁Execut", + -12.925863265991211 + ], + [ + "▁knitting", + -12.925931930541992 + ], + [ + "▁Couple", + 
-12.925939559936523 + ], + [ + "▁Shade", + -12.926020622253418 + ], + [ + "▁Taj", + -12.926060676574707 + ], + [ + "950", + -12.926077842712402 + ], + [ + "boiled", + -12.92609977722168 + ], + [ + "▁mixes", + -12.926130294799805 + ], + [ + "betroffene", + -12.926156044006348 + ], + [ + "▁continuation", + -12.926169395446777 + ], + [ + "▁begleitet", + -12.926226615905762 + ], + [ + "▁numerical", + -12.926281929016113 + ], + [ + "▁(2013)", + -12.92630386352539 + ], + [ + "▁nourish", + -12.926399230957031 + ], + [ + "oricar", + -12.926485061645508 + ], + [ + "focus", + -12.926486015319824 + ], + [ + "▁Crazy", + -12.926651000976562 + ], + [ + "▁ascend", + -12.926671028137207 + ], + [ + "▁vinde", + -12.926855087280273 + ], + [ + "roar", + -12.926874160766602 + ], + [ + "Vac", + -12.926929473876953 + ], + [ + "▁Zuschauer", + -12.927068710327148 + ], + [ + "izeze", + -12.927179336547852 + ], + [ + "▁Mindest", + -12.92721939086914 + ], + [ + "lingual", + -12.927229881286621 + ], + [ + "▁violet", + -12.927264213562012 + ], + [ + "▁Opfer", + -12.927299499511719 + ], + [ + "ARS", + -12.927431106567383 + ], + [ + "4.7", + -12.92744255065918 + ], + [ + "millennial", + -12.927492141723633 + ], + [ + "▁striv", + -12.927639961242676 + ], + [ + "▁bishop", + -12.927680015563965 + ], + [ + "▁Durham", + -12.927708625793457 + ], + [ + "opathic", + -12.927817344665527 + ], + [ + "Where", + -12.927999496459961 + ], + [ + "▁Rider", + -12.928030014038086 + ], + [ + "▁Reid", + -12.928030967712402 + ], + [ + "stumbled", + -12.928156852722168 + ], + [ + "deep", + -12.92827320098877 + ], + [ + "▁11:00", + -12.928340911865234 + ], + [ + "▁Essex", + -12.928380966186523 + ], + [ + "▁Analyst", + -12.928397178649902 + ], + [ + "feel", + -12.928546905517578 + ], + [ + "▁rave", + -12.928601264953613 + ], + [ + "▁Eddie", + -12.928631782531738 + ], + [ + "▁communiqué", + -12.928756713867188 + ], + [ + "[/", + -12.928791046142578 + ], + [ + "▁Tho", + -12.929011344909668 + ], + [ + "ffentlichkeit", + -12.929019927978516 + ], + [ + "instrument", + -12.929126739501953 + ], + [ + "▁metropolitan", + -12.929179191589355 + ], + [ + "▁experienţ", + -12.929181098937988 + ], + [ + "East", + -12.929198265075684 + ], + [ + "Compared", + -12.929434776306152 + ], + [ + "worn", + -12.929484367370605 + ], + [ + "berufliche", + -12.92966365814209 + ], + [ + "▁Umstände", + -12.929710388183594 + ], + [ + "individuellen", + -12.929901123046875 + ], + [ + "siehe", + -12.929912567138672 + ], + [ + "▁sfarsit", + -12.929969787597656 + ], + [ + "▁Strength", + -12.929999351501465 + ], + [ + "▁prejudice", + -12.930024147033691 + ], + [ + "▁shutdown", + -12.930159568786621 + ], + [ + "chatting", + -12.93022346496582 + ], + [ + "▁Gerne", + -12.930227279663086 + ], + [ + "▁Yum", + -12.930305480957031 + ], + [ + "▁coastline", + -12.930387496948242 + ], + [ + "▁headboard", + -12.930623054504395 + ], + [ + "▁politische", + -12.930768966674805 + ], + [ + "Sub", + -12.930838584899902 + ], + [ + "▁Henderson", + -12.930870056152344 + ], + [ + "▁astonishing", + -12.930870056152344 + ], + [ + "▁Dresden", + -12.930871963500977 + ], + [ + "▁strawberry", + -12.93088436126709 + ], + [ + "prenez", + -12.930889129638672 + ], + [ + "▁Monaco", + -12.930912971496582 + ], + [ + "▁empowered", + -12.930953025817871 + ], + [ + "fäl", + -12.93109130859375 + ], + [ + "▁creier", + -12.931120872497559 + ], + [ + "▁Equ", + -12.931300163269043 + ], + [ + "▁Selling", + -12.931379318237305 + ], + [ + "▁$35", + -12.931483268737793 + ], + [ + "konto", + -12.931503295898438 + ], + [ + 
"▁Procedure", + -12.931715965270996 + ], + [ + "▁reduziert", + -12.931715965270996 + ], + [ + "▁royalty", + -12.931740760803223 + ], + [ + "wyn", + -12.931756019592285 + ], + [ + "▁Unfall", + -12.932141304016113 + ], + [ + "NAT", + -12.932161331176758 + ], + [ + "▁grafic", + -12.93251895904541 + ], + [ + "▁Collective", + -12.932563781738281 + ], + [ + "▁Computing", + -12.932564735412598 + ], + [ + "▁Established", + -12.932594299316406 + ], + [ + "▁zest", + -12.932598114013672 + ], + [ + "venez", + -12.932611465454102 + ], + [ + "follow", + -12.9326171875 + ], + [ + "▁Motivation", + -12.932640075683594 + ], + [ + "▁dictator", + -12.932755470275879 + ], + [ + "whichever", + -12.93281078338623 + ], + [ + "▁întâmpl", + -12.93293285369873 + ], + [ + "Flüchtling", + -12.932987213134766 + ], + [ + "EMI", + -12.933015823364258 + ], + [ + "404", + -12.933019638061523 + ], + [ + "ICK", + -12.93302059173584 + ], + [ + "emplacement", + -12.933191299438477 + ], + [ + "complete", + -12.933349609375 + ], + [ + "advising", + -12.933412551879883 + ], + [ + "▁Administrative", + -12.933481216430664 + ], + [ + "▁deviation", + -12.933496475219727 + ], + [ + "▁experienț", + -12.933500289916992 + ], + [ + "lethor", + -12.933996200561523 + ], + [ + "▁compress", + -12.934081077575684 + ], + [ + "rival", + -12.934173583984375 + ], + [ + "reprendre", + -12.934186935424805 + ], + [ + "ugi", + -12.934266090393066 + ], + [ + "▁Invitation", + -12.934267044067383 + ], + [ + "▁retina", + -12.934332847595215 + ], + [ + "▁farther", + -12.934335708618164 + ], + [ + "▁fenêtre", + -12.934799194335938 + ], + [ + "6-7", + -12.934815406799316 + ], + [ + "zhou", + -12.934834480285645 + ], + [ + "▁Piano", + -12.934840202331543 + ], + [ + "▁Congrats", + -12.935114860534668 + ], + [ + "▁Configur", + -12.935131072998047 + ], + [ + "▁superficial", + -12.935179710388184 + ], + [ + "▁melting", + -12.935315132141113 + ], + [ + "▁raspunde", + -12.935626983642578 + ], + [ + "▁drip", + -12.93564224243164 + ], + [ + "östlich", + -12.9358491897583 + ], + [ + "189", + -12.935925483703613 + ], + [ + "▁Ludwig", + -12.935959815979004 + ], + [ + "▁keto", + -12.935985565185547 + ], + [ + "▁Bogdan", + -12.936013221740723 + ], + [ + "▁contracted", + -12.936029434204102 + ], + [ + "▁revive", + -12.936100006103516 + ], + [ + "▁cristal", + -12.936232566833496 + ], + [ + "▁mailbox", + -12.936257362365723 + ], + [ + "președintele", + -12.936559677124023 + ], + [ + "▁seekers", + -12.936627388000488 + ], + [ + "func", + -12.936904907226562 + ], + [ + "▁Markus", + -12.93691349029541 + ], + [ + "Unter", + -12.936923027038574 + ], + [ + "▁übertragen", + -12.937003135681152 + ], + [ + "▁adaptive", + -12.937024116516113 + ], + [ + "caster", + -12.937051773071289 + ], + [ + "▁geek", + -12.937164306640625 + ], + [ + "▁réservation", + -12.937236785888672 + ], + [ + "▁irritation", + -12.937240600585938 + ], + [ + "▁HDMI", + -12.937346458435059 + ], + [ + "Seeing", + -12.937485694885254 + ], + [ + "▁genul", + -12.937569618225098 + ], + [ + "▁catastrophe", + -12.937662124633789 + ], + [ + "▁Tweet", + -12.937665939331055 + ], + [ + "TZ", + -12.937729835510254 + ], + [ + "▁credible", + -12.937946319580078 + ], + [ + "▁cobor", + -12.938064575195312 + ], + [ + "▁realizeaz", + -12.938159942626953 + ], + [ + "journal", + -12.938274383544922 + ], + [ + "▁shaking", + -12.938532829284668 + ], + [ + "3-6", + -12.938572883605957 + ], + [ + "▁beneficiaz", + -12.938605308532715 + ], + [ + "▁Frankreich", + -12.938633918762207 + ], + [ + "committing", + -12.9386568069458 + ], + [ + 
"AMS", + -12.938835144042969 + ], + [ + "▁Feli", + -12.939007759094238 + ], + [ + "▁Producer", + -12.939023971557617 + ], + [ + "▁übrig", + -12.93940544128418 + ], + [ + "gemeinde", + -12.939593315124512 + ], + [ + "should", + -12.939799308776855 + ], + [ + "▁neurons", + -12.939799308776855 + ], + [ + "▁Agenda", + -12.939833641052246 + ], + [ + "▁hashtag", + -12.939896583557129 + ], + [ + "▁confortabil", + -12.939897537231445 + ], + [ + "520", + -12.940008163452148 + ], + [ + "bonded", + -12.940033912658691 + ], + [ + "▁următoare", + -12.940191268920898 + ], + [ + "▁volatile", + -12.940223693847656 + ], + [ + "infamous", + -12.940225601196289 + ], + [ + "seară", + -12.940229415893555 + ], + [ + "▁Sorge", + -12.940346717834473 + ], + [ + "▁Beiträge", + -12.940420150756836 + ], + [ + "▁îndeplin", + -12.940449714660645 + ], + [ + "gespräch", + -12.940649032592773 + ], + [ + "▁joueur", + -12.940701484680176 + ], + [ + "▁outsourcing", + -12.940701484680176 + ], + [ + "▁Guvernul", + -12.940814018249512 + ], + [ + "6-2", + -12.940818786621094 + ], + [ + "▁prioritize", + -12.941068649291992 + ], + [ + "▁duminică", + -12.941076278686523 + ], + [ + "▁resignation", + -12.941076278686523 + ], + [ + "▁Converter", + -12.941079139709473 + ], + [ + "hereby", + -12.941155433654785 + ], + [ + "▁stresses", + -12.941299438476562 + ], + [ + "▁brun", + -12.941415786743164 + ], + [ + "▁elev", + -12.941423416137695 + ], + [ + "▁Skip", + -12.941479682922363 + ], + [ + "540", + -12.941499710083008 + ], + [ + "TURE", + -12.941603660583496 + ], + [ + "▁Lynch", + -12.941635131835938 + ], + [ + "▁preveni", + -12.941643714904785 + ], + [ + "compatible", + -12.941692352294922 + ], + [ + "surveyed", + -12.941702842712402 + ], + [ + "▁Ausnahme", + -12.941713333129883 + ], + [ + "▁medicul", + -12.941812515258789 + ], + [ + "▁subtil", + -12.941865921020508 + ], + [ + "▁Quali", + -12.941890716552734 + ], + [ + "▁techno", + -12.941900253295898 + ], + [ + "presently", + -12.94193172454834 + ], + [ + "▁Müller", + -12.941934585571289 + ], + [ + "DIRECT", + -12.941937446594238 + ], + [ + "schuld", + -12.941944122314453 + ], + [ + "▁Bloomberg", + -12.941994667053223 + ], + [ + "feuer", + -12.942181587219238 + ], + [ + "▁Pharmacy", + -12.942270278930664 + ], + [ + "▁Schnitt", + -12.942301750183105 + ], + [ + "186", + -12.942333221435547 + ], + [ + "peaks", + -12.942355155944824 + ], + [ + "▁Gemeinsam", + -12.94235897064209 + ], + [ + "▁récemment", + -12.94235897064209 + ], + [ + "▁Pascal", + -12.942490577697754 + ], + [ + "filmed", + -12.942523956298828 + ], + [ + "RCA", + -12.942548751831055 + ], + [ + "▁virtuelle", + -12.942622184753418 + ], + [ + "▁dotat", + -12.942630767822266 + ], + [ + "logisch", + -12.942717552185059 + ], + [ + "▁Luck", + -12.943005561828613 + ], + [ + "cosy", + -12.943132400512695 + ], + [ + "▁Awareness", + -12.943216323852539 + ], + [ + "▁gesetzlich", + -12.943263053894043 + ], + [ + "padded", + -12.943306922912598 + ], + [ + "▁Lotus", + -12.943395614624023 + ], + [ + "urging", + -12.9434175491333 + ], + [ + "▁mushroom", + -12.943426132202148 + ], + [ + "▁adultes", + -12.943527221679688 + ], + [ + "▁Coca", + -12.943571090698242 + ], + [ + "▁recev", + -12.943586349487305 + ], + [ + "▁mantra", + -12.943610191345215 + ], + [ + "▁practise", + -12.943644523620605 + ], + [ + "▁acceler", + -12.943663597106934 + ], + [ + "bolster", + -12.943756103515625 + ], + [ + "▁compressed", + -12.943818092346191 + ], + [ + "TIN", + -12.943899154663086 + ], + [ + "▁aromatic", + -12.944236755371094 + ], + [ + "geleitet", + 
-12.944408416748047 + ], + [ + "▁fibr", + -12.944443702697754 + ], + [ + "exécut", + -12.94444751739502 + ], + [ + "▁unconscious", + -12.94456958770752 + ], + [ + "HAR", + -12.944607734680176 + ], + [ + "▁Gregory", + -12.944661140441895 + ], + [ + "▁Manila", + -12.944738388061523 + ], + [ + "ozitate", + -12.944756507873535 + ], + [ + "exemplary", + -12.944803237915039 + ], + [ + "éventuel", + -12.944906234741211 + ], + [ + "▁Craciun", + -12.944930076599121 + ], + [ + "▁tehnologii", + -12.944931030273438 + ], + [ + "▁Despre", + -12.945138931274414 + ], + [ + "▁1917", + -12.945141792297363 + ], + [ + "▁upfront", + -12.945146560668945 + ], + [ + "▁Iulia", + -12.945280075073242 + ], + [ + "▁erwähnt", + -12.945359230041504 + ], + [ + "▁magnesium", + -12.945359230041504 + ], + [ + "▁descriptive", + -12.94536304473877 + ], + [ + "▁consumul", + -12.945364952087402 + ], + [ + "▁10-15", + -12.945423126220703 + ], + [ + "▁erfüllen", + -12.945611953735352 + ], + [ + "gig", + -12.945657730102539 + ], + [ + "430", + -12.945765495300293 + ], + [ + "▁Migration", + -12.945789337158203 + ], + [ + "bră", + -12.94579029083252 + ], + [ + "▁réforme", + -12.945863723754883 + ], + [ + "▁york", + -12.94610595703125 + ], + [ + "dritten", + -12.946109771728516 + ], + [ + "cumva", + -12.946182250976562 + ], + [ + "▁Alumni", + -12.946218490600586 + ], + [ + "▁Ceramic", + -12.946222305297852 + ], + [ + "▁rappelle", + -12.946236610412598 + ], + [ + "▁pianist", + -12.946248054504395 + ], + [ + "twisted", + -12.946306228637695 + ], + [ + "earned", + -12.946432113647461 + ], + [ + "▁Hose", + -12.946514129638672 + ], + [ + "156", + -12.946610450744629 + ], + [ + "▁Salmon", + -12.946687698364258 + ], + [ + "Level", + -12.946913719177246 + ], + [ + "▁swirl", + -12.947052001953125 + ], + [ + "erfahrung", + -12.947061538696289 + ], + [ + "▁liabilities", + -12.947078704833984 + ], + [ + "praxis", + -12.9470853805542 + ], + [ + "IPO", + -12.947089195251465 + ], + [ + "▁screaming", + -12.947092056274414 + ], + [ + "emphasized", + -12.947200775146484 + ], + [ + "DEA", + -12.947260856628418 + ], + [ + "▁dermatolog", + -12.947351455688477 + ], + [ + "▁pacate", + -12.947498321533203 + ], + [ + "▁ansamblu", + -12.947507858276367 + ], + [ + "▁beteiligt", + -12.947509765625 + ], + [ + "▁Needles", + -12.947574615478516 + ], + [ + "▁organisiert", + -12.947607040405273 + ], + [ + "Pacific", + -12.947639465332031 + ], + [ + "actual", + -12.947823524475098 + ], + [ + "prindere", + -12.94801139831543 + ], + [ + "▁Indoor", + -12.948348045349121 + ], + [ + "▁Gewalt", + -12.948431015014648 + ], + [ + "▁rezid", + -12.948507308959961 + ], + [ + "censor", + -12.948522567749023 + ], + [ + "▁unlawful", + -12.94882869720459 + ], + [ + "▁Explain", + -12.948873519897461 + ], + [ + "▁Flame", + -12.948897361755371 + ], + [ + "▁brachte", + -12.948941230773926 + ], + [ + "▁Mustang", + -12.94899845123291 + ], + [ + "ectomy", + -12.949044227600098 + ], + [ + "▁deliberate", + -12.949064254760742 + ], + [ + "▁sparkle", + -12.949225425720215 + ], + [ + "▁inchis", + -12.94926929473877 + ], + [ + "▁Cristian", + -12.949289321899414 + ], + [ + "▁facture", + -12.949291229248047 + ], + [ + "▁Grundstück", + -12.949292182922363 + ], + [ + "außerhalb", + -12.949300765991211 + ], + [ + "coast", + -12.949321746826172 + ], + [ + "anilor", + -12.949396133422852 + ], + [ + "255", + -12.94952392578125 + ], + [ + "nterdisciplinary", + -12.949576377868652 + ], + [ + "▁Isabel", + -12.949655532836914 + ], + [ + "▁Städte", + -12.949701309204102 + ], + [ + "▁cicl", + 
-12.949837684631348 + ], + [ + "▁Zeug", + -12.949905395507812 + ], + [ + "▁Muskel", + -12.949951171875 + ], + [ + "▁indirectly", + -12.950051307678223 + ], + [ + "▁Vorbereitung", + -12.950093269348145 + ], + [ + "MMA", + -12.95012378692627 + ], + [ + "▁pudding", + -12.950197219848633 + ], + [ + "rax", + -12.950389862060547 + ], + [ + "▁Stimmung", + -12.95052433013916 + ], + [ + "▁hierarchy", + -12.95052433013916 + ], + [ + "partie", + -12.950597763061523 + ], + [ + "▁elevate", + -12.950685501098633 + ], + [ + "▁Persian", + -12.950690269470215 + ], + [ + "forensic", + -12.95077896118164 + ], + [ + "Become", + -12.950854301452637 + ], + [ + "leicht", + -12.9508695602417 + ], + [ + "▁staging", + -12.950942039489746 + ], + [ + "▁fühlt", + -12.950965881347656 + ], + [ + "fenster", + -12.950979232788086 + ], + [ + "▁unbelievable", + -12.951089859008789 + ], + [ + "„", + -12.951260566711426 + ], + [ + "▁Guatemala", + -12.951387405395508 + ], + [ + "LET", + -12.95141315460205 + ], + [ + "▁buff", + -12.951454162597656 + ], + [ + "▁Primul", + -12.951626777648926 + ], + [ + "▁mainland", + -12.951702117919922 + ], + [ + "campus", + -12.951923370361328 + ], + [ + "▁gefällt", + -12.952075958251953 + ], + [ + "BAN", + -12.952153205871582 + ], + [ + "finish", + -12.952229499816895 + ], + [ + "accustomed", + -12.952251434326172 + ], + [ + "▁Businesses", + -12.95234203338623 + ], + [ + "▁întreb", + -12.95239543914795 + ], + [ + "▁recomandă", + -12.952425956726074 + ], + [ + "▁pellet", + -12.952474594116211 + ], + [ + "▁GST", + -12.952507972717285 + ], + [ + "SEA", + -12.952601432800293 + ], + [ + "▁categorie", + -12.952631950378418 + ], + [ + "▁convainc", + -12.95268440246582 + ], + [ + "▁considéré", + -12.952739715576172 + ], + [ + "rois", + -12.952853202819824 + ], + [ + "▁thrust", + -12.952898979187012 + ], + [ + "ijk", + -12.953001022338867 + ], + [ + "gefüllt", + -12.953118324279785 + ], + [ + "▁situatii", + -12.953327178955078 + ], + [ + "▁Jacksonville", + -12.95337200164795 + ], + [ + "▁bakery", + -12.953473091125488 + ], + [ + "▁Accident", + -12.953554153442383 + ], + [ + "▁urmeaza", + -12.953572273254395 + ], + [ + "▁crib", + -12.953593254089355 + ], + [ + "getroffen", + -12.953707695007324 + ], + [ + "Based", + -12.953877449035645 + ], + [ + "Including", + -12.95398235321045 + ], + [ + "▁Morocco", + -12.95398235321045 + ], + [ + "▁casserole", + -12.95398235321045 + ], + [ + "▁enquiry", + -12.953983306884766 + ], + [ + "▁pahar", + -12.954017639160156 + ], + [ + "▁Unternehmer", + -12.954025268554688 + ], + [ + "électro", + -12.954068183898926 + ], + [ + "Marie", + -12.95413589477539 + ], + [ + "▁Sno", + -12.954153060913086 + ], + [ + "▁prostate", + -12.954168319702148 + ], + [ + "▁Wallace", + -12.95426082611084 + ], + [ + "empre", + -12.954402923583984 + ], + [ + "▁Multumesc", + -12.954415321350098 + ], + [ + "White", + -12.954675674438477 + ], + [ + "brief", + -12.954751014709473 + ], + [ + "▁kitten", + -12.954751014709473 + ], + [ + "füh", + -12.954780578613281 + ], + [ + "▁mankind", + -12.954821586608887 + ], + [ + "ENE", + -12.95483112335205 + ], + [ + "▁Ethics", + -12.954848289489746 + ], + [ + "▁Realty", + -12.954946517944336 + ], + [ + "▁Emerg", + -12.954988479614258 + ], + [ + "7-8", + -12.955055236816406 + ], + [ + "museum", + -12.955096244812012 + ], + [ + "BRE", + -12.95518970489502 + ], + [ + "▁kilometri", + -12.955282211303711 + ], + [ + "oyaume", + -12.955286026000977 + ], + [ + "▁Cambodia", + -12.955288887023926 + ], + [ + "▁bruit", + -12.955304145812988 + ], + [ + "▁sépar", + 
-12.955334663391113 + ], + [ + "mastered", + -12.9554443359375 + ], + [ + "shake", + -12.955608367919922 + ], + [ + "▁liaison", + -12.955718994140625 + ], + [ + "▁Boulder", + -12.955719947814941 + ], + [ + "▁tortilla", + -12.955720901489258 + ], + [ + "▁Fokus", + -12.955731391906738 + ], + [ + "▁Blair", + -12.95573902130127 + ], + [ + "▁disturbance", + -12.955775260925293 + ], + [ + "geladen", + -12.955843925476074 + ], + [ + "▁sunscreen", + -12.955886840820312 + ], + [ + "▁reuș", + -12.955896377563477 + ], + [ + "▁Braun", + -12.956155776977539 + ], + [ + "▁existente", + -12.956157684326172 + ], + [ + "stift", + -12.956242561340332 + ], + [ + "▁preot", + -12.956387519836426 + ], + [ + "▁doved", + -12.956445693969727 + ], + [ + "sexual", + -12.956488609313965 + ], + [ + "meanwhile", + -12.956583976745605 + ], + [ + "▁legislature", + -12.956583976745605 + ], + [ + "▁vermeiden", + -12.956583976745605 + ], + [ + "▁inequality", + -12.95687484741211 + ], + [ + "▁turc", + -12.956881523132324 + ], + [ + "ви", + -12.95698070526123 + ], + [ + "▁Kontrolle", + -12.95702075958252 + ], + [ + "▁Ursache", + -12.95704174041748 + ], + [ + "▁confess", + -12.95704174041748 + ], + [ + "▁poetic", + -12.957109451293945 + ], + [ + "attention", + -12.957236289978027 + ], + [ + "textured", + -12.957386016845703 + ], + [ + "GES", + -12.957586288452148 + ], + [ + "6-4", + -12.957637786865234 + ], + [ + "Ray", + -12.957696914672852 + ], + [ + "chromat", + -12.957745552062988 + ], + [ + "▁insightful", + -12.957775115966797 + ], + [ + "▁Navigation", + -12.957887649536133 + ], + [ + "▁destiny", + -12.957887649536133 + ], + [ + "▁ergeben", + -12.957892417907715 + ], + [ + "▁versteh", + -12.958090782165527 + ], + [ + "301", + -12.958209037780762 + ], + [ + "▁Exterior", + -12.958321571350098 + ], + [ + "église", + -12.958322525024414 + ], + [ + "▁Failure", + -12.958322525024414 + ], + [ + "▁Patricia", + -12.958324432373047 + ], + [ + "▁geschützt", + -12.958328247070312 + ], + [ + "intrarea", + -12.95833969116211 + ], + [ + "▁Forward", + -12.958368301391602 + ], + [ + "▁Portrait", + -12.95844841003418 + ], + [ + "▁enregistré", + -12.958480834960938 + ], + [ + "▁wagon", + -12.958620071411133 + ], + [ + "stealing", + -12.958879470825195 + ], + [ + "▁Numero", + -12.958880424499512 + ], + [ + "▁tradui", + -12.958986282348633 + ], + [ + "▁klassische", + -12.959033966064453 + ], + [ + "▁profitieren", + -12.959043502807617 + ], + [ + "▁laboratories", + -12.95919132232666 + ], + [ + "▁reconnaissance", + -12.95919132232666 + ], + [ + "ку", + -12.959314346313477 + ], + [ + "▁Petersburg", + -12.959359169006348 + ], + [ + "▁fertility", + -12.959421157836914 + ], + [ + "▁Understand", + -12.959516525268555 + ], + [ + "dehors", + -12.959746360778809 + ], + [ + "▁Knox", + -12.959762573242188 + ], + [ + "software", + -12.959797859191895 + ], + [ + "▁Celebration", + -12.959823608398438 + ], + [ + "4.6", + -12.959897994995117 + ], + [ + "quino", + -12.959930419921875 + ], + [ + "▁endeavour", + -12.960073471069336 + ], + [ + "▁temptation", + -12.960136413574219 + ], + [ + "▁Registry", + -12.96035385131836 + ], + [ + "IMP", + -12.960502624511719 + ], + [ + "bedingt", + -12.960625648498535 + ], + [ + "▁$60", + -12.960846900939941 + ], + [ + "▁Kriterien", + -12.96093463897705 + ], + [ + "▁strawberries", + -12.960943222045898 + ], + [ + "▁conspiracy", + -12.96094799041748 + ], + [ + "▁pouch", + -12.960976600646973 + ], + [ + "▁Alexandria", + -12.961017608642578 + ], + [ + "▁Mick", + -12.961102485656738 + ], + [ + "extra", + -12.961114883422852 + ], 
+ [ + "▁Operator", + -12.961151123046875 + ], + [ + "enduring", + -12.96132755279541 + ], + [ + "▁smash", + -12.961359024047852 + ], + [ + "Euro", + -12.961360931396484 + ], + [ + "▁Nouvelle", + -12.961370468139648 + ], + [ + "▁Raspberry", + -12.961370468139648 + ], + [ + "▁präsentieren", + -12.961380004882812 + ], + [ + "▁electrician", + -12.961404800415039 + ], + [ + "▁cheerful", + -12.961472511291504 + ], + [ + "▁chargé", + -12.961508750915527 + ], + [ + "▁Diskussion", + -12.961511611938477 + ], + [ + "▁surpass", + -12.961604118347168 + ], + [ + "▁Acces", + -12.961701393127441 + ], + [ + "tausend", + -12.961771011352539 + ], + [ + "▁vigorous", + -12.961808204650879 + ], + [ + "▁tava", + -12.961810111999512 + ], + [ + "CHO", + -12.96193790435791 + ], + [ + "▁1951", + -12.961941719055176 + ], + [ + "▁Umsatz", + -12.962019920349121 + ], + [ + "▁slavery", + -12.962055206298828 + ], + [ + "travel", + -12.962294578552246 + ], + [ + "▁correspondent", + -12.962297439575195 + ], + [ + "▁$150", + -12.962307929992676 + ], + [ + "▁stärker", + -12.962594985961914 + ], + [ + "Alb", + -12.96264362335205 + ], + [ + "▁Lopez", + -12.962682723999023 + ], + [ + "▁longueur", + -12.962767601013184 + ], + [ + "▁successive", + -12.962772369384766 + ], + [ + "▁(2015)", + -12.96278190612793 + ], + [ + "teig", + -12.962790489196777 + ], + [ + "custom", + -12.962944984436035 + ], + [ + "TIM", + -12.963099479675293 + ], + [ + "▁Escape", + -12.963174819946289 + ], + [ + "▁Sekunden", + -12.963349342346191 + ], + [ + "tiré", + -12.963444709777832 + ], + [ + "▁chantier", + -12.963489532470703 + ], + [ + "▁saturated", + -12.963555335998535 + ], + [ + "▁confrontation", + -12.963804244995117 + ], + [ + "▁biography", + -12.963805198669434 + ], + [ + "zuerst", + -12.9639892578125 + ], + [ + "▁rencontré", + -12.963991165161133 + ], + [ + "▁harmless", + -12.96412181854248 + ], + [ + "Branche", + -12.964139938354492 + ], + [ + "▁QR", + -12.964380264282227 + ], + [ + "▁Ereignis", + -12.964430809020996 + ], + [ + "▁verkaufen", + -12.96444320678711 + ], + [ + "0:00", + -12.96451187133789 + ], + [ + "Association", + -12.96469783782959 + ], + [ + "▁Santiago", + -12.964865684509277 + ], + [ + "Control", + -12.964993476867676 + ], + [ + "▁Angriff", + -12.9650297164917 + ], + [ + "lase", + -12.96505069732666 + ], + [ + "▁sfaturi", + -12.965224266052246 + ], + [ + "▁Comprehensive", + -12.965304374694824 + ], + [ + "▁Shepherd", + -12.965304374694824 + ], + [ + "▁exponential", + -12.965304374694824 + ], + [ + "▁penetration", + -12.965304374694824 + ], + [ + "▁comble", + -12.965394973754883 + ], + [ + "ionar", + -12.965557098388672 + ], + [ + "slept", + -12.965563774108887 + ], + [ + "▁Spice", + -12.965633392333984 + ], + [ + "mAh", + -12.965688705444336 + ], + [ + "▁Vertreter", + -12.965747833251953 + ], + [ + "fehler", + -12.965752601623535 + ], + [ + "▁Scroll", + -12.96599292755127 + ], + [ + "▁WARRANT", + -12.966179847717285 + ], + [ + "▁minimise", + -12.966326713562012 + ], + [ + "▁Dept", + -12.966474533081055 + ], + [ + "▁urinar", + -12.96661376953125 + ], + [ + "établir", + -12.966619491577148 + ], + [ + "verhältnis", + -12.966713905334473 + ], + [ + "▁glowing", + -12.966979026794434 + ], + [ + "kulturelle", + -12.966984748840332 + ], + [ + "▁Pediatric", + -12.967057228088379 + ], + [ + "▁inconvenience", + -12.967057228088379 + ], + [ + "Antoine", + -12.967121124267578 + ], + [ + "▁Heck", + -12.967164993286133 + ], + [ + "▁couches", + -12.967265129089355 + ], + [ + "▁1938", + -12.967331886291504 + ], + [ + "maybe", + 
-12.967333793640137 + ], + [ + "ETA", + -12.9673433303833 + ], + [ + "▁solaire", + -12.96748161315918 + ], + [ + "▁Zürich", + -12.967495918273926 + ], + [ + "computer", + -12.967545509338379 + ], + [ + "milk", + -12.96756362915039 + ], + [ + "он", + -12.967585563659668 + ], + [ + "modalitate", + -12.967608451843262 + ], + [ + "spanning", + -12.967655181884766 + ], + [ + "▁Crypto", + -12.96774959564209 + ], + [ + "▁Spotify", + -12.967935562133789 + ], + [ + "mycin", + -12.967944145202637 + ], + [ + "▁similarities", + -12.96811294555664 + ], + [ + "▁eclipse", + -12.968377113342285 + ], + [ + "Map", + -12.968610763549805 + ], + [ + "double", + -12.96861743927002 + ], + [ + "corporate", + -12.968734741210938 + ], + [ + "▁Hindi", + -12.968853950500488 + ], + [ + "battling", + -12.968866348266602 + ], + [ + "▁habituel", + -12.969098091125488 + ], + [ + "▁Transition", + -12.969196319580078 + ], + [ + "▁luptă", + -12.96920394897461 + ], + [ + "▁trainee", + -12.969219207763672 + ], + [ + "LIS", + -12.96922492980957 + ], + [ + "▁Vatican", + -12.969254493713379 + ], + [ + "Archived", + -12.9692964553833 + ], + [ + "Connect", + -12.969305038452148 + ], + [ + "▁prealabil", + -12.969307899475098 + ], + [ + "▁Chambre", + -12.969327926635742 + ], + [ + "stuhl", + -12.969440460205078 + ], + [ + "▁arrivé", + -12.969557762145996 + ], + [ + "▁Urteil", + -12.969575881958008 + ], + [ + "▁scrutiny", + -12.969818115234375 + ], + [ + "▁memoir", + -12.969854354858398 + ], + [ + "▁innovant", + -12.9699068069458 + ], + [ + "▁sublime", + -12.969943046569824 + ], + [ + "children", + -12.970004081726074 + ], + [ + "▁Handwerk", + -12.970056533813477 + ], + [ + "▁campuses", + -12.970268249511719 + ], + [ + "▁durabil", + -12.970502853393555 + ], + [ + "▁immersive", + -12.970632553100586 + ], + [ + "▁Magnet", + -12.970732688903809 + ], + [ + "läufe", + -12.970808029174805 + ], + [ + "▁Techno", + -12.970837593078613 + ], + [ + "MAP", + -12.9710693359375 + ], + [ + "7.2", + -12.971145629882812 + ], + [ + "▁Schwimm", + -12.971181869506836 + ], + [ + "BOOK", + -12.971186637878418 + ], + [ + "188", + -12.971441268920898 + ], + [ + "▁Supervisor", + -12.971498489379883 + ], + [ + "prévue", + -12.971691131591797 + ], + [ + "needed", + -12.971813201904297 + ], + [ + "▁creditors", + -12.971822738647461 + ], + [ + "▁brin", + -12.971837043762207 + ], + [ + "▁Neck", + -12.971900939941406 + ], + [ + "▁Salut", + -12.971988677978516 + ], + [ + "▁despair", + -12.972105979919434 + ], + [ + "▁Sauce", + -12.972261428833008 + ], + [ + "▁Westminster", + -12.972335815429688 + ], + [ + "▁langfristig", + -12.972335815429688 + ], + [ + "▁northeast", + -12.972365379333496 + ], + [ + "▁încercat", + -12.972399711608887 + ], + [ + "▁nausea", + -12.972408294677734 + ], + [ + "▁Paypal", + -12.972440719604492 + ], + [ + "▁Arrow", + -12.972469329833984 + ], + [ + "▁Travis", + -12.972633361816406 + ], + [ + "(2009)", + -12.972713470458984 + ], + [ + "▁Rising", + -12.972719192504883 + ], + [ + "termes", + -12.973097801208496 + ], + [ + "Australie", + -12.973154067993164 + ], + [ + "▁scarf", + -12.973187446594238 + ], + [ + "klassischen", + -12.97337818145752 + ], + [ + "▁boug", + -12.973466873168945 + ], + [ + "DOT", + -12.97360610961914 + ], + [ + "▁Trink", + -12.97361946105957 + ], + [ + "▁bestätigt", + -12.97365951538086 + ], + [ + "▁officiel", + -12.97370433807373 + ], + [ + "Produkt", + -12.973873138427734 + ], + [ + "DNA", + -12.974140167236328 + ], + [ + "▁*******", + -12.97426700592041 + ], + [ + "GAR", + -12.974271774291992 + ], + [ + "therapeut", + 
-12.974377632141113 + ], + [ + "187", + -12.974420547485352 + ], + [ + "▁Louisville", + -12.974493026733398 + ], + [ + "▁geöffnet", + -12.97462272644043 + ], + [ + "Watch", + -12.974640846252441 + ], + [ + "85%", + -12.974678993225098 + ], + [ + "▁Candida", + -12.974698066711426 + ], + [ + "▁Kathy", + -12.974703788757324 + ], + [ + "▁Animation", + -12.974711418151855 + ], + [ + "planung", + -12.974715232849121 + ], + [ + "woche", + -12.974730491638184 + ], + [ + "Video", + -12.974966049194336 + ], + [ + "▁Automation", + -12.97507095336914 + ], + [ + "▁foliage", + -12.97507381439209 + ], + [ + "▁evenimentului", + -12.975175857543945 + ], + [ + "SEN", + -12.975362777709961 + ], + [ + "▁Dialog", + -12.975372314453125 + ], + [ + "▁ZIP", + -12.975372314453125 + ], + [ + "▁vieții", + -12.97537612915039 + ], + [ + "▁passionné", + -12.975425720214844 + ], + [ + "▁WOW", + -12.97544002532959 + ], + [ + "ectiv", + -12.975464820861816 + ], + [ + "▁vorbesc", + -12.975482940673828 + ], + [ + "▁computational", + -12.975533485412598 + ], + [ + "▁idiot", + -12.97557258605957 + ], + [ + "▁stigma", + -12.97567081451416 + ], + [ + "▁multumesc", + -12.975870132446289 + ], + [ + "▁sărbători", + -12.975870132446289 + ], + [ + "▁Advantage", + -12.975906372070312 + ], + [ + "▁alegeri", + -12.976024627685547 + ], + [ + "▁philosopher", + -12.976031303405762 + ], + [ + "RIE", + -12.976117134094238 + ], + [ + "refundable", + -12.976221084594727 + ], + [ + "▁Sofia", + -12.97623348236084 + ], + [ + "▁încheiat", + -12.976313591003418 + ], + [ + "meilleures", + -12.976473808288574 + ], + [ + "critical", + -12.976744651794434 + ], + [ + "▁cavity", + -12.976766586303711 + ], + [ + "▁ressort", + -12.976792335510254 + ], + [ + "strong", + -12.976798057556152 + ], + [ + "▁Backup", + -12.976948738098145 + ], + [ + "▁Zeitraum", + -12.977023124694824 + ], + [ + "▁Szene", + -12.977027893066406 + ], + [ + "▁Candle", + -12.977173805236816 + ], + [ + "▁ciocolat", + -12.977198600769043 + ], + [ + "etched", + -12.977227210998535 + ], + [ + "ан", + -12.977302551269531 + ], + [ + "▁Anchor", + -12.977365493774414 + ], + [ + "equate", + -12.977470397949219 + ], + [ + "▁bulg", + -12.977476119995117 + ], + [ + "▁motorist", + -12.977524757385254 + ], + [ + "träglich", + -12.977736473083496 + ], + [ + "please", + -12.977936744689941 + ], + [ + "different", + -12.978011131286621 + ], + [ + "▁Accel", + -12.97813606262207 + ], + [ + "Proiectul", + -12.97829818725586 + ], + [ + "▁cabbage", + -12.97852897644043 + ], + [ + "▁télécharger", + -12.97852897644043 + ], + [ + "▁Presentation", + -12.97856330871582 + ], + [ + "▁Struktur", + -12.978621482849121 + ], + [ + "bücher", + -12.978650093078613 + ], + [ + "▁flatter", + -12.978672981262207 + ], + [ + "emprunt", + -12.979074478149414 + ], + [ + "▁oriental", + -12.979111671447754 + ], + [ + "▁Turnier", + -12.979166984558105 + ], + [ + "brücke", + -12.97917366027832 + ], + [ + "▁légumes", + -12.979416847229004 + ], + [ + "gerechnet", + -12.979595184326172 + ], + [ + "flooded", + -12.979621887207031 + ], + [ + "LER", + -12.979679107666016 + ], + [ + "üben", + -12.97973918914795 + ], + [ + "internaute", + -12.979888916015625 + ], + [ + "▁Austausch", + -12.979935646057129 + ], + [ + "gefordert", + -12.980034828186035 + ], + [ + "▁adoptat", + -12.980277061462402 + ], + [ + "▁erinnern", + -12.980305671691895 + ], + [ + "▁dolphin", + -12.980307579040527 + ], + [ + "▁Parkinson", + -12.980308532714844 + ], + [ + "büro", + -12.980310440063477 + ], + [ + "▁Crest", + -12.980368614196777 + ], + [ + "▁Ikea", + 
-12.980437278747559 + ], + [ + "▁ecologic", + -12.980470657348633 + ], + [ + "mplă", + -12.98065185546875 + ], + [ + "▁șef", + -12.980655670166016 + ], + [ + "coop", + -12.980868339538574 + ], + [ + "▁Carson", + -12.980900764465332 + ], + [ + "▁uşor", + -12.981054306030273 + ], + [ + "▁exert", + -12.981070518493652 + ], + [ + "▁countertop", + -12.981114387512207 + ], + [ + "ntended", + -12.981136322021484 + ], + [ + "▁Civic", + -12.981313705444336 + ], + [ + "▁attentes", + -12.98133373260498 + ], + [ + "gesetzlichen", + -12.981356620788574 + ], + [ + "frischen", + -12.981475830078125 + ], + [ + "▁Bottle", + -12.981636047363281 + ], + [ + "▁cautare", + -12.982080459594727 + ], + [ + "▁waterfront", + -12.982226371765137 + ], + [ + "▁centerpiece", + -12.982312202453613 + ], + [ + "▁Castel", + -12.982441902160645 + ], + [ + "510", + -12.98270034790039 + ], + [ + "capped", + -12.982709884643555 + ], + [ + "▁mattresses", + -12.982850074768066 + ], + [ + "▁readiness", + -12.982865333557129 + ], + [ + "diag", + -12.982970237731934 + ], + [ + "▁geändert", + -12.982980728149414 + ], + [ + "▁complained", + -12.983051300048828 + ], + [ + "▁diary", + -12.983073234558105 + ], + [ + "▁ceremonies", + -12.983144760131836 + ], + [ + "▁următor", + -12.983181953430176 + ], + [ + "▁Engel", + -12.983270645141602 + ], + [ + "▁disconnect", + -12.9832763671875 + ], + [ + "▁Silvi", + -12.983282089233398 + ], + [ + "▁eingerichtet", + -12.9834566116333 + ], + [ + "medizin", + -12.983512878417969 + ], + [ + "▁majestic", + -12.983869552612305 + ], + [ + "▁Random", + -12.983943939208984 + ], + [ + "▁Equity", + -12.984046936035156 + ], + [ + "▁Echipa", + -12.984111785888672 + ], + [ + "са", + -12.984163284301758 + ], + [ + "316", + -12.984179496765137 + ], + [ + "▁Formation", + -12.984183311462402 + ], + [ + "inland", + -12.98421859741211 + ], + [ + "appuy", + -12.984301567077637 + ], + [ + "TAN", + -12.984481811523438 + ], + [ + "slipped", + -12.984918594360352 + ], + [ + "Certains", + -12.985247611999512 + ], + [ + "▁Silber", + -12.98525333404541 + ], + [ + "▁reçoi", + -12.985257148742676 + ], + [ + "▁Monthly", + -12.985323905944824 + ], + [ + "calculating", + -12.985494613647461 + ], + [ + "▁scratches", + -12.98554515838623 + ], + [ + "▁concurrence", + -12.985654830932617 + ], + [ + "▁Stärke", + -12.985662460327148 + ], + [ + "▁intermediar", + -12.985751152038574 + ], + [ + "▁erlebt", + -12.98579216003418 + ], + [ + "gesellschaftlich", + -12.986037254333496 + ], + [ + "▁Volk", + -12.986041069030762 + ], + [ + "▁Ansprüche", + -12.986101150512695 + ], + [ + "▁cumulative", + -12.986103057861328 + ], + [ + "▁Randy", + -12.986183166503906 + ], + [ + "▁instituții", + -12.98622989654541 + ], + [ + "together", + -12.986489295959473 + ], + [ + "▁Sap", + -12.986539840698242 + ], + [ + "▁modificari", + -12.986551284790039 + ], + [ + "▁erosion", + -12.986572265625 + ], + [ + "▁wicked", + -12.986577033996582 + ], + [ + "soaked", + -12.986613273620605 + ], + [ + "▁cellar", + -12.9866361618042 + ], + [ + "ignoring", + -12.986726760864258 + ], + [ + "▁scarce", + -12.986815452575684 + ], + [ + "ueuse", + -12.98697280883789 + ], + [ + "▁bibliothèque", + -12.986995697021484 + ], + [ + "critères", + -12.987017631530762 + ], + [ + "▁overlay", + -12.987166404724121 + ], + [ + "IPA", + -12.98737907409668 + ], + [ + "director", + -12.987393379211426 + ], + [ + "▁Krishna", + -12.987444877624512 + ], + [ + "▁methodologies", + -12.987451553344727 + ], + [ + "iocese", + -12.987513542175293 + ], + [ + "▁saucepan", + -12.987713813781738 + ], + [ + 
"184", + -12.987948417663574 + ], + [ + "275", + -12.987981796264648 + ], + [ + "▁précieu", + -12.988165855407715 + ], + [ + "▁academy", + -12.9883394241333 + ], + [ + "460", + -12.988438606262207 + ], + [ + "ERN", + -12.988679885864258 + ], + [ + "▁emoti", + -12.988725662231445 + ], + [ + "▁télévision", + -12.988823890686035 + ], + [ + "EDIT", + -12.988901138305664 + ], + [ + "▁Valeri", + -12.989045143127441 + ], + [ + "▁Charity", + -12.98911190032959 + ], + [ + "Voilà", + -12.989297866821289 + ], + [ + "▁lipsit", + -12.989356994628906 + ], + [ + "▁unleash", + -12.989373207092285 + ], + [ + "▁suferit", + -12.989506721496582 + ], + [ + "▁Lifestyle", + -12.98953914642334 + ], + [ + "▁Edel", + -12.989603996276855 + ], + [ + "▁Derek", + -12.989643096923828 + ], + [ + "▁Manga", + -12.989801406860352 + ], + [ + "▁increment", + -12.989990234375 + ], + [ + "▁plötzlich", + -12.990133285522461 + ], + [ + "▁5:30", + -12.990208625793457 + ], + [ + "▁Republicii", + -12.990246772766113 + ], + [ + "▁capitalism", + -12.990293502807617 + ], + [ + "ROW", + -12.990510940551758 + ], + [ + "▁Paar", + -12.990523338317871 + ], + [ + "allée", + -12.99057674407959 + ], + [ + "▁motto", + -12.990610122680664 + ], + [ + "Schäden", + -12.990630149841309 + ], + [ + "▁£10", + -12.99063491821289 + ], + [ + "RIP", + -12.990728378295898 + ], + [ + "courir", + -12.990761756896973 + ], + [ + "rocky", + -12.990944862365723 + ], + [ + "▁Sunshine", + -12.991031646728516 + ], + [ + "▁chimney", + -12.991044998168945 + ], + [ + "▁préfér", + -12.991153717041016 + ], + [ + "▁relaxare", + -12.991189956665039 + ], + [ + "▁colabora", + -12.99134349822998 + ], + [ + "liefer", + -12.99142837524414 + ], + [ + "▁ordentlich", + -12.991486549377441 + ], + [ + "▁dauerhaft", + -12.991535186767578 + ], + [ + "kammer", + -12.991572380065918 + ], + [ + "▁Basket", + -12.991579055786133 + ], + [ + "Site", + -12.991657257080078 + ], + [ + "▁Regina", + -12.991716384887695 + ], + [ + "▁simulate", + -12.991868019104004 + ], + [ + "▁wrestle", + -12.991939544677734 + ], + [ + "wertig", + -12.991986274719238 + ], + [ + "▁Christie", + -12.992018699645996 + ], + [ + "download", + -12.992033004760742 + ], + [ + "▁torch", + -12.992213249206543 + ], + [ + "riya", + -12.992216110229492 + ], + [ + "▁Grie", + -12.992247581481934 + ], + [ + "bitten", + -12.992356300354004 + ], + [ + "▁spezialisiert", + -12.99238109588623 + ], + [ + "▁Parade", + -12.992408752441406 + ], + [ + "▁migraine", + -12.992830276489258 + ], + [ + "▁Armstrong", + -12.992846488952637 + ], + [ + "▁cutie", + -12.9928560256958 + ], + [ + "▁bullying", + -12.992889404296875 + ], + [ + "▁Estonia", + -12.99293041229248 + ], + [ + "▁harvested", + -12.992948532104492 + ], + [ + "▁Hunger", + -12.992971420288086 + ], + [ + "▁frapp", + -12.992999076843262 + ], + [ + "REM", + -12.993117332458496 + ], + [ + "sensor", + -12.993189811706543 + ], + [ + "▁GREAT", + -12.993293762207031 + ], + [ + "▁thyroid", + -12.993302345275879 + ], + [ + "▁mărturi", + -12.993335723876953 + ], + [ + "ocupă", + -12.993809700012207 + ], + [ + "▁Wealth", + -12.993812561035156 + ], + [ + "▁convins", + -12.993841171264648 + ], + [ + "141", + -12.993876457214355 + ], + [ + "▁vingt", + -12.993901252746582 + ], + [ + "▁revel", + -12.994054794311523 + ], + [ + "▁Adri", + -12.994083404541016 + ], + [ + "▁remix", + -12.994207382202148 + ], + [ + "▁fermentation", + -12.99425220489502 + ], + [ + "▁achiziti", + -12.994352340698242 + ], + [ + "dream", + -12.994426727294922 + ], + [ + "▁contemporan", + -12.994632720947266 + ], + [ + 
"▁youngsters", + -12.994685173034668 + ], + [ + "▁Hartford", + -12.994745254516602 + ], + [ + "▁Wagen", + -12.994988441467285 + ], + [ + "▁Celebr", + -12.995214462280273 + ], + [ + "leveraging", + -12.99527645111084 + ], + [ + "▁Iasi", + -12.99549674987793 + ], + [ + "tackling", + -12.9955415725708 + ], + [ + "▁intrinsic", + -12.995553970336914 + ], + [ + "▁Macedon", + -12.995603561401367 + ], + [ + "NIA", + -12.995784759521484 + ], + [ + "▁bliss", + -12.995905876159668 + ], + [ + "▁gradual", + -12.995908737182617 + ], + [ + "▁inregistrat", + -12.995981216430664 + ], + [ + "▁volleyball", + -12.995986938476562 + ], + [ + "▁offiziell", + -12.996054649353027 + ], + [ + "▁carré", + -12.99611759185791 + ], + [ + "Mostly", + -12.996174812316895 + ], + [ + "▁Harley", + -12.996193885803223 + ], + [ + "▁locati", + -12.996216773986816 + ], + [ + "▁Klo", + -12.996223449707031 + ], + [ + "▁Equal", + -12.996238708496094 + ], + [ + "▁citat", + -12.996369361877441 + ], + [ + "▁argint", + -12.996478080749512 + ], + [ + "prüft", + -12.996528625488281 + ], + [ + "▁Fence", + -12.996600151062012 + ], + [ + "positive", + -12.996988296508789 + ], + [ + "▁Kaz", + -12.997245788574219 + ], + [ + "▁distortion", + -12.997342109680176 + ], + [ + "▁sâmbătă", + -12.997342109680176 + ], + [ + "▁frontière", + -12.997346878051758 + ], + [ + "▁revanch", + -12.997394561767578 + ], + [ + "▁Held", + -12.997465133666992 + ], + [ + "▁Hobb", + -12.99776554107666 + ], + [ + "▁reuşit", + -12.997796058654785 + ], + [ + "deem", + -12.997880935668945 + ], + [ + "▁dorint", + -12.997902870178223 + ], + [ + "▁Anlagen", + -12.997908592224121 + ], + [ + "▁cheval", + -12.997973442077637 + ], + [ + "630", + -12.99806022644043 + ], + [ + "▁implementare", + -12.99808406829834 + ], + [ + "▁curator", + -12.99821662902832 + ], + [ + "▁legislator", + -12.998247146606445 + ], + [ + "▁potassium", + -12.998247146606445 + ], + [ + "▁veterinarian", + -12.998247146606445 + ], + [ + "▁domenii", + -12.998273849487305 + ], + [ + "▁revue", + -12.998310089111328 + ], + [ + "Vielen", + -12.998333930969238 + ], + [ + "africain", + -12.998570442199707 + ], + [ + "before", + -12.998680114746094 + ], + [ + "▁Bestandteil", + -12.998702049255371 + ], + [ + "▁(2010)", + -12.998767852783203 + ], + [ + "▁Arlington", + -12.999153137207031 + ], + [ + "▁Gründung", + -12.999153137207031 + ], + [ + "▁Sprinkle", + -12.999153137207031 + ], + [ + "▁Princeton", + -12.999186515808105 + ], + [ + "chirurg", + -12.999228477478027 + ], + [ + "▁laissé", + -12.999357223510742 + ], + [ + "whoever", + -12.999384880065918 + ], + [ + "▁pasture", + -12.999431610107422 + ], + [ + "ajute", + -12.999436378479004 + ], + [ + "▁joyful", + -12.999494552612305 + ], + [ + "etapa", + -12.999905586242676 + ], + [ + "ESP", + -13.000017166137695 + ], + [ + "▁Iohannis", + -13.000059127807617 + ], + [ + "▁10:30", + -13.000127792358398 + ], + [ + "▁Kingston", + -13.000140190124512 + ], + [ + "▁contender", + -13.000164031982422 + ], + [ + "▁Damage", + -13.000177383422852 + ], + [ + "▁schreibt", + -13.000482559204102 + ], + [ + "sstisch", + -13.000631332397461 + ], + [ + "Associated", + -13.00072956085205 + ], + [ + "▁disposable", + -13.000782012939453 + ], + [ + "veranstaltung", + -13.00096607208252 + ], + [ + "▁puppet", + -13.00100040435791 + ], + [ + "pong", + -13.001093864440918 + ], + [ + "▁Chronicle", + -13.001176834106445 + ], + [ + "222", + -13.001286506652832 + ], + [ + "intuit", + -13.001396179199219 + ], + [ + "inscrire", + -13.001429557800293 + ], + [ + "▁speeches", + -13.001431465148926 + ], 
+ [ + "▁Eingang", + -13.001775741577148 + ], + [ + "▁Adidas", + -13.001875877380371 + ], + [ + "▁cemetery", + -13.001877784729004 + ], + [ + "▁juicy", + -13.001885414123535 + ], + [ + "▁wertvolle", + -13.0018892288208 + ], + [ + "▁militari", + -13.001917839050293 + ], + [ + "China", + -13.00196361541748 + ], + [ + "ecția", + -13.002041816711426 + ], + [ + "luster", + -13.002063751220703 + ], + [ + "auftrag", + -13.00234317779541 + ], + [ + "▁Marius", + -13.002523422241211 + ], + [ + "▁crossover", + -13.002555847167969 + ], + [ + "▁enthusiast", + -13.002555847167969 + ], + [ + "▁cantitate", + -13.002630233764648 + ], + [ + "▁animat", + -13.002634048461914 + ], + [ + "Park", + -13.002793312072754 + ], + [ + "▁unchanged", + -13.00279426574707 + ], + [ + "russia", + -13.00281810760498 + ], + [ + "instant", + -13.002833366394043 + ], + [ + "ţiunea", + -13.002835273742676 + ], + [ + "▁franchi", + -13.002920150756836 + ], + [ + "▁mobiliz", + -13.002963066101074 + ], + [ + "athlet", + -13.003013610839844 + ], + [ + "▁Cardio", + -13.0031099319458 + ], + [ + "▁supus", + -13.003119468688965 + ], + [ + "▁Griff", + -13.003137588500977 + ], + [ + "flakes", + -13.003217697143555 + ], + [ + "soluble", + -13.003250122070312 + ], + [ + "Known", + -13.003693580627441 + ], + [ + "leaking", + -13.003741264343262 + ], + [ + "▁Holocaust", + -13.004148483276367 + ], + [ + "gift", + -13.004197120666504 + ], + [ + "▁tradiţi", + -13.004359245300293 + ], + [ + "▁southeast", + -13.004498481750488 + ], + [ + "▁correspondant", + -13.00460147857666 + ], + [ + "Isaiah", + -13.004603385925293 + ], + [ + "▁diagonal", + -13.004606246948242 + ], + [ + "▁Probabil", + -13.004680633544922 + ], + [ + "▁dégust", + -13.004791259765625 + ], + [ + "▁Naval", + -13.004802703857422 + ], + [ + "▁cultivation", + -13.004839897155762 + ], + [ + "▁Vertrieb", + -13.004849433898926 + ], + [ + "▁pony", + -13.004854202270508 + ], + [ + "▁Throw", + -13.0050048828125 + ], + [ + "little", + -13.005010604858398 + ], + [ + "▁remarque", + -13.005074501037598 + ], + [ + "▁parcare", + -13.005085945129395 + ], + [ + "3.8", + -13.00518798828125 + ], + [ + "▁renunt", + -13.005330085754395 + ], + [ + "▁Rewards", + -13.005487442016602 + ], + [ + "▁Thur", + -13.005496978759766 + ], + [ + "▁underestimate", + -13.005515098571777 + ], + [ + "▁frankly", + -13.005516052246094 + ], + [ + "Bretagne", + -13.005517959594727 + ], + [ + "axial", + -13.005537986755371 + ], + [ + "▁identities", + -13.0055570602417 + ], + [ + "▁Harvest", + -13.00561237335205 + ], + [ + "▁skippe", + -13.00561237335205 + ], + [ + "▁Boutique", + -13.005670547485352 + ], + [ + "▁intuition", + -13.005746841430664 + ], + [ + "▁Rotary", + -13.00581169128418 + ], + [ + "▁SERVICE", + -13.005875587463379 + ], + [ + "▁refill", + -13.005915641784668 + ], + [ + "▁arcade", + -13.006060600280762 + ], + [ + "▁komme", + -13.006386756896973 + ], + [ + "▁irrelevant", + -13.006427764892578 + ], + [ + "▁Sortiment", + -13.006429672241211 + ], + [ + "▁scriitor", + -13.006488800048828 + ], + [ + "▁clicked", + -13.006516456604004 + ], + [ + "▁ciel", + -13.006610870361328 + ], + [ + "▁Caesar", + -13.00680160522461 + ], + [ + "hound", + -13.006803512573242 + ], + [ + "whipped", + -13.006843566894531 + ], + [ + "licate", + -13.006867408752441 + ], + [ + "▁formatting", + -13.006986618041992 + ], + [ + "▁mosaic", + -13.007028579711914 + ], + [ + "(2017)", + -13.007122039794922 + ], + [ + "777", + -13.007257461547852 + ], + [ + "▁Messenger", + -13.007342338562012 + ], + [ + "dulci", + -13.007369041442871 + ], + [ + 
"▁(2016)", + -13.007420539855957 + ], + [ + "▁popcorn", + -13.007425308227539 + ], + [ + "▁Presidential", + -13.007497787475586 + ], + [ + "▁brokerage", + -13.007564544677734 + ], + [ + "dachte", + -13.00762939453125 + ], + [ + "verkauf", + -13.00768756866455 + ], + [ + "▁pomme", + -13.007721900939941 + ], + [ + "▁fret", + -13.007822036743164 + ], + [ + "▁revere", + -13.007894515991211 + ], + [ + "▁Canvas", + -13.008092880249023 + ], + [ + "▁Nottingham", + -13.008255004882812 + ], + [ + "▁Refuge", + -13.008257865905762 + ], + [ + "▁injustice", + -13.008259773254395 + ], + [ + "▁External", + -13.008264541625977 + ], + [ + "dincolo", + -13.008304595947266 + ], + [ + "directing", + -13.008511543273926 + ], + [ + "▁Toulouse", + -13.008710861206055 + ], + [ + "▁cheltuieli", + -13.008746147155762 + ], + [ + "▁distrus", + -13.008816719055176 + ], + [ + "impôt", + -13.008912086486816 + ], + [ + "landschaft", + -13.008964538574219 + ], + [ + "passion", + -13.00897216796875 + ], + [ + "▁Hobby", + -13.009099006652832 + ], + [ + "significant", + -13.009115219116211 + ], + [ + "▁Guinea", + -13.009209632873535 + ], + [ + "pecializing", + -13.009237289428711 + ], + [ + "pozitie", + -13.009245872497559 + ], + [ + "bourne", + -13.009295463562012 + ], + [ + "▁mâini", + -13.00933837890625 + ], + [ + "▁CFR", + -13.009395599365234 + ], + [ + "▁Konflikt", + -13.009626388549805 + ], + [ + "▁Vodafone", + -13.009626388549805 + ], + [ + "OUG", + -13.009681701660156 + ], + [ + "▁Übersicht", + -13.009735107421875 + ], + [ + "negotiated", + -13.009903907775879 + ], + [ + "▁gliss", + -13.010042190551758 + ], + [ + "▁Kapital", + -13.010111808776855 + ], + [ + "QC", + -13.0101318359375 + ], + [ + "▁gentleman", + -13.01024341583252 + ], + [ + "Inde", + -13.010514259338379 + ], + [ + "▁immensely", + -13.010639190673828 + ], + [ + "Business", + -13.010702133178711 + ], + [ + "▁04/2", + -13.010882377624512 + ], + [ + "societatea", + -13.010973930358887 + ], + [ + "fluoxetine", + -13.011000633239746 + ], + [ + "▁Wachstum", + -13.011000633239746 + ], + [ + "▁récit", + -13.011011123657227 + ], + [ + "▁Preisvergleich", + -13.011034965515137 + ], + [ + "▁Mohammed", + -13.011460304260254 + ], + [ + "gefangen", + -13.011462211608887 + ], + [ + "▁calibration", + -13.011608123779297 + ], + [ + "bekam", + -13.011728286743164 + ], + [ + "▁FUN", + -13.011758804321289 + ], + [ + "wasting", + -13.011839866638184 + ], + [ + "▁prosper", + -13.011862754821777 + ], + [ + "▁Afghan", + -13.011919021606445 + ], + [ + "▁Heroes", + -13.011921882629395 + ], + [ + "▁VMware", + -13.011927604675293 + ], + [ + "exception", + -13.011969566345215 + ], + [ + "▁înlocui", + -13.01244831085205 + ], + [ + "Neu", + -13.01246452331543 + ], + [ + "initiation", + -13.01250171661377 + ], + [ + "▁Peel", + -13.01281452178955 + ], + [ + "▁cunoaste", + -13.012836456298828 + ], + [ + "▁menschliche", + -13.012849807739258 + ], + [ + "▁poarta", + -13.012852668762207 + ], + [ + "▁congestion", + -13.012930870056152 + ], + [ + "▁îmbunătăț", + -13.013103485107422 + ], + [ + "EUR", + -13.013171195983887 + ], + [ + "▁sushi", + -13.01326847076416 + ], + [ + "Jährige", + -13.01329517364502 + ], + [ + "espoir", + -13.013423919677734 + ], + [ + "inspected", + -13.013444900512695 + ], + [ + "▁etape", + -13.013677597045898 + ], + [ + "▁pharmacist", + -13.013754844665527 + ], + [ + "flect", + -13.013840675354004 + ], + [ + "Changing", + -13.013932228088379 + ], + [ + "▁radiant", + -13.014046669006348 + ], + [ + "Daddy", + -13.014275550842285 + ], + [ + "▁categorii", + 
-13.014360427856445 + ], + [ + "quête", + -13.014628410339355 + ], + [ + "▁skincare", + -13.014657020568848 + ], + [ + "hébergement", + -13.014674186706543 + ], + [ + "840", + -13.01477336883545 + ], + [ + "awaiting", + -13.014822006225586 + ], + [ + "▁murdered", + -13.014841079711914 + ], + [ + "▁proficient", + -13.014863967895508 + ], + [ + "▁chauffe", + -13.014899253845215 + ], + [ + "▁contur", + -13.014937400817871 + ], + [ + "▁rejoindre", + -13.015145301818848 + ], + [ + "▁foloseste", + -13.01521110534668 + ], + [ + "▁Grup", + -13.01535701751709 + ], + [ + "152", + -13.01541519165039 + ], + [ + "▁workspace", + -13.015438079833984 + ], + [ + "▁primitive", + -13.015546798706055 + ], + [ + "▁Ginger", + -13.015557289123535 + ], + [ + "▁chemotherapy", + -13.015595436096191 + ], + [ + "▁platinum", + -13.015596389770508 + ], + [ + "▁sarcina", + -13.01559829711914 + ], + [ + "▁revival", + -13.015820503234863 + ], + [ + "▁Meditation", + -13.016111373901367 + ], + [ + "▁Vogel", + -13.0161714553833 + ], + [ + "IMA", + -13.016359329223633 + ], + [ + "▁handset", + -13.016486167907715 + ], + [ + "▁Nachmittag", + -13.01651668548584 + ], + [ + "▁déchets", + -13.016517639160156 + ], + [ + "▁Cornwall", + -13.0165433883667 + ], + [ + "▁Curry", + -13.016605377197266 + ], + [ + "▁cuplu", + -13.016607284545898 + ], + [ + "▁Birth", + -13.016822814941406 + ], + [ + "forward", + -13.016936302185059 + ], + [ + "Dezvoltare", + -13.016977310180664 + ], + [ + "▁irgendwie", + -13.016980171203613 + ], + [ + "▁erzielt", + -13.016993522644043 + ], + [ + "LOS", + -13.01700496673584 + ], + [ + "▁overload", + -13.01708984375 + ], + [ + "▁repay", + -13.01713752746582 + ], + [ + "urlaub", + -13.017155647277832 + ], + [ + "7.0", + -13.01716423034668 + ], + [ + "▁Wheat", + -13.01748275756836 + ], + [ + "▁degrab", + -13.017488479614258 + ], + [ + "▁Brock", + -13.017491340637207 + ], + [ + "▁inhabit", + -13.0176362991333 + ], + [ + "▁Speech", + -13.017834663391113 + ], + [ + "directional", + -13.017862319946289 + ], + [ + "▁Mandel", + -13.017909049987793 + ], + [ + "▁erscheinen", + -13.01791763305664 + ], + [ + "consciously", + -13.018059730529785 + ], + [ + "▁sunet", + -13.0182523727417 + ], + [ + "▁stole", + -13.018259048461914 + ], + [ + "▁Utilis", + -13.018349647521973 + ], + [ + "▁obstruction", + -13.01852798461914 + ], + [ + "▁mindfulness", + -13.0186767578125 + ], + [ + "partnering", + -13.01868724822998 + ], + [ + "CSI", + -13.018819808959961 + ], + [ + "204", + -13.01905632019043 + ], + [ + "▁squirrel", + -13.019286155700684 + ], + [ + "▁Rwanda", + -13.01975154876709 + ], + [ + "▁hunters", + -13.019850730895996 + ], + [ + "▁revitaliz", + -13.02022647857666 + ], + [ + "▁avansat", + -13.020232200622559 + ], + [ + "▁Yamaha", + -13.020294189453125 + ], + [ + "foto", + -13.020435333251953 + ], + [ + "▁Vegan", + -13.020469665527344 + ], + [ + "▁pitched", + -13.02053165435791 + ], + [ + "▁Vortrag", + -13.020540237426758 + ], + [ + "traditional", + -13.020809173583984 + ], + [ + "offrent", + -13.021024703979492 + ], + [ + "▁Expression", + -13.021315574645996 + ], + [ + "▁apprécié", + -13.021354675292969 + ], + [ + "▁Christina", + -13.021408081054688 + ], + [ + "eilig", + -13.021464347839355 + ], + [ + "▁verhindern", + -13.021599769592285 + ], + [ + "culturii", + -13.021607398986816 + ], + [ + "Aşa", + -13.021703720092773 + ], + [ + "▁enamel", + -13.021756172180176 + ], + [ + "▁fördern", + -13.021771430969238 + ], + [ + "▁acheté", + -13.021798133850098 + ], + [ + "▁eventuell", + -13.021842956542969 + ], + [ + "▁Sino", + 
-13.021873474121094 + ], + [ + "▁totodat", + -13.022008895874023 + ], + [ + "accelerated", + -13.022202491760254 + ], + [ + "▁strengthened", + -13.02245044708252 + ], + [ + "corro", + -13.022482872009277 + ], + [ + "4,5", + -13.02253246307373 + ], + [ + "▁Beverly", + -13.022533416748047 + ], + [ + "ulevard", + -13.022615432739258 + ], + [ + "▁hamper", + -13.022644996643066 + ], + [ + "▁Tempe", + -13.02268123626709 + ], + [ + "▁Yacht", + -13.022799491882324 + ], + [ + "▁LGBT", + -13.022871017456055 + ], + [ + "▁fingertips", + -13.022991180419922 + ], + [ + "▁Auftraggeber", + -13.02299976348877 + ], + [ + "▁harbour", + -13.0230131149292 + ], + [ + "blew", + -13.0230712890625 + ], + [ + "▁ideology", + -13.023115158081055 + ], + [ + "▁covenant", + -13.023170471191406 + ], + [ + "▁faction", + -13.023419380187988 + ], + [ + "▁animé", + -13.023481369018555 + ], + [ + "energie", + -13.023515701293945 + ], + [ + "iterführende", + -13.02369499206543 + ], + [ + "▁MAI", + -13.023784637451172 + ], + [ + "▁pluie", + -13.023905754089355 + ], + [ + "▁cathedral", + -13.023919105529785 + ], + [ + "▁chiropractic", + -13.023919105529785 + ], + [ + "monies", + -13.023968696594238 + ], + [ + "▁contraction", + -13.024054527282715 + ], + [ + "pvc", + -13.024202346801758 + ], + [ + "staff", + -13.024209022521973 + ], + [ + "BIT", + -13.024216651916504 + ], + [ + "EET", + -13.024514198303223 + ], + [ + "▁sanction", + -13.024575233459473 + ], + [ + "▁Reiki", + -13.024709701538086 + ], + [ + "Trying", + -13.024772644042969 + ], + [ + "▁endangered", + -13.024847984313965 + ], + [ + "▁Emperor", + -13.024849891662598 + ], + [ + "▁empfi", + -13.024909973144531 + ], + [ + "animation", + -13.024998664855957 + ], + [ + "207", + -13.025029182434082 + ], + [ + "separating", + -13.02512264251709 + ], + [ + "▁lucrative", + -13.025148391723633 + ], + [ + "▁ortho", + -13.02524185180664 + ], + [ + "variété", + -13.025266647338867 + ], + [ + "hésit", + -13.025287628173828 + ], + [ + "nuances", + -13.025289535522461 + ], + [ + "▁$250", + -13.025394439697266 + ], + [ + "▁drumuri", + -13.025435447692871 + ], + [ + "▁unsafe", + -13.025446891784668 + ], + [ + "▁1943", + -13.025477409362793 + ], + [ + "▁automatique", + -13.025524139404297 + ], + [ + "billed", + -13.025585174560547 + ], + [ + "▁rectangle", + -13.02578067779541 + ], + [ + "▁Spannung", + -13.025781631469727 + ], + [ + "▁dévoil", + -13.025790214538574 + ], + [ + "▁perimeter", + -13.02580738067627 + ], + [ + "▁imaginative", + -13.02581787109375 + ], + [ + "actifs", + -13.025851249694824 + ], + [ + "neuve", + -13.0259428024292 + ], + [ + "leagă", + -13.026269912719727 + ], + [ + "gehende", + -13.026700973510742 + ], + [ + "▁Gorgeous", + -13.026708602905273 + ], + [ + "▁impeccable", + -13.026708602905273 + ], + [ + "▁Curtain", + -13.026718139648438 + ], + [ + "▁presume", + -13.026731491088867 + ], + [ + "surpassed", + -13.02687931060791 + ], + [ + "schiff", + -13.026927947998047 + ], + [ + "Allied", + -13.02699089050293 + ], + [ + "fanden", + -13.027080535888672 + ], + [ + "▁célébr", + -13.027174949645996 + ], + [ + "▁phénomène", + -13.027174949645996 + ], + [ + "▁Powell", + -13.027413368225098 + ], + [ + "jean", + -13.027631759643555 + ], + [ + "▁peculiar", + -13.027640342712402 + ], + [ + "▁Antarctic", + -13.027641296386719 + ], + [ + "▁gradient", + -13.027663230895996 + ], + [ + "▁brainstorm", + -13.027704238891602 + ], + [ + "échapp", + -13.027726173400879 + ], + [ + "Bot", + -13.027738571166992 + ], + [ + "cita", + -13.027743339538574 + ], + [ + "▁lumber", + 
-13.027752876281738 + ], + [ + "weichen", + -13.027852058410645 + ], + [ + "▁Halte", + -13.028024673461914 + ], + [ + "▁noștri", + -13.028107643127441 + ], + [ + "construction", + -13.028165817260742 + ], + [ + "DOC", + -13.028236389160156 + ], + [ + "▁aluat", + -13.028319358825684 + ], + [ + "streamlined", + -13.028462409973145 + ], + [ + "Bio", + -13.028494834899902 + ], + [ + "▁nutritious", + -13.028573036193848 + ], + [ + "▁délicat", + -13.0286283493042 + ], + [ + "▁sticla", + -13.028656959533691 + ], + [ + "OVE", + -13.028721809387207 + ], + [ + "▁panneau", + -13.028793334960938 + ], + [ + "▁hetero", + -13.028801918029785 + ], + [ + "▁annul", + -13.028839111328125 + ], + [ + "IDA", + -13.028935432434082 + ], + [ + "▁pitches", + -13.028960227966309 + ], + [ + "▁Edmonton", + -13.029040336608887 + ], + [ + "mediated", + -13.029136657714844 + ], + [ + "AFP", + -13.029139518737793 + ], + [ + "▁Tibetan", + -13.029228210449219 + ], + [ + "intégration", + -13.02934455871582 + ], + [ + "▁Rox", + -13.0294771194458 + ], + [ + "energia", + -13.02950668334961 + ], + [ + "▁reconnaît", + -13.029509544372559 + ], + [ + "▁ține", + -13.029525756835938 + ], + [ + "▁ignition", + -13.029534339904785 + ], + [ + "Foarte", + -13.029541015625 + ], + [ + "▁HOME", + -13.029545783996582 + ], + [ + "▁MLB", + -13.029545783996582 + ], + [ + "▁Wähle", + -13.029590606689453 + ], + [ + "▁Merkel", + -13.029658317565918 + ], + [ + "poarte", + -13.029664993286133 + ], + [ + "ALT", + -13.02979850769043 + ], + [ + "jenigen", + -13.029985427856445 + ], + [ + "▁conflit", + -13.029987335205078 + ], + [ + "▁buckle", + -13.029996871948242 + ], + [ + "▁cacao", + -13.030035018920898 + ], + [ + "▁représentation", + -13.030076026916504 + ], + [ + "incepand", + -13.030267715454102 + ], + [ + "▁Carroll", + -13.030306816101074 + ], + [ + "▁clientilor", + -13.030370712280273 + ], + [ + "▁immunity", + -13.030441284179688 + ], + [ + "oût", + -13.03044319152832 + ], + [ + "▁Witch", + -13.030488014221191 + ], + [ + "▁Wolfgang", + -13.030532836914062 + ], + [ + "▁prudent", + -13.030701637268066 + ], + [ + "fotograf", + -13.03084945678711 + ], + [ + "paar", + -13.030871391296387 + ], + [ + "ergeti", + -13.030927658081055 + ], + [ + "▁empowerment", + -13.031112670898438 + ], + [ + "▁Admir", + -13.03122329711914 + ], + [ + "▁complémentaire", + -13.031340599060059 + ], + [ + "▁angepasst", + -13.031376838684082 + ], + [ + "▁flirt", + -13.031376838684082 + ], + [ + "▁elektronische", + -13.031388282775879 + ], + [ + "▁stereotype", + -13.03140640258789 + ], + [ + "SIL", + -13.031465530395508 + ], + [ + "▁Realtor", + -13.031471252441406 + ], + [ + "Edit", + -13.031774520874023 + ], + [ + "requête", + -13.03181266784668 + ], + [ + "▁Herstellung", + -13.031815528869629 + ], + [ + "▁cyst", + -13.031947135925293 + ], + [ + "syndic", + -13.031994819641113 + ], + [ + "leni", + -13.032007217407227 + ], + [ + "▁fringe", + -13.032020568847656 + ], + [ + "▁Jardin", + -13.032032012939453 + ], + [ + "▁Vezi", + -13.032052993774414 + ], + [ + "▁Ausstattung", + -13.032312393188477 + ], + [ + "▁glide", + -13.032590866088867 + ], + [ + "▁Andere", + -13.032758712768555 + ], + [ + "▁Haftung", + -13.032781600952148 + ], + [ + "maßnahmen", + -13.032788276672363 + ], + [ + "▁recommandé", + -13.032790184020996 + ], + [ + "▁nave", + -13.032793998718262 + ], + [ + "viziune", + -13.033051490783691 + ], + [ + "▁stimulus", + -13.033098220825195 + ], + [ + "faulty", + -13.0331449508667 + ], + [ + "▁vicinity", + -13.033249855041504 + ], + [ + "▁turnaround", + -13.033445358276367 
+ ], + [ + "stammt", + -13.033846855163574 + ], + [ + "▁problemlos", + -13.033856391906738 + ], + [ + "▁Establish", + -13.03415298461914 + ], + [ + "▁Silva", + -13.034172058105469 + ], + [ + "▁muzică", + -13.034187316894531 + ], + [ + "▁theatrical", + -13.03421401977539 + ], + [ + "▁braid", + -13.034242630004883 + ], + [ + "▁blieb", + -13.034276962280273 + ], + [ + "158", + -13.034296989440918 + ], + [ + "▁ignorance", + -13.034330368041992 + ], + [ + "onset", + -13.034416198730469 + ], + [ + "zeitlich", + -13.034523963928223 + ], + [ + "▁Sink", + -13.034523963928223 + ], + [ + "▁caractéris", + -13.034594535827637 + ], + [ + "▁kreative", + -13.03465747833252 + ], + [ + "behörde", + -13.034677505493164 + ], + [ + "repairing", + -13.034680366516113 + ], + [ + "▁tumble", + -13.034757614135742 + ], + [ + "zione", + -13.034871101379395 + ], + [ + "▁Evil", + -13.03494644165039 + ], + [ + "▁popping", + -13.034952163696289 + ], + [ + "▁mutant", + -13.035025596618652 + ], + [ + "emme", + -13.035030364990234 + ], + [ + "▁Pleasant", + -13.035125732421875 + ], + [ + "▁appetizer", + -13.035125732421875 + ], + [ + "▁PLEASE", + -13.035126686096191 + ], + [ + "▁physiological", + -13.035128593444824 + ], + [ + "▁Facility", + -13.035131454467773 + ], + [ + "▁quirky", + -13.035131454467773 + ], + [ + "▁colectiv", + -13.035154342651367 + ], + [ + "151", + -13.035181999206543 + ], + [ + "August", + -13.03531551361084 + ], + [ + "▁Jewelry", + -13.035327911376953 + ], + [ + "▁ziar", + -13.035481452941895 + ], + [ + "▁puissant", + -13.035489082336426 + ], + [ + "▁Argument", + -13.035595893859863 + ], + [ + "▁Betracht", + -13.035621643066406 + ], + [ + "▁TRANS", + -13.035636901855469 + ], + [ + "Exception", + -13.036011695861816 + ], + [ + "nosti", + -13.036083221435547 + ], + [ + "▁Geographic", + -13.036155700683594 + ], + [ + "amazingly", + -13.036173820495605 + ], + [ + "▁météo", + -13.036181449890137 + ], + [ + "streit", + -13.036314010620117 + ], + [ + "▁idle", + -13.036439895629883 + ], + [ + "179", + -13.036441802978516 + ], + [ + "▁Bremen", + -13.036534309387207 + ], + [ + "▁Kläger", + -13.03653621673584 + ], + [ + "▁Grammy", + -13.036598205566406 + ], + [ + "▁Philosophy", + -13.036613464355469 + ], + [ + "▁utilizeaz", + -13.036779403686523 + ], + [ + "Accord", + -13.036897659301758 + ], + [ + "▁USDA", + -13.036986351013184 + ], + [ + "Continuing", + -13.037010192871094 + ], + [ + "geschenk", + -13.037178039550781 + ], + [ + "kredit", + -13.037248611450195 + ], + [ + "Laugh", + -13.037297248840332 + ], + [ + "oaring", + -13.037406921386719 + ], + [ + "▁Richter", + -13.037460327148438 + ], + [ + "▁Figur", + -13.037938117980957 + ], + [ + "▁inconsistent", + -13.037947654724121 + ], + [ + "cresterea", + -13.038069725036621 + ], + [ + "▁regeneration", + -13.038130760192871 + ], + [ + "speaking", + -13.03818416595459 + ], + [ + "▁nasal", + -13.03824234008789 + ], + [ + "▁partagé", + -13.038259506225586 + ], + [ + "▁Warranty", + -13.038419723510742 + ], + [ + "▁Mueller", + -13.038501739501953 + ], + [ + "formează", + -13.038734436035156 + ], + [ + "hundert", + -13.038745880126953 + ], + [ + "gemeldet", + -13.038893699645996 + ], + [ + "▁excursions", + -13.038912773132324 + ], + [ + "▁linii", + -13.039066314697266 + ], + [ + "gefährlich", + -13.039067268371582 + ], + [ + "▁schema", + -13.03907299041748 + ], + [ + "nişte", + -13.039131164550781 + ], + [ + "▁roadway", + -13.039132118225098 + ], + [ + "▁regression", + -13.039135932922363 + ], + [ + "▁mână", + -13.039366722106934 + ], + [ + "5.3", + 
-13.039373397827148 + ], + [ + "▁Spät", + -13.039734840393066 + ], + [ + "▁stubborn", + -13.039833068847656 + ], + [ + "efectele", + -13.040030479431152 + ], + [ + "▁atenţi", + -13.040136337280273 + ], + [ + "▁dovedit", + -13.04018497467041 + ], + [ + "▁Agile", + -13.040190696716309 + ], + [ + "denying", + -13.04023265838623 + ], + [ + "fluss", + -13.040620803833008 + ], + [ + "▁Calvin", + -13.04066276550293 + ], + [ + "Sculpt", + -13.04083251953125 + ], + [ + "égalité", + -13.040884971618652 + ], + [ + "ticket", + -13.040977478027344 + ], + [ + "marketed", + -13.041044235229492 + ], + [ + "holic", + -13.041173934936523 + ], + [ + "▁eCommerce", + -13.041346549987793 + ], + [ + "▁Slip", + -13.041369438171387 + ], + [ + "▁degradation", + -13.041736602783203 + ], + [ + "écart", + -13.041742324829102 + ], + [ + "AGR", + -13.041807174682617 + ], + [ + "▁burglar", + -13.041837692260742 + ], + [ + "▁conjug", + -13.041903495788574 + ], + [ + "LLP", + -13.04194164276123 + ], + [ + "couvrir", + -13.041997909545898 + ], + [ + "▁Hearing", + -13.042001724243164 + ], + [ + "▁canton", + -13.042006492614746 + ], + [ + "▁sixteen", + -13.042068481445312 + ], + [ + "▁Verlust", + -13.042097091674805 + ], + [ + "allied", + -13.042268753051758 + ], + [ + "Performing", + -13.042393684387207 + ], + [ + "▁évoqu", + -13.042519569396973 + ], + [ + "▁bookstore", + -13.042574882507324 + ], + [ + "▁intrebari", + -13.042627334594727 + ], + [ + "▁Hyderabad", + -13.042668342590332 + ], + [ + "▁repertoire", + -13.042668342590332 + ], + [ + "▁cablu", + -13.042678833007812 + ], + [ + "▁Costume", + -13.04269790649414 + ], + [ + "▁Shannon", + -13.042713165283203 + ], + [ + "▁glossy", + -13.042800903320312 + ], + [ + "▁cible", + -13.042876243591309 + ], + [ + "Saint", + -13.042984008789062 + ], + [ + "▁Ultima", + -13.043042182922363 + ], + [ + "▁teint", + -13.0432767868042 + ], + [ + "▁envision", + -13.043477058410645 + ], + [ + "▁thinner", + -13.043478965759277 + ], + [ + "ис", + -13.043609619140625 + ], + [ + "▁bladder", + -13.043615341186523 + ], + [ + "▁Prairie", + -13.043618202209473 + ], + [ + "▁puppies", + -13.043633460998535 + ], + [ + "▁overweight", + -13.043729782104492 + ], + [ + "destined", + -13.043925285339355 + ], + [ + "▁addictive", + -13.043935775756836 + ], + [ + "▁posé", + -13.043993949890137 + ], + [ + "▁mecanism", + -13.044112205505371 + ], + [ + "▁chorus", + -13.044466972351074 + ], + [ + "weder", + -13.044528007507324 + ], + [ + "▁begrüß", + -13.044562339782715 + ], + [ + "▁unsuccessful", + -13.044562339782715 + ], + [ + "executing", + -13.044564247131348 + ], + [ + "▁metadata", + -13.044611930847168 + ], + [ + "traiter", + -13.044620513916016 + ], + [ + "▁borrowed", + -13.044649124145508 + ], + [ + "▁aeroport", + -13.044679641723633 + ], + [ + "▁Bibli", + -13.044761657714844 + ], + [ + "▁youthful", + -13.044902801513672 + ], + [ + "▁Herbert", + -13.044913291931152 + ], + [ + "client", + -13.04500961303711 + ], + [ + "merci", + -13.04520034790039 + ], + [ + "▁Beast", + -13.045210838317871 + ], + [ + "▁Entrepreneur", + -13.045230865478516 + ], + [ + "▁Gelände", + -13.045256614685059 + ], + [ + "▁Packers", + -13.045268058776855 + ], + [ + "formarea", + -13.045469284057617 + ], + [ + "▁Kündigung", + -13.045511245727539 + ], + [ + "▁verdient", + -13.045515060424805 + ], + [ + "▁solutie", + -13.045530319213867 + ], + [ + "figuration", + -13.045611381530762 + ], + [ + "voluntarily", + -13.045622825622559 + ], + [ + "Gregor", + -13.045742988586426 + ], + [ + "▁Uncle", + -13.04589557647705 + ], + [ + "tarifs", + 
-13.045907020568848 + ], + [ + "▁écologique", + -13.045987129211426 + ], + [ + "▁Investition", + -13.045991897583008 + ], + [ + "exemplar", + -13.046127319335938 + ], + [ + "▁prevede", + -13.046144485473633 + ], + [ + "▁waive", + -13.046147346496582 + ], + [ + "▁Legion", + -13.046156883239746 + ], + [ + "similar", + -13.046247482299805 + ], + [ + "▁shareholder", + -13.04626750946045 + ], + [ + "▁oyster", + -13.046476364135742 + ], + [ + "▁Lightning", + -13.046530723571777 + ], + [ + "experimenting", + -13.04662799835205 + ], + [ + "▁replies", + -13.04663372039795 + ], + [ + "80,000", + -13.046757698059082 + ], + [ + "▁adept", + -13.04692554473877 + ], + [ + "▁Crăciun", + -13.046935081481934 + ], + [ + "▁sanatos", + -13.046935081481934 + ], + [ + "305", + -13.04699993133545 + ], + [ + "specialised", + -13.047069549560547 + ], + [ + "▁drummer", + -13.047189712524414 + ], + [ + "Applicants", + -13.04741096496582 + ], + [ + "objekt", + -13.04741096496582 + ], + [ + "▁Fifth", + -13.047446250915527 + ], + [ + "rgic", + -13.047567367553711 + ], + [ + "theater", + -13.047635078430176 + ], + [ + "▁terminé", + -13.047852516174316 + ], + [ + "▁Englisch", + -13.047894477844238 + ], + [ + "▁Oradea", + -13.047898292541504 + ], + [ + "possesses", + -13.0479097366333 + ], + [ + "illiers", + -13.047986030578613 + ], + [ + "▁refurbish", + -13.048110961914062 + ], + [ + "graphie", + -13.04814338684082 + ], + [ + "▁Booth", + -13.048174858093262 + ], + [ + "▁Ausdruck", + -13.048192977905273 + ], + [ + "▁Marriage", + -13.048361778259277 + ], + [ + "▁knives", + -13.048362731933594 + ], + [ + "▁Relief", + -13.048368453979492 + ], + [ + "▁Clerk", + -13.048392295837402 + ], + [ + "wait", + -13.048501014709473 + ], + [ + "▁probablement", + -13.048698425292969 + ], + [ + "▁suplimentar", + -13.048701286315918 + ], + [ + "dollar", + -13.048797607421875 + ], + [ + "English", + -13.04898452758789 + ], + [ + "866", + -13.049300193786621 + ], + [ + "▁Savannah", + -13.049314498901367 + ], + [ + "▁aftermath", + -13.049318313598633 + ], + [ + "phé", + -13.04932689666748 + ], + [ + "▁Plum", + -13.049417495727539 + ], + [ + "264", + -13.049566268920898 + ], + [ + "2.000", + -13.049582481384277 + ], + [ + "niei", + -13.049603462219238 + ], + [ + "ATP", + -13.049803733825684 + ], + [ + "mila", + -13.04985523223877 + ], + [ + "▁glut", + -13.049887657165527 + ], + [ + "gotta", + -13.049891471862793 + ], + [ + "schütt", + -13.049893379211426 + ], + [ + "klick", + -13.049996376037598 + ], + [ + "whether", + -13.050090789794922 + ], + [ + "▁Wade", + -13.050163269042969 + ], + [ + "▁Riley", + -13.050280570983887 + ], + [ + "Chancellor", + -13.050288200378418 + ], + [ + "▁nebun", + -13.050300598144531 + ], + [ + "▁aufgebaut", + -13.050374984741211 + ], + [ + "steigt", + -13.050423622131348 + ], + [ + "▁entirety", + -13.050494194030762 + ], + [ + "▁telefoane", + -13.05074691772461 + ], + [ + "▁Roulette", + -13.050763130187988 + ], + [ + "1700", + -13.050787925720215 + ], + [ + "▁lycée", + -13.050856590270996 + ], + [ + "rotary", + -13.051128387451172 + ], + [ + "benefited", + -13.051170349121094 + ], + [ + "▁Bisericii", + -13.051220893859863 + ], + [ + "▁Rehabilitation", + -13.051220893859863 + ], + [ + "▁lithium", + -13.051228523254395 + ], + [ + "imposing", + -13.051279067993164 + ], + [ + "176", + -13.051329612731934 + ], + [ + "▁thunder", + -13.051527976989746 + ], + [ + "ăsesc", + -13.052000045776367 + ], + [ + "▁Einblick", + -13.052010536193848 + ], + [ + "oiled", + -13.052151679992676 + ], + [ + "SSA", + -13.052181243896484 + ], + 
[ + "apparition", + -13.05224609375 + ], + [ + "▁Impress", + -13.052273750305176 + ], + [ + "▁Aboriginal", + -13.052297592163086 + ], + [ + "loos", + -13.052383422851562 + ], + [ + "▁Bread", + -13.052440643310547 + ], + [ + "177", + -13.052619934082031 + ], + [ + "VERS", + -13.052638053894043 + ], + [ + "▁Respect", + -13.05271053314209 + ], + [ + "▁Practical", + -13.053047180175781 + ], + [ + "drafting", + -13.05306339263916 + ], + [ + "си", + -13.053099632263184 + ], + [ + "▁faza", + -13.053109169006348 + ], + [ + "▁sovereign", + -13.053123474121094 + ], + [ + "▁Untersuchung", + -13.05314826965332 + ], + [ + "▁Niveau", + -13.053154945373535 + ], + [ + "transport", + -13.053182601928711 + ], + [ + "▁downstream", + -13.053293228149414 + ], + [ + "▁Milton", + -13.053383827209473 + ], + [ + "▁knob", + -13.053390502929688 + ], + [ + "employeur", + -13.053499221801758 + ], + [ + "▁furnish", + -13.053544044494629 + ], + [ + "weather", + -13.053564071655273 + ], + [ + "LAB", + -13.053646087646484 + ], + [ + "166", + -13.053853988647461 + ], + [ + "▁salaire", + -13.053937911987305 + ], + [ + "▁Carnival", + -13.054088592529297 + ], + [ + "4-0", + -13.054168701171875 + ], + [ + "▁Angle", + -13.054291725158691 + ], + [ + "▁José", + -13.054399490356445 + ], + [ + "architecture", + -13.054475784301758 + ], + [ + "▁Sunset", + -13.054574966430664 + ], + [ + "▁Absolut", + -13.054694175720215 + ], + [ + "▁herrlich", + -13.05470085144043 + ], + [ + "12%", + -13.054703712463379 + ], + [ + "▁Indo", + -13.054823875427246 + ], + [ + "▁Komfort", + -13.055049896240234 + ], + [ + "▁acțiuni", + -13.05505084991455 + ], + [ + "energize", + -13.055085182189941 + ], + [ + "▁Warning", + -13.055171966552734 + ], + [ + "▁Sunny", + -13.055216789245605 + ], + [ + "▁razor", + -13.055489540100098 + ], + [ + "▁psychic", + -13.055490493774414 + ], + [ + "▁convivial", + -13.055525779724121 + ], + [ + "Voraussetzungen", + -13.05555534362793 + ], + [ + "IMO", + -13.055622100830078 + ], + [ + "opérateur", + -13.055743217468262 + ], + [ + "▁langjährige", + -13.05575942993164 + ], + [ + "▁Spanie", + -13.055901527404785 + ], + [ + "pulmonary", + -13.056004524230957 + ], + [ + "▁Bingo", + -13.056050300598145 + ], + [ + "▁confession", + -13.056096076965332 + ], + [ + "▁Petru", + -13.056100845336914 + ], + [ + "▁prerequisite", + -13.056164741516113 + ], + [ + "▁dodge", + -13.056352615356445 + ], + [ + "▁McN", + -13.056436538696289 + ], + [ + "▁originate", + -13.056577682495117 + ], + [ + "▁nettoy", + -13.056612014770508 + ], + [ + "▁$14", + -13.056645393371582 + ], + [ + "▁Bride", + -13.05669116973877 + ], + [ + "▁noisy", + -13.05673885345459 + ], + [ + "▁Worcester", + -13.056963920593262 + ], + [ + "▁Surrey", + -13.056982040405273 + ], + [ + "harmonis", + -13.057110786437988 + ], + [ + "▁représentant", + -13.057304382324219 + ], + [ + "organisée", + -13.057475090026855 + ], + [ + "truction", + -13.057513236999512 + ], + [ + "injected", + -13.057597160339355 + ], + [ + "▁Suzuki", + -13.057924270629883 + ], + [ + "▁japonais", + -13.057924270629883 + ], + [ + "▁turquoise", + -13.057924270629883 + ], + [ + "▁Peut", + -13.058004379272461 + ], + [ + "▁Sequ", + -13.058028221130371 + ], + [ + "slated", + -13.058037757873535 + ], + [ + "▁Alma", + -13.058215141296387 + ], + [ + "▁gebraucht", + -13.05827522277832 + ], + [ + "gängig", + -13.058281898498535 + ], + [ + "▁commis", + -13.058377265930176 + ], + [ + "ACS", + -13.05856990814209 + ], + [ + "pressure", + -13.058664321899414 + ], + [ + "cured", + -13.05874252319336 + ], + [ + "▁Jackie", + 
-13.058757781982422 + ], + [ + "▁Kashmir", + -13.05888557434082 + ], + [ + "▁recruited", + -13.059000968933105 + ], + [ + "▁vécu", + -13.059011459350586 + ], + [ + "▁opus", + -13.059052467346191 + ], + [ + "kWh", + -13.05927562713623 + ], + [ + "▁tapping", + -13.059292793273926 + ], + [ + "▁tehnologie", + -13.05931282043457 + ], + [ + "▁Gentle", + -13.059365272521973 + ], + [ + "▁bombard", + -13.059372901916504 + ], + [ + "▁caméra", + -13.059427261352539 + ], + [ + "züglich", + -13.059431076049805 + ], + [ + "▁bingo", + -13.059453010559082 + ], + [ + "private", + -13.059496879577637 + ], + [ + "▁mediator", + -13.059642791748047 + ], + [ + "▁carbohydrates", + -13.059847831726074 + ], + [ + "▁workmanship", + -13.059849739074707 + ], + [ + "▁Combat", + -13.059853553771973 + ], + [ + "▁Mickey", + -13.059901237487793 + ], + [ + "▁distressed", + -13.059908866882324 + ], + [ + "lucrează", + -13.059924125671387 + ], + [ + "treatment", + -13.06007194519043 + ], + [ + "▁Einwohner", + -13.060330390930176 + ], + [ + "▁glaze", + -13.060386657714844 + ], + [ + "scholarly", + -13.06043529510498 + ], + [ + "ROC", + -13.060750007629395 + ], + [ + "▁Darwin", + -13.060774803161621 + ], + [ + "drückt", + -13.060775756835938 + ], + [ + "▁treadmill", + -13.060819625854492 + ], + [ + "ntz", + -13.060830116271973 + ], + [ + "620", + -13.061087608337402 + ], + [ + "surface", + -13.061148643493652 + ], + [ + "▁vieţii", + -13.0612211227417 + ], + [ + "990", + -13.061296463012695 + ], + [ + "▁doigt", + -13.061341285705566 + ], + [ + "▁explor", + -13.061450004577637 + ], + [ + "▁asistent", + -13.061670303344727 + ], + [ + "coloriage", + -13.061734199523926 + ], + [ + "▁Martinez", + -13.061758041381836 + ], + [ + "▁antibodies", + -13.061775207519531 + ], + [ + "Schülerinnen", + -13.061779975891113 + ], + [ + "Honestly", + -13.06178092956543 + ], + [ + "grabbing", + -13.061871528625488 + ], + [ + "▁Cardiff", + -13.061897277832031 + ], + [ + "▁Trophy", + -13.062084197998047 + ], + [ + "▁pupil", + -13.062117576599121 + ], + [ + "▁invoke", + -13.062161445617676 + ], + [ + "bezüglich", + -13.062193870544434 + ], + [ + "Anschließend", + -13.062275886535645 + ], + [ + "perks", + -13.062360763549805 + ], + [ + "530", + -13.062373161315918 + ], + [ + "▁emblem", + -13.062431335449219 + ], + [ + "770", + -13.062543869018555 + ], + [ + "clairement", + -13.062590599060059 + ], + [ + "▁sublinia", + -13.062597274780273 + ], + [ + "▁1910", + -13.062719345092773 + ], + [ + "▁Embassy", + -13.062740325927734 + ], + [ + "▁Valencia", + -13.062740325927734 + ], + [ + "▁catastrophic", + -13.062740325927734 + ], + [ + "▁simulator", + -13.06274700164795 + ], + [ + "Pierre", + -13.062766075134277 + ], + [ + "▁doorstep", + -13.062806129455566 + ], + [ + "▁rallie", + -13.062881469726562 + ], + [ + "▁șans", + -13.062891960144043 + ], + [ + "▁crosses", + -13.06300163269043 + ], + [ + "▁zodi", + -13.06312084197998 + ], + [ + "Next", + -13.06314754486084 + ], + [ + "▁rebuilt", + -13.063152313232422 + ], + [ + "▁panorama", + -13.063222885131836 + ], + [ + "196", + -13.06324291229248 + ], + [ + "▁erinnert", + -13.06370735168457 + ], + [ + "lism", + -13.06371784210205 + ], + [ + "opened", + -13.06383228302002 + ], + [ + "▁breakout", + -13.064126014709473 + ], + [ + "▁mosque", + -13.064153671264648 + ], + [ + "boc", + -13.064507484436035 + ], + [ + "▁grout", + -13.064568519592285 + ], + [ + "▁Gather", + -13.064582824707031 + ], + [ + "▁vampire", + -13.06467342376709 + ], + [ + "▁tandem", + -13.064684867858887 + ], + [ + "▁pastra", + -13.064702033996582 + 
], + [ + "▁lösen", + -13.064794540405273 + ], + [ + "▁discontinu", + -13.064826965332031 + ], + [ + "fuses", + -13.064885139465332 + ], + [ + "▁identitate", + -13.064947128295898 + ], + [ + "BAC", + -13.064964294433594 + ], + [ + "▁$100,000", + -13.065122604370117 + ], + [ + "Finder", + -13.06515121459961 + ], + [ + "▁Leicester", + -13.065157890319824 + ], + [ + "▁1933", + -13.065159797668457 + ], + [ + "informatiile", + -13.065234184265137 + ], + [ + "lädt", + -13.065309524536133 + ], + [ + "iggle", + -13.065399169921875 + ], + [ + "▁Discuss", + -13.065462112426758 + ], + [ + "distributing", + -13.065470695495605 + ], + [ + "▁disappoint", + -13.065475463867188 + ], + [ + "ecţia", + -13.065611839294434 + ], + [ + "▁condiment", + -13.065640449523926 + ], + [ + "▁Marriott", + -13.065642356872559 + ], + [ + "▁entspannt", + -13.065644264221191 + ], + [ + "arbitrary", + -13.06564998626709 + ], + [ + "rühren", + -13.06574821472168 + ], + [ + "Intensiv", + -13.065771102905273 + ], + [ + "eliminare", + -13.065895080566406 + ], + [ + "muster", + -13.06594467163086 + ], + [ + "▁komplexe", + -13.066130638122559 + ], + [ + "▁(2008)", + -13.066184997558594 + ], + [ + "absolument", + -13.066349029541016 + ], + [ + "aloo", + -13.066420555114746 + ], + [ + "cererea", + -13.06655216217041 + ], + [ + "▁imobiliar", + -13.066696166992188 + ], + [ + "▁paramount", + -13.066705703735352 + ], + [ + "▁Vince", + -13.066723823547363 + ], + [ + "pov", + -13.067076683044434 + ], + [ + "▁conveyor", + -13.067549705505371 + ], + [ + "▁Natalie", + -13.067583084106445 + ], + [ + "▁Comedy", + -13.067623138427734 + ], + [ + "Developing", + -13.0678129196167 + ], + [ + "disputed", + -13.067878723144531 + ], + [ + "164", + -13.067911148071289 + ], + [ + "▁Communist", + -13.067949295043945 + ], + [ + "▁Bahnhof", + -13.06806468963623 + ], + [ + "dokument", + -13.068145751953125 + ], + [ + "▁Somali", + -13.06828498840332 + ], + [ + "▁Strasbourg", + -13.068503379821777 + ], + [ + "▁Technician", + -13.068550109863281 + ], + [ + "▁subsidies", + -13.068633079528809 + ], + [ + "judeţul", + -13.068723678588867 + ], + [ + "▁bible", + -13.068769454956055 + ], + [ + "gefahren", + -13.068855285644531 + ], + [ + "▁literal", + -13.068882942199707 + ], + [ + "▁diminish", + -13.068940162658691 + ], + [ + "Sfântul", + -13.0689697265625 + ], + [ + "▁doreșt", + -13.068978309631348 + ], + [ + "▁Xiaomi", + -13.069036483764648 + ], + [ + "▁planète", + -13.069130897521973 + ], + [ + "▁LTD", + -13.069175720214844 + ], + [ + "▁Zugriff", + -13.069196701049805 + ], + [ + "beginn", + -13.06921672821045 + ], + [ + "▁Einführung", + -13.069294929504395 + ], + [ + "▁coronar", + -13.069393157958984 + ], + [ + "lomi", + -13.0693941116333 + ], + [ + "▁Accueil", + -13.0695219039917 + ], + [ + "scanned", + -13.069528579711914 + ], + [ + "▁Banque", + -13.06952953338623 + ], + [ + "▁réaction", + -13.069531440734863 + ], + [ + "▁Hoffman", + -13.069546699523926 + ], + [ + "▁merveille", + -13.069637298583984 + ], + [ + "navigating", + -13.069719314575195 + ], + [ + "schalten", + -13.06984806060791 + ], + [ + "▁ieşi", + -13.070136070251465 + ], + [ + "1-6", + -13.070175170898438 + ], + [ + "▁frustr", + -13.070670127868652 + ], + [ + "▁réfléchi", + -13.0709810256958 + ], + [ + "▁difuz", + -13.071100234985352 + ], + [ + "▁freue", + -13.07121753692627 + ], + [ + "besuch", + -13.071349143981934 + ], + [ + "153", + -13.071386337280273 + ], + [ + "▁butterflies", + -13.071467399597168 + ], + [ + "▁terrifying", + -13.071467399597168 + ], + [ + "▁încuraj", + -13.071468353271484 
+ ], + [ + "▁Château", + -13.071470260620117 + ], + [ + "▁contingent", + -13.071474075317383 + ], + [ + "▁abusive", + -13.0714750289917 + ], + [ + "▁SharePoint", + -13.07148551940918 + ], + [ + "▁skating", + -13.071573257446289 + ], + [ + "▁militaire", + -13.07166576385498 + ], + [ + "▁Vig", + -13.071690559387207 + ], + [ + "omics", + -13.071840286254883 + ], + [ + "▁Blockchain", + -13.07197093963623 + ], + [ + "▁principii", + -13.071975708007812 + ], + [ + "▁permitting", + -13.071979522705078 + ], + [ + "optimisation", + -13.072270393371582 + ], + [ + "▁maintien", + -13.072328567504883 + ], + [ + "▁Aluminum", + -13.072442054748535 + ], + [ + "▁Plymouth", + -13.072443008422852 + ], + [ + "▁Weiterbildung", + -13.072457313537598 + ], + [ + "▁Finanzierung", + -13.072505950927734 + ], + [ + "▁Kerala", + -13.072514533996582 + ], + [ + "insulated", + -13.072668075561523 + ], + [ + "▁loaf", + -13.072802543640137 + ], + [ + "▁Sammlung", + -13.072929382324219 + ], + [ + "▁îndepărt", + -13.072930335998535 + ], + [ + "▁Gewerbe", + -13.072942733764648 + ], + [ + "udel", + -13.072988510131836 + ], + [ + "▁coursework", + -13.073104858398438 + ], + [ + "▁Darstellung", + -13.073246002197266 + ], + [ + "▁indeplin", + -13.073433876037598 + ], + [ + "▁Gandhi", + -13.073434829711914 + ], + [ + "tossed", + -13.07361888885498 + ], + [ + "ewed", + -13.073844909667969 + ], + [ + "▁classement", + -13.073884963989258 + ], + [ + "▁Protestant", + -13.073905944824219 + ], + [ + "▁frumoasă", + -13.073905944824219 + ], + [ + "▁pantalon", + -13.073906898498535 + ], + [ + "▁rivet", + -13.073966979980469 + ], + [ + "▁Echt", + -13.0741605758667 + ], + [ + "erviciului", + -13.07421588897705 + ], + [ + "fabricated", + -13.074322700500488 + ], + [ + "Compania", + -13.074372291564941 + ], + [ + "▁juvenile", + -13.074394226074219 + ], + [ + "▁souligne", + -13.07444953918457 + ], + [ + "▁chrono", + -13.07447338104248 + ], + [ + "▁VII", + -13.074594497680664 + ], + [ + "▁Kirch", + -13.074714660644531 + ], + [ + "catcher", + -13.075014114379883 + ], + [ + "salv", + -13.075263023376465 + ], + [ + "▁Enforcement", + -13.075370788574219 + ], + [ + "▁Penguin", + -13.075410842895508 + ], + [ + "kowski", + -13.075465202331543 + ], + [ + "▁2:1", + -13.075470924377441 + ], + [ + "gesundheit", + -13.075475692749023 + ], + [ + "▁unveil", + -13.075519561767578 + ], + [ + "bending", + -13.075531959533691 + ], + [ + "▁conecta", + -13.075579643249512 + ], + [ + "▁faim", + -13.075885772705078 + ], + [ + "▁MacBook", + -13.075969696044922 + ], + [ + "versuch", + -13.07600212097168 + ], + [ + "▁regiuni", + -13.076029777526855 + ], + [ + "▁Willow", + -13.076184272766113 + ], + [ + "▁finanziell", + -13.076303482055664 + ], + [ + "▁nurturing", + -13.076354026794434 + ], + [ + "impuls", + -13.076370239257812 + ], + [ + "▁funktionieren", + -13.076371192932129 + ], + [ + "▁rezult", + -13.076554298400879 + ], + [ + "▁spui", + -13.076593399047852 + ], + [ + "▁walkway", + -13.076653480529785 + ], + [ + "▁Rauch", + -13.076708793640137 + ], + [ + "169", + -13.076793670654297 + ], + [ + "610", + -13.076863288879395 + ], + [ + "▁scazut", + -13.0773286819458 + ], + [ + "▁Garrett", + -13.077329635620117 + ], + [ + "▁necesită", + -13.077352523803711 + ], + [ + "Articolul", + -13.077364921569824 + ], + [ + "numită", + -13.077371597290039 + ], + [ + "Coastal", + -13.077383041381836 + ], + [ + "▁canned", + -13.077421188354492 + ], + [ + "▁Friendly", + -13.077499389648438 + ], + [ + "dissolved", + -13.0775728225708 + ], + [ + "seid", + -13.077674865722656 + ], + [ + 
"▁feminin", + -13.077685356140137 + ], + [ + "▁fetch", + -13.077710151672363 + ], + [ + "▁Accent", + -13.077767372131348 + ], + [ + "phrase", + -13.077771186828613 + ], + [ + "effekt", + -13.077775955200195 + ], + [ + "▁Progressive", + -13.077777862548828 + ], + [ + "▁canadien", + -13.077820777893066 + ], + [ + "iety", + -13.077839851379395 + ], + [ + "eignen", + -13.077984809875488 + ], + [ + "paraître", + -13.07812213897705 + ], + [ + "▁asylum", + -13.07833194732666 + ], + [ + "▁Albany", + -13.078362464904785 + ], + [ + "▁remis", + -13.078386306762695 + ], + [ + "▁Joyce", + -13.078664779663086 + ], + [ + "schätzt", + -13.078784942626953 + ], + [ + "▁begleiten", + -13.078801155090332 + ], + [ + "▁Siemens", + -13.079007148742676 + ], + [ + "▁schlimm", + -13.079061508178711 + ], + [ + "▁Libra", + -13.079254150390625 + ], + [ + "▁Composite", + -13.079290390014648 + ], + [ + "▁écr", + -13.079315185546875 + ], + [ + "disciplina", + -13.079379081726074 + ], + [ + "▁premature", + -13.079630851745605 + ], + [ + "▁scopuri", + -13.079681396484375 + ], + [ + "ffnung", + -13.079715728759766 + ], + [ + "7000", + -13.079726219177246 + ], + [ + "▁conséquent", + -13.079780578613281 + ], + [ + "▁côte", + -13.079787254333496 + ], + [ + "celul", + -13.079872131347656 + ], + [ + "▁fourteen", + -13.079940795898438 + ], + [ + "▁Riverside", + -13.080077171325684 + ], + [ + "gemacht", + -13.08013916015625 + ], + [ + "▁volcanic", + -13.080272674560547 + ], + [ + "▁Salesforce", + -13.080315589904785 + ], + [ + "▁Granite", + -13.080317497253418 + ], + [ + "▁Zentral", + -13.080329895019531 + ], + [ + "▁Female", + -13.080341339111328 + ], + [ + "▁culmin", + -13.08047103881836 + ], + [ + "▁urmatoare", + -13.080547332763672 + ], + [ + "toxicity", + -13.080560684204102 + ], + [ + "▁mâna", + -13.080678939819336 + ], + [ + "▁Umfang", + -13.080764770507812 + ], + [ + "▁Encore", + -13.08077621459961 + ], + [ + "▁Edgar", + -13.080831527709961 + ], + [ + "▁négoci", + -13.080852508544922 + ], + [ + "njeux", + -13.080873489379883 + ], + [ + "▁variance", + -13.080917358398438 + ], + [ + "▁Functional", + -13.080973625183105 + ], + [ + "172", + -13.081046104431152 + ], + [ + "▁dissolve", + -13.0811185836792 + ], + [ + "förderung", + -13.081188201904297 + ], + [ + "▁Brilliant", + -13.081254959106445 + ], + [ + "▁comprehension", + -13.081254959106445 + ], + [ + "▁soybean", + -13.081254959106445 + ], + [ + "▁standalone", + -13.081255912780762 + ], + [ + "▁Communi", + -13.081303596496582 + ], + [ + "▁ajut", + -13.081313133239746 + ], + [ + "▁lavish", + -13.081338882446289 + ], + [ + "Ouest", + -13.081384658813477 + ], + [ + "▁Maggie", + -13.081385612487793 + ], + [ + "▁evolutionary", + -13.081550598144531 + ], + [ + "bowel", + -13.081575393676758 + ], + [ + "▁glyco", + -13.081626892089844 + ], + [ + "▁Happi", + -13.081706047058105 + ], + [ + "organising", + -13.081710815429688 + ], + [ + "▁übernimm", + -13.081727027893066 + ], + [ + "▁snowboard", + -13.081793785095215 + ], + [ + "▁prévention", + -13.081830024719238 + ], + [ + "▁Celebrate", + -13.082160949707031 + ], + [ + "▁pottery", + -13.082254409790039 + ], + [ + "▁Outstanding", + -13.082328796386719 + ], + [ + "▁toamna", + -13.082331657409668 + ], + [ + "▁graceful", + -13.082548141479492 + ], + [ + "197", + -13.082559585571289 + ], + [ + "strecke", + -13.082598686218262 + ], + [ + "▁medizinische", + -13.082733154296875 + ], + [ + "216", + -13.082839965820312 + ], + [ + "▁prune", + -13.082868576049805 + ], + [ + "Pourtant", + -13.083000183105469 + ], + [ + "▁Difference", + 
-13.083224296569824 + ], + [ + "▁factura", + -13.083830833435059 + ], + [ + "Mass", + -13.084161758422852 + ], + [ + "▁Enhanc", + -13.084190368652344 + ], + [ + "upholstered", + -13.084209442138672 + ], + [ + "▁übernommen", + -13.084209442138672 + ], + [ + "▁mitigation", + -13.084210395812988 + ], + [ + "▁Hidden", + -13.084219932556152 + ], + [ + "▁Häuser", + -13.084234237670898 + ], + [ + "▁Pavel", + -13.084403991699219 + ], + [ + "▁congress", + -13.084512710571289 + ], + [ + "▁antibody", + -13.084598541259766 + ], + [ + "▁stitches", + -13.084811210632324 + ], + [ + "▁colonies", + -13.084820747375488 + ], + [ + "Into", + -13.084900856018066 + ], + [ + "▁démo", + -13.084924697875977 + ], + [ + "▁MVP", + -13.085041046142578 + ], + [ + "▁replay", + -13.085062026977539 + ], + [ + "▁usoara", + -13.08522891998291 + ], + [ + "▁Breast", + -13.085278511047363 + ], + [ + "ooney", + -13.085336685180664 + ], + [ + "▁außen", + -13.085663795471191 + ], + [ + "▁Motorola", + -13.085695266723633 + ], + [ + "▁spalat", + -13.08578109741211 + ], + [ + "euillez", + -13.086088180541992 + ], + [ + "▁jeunesse", + -13.086170196533203 + ], + [ + "▁pastoral", + -13.086174011230469 + ], + [ + "▁Sussex", + -13.086185455322266 + ], + [ + "▁stencil", + -13.08619213104248 + ], + [ + "▁organismului", + -13.086504936218262 + ], + [ + "seized", + -13.086649894714355 + ], + [ + "▁întrebare", + -13.086865425109863 + ], + [ + "cliquez", + -13.086874961853027 + ], + [ + "5.7", + -13.086984634399414 + ], + [ + "▁Yama", + -13.087080955505371 + ], + [ + "painted", + -13.08708667755127 + ], + [ + "▁Swimming", + -13.087176322937012 + ], + [ + "Rhythm", + -13.087202072143555 + ], + [ + "▁sorrow", + -13.087210655212402 + ], + [ + "▁Movers", + -13.08731460571289 + ], + [ + "renforcer", + -13.08735466003418 + ], + [ + "▁Wach", + -13.087381362915039 + ], + [ + "0,00", + -13.087390899658203 + ], + [ + "▁glove", + -13.08753490447998 + ], + [ + "▁stâng", + -13.087669372558594 + ], + [ + "rgendwann", + -13.087687492370605 + ], + [ + "▁Philippine", + -13.08769416809082 + ], + [ + "▁anunțat", + -13.087716102600098 + ], + [ + "▁Coleman", + -13.087723731994629 + ], + [ + "affir", + -13.087918281555176 + ], + [ + "uleiul", + -13.08808422088623 + ], + [ + "▁Coconut", + -13.088197708129883 + ], + [ + "▁Supplement", + -13.088210105895996 + ], + [ + "haudiere", + -13.088293075561523 + ], + [ + "▁kettle", + -13.088313102722168 + ], + [ + "▁3,5", + -13.088370323181152 + ], + [ + "refurbished", + -13.088425636291504 + ], + [ + "esthétique", + -13.088665962219238 + ], + [ + "performing", + -13.088667869567871 + ], + [ + "▁Engag", + -13.088762283325195 + ], + [ + "Group", + -13.088801383972168 + ], + [ + "▁viande", + -13.088887214660645 + ], + [ + "▁oricum", + -13.088888168334961 + ], + [ + "Spitalul", + -13.089093208312988 + ], + [ + "▁cesse", + -13.089110374450684 + ], + [ + "▁contradiction", + -13.089130401611328 + ], + [ + "▁Chrysler", + -13.089154243469238 + ], + [ + "▁poultry", + -13.089154243469238 + ], + [ + "▁thirteen", + -13.089154243469238 + ], + [ + "▁sightseeing", + -13.089155197143555 + ], + [ + "▁Miguel", + -13.089158058166504 + ], + [ + "▁terminology", + -13.089334487915039 + ], + [ + "▁Genetic", + -13.089553833007812 + ], + [ + "commercial", + -13.08963394165039 + ], + [ + "gehoben", + -13.08965015411377 + ], + [ + "RIGHT", + -13.08995532989502 + ], + [ + "▁proprietate", + -13.089990615844727 + ], + [ + "▁Cannes", + -13.090012550354004 + ], + [ + "▁klicken", + -13.090023040771484 + ], + [ + "▁Belgique", + -13.0901460647583 + ], + [ + 
"tapped", + -13.09034538269043 + ], + [ + "kinetic", + -13.090569496154785 + ], + [ + "▁feuilles", + -13.090673446655273 + ], + [ + "whitening", + -13.090760231018066 + ], + [ + "Any", + -13.090946197509766 + ], + [ + "Manager", + -13.091099739074707 + ], + [ + "▁constatat", + -13.091106414794922 + ], + [ + "▁Myanmar", + -13.091140747070312 + ], + [ + "▁Examination", + -13.091142654418945 + ], + [ + "▁règle", + -13.091208457946777 + ], + [ + "▁umgesetzt", + -13.09128475189209 + ], + [ + "211", + -13.091336250305176 + ], + [ + "▁Herald", + -13.091449737548828 + ], + [ + "Alex", + -13.091680526733398 + ], + [ + "▁drauf", + -13.091707229614258 + ], + [ + "logger", + -13.091714859008789 + ], + [ + "▁pictur", + -13.09186840057373 + ], + [ + "▁Divi", + -13.09196949005127 + ], + [ + "▁furnizat", + -13.092089653015137 + ], + [ + "▁verzichten", + -13.092132568359375 + ], + [ + "▁Sergi", + -13.092199325561523 + ], + [ + "contaminated", + -13.09223747253418 + ], + [ + "▁Buddy", + -13.092243194580078 + ], + [ + "▁chilled", + -13.092268943786621 + ], + [ + "▁vorlieg", + -13.092317581176758 + ], + [ + "▁Claudia", + -13.092632293701172 + ], + [ + "▁miserable", + -13.092653274536133 + ], + [ + "▁sketches", + -13.092683792114258 + ], + [ + "schicken", + -13.092814445495605 + ], + [ + "since", + -13.0928373336792 + ], + [ + "2.9", + -13.092840194702148 + ], + [ + "▁sitzen", + -13.092928886413574 + ], + [ + "ceapa", + -13.093396186828613 + ], + [ + "respectarea", + -13.093438148498535 + ], + [ + "▁handheld", + -13.093448638916016 + ], + [ + "popular", + -13.093527793884277 + ], + [ + "calming", + -13.093603134155273 + ], + [ + "Govern", + -13.093632698059082 + ], + [ + "▁omega", + -13.093645095825195 + ], + [ + "▁Planner", + -13.093791007995605 + ], + [ + "enriched", + -13.093850135803223 + ], + [ + "154", + -13.093976974487305 + ], + [ + "▁autorisé", + -13.093989372253418 + ], + [ + "▁cadouri", + -13.09407901763916 + ], + [ + "▁vulnerabilities", + -13.094143867492676 + ], + [ + "▁Arbeitnehmer", + -13.094158172607422 + ], + [ + "éditeur", + -13.094234466552734 + ], + [ + "▁Anleitung", + -13.094317436218262 + ], + [ + "rubbing", + -13.094343185424805 + ], + [ + "▁autovehicul", + -13.094621658325195 + ], + [ + "▁öffnen", + -13.094621658325195 + ], + [ + "▁Napoleon", + -13.094622611999512 + ], + [ + "▁cliché", + -13.094637870788574 + ], + [ + "▁Schaf", + -13.09469985961914 + ], + [ + "regulating", + -13.094894409179688 + ], + [ + "▁Kühl", + -13.09490966796875 + ], + [ + "▁blush", + -13.094913482666016 + ], + [ + "▁discard", + -13.094992637634277 + ], + [ + "▁confine", + -13.095027923583984 + ], + [ + "▁Rodriguez", + -13.09511947631836 + ], + [ + "▁ADHD", + -13.095165252685547 + ], + [ + "▁Madame", + -13.09516716003418 + ], + [ + "▁résolution", + -13.095319747924805 + ], + [ + "▁flair", + -13.095369338989258 + ], + [ + "▁claw", + -13.095422744750977 + ], + [ + "▁1929", + -13.095643043518066 + ], + [ + "ETH", + -13.095672607421875 + ], + [ + "nähe", + -13.095804214477539 + ], + [ + "▁soothe", + -13.0958251953125 + ], + [ + "4.9", + -13.095833778381348 + ], + [ + "montée", + -13.095925331115723 + ], + [ + "confirming", + -13.095989227294922 + ], + [ + "continent", + -13.09613037109375 + ], + [ + "reiz", + -13.09643840789795 + ], + [ + "john", + -13.096577644348145 + ], + [ + "IONAL", + -13.096588134765625 + ], + [ + "▁exported", + -13.0966215133667 + ], + [ + "▁Prison", + -13.096651077270508 + ], + [ + "possessed", + -13.096952438354492 + ], + [ + "▁placebo", + -13.096991539001465 + ], + [ + "▁biodiversity", + 
-13.097116470336914 + ], + [ + "▁combustion", + -13.097116470336914 + ], + [ + "▁Plumbing", + -13.09711742401123 + ], + [ + "ixie", + -13.097124099731445 + ], + [ + "▁repetition", + -13.09715461730957 + ], + [ + "▁soumis", + -13.097372055053711 + ], + [ + "▁reduc", + -13.097671508789062 + ], + [ + "▁constrain", + -13.097759246826172 + ], + [ + "Anti", + -13.097760200500488 + ], + [ + "consolidated", + -13.097817420959473 + ], + [ + "214", + -13.098095893859863 + ], + [ + "▁breaches", + -13.098108291625977 + ], + [ + "infringement", + -13.098115921020508 + ], + [ + "▁drizzle", + -13.098115921020508 + ], + [ + "▁erhöhen", + -13.098116874694824 + ], + [ + "▁Somerset", + -13.098118782043457 + ], + [ + "▁blonde", + -13.098132133483887 + ], + [ + "▁Funny", + -13.09813404083252 + ], + [ + "tuşi", + -13.098149299621582 + ], + [ + "▁reinvent", + -13.098162651062012 + ], + [ + "▁sérieux", + -13.098247528076172 + ], + [ + "▁croire", + -13.098308563232422 + ], + [ + "general", + -13.098315238952637 + ], + [ + "▁Distance", + -13.098319053649902 + ], + [ + "▁VoIP", + -13.098348617553711 + ], + [ + "▁adăugat", + -13.098406791687012 + ], + [ + "matik", + -13.098546028137207 + ], + [ + "▁avatar", + -13.098647117614746 + ], + [ + "▁superstar", + -13.098804473876953 + ], + [ + "8.0", + -13.098814010620117 + ], + [ + "lusieurs", + -13.098982810974121 + ], + [ + "▁Judeţean", + -13.099117279052734 + ], + [ + "offenen", + -13.099128723144531 + ], + [ + "RAF", + -13.099133491516113 + ], + [ + "▁restroom", + -13.099207878112793 + ], + [ + "enfance", + -13.099348068237305 + ], + [ + "▁garnish", + -13.099499702453613 + ], + [ + "▁vermittelt", + -13.099631309509277 + ], + [ + "Histoire", + -13.099634170532227 + ], + [ + "cyan", + -13.100628852844238 + ], + [ + "Talk", + -13.100666046142578 + ], + [ + "▁Varianten", + -13.10069465637207 + ], + [ + "▁Lille", + -13.10085678100586 + ], + [ + "▁offenbar", + -13.10098934173584 + ], + [ + "▁rénovation", + -13.10112190246582 + ], + [ + "▁comentarii", + -13.101249694824219 + ], + [ + "▁Bedford", + -13.10130500793457 + ], + [ + "▁cercetări", + -13.101325988769531 + ], + [ + "▁précision", + -13.101337432861328 + ], + [ + "MRC", + -13.101358413696289 + ], + [ + "alterations", + -13.101476669311523 + ], + [ + "▁discours", + -13.101531028747559 + ], + [ + "äger", + -13.101577758789062 + ], + [ + "▁antreprenor", + -13.101622581481934 + ], + [ + "▁Oriental", + -13.101849555969238 + ], + [ + "conducerea", + -13.101868629455566 + ], + [ + "CBC", + -13.101932525634766 + ], + [ + "▁mince", + -13.101985931396484 + ], + [ + "▁presidency", + -13.10212516784668 + ], + [ + "▁lipstick", + -13.102167129516602 + ], + [ + "▁SERVICES", + -13.102237701416016 + ], + [ + "productive", + -13.10237979888916 + ], + [ + "Assad", + -13.102400779724121 + ], + [ + "▁efectiv", + -13.102540969848633 + ], + [ + "▁gestern", + -13.102596282958984 + ], + [ + "▁RGB", + -13.102606773376465 + ], + [ + "▁Transilvania", + -13.102627754211426 + ], + [ + "▁Raleigh", + -13.102670669555664 + ], + [ + "DOM", + -13.102702140808105 + ], + [ + "▁iesit", + -13.102806091308594 + ], + [ + "▁anuntat", + -13.102810859680176 + ], + [ + "▁automatiquement", + -13.102901458740234 + ], + [ + "▁proliferation", + -13.103130340576172 + ], + [ + "▁Maroc", + -13.103156089782715 + ], + [ + "▁prezenţ", + -13.10323429107666 + ], + [ + "▁Filipino", + -13.103296279907227 + ], + [ + "▁Traian", + -13.103351593017578 + ], + [ + "▁swimmer", + -13.10356616973877 + ], + [ + "▁Slovenia", + -13.103632926940918 + ], + [ + "phobia", + -13.103724479675293 
+ ], + [ + "curricular", + -13.103734016418457 + ], + [ + "jurnal", + -13.103825569152832 + ], + [ + "▁vorne", + -13.103870391845703 + ], + [ + "▁asuma", + -13.103875160217285 + ], + [ + "defended", + -13.104104995727539 + ], + [ + "▁imminent", + -13.104140281677246 + ], + [ + "favored", + -13.10417366027832 + ], + [ + "▁innovator", + -13.104179382324219 + ], + [ + "▁Salzburg", + -13.104289054870605 + ], + [ + "5.4", + -13.104452133178711 + ], + [ + "Safe", + -13.104597091674805 + ], + [ + "▁inteleg", + -13.104744911193848 + ], + [ + "▁charisma", + -13.104781150817871 + ], + [ + "nature", + -13.104784965515137 + ], + [ + "4.8", + -13.104942321777344 + ], + [ + "argues", + -13.105104446411133 + ], + [ + "▁dimensiune", + -13.105142593383789 + ], + [ + "▁subdivision", + -13.105142593383789 + ], + [ + "▁embarrassing", + -13.105144500732422 + ], + [ + "▁confuse", + -13.105207443237305 + ], + [ + "DIC", + -13.105460166931152 + ], + [ + "rubrique", + -13.10549545288086 + ], + [ + "dépendance", + -13.105598449707031 + ], + [ + "INCLUD", + -13.10565185546875 + ], + [ + "▁Griffin", + -13.10574722290039 + ], + [ + "157", + -13.105751037597656 + ], + [ + "▁revamp", + -13.105839729309082 + ], + [ + "▁umgehen", + -13.10595989227295 + ], + [ + "▁mențin", + -13.106231689453125 + ], + [ + "▁1937", + -13.106695175170898 + ], + [ + "eklagte", + -13.106766700744629 + ], + [ + "▁clientèle", + -13.106801986694336 + ], + [ + "▁campsite", + -13.10708999633789 + ], + [ + "▁florist", + -13.107144355773926 + ], + [ + "▁Ferguson", + -13.107159614562988 + ], + [ + "▁demolition", + -13.107160568237305 + ], + [ + "▁McCain", + -13.107254981994629 + ], + [ + "▁reckon", + -13.10733413696289 + ], + [ + "striped", + -13.107414245605469 + ], + [ + "▁sonore", + -13.107481002807617 + ], + [ + "migrated", + -13.107548713684082 + ], + [ + "▁fluorescent", + -13.107664108276367 + ], + [ + "▁Colegi", + -13.107762336730957 + ], + [ + "ianu", + -13.107860565185547 + ], + [ + "cruising", + -13.107882499694824 + ], + [ + "LINK", + -13.107965469360352 + ], + [ + "▁Cutting", + -13.108001708984375 + ], + [ + "ABILITY", + -13.108168601989746 + ], + [ + "▁Categories", + -13.108168601989746 + ], + [ + "▁erhoben", + -13.108168601989746 + ], + [ + "▁Cocktail", + -13.108169555664062 + ], + [ + "▁Generator", + -13.108177185058594 + ], + [ + "▁gesucht", + -13.108186721801758 + ], + [ + "▁telescope", + -13.10818862915039 + ], + [ + "KET", + -13.108192443847656 + ], + [ + "▁hilfreich", + -13.108192443847656 + ], + [ + "▁beneficiary", + -13.108585357666016 + ], + [ + "▁Winston", + -13.108636856079102 + ], + [ + "Auswirkungen", + -13.108675956726074 + ], + [ + "portrayed", + -13.108705520629883 + ], + [ + "▁Aspekte", + -13.108743667602539 + ], + [ + "ffected", + -13.108901023864746 + ], + [ + "eutic", + -13.108905792236328 + ], + [ + "International", + -13.109021186828613 + ], + [ + "attente", + -13.109078407287598 + ], + [ + "mentioning", + -13.109119415283203 + ], + [ + "launch", + -13.109129905700684 + ], + [ + "▁EURO", + -13.109152793884277 + ], + [ + "▁Fraser", + -13.109344482421875 + ], + [ + "▁Johannes", + -13.109408378601074 + ], + [ + "▁felicit", + -13.109477043151855 + ], + [ + "▁plâng", + -13.109522819519043 + ], + [ + "izant", + -13.10971736907959 + ], + [ + "▁reţe", + -13.109846115112305 + ], + [ + "Mech", + -13.109954833984375 + ], + [ + "▁algebra", + -13.110193252563477 + ], + [ + "▁surgeries", + -13.110257148742676 + ], + [ + "▁semifinal", + -13.110262870788574 + ], + [ + "▁intimidating", + -13.110288619995117 + ], + [ + "▁exkl", + 
-13.110604286193848 + ], + [ + "asigurarea", + -13.110918998718262 + ], + [ + "Tek", + -13.111136436462402 + ], + [ + "▁Einladung", + -13.111205101013184 + ], + [ + "▁similaire", + -13.111205101013184 + ], + [ + "▁bebelus", + -13.111221313476562 + ], + [ + "▁déclin", + -13.111400604248047 + ], + [ + "▁Console", + -13.111495018005371 + ], + [ + "RET", + -13.111573219299316 + ], + [ + "appli", + -13.111586570739746 + ], + [ + "45%", + -13.111663818359375 + ], + [ + "Evenimentul", + -13.111811637878418 + ], + [ + "sincerely", + -13.111812591552734 + ], + [ + "sammlung", + -13.112098693847656 + ], + [ + "Amérique", + -13.112220764160156 + ], + [ + "▁1919", + -13.112326622009277 + ], + [ + "regulation", + -13.112367630004883 + ], + [ + "gebäude", + -13.112726211547852 + ], + [ + "▁Perspektive", + -13.112726211547852 + ], + [ + "Espagne", + -13.112744331359863 + ], + [ + "▁Underground", + -13.11283016204834 + ], + [ + "secret", + -13.112833976745605 + ], + [ + "▁Aussicht", + -13.112874031066895 + ], + [ + "Photo", + -13.112977027893066 + ], + [ + "▁Brust", + -13.113144874572754 + ], + [ + "▁Sustainability", + -13.11323356628418 + ], + [ + "▁clădiri", + -13.11323356628418 + ], + [ + "▁librarian", + -13.11323356628418 + ], + [ + "▁HBO", + -13.113235473632812 + ], + [ + "▁Parallel", + -13.113240242004395 + ], + [ + "▁shimmer", + -13.113283157348633 + ], + [ + "▁schlicht", + -13.113292694091797 + ], + [ + "▁anticipat", + -13.113311767578125 + ], + [ + "▁foolish", + -13.11335563659668 + ], + [ + "▁Ability", + -13.11347484588623 + ], + [ + "▁ceremoni", + -13.11358642578125 + ], + [ + "▁Ablauf", + -13.11359977722168 + ], + [ + "icrobial", + -13.113606452941895 + ], + [ + "▁actiuni", + -13.11362361907959 + ], + [ + "▁Wilhelm", + -13.113761901855469 + ], + [ + "▁nennen", + -13.113775253295898 + ], + [ + "▁botez", + -13.113832473754883 + ], + [ + "Alpes", + -13.113912582397461 + ], + [ + "▁libér", + -13.11392593383789 + ], + [ + "▁sneakers", + -13.114052772521973 + ], + [ + "geschafft", + -13.114252090454102 + ], + [ + "▁downstairs", + -13.114261627197266 + ], + [ + "▁wrench", + -13.114294052124023 + ], + [ + "▁erheblich", + -13.11442756652832 + ], + [ + "▁alimentar", + -13.114710807800293 + ], + [ + "▁suger", + -13.11474323272705 + ], + [ + "analysis", + -13.114883422851562 + ], + [ + "öhn", + -13.114891052246094 + ], + [ + "▁Nantes", + -13.114895820617676 + ], + [ + "▁Arbor", + -13.114899635314941 + ], + [ + "ooze", + -13.115150451660156 + ], + [ + "▁facade", + -13.115229606628418 + ], + [ + "▁MySQL", + -13.115266799926758 + ], + [ + "▁Salvador", + -13.115266799926758 + ], + [ + "▁Schlafzimmer", + -13.115279197692871 + ], + [ + "▁autentic", + -13.115320205688477 + ], + [ + "▁prezint", + -13.115348815917969 + ], + [ + "▁campground", + -13.115397453308105 + ], + [ + "Query", + -13.11540412902832 + ], + [ + "bekannt", + -13.115598678588867 + ], + [ + "arcinia", + -13.115632057189941 + ], + [ + "▁stunt", + -13.115825653076172 + ], + [ + "▁informare", + -13.115830421447754 + ], + [ + "▁interzis", + -13.11584186553955 + ], + [ + "▁Burke", + -13.115995407104492 + ], + [ + "certified", + -13.11601734161377 + ], + [ + "▁clove", + -13.11605167388916 + ], + [ + "java", + -13.116271018981934 + ], + [ + "▁Vielfalt", + -13.116284370422363 + ], + [ + "gebung", + -13.116329193115234 + ], + [ + "▁9/11", + -13.116497993469238 + ], + [ + "▁disruptive", + -13.11650562286377 + ], + [ + "visual", + -13.116693496704102 + ], + [ + "▁anunţat", + -13.11679458618164 + ], + [ + "▁Plätze", + -13.116799354553223 + ], + [ + 
"▁reduceri", + -13.116920471191406 + ], + [ + "autorisation", + -13.116950035095215 + ], + [ + "▁ligament", + -13.11705207824707 + ], + [ + "▁învăța", + -13.117081642150879 + ], + [ + "läufig", + -13.117303848266602 + ], + [ + "▁Copenhagen", + -13.117303848266602 + ], + [ + "▁commodities", + -13.117303848266602 + ], + [ + "▁eindeutig", + -13.117313385009766 + ], + [ + "▁catheter", + -13.117321014404297 + ], + [ + "erklärung", + -13.117720603942871 + ], + [ + "▁intelectual", + -13.117814064025879 + ], + [ + "▁municipality", + -13.117891311645508 + ], + [ + "▁1936", + -13.11798095703125 + ], + [ + "rruption", + -13.118217468261719 + ], + [ + "▁Lafayette", + -13.118324279785156 + ], + [ + "▁berühmte", + -13.118324279785156 + ], + [ + "▁idylli", + -13.118325233459473 + ], + [ + "▁caldura", + -13.118447303771973 + ], + [ + "▁tablette", + -13.118535995483398 + ], + [ + "▁liquidity", + -13.118728637695312 + ], + [ + "NGOs", + -13.118885040283203 + ], + [ + "▁supliment", + -13.11889934539795 + ], + [ + "contact", + -13.119075775146484 + ], + [ + "lustig", + -13.119219779968262 + ], + [ + "▁watercolor", + -13.119319915771484 + ], + [ + "▁Tiffany", + -13.119344711303711 + ], + [ + "▁Glauben", + -13.119365692138672 + ], + [ + "Immobilie", + -13.119406700134277 + ], + [ + "▁stripped", + -13.119549751281738 + ], + [ + "▁Beatles", + -13.119601249694824 + ], + [ + "ани", + -13.119770050048828 + ], + [ + "▁lifespan", + -13.119986534118652 + ], + [ + "▁profondeur", + -13.120251655578613 + ], + [ + "▁durere", + -13.120329856872559 + ], + [ + "▁Lithuania", + -13.120367050170898 + ], + [ + "▁resurrection", + -13.120367050170898 + ], + [ + "▁suitcase", + -13.120535850524902 + ], + [ + "▁Plumber", + -13.120545387268066 + ], + [ + "criticized", + -13.120595932006836 + ], + [ + "feared", + -13.120756149291992 + ], + [ + "▁Aunt", + -13.120929718017578 + ], + [ + "otwithstanding", + -13.121068000793457 + ], + [ + "verständlich", + -13.12115478515625 + ], + [ + "fiber", + -13.121248245239258 + ], + [ + "headquartered", + -13.121390342712402 + ], + [ + "▁Perspective", + -13.121391296386719 + ], + [ + "▁semantic", + -13.121413230895996 + ], + [ + "VIEW", + -13.121431350708008 + ], + [ + "▁Ersatzteile", + -13.121567726135254 + ], + [ + "▁disgust", + -13.121685981750488 + ], + [ + "rrington", + -13.121834754943848 + ], + [ + "ässe", + -13.121922492980957 + ], + [ + "▁anerkannt", + -13.121956825256348 + ], + [ + "meaning", + -13.12203598022461 + ], + [ + "178", + -13.122039794921875 + ], + [ + "▁grupuri", + -13.1221284866333 + ], + [ + "ciones", + -13.122267723083496 + ], + [ + "▁Mobility", + -13.122414588928223 + ], + [ + "▁unstable", + -13.122422218322754 + ], + [ + "▁FULL", + -13.122456550598145 + ], + [ + "austausch", + -13.122491836547852 + ], + [ + "▁culminat", + -13.122549057006836 + ], + [ + "▁Roast", + -13.122742652893066 + ], + [ + "existant", + -13.122940063476562 + ], + [ + "167", + -13.123008728027344 + ], + [ + "tinerii", + -13.123040199279785 + ], + [ + "September", + -13.123115539550781 + ], + [ + "▁haircut", + -13.123274803161621 + ], + [ + "▁Tutorial", + -13.123440742492676 + ], + [ + "▁enquiries", + -13.123440742492676 + ], + [ + "▁livelihood", + -13.123440742492676 + ], + [ + "▁proficiency", + -13.123440742492676 + ], + [ + "▁pavement", + -13.123443603515625 + ], + [ + "▁Reservation", + -13.123445510864258 + ], + [ + "aimerai", + -13.123491287231445 + ], + [ + "▁laboratoire", + -13.123492240905762 + ], + [ + "leihen", + -13.123501777648926 + ], + [ + "ministerium", + -13.123518943786621 + ], + [ + 
"▁Concentr", + -13.12366008758545 + ], + [ + "▁swipe", + -13.12368106842041 + ], + [ + "extrêmement", + -13.123687744140625 + ], + [ + "cultivated", + -13.123708724975586 + ], + [ + "▁Converse", + -13.123845100402832 + ], + [ + "▁paycheck", + -13.123863220214844 + ], + [ + "olltest", + -13.123995780944824 + ], + [ + "▁Bauch", + -13.124022483825684 + ], + [ + "▁autobuz", + -13.124067306518555 + ], + [ + "attack", + -13.124094009399414 + ], + [ + "While", + -13.124311447143555 + ], + [ + "Retrouvez", + -13.124320983886719 + ], + [ + "▁Dolphin", + -13.124466896057129 + ], + [ + "▁Shelby", + -13.124480247497559 + ], + [ + "▁Diagnostic", + -13.124486923217773 + ], + [ + "▁reconcil", + -13.124558448791504 + ], + [ + "▁Iaşi", + -13.124733924865723 + ], + [ + "▁iubesc", + -13.124979972839355 + ], + [ + "▁Bestseller", + -13.124985694885254 + ], + [ + "▁antrenor", + -13.125035285949707 + ], + [ + "▁Imaging", + -13.125089645385742 + ], + [ + "▁priorité", + -13.125295639038086 + ], + [ + "▁brewery", + -13.125494003295898 + ], + [ + "▁residual", + -13.125494003295898 + ], + [ + "▁intermittent", + -13.125494956970215 + ], + [ + "Kollekt", + -13.125585556030273 + ], + [ + "▁Walsh", + -13.12558650970459 + ], + [ + "▁marvelous", + -13.125653266906738 + ], + [ + "canceled", + -13.125686645507812 + ], + [ + "174", + -13.125761985778809 + ], + [ + "normes", + -13.125837326049805 + ], + [ + "▁Tempo", + -13.125996589660645 + ], + [ + "▁Târgu", + -13.126008987426758 + ], + [ + "877", + -13.126165390014648 + ], + [ + "5-8", + -13.126190185546875 + ], + [ + "960", + -13.126486778259277 + ], + [ + "▁Scandinavia", + -13.1265230178833 + ], + [ + "▁prolific", + -13.126526832580566 + ], + [ + "lasi", + -13.126916885375977 + ], + [ + "glück", + -13.127097129821777 + ], + [ + "▁immersion", + -13.127204895019531 + ], + [ + "RSA", + -13.127323150634766 + ], + [ + "▁Polk", + -13.127340316772461 + ], + [ + "▁transmitter", + -13.12747859954834 + ], + [ + "▁Kleidung", + -13.12755298614502 + ], + [ + "▁Cosmo", + -13.127676963806152 + ], + [ + "▁1935", + -13.127788543701172 + ], + [ + "höhere", + -13.127906799316406 + ], + [ + "▁Tatsache", + -13.128074645996094 + ], + [ + "▁Outlet", + -13.1282377243042 + ], + [ + "▁canalisation", + -13.12824821472168 + ], + [ + "Mbps", + -13.128433227539062 + ], + [ + "▁skeptical", + -13.128582954406738 + ], + [ + "mplification", + -13.128617286682129 + ], + [ + "▁Advice", + -13.128618240356445 + ], + [ + "▁détaillé", + -13.128676414489746 + ], + [ + "660", + -13.128701210021973 + ], + [ + "▁eyebrow", + -13.128722190856934 + ], + [ + "▁HIGH", + -13.128898620605469 + ], + [ + "hnlich", + -13.129073143005371 + ], + [ + "▁depăș", + -13.12910270690918 + ], + [ + "▁procurori", + -13.129140853881836 + ], + [ + "▁refrain", + -13.129212379455566 + ], + [ + "▁geschaffen", + -13.12952995300293 + ], + [ + "justement", + -13.129663467407227 + ], + [ + "exposing", + -13.129700660705566 + ], + [ + "243", + -13.1298828125 + ], + [ + "sectorul", + -13.130104064941406 + ], + [ + "▁courrier", + -13.130180358886719 + ], + [ + "▁carcas", + -13.130199432373047 + ], + [ + "sitter", + -13.13022518157959 + ], + [ + "▁Schreiben", + -13.130335807800293 + ], + [ + "▁malfunction", + -13.130358695983887 + ], + [ + "poartă", + -13.130522727966309 + ], + [ + "raisons", + -13.130565643310547 + ], + [ + "▁HOT", + -13.130650520324707 + ], + [ + "▁refreshed", + -13.130730628967285 + ], + [ + "mânt", + -13.130744934082031 + ], + [ + "▁coefficient", + -13.13097858428955 + ], + [ + "▁instituţii", + -13.131194114685059 + ], + [ + 
"▁sanguin", + -13.131202697753906 + ], + [ + "▁ceci", + -13.131213188171387 + ], + [ + "▁garçon", + -13.131232261657715 + ], + [ + "deluxe", + -13.131237030029297 + ], + [ + "▁rectif", + -13.131311416625977 + ], + [ + "920", + -13.131364822387695 + ], + [ + "Exista", + -13.131428718566895 + ], + [ + "▁magnif", + -13.131568908691406 + ], + [ + "efficiencies", + -13.131681442260742 + ], + [ + "▁Mitsubishi", + -13.131681442260742 + ], + [ + "▁consortium", + -13.131681442260742 + ], + [ + "▁baggage", + -13.131683349609375 + ], + [ + "▁guild", + -13.131736755371094 + ], + [ + "▁sixty", + -13.13193130493164 + ], + [ + "▁Retreat", + -13.13245677947998 + ], + [ + "batting", + -13.132473945617676 + ], + [ + "470", + -13.132708549499512 + ], + [ + "▁Britanie", + -13.132718086242676 + ], + [ + "displaced", + -13.132734298706055 + ], + [ + "▁spați", + -13.132794380187988 + ], + [ + "▁exceptionnelle", + -13.13281536102295 + ], + [ + "▁authorize", + -13.132906913757324 + ], + [ + "▁prescribe", + -13.133187294006348 + ], + [ + "▁dépannage", + -13.133234024047852 + ], + [ + "▁sexuelle", + -13.133234024047852 + ], + [ + "valid", + -13.133275032043457 + ], + [ + "▁hymn", + -13.133752822875977 + ], + [ + "▁histories", + -13.133757591247559 + ], + [ + "▁oriunde", + -13.133764266967773 + ], + [ + "Pop", + -13.133785247802734 + ], + [ + "▁dispoziţi", + -13.133800506591797 + ], + [ + "ADI", + -13.133819580078125 + ], + [ + "Google", + -13.133830070495605 + ], + [ + "▁Autism", + -13.133918762207031 + ], + [ + "▁aggr", + -13.134354591369629 + ], + [ + "bleed", + -13.134618759155273 + ], + [ + "▁displacement", + -13.13478946685791 + ], + [ + "▁hobbies", + -13.13478946685791 + ], + [ + "▁anatomy", + -13.134799003601074 + ], + [ + "▁Klinik", + -13.134821891784668 + ], + [ + "▁CCTV", + -13.1348237991333 + ], + [ + "readable", + -13.134886741638184 + ], + [ + "ulph", + -13.134982109069824 + ], + [ + "metabol", + -13.135035514831543 + ], + [ + "▁rugăm", + -13.135037422180176 + ], + [ + "▁Scotia", + -13.135087013244629 + ], + [ + "▁Einheit", + -13.135211944580078 + ], + [ + "▁troupe", + -13.13581371307373 + ], + [ + "▁Practitioner", + -13.135828018188477 + ], + [ + "▁oarec", + -13.135909080505371 + ], + [ + "Appel", + -13.135998725891113 + ], + [ + "situația", + -13.136096000671387 + ], + [ + "▁Yemen", + -13.136353492736816 + ], + [ + "piping", + -13.136515617370605 + ], + [ + "blood", + -13.136772155761719 + ], + [ + "engraved", + -13.136866569519043 + ], + [ + "▁Cristina", + -13.136866569519043 + ], + [ + "▁inaccurate", + -13.136866569519043 + ], + [ + "savory", + -13.136878967285156 + ], + [ + "atism", + -13.136919021606445 + ], + [ + "▁dependency", + -13.137007713317871 + ], + [ + "▁assertion", + -13.137015342712402 + ], + [ + "▁intersect", + -13.137201309204102 + ], + [ + "DATA", + -13.137224197387695 + ], + [ + "▁britanic", + -13.1373872756958 + ], + [ + "▁sanitaire", + -13.137393951416016 + ], + [ + "▁PLUS", + -13.137436866760254 + ], + [ + "▁platter", + -13.137730598449707 + ], + [ + "▁reconsider", + -13.137802124023438 + ], + [ + "▁Swim", + -13.13786792755127 + ], + [ + "▁Scene", + -13.137896537780762 + ], + [ + "▁Reynolds", + -13.137907028198242 + ], + [ + "▁gesund", + -13.137922286987305 + ], + [ + "international", + -13.137959480285645 + ], + [ + "government", + -13.13804817199707 + ], + [ + "▁gemstone", + -13.138052940368652 + ], + [ + "▁reproductive", + -13.1381196975708 + ], + [ + "▁expressive", + -13.13820743560791 + ], + [ + "▁tranche", + -13.13842487335205 + ], + [ + "▁Niagara", + -13.138427734375 + ], 
+ [ + "▁Studierende", + -13.138434410095215 + ], + [ + "▁crave", + -13.138607025146484 + ], + [ + "pathetic", + -13.138739585876465 + ], + [ + "▁1916", + -13.138858795166016 + ], + [ + "▁Thousand", + -13.138873100280762 + ], + [ + "uffed", + -13.138893127441406 + ], + [ + "▁Lancaster", + -13.138960838317871 + ], + [ + "▁revenge", + -13.138972282409668 + ], + [ + "▁melody", + -13.1389741897583 + ], + [ + "Suitable", + -13.138991355895996 + ], + [ + "▁beacon", + -13.139082908630371 + ], + [ + "▁MAY", + -13.139205932617188 + ], + [ + "livré", + -13.139216423034668 + ], + [ + "Virus", + -13.139391899108887 + ], + [ + "▁collaborator", + -13.139413833618164 + ], + [ + "produktion", + -13.139480590820312 + ], + [ + "▁iluminat", + -13.139593124389648 + ], + [ + "facets", + -13.13975715637207 + ], + [ + "▁expus", + -13.139784812927246 + ], + [ + "▁baptism", + -13.13999080657959 + ], + [ + "▁urgency", + -13.140016555786133 + ], + [ + "artery", + -13.14030647277832 + ], + [ + "▁eingeladen", + -13.14043140411377 + ], + [ + "▁entfernen", + -13.14051342010498 + ], + [ + "soaking", + -13.140555381774902 + ], + [ + "▁irré", + -13.140557289123535 + ], + [ + "▁purity", + -13.140700340270996 + ], + [ + "▁adăug", + -13.140731811523438 + ], + [ + "historischen", + -13.140777587890625 + ], + [ + "crezi", + -13.140793800354004 + ], + [ + "▁tarziu", + -13.141035079956055 + ], + [ + "▁Mozart", + -13.141040802001953 + ], + [ + "▁trimming", + -13.141056060791016 + ], + [ + "▁violat", + -13.141056060791016 + ], + [ + "▁Vermögen", + -13.14108943939209 + ], + [ + "▁Theorie", + -13.141114234924316 + ], + [ + "scheibe", + -13.14114761352539 + ], + [ + "Partidul", + -13.141324996948242 + ], + [ + "▁childcare", + -13.14133071899414 + ], + [ + "ajele", + -13.141345977783203 + ], + [ + "▁Punjab", + -13.141390800476074 + ], + [ + "6.3", + -13.14156436920166 + ], + [ + "▁recount", + -13.141571044921875 + ], + [ + "▁repel", + -13.141799926757812 + ], + [ + "vantage", + -13.1419095993042 + ], + [ + "6.4", + -13.141953468322754 + ], + [ + "▁comedian", + -13.142087936401367 + ], + [ + "▁snappe", + -13.142256736755371 + ], + [ + "PLE", + -13.142271041870117 + ], + [ + "▁rapper", + -13.142439842224121 + ], + [ + "▁Belfast", + -13.142657279968262 + ], + [ + "▁predictive", + -13.14271068572998 + ], + [ + "dépôt", + -13.1427583694458 + ], + [ + "flavored", + -13.142769813537598 + ], + [ + "chließlich", + -13.14293098449707 + ], + [ + "▁stump", + -13.142955780029297 + ], + [ + "▁lakh", + -13.142963409423828 + ], + [ + "3:30", + -13.143021583557129 + ], + [ + "▁cetățeni", + -13.1431245803833 + ], + [ + "▁Milliarden", + -13.143125534057617 + ], + [ + "Assurance", + -13.143128395080566 + ], + [ + "▁Marketplace", + -13.143329620361328 + ], + [ + "equipped", + -13.143423080444336 + ], + [ + "▁russe", + -13.143462181091309 + ], + [ + "Exactly", + -13.143651008605957 + ], + [ + "▁Venez", + -13.144125938415527 + ], + [ + "▁Pavilion", + -13.144171714782715 + ], + [ + "▁incontournable", + -13.144171714782715 + ], + [ + "▁slaughter", + -13.14417839050293 + ], + [ + "asteptam", + -13.144190788269043 + ], + [ + "▁Fighter", + -13.144196510314941 + ], + [ + "▁Landkreis", + -13.144278526306152 + ], + [ + "▁lumini", + -13.144312858581543 + ], + [ + "▁connaît", + -13.144615173339844 + ], + [ + "▁Breite", + -13.144674301147461 + ], + [ + "▁Disability", + -13.144774436950684 + ], + [ + "▁Alfa", + -13.144786834716797 + ], + [ + "▁poise", + -13.144895553588867 + ], + [ + "▁Alpen", + -13.144898414611816 + ], + [ + "betont", + -13.145031929016113 + ], + [ + 
"159", + -13.145161628723145 + ], + [ + "▁geprägt", + -13.145219802856445 + ], + [ + "▁intrigued", + -13.145219802856445 + ], + [ + "▁sympathy", + -13.145220756530762 + ], + [ + "societal", + -13.145225524902344 + ], + [ + "▁sédui", + -13.145243644714355 + ], + [ + "▁differentiation", + -13.145384788513184 + ], + [ + "▁aprobare", + -13.145744323730469 + ], + [ + "schirm", + -13.14585018157959 + ], + [ + "sagt", + -13.145956039428711 + ], + [ + "7.3", + -13.146101951599121 + ], + [ + "Bib", + -13.146263122558594 + ], + [ + "europäischen", + -13.146268844604492 + ], + [ + "▁Innovative", + -13.146268844604492 + ], + [ + "▁autonome", + -13.146330833435059 + ], + [ + "▁Objective", + -13.146400451660156 + ], + [ + "▁refusal", + -13.146551132202148 + ], + [ + "▁exposé", + -13.146719932556152 + ], + [ + "▁cetăţeni", + -13.146793365478516 + ], + [ + "▁stimmt", + -13.146798133850098 + ], + [ + "acordul", + -13.147162437438965 + ], + [ + "▁hormonal", + -13.147254943847656 + ], + [ + "intermédiaire", + -13.147319793701172 + ], + [ + "▁doubl", + -13.147374153137207 + ], + [ + "▁flute", + -13.147509574890137 + ], + [ + "▁Balkon", + -13.147523880004883 + ], + [ + "▁Florian", + -13.147607803344727 + ], + [ + "737", + -13.147614479064941 + ], + [ + "▁dritte", + -13.147639274597168 + ], + [ + "spitze", + -13.147685050964355 + ], + [ + "donnent", + -13.14778995513916 + ], + [ + "▁Zuhause", + -13.147850036621094 + ], + [ + "▁VIII", + -13.147852897644043 + ], + [ + "familien", + -13.148151397705078 + ], + [ + "▁sécurisé", + -13.148313522338867 + ], + [ + "▁glamour", + -13.148370742797852 + ], + [ + "▁societati", + -13.148370742797852 + ], + [ + "typique", + -13.1483793258667 + ], + [ + "▁addicted", + -13.148421287536621 + ], + [ + "▁Providence", + -13.148500442504883 + ], + [ + "▁Extended", + -13.148506164550781 + ], + [ + "▁Barbie", + -13.148513793945312 + ], + [ + "zustand", + -13.148516654968262 + ], + [ + "▁Sauna", + -13.148638725280762 + ], + [ + "▁propane", + -13.148663520812988 + ], + [ + "europa", + -13.148894309997559 + ], + [ + "glued", + -13.148940086364746 + ], + [ + "▁Mystery", + -13.148941993713379 + ], + [ + "▁travaillé", + -13.149106979370117 + ], + [ + "riol", + -13.149251937866211 + ], + [ + "fleisch", + -13.149288177490234 + ], + [ + "▁Eintritt", + -13.149327278137207 + ], + [ + "▁Syndrome", + -13.149422645568848 + ], + [ + "▁petroleum", + -13.149426460266113 + ], + [ + "▁genial", + -13.149433135986328 + ], + [ + "sponsored", + -13.149436950683594 + ], + [ + "▁Cindy", + -13.149436950683594 + ], + [ + "▁courier", + -13.149600982666016 + ], + [ + "▁Scrap", + -13.149640083312988 + ], + [ + "▁conţin", + -13.149724006652832 + ], + [ + "(2007)", + -13.149764060974121 + ], + [ + "▁gewährleisten", + -13.149949073791504 + ], + [ + "▁proprietor", + -13.15011215209961 + ], + [ + "▁cheque", + -13.15046215057373 + ], + [ + "maternity", + -13.150477409362793 + ], + [ + "▁Gustav", + -13.15048599243164 + ], + [ + "▁arterial", + -13.150497436523438 + ], + [ + "▁whiskey", + -13.150510787963867 + ], + [ + "▁concealed", + -13.150525093078613 + ], + [ + "thèque", + -13.150553703308105 + ], + [ + "felony", + -13.150579452514648 + ], + [ + "▁tweeted", + -13.150613784790039 + ], + [ + "OTA", + -13.150619506835938 + ], + [ + "nsel", + -13.150664329528809 + ], + [ + "▁coarse", + -13.150664329528809 + ], + [ + "▁identificat", + -13.150707244873047 + ], + [ + "▁variability", + -13.150716781616211 + ], + [ + "civ", + -13.150843620300293 + ], + [ + "▁drastic", + -13.150956153869629 + ], + [ + "▁hatred", + 
-13.151090621948242 + ], + [ + "▁Bürgermeister", + -13.151237487792969 + ], + [ + "▁utilizatorilor", + -13.15124225616455 + ], + [ + "OULD", + -13.15137004852295 + ], + [ + "rmaßen", + -13.151383399963379 + ], + [ + "▁windshield", + -13.151530265808105 + ], + [ + "▁Particular", + -13.151531219482422 + ], + [ + "▁Tunnel", + -13.151638984680176 + ], + [ + "▁litri", + -13.15164852142334 + ], + [ + "extrême", + -13.15180492401123 + ], + [ + "▁Schalt", + -13.151944160461426 + ], + [ + "paket", + -13.152159690856934 + ], + [ + "berlin", + -13.152169227600098 + ], + [ + "▁slujb", + -13.152193069458008 + ], + [ + "facilitated", + -13.152206420898438 + ], + [ + "Congressional", + -13.152510643005371 + ], + [ + "▁honeymoon", + -13.152585983276367 + ], + [ + "▁Provision", + -13.152697563171387 + ], + [ + "▁Outfit", + -13.152779579162598 + ], + [ + "udder", + -13.152814865112305 + ], + [ + "▁chandelier", + -13.153002738952637 + ], + [ + "donating", + -13.153132438659668 + ], + [ + "historic", + -13.15333080291748 + ], + [ + "organized", + -13.153508186340332 + ], + [ + "(8)", + -13.15356731414795 + ], + [ + "▁touristique", + -13.153610229492188 + ], + [ + "▁Roosevelt", + -13.153643608093262 + ], + [ + "▁Verständnis", + -13.153643608093262 + ], + [ + "▁prilej", + -13.153655052185059 + ], + [ + "Vanity", + -13.153806686401367 + ], + [ + "chilly", + -13.153964042663574 + ], + [ + "loyer", + -13.154031753540039 + ], + [ + "▁Zhang", + -13.154053688049316 + ], + [ + "▁Nouveau", + -13.154193878173828 + ], + [ + "Soft", + -13.154326438903809 + ], + [ + "▁motherboard", + -13.15441608428955 + ], + [ + "▁Erklärung", + -13.154701232910156 + ], + [ + "▁Tasmania", + -13.154702186584473 + ], + [ + "▁verändern", + -13.154703140258789 + ], + [ + "▁seldom", + -13.154711723327637 + ], + [ + "▁Karriere", + -13.154714584350586 + ], + [ + "▁Mixed", + -13.154902458190918 + ], + [ + "umfang", + -13.154970169067383 + ], + [ + "▁Strategies", + -13.155035972595215 + ], + [ + "CHAR", + -13.155051231384277 + ], + [ + "olitary", + -13.155075073242188 + ], + [ + "▁Persoan", + -13.1550874710083 + ], + [ + "bewegung", + -13.155242919921875 + ], + [ + "▁Ernest", + -13.155367851257324 + ], + [ + "withdrawn", + -13.155855178833008 + ], + [ + "▁stationary", + -13.155881881713867 + ], + [ + "▁bland", + -13.155939102172852 + ], + [ + "▁Replace", + -13.156059265136719 + ], + [ + "▁Londres", + -13.156290054321289 + ], + [ + "▁plural", + -13.156290054321289 + ], + [ + "▁concentrat", + -13.156515121459961 + ], + [ + "Maschine", + -13.156675338745117 + ], + [ + "▁Advocate", + -13.156820297241211 + ], + [ + "▁vermitteln", + -13.156824111938477 + ], + [ + "▁dispenser", + -13.156827926635742 + ], + [ + "▁tedious", + -13.15695858001709 + ], + [ + "▁Straight", + -13.15705394744873 + ], + [ + "▁Corona", + -13.157061576843262 + ], + [ + "▁monumental", + -13.157073020935059 + ], + [ + "▁migrate", + -13.15720272064209 + ], + [ + "▁verlieren", + -13.157366752624512 + ], + [ + "▁Lub", + -13.157482147216797 + ], + [ + "▁reinforcement", + -13.157827377319336 + ], + [ + "▁cherish", + -13.157843589782715 + ], + [ + "Veterinary", + -13.157881736755371 + ], + [ + "geschwindigkeit", + -13.157881736755371 + ], + [ + "▁féminin", + -13.157881736755371 + ], + [ + "▁Facilities", + -13.157964706420898 + ], + [ + "▁urmari", + -13.158050537109375 + ], + [ + "▁Vertical", + -13.158098220825195 + ], + [ + "echoe", + -13.158188819885254 + ], + [ + "toured", + -13.158548355102539 + ], + [ + "Served", + -13.158772468566895 + ], + [ + "más", + -13.158853530883789 + ], + [ + 
"license", + -13.158893585205078 + ], + [ + "misunderstanding", + -13.158944129943848 + ], + [ + "▁glamorous", + -13.158944129943848 + ], + [ + "BJP", + -13.158973693847656 + ], + [ + "▁découvert", + -13.159173965454102 + ], + [ + "schönsten", + -13.159517288208008 + ], + [ + "▁(2018)", + -13.159577369689941 + ], + [ + "▁orasului", + -13.159581184387207 + ], + [ + "328", + -13.159674644470215 + ], + [ + "thighs", + -13.159801483154297 + ], + [ + "éclairage", + -13.160008430480957 + ], + [ + "Oamenii", + -13.160009384155273 + ], + [ + "▁Transmission", + -13.16014575958252 + ], + [ + "▁transpir", + -13.16015911102295 + ], + [ + "▁președinte", + -13.160321235656738 + ], + [ + "finalists", + -13.160327911376953 + ], + [ + "genügend", + -13.160524368286133 + ], + [ + "▁Aufmerksamkeit", + -13.160539627075195 + ], + [ + "▁unglaublich", + -13.160539627075195 + ], + [ + "▁descarc", + -13.160604476928711 + ], + [ + "▁Couch", + -13.160683631896973 + ], + [ + "eaucoup", + -13.160788536071777 + ], + [ + "▁adidas", + -13.161075592041016 + ], + [ + "▁1-800-", + -13.161077499389648 + ], + [ + "▁Communities", + -13.161102294921875 + ], + [ + "▁Einkommen", + -13.161102294921875 + ], + [ + "▁Reagan", + -13.16114330291748 + ], + [ + "▁Stoke", + -13.161260604858398 + ], + [ + "▁Snapchat", + -13.161269187927246 + ], + [ + "éclat", + -13.161272048950195 + ], + [ + "▁auseinander", + -13.161367416381836 + ], + [ + "▁richesse", + -13.16137409210205 + ], + [ + "▁toggle", + -13.161396026611328 + ], + [ + "▁Zutaten", + -13.161606788635254 + ], + [ + "▁député", + -13.16161060333252 + ], + [ + "▁battlefield", + -13.161611557006836 + ], + [ + "▁spirituel", + -13.161611557006836 + ], + [ + "▁Shuttle", + -13.161632537841797 + ], + [ + "▁Aktien", + -13.161665916442871 + ], + [ + "hormon", + -13.161819458007812 + ], + [ + "connection", + -13.16187858581543 + ], + [ + "▁vizitatori", + -13.16191577911377 + ], + [ + "érité", + -13.161971092224121 + ], + [ + "truck", + -13.1619873046875 + ], + [ + "▁yourselves", + -13.162139892578125 + ], + [ + "▁Logistics", + -13.162140846252441 + ], + [ + "coveted", + -13.16215705871582 + ], + [ + "▁şedinţ", + -13.162671089172363 + ], + [ + "▁messenger", + -13.162703514099121 + ], + [ + "▁țar", + -13.162918090820312 + ], + [ + "▁Grau", + -13.163025856018066 + ], + [ + "chirurgie", + -13.163138389587402 + ], + [ + "▁Ressourcen", + -13.16320514678955 + ], + [ + "▁Jésus", + -13.163207054138184 + ], + [ + "▁acțiune", + -13.163208961486816 + ], + [ + "▁Bundesliga", + -13.163249015808105 + ], + [ + "Lizenz", + -13.163379669189453 + ], + [ + "ELLE", + -13.163908958435059 + ], + [ + "vraie", + -13.1639986038208 + ], + [ + "ruined", + -13.164018630981445 + ], + [ + "▁Marble", + -13.164109230041504 + ], + [ + "▁Zambia", + -13.164308547973633 + ], + [ + "▁Finnish", + -13.164366722106934 + ], + [ + "▁trackback", + -13.164488792419434 + ], + [ + "héros", + -13.16451644897461 + ], + [ + "▁réclam", + -13.164534568786621 + ], + [ + "locurile", + -13.164706230163574 + ], + [ + "tägliche", + -13.164753913879395 + ], + [ + "IFF", + -13.164824485778809 + ], + [ + "▁contextual", + -13.164938926696777 + ], + [ + "▁Elvis", + -13.165084838867188 + ], + [ + "▁Batch", + -13.165183067321777 + ], + [ + "▁appris", + -13.16519546508789 + ], + [ + "intensive", + -13.165404319763184 + ], + [ + "▁întâmplat", + -13.16565990447998 + ], + [ + "▁prelucr", + -13.16576099395752 + ], + [ + "flore", + -13.165873527526855 + ], + [ + "▁Alkohol", + -13.165877342224121 + ], + [ + "Konzern", + -13.165895462036133 + ], + [ + "Delete", + 
-13.166082382202148 + ], + [ + "öck", + -13.16612720489502 + ], + [ + "▁clientii", + -13.16614818572998 + ], + [ + "▁innovate", + -13.166224479675293 + ], + [ + "▁ASAP", + -13.166345596313477 + ], + [ + "crumbs", + -13.166425704956055 + ], + [ + "reusable", + -13.166489601135254 + ], + [ + "▁Beaver", + -13.166507720947266 + ], + [ + "▁rosii", + -13.166643142700195 + ], + [ + "Arr", + -13.166704177856445 + ], + [ + "▁Zubehör", + -13.166948318481445 + ], + [ + "▁stolz", + -13.166952133178711 + ], + [ + "▁$75", + -13.16695499420166 + ], + [ + "▁Frühling", + -13.166967391967773 + ], + [ + "▁disagreement", + -13.166988372802734 + ], + [ + "▁formulate", + -13.167381286621094 + ], + [ + "braking", + -13.167522430419922 + ], + [ + "▁submarine", + -13.167535781860352 + ], + [ + "▁identificare", + -13.167652130126953 + ], + [ + "lansarea", + -13.167659759521484 + ], + [ + "covered", + -13.167753219604492 + ], + [ + "benso", + -13.167859077453613 + ], + [ + "▁situatie", + -13.167989730834961 + ], + [ + "hilf", + -13.1681547164917 + ], + [ + "▁Southampton", + -13.168557167053223 + ], + [ + "▁intéressé", + -13.168557167053223 + ], + [ + "▁congressional", + -13.168572425842285 + ], + [ + "65%", + -13.168595314025879 + ], + [ + "▁Allison", + -13.168627738952637 + ], + [ + "Mainland", + -13.168726921081543 + ], + [ + "▁touchscreen", + -13.16882038116455 + ], + [ + "leitet", + -13.168922424316406 + ], + [ + "mnului", + -13.16958999633789 + ], + [ + "▁engagiert", + -13.169631004333496 + ], + [ + "joacă", + -13.16964340209961 + ], + [ + "▁$5,000", + -13.169652938842773 + ], + [ + "upscale", + -13.1697359085083 + ], + [ + "▁vérité", + -13.16983413696289 + ], + [ + "flüssig", + -13.170167922973633 + ], + [ + "Richtlinie", + -13.170169830322266 + ], + [ + "▁positif", + -13.170169830322266 + ], + [ + "▁diferenta", + -13.170175552368164 + ], + [ + "▁întâi", + -13.170707702636719 + ], + [ + "ethylene", + -13.170791625976562 + ], + [ + "kreuz", + -13.170913696289062 + ], + [ + "Surely", + -13.170990943908691 + ], + [ + "puneti", + -13.171002388000488 + ], + [ + "europe", + -13.171142578125 + ], + [ + "▁comunist", + -13.171271324157715 + ], + [ + "unterricht", + -13.171302795410156 + ], + [ + "▁Füll", + -13.171304702758789 + ], + [ + "▁Aberdeen", + -13.171792030334473 + ], + [ + "▁DSLR", + -13.171792030334473 + ], + [ + "▁functioneaza", + -13.171799659729004 + ], + [ + "▁benches", + -13.171807289123535 + ], + [ + "▁Alpine", + -13.171866416931152 + ], + [ + "phthal", + -13.172003746032715 + ], + [ + "▁counselling", + -13.17219066619873 + ], + [ + "▁erzielen", + -13.172323226928711 + ], + [ + "▁părinţi", + -13.172329902648926 + ], + [ + "▁besitzen", + -13.17236614227295 + ], + [ + "heavenly", + -13.172389030456543 + ], + [ + "▁masque", + -13.17281723022461 + ], + [ + "▁Legislature", + -13.172859191894531 + ], + [ + "▁Recycling", + -13.172861099243164 + ], + [ + "▁Derma", + -13.172883987426758 + ], + [ + "reunite", + -13.172926902770996 + ], + [ + "recettes", + -13.17310619354248 + ], + [ + "converge", + -13.173262596130371 + ], + [ + "▁compoziti", + -13.17327880859375 + ], + [ + "▁Nürnberg", + -13.173398971557617 + ], + [ + "760", + -13.173545837402344 + ], + [ + "▁entière", + -13.173674583435059 + ], + [ + "▁parchment", + -13.173944473266602 + ], + [ + "▁Aufwand", + -13.173945426940918 + ], + [ + "▁antivirus", + -13.174087524414062 + ], + [ + "▁remettr", + -13.17409610748291 + ], + [ + "▁NEVER", + -13.174243927001953 + ], + [ + "▁restrictive", + -13.174266815185547 + ], + [ + "▁beurre", + -13.174283027648926 + ], + [ 
+ "▁frigider", + -13.174478530883789 + ], + [ + "acquisition", + -13.174642562866211 + ], + [ + "▁Correct", + -13.174866676330566 + ], + [ + "▁immortal", + -13.175017356872559 + ], + [ + "▁occupancy", + -13.175017356872559 + ], + [ + "▁Tucson", + -13.175019264221191 + ], + [ + "▁Dhabi", + -13.175025939941406 + ], + [ + "obligation", + -13.175033569335938 + ], + [ + "▁warfare", + -13.175037384033203 + ], + [ + "▁syntax", + -13.175045013427734 + ], + [ + "APS", + -13.175106048583984 + ], + [ + "мен", + -13.175209999084473 + ], + [ + "▁diferenț", + -13.175251960754395 + ], + [ + "wordpress", + -13.17549991607666 + ], + [ + "▁Wohnzimmer", + -13.175593376159668 + ], + [ + "oppo", + -13.175736427307129 + ], + [ + "▁miscare", + -13.175762176513672 + ], + [ + "companiilor", + -13.17581558227539 + ], + [ + "▁bezahlt", + -13.17584228515625 + ], + [ + "Sterne", + -13.175864219665527 + ], + [ + "inability", + -13.175898551940918 + ], + [ + "▁Hoffnung", + -13.176156044006348 + ], + [ + "▁românească", + -13.176176071166992 + ], + [ + "document", + -13.176177024841309 + ], + [ + "borrowers", + -13.17625904083252 + ], + [ + "▁rasa", + -13.176301956176758 + ], + [ + "▁bénéfice", + -13.176445960998535 + ], + [ + "▁Panda", + -13.17645263671875 + ], + [ + "▁cărţi", + -13.176730155944824 + ], + [ + "▁Vorgehen", + -13.17690658569336 + ], + [ + "▁afecteaz", + -13.176956176757812 + ], + [ + "▁diagnos", + -13.177050590515137 + ], + [ + "▁Dentistry", + -13.177180290222168 + ], + [ + "▁staggering", + -13.177180290222168 + ], + [ + "präsident", + -13.177181243896484 + ], + [ + "▁vocational", + -13.177239418029785 + ], + [ + "Combined", + -13.177287101745605 + ], + [ + "stère", + -13.177306175231934 + ], + [ + "▁frunze", + -13.177478790283203 + ], + [ + "OLI", + -13.177525520324707 + ], + [ + "▁răc", + -13.177752494812012 + ], + [ + "▁changé", + -13.177754402160645 + ], + [ + "▁reprezentanți", + -13.177757263183594 + ], + [ + "▁ausgeschlossen", + -13.177777290344238 + ], + [ + "Windows", + -13.177891731262207 + ], + [ + "sometimes", + -13.177898406982422 + ], + [ + "▁dargestellt", + -13.178120613098145 + ], + [ + "provoking", + -13.178263664245605 + ], + [ + "terribly", + -13.178264617919922 + ], + [ + "▁speculate", + -13.178274154663086 + ], + [ + "▁complément", + -13.178305625915527 + ], + [ + "▁(2006)", + -13.178306579589844 + ], + [ + "zulegen", + -13.178668022155762 + ], + [ + "▁définitive", + -13.178876876831055 + ], + [ + "considerare", + -13.17911148071289 + ], + [ + "▁Subaru", + -13.179354667663574 + ], + [ + "WAN", + -13.179390907287598 + ], + [ + "guessed", + -13.179417610168457 + ], + [ + "spannung", + -13.179479598999023 + ], + [ + "▁supernatural", + -13.179515838623047 + ], + [ + "▁Interstate", + -13.17957878112793 + ], + [ + "▁redundant", + -13.179891586303711 + ], + [ + "▁HUG", + -13.179893493652344 + ], + [ + "▁restauration", + -13.180006980895996 + ], + [ + "repute", + -13.180011749267578 + ], + [ + "coagul", + -13.180028915405273 + ], + [ + "tehnologia", + -13.18043327331543 + ], + [ + "warded", + -13.180444717407227 + ], + [ + "▁lobster", + -13.180469512939453 + ], + [ + "▁Hafen", + -13.180542945861816 + ], + [ + "▁Guess", + -13.18056583404541 + ], + [ + "seraient", + -13.181038856506348 + ], + [ + "▁trench", + -13.181156158447266 + ], + [ + "▁piept", + -13.181283950805664 + ], + [ + "categorized", + -13.181396484375 + ], + [ + "softer", + -13.1815185546875 + ], + [ + "▁feasibility", + -13.181519508361816 + ], + [ + "▁restructuring", + -13.181519508361816 + ], + [ + "▁GOOD", + -13.181537628173828 + 
], + [ + "▁inspiré", + -13.181610107421875 + ], + [ + "▁spéci", + -13.18163013458252 + ], + [ + "▁Mattress", + -13.181686401367188 + ], + [ + "▁biologique", + -13.181702613830566 + ], + [ + "▁Crema", + -13.182043075561523 + ], + [ + "▁korrekt", + -13.182063102722168 + ], + [ + "▁imperfect", + -13.182205200195312 + ], + [ + "▁advantageous", + -13.182329177856445 + ], + [ + "9.00", + -13.182390213012695 + ], + [ + "PAL", + -13.182557106018066 + ], + [ + "▁Illustration", + -13.182607650756836 + ], + [ + "▁Katherine", + -13.182607650756836 + ], + [ + "▁cervical", + -13.182607650756836 + ], + [ + "▁hectic", + -13.182611465454102 + ], + [ + "▁Belastung", + -13.182615280151367 + ], + [ + "▁Laguna", + -13.182628631591797 + ], + [ + "▁Burton", + -13.182761192321777 + ], + [ + "nettoyage", + -13.182875633239746 + ], + [ + "Toward", + -13.183072090148926 + ], + [ + "continuare", + -13.183072090148926 + ], + [ + "▁acumulat", + -13.183106422424316 + ], + [ + "▁déposé", + -13.183216094970703 + ], + [ + "▁prestige", + -13.183269500732422 + ], + [ + "▁LNG", + -13.183525085449219 + ], + [ + "▁Dacia", + -13.183662414550781 + ], + [ + "▁concede", + -13.183691024780273 + ], + [ + "▁reconciliation", + -13.183822631835938 + ], + [ + "Sistemul", + -13.183877944946289 + ], + [ + "Speed", + -13.183937072753906 + ], + [ + "▁Implant", + -13.183977127075195 + ], + [ + "▁möchtest", + -13.184020042419434 + ], + [ + "▁Norton", + -13.184064865112305 + ], + [ + "▁cosmic", + -13.184181213378906 + ], + [ + "enregistrement", + -13.184247016906738 + ], + [ + "țării", + -13.18433952331543 + ], + [ + "Veröffentlichung", + -13.184786796569824 + ], + [ + "erlebnis", + -13.184786796569824 + ], + [ + "▁Carpenter", + -13.184786796569824 + ], + [ + "▁INFORMATION", + -13.184786796569824 + ], + [ + "invites", + -13.18481731414795 + ], + [ + "▁gewan", + -13.1849365234375 + ], + [ + "▁réservé", + -13.184986114501953 + ], + [ + "▁aquatic", + -13.184988021850586 + ], + [ + "▁Seoul", + -13.18507194519043 + ], + [ + "▁älter", + -13.185185432434082 + ], + [ + "▁classmates", + -13.185223579406738 + ], + [ + "gelangen", + -13.185253143310547 + ], + [ + "▁Camill", + -13.185285568237305 + ], + [ + "simo", + -13.185291290283203 + ], + [ + "▁dormitor", + -13.185333251953125 + ], + [ + "wahren", + -13.185354232788086 + ], + [ + "▁incremental", + -13.185357093811035 + ], + [ + "▁caci", + -13.185494422912598 + ], + [ + "mittlere", + -13.185752868652344 + ], + [ + "▁condominium", + -13.185877799987793 + ], + [ + "▁rainforest", + -13.185877799987793 + ], + [ + "▁championnat", + -13.185891151428223 + ], + [ + "▁interrupted", + -13.185921669006348 + ], + [ + "▁tactile", + -13.185930252075195 + ], + [ + "▁unconditional", + -13.185945510864258 + ], + [ + "▁reactive", + -13.186041831970215 + ], + [ + "▁Stretch", + -13.1861572265625 + ], + [ + "▁serene", + -13.18624210357666 + ], + [ + "570", + -13.186318397521973 + ], + [ + "igte", + -13.186376571655273 + ], + [ + "Louis", + -13.186410903930664 + ], + [ + "▁Mittelpunkt", + -13.186493873596191 + ], + [ + "EEP", + -13.18651294708252 + ], + [ + "▁vault", + -13.186552047729492 + ], + [ + "absolu", + -13.186893463134766 + ], + [ + "▁solidarity", + -13.186971664428711 + ], + [ + "CLICK", + -13.18708324432373 + ], + [ + "▁hustle", + -13.187090873718262 + ], + [ + "▁microscope", + -13.187105178833008 + ], + [ + "▁Recommended", + -13.187111854553223 + ], + [ + "âche", + -13.18716812133789 + ], + [ + "▁flashlight", + -13.187286376953125 + ], + [ + "modificarea", + -13.18754768371582 + ], + [ + "izaţi", + 
-13.18773078918457 + ], + [ + "planned", + -13.187899589538574 + ], + [ + "Download", + -13.187906265258789 + ], + [ + "▁gourmand", + -13.188064575195312 + ], + [ + "▁subsidiaries", + -13.188064575195312 + ], + [ + "orthodox", + -13.188135147094727 + ], + [ + "▁Auburn", + -13.188323020935059 + ], + [ + "▁exprimat", + -13.188336372375488 + ], + [ + "procédé", + -13.18861198425293 + ], + [ + "▁ressenti", + -13.188648223876953 + ], + [ + "▁stint", + -13.188678741455078 + ], + [ + "Essentially", + -13.189072608947754 + ], + [ + "▁Savior", + -13.189164161682129 + ], + [ + "▁Flood", + -13.189168930053711 + ], + [ + "▁neurological", + -13.189249038696289 + ], + [ + "▁strig", + -13.189340591430664 + ], + [ + "scended", + -13.189421653747559 + ], + [ + "▁Shiva", + -13.189483642578125 + ], + [ + "▁Sketch", + -13.189544677734375 + ], + [ + "▁monarch", + -13.18956184387207 + ], + [ + "▁Preview", + -13.189632415771484 + ], + [ + "▁bewegt", + -13.189811706542969 + ], + [ + "mapped", + -13.189818382263184 + ], + [ + "énorme", + -13.189962387084961 + ], + [ + "▁définition", + -13.189963340759277 + ], + [ + "▁nécessité", + -13.189984321594238 + ], + [ + "▁antren", + -13.190027236938477 + ], + [ + "▁Infant", + -13.190072059631348 + ], + [ + "▁incumbent", + -13.190255165100098 + ], + [ + "▁pavilion", + -13.190255165100098 + ], + [ + "▁Taliban", + -13.19025707244873 + ], + [ + "Easily", + -13.19025993347168 + ], + [ + "▁verteilt", + -13.19030475616455 + ], + [ + "▁Biblical", + -13.190320014953613 + ], + [ + "Christian", + -13.190333366394043 + ], + [ + "județul", + -13.190436363220215 + ], + [ + "Learning", + -13.19046688079834 + ], + [ + "▁Expand", + -13.19054126739502 + ], + [ + "▁Attach", + -13.19056224822998 + ], + [ + "consideră", + -13.190573692321777 + ], + [ + "einsatz", + -13.190574645996094 + ], + [ + "Numai", + -13.190585136413574 + ], + [ + "▁Eintrag", + -13.190597534179688 + ], + [ + "▁üblich", + -13.190607070922852 + ], + [ + "▁cumpără", + -13.19062614440918 + ], + [ + "escaped", + -13.190693855285645 + ], + [ + "▁Ortodox", + -13.190804481506348 + ], + [ + "▁obţinut", + -13.190805435180664 + ], + [ + "ecluded", + -13.191036224365234 + ], + [ + "▁brownie", + -13.191089630126953 + ], + [ + "▁regulament", + -13.191253662109375 + ], + [ + "▁Chaos", + -13.191302299499512 + ], + [ + "▁masiv", + -13.19132137298584 + ], + [ + "▁Gerald", + -13.191376686096191 + ], + [ + "▁Sigur", + -13.191380500793457 + ], + [ + "▁wavelength", + -13.191380500793457 + ], + [ + "▁retiring", + -13.191396713256836 + ], + [ + "▁exactement", + -13.191819190979004 + ], + [ + "ntino", + -13.191823959350586 + ], + [ + "▁Krebs", + -13.19194221496582 + ], + [ + "▁monatlich", + -13.191956520080566 + ], + [ + "▁aranj", + -13.192011833190918 + ], + [ + "▁priveşt", + -13.192099571228027 + ], + [ + "▁mecanic", + -13.192109107971191 + ], + [ + "money", + -13.192233085632324 + ], + [ + "parliamentary", + -13.1922607421875 + ], + [ + "▁probation", + -13.192427635192871 + ], + [ + "embroidered", + -13.192451477050781 + ], + [ + "▁amenajat", + -13.192451477050781 + ], + [ + "▁remnant", + -13.192451477050781 + ], + [ + "▁senzati", + -13.192472457885742 + ], + [ + "▁Declaration", + -13.192483901977539 + ], + [ + "farbe", + -13.192506790161133 + ], + [ + "▁skinny", + -13.19260311126709 + ], + [ + "Energi", + -13.192648887634277 + ], + [ + "verhältnisse", + -13.19288158416748 + ], + [ + "Recruit", + -13.192972183227539 + ], + [ + "frying", + -13.193161010742188 + ], + [ + "925", + -13.193294525146484 + ], + [ + "nstruire", + -13.193302154541016 
+ ], + [ + "toasted", + -13.193424224853516 + ], + [ + "▁nicotine", + -13.193551063537598 + ], + [ + "recessed", + -13.193570137023926 + ], + [ + "▁dialect", + -13.193572044372559 + ], + [ + "▁confisc", + -13.193575859069824 + ], + [ + "▁bubbl", + -13.193643569946289 + ], + [ + "▁Precision", + -13.193682670593262 + ], + [ + "▁sollicit", + -13.193842887878418 + ], + [ + "▁Moral", + -13.193977355957031 + ], + [ + "▁renseignements", + -13.194112777709961 + ], + [ + "UMP", + -13.194116592407227 + ], + [ + "ijn", + -13.194183349609375 + ], + [ + "▁fermeture", + -13.194320678710938 + ], + [ + "▁blueprint", + -13.19462776184082 + ], + [ + "▁groceries", + -13.194652557373047 + ], + [ + "möbel", + -13.194655418395996 + ], + [ + "▁Plenty", + -13.194657325744629 + ], + [ + "▁forfeit", + -13.194719314575195 + ], + [ + "méthodes", + -13.194915771484375 + ], + [ + "paving", + -13.19493293762207 + ], + [ + "outheastern", + -13.194979667663574 + ], + [ + "▁Overview", + -13.19503116607666 + ], + [ + "▁observers", + -13.195171356201172 + ], + [ + "▁Timișoara", + -13.19520378112793 + ], + [ + "noticing", + -13.195332527160645 + ], + [ + "▁Owl", + -13.195381164550781 + ], + [ + "▁1925", + -13.195517539978027 + ], + [ + "▁prüfen", + -13.195755004882812 + ], + [ + "▁Bewohner", + -13.195756912231445 + ], + [ + "▁Latvia", + -13.195770263671875 + ], + [ + "▁Tuscan", + -13.19577407836914 + ], + [ + "▁apprenticeship", + -13.195789337158203 + ], + [ + "▁courteous", + -13.1958646774292 + ], + [ + "adult", + -13.196023941040039 + ], + [ + "Licensed", + -13.196029663085938 + ], + [ + "abused", + -13.196762084960938 + ], + [ + "confidence", + -13.19678020477295 + ], + [ + "▁revolt", + -13.196782112121582 + ], + [ + "conference", + -13.196861267089844 + ], + [ + "genoss", + -13.196914672851562 + ], + [ + "▁răni", + -13.196944236755371 + ], + [ + "▁Intervention", + -13.196949005126953 + ], + [ + "▁primesc", + -13.196969985961914 + ], + [ + "trays", + -13.197041511535645 + ], + [ + "nozzle", + -13.197216033935547 + ], + [ + "▁splitting", + -13.197443962097168 + ], + [ + "▁könne", + -13.197507858276367 + ], + [ + "▁peisaj", + -13.197943687438965 + ], + [ + "▁academia", + -13.197962760925293 + ], + [ + "▁chakra", + -13.197979927062988 + ], + [ + "▁Abdul", + -13.1981201171875 + ], + [ + "▁Beschreibung", + -13.198225021362305 + ], + [ + "Regeln", + -13.19831371307373 + ], + [ + "eezy", + -13.198314666748047 + ], + [ + "▁problématique", + -13.198515892028809 + ], + [ + "▁Ausführung", + -13.198524475097656 + ], + [ + "▁reconnect", + -13.19868278503418 + ], + [ + "▁telefonic", + -13.198966026306152 + ], + [ + "▁Ethereum", + -13.199069023132324 + ], + [ + "▁Winnipeg", + -13.199069023132324 + ], + [ + "▁misconception", + -13.199069023132324 + ], + [ + "▁Verpackung", + -13.199070930480957 + ], + [ + "▁erzeugt", + -13.199097633361816 + ], + [ + "▁Identity", + -13.199104309082031 + ], + [ + "▁dunkle", + -13.199109077453613 + ], + [ + "sustaining", + -13.19916820526123 + ], + [ + "▁pereche", + -13.199178695678711 + ], + [ + "▁neîn", + -13.199239730834961 + ], + [ + "directorul", + -13.199291229248047 + ], + [ + "▁élabor", + -13.199584007263184 + ], + [ + "▁Hollow", + -13.19960880279541 + ], + [ + "▁getestet", + -13.199751853942871 + ], + [ + "▁Promote", + -13.199797630310059 + ], + [ + "agriculture", + -13.199920654296875 + ], + [ + "▁deosebir", + -13.199934005737305 + ], + [ + "▁neam", + -13.199999809265137 + ], + [ + "aufbau", + -13.200042724609375 + ], + [ + "▁susținut", + -13.200079917907715 + ], + [ + "fueled", + 
-13.200119018554688 + ], + [ + "▁impresionant", + -13.200177192687988 + ], + [ + "innate", + -13.20026969909668 + ], + [ + "grenzt", + -13.200340270996094 + ], + [ + "rescued", + -13.200514793395996 + ], + [ + "bestand", + -13.200559616088867 + ], + [ + "▁adjunct", + -13.200729370117188 + ], + [ + "▁Mischung", + -13.200754165649414 + ], + [ + "▁Lease", + -13.201258659362793 + ], + [ + "espagnol", + -13.201284408569336 + ], + [ + "▁Kickstarter", + -13.201284408569336 + ], + [ + "▁buzunar", + -13.201284408569336 + ], + [ + "▁buddies", + -13.20129108428955 + ], + [ + "käufe", + -13.201485633850098 + ], + [ + "cevoir", + -13.201582908630371 + ], + [ + "▁creşte", + -13.201675415039062 + ], + [ + "▁Cluster", + -13.201825141906738 + ], + [ + "▁obișnui", + -13.201838493347168 + ], + [ + "▁cassette", + -13.201889038085938 + ], + [ + "▁optisch", + -13.201947212219238 + ], + [ + "manned", + -13.20200252532959 + ], + [ + "schneid", + -13.202362060546875 + ], + [ + "Württemberg", + -13.202393531799316 + ], + [ + "shredded", + -13.202393531799316 + ], + [ + "▁botanical", + -13.20239543914795 + ], + [ + "characterization", + -13.202445983886719 + ], + [ + "▁Durchführung", + -13.202452659606934 + ], + [ + "▁tireless", + -13.20250129699707 + ], + [ + "lässlich", + -13.20254135131836 + ], + [ + "▁Merchant", + -13.202570915222168 + ], + [ + "joutez", + -13.20259952545166 + ], + [ + "▁amélior", + -13.202676773071289 + ], + [ + "fixed", + -13.202741622924805 + ], + [ + "kho", + -13.202760696411133 + ], + [ + "▁televizor", + -13.202948570251465 + ], + [ + "▁Davies", + -13.202964782714844 + ], + [ + "enceinte", + -13.203118324279785 + ], + [ + "▁Panorama", + -13.20350456237793 + ], + [ + "▁maternal", + -13.203507423400879 + ], + [ + "diversified", + -13.203513145446777 + ], + [ + "▁Jü", + -13.203570365905762 + ], + [ + "▁naz", + -13.203730583190918 + ], + [ + "▁plonge", + -13.2039213180542 + ], + [ + "geschickt", + -13.203944206237793 + ], + [ + "MIS", + -13.204215049743652 + ], + [ + "ragged", + -13.204553604125977 + ], + [ + "▁diarrhea", + -13.20461654663086 + ], + [ + "▁tsunami", + -13.20461654663086 + ], + [ + "▁Nikola", + -13.204625129699707 + ], + [ + "▁festivities", + -13.20464038848877 + ], + [ + "potting", + -13.20479965209961 + ], + [ + "▁telefonisch", + -13.204874038696289 + ], + [ + "TAR", + -13.204971313476562 + ], + [ + "▁schimbări", + -13.205023765563965 + ], + [ + "▁occidental", + -13.205172538757324 + ], + [ + "schloss", + -13.205179214477539 + ], + [ + "Print", + -13.205284118652344 + ], + [ + "▁autoritățil", + -13.205361366271973 + ], + [ + "idos", + -13.20556640625 + ], + [ + "mediocr", + -13.20559310913086 + ], + [ + "▁Decla", + -13.205686569213867 + ], + [ + "▁Elliott", + -13.205729484558105 + ], + [ + "▁pinpoint", + -13.205734252929688 + ], + [ + "▁disciple", + -13.20579719543457 + ], + [ + "▁Cairo", + -13.2058744430542 + ], + [ + "▁15-20", + -13.2059326171875 + ], + [ + "▁limbaj", + -13.20611572265625 + ], + [ + "▁retenu", + -13.206154823303223 + ], + [ + "▁Blüte", + -13.20628833770752 + ], + [ + "▁MINI", + -13.206467628479004 + ], + [ + "▁lumină", + -13.206567764282227 + ], + [ + "▁flawed", + -13.206846237182617 + ], + [ + "▁Belarus", + -13.207067489624023 + ], + [ + "Totul", + -13.207207679748535 + ], + [ + "hôte", + -13.207273483276367 + ], + [ + "▁verbringen", + -13.207315444946289 + ], + [ + "▁simultaneous", + -13.207344055175781 + ], + [ + "▁competiți", + -13.207402229309082 + ], + [ + "▁lancement", + -13.207413673400879 + ], + [ + "▁proprietati", + -13.207432746887207 + ], + [ + 
"▁angajator", + -13.207465171813965 + ], + [ + "▁ignorant", + -13.207674026489258 + ], + [ + "▁indicative", + -13.207700729370117 + ], + [ + "▁Bearbeitung", + -13.207961082458496 + ], + [ + "▁Ungaria", + -13.207961082458496 + ], + [ + "▁Sfint", + -13.208015441894531 + ], + [ + "▁Trojan", + -13.20804214477539 + ], + [ + "▁1911", + -13.208100318908691 + ], + [ + "▁reliabl", + -13.2081937789917 + ], + [ + "6-0", + -13.20827865600586 + ], + [ + "obst", + -13.208523750305176 + ], + [ + "▁relève", + -13.208579063415527 + ], + [ + "▁standpoint", + -13.208874702453613 + ], + [ + "ridden", + -13.208918571472168 + ], + [ + "▁Pdf", + -13.209005355834961 + ], + [ + "tatewide", + -13.209051132202148 + ], + [ + "Water", + -13.209062576293945 + ], + [ + "▁Pricing", + -13.209089279174805 + ], + [ + "▁protecţi", + -13.209168434143066 + ], + [ + "November", + -13.209615707397461 + ], + [ + "▁televiziune", + -13.20964241027832 + ], + [ + "Sodium", + -13.209881782531738 + ], + [ + "douceur", + -13.209942817687988 + ], + [ + "▁Flasche", + -13.210183143615723 + ], + [ + "3.9", + -13.210193634033203 + ], + [ + "▁electromagnetic", + -13.210195541381836 + ], + [ + "▁mitochondria", + -13.210195541381836 + ], + [ + "Suddenly", + -13.210199356079102 + ], + [ + "▁Drupal", + -13.210201263427734 + ], + [ + "▁supraveghere", + -13.210211753845215 + ], + [ + "▁cornea", + -13.210288047790527 + ], + [ + "räumt", + -13.210309982299805 + ], + [ + "▁healed", + -13.210410118103027 + ], + [ + "Roc", + -13.210649490356445 + ], + [ + "▁temporar", + -13.210707664489746 + ], + [ + "▁amaze", + -13.210770606994629 + ], + [ + "▁confrunta", + -13.210833549499512 + ], + [ + "Afterward", + -13.210836410522461 + ], + [ + "▁festgelegt", + -13.21084213256836 + ], + [ + "▁Kuchen", + -13.210844993591309 + ], + [ + "▁perpetual", + -13.210858345031738 + ], + [ + "systematically", + -13.211000442504883 + ], + [ + "▁coloan", + -13.211006164550781 + ], + [ + "▁extensi", + -13.211058616638184 + ], + [ + "▁Județean", + -13.211315155029297 + ], + [ + "▁amelior", + -13.211315155029297 + ], + [ + "▁illustrator", + -13.211315155029297 + ], + [ + "▁titanium", + -13.211344718933105 + ], + [ + "SMEs", + -13.211384773254395 + ], + [ + "taxable", + -13.211578369140625 + ], + [ + "▁Borough", + -13.211607933044434 + ], + [ + "verlust", + -13.211772918701172 + ], + [ + "ductive", + -13.21233081817627 + ], + [ + "▁Küste", + -13.212335586547852 + ], + [ + "▁végétal", + -13.212410926818848 + ], + [ + "▁breastfeeding", + -13.212435722351074 + ], + [ + "▁captivating", + -13.212435722351074 + ], + [ + "▁Chevy", + -13.212443351745605 + ], + [ + "▁aerospace", + -13.212469100952148 + ], + [ + "pozitia", + -13.213095664978027 + ], + [ + "Tutor", + -13.213199615478516 + ], + [ + "▁spum", + -13.213312149047852 + ], + [ + "curând", + -13.213419914245605 + ], + [ + "iscus", + -13.213458061218262 + ], + [ + "October", + -13.213495254516602 + ], + [ + "▁Reparatur", + -13.213557243347168 + ], + [ + "▁Servicii", + -13.213574409484863 + ], + [ + "▁Gonz", + -13.21357536315918 + ], + [ + "▁cybersecurity", + -13.21357536315918 + ], + [ + "▁UCLA", + -13.213678359985352 + ], + [ + "rissa", + -13.213835716247559 + ], + [ + "▁Kemp", + -13.213850021362305 + ], + [ + "▁piston", + -13.214046478271484 + ], + [ + "▁révèle", + -13.214118957519531 + ], + [ + "▁posséd", + -13.21412181854248 + ], + [ + "▁versehen", + -13.214129447937012 + ], + [ + "▁scrutin", + -13.214226722717285 + ], + [ + "donnant", + -13.21436882019043 + ], + [ + "▁Geschwindigkeit", + -13.214680671691895 + ], + [ + 
"▁Panasonic", + -13.214680671691895 + ], + [ + "audio", + -13.214700698852539 + ], + [ + "▁Packaging", + -13.214771270751953 + ], + [ + "phra", + -13.2147798538208 + ], + [ + "▁Letzte", + -13.214954376220703 + ], + [ + "insicht", + -13.215141296386719 + ], + [ + "▁sammeln", + -13.215243339538574 + ], + [ + "▁extins", + -13.215259552001953 + ], + [ + "▁collège", + -13.215266227722168 + ], + [ + "ancies", + -13.215343475341797 + ], + [ + "▁întâlnit", + -13.215350151062012 + ], + [ + "▁Servi", + -13.215392112731934 + ], + [ + "stattet", + -13.215493202209473 + ], + [ + "▁abstraction", + -13.215566635131836 + ], + [ + "▁candidature", + -13.215592384338379 + ], + [ + "ONU", + -13.215676307678223 + ], + [ + "▁raffle", + -13.215826988220215 + ], + [ + "▁Soldier", + -13.215834617614746 + ], + [ + "▁stipulate", + -13.215883255004883 + ], + [ + "▁vizual", + -13.215950012207031 + ], + [ + "lucht", + -13.216007232666016 + ], + [ + "▁circus", + -13.216068267822266 + ], + [ + "▁decree", + -13.216259002685547 + ], + [ + "immeuble", + -13.216367721557617 + ], + [ + "Store", + -13.216426849365234 + ], + [ + "randul", + -13.216622352600098 + ], + [ + "▁narration", + -13.216933250427246 + ], + [ + "implication", + -13.216958045959473 + ], + [ + "▁discontinued", + -13.216971397399902 + ], + [ + "▁Pilates", + -13.216989517211914 + ], + [ + "▁biais", + -13.21701431274414 + ], + [ + "panel", + -13.217325210571289 + ], + [ + "▁mower", + -13.217458724975586 + ], + [ + "▁Castro", + -13.21753978729248 + ], + [ + "pregătire", + -13.217641830444336 + ], + [ + "▁denomination", + -13.218062400817871 + ], + [ + "▁throttle", + -13.21806526184082 + ], + [ + "▁finition", + -13.218086242675781 + ], + [ + "▁clarification", + -13.218286514282227 + ], + [ + "laut", + -13.218366622924805 + ], + [ + "▁wastewater", + -13.2184419631958 + ], + [ + "▁Sanchez", + -13.218770980834961 + ], + [ + "▁Umfeld", + -13.2189359664917 + ], + [ + "▁consili", + -13.218997955322266 + ], + [ + "extrait", + -13.219013214111328 + ], + [ + "ionism", + -13.2190523147583 + ], + [ + "▁Cannabis", + -13.219186782836914 + ], + [ + "▁misconduct", + -13.219186782836914 + ], + [ + "▁shepherd", + -13.219186782836914 + ], + [ + "▁feminist", + -13.21919059753418 + ], + [ + "▁criterii", + -13.219212532043457 + ], + [ + "America", + -13.219219207763672 + ], + [ + "▁Telephone", + -13.219270706176758 + ], + [ + "▁Fritz", + -13.219438552856445 + ], + [ + "▁cheltui", + -13.219794273376465 + ], + [ + "▁Übung", + -13.219857215881348 + ], + [ + "făcută", + -13.22006893157959 + ], + [ + "▁străzi", + -13.220170021057129 + ], + [ + "influencing", + -13.220315933227539 + ], + [ + "▁Democracy", + -13.220321655273438 + ], + [ + "atorium", + -13.220376014709473 + ], + [ + "▁Stufe", + -13.220465660095215 + ], + [ + "▁Cornell", + -13.220660209655762 + ], + [ + "zugehen", + -13.22074031829834 + ], + [ + "▁coton", + -13.220804214477539 + ], + [ + "▁beinhaltet", + -13.220881462097168 + ], + [ + "▁kritisch", + -13.220884323120117 + ], + [ + "▁Kalender", + -13.22105884552002 + ], + [ + "▁Teig", + -13.221253395080566 + ], + [ + "cooked", + -13.221264839172363 + ], + [ + "▁diversité", + -13.221390724182129 + ], + [ + "recognizable", + -13.221446990966797 + ], + [ + "▁Dictionary", + -13.221446990966797 + ], + [ + "attribution", + -13.22145938873291 + ], + [ + "▁Teresa", + -13.221471786499023 + ], + [ + "▁Ahmad", + -13.221487998962402 + ], + [ + "HAM", + -13.221627235412598 + ], + [ + "▁floss", + -13.221668243408203 + ], + [ + "génie", + -13.2218599319458 + ], + [ + "▁Espa", + 
-13.221989631652832 + ], + [ + "hersteller", + -13.221993446350098 + ], + [ + "Musée", + -13.222001075744629 + ], + [ + "▁Crawford", + -13.222579002380371 + ], + [ + "▁Phantom", + -13.222579002380371 + ], + [ + "▁Jenkins", + -13.222640037536621 + ], + [ + "genauer", + -13.222774505615234 + ], + [ + "▁acţiuni", + -13.222885131835938 + ], + [ + "▁meciuri", + -13.22322940826416 + ], + [ + "▁verstärkt", + -13.22326374053955 + ], + [ + "▁troop", + -13.22341251373291 + ], + [ + "räder", + -13.223483085632324 + ], + [ + "Putting", + -13.223536491394043 + ], + [ + "NASDAQ", + -13.223712921142578 + ], + [ + "▁Buddhism", + -13.223712921142578 + ], + [ + "▁Religious", + -13.223712921142578 + ], + [ + "▁accommodating", + -13.223712921142578 + ], + [ + "▁lendemain", + -13.223712921142578 + ], + [ + "▁plywood", + -13.223714828491211 + ], + [ + "▁inflatable", + -13.223724365234375 + ], + [ + "▁sèche", + -13.223731994628906 + ], + [ + "▁fragil", + -13.223845481872559 + ], + [ + "▁Filip", + -13.224115371704102 + ], + [ + "▁Terrace", + -13.224274635314941 + ], + [ + "Biblio", + -13.22432804107666 + ], + [ + "resides", + -13.22448444366455 + ], + [ + "▁varf", + -13.22451114654541 + ], + [ + "Bildern", + -13.224528312683105 + ], + [ + "loß", + -13.224685668945312 + ], + [ + "555", + -13.224702835083008 + ], + [ + "▁astounding", + -13.224847793579102 + ], + [ + "▁brillant", + -13.224857330322266 + ], + [ + "▁Railroad", + -13.224871635437012 + ], + [ + "minimizing", + -13.224907875061035 + ], + [ + "▁Benedict", + -13.225019454956055 + ], + [ + "▁$400", + -13.225068092346191 + ], + [ + "▁schematic", + -13.225217819213867 + ], + [ + "Canada", + -13.225371360778809 + ], + [ + "▁psihic", + -13.225415229797363 + ], + [ + "▁avertiz", + -13.225497245788574 + ], + [ + "▁Breed", + -13.225550651550293 + ], + [ + "▁gradina", + -13.225606918334961 + ], + [ + "▁Liege", + -13.225822448730469 + ], + [ + "▁Retirement", + -13.225983619689941 + ], + [ + "▁pergola", + -13.226005554199219 + ], + [ + "▁Kuwait", + -13.2260103225708 + ], + [ + "▁logistic", + -13.22629451751709 + ], + [ + "▁captive", + -13.22651481628418 + ], + [ + "prepared", + -13.226568222045898 + ], + [ + "▁prononc", + -13.226568222045898 + ], + [ + "Celui", + -13.226676940917969 + ], + [ + "deutschland", + -13.227120399475098 + ], + [ + "▁devreme", + -13.227124214172363 + ], + [ + "▁părți", + -13.227270126342773 + ], + [ + "▁1934", + -13.227517127990723 + ], + [ + "▁ersetzt", + -13.227560997009277 + ], + [ + "▁frightening", + -13.227689743041992 + ], + [ + "▁fiecărui", + -13.227819442749023 + ], + [ + "correct", + -13.22799015045166 + ], + [ + "6.6", + -13.228057861328125 + ], + [ + "▁Manitoba", + -13.228259086608887 + ], + [ + "Chartered", + -13.228416442871094 + ], + [ + "▁părăs", + -13.228543281555176 + ], + [ + "Powered", + -13.228697776794434 + ], + [ + "impede", + -13.22876262664795 + ], + [ + "agonist", + -13.22878646850586 + ], + [ + "▁stratégique", + -13.228829383850098 + ], + [ + "▁vigilant", + -13.228830337524414 + ], + [ + "faceted", + -13.228930473327637 + ], + [ + "available", + -13.229308128356934 + ], + [ + "▁Promise", + -13.229388236999512 + ], + [ + "▁humorous", + -13.229446411132812 + ], + [ + "treibt", + -13.229449272155762 + ], + [ + "▁Patrol", + -13.229514122009277 + ], + [ + "huh", + -13.229523658752441 + ], + [ + "ztlich", + -13.229804039001465 + ], + [ + "▁rejet", + -13.2299165725708 + ], + [ + "odeur", + -13.229935646057129 + ], + [ + "usziehbar", + -13.22996997833252 + ], + [ + "▁gespannt", + -13.229972839355469 + ], + [ + "church", + 
-13.230018615722656 + ], + [ + "▁Popescu", + -13.230109214782715 + ], + [ + "▁einmalig", + -13.230518341064453 + ], + [ + "diluted", + -13.230551719665527 + ], + [ + "lighted", + -13.231070518493652 + ], + [ + "▁stattfinden", + -13.23111343383789 + ], + [ + "▁Reaktion", + -13.231183052062988 + ], + [ + "▁délivr", + -13.23134994506836 + ], + [ + "▁Helfer", + -13.231407165527344 + ], + [ + "Fiind", + -13.23142147064209 + ], + [ + "rmând", + -13.231507301330566 + ], + [ + "▁Beweis", + -13.231671333312988 + ], + [ + "▁Violet", + -13.231733322143555 + ], + [ + "kamera", + -13.231764793395996 + ], + [ + "▁Romney", + -13.231779098510742 + ], + [ + "▁Bradford", + -13.231800079345703 + ], + [ + "stellbar", + -13.231852531433105 + ], + [ + "▁roadmap", + -13.231921195983887 + ], + [ + "▁subconscious", + -13.23204231262207 + ], + [ + "contrasting", + -13.232138633728027 + ], + [ + "mécanisme", + -13.232254981994629 + ], + [ + "kämpft", + -13.232255935668945 + ], + [ + "▁Preston", + -13.232719421386719 + ], + [ + "▁Anliegen", + -13.232802391052246 + ], + [ + "▁necessities", + -13.232827186584473 + ], + [ + "▁detrimental", + -13.232828140258789 + ], + [ + "▁sprawl", + -13.232830047607422 + ], + [ + "▁Erfüllung", + -13.23287582397461 + ], + [ + "▁massacre", + -13.2329683303833 + ], + [ + "▁pietre", + -13.232987403869629 + ], + [ + "▁situații", + -13.233027458190918 + ], + [ + "vêtement", + -13.233080863952637 + ], + [ + "Listed", + -13.233144760131836 + ], + [ + "▁extravagant", + -13.233399391174316 + ], + [ + "▁axle", + -13.233525276184082 + ], + [ + "OTT", + -13.233663558959961 + ], + [ + "wildly", + -13.233744621276855 + ], + [ + "70,000", + -13.233797073364258 + ], + [ + "▁chauffeur", + -13.23384952545166 + ], + [ + "▁Brasov", + -13.233972549438477 + ], + [ + "▁Fähigkeiten", + -13.233972549438477 + ], + [ + "▁staatlich", + -13.234025001525879 + ], + [ + "outlines", + -13.234034538269043 + ], + [ + "▁aufmerksam", + -13.234545707702637 + ], + [ + "▁Relation", + -13.234749794006348 + ], + [ + "▁Stephan", + -13.234947204589844 + ], + [ + "yland", + -13.23494815826416 + ], + [ + "proclaimed", + -13.235086441040039 + ], + [ + "Wallet", + -13.235100746154785 + ], + [ + "verarbeitung", + -13.235118865966797 + ], + [ + "▁überraschen", + -13.235118865966797 + ], + [ + "▁Injury", + -13.235125541687012 + ], + [ + "▁horsepower", + -13.235237121582031 + ], + [ + "▁Tropical", + -13.23523998260498 + ], + [ + "▁wives", + -13.235459327697754 + ], + [ + "adherence", + -13.235677719116211 + ], + [ + "schätzung", + -13.235692977905273 + ], + [ + "▁coherent", + -13.235708236694336 + ], + [ + "parlament", + -13.23574161529541 + ], + [ + "▁stup", + -13.235852241516113 + ], + [ + "▁resonance", + -13.23626708984375 + ], + [ + "▁inheritance", + -13.236355781555176 + ], + [ + "commenced", + -13.23645305633545 + ], + [ + "▁supervise", + -13.236475944519043 + ], + [ + "▁facilitator", + -13.236488342285156 + ], + [ + "fares", + -13.236678123474121 + ], + [ + "▁Tibet", + -13.23672866821289 + ], + [ + "communication", + -13.236787796020508 + ], + [ + "yog", + -13.236806869506836 + ], + [ + "▁WLAN", + -13.236842155456543 + ], + [ + "▁Chili", + -13.23685073852539 + ], + [ + "▁Harold", + -13.2369966506958 + ], + [ + "▁Guerre", + -13.237005233764648 + ], + [ + "▁Femme", + -13.237146377563477 + ], + [ + "▁Lisbon", + -13.237231254577637 + ], + [ + "▁mulțumi", + -13.237415313720703 + ], + [ + "▁vorbereitet", + -13.237415313720703 + ], + [ + "▁aperture", + -13.237422943115234 + ], + [ + "▁Universities", + -13.237442016601562 + ], + [ + 
"▁reckless", + -13.237471580505371 + ], + [ + "▁Botschaft", + -13.237533569335938 + ], + [ + "▁Squad", + -13.238022804260254 + ], + [ + "▁buoy", + -13.238061904907227 + ], + [ + "participarea", + -13.238236427307129 + ], + [ + "stiinta", + -13.238389015197754 + ], + [ + "▁repeal", + -13.238415718078613 + ], + [ + "drilled", + -13.238489151000977 + ], + [ + "▁Conversation", + -13.238567352294922 + ], + [ + "▁subsid", + -13.238615036010742 + ], + [ + "anstalt", + -13.238741874694824 + ], + [ + "faktor", + -13.23874282836914 + ], + [ + "▁swamp", + -13.238790512084961 + ], + [ + "pflichtig", + -13.238921165466309 + ], + [ + "▁camion", + -13.238970756530762 + ], + [ + "▁gouvern", + -13.239032745361328 + ], + [ + "▁archaeological", + -13.239141464233398 + ], + [ + "▁glitch", + -13.239198684692383 + ], + [ + "average", + -13.239294052124023 + ], + [ + "▁coffre", + -13.239481925964355 + ], + [ + "▁Insert", + -13.239513397216797 + ], + [ + "▁colonne", + -13.2395601272583 + ], + [ + "▁Assess", + -13.23962116241455 + ], + [ + "▁batches", + -13.239716529846191 + ], + [ + "▁ammunition", + -13.239717483520508 + ], + [ + "▁scissors", + -13.239717483520508 + ], + [ + "▁Locksmith", + -13.239740371704102 + ], + [ + "▁Bollywood", + -13.239991188049316 + ], + [ + "expédi", + -13.240288734436035 + ], + [ + "▁descendants", + -13.24039363861084 + ], + [ + "▁unwilling", + -13.240506172180176 + ], + [ + "▁Noise", + -13.240649223327637 + ], + [ + "▁Directive", + -13.240660667419434 + ], + [ + "ATOR", + -13.240765571594238 + ], + [ + "▁Rajasthan", + -13.240870475769043 + ], + [ + "▁chaotic", + -13.240888595581055 + ], + [ + "▁NEED", + -13.24093246459961 + ], + [ + "▁părere", + -13.24095344543457 + ], + [ + "▁begonnen", + -13.241448402404785 + ], + [ + "▁Reef", + -13.241504669189453 + ], + [ + "▁vorgesehen", + -13.24161434173584 + ], + [ + "▁allocate", + -13.241826057434082 + ], + [ + "▁exceptionnel", + -13.241936683654785 + ], + [ + "▁gefertigt", + -13.24203872680664 + ], + [ + "fading", + -13.242072105407715 + ], + [ + "▁interpersonal", + -13.242178916931152 + ], + [ + "▁occupie", + -13.242204666137695 + ], + [ + "▁Teatr", + -13.242579460144043 + ], + [ + "▁kilomètres", + -13.242603302001953 + ], + [ + "▁verbinden", + -13.242608070373535 + ], + [ + "▁Frucht", + -13.242643356323242 + ], + [ + "augmented", + -13.242720603942871 + ], + [ + "▁twentieth", + -13.243181228637695 + ], + [ + "▁aggression", + -13.243183135986328 + ], + [ + "▁Miracle", + -13.243184089660645 + ], + [ + "▁peninsula", + -13.243184089660645 + ], + [ + "▁Fernando", + -13.243185043334961 + ], + [ + "▁autorităţil", + -13.243203163146973 + ], + [ + "▁Iisus", + -13.243217468261719 + ], + [ + "▁puck", + -13.243423461914062 + ], + [ + "titel", + -13.243454933166504 + ], + [ + "▁remake", + -13.243562698364258 + ], + [ + "freiheit", + -13.243563652038574 + ], + [ + "▁Belize", + -13.243590354919434 + ], + [ + "▁secundar", + -13.243779182434082 + ], + [ + "▁perpetrat", + -13.243786811828613 + ], + [ + "jedenfalls", + -13.243797302246094 + ], + [ + "linked", + -13.243820190429688 + ], + [ + "▁dégag", + -13.243918418884277 + ], + [ + "LAY", + -13.243926048278809 + ], + [ + "behandlung", + -13.244172096252441 + ], + [ + "▁1928", + -13.244193077087402 + ], + [ + "▁Nickel", + -13.244205474853516 + ], + [ + "rophy", + -13.244256973266602 + ], + [ + "▁autonomy", + -13.244338989257812 + ], + [ + "▁Treffen", + -13.244402885437012 + ], + [ + "▁groundbreaking", + -13.24445915222168 + ], + [ + "politisch", + -13.244484901428223 + ], + [ + "▁Vector", + 
-13.244553565979004 + ], + [ + "oricine", + -13.244684219360352 + ], + [ + "utilisées", + -13.244684219360352 + ], + [ + "plete", + -13.244771003723145 + ], + [ + "droht", + -13.244918823242188 + ], + [ + "▁alternativ", + -13.245104789733887 + ], + [ + "▁Bernie", + -13.245213508605957 + ], + [ + "▁embellish", + -13.245260238647461 + ], + [ + "▁Curriculum", + -13.24549674987793 + ], + [ + "herrscht", + -13.245525360107422 + ], + [ + "escalier", + -13.246126174926758 + ], + [ + "hian", + -13.246333122253418 + ], + [ + "ertaining", + -13.246387481689453 + ], + [ + "hitter", + -13.246430397033691 + ], + [ + "▁kompetente", + -13.24665641784668 + ], + [ + "▁trekking", + -13.246760368347168 + ], + [ + "EACH", + -13.246841430664062 + ], + [ + "▁Bedien", + -13.2470703125 + ], + [ + "starred", + -13.247169494628906 + ], + [ + "▁săptămâna", + -13.247236251831055 + ], + [ + "▁Gratuit", + -13.247239112854004 + ], + [ + "▁Jahrzehnte", + -13.247241020202637 + ], + [ + "ingénieur", + -13.24731731414795 + ], + [ + "▁Huang", + -13.24736213684082 + ], + [ + "Music", + -13.247401237487793 + ], + [ + "misiei", + -13.247544288635254 + ], + [ + "▁masuri", + -13.247733116149902 + ], + [ + "▁Achievement", + -13.247817039489746 + ], + [ + "▁Dorothy", + -13.247817039489746 + ], + [ + "blätter", + -13.247817993164062 + ], + [ + "éloign", + -13.247817993164062 + ], + [ + "▁Anglia", + -13.247990608215332 + ], + [ + "brach", + -13.248013496398926 + ], + [ + "▁Optimization", + -13.248085021972656 + ], + [ + "6.7", + -13.248170852661133 + ], + [ + "winkel", + -13.248210906982422 + ], + [ + "contenan", + -13.248347282409668 + ], + [ + "Astăzi", + -13.248398780822754 + ], + [ + "wiped", + -13.248441696166992 + ], + [ + "granting", + -13.248665809631348 + ], + [ + "▁plăti", + -13.248859405517578 + ], + [ + "▁Compensation", + -13.248979568481445 + ], + [ + "▁Verkäufer", + -13.248979568481445 + ], + [ + "▁angajați", + -13.248980522155762 + ], + [ + "▁diminished", + -13.24902057647705 + ], + [ + "employment", + -13.249250411987305 + ], + [ + "yahoo", + -13.249435424804688 + ], + [ + "▁détrui", + -13.249698638916016 + ], + [ + "▁suffisant", + -13.24982738494873 + ], + [ + "▁Moldovei", + -13.250144004821777 + ], + [ + "▁Pokemon", + -13.250144004821777 + ], + [ + "▁Malcolm", + -13.250144958496094 + ], + [ + "▁mysteries", + -13.250147819519043 + ], + [ + "▁Diversity", + -13.250149726867676 + ], + [ + "▁clinique", + -13.250327110290527 + ], + [ + "landais", + -13.250344276428223 + ], + [ + "▁campanii", + -13.250399589538574 + ], + [ + "▁témoignage", + -13.250439643859863 + ], + [ + "▁paralel", + -13.250467300415039 + ], + [ + "▁travailleurs", + -13.250576972961426 + ], + [ + "▁salvage", + -13.250580787658691 + ], + [ + "▁crayon", + -13.250732421875 + ], + [ + "immédiat", + -13.25085163116455 + ], + [ + "hopped", + -13.250958442687988 + ], + [ + "▁senzor", + -13.25102710723877 + ], + [ + "▁imbunatati", + -13.251073837280273 + ], + [ + "▁capitalize", + -13.2511568069458 + ], + [ + "▁Elephant", + -13.25130844116211 + ], + [ + "▁insomnia", + -13.25131607055664 + ], + [ + "▁Ansicht", + -13.251325607299805 + ], + [ + "▁lupte", + -13.251556396484375 + ], + [ + "▁genomic", + -13.251557350158691 + ], + [ + "▁Grape", + -13.251769065856934 + ], + [ + "MONT", + -13.25197982788086 + ], + [ + "métiers", + -13.252004623413086 + ], + [ + "▁Pierce", + -13.252123832702637 + ], + [ + "consulted", + -13.252388954162598 + ], + [ + "▁Responsible", + -13.252474784851074 + ], + [ + "symmetry", + -13.252476692199707 + ], + [ + "▁sulfur", + 
-13.252487182617188 + ], + [ + "▁înapoi", + -13.252510070800781 + ], + [ + "▁Junction", + -13.252549171447754 + ], + [ + "▁trilogy", + -13.252622604370117 + ], + [ + "▁unkompliziert", + -13.253059387207031 + ], + [ + "▁zugänglich", + -13.253059387207031 + ], + [ + "▁préfèr", + -13.253153800964355 + ], + [ + "oarelor", + -13.253361701965332 + ], + [ + "langage", + -13.253460884094238 + ], + [ + "admired", + -13.253589630126953 + ], + [ + "platform", + -13.253595352172852 + ], + [ + "▁pluralit", + -13.253616333007812 + ], + [ + "▁betrachtet", + -13.253643035888672 + ], + [ + "▁reproduc", + -13.253790855407715 + ], + [ + "exemple", + -13.25385570526123 + ], + [ + "▁conspir", + -13.254347801208496 + ], + [ + "▁pelvi", + -13.25437068939209 + ], + [ + "leased", + -13.254551887512207 + ], + [ + "▁souffle", + -13.254570960998535 + ], + [ + "▁approprié", + -13.254705429077148 + ], + [ + "absorbing", + -13.254817962646484 + ], + [ + "dividing", + -13.254855155944824 + ], + [ + "herently", + -13.255147933959961 + ], + [ + "▁blister", + -13.255179405212402 + ], + [ + "löst", + -13.255182266235352 + ], + [ + "Apotheke", + -13.255398750305176 + ], + [ + "▁Asociaţi", + -13.255424499511719 + ], + [ + "education", + -13.255904197692871 + ], + [ + "▁retract", + -13.255982398986816 + ], + [ + "▁appraise", + -13.255990982055664 + ], + [ + "▁Debbie", + -13.256075859069824 + ], + [ + "▁arhitect", + -13.256193161010742 + ], + [ + "▁Mohamed", + -13.256568908691406 + ], + [ + "▁îndrept", + -13.256568908691406 + ], + [ + "▁exhaustive", + -13.256753921508789 + ], + [ + "▁Notebook", + -13.257004737854004 + ], + [ + "crashing", + -13.257068634033203 + ], + [ + "▁Betreiber", + -13.257155418395996 + ], + [ + "▁présidentielle", + -13.257159233093262 + ], + [ + "▁Träger", + -13.257172584533691 + ], + [ + "▁noteworthy", + -13.257259368896484 + ], + [ + "▁séparé", + -13.257729530334473 + ], + [ + "▁doppelt", + -13.257795333862305 + ], + [ + "tină", + -13.258066177368164 + ], + [ + "Quelques", + -13.258085250854492 + ], + [ + "culoarea", + -13.258100509643555 + ], + [ + "▁ethic", + -13.258166313171387 + ], + [ + "▁cohesive", + -13.258329391479492 + ], + [ + "▁congratulations", + -13.258334159851074 + ], + [ + "▁sovereignty", + -13.25833797454834 + ], + [ + "▁Aplica", + -13.258413314819336 + ], + [ + "▁Covenant", + -13.25851058959961 + ], + [ + "▁multicultural", + -13.258591651916504 + ], + [ + "assemblée", + -13.258955001831055 + ], + [ + "▁petals", + -13.258974075317383 + ], + [ + "erode", + -13.259026527404785 + ], + [ + "▁porumb", + -13.259035110473633 + ], + [ + "▁Barrier", + -13.259050369262695 + ], + [ + "▁WWE", + -13.259085655212402 + ], + [ + "Etwa", + -13.259175300598145 + ], + [ + "▁recunosc", + -13.259271621704102 + ], + [ + "▁turtle", + -13.259415626525879 + ], + [ + "▁vârf", + -13.259444236755371 + ], + [ + "▁Ranking", + -13.259448051452637 + ], + [ + "▁sympathetic", + -13.259514808654785 + ], + [ + "exploded", + -13.2595796585083 + ], + [ + "▁influenț", + -13.259591102600098 + ], + [ + "▁Fireplace", + -13.25972843170166 + ], + [ + "▁Nachwuchs", + -13.260090827941895 + ], + [ + "▁empfohlen", + -13.260090827941895 + ], + [ + "Voir", + -13.260661125183105 + ], + [ + "▁Vimeo", + -13.26069164276123 + ], + [ + "▁weaving", + -13.260967254638672 + ], + [ + "beneficiar", + -13.261198043823242 + ], + [ + "▁balade", + -13.261216163635254 + ], + [ + "▁Mercy", + -13.261566162109375 + ], + [ + "3.000", + -13.26181697845459 + ], + [ + "Immediately", + -13.261857032775879 + ], + [ + "▁frosting", + -13.261868476867676 + ], + [ 
+ "▁Fiscal", + -13.261882781982422 + ], + [ + "downloadable", + -13.26188850402832 + ], + [ + "▁Hwy", + -13.261902809143066 + ], + [ + "évoluer", + -13.261951446533203 + ], + [ + "▁vieille", + -13.2620210647583 + ], + [ + "heißen", + -13.262436866760254 + ], + [ + "▁étrangère", + -13.262446403503418 + ], + [ + "▁incapable", + -13.262490272521973 + ], + [ + "volunteered", + -13.262520790100098 + ], + [ + "fortunately", + -13.262564659118652 + ], + [ + "company", + -13.262738227844238 + ], + [ + "denkt", + -13.2627592086792 + ], + [ + "▁citesc", + -13.262818336486816 + ], + [ + "▁intrebare", + -13.262896537780762 + ], + [ + "pleasantly", + -13.262990951538086 + ], + [ + "▁Minecraft", + -13.263079643249512 + ], + [ + "▁Schmuck", + -13.26308536529541 + ], + [ + "▁maghiar", + -13.263099670410156 + ], + [ + "conductive", + -13.263339042663574 + ], + [ + "décrit", + -13.263534545898438 + ], + [ + "provide", + -13.26353931427002 + ], + [ + "▁depăş", + -13.263628959655762 + ], + [ + "ituated", + -13.263657569885254 + ], + [ + "▁trumpet", + -13.264216423034668 + ], + [ + "▁nastere", + -13.2642240524292 + ], + [ + "▁Région", + -13.264245986938477 + ], + [ + "Occupational", + -13.264411926269531 + ], + [ + "▁Grecia", + -13.264415740966797 + ], + [ + "▁Conclusion", + -13.26449203491211 + ], + [ + "▁collaborateurs", + -13.264927864074707 + ], + [ + "▁Alibaba", + -13.265398025512695 + ], + [ + "▁amplasat", + -13.265398979187012 + ], + [ + "▁Plastik", + -13.265992164611816 + ], + [ + "▁stash", + -13.266023635864258 + ], + [ + "▁Bonnie", + -13.266045570373535 + ], + [ + "▁ehrlich", + -13.266156196594238 + ], + [ + "▁contention", + -13.266193389892578 + ], + [ + "▁Oslo", + -13.266263008117676 + ], + [ + "englische", + -13.266319274902344 + ], + [ + "measurable", + -13.266439437866211 + ], + [ + "loppy", + -13.266470909118652 + ], + [ + "▁Refrigerat", + -13.266579627990723 + ], + [ + "▁remboursement", + -13.266580581665039 + ], + [ + "▁societăţi", + -13.266580581665039 + ], + [ + "translates", + -13.266607284545898 + ], + [ + "ichtigkeit", + -13.266685485839844 + ], + [ + "agentur", + -13.266741752624512 + ], + [ + "▁compute", + -13.266800880432129 + ], + [ + "berater", + -13.266921043395996 + ], + [ + "▁Georgetown", + -13.266945838928223 + ], + [ + "wolves", + -13.266951560974121 + ], + [ + "ceased", + -13.266959190368652 + ], + [ + "▁Binary", + -13.267030715942383 + ], + [ + "▁kontrolliert", + -13.267172813415527 + ], + [ + "informer", + -13.267416000366211 + ], + [ + "lehrer", + -13.267578125 + ], + [ + "lieferung", + -13.267709732055664 + ], + [ + "▁definit", + -13.267742156982422 + ], + [ + "chèque", + -13.267765045166016 + ], + [ + "▁clergy", + -13.267765045166016 + ], + [ + "▁ministries", + -13.267767906188965 + ], + [ + "▁plague", + -13.267779350280762 + ], + [ + "▁Jedi", + -13.267805099487305 + ], + [ + "▁Blackjack", + -13.268025398254395 + ], + [ + "▁subsection", + -13.26807689666748 + ], + [ + "▁Sachsen", + -13.268121719360352 + ], + [ + "valorile", + -13.268146514892578 + ], + [ + "molded", + -13.26816463470459 + ], + [ + "▁betroffen", + -13.268183708190918 + ], + [ + "▁adecvat", + -13.268229484558105 + ], + [ + "▁collègue", + -13.26835823059082 + ], + [ + "▁chinez", + -13.268392562866211 + ], + [ + "emelle", + -13.268695831298828 + ], + [ + "▁körperliche", + -13.268902778625488 + ], + [ + "▁titan", + -13.26891040802002 + ], + [ + "▁sophistication", + -13.268951416015625 + ], + [ + "▁provoke", + -13.268957138061523 + ], + [ + "▁pensii", + -13.269042015075684 + ], + [ + "▁Tucker", + 
-13.269377708435059 + ], + [ + "▁motoare", + -13.26943302154541 + ], + [ + "supported", + -13.269536972045898 + ], + [ + "▁Sicil", + -13.269697189331055 + ], + [ + "▁Ausgangs", + -13.26987361907959 + ], + [ + "▁verletzt", + -13.269908905029297 + ], + [ + "Ligue", + -13.269996643066406 + ], + [ + "▁organizatori", + -13.270026206970215 + ], + [ + "▁apprentice", + -13.270099639892578 + ], + [ + "▁Potato", + -13.270183563232422 + ], + [ + "▁Duft", + -13.27039623260498 + ], + [ + "▁medicament", + -13.270566940307617 + ], + [ + "Hôtel", + -13.270740509033203 + ], + [ + "▁Triangle", + -13.270842552185059 + ], + [ + "buted", + -13.271100044250488 + ], + [ + "▁Bentley", + -13.271336555480957 + ], + [ + "următoarele", + -13.271389961242676 + ], + [ + "animate", + -13.271404266357422 + ], + [ + "megapixel", + -13.271404266357422 + ], + [ + "einfachen", + -13.271514892578125 + ], + [ + "▁performanț", + -13.271544456481934 + ], + [ + "lurry", + -13.27184009552002 + ], + [ + "suffisamment", + -13.27192211151123 + ], + [ + "▁Weihnachten", + -13.27192211151123 + ], + [ + "▁Detective", + -13.27194595336914 + ], + [ + "▁lovit", + -13.272049903869629 + ], + [ + "▁blouse", + -13.27213191986084 + ], + [ + "▁hartie", + -13.272163391113281 + ], + [ + "vro", + -13.27225112915039 + ], + [ + "▁disastrous", + -13.272517204284668 + ], + [ + "vermutlich", + -13.2725191116333 + ], + [ + "▁Stafford", + -13.272527694702148 + ], + [ + "ehlt", + -13.272628784179688 + ], + [ + "▁vielseitig", + -13.272643089294434 + ], + [ + "Manifest", + -13.273274421691895 + ], + [ + "homage", + -13.27354907989502 + ], + [ + "menée", + -13.273566246032715 + ], + [ + "▁erläuter", + -13.27370834350586 + ], + [ + "▁volontaire", + -13.273709297180176 + ], + [ + "wrought", + -13.27371597290039 + ], + [ + "▁Naples", + -13.273719787597656 + ], + [ + "recommending", + -13.273759841918945 + ], + [ + "▁thermique", + -13.273774147033691 + ], + [ + "▁subtitle", + -13.273787498474121 + ], + [ + "▁Slam", + -13.273809432983398 + ], + [ + "▁necesitate", + -13.273809432983398 + ], + [ + "trimmed", + -13.274099349975586 + ], + [ + "urmatoarele", + -13.274178504943848 + ], + [ + "▁Sorin", + -13.274245262145996 + ], + [ + "▁compromis", + -13.274300575256348 + ], + [ + "overcoming", + -13.274477005004883 + ], + [ + "▁Samantha", + -13.274901390075684 + ], + [ + "dazzling", + -13.27490234375 + ], + [ + "▁Pearson", + -13.274903297424316 + ], + [ + "▁glazing", + -13.274911880493164 + ], + [ + "Revelation", + -13.274921417236328 + ], + [ + "destinée", + -13.275156021118164 + ], + [ + "öffnet", + -13.27515983581543 + ], + [ + "CERT", + -13.275327682495117 + ], + [ + "▁Sneak", + -13.275503158569336 + ], + [ + "proiectele", + -13.275605201721191 + ], + [ + "▁longitudinal", + -13.27609634399414 + ], + [ + "▁cocaine", + -13.276098251342773 + ], + [ + "▁universitar", + -13.276108741760254 + ], + [ + "▁refreshments", + -13.276166915893555 + ], + [ + "▁instanţ", + -13.276243209838867 + ], + [ + "▁kostenfrei", + -13.276397705078125 + ], + [ + "▁comédie", + -13.276451110839844 + ], + [ + "▁Locat", + -13.276725769042969 + ], + [ + "▁Albania", + -13.276732444763184 + ], + [ + "▁mécanique", + -13.276776313781738 + ], + [ + "messung", + -13.27683162689209 + ], + [ + "issus", + -13.277260780334473 + ], + [ + "pinned", + -13.277328491210938 + ], + [ + "▁sanft", + -13.277335166931152 + ], + [ + "▁geprüft", + -13.277435302734375 + ], + [ + "▁procè", + -13.277442932128906 + ], + [ + "▁Üb", + -13.277765274047852 + ], + [ + "5-0", + -13.277802467346191 + ], + [ + "▁Catering", + 
-13.277957916259766 + ], + [ + "▁prosperous", + -13.27801513671875 + ], + [ + "▁replication", + -13.278098106384277 + ], + [ + "▁obese", + -13.278441429138184 + ], + [ + "clerosis", + -13.278489112854004 + ], + [ + "▁Carnegie", + -13.278489112854004 + ], + [ + "▁Incredible", + -13.278489112854004 + ], + [ + "▁Teppich", + -13.278489112854004 + ], + [ + "▁crunchy", + -13.278489112854004 + ], + [ + "▁vomiting", + -13.278529167175293 + ], + [ + "▁sourire", + -13.278619766235352 + ], + [ + "publish", + -13.278948783874512 + ], + [ + "▁exterioar", + -13.279094696044922 + ], + [ + "▁forehead", + -13.279107093811035 + ], + [ + "▁climatique", + -13.279313087463379 + ], + [ + "▁conservator", + -13.279458999633789 + ], + [ + "▁Russland", + -13.279687881469727 + ], + [ + "▁kombiniert", + -13.279687881469727 + ], + [ + "▁Thrones", + -13.279688835144043 + ], + [ + "▁Griffith", + -13.27968978881836 + ], + [ + "▁fragrant", + -13.279695510864258 + ], + [ + "▁RSVP", + -13.279698371887207 + ], + [ + "klima", + -13.279751777648926 + ], + [ + "▁situație", + -13.279808044433594 + ], + [ + "deschiderea", + -13.280009269714355 + ], + [ + "▁moale", + -13.280033111572266 + ], + [ + "▁Trevor", + -13.280112266540527 + ], + [ + "ménager", + -13.28011417388916 + ], + [ + "deploying", + -13.280428886413574 + ], + [ + "▁Loft", + -13.280500411987305 + ], + [ + "▁Willkommen", + -13.28059196472168 + ], + [ + "▁Bezirks", + -13.280887603759766 + ], + [ + "▁Himself", + -13.280975341796875 + ], + [ + "▁quarant", + -13.28101634979248 + ], + [ + "▁1901", + -13.281079292297363 + ], + [ + "▁tripod", + -13.28136920928955 + ], + [ + "▁récolt", + -13.281553268432617 + ], + [ + "natură", + -13.281631469726562 + ], + [ + "School", + -13.281649589538574 + ], + [ + "contested", + -13.281773567199707 + ], + [ + "bwohl", + -13.281784057617188 + ], + [ + "Darren", + -13.281830787658691 + ], + [ + "medicine", + -13.281903266906738 + ], + [ + "▁Impuls", + -13.282041549682617 + ], + [ + "prevailing", + -13.282057762145996 + ], + [ + "▁orthodontic", + -13.282089233398438 + ], + [ + "▁sequential", + -13.282089233398438 + ], + [ + "▁Kolkata", + -13.28209114074707 + ], + [ + "▁séch", + -13.282100677490234 + ], + [ + "▁diaper", + -13.28212833404541 + ], + [ + "▁simplifie", + -13.282144546508789 + ], + [ + "▁reflux", + -13.282163619995117 + ], + [ + "▁Hypo", + -13.282242774963379 + ], + [ + "imprimer", + -13.282251358032227 + ], + [ + "▁Folosi", + -13.282401084899902 + ], + [ + "Info", + -13.282570838928223 + ], + [ + "▁Investiga", + -13.282801628112793 + ], + [ + "stabilirea", + -13.282845497131348 + ], + [ + "élis", + -13.283149719238281 + ], + [ + "ccessed", + -13.28320026397705 + ], + [ + "▁recyclable", + -13.283293724060059 + ], + [ + "▁forbidden", + -13.283295631408691 + ], + [ + "▁Colonel", + -13.283297538757324 + ], + [ + "▁nisip", + -13.28330135345459 + ], + [ + "▁Fundamental", + -13.283303260803223 + ], + [ + "▁nouveauté", + -13.283308029174805 + ], + [ + "khi", + -13.283357620239258 + ], + [ + "▁ecology", + -13.28339672088623 + ], + [ + "▁filament", + -13.283540725708008 + ], + [ + "▁relentless", + -13.283559799194336 + ], + [ + "▁Behavior", + -13.283669471740723 + ], + [ + "titulaire", + -13.283900260925293 + ], + [ + "▁administrativ", + -13.28404426574707 + ], + [ + "▁Vorlage", + -13.284209251403809 + ], + [ + "zeigte", + -13.28427791595459 + ], + [ + "▁Bäume", + -13.284497261047363 + ], + [ + "▁Kartoffel", + -13.284497261047363 + ], + [ + "▁Possible", + -13.284500122070312 + ], + [ + "▁perturb", + -13.28466510772705 + ], + [ + 
"▁Grigor", + -13.284717559814453 + ], + [ + "▁streng", + -13.284759521484375 + ], + [ + "▁vânzare", + -13.285101890563965 + ], + [ + "concentrating", + -13.285698890686035 + ], + [ + "▁rechtzeitig", + -13.2857027053833 + ], + [ + "▁eternity", + -13.28570556640625 + ], + [ + "▁Puzzle", + -13.28575611114502 + ], + [ + "▁malade", + -13.285775184631348 + ], + [ + "▁Metallic", + -13.285776138305664 + ], + [ + "▁Unterhaltung", + -13.285783767700195 + ], + [ + "▁4:00", + -13.285820960998535 + ], + [ + "▁magique", + -13.285908699035645 + ], + [ + "▁cellphone", + -13.285975456237793 + ], + [ + "▁inhibition", + -13.286023139953613 + ], + [ + "▁remplacement", + -13.286025047302246 + ], + [ + "▁WWII", + -13.286089897155762 + ], + [ + "Eff", + -13.286258697509766 + ], + [ + "kontakt", + -13.286832809448242 + ], + [ + "Update", + -13.286869049072266 + ], + [ + "▁Emerald", + -13.286910057067871 + ], + [ + "▁hammock", + -13.286910057067871 + ], + [ + "POWER", + -13.286917686462402 + ], + [ + "automne", + -13.286917686462402 + ], + [ + "▁(2004)", + -13.286961555480957 + ], + [ + "▁participanți", + -13.287012100219727 + ], + [ + "1998)", + -13.287014961242676 + ], + [ + "▁deletion", + -13.287186622619629 + ], + [ + "▁Proiect", + -13.287226676940918 + ], + [ + "IDENT", + -13.287504196166992 + ], + [ + "▁precis", + -13.287623405456543 + ], + [ + "▁limp", + -13.287676811218262 + ], + [ + "▁Pompe", + -13.287686347961426 + ], + [ + "▁ménage", + -13.28780746459961 + ], + [ + "▁Wahrheit", + -13.288119316101074 + ], + [ + "▁Intelligent", + -13.28812026977539 + ], + [ + "▁instability", + -13.2881441116333 + ], + [ + "insurance", + -13.288346290588379 + ], + [ + "▁Nursery", + -13.288352966308594 + ], + [ + "▁synonym", + -13.288427352905273 + ], + [ + "▁ignite", + -13.28848934173584 + ], + [ + "▁Vernon", + -13.28849983215332 + ], + [ + "purchase", + -13.288524627685547 + ], + [ + "▁disponibilité", + -13.288662910461426 + ], + [ + "▁producţi", + -13.28909969329834 + ], + [ + "▁Pentagon", + -13.289329528808594 + ], + [ + "▁illumination", + -13.289329528808594 + ], + [ + "▁obsolete", + -13.289329528808594 + ], + [ + "▁unacceptable", + -13.28933048248291 + ], + [ + "Gleichzeitig", + -13.289938926696777 + ], + [ + "rutsch", + -13.290071487426758 + ], + [ + "viziuni", + -13.290409088134766 + ], + [ + "▁Nicaragua", + -13.29054069519043 + ], + [ + "▁hesitation", + -13.290541648864746 + ], + [ + "▁nascut", + -13.290545463562012 + ], + [ + "▁Warehouse", + -13.29055404663086 + ], + [ + "geboten", + -13.290558815002441 + ], + [ + "▁Lagos", + -13.290844917297363 + ], + [ + "produced", + -13.290874481201172 + ], + [ + "cativa", + -13.291309356689453 + ], + [ + "▁Tracy", + -13.291326522827148 + ], + [ + "Projekt", + -13.291468620300293 + ], + [ + "▁malaria", + -13.291692733764648 + ], + [ + "▁Baldwin", + -13.291755676269531 + ], + [ + "Take", + -13.291791915893555 + ], + [ + "▁fluctuations", + -13.291844367980957 + ], + [ + "▁titular", + -13.29194450378418 + ], + [ + "bmw", + -13.291976928710938 + ], + [ + "▁brevet", + -13.29202651977539 + ], + [ + "étapes", + -13.292173385620117 + ], + [ + "wikipedia", + -13.292373657226562 + ], + [ + "▁corporal", + -13.292424201965332 + ], + [ + "▁Schönheit", + -13.2926664352417 + ], + [ + "utilizatorii", + -13.292695999145508 + ], + [ + "INFO", + -13.292807579040527 + ], + [ + "▁formularul", + -13.292900085449219 + ], + [ + "femi", + -13.292959213256836 + ], + [ + "Konferenz", + -13.29296875 + ], + [ + "▁carnival", + -13.29296875 + ], + [ + "▁Kräuter", + -13.292969703674316 + ], + [ + "▁gelernt", 
+ -13.292981147766113 + ], + [ + "▁Sherman", + -13.293017387390137 + ], + [ + "▁persistence", + -13.293289184570312 + ], + [ + "▁Behörden", + -13.293577194213867 + ], + [ + "▁Frühjahr", + -13.293578147888184 + ], + [ + "▁Guvern", + -13.293649673461914 + ], + [ + "interpreting", + -13.293878555297852 + ], + [ + "▁nommé", + -13.294021606445312 + ], + [ + "consult", + -13.294035911560059 + ], + [ + "▁obligaţi", + -13.294184684753418 + ], + [ + "▁Newspaper", + -13.2942476272583 + ], + [ + "(2005)", + -13.294515609741211 + ], + [ + "pumped", + -13.294614791870117 + ], + [ + "▁autoritati", + -13.294634819030762 + ], + [ + "▁aplicatii", + -13.294644355773926 + ], + [ + "▁verhindert", + -13.294794082641602 + ], + [ + "▁évident", + -13.294794082641602 + ], + [ + "▁getrennt", + -13.294795036315918 + ], + [ + "▁Encourage", + -13.295403480529785 + ], + [ + "▁lurk", + -13.295432090759277 + ], + [ + "▁condemned", + -13.295455932617188 + ], + [ + "▁4:30", + -13.295502662658691 + ], + [ + "labelled", + -13.29576587677002 + ], + [ + "ordinea", + -13.295899391174316 + ], + [ + "▁pantofi", + -13.296012878417969 + ], + [ + "Default", + -13.296042442321777 + ], + [ + "▁beruh", + -13.296120643615723 + ], + [ + "/01/", + -13.296268463134766 + ], + [ + "league", + -13.296503067016602 + ], + [ + "▁couvert", + -13.296524047851562 + ], + [ + "▁competencies", + -13.296622276306152 + ], + [ + "▁mozzarella", + -13.296622276306152 + ], + [ + "jihad", + -13.29662799835205 + ], + [ + "▁gossip", + -13.29662799835205 + ], + [ + "▁Omaha", + -13.296628952026367 + ], + [ + "▁coincidence", + -13.296669960021973 + ], + [ + "▁Pinot", + -13.296710968017578 + ], + [ + "dotted", + -13.296789169311523 + ], + [ + "schilder", + -13.297197341918945 + ], + [ + "▁Munte", + -13.297224998474121 + ], + [ + "▁Vermieter", + -13.297232627868652 + ], + [ + "▁britannique", + -13.297232627868652 + ], + [ + "▁comentariu", + -13.297235488891602 + ], + [ + "abonnement", + -13.29725456237793 + ], + [ + "▁inventive", + -13.29727840423584 + ], + [ + "complie", + -13.297279357910156 + ], + [ + "composée", + -13.29734992980957 + ], + [ + "▁glatt", + -13.297684669494629 + ], + [ + "adorned", + -13.297842979431152 + ], + [ + "▁Opportunities", + -13.297842979431152 + ], + [ + "▁equilibrium", + -13.297842979431152 + ], + [ + "▁persuasive", + -13.297842979431152 + ], + [ + "▁achiziţi", + -13.297843933105469 + ], + [ + "▁déterminer", + -13.297843933105469 + ], + [ + "▁fleece", + -13.297857284545898 + ], + [ + "▁ivory", + -13.29786205291748 + ], + [ + "▁Genuss", + -13.297900199890137 + ], + [ + "Thousands", + -13.297930717468262 + ], + [ + "▁izolat", + -13.297965049743652 + ], + [ + "▁symbolize", + -13.298033714294434 + ], + [ + "gâteau", + -13.298051834106445 + ], + [ + "▁relații", + -13.298062324523926 + ], + [ + "▁Classroom", + -13.298144340515137 + ], + [ + "settlers", + -13.298155784606934 + ], + [ + "▁vremuri", + -13.298195838928223 + ], + [ + "▁Serial", + -13.29838752746582 + ], + [ + "▁boite", + -13.298399925231934 + ], + [ + "équivalent", + -13.298453330993652 + ], + [ + "▁benutzen", + -13.298454284667969 + ], + [ + "▁Recomand", + -13.298462867736816 + ], + [ + "▁Sinai", + -13.298968315124512 + ], + [ + "▁Advertise", + -13.29906940460205 + ], + [ + "▁Thermal", + -13.299206733703613 + ], + [ + "fiance", + -13.299471855163574 + ], + [ + "▁universitaire", + -13.299683570861816 + ], + [ + "▁rivière", + -13.299793243408203 + ], + [ + "▁reimburse", + -13.299907684326172 + ], + [ + "ţara", + -13.299932479858398 + ], + [ + "tician", + -13.30002498626709 + ], + 
[ + "intelligence", + -13.300041198730469 + ], + [ + "▁abgestimmt", + -13.300288200378418 + ], + [ + "▁compliqué", + -13.300288200378418 + ], + [ + "▁succulent", + -13.300297737121582 + ], + [ + "opéra", + -13.300395011901855 + ], + [ + "7-9", + -13.300456047058105 + ], + [ + "▁pierderi", + -13.300654411315918 + ], + [ + "extinction", + -13.30090045928955 + ], + [ + "▁Zweifel", + -13.30103874206543 + ], + [ + "ATCH", + -13.30112361907959 + ], + [ + "10,000", + -13.301222801208496 + ], + [ + "▁uninterrupted", + -13.301513671875 + ], + [ + "▁Eigentum", + -13.301517486572266 + ], + [ + "▁Utility", + -13.301517486572266 + ], + [ + "ско", + -13.301529884338379 + ], + [ + "▁tornado", + -13.301544189453125 + ], + [ + "▁Güte", + -13.301727294921875 + ], + [ + "▁pertain", + -13.301923751831055 + ], + [ + "painters", + -13.301993370056152 + ], + [ + "Help", + -13.3021240234375 + ], + [ + "▁străinătate", + -13.30212688446045 + ], + [ + "▁stammen", + -13.302170753479004 + ], + [ + "opposition", + -13.302229881286621 + ], + [ + "▁rhino", + -13.302233695983887 + ], + [ + "intervenir", + -13.302427291870117 + ], + [ + "▁hyperlink", + -13.302441596984863 + ], + [ + "höchst", + -13.302518844604492 + ], + [ + "roach", + -13.302627563476562 + ], + [ + "wSt", + -13.302687644958496 + ], + [ + "▁monastery", + -13.302740097045898 + ], + [ + "▁algae", + -13.302754402160645 + ], + [ + "▁shaving", + -13.302757263183594 + ], + [ + "présentent", + -13.302804946899414 + ], + [ + "Africa", + -13.302860260009766 + ], + [ + "eigener", + -13.303047180175781 + ], + [ + "▁glace", + -13.303153991699219 + ], + [ + "▁discurs", + -13.303179740905762 + ], + [ + "▁autograph", + -13.303204536437988 + ], + [ + "▁Conflict", + -13.303359031677246 + ], + [ + "▁școli", + -13.303411483764648 + ], + [ + "▁excerpt", + -13.303617477416992 + ], + [ + "correlated", + -13.303628921508789 + ], + [ + "empel", + -13.303841590881348 + ], + [ + "cryptocurrencies", + -13.30396842956543 + ], + [ + "▁symposium", + -13.30396842956543 + ], + [ + "▁gewohnt", + -13.303994178771973 + ], + [ + "PTSD", + -13.304070472717285 + ], + [ + "▁harmonic", + -13.304166793823242 + ], + [ + "discarded", + -13.304282188415527 + ], + [ + "▁Flint", + -13.304359436035156 + ], + [ + "Russia", + -13.304422378540039 + ], + [ + "▁ședinț", + -13.304583549499512 + ], + [ + "▁accusations", + -13.304727554321289 + ], + [ + "▁încălc", + -13.304827690124512 + ], + [ + "sendung", + -13.305152893066406 + ], + [ + "▁Chiropractic", + -13.305197715759277 + ], + [ + "▁excepți", + -13.305201530456543 + ], + [ + "▁proclaim", + -13.305201530456543 + ], + [ + "▁Flexible", + -13.305295944213867 + ], + [ + "▁Hüt", + -13.30538272857666 + ], + [ + "▁Baltic", + -13.30539608001709 + ], + [ + "▁inaltime", + -13.30553913116455 + ], + [ + "▁montré", + -13.305868148803711 + ], + [ + "exécution", + -13.305898666381836 + ], + [ + "partei", + -13.305961608886719 + ], + [ + "▁specifie", + -13.306072235107422 + ], + [ + "▁Jackpot", + -13.306105613708496 + ], + [ + "▁stumble", + -13.306134223937988 + ], + [ + "▁individuel", + -13.306161880493164 + ], + [ + "▁Veteran", + -13.306217193603516 + ], + [ + "▁Supplies", + -13.306428909301758 + ], + [ + "▁excavation", + -13.306428909301758 + ], + [ + "▁Libraries", + -13.306469917297363 + ], + [ + "▁prénom", + -13.306476593017578 + ], + [ + "WOOD", + -13.30650806427002 + ], + [ + "meciul", + -13.306917190551758 + ], + [ + "Chef", + -13.306938171386719 + ], + [ + "▁SUPER", + -13.306940078735352 + ], + [ + "Appeals", + -13.30696964263916 + ], + [ + "terapia", + 
-13.307113647460938 + ], + [ + "▁relatii", + -13.30713939666748 + ], + [ + "modifying", + -13.30748462677002 + ], + [ + "▁Regulament", + -13.307662010192871 + ], + [ + "▁bănci", + -13.307662963867188 + ], + [ + "▁agility", + -13.307666778564453 + ], + [ + "▁Magnetic", + -13.307674407958984 + ], + [ + "▁piatra", + -13.30767822265625 + ], + [ + "▁Governance", + -13.307680130004883 + ], + [ + "▁clown", + -13.30772876739502 + ], + [ + "▁Choir", + -13.308337211608887 + ], + [ + "aujourd", + -13.308548927307129 + ], + [ + "▁vendeur", + -13.308732032775879 + ], + [ + "ndererseits", + -13.308859825134277 + ], + [ + "▁Bahrain", + -13.3088960647583 + ], + [ + "▁Timisoara", + -13.3088960647583 + ], + [ + "▁exklusive", + -13.3088960647583 + ], + [ + "▁Population", + -13.309001922607422 + ], + [ + "▁nepo", + -13.309073448181152 + ], + [ + "▁relish", + -13.309085845947266 + ], + [ + "▁Pumpkin", + -13.309571266174316 + ], + [ + "▁détente", + -13.309784889221191 + ], + [ + "▁episcop", + -13.309860229492188 + ], + [ + "patterned", + -13.309929847717285 + ], + [ + "▁THANK", + -13.310132026672363 + ], + [ + "▁Widerspruch", + -13.310132026672363 + ], + [ + "▁Crisis", + -13.310189247131348 + ], + [ + "▁goose", + -13.310226440429688 + ], + [ + "▁couture", + -13.310307502746582 + ], + [ + "▁hinweg", + -13.310446739196777 + ], + [ + "supplemental", + -13.310486793518066 + ], + [ + "shingles", + -13.31060791015625 + ], + [ + "investir", + -13.310635566711426 + ], + [ + "▁steriliz", + -13.310759544372559 + ], + [ + "tractors", + -13.310761451721191 + ], + [ + "cellules", + -13.31078815460205 + ], + [ + "▁Gloria", + -13.310888290405273 + ], + [ + "▁teilnehmen", + -13.311092376708984 + ], + [ + "companiile", + -13.311248779296875 + ], + [ + "surfacing", + -13.311279296875 + ], + [ + "▁nostalgic", + -13.311368942260742 + ], + [ + "▁Badezimmer", + -13.311369895935059 + ], + [ + "▁conjoint", + -13.311370849609375 + ], + [ + "vacancy", + -13.31145191192627 + ], + [ + "▁homeland", + -13.311582565307617 + ], + [ + "▁Abschnitt", + -13.311625480651855 + ], + [ + "Cartea", + -13.311653137207031 + ], + [ + "SIA", + -13.311782836914062 + ], + [ + "▁explode", + -13.311786651611328 + ], + [ + "fostering", + -13.311959266662598 + ], + [ + "▁ceilalti", + -13.31198787689209 + ], + [ + "▁gentil", + -13.31214714050293 + ], + [ + "oplasty", + -13.31218433380127 + ], + [ + "bodied", + -13.312424659729004 + ], + [ + "▁1906", + -13.312499046325684 + ], + [ + "▁BlackBerry", + -13.312607765197754 + ], + [ + "▁Presbyterian", + -13.312607765197754 + ], + [ + "▁berücksichtigt", + -13.312607765197754 + ], + [ + "▁compartiment", + -13.312607765197754 + ], + [ + "▁compulsory", + -13.312607765197754 + ], + [ + "Millennial", + -13.312609672546387 + ], + [ + "▁sanitar", + -13.312638282775879 + ], + [ + "▁stink", + -13.312975883483887 + ], + [ + "lius", + -13.313047409057617 + ], + [ + "thankfully", + -13.313136100769043 + ], + [ + "modalité", + -13.313173294067383 + ], + [ + "▁cunoaște", + -13.313226699829102 + ], + [ + "Infrastruktur", + -13.313227653503418 + ], + [ + "▁studenți", + -13.313253402709961 + ], + [ + "Bref", + -13.313270568847656 + ], + [ + "London", + -13.31360149383545 + ], + [ + "▁Arduino", + -13.313847541809082 + ], + [ + "▁cilantro", + -13.313847541809082 + ], + [ + "▁Rafael", + -13.313848495483398 + ], + [ + "▁untersucht", + -13.313861846923828 + ], + [ + "▁martyr", + -13.31389331817627 + ], + [ + "▁Mormon", + -13.313984870910645 + ], + [ + "▁wicket", + -13.313996315002441 + ], + [ + "cherished", + -13.314335823059082 + ], + [ + 
"liquid", + -13.314417839050293 + ], + [ + "▁dorinț", + -13.314571380615234 + ], + [ + "lehnt", + -13.314717292785645 + ], + [ + "meisterschaft", + -13.31493091583252 + ], + [ + "fondateur", + -13.314971923828125 + ], + [ + "câble", + -13.315078735351562 + ], + [ + "▁erreichbar", + -13.315091133117676 + ], + [ + "▁footsteps", + -13.315094947814941 + ], + [ + "▁Kloster", + -13.31519889831543 + ], + [ + "▁multiplayer", + -13.315218925476074 + ], + [ + "▁substitu", + -13.315276145935059 + ], + [ + "▁Frisch", + -13.315526962280273 + ], + [ + "▁arsenal", + -13.315712928771973 + ], + [ + "explication", + -13.315866470336914 + ], + [ + "▁conexiun", + -13.315986633300781 + ], + [ + "muddy", + -13.316045761108398 + ], + [ + "▁Reifen", + -13.316120147705078 + ], + [ + "auraient", + -13.316132545471191 + ], + [ + "▁biologic", + -13.316136360168457 + ], + [ + "▁acquainted", + -13.316332817077637 + ], + [ + "▁shelving", + -13.316341400146484 + ], + [ + "Stunning", + -13.316373825073242 + ], + [ + "▁Clothing", + -13.316394805908203 + ], + [ + "▁kidding", + -13.316431999206543 + ], + [ + "excellent", + -13.316452026367188 + ], + [ + "▁susțin", + -13.316487312316895 + ], + [ + "bătut", + -13.316502571105957 + ], + [ + "elusive", + -13.3165283203125 + ], + [ + "werbung", + -13.316743850708008 + ], + [ + "slipping", + -13.316813468933105 + ], + [ + "▁configura", + -13.316926956176758 + ], + [ + "▁proaspat", + -13.31695556640625 + ], + [ + "▁apporté", + -13.317120552062988 + ], + [ + "▁démarr", + -13.317328453063965 + ], + [ + "Spezialist", + -13.317578315734863 + ], + [ + "▁obligați", + -13.317578315734863 + ], + [ + "▁societăți", + -13.317578315734863 + ], + [ + "▁malpractice", + -13.31757926940918 + ], + [ + "Hundreds", + -13.317609786987305 + ], + [ + "▁3:1", + -13.318138122558594 + ], + [ + "▁computation", + -13.31817626953125 + ], + [ + "▁Heilig", + -13.318528175354004 + ], + [ + "▁Helsinki", + -13.318824768066406 + ], + [ + "▁firefighters", + -13.318824768066406 + ], + [ + "▁obedience", + -13.318824768066406 + ], + [ + "▁evacuate", + -13.318825721740723 + ], + [ + "▁Floyd", + -13.318840026855469 + ], + [ + "▁Disneyland", + -13.318859100341797 + ], + [ + "Cathy", + -13.319069862365723 + ], + [ + "▁Broken", + -13.319278717041016 + ], + [ + "cript", + -13.319952011108398 + ], + [ + "▁Gewähr", + -13.320073127746582 + ], + [ + "▁embarrassed", + -13.320073127746582 + ], + [ + "▁Leicht", + -13.32007884979248 + ], + [ + "▁témoign", + -13.320379257202148 + ], + [ + "▁viteze", + -13.3206148147583 + ], + [ + "▁hallmark", + -13.320731163024902 + ], + [ + "uploads", + -13.32082462310791 + ], + [ + "▁Submission", + -13.320929527282715 + ], + [ + "▁croissant", + -13.321049690246582 + ], + [ + "awning", + -13.32105827331543 + ], + [ + "detecting", + -13.321198463439941 + ], + [ + "▁Bahamas", + -13.321322441101074 + ], + [ + "▁Kathleen", + -13.321325302124023 + ], + [ + "▁latch", + -13.321377754211426 + ], + [ + "▁pronounce", + -13.321380615234375 + ], + [ + "▁choke", + -13.321428298950195 + ], + [ + "▁$50,000", + -13.3215970993042 + ], + [ + "▁historische", + -13.321642875671387 + ], + [ + "jugé", + -13.321829795837402 + ], + [ + "▁MasterCard", + -13.321949005126953 + ], + [ + "▁Horror", + -13.321955680847168 + ], + [ + "spoiled", + -13.321958541870117 + ], + [ + "▁apariți", + -13.32202434539795 + ], + [ + "geschaltet", + -13.3225736618042 + ], + [ + "▁Londra", + -13.322578430175781 + ], + [ + "viction", + -13.322580337524414 + ], + [ + "▁Disaster", + -13.322593688964844 + ], + [ + "▁desigur", + -13.322601318359375 + 
], + [ + "▁substanț", + -13.322601318359375 + ], + [ + "▁compiler", + -13.322613716125488 + ], + [ + "▁vanzari", + -13.32262897491455 + ], + [ + "▁Simulation", + -13.322669982910156 + ], + [ + "Occasionally", + -13.322842597961426 + ], + [ + "Seite", + -13.322884559631348 + ], + [ + "Linked", + -13.322938919067383 + ], + [ + "Roll", + -13.323015213012695 + ], + [ + "▁trajet", + -13.323244094848633 + ], + [ + "Molecular", + -13.323834419250488 + ], + [ + "▁pragmatic", + -13.323843002319336 + ], + [ + "judecată", + -13.323915481567383 + ], + [ + "ров", + -13.32400894165039 + ], + [ + "serrurerie", + -13.324024200439453 + ], + [ + "▁reconstruct", + -13.324129104614258 + ], + [ + "▁heureuse", + -13.324179649353027 + ], + [ + "▁knight", + -13.32422924041748 + ], + [ + "knowingly", + -13.324431419372559 + ], + [ + "▁perspectiva", + -13.324453353881836 + ], + [ + "ordinary", + -13.324604034423828 + ], + [ + "▁chaudière", + -13.324721336364746 + ], + [ + "Neill", + -13.324727058410645 + ], + [ + "cellulose", + -13.325080871582031 + ], + [ + "▁Delicious", + -13.325080871582031 + ], + [ + "▁incearca", + -13.325080871582031 + ], + [ + "▁retrospective", + -13.325080871582031 + ], + [ + "▁mundane", + -13.325081825256348 + ], + [ + "▁definiert", + -13.32508659362793 + ], + [ + "▁cockpit", + -13.325088500976562 + ], + [ + "Aktionen", + -13.325363159179688 + ], + [ + "▁distanț", + -13.325654029846191 + ], + [ + "▁diplôme", + -13.325708389282227 + ], + [ + "prepaid", + -13.325737953186035 + ], + [ + "▁Tabellen", + -13.325758934020996 + ], + [ + "▁economie", + -13.325770378112793 + ], + [ + "December", + -13.325826644897461 + ], + [ + "Punkten", + -13.32613754272461 + ], + [ + "▁Punch", + -13.32614517211914 + ], + [ + "Martin", + -13.326154708862305 + ], + [ + "▁Espresso", + -13.326314926147461 + ], + [ + "▁ubiquitous", + -13.326335906982422 + ], + [ + "▁Mongolia", + -13.326337814331055 + ], + [ + "▁collabor", + -13.326635360717773 + ], + [ + "▁Vordergrund", + -13.32696533203125 + ], + [ + "cameră", + -13.327091217041016 + ], + [ + "represented", + -13.327268600463867 + ], + [ + "▁AUTO", + -13.327446937561035 + ], + [ + "▁Ofert", + -13.327542304992676 + ], + [ + "neig", + -13.327593803405762 + ], + [ + "▁Hazard", + -13.327595710754395 + ], + [ + "▁Constanta", + -13.327596664428711 + ], + [ + "▁tumour", + -13.32759952545166 + ], + [ + "▁Neighborhood", + -13.327603340148926 + ], + [ + "▁detaliat", + -13.327619552612305 + ], + [ + "▁extraordinaire", + -13.327665328979492 + ], + [ + "▁Therapeutic", + -13.327686309814453 + ], + [ + "predicting", + -13.327693939208984 + ], + [ + "▁institutii", + -13.32776165008545 + ], + [ + "ifizierung", + -13.327797889709473 + ], + [ + "wählt", + -13.328207015991211 + ], + [ + "▁remarquable", + -13.32822322845459 + ], + [ + "Invent", + -13.328512191772461 + ], + [ + "▁foloseșt", + -13.328514099121094 + ], + [ + "öfte", + -13.328703880310059 + ], + [ + "▁discreet", + -13.328853607177734 + ], + [ + "▁Flickr", + -13.32885456085205 + ], + [ + "▁trésor", + -13.328856468200684 + ], + [ + "▁steroids", + -13.328872680664062 + ], + [ + "▁personnalité", + -13.328953742980957 + ], + [ + "▁Krankenhaus", + -13.32901668548584 + ], + [ + "▁affordability", + -13.329218864440918 + ], + [ + "deuten", + -13.329398155212402 + ], + [ + "Detailed", + -13.329412460327148 + ], + [ + "Walk", + -13.329444885253906 + ], + [ + "▁parallèle", + -13.329483032226562 + ], + [ + "thèse", + -13.329649925231934 + ], + [ + "▁gefördert", + -13.330117225646973 + ], + [ + "Greeting", + -13.33014965057373 + ], + [ + 
"gelistet", + -13.330172538757324 + ], + [ + "▁chlorine", + -13.330392837524414 + ], + [ + "behält", + -13.33039665222168 + ], + [ + "emption", + -13.330435752868652 + ], + [ + "▁mobilité", + -13.330601692199707 + ], + [ + "▁randonnée", + -13.330668449401855 + ], + [ + "habitant", + -13.330718040466309 + ], + [ + "zilla", + -13.331082344055176 + ], + [ + "▁Lili", + -13.331160545349121 + ], + [ + "▁répét", + -13.331341743469238 + ], + [ + "trucât", + -13.331376075744629 + ], + [ + "▁Hospice", + -13.331376075744629 + ], + [ + "▁grassroots", + -13.331377029418945 + ], + [ + "▁affiché", + -13.331393241882324 + ], + [ + "pears", + -13.331470489501953 + ], + [ + "▁linistit", + -13.331497192382812 + ], + [ + "▁Patron", + -13.331552505493164 + ], + [ + "▁Stalin", + -13.331626892089844 + ], + [ + "▁închiri", + -13.331751823425293 + ], + [ + "▁Apostol", + -13.332018852233887 + ], + [ + "▁poudre", + -13.332246780395508 + ], + [ + "▁piscin", + -13.332419395446777 + ], + [ + "merlin", + -13.33259391784668 + ], + [ + "limited", + -13.33260726928711 + ], + [ + "▁métallique", + -13.332639694213867 + ], + [ + "gazebo", + -13.33267879486084 + ], + [ + "weilige", + -13.332718849182129 + ], + [ + "prosecutors", + -13.33278751373291 + ], + [ + "Expert", + -13.33314323425293 + ], + [ + "Assemblée", + -13.333271980285645 + ], + [ + "▁fauna", + -13.333285331726074 + ], + [ + "▁Turtle", + -13.333353996276855 + ], + [ + "▁Consortium", + -13.333905220031738 + ], + [ + "▁assemblies", + -13.333905220031738 + ], + [ + "▁trajectory", + -13.333905220031738 + ], + [ + "▁Vineyard", + -13.333906173706055 + ], + [ + "▁Mehrwert", + -13.334037780761719 + ], + [ + "▁sunflower", + -13.334043502807617 + ], + [ + "develop", + -13.334060668945312 + ], + [ + "▁heroic", + -13.334100723266602 + ], + [ + "▁riscuri", + -13.334151268005371 + ], + [ + "oeuf", + -13.334300994873047 + ], + [ + "influence", + -13.334452629089355 + ], + [ + "▁Voraussetzung", + -13.334500312805176 + ], + [ + "utoritatea", + -13.334518432617188 + ], + [ + "Produsul", + -13.334654808044434 + ], + [ + "▁gewährleistet", + -13.335171699523926 + ], + [ + "▁brûl", + -13.335175514221191 + ], + [ + "▁Column", + -13.335184097290039 + ], + [ + "▁trousers", + -13.335209846496582 + ], + [ + "▁posterior", + -13.33521556854248 + ], + [ + "glyph", + -13.335251808166504 + ], + [ + "▁Happen", + -13.335280418395996 + ], + [ + "▁créateur", + -13.335667610168457 + ], + [ + "▁apostle", + -13.335898399353027 + ], + [ + "▁padding", + -13.335907936096191 + ], + [ + "▁Digitalisierung", + -13.335908889770508 + ], + [ + "▁Laurie", + -13.335915565490723 + ], + [ + "▁Erwerb", + -13.336065292358398 + ], + [ + "▁bătrân", + -13.336440086364746 + ], + [ + "▁harmonious", + -13.336441040039062 + ], + [ + "▁ailments", + -13.336456298828125 + ], + [ + "▁Venue", + -13.33650016784668 + ], + [ + "▁Motorcycle", + -13.336523056030273 + ], + [ + "▁cortex", + -13.336551666259766 + ], + [ + "▁Sunrise", + -13.336636543273926 + ], + [ + "Software", + -13.336775779724121 + ], + [ + "▁advocat", + -13.336934089660645 + ], + [ + "essentiellement", + -13.337422370910645 + ], + [ + "•", + -13.337494850158691 + ], + [ + "părut", + -13.337522506713867 + ], + [ + "▁Suffolk", + -13.337711334228516 + ], + [ + "▁righteousness", + -13.337711334228516 + ], + [ + "▁Shirley", + -13.337712287902832 + ], + [ + "▁Famous", + -13.337749481201172 + ], + [ + "▁emulate", + -13.337788581848145 + ], + [ + "vermögen", + -13.33788776397705 + ], + [ + "generated", + -13.337963104248047 + ], + [ + "Ecole", + -13.337977409362793 + ], + [ 
+ "▁managerial", + -13.338086128234863 + ], + [ + "believe", + -13.338091850280762 + ], + [ + "▁récupére", + -13.338348388671875 + ], + [ + "▁recens", + -13.338531494140625 + ], + [ + "▁Barrett", + -13.338778495788574 + ], + [ + "▁courageous", + -13.338814735412598 + ], + [ + "9.95", + -13.338961601257324 + ], + [ + "▁Odyssey", + -13.338982582092285 + ], + [ + "▁Violence", + -13.338982582092285 + ], + [ + "▁concasseur", + -13.338982582092285 + ], + [ + "▁evacuation", + -13.338982582092285 + ], + [ + "▁kontinuierlich", + -13.338982582092285 + ], + [ + "▁epidemi", + -13.3389892578125 + ], + [ + "▁disconnected", + -13.339197158813477 + ], + [ + "frucht", + -13.339339256286621 + ], + [ + "Trustees", + -13.339348793029785 + ], + [ + "▁Massiv", + -13.339459419250488 + ], + [ + "gebucht", + -13.339473724365234 + ], + [ + "stütze", + -13.339526176452637 + ], + [ + "▁febr", + -13.339741706848145 + ], + [ + "honoured", + -13.339743614196777 + ], + [ + "▁digitiz", + -13.340079307556152 + ], + [ + "Image", + -13.34021282196045 + ], + [ + "▁Brunswick", + -13.34025764465332 + ], + [ + "▁Therapist", + -13.34026050567627 + ], + [ + "accessoire", + -13.340264320373535 + ], + [ + "▁croqu", + -13.340291023254395 + ], + [ + "Pflanz", + -13.34052848815918 + ], + [ + "dragging", + -13.340536117553711 + ], + [ + "▁Facilit", + -13.340750694274902 + ], + [ + "soucis", + -13.340765953063965 + ], + [ + "Asadar", + -13.34081745147705 + ], + [ + "▁Thames", + -13.341021537780762 + ], + [ + "▁cariera", + -13.341116905212402 + ], + [ + "▁mercury", + -13.341530799865723 + ], + [ + "▁Blessed", + -13.341533660888672 + ], + [ + "▁Whitney", + -13.341630935668945 + ], + [ + "▁géant", + -13.341926574707031 + ], + [ + "▁coordonnée", + -13.342217445373535 + ], + [ + "oidal", + -13.342623710632324 + ], + [ + "Wohnungen", + -13.342696189880371 + ], + [ + "▁Spectrum", + -13.34280776977539 + ], + [ + "▁Avengers", + -13.342808723449707 + ], + [ + "▁Gloucester", + -13.342808723449707 + ], + [ + "▁nützlich", + -13.342811584472656 + ], + [ + "▁toothbrush", + -13.342830657958984 + ], + [ + "▁Vanessa", + -13.342843055725098 + ], + [ + "Saxon", + -13.342947959899902 + ], + [ + "▁comunități", + -13.343165397644043 + ], + [ + "reprezentanţi", + -13.343175888061523 + ], + [ + "▁întâlnire", + -13.343225479125977 + ], + [ + "delve", + -13.343234062194824 + ], + [ + "▁technologique", + -13.343452453613281 + ], + [ + "Describe", + -13.343466758728027 + ], + [ + "▁constient", + -13.343501091003418 + ], + [ + "gestalt", + -13.343600273132324 + ], + [ + "▁Tribune", + -13.344090461730957 + ], + [ + "▁fiberglass", + -13.34412956237793 + ], + [ + "verbindung", + -13.344210624694824 + ], + [ + "sacrificing", + -13.344351768493652 + ], + [ + "▁Pablo", + -13.344470024108887 + ], + [ + "▁adanc", + -13.34525203704834 + ], + [ + "omia", + -13.345309257507324 + ], + [ + "hâte", + -13.345317840576172 + ], + [ + "▁Sanctuary", + -13.345366477966309 + ], + [ + "▁accolade", + -13.345368385314941 + ], + [ + "▁Wurzel", + -13.345398902893066 + ], + [ + "▁spacing", + -13.345433235168457 + ], + [ + "▁bedeutend", + -13.345481872558594 + ], + [ + "▁biased", + -13.345499992370605 + ], + [ + "randomized", + -13.345747947692871 + ], + [ + "▁agenți", + -13.345856666564941 + ], + [ + "▁excepţi", + -13.346012115478516 + ], + [ + "▁fișier", + -13.346028327941895 + ], + [ + "▁fisier", + -13.34664535522461 + ], + [ + "irrespective", + -13.346648216247559 + ], + [ + "▁Gardner", + -13.34665584564209 + ], + [ + "▁aprecia", + -13.346884727478027 + ], + [ + "▁Klu", + 
-13.347082138061523 + ], + [ + "▁apropie", + -13.347535133361816 + ], + [ + "▁echival", + -13.347784042358398 + ], + [ + "tauchen", + -13.347862243652344 + ], + [ + "▁hauptsächlich", + -13.347930908203125 + ], + [ + "▁pollutants", + -13.347930908203125 + ], + [ + "▁mammals", + -13.347931861877441 + ], + [ + "▁Landwirtschaft", + -13.347936630249023 + ], + [ + "▁stăpân", + -13.34793758392334 + ], + [ + "▁Prüf", + -13.347990989685059 + ], + [ + "▁Motorsport", + -13.34807300567627 + ], + [ + "Leaving", + -13.348352432250977 + ], + [ + "schädigung", + -13.348573684692383 + ], + [ + "▁calendrier", + -13.348573684692383 + ], + [ + "plikation", + -13.348655700683594 + ], + [ + "▁DOE", + -13.348655700683594 + ], + [ + "ред", + -13.348966598510742 + ], + [ + "Jahr", + -13.34913444519043 + ], + [ + "▁entitlement", + -13.34921646118164 + ], + [ + "schuldig", + -13.349217414855957 + ], + [ + "▁Münster", + -13.349218368530273 + ], + [ + "pository", + -13.349451065063477 + ], + [ + "▁numero", + -13.350220680236816 + ], + [ + "▁entsprechen", + -13.350383758544922 + ], + [ + "▁astronaut", + -13.350502967834473 + ], + [ + "▁hexagon", + -13.350502967834473 + ], + [ + "▁DAMAGE", + -13.350503921508789 + ], + [ + "▁Quartz", + -13.350504875183105 + ], + [ + "▁rédaction", + -13.350504875183105 + ], + [ + "▁replenish", + -13.350508689880371 + ], + [ + "▁amoureux", + -13.350523948669434 + ], + [ + "▁opțiun", + -13.350616455078125 + ], + [ + "Custom", + -13.350622177124023 + ], + [ + "▁Telekom", + -13.350639343261719 + ], + [ + "▁RFID", + -13.351163864135742 + ], + [ + "▁Scorpio", + -13.351264953613281 + ], + [ + "▁thirst", + -13.35152816772461 + ], + [ + "▁Kosovo", + -13.351791381835938 + ], + [ + "▁precursor", + -13.351794242858887 + ], + [ + "▁sarbatori", + -13.351810455322266 + ], + [ + "▁Daisy", + -13.351828575134277 + ], + [ + "▁Dropbox", + -13.351898193359375 + ], + [ + "Smith", + -13.351949691772461 + ], + [ + "contabil", + -13.352191925048828 + ], + [ + "▁monnaie", + -13.352437973022461 + ], + [ + "capsul", + -13.352577209472656 + ], + [ + "treff", + -13.352760314941406 + ], + [ + "beauftragte", + -13.352761268615723 + ], + [ + "industrial", + -13.353006362915039 + ], + [ + "responsables", + -13.353010177612305 + ], + [ + "▁FIRST", + -13.353080749511719 + ], + [ + "▁crezut", + -13.35308837890625 + ], + [ + "▁reseller", + -13.353107452392578 + ], + [ + "▁direcți", + -13.353154182434082 + ], + [ + "mouvoir", + -13.353294372558594 + ], + [ + "▁Invite", + -13.353431701660156 + ], + [ + "▁constructii", + -13.353440284729004 + ], + [ + "▁oublié", + -13.353577613830566 + ], + [ + "găseșt", + -13.353687286376953 + ], + [ + "▁végét", + -13.353755950927734 + ], + [ + "idine", + -13.35385799407959 + ], + [ + "▁Ajout", + -13.353951454162598 + ], + [ + "▁Shelf", + -13.354195594787598 + ], + [ + "HALL", + -13.35422420501709 + ], + [ + "▁nostalgia", + -13.35437297821045 + ], + [ + "▁ottoman", + -13.35437297821045 + ], + [ + "▁ambalaj", + -13.354398727416992 + ], + [ + "municipiul", + -13.354405403137207 + ], + [ + "NOVA", + -13.354500770568848 + ], + [ + "▁disregard", + -13.354997634887695 + ], + [ + "▁bijuterii", + -13.355018615722656 + ], + [ + "▁sorgfältig", + -13.355018615722656 + ], + [ + "vraient", + -13.355307579040527 + ], + [ + "▁backsplash", + -13.355669975280762 + ], + [ + "▁nuisance", + -13.355679512023926 + ], + [ + "▁Territory", + -13.35568618774414 + ], + [ + "▁surprins", + -13.355693817138672 + ], + [ + "enchanting", + -13.35571002960205 + ], + [ + "trospecti", + -13.355847358703613 + ], + [ + "▁dvd", + 
-13.356199264526367 + ], + [ + "Totally", + -13.356329917907715 + ], + [ + "▁Edelstahl", + -13.35696029663086 + ], + [ + "▁sequencing", + -13.356961250305176 + ], + [ + "▁Circus", + -13.35696792602539 + ], + [ + "▁ashamed", + -13.35696792602539 + ], + [ + "▁horrific", + -13.357028007507324 + ], + [ + "▁taiat", + -13.357033729553223 + ], + [ + "▁Angehörige", + -13.357125282287598 + ], + [ + "Michel", + -13.357256889343262 + ], + [ + "▁communion", + -13.357298851013184 + ], + [ + "▁psiho", + -13.357378959655762 + ], + [ + "losigkeit", + -13.357405662536621 + ], + [ + "dipping", + -13.357512474060059 + ], + [ + "▁profesională", + -13.357608795166016 + ], + [ + "Indiferent", + -13.357609748840332 + ], + [ + "▁crestin", + -13.357723236083984 + ], + [ + "wholesome", + -13.357796669006348 + ], + [ + "▁Welfare", + -13.358257293701172 + ], + [ + "▁plentiful", + -13.358257293701172 + ], + [ + "▁Triumph", + -13.358258247375488 + ], + [ + "▁fascination", + -13.358260154724121 + ], + [ + "▁vicious", + -13.358291625976562 + ], + [ + "▁Höchst", + -13.358294486999512 + ], + [ + "▁Dunkel", + -13.358386039733887 + ], + [ + "▁harass", + -13.358406066894531 + ], + [ + "ambogia", + -13.358475685119629 + ], + [ + "▁synonymous", + -13.358598709106445 + ], + [ + "bottom", + -13.35879898071289 + ], + [ + "▁bénévole", + -13.358906745910645 + ], + [ + "▁suprafaț", + -13.358906745910645 + ], + [ + "▁umplut", + -13.358997344970703 + ], + [ + "▁Teddy", + -13.359162330627441 + ], + [ + "breathable", + -13.359292984008789 + ], + [ + "▁Toshiba", + -13.3595552444458 + ], + [ + "▁seismic", + -13.359569549560547 + ], + [ + "▁dringend", + -13.359583854675293 + ], + [ + "▁cultură", + -13.359585762023926 + ], + [ + "▁Waffen", + -13.359665870666504 + ], + [ + "▁Bubble", + -13.359702110290527 + ], + [ + "▁Brigade", + -13.359759330749512 + ], + [ + "▁Blatt", + -13.36012077331543 + ], + [ + "▁scénario", + -13.36020565032959 + ], + [ + "allah", + -13.360396385192871 + ], + [ + "▁superintendent", + -13.360855102539062 + ], + [ + "pflanzen", + -13.360856056213379 + ], + [ + "▁kurzfristig", + -13.360856056213379 + ], + [ + "▁raspberry", + -13.360876083374023 + ], + [ + "▁Evident", + -13.360904693603516 + ], + [ + "▁inutile", + -13.361076354980469 + ], + [ + "prouvé", + -13.361104011535645 + ], + [ + "▁obtien", + -13.36141300201416 + ], + [ + "▁Matthias", + -13.361506462097168 + ], + [ + "▁déclench", + -13.361506462097168 + ], + [ + "Situationen", + -13.361529350280762 + ], + [ + "▁Disclaimer", + -13.362156867980957 + ], + [ + "▁loneliness", + -13.362156867980957 + ], + [ + "▁Gothic", + -13.362164497375488 + ], + [ + "▁humility", + -13.362165451049805 + ], + [ + "▁machiaj", + -13.362175941467285 + ], + [ + "▁Sophia", + -13.362178802490234 + ], + [ + "▁Forecast", + -13.362265586853027 + ], + [ + "IBLE", + -13.362456321716309 + ], + [ + "ivism", + -13.362480163574219 + ], + [ + "israel", + -13.36278247833252 + ], + [ + "▁kümmern", + -13.362809181213379 + ], + [ + "▁verbreitet", + -13.362825393676758 + ], + [ + "▁capacitor", + -13.362832069396973 + ], + [ + "deprived", + -13.3634614944458 + ], + [ + "unbiased", + -13.3634614944458 + ], + [ + "▁Dominique", + -13.3634614944458 + ], + [ + "▁Bamboo", + -13.363462448120117 + ], + [ + "▁Heinrich", + -13.363465309143066 + ], + [ + "individualized", + -13.363550186157227 + ], + [ + "▁ansprechen", + -13.363776206970215 + ], + [ + "ordinaire", + -13.363801002502441 + ], + [ + "▁Ucraina", + -13.364112854003906 + ], + [ + "▁militare", + -13.364115715026855 + ], + [ + "massif", + -13.364352226257324 + 
], + [ + "▁emisiuni", + -13.364501953125 + ], + [ + "maladies", + -13.364622116088867 + ], + [ + "▁pneumonia", + -13.364765167236328 + ], + [ + "▁graffiti", + -13.364767074584961 + ], + [ + "▁Determine", + -13.3648099899292 + ], + [ + "▁Northwestern", + -13.364893913269043 + ], + [ + "▁grasimi", + -13.364897727966309 + ], + [ + "▁lebendig", + -13.364920616149902 + ], + [ + "▁cifre", + -13.364946365356445 + ], + [ + "▁accelerator", + -13.36533260345459 + ], + [ + "▁nib", + -13.365374565124512 + ], + [ + "▁Jocuri", + -13.365400314331055 + ], + [ + "▁außergewöhnlich", + -13.365402221679688 + ], + [ + "▁orchid", + -13.36542797088623 + ], + [ + "zugreifen", + -13.365530967712402 + ], + [ + "utilisent", + -13.365662574768066 + ], + [ + "▁nineteenth", + -13.366071701049805 + ], + [ + "improvisation", + -13.366072654724121 + ], + [ + "▁Disclosure", + -13.366072654724121 + ], + [ + "▁Überraschung", + -13.366072654724121 + ], + [ + "▁Casual", + -13.366093635559082 + ], + [ + "▁Witness", + -13.366093635559082 + ], + [ + "teacher", + -13.366125106811523 + ], + [ + "Printed", + -13.366129875183105 + ], + [ + "▁prețuri", + -13.366189956665039 + ], + [ + "rues", + -13.366216659545898 + ], + [ + "▁cerinte", + -13.366338729858398 + ], + [ + "rouvent", + -13.36662483215332 + ], + [ + "assembling", + -13.36673355102539 + ], + [ + "▁atenție", + -13.366769790649414 + ], + [ + "▁amintiri", + -13.366782188415527 + ], + [ + "▁sustinut", + -13.366805076599121 + ], + [ + "Digital", + -13.367257118225098 + ], + [ + "▁Deborah", + -13.36738109588623 + ], + [ + "gesichts", + -13.367382049560547 + ], + [ + "▁temperament", + -13.367440223693848 + ], + [ + "▁competency", + -13.367447853088379 + ], + [ + "▁dwarf", + -13.367515563964844 + ], + [ + "▁dureaz", + -13.367539405822754 + ], + [ + "habilit", + -13.367764472961426 + ], + [ + "leaned", + -13.3679838180542 + ], + [ + "▁illicit", + -13.368348121643066 + ], + [ + "Availability", + -13.368691444396973 + ], + [ + "▁Brașov", + -13.368691444396973 + ], + [ + "▁Pyramid", + -13.368691444396973 + ], + [ + "▁achievable", + -13.368691444396973 + ], + [ + "▁judiciaire", + -13.368691444396973 + ], + [ + "Übrigen", + -13.368693351745605 + ], + [ + "▁activism", + -13.368795394897461 + ], + [ + "▁boycott", + -13.368839263916016 + ], + [ + "Desigur", + -13.368927001953125 + ], + [ + "klingt", + -13.369264602661133 + ], + [ + "▁Leidenschaft", + -13.369346618652344 + ], + [ + "▁Richtig", + -13.369701385498047 + ], + [ + "▁Airbnb", + -13.370002746582031 + ], + [ + "▁învățământ", + -13.370002746582031 + ], + [ + "Kampagne", + -13.370004653930664 + ], + [ + "▁thumbnail", + -13.370014190673828 + ], + [ + "Bestimmungen", + -13.370016098022461 + ], + [ + "▁vollkommen", + -13.37001895904541 + ], + [ + "▁biomass", + -13.370027542114258 + ], + [ + "▁escalate", + -13.370030403137207 + ], + [ + "wächst", + -13.370085716247559 + ], + [ + "▁scăpa", + -13.370098114013672 + ], + [ + "▁résult", + -13.37014389038086 + ], + [ + "▁shrine", + -13.370217323303223 + ], + [ + "maximizing", + -13.370370864868164 + ], + [ + "avoue", + -13.370492935180664 + ], + [ + "dirigeants", + -13.370665550231934 + ], + [ + "▁cerveau", + -13.370672225952148 + ], + [ + "▁proast", + -13.370955467224121 + ], + [ + "▁contaminants", + -13.371325492858887 + ], + [ + "effectue", + -13.37151050567627 + ], + [ + "ediție", + -13.371539115905762 + ], + [ + "monetiz", + -13.371772766113281 + ], + [ + "▁deplasare", + -13.371976852416992 + ], + [ + "▁Sfant", + -13.37209415435791 + ], + [ + "ROOM", + -13.372113227844238 + ], + [ + 
"bushes", + -13.372151374816895 + ], + [ + "mairie", + -13.37251091003418 + ], + [ + "obligate", + -13.372528076171875 + ], + [ + "▁tug", + -13.372573852539062 + ], + [ + "▁Collector", + -13.372632026672363 + ], + [ + "▁annoyed", + -13.372633934020996 + ], + [ + "▁aerobic", + -13.372654914855957 + ], + [ + "▁integer", + -13.372830390930176 + ], + [ + "▁Upload", + -13.373249053955078 + ], + [ + "▁impartial", + -13.37346076965332 + ], + [ + "▁discuţi", + -13.373623847961426 + ], + [ + "gastrointestinal", + -13.37394905090332 + ], + [ + "▁chiropractor", + -13.37394905090332 + ], + [ + "▁treptat", + -13.373950004577637 + ], + [ + "▁fishermen", + -13.37395191192627 + ], + [ + "levitra", + -13.3739595413208 + ], + [ + "Gruppe", + -13.373964309692383 + ], + [ + "▁Apostle", + -13.373970985412598 + ], + [ + "▁conseillé", + -13.374068260192871 + ], + [ + "Isra", + -13.37421703338623 + ], + [ + "▁Persönlichkeit", + -13.374431610107422 + ], + [ + "▁cantitati", + -13.374459266662598 + ], + [ + "▁incredibil", + -13.374614715576172 + ], + [ + "▁Berater", + -13.374800682067871 + ], + [ + "▁propuneri", + -13.374835014343262 + ], + [ + "MEDIA", + -13.375236511230469 + ], + [ + "▁opaque", + -13.37526798248291 + ], + [ + "▁Nielsen", + -13.375269889831543 + ], + [ + "▁cartofi", + -13.375277519226074 + ], + [ + "▁Whale", + -13.37533950805664 + ], + [ + "erzeugen", + -13.375890731811523 + ], + [ + "▁knack", + -13.375931739807129 + ], + [ + "Kandidat", + -13.375936508178711 + ], + [ + "▁tradițional", + -13.375937461853027 + ], + [ + "zählige", + -13.375983238220215 + ], + [ + "▁Petroleum", + -13.376588821411133 + ], + [ + "▁deficiencies", + -13.376588821411133 + ], + [ + "▁persecution", + -13.376588821411133 + ], + [ + "▁zgomot", + -13.376588821411133 + ], + [ + "▁reiterate", + -13.376592636108398 + ], + [ + "▁Slice", + -13.376670837402344 + ], + [ + "▁envy", + -13.376704216003418 + ], + [ + "▁stomac", + -13.376851081848145 + ], + [ + "Donnell", + -13.376914978027344 + ], + [ + "▁primordial", + -13.377249717712402 + ], + [ + "reclining", + -13.377274513244629 + ], + [ + "PASS", + -13.377861976623535 + ], + [ + "▁Resistance", + -13.377910614013672 + ], + [ + "▁Widerruf", + -13.377911567687988 + ], + [ + "▁vodka", + -13.377911567687988 + ], + [ + "▁yolk", + -13.377912521362305 + ], + [ + "ollywood", + -13.377915382385254 + ], + [ + "▁truffle", + -13.377933502197266 + ], + [ + "▁Sänger", + -13.377955436706543 + ], + [ + "▁Kenntnis", + -13.377968788146973 + ], + [ + "▁Kiel", + -13.37803840637207 + ], + [ + "▁Mutual", + -13.378044128417969 + ], + [ + "▁saliva", + -13.37816047668457 + ], + [ + "▁renforce", + -13.378411293029785 + ], + [ + "▁mulch", + -13.378680229187012 + ], + [ + "▁reviste", + -13.378875732421875 + ], + [ + "lucrarea", + -13.378978729248047 + ], + [ + "▁multiply", + -13.379130363464355 + ], + [ + "▁marshmallow", + -13.379234313964844 + ], + [ + "▁Durchschnitt", + -13.379288673400879 + ], + [ + "▁Authorities", + -13.379426002502441 + ], + [ + "▁greed", + -13.379521369934082 + ], + [ + "Visiting", + -13.379638671875 + ], + [ + "Carlton", + -13.379727363586426 + ], + [ + "▁splend", + -13.37975025177002 + ], + [ + "▁Erkenntnisse", + -13.379898071289062 + ], + [ + "▁Russie", + -13.379916191101074 + ], + [ + "Agence", + -13.38007926940918 + ], + [ + "schickt", + -13.380288124084473 + ], + [ + "##", + -13.3804931640625 + ], + [ + "▁Erweiterung", + -13.380560874938965 + ], + [ + "▁Franchise", + -13.380560874938965 + ], + [ + "Dedicated", + -13.380563735961914 + ], + [ + "▁Wisdom", + -13.380569458007812 + ], + 
[ + "▁gagnant", + -13.380592346191406 + ], + [ + "planetary", + -13.380598068237305 + ], + [ + "▁affinity", + -13.380619049072266 + ], + [ + "▁préférence", + -13.380739212036133 + ], + [ + "▁intellect", + -13.380810737609863 + ], + [ + "▁Translat", + -13.380830764770508 + ], + [ + "▁Sultan", + -13.38089370727539 + ], + [ + "▁birouri", + -13.38101577758789 + ], + [ + "▁Academie", + -13.381224632263184 + ], + [ + "▁consequential", + -13.38138484954834 + ], + [ + "▁festgestellt", + -13.381402015686035 + ], + [ + "▁Chanel", + -13.381444931030273 + ], + [ + "▁soutenu", + -13.381875038146973 + ], + [ + "▁Montessori", + -13.381888389587402 + ], + [ + "▁equitable", + -13.381892204284668 + ], + [ + "▁théorie", + -13.381893157958984 + ], + [ + "▁primavara", + -13.3818941116333 + ], + [ + "▁Daughter", + -13.38189697265625 + ], + [ + "▁Dixon", + -13.381898880004883 + ], + [ + "▁unravel", + -13.38190746307373 + ], + [ + "Olimp", + -13.381915092468262 + ], + [ + "▁disturbed", + -13.381916999816895 + ], + [ + "▁novelty", + -13.382004737854004 + ], + [ + "synchronous", + -13.382113456726074 + ], + [ + "relevant", + -13.382166862487793 + ], + [ + "bourgeois", + -13.38251781463623 + ], + [ + "▁Parfum", + -13.38255500793457 + ], + [ + "▁Polonia", + -13.382563591003418 + ], + [ + "▁monoton", + -13.382781028747559 + ], + [ + "tratare", + -13.38302230834961 + ], + [ + "dumping", + -13.38318157196045 + ], + [ + "▁Bibliothek", + -13.383217811584473 + ], + [ + "▁Saskatchewan", + -13.383217811584473 + ], + [ + "▁experiential", + -13.383217811584473 + ], + [ + "▁verursacht", + -13.383217811584473 + ], + [ + "intègre", + -13.383218765258789 + ], + [ + "▁Intermediate", + -13.383275032043457 + ], + [ + "Israel", + -13.383476257324219 + ], + [ + "lucreaza", + -13.383495330810547 + ], + [ + "▁quantify", + -13.383862495422363 + ], + [ + "▁zahăr", + -13.383882522583008 + ], + [ + "▁încadr", + -13.383902549743652 + ], + [ + "Personalized", + -13.383946418762207 + ], + [ + "▁Chronic", + -13.384309768676758 + ], + [ + "hôpital", + -13.384549140930176 + ], + [ + "▁diskutiert", + -13.384549140930176 + ], + [ + "electrique", + -13.3848876953125 + ], + [ + "ethos", + -13.384978294372559 + ], + [ + "Nase", + -13.385059356689453 + ], + [ + "atmosphère", + -13.385214805603027 + ], + [ + "▁ungefähr", + -13.385215759277344 + ], + [ + "évaluer", + -13.385251998901367 + ], + [ + "▁scuz", + -13.385321617126465 + ], + [ + "haltige", + -13.38533878326416 + ], + [ + "January", + -13.38557243347168 + ], + [ + "▁Sharma", + -13.385603904724121 + ], + [ + "▁seizures", + -13.385881423950195 + ], + [ + "▁zucchini", + -13.385881423950195 + ], + [ + "▁Stadi", + -13.385885238647461 + ], + [ + "▁eccentric", + -13.385885238647461 + ], + [ + "▁offensichtlich", + -13.385909080505371 + ], + [ + "▁Irvine", + -13.385920524597168 + ], + [ + "cuprinse", + -13.38601303100586 + ], + [ + "▁Arbitr", + -13.386157035827637 + ], + [ + "Buenos", + -13.386183738708496 + ], + [ + "▁Shelter", + -13.386210441589355 + ], + [ + "CEPT", + -13.386454582214355 + ], + [ + "ouvri", + -13.386455535888672 + ], + [ + "acryl", + -13.386539459228516 + ], + [ + "▁Gourmet", + -13.38654899597168 + ], + [ + "scented", + -13.386595726013184 + ], + [ + "doubling", + -13.38659954071045 + ], + [ + "▁rafina", + -13.386608123779297 + ], + [ + "▁Vereinbarung", + -13.38721752166748 + ], + [ + "▁Dashboard", + -13.387218475341797 + ], + [ + "▁Sandwich", + -13.387218475341797 + ], + [ + "▁Riviera", + -13.387226104736328 + ], + [ + "échec", + -13.387237548828125 + ], + [ + "Giro", + 
-13.387253761291504 + ], + [ + "▁oasis", + -13.38725757598877 + ], + [ + "▁apology", + -13.3872709274292 + ], + [ + "▁YEAR", + -13.387272834777832 + ], + [ + "▁realtor", + -13.387504577636719 + ], + [ + "acheteur", + -13.38754653930664 + ], + [ + "▁larva", + -13.387613296508789 + ], + [ + "▁invitați", + -13.388097763061523 + ], + [ + "exhibiting", + -13.38830852508545 + ], + [ + "modernen", + -13.388331413269043 + ], + [ + "▁Collaboration", + -13.38855266571045 + ], + [ + "▁dezvălui", + -13.38855266571045 + ], + [ + "▁kiosk", + -13.38855266571045 + ], + [ + "▁Bermuda", + -13.388553619384766 + ], + [ + "Copiii", + -13.388564109802246 + ], + [ + "▁goddess", + -13.388581275939941 + ], + [ + "uplifting", + -13.388609886169434 + ], + [ + "▁simultan", + -13.388808250427246 + ], + [ + "▁episod", + -13.388884544372559 + ], + [ + "▁Braşov", + -13.38922119140625 + ], + [ + "cunoscută", + -13.389634132385254 + ], + [ + "▁Cherokee", + -13.389890670776367 + ], + [ + "▁Kazakhstan", + -13.389890670776367 + ], + [ + "▁Lauderdale", + -13.389890670776367 + ], + [ + "▁închisoare", + -13.389898300170898 + ], + [ + "▁Christchurch", + -13.389934539794922 + ], + [ + "▁influenţ", + -13.389982223510742 + ], + [ + "▁Meghan", + -13.390019416809082 + ], + [ + "▁Dienstleistung", + -13.390557289123535 + ], + [ + "▁cladiri", + -13.390564918518066 + ], + [ + "▁evrei", + -13.391148567199707 + ], + [ + "▁oatmeal", + -13.391230583190918 + ], + [ + "▁chronique", + -13.3912353515625 + ], + [ + "▁associée", + -13.391264915466309 + ], + [ + "▁Goose", + -13.391283988952637 + ], + [ + "gänz", + -13.391855239868164 + ], + [ + "▁Blätter", + -13.391901969909668 + ], + [ + "▁jurnalist", + -13.392212867736816 + ], + [ + "cedat", + -13.392263412475586 + ], + [ + "nommée", + -13.392315864562988 + ], + [ + "écrivain", + -13.392572402954102 + ], + [ + "▁epoxy", + -13.392577171325684 + ], + [ + "▁verlangt", + -13.392590522766113 + ], + [ + "Störung", + -13.392708778381348 + ], + [ + "▁Doyle", + -13.392729759216309 + ], + [ + "▁Philharmoni", + -13.392844200134277 + ], + [ + "▁déclare", + -13.393044471740723 + ], + [ + "effort", + -13.393045425415039 + ], + [ + "ström", + -13.393118858337402 + ], + [ + "▁cunoaşte", + -13.393244743347168 + ], + [ + "▁gigantic", + -13.3932466506958 + ], + [ + "któ", + -13.393378257751465 + ], + [ + "▁ilustr", + -13.393529891967773 + ], + [ + "▁frec", + -13.39371109008789 + ], + [ + "▁Syracuse", + -13.393916130065918 + ], + [ + "▁Einwilligung", + -13.393917083740234 + ], + [ + "▁miraculous", + -13.393917083740234 + ], + [ + "▁ökologisch", + -13.393917083740234 + ], + [ + "▁Simmons", + -13.393922805786133 + ], + [ + "▁albastru", + -13.393926620483398 + ], + [ + "besser", + -13.393962860107422 + ], + [ + "▁interioare", + -13.394006729125977 + ], + [ + "▁Trocken", + -13.394068717956543 + ], + [ + "niveau", + -13.39406967163086 + ], + [ + "▁Torah", + -13.394122123718262 + ], + [ + "▁beobachten", + -13.3945894241333 + ], + [ + "▁behandeln", + -13.394637107849121 + ], + [ + "staffed", + -13.394742965698242 + ], + [ + "hütte", + -13.394824028015137 + ], + [ + "Central", + -13.394939422607422 + ], + [ + "▁Freiburg", + -13.395198822021484 + ], + [ + "▁Netanyahu", + -13.395261764526367 + ], + [ + "▁Lexington", + -13.395302772521973 + ], + [ + "▁insotit", + -13.395492553710938 + ], + [ + "▁depasi", + -13.39560604095459 + ], + [ + "sewage", + -13.395853996276855 + ], + [ + "erkrankung", + -13.395951271057129 + ], + [ + "▁părţi", + -13.396234512329102 + ], + [ + "▁Nixon", + -13.39661693572998 + ], + [ + "Byron", + 
-13.396905899047852 + ], + [ + "▁varietat", + -13.39724063873291 + ], + [ + "▁Bildschirm", + -13.397299766540527 + ], + [ + "▁accompli", + -13.397424697875977 + ], + [ + "affirmed", + -13.397525787353516 + ], + [ + "▁phyto", + -13.397533416748047 + ], + [ + "sectiune", + -13.397592544555664 + ], + [ + "abteilung", + -13.397932052612305 + ], + [ + "▁voastre", + -13.397957801818848 + ], + [ + "GitHub", + -13.397958755493164 + ], + [ + "▁Jorge", + -13.39796257019043 + ], + [ + "ACTION", + -13.397972106933594 + ], + [ + "voastra", + -13.397984504699707 + ], + [ + "▁Peanut", + -13.397987365722656 + ], + [ + "▁bilingual", + -13.398011207580566 + ], + [ + "▁nourriture", + -13.39803695678711 + ], + [ + "▁Asphalt", + -13.398640632629395 + ], + [ + "emballage", + -13.399310111999512 + ], + [ + "▁sanitation", + -13.399310111999512 + ], + [ + "▁Dessert", + -13.399313926696777 + ], + [ + "intitulé", + -13.399322509765625 + ], + [ + "▁acţiune", + -13.399374008178711 + ], + [ + "▁Übersetzung", + -13.399402618408203 + ], + [ + "destinate", + -13.39941692352295 + ], + [ + "▁Goddess", + -13.399504661560059 + ], + [ + "poziție", + -13.399576187133789 + ], + [ + "denumirea", + -13.400002479553223 + ], + [ + "cantitatea", + -13.40002727508545 + ], + [ + "▁Stereo", + -13.400223731994629 + ], + [ + "object", + -13.400373458862305 + ], + [ + "▁décè", + -13.40058708190918 + ], + [ + "▁Handeln", + -13.400665283203125 + ], + [ + "▁ambience", + -13.400697708129883 + ], + [ + "▁Lindsay", + -13.4006986618042 + ], + [ + "▁tensiune", + -13.400781631469727 + ], + [ + "▁thrift", + -13.400788307189941 + ], + [ + "▁Optimiz", + -13.400843620300293 + ], + [ + "▁beantworten", + -13.401338577270508 + ], + [ + "▁magistrat", + -13.401342391967773 + ], + [ + "évidence", + -13.402016639709473 + ], + [ + "▁Eclipse", + -13.402016639709473 + ], + [ + "▁Ribbon", + -13.402016639709473 + ], + [ + "▁condensation", + -13.402016639709473 + ], + [ + "▁innocence", + -13.402018547058105 + ], + [ + "▁mascara", + -13.402023315429688 + ], + [ + "▁seventeen", + -13.402290344238281 + ], + [ + "▁compétent", + -13.402694702148438 + ], + [ + "bewertet", + -13.402717590332031 + ], + [ + "▁Muzic", + -13.40285587310791 + ], + [ + "complexities", + -13.402928352355957 + ], + [ + "ddington", + -13.403324127197266 + ], + [ + "Entwickler", + -13.403372764587402 + ], + [ + "masonry", + -13.4033784866333 + ], + [ + "Führer", + -13.403386116027832 + ], + [ + "▁awakening", + -13.403388977050781 + ], + [ + "▁lovitur", + -13.403806686401367 + ], + [ + "gebrochen", + -13.404068946838379 + ], + [ + "indexed", + -13.404478073120117 + ], + [ + "campania", + -13.404515266418457 + ], + [ + "▁Fountain", + -13.404730796813965 + ], + [ + "▁Joomla", + -13.404730796813965 + ], + [ + "▁Superintendent", + -13.404730796813965 + ], + [ + "▁Dahl", + -13.404742240905762 + ], + [ + "▁Benefici", + -13.404863357543945 + ], + [ + "optimiser", + -13.404919624328613 + ], + [ + "bursting", + -13.405380249023438 + ], + [ + "diplom", + -13.405427932739258 + ], + [ + "microsoft", + -13.405621528625488 + ], + [ + "▁correlate", + -13.405776977539062 + ], + [ + "▁arhitectura", + -13.405848503112793 + ], + [ + "▁lunette", + -13.40611743927002 + ], + [ + "Statistical", + -13.406147003173828 + ], + [ + "▁iarnă", + -13.406201362609863 + ], + [ + "▁importanț", + -13.406932830810547 + ], + [ + "sistence", + -13.407366752624512 + ], + [ + "associated", + -13.407402992248535 + ], + [ + "Occident", + -13.407452583312988 + ], + [ + "▁Heidelberg", + -13.407452583312988 + ], + [ + "▁acquaintance", + 
-13.407452583312988 + ], + [ + "Introducing", + -13.407453536987305 + ], + [ + "▁ripple", + -13.407480239868164 + ], + [ + "▁Childhood", + -13.407563209533691 + ], + [ + "drywall", + -13.407577514648438 + ], + [ + "Vreau", + -13.40771770477295 + ], + [ + "▁compétence", + -13.407967567443848 + ], + [ + "▁asteapta", + -13.408135414123535 + ], + [ + "▁duhovnic", + -13.408135414123535 + ], + [ + "▁învăţământ", + -13.408141136169434 + ], + [ + "encompassing", + -13.40829849243164 + ], + [ + "1997)", + -13.408370018005371 + ], + [ + "▁atractiv", + -13.408515930175781 + ], + [ + "Majoritatea", + -13.408775329589844 + ], + [ + "▁bungalow", + -13.40881633758545 + ], + [ + "▁Introduce", + -13.408817291259766 + ], + [ + "▁culprit", + -13.408817291259766 + ], + [ + "▁malheureusement", + -13.408817291259766 + ], + [ + "▁voudrai", + -13.408817291259766 + ], + [ + "Europäische", + -13.408825874328613 + ], + [ + "wunsch", + -13.408880233764648 + ], + [ + "▁înțeles", + -13.408892631530762 + ], + [ + "▁infestation", + -13.40889835357666 + ], + [ + "Bringing", + -13.409186363220215 + ], + [ + "▁Mehrheit", + -13.409229278564453 + ], + [ + "ски", + -13.409456253051758 + ], + [ + "▁procéder", + -13.409499168395996 + ], + [ + "grupului", + -13.409504890441895 + ], + [ + "▁dispoziti", + -13.40964412689209 + ], + [ + "▁snug", + -13.409950256347656 + ], + [ + "▁Afrika", + -13.41018295288086 + ], + [ + "▁Madagascar", + -13.41018295288086 + ], + [ + "Părinte", + -13.410195350646973 + ], + [ + "▁Clayton", + -13.410223960876465 + ], + [ + "▁antagonist", + -13.410239219665527 + ], + [ + "termeni", + -13.410250663757324 + ], + [ + "▁Literary", + -13.410391807556152 + ], + [ + "▁Babylon", + -13.410452842712402 + ], + [ + "▁überprüfen", + -13.410865783691406 + ], + [ + "▁duminica", + -13.410879135131836 + ], + [ + "farbig", + -13.410970687866211 + ], + [ + "nennt", + -13.411064147949219 + ], + [ + "annual", + -13.411487579345703 + ], + [ + "▁Qualcomm", + -13.41154956817627 + ], + [ + "▁Slovakia", + -13.41154956817627 + ], + [ + "▁plictis", + -13.411552429199219 + ], + [ + "▁prairie", + -13.411554336547852 + ], + [ + "▁Schatten", + -13.411622047424316 + ], + [ + "▁compléter", + -13.41223430633545 + ], + [ + "inauguration", + -13.412376403808594 + ], + [ + "▁apărare", + -13.412407875061035 + ], + [ + "▁întăr", + -13.412412643432617 + ], + [ + "▁pronunciation", + -13.412919044494629 + ], + [ + "▁bewährt", + -13.412919998168945 + ], + [ + "▁Viertel", + -13.413084983825684 + ], + [ + "▁Heidi", + -13.413252830505371 + ], + [ + "▁Gummi", + -13.413507461547852 + ], + [ + "▁veggie", + -13.413552284240723 + ], + [ + "▁monsieur", + -13.413604736328125 + ], + [ + "éveil", + -13.413630485534668 + ], + [ + "shipments", + -13.413928985595703 + ], + [ + "▁Medikamente", + -13.414290428161621 + ], + [ + "▁Johannesburg", + -13.414314270019531 + ], + [ + "▁ermittelt", + -13.414321899414062 + ], + [ + "▁bataille", + -13.414440155029297 + ], + [ + "extrem", + -13.414609909057617 + ], + [ + "▁1:2", + -13.414671897888184 + ], + [ + "Array", + -13.414725303649902 + ], + [ + "▁portail", + -13.414857864379883 + ], + [ + "▁găzdui", + -13.414977073669434 + ], + [ + "▁Calcium", + -13.41497802734375 + ], + [ + "▁Correction", + -13.415104866027832 + ], + [ + "bureaux", + -13.41528034210205 + ], + [ + "bestselling", + -13.415338516235352 + ], + [ + "Übungen", + -13.415420532226562 + ], + [ + "paramètres", + -13.415633201599121 + ], + [ + "▁Provincial", + -13.415663719177246 + ], + [ + "▁outrageous", + -13.415680885314941 + ], + [ + "▁Giveaway", + 
-13.415775299072266 + ], + [ + "▁LGBTQ", + -13.41589641571045 + ], + [ + "geklärt", + -13.416854858398438 + ], + [ + "▁Karlsruhe", + -13.417038917541504 + ], + [ + "▁esențial", + -13.417038917541504 + ], + [ + "avancée", + -13.41703987121582 + ], + [ + "hesitant", + -13.417040824890137 + ], + [ + "enlarged", + -13.417069435119629 + ], + [ + "▁inherit", + -13.417121887207031 + ], + [ + "Food", + -13.4171724319458 + ], + [ + "bucuria", + -13.417181015014648 + ], + [ + "▁BTW", + -13.417400360107422 + ], + [ + "associe", + -13.417579650878906 + ], + [ + "▁Möchte", + -13.417742729187012 + ], + [ + "demokrat", + -13.417789459228516 + ], + [ + "Turcia", + -13.417964935302734 + ], + [ + "forged", + -13.418370246887207 + ], + [ + "▁Zhao", + -13.418442726135254 + ], + [ + "▁cherries", + -13.418556213378906 + ], + [ + "▁evangelical", + -13.418631553649902 + ], + [ + "▁jüng", + -13.418792724609375 + ], + [ + "spans", + -13.41880989074707 + ], + [ + "▁străluc", + -13.41888427734375 + ], + [ + "▁geschie", + -13.41893196105957 + ], + [ + "▁Tattoo", + -13.419112205505371 + ], + [ + "sanitary", + -13.419114112854004 + ], + [ + "▁biopsy", + -13.419353485107422 + ], + [ + "▁imprumut", + -13.419795036315918 + ], + [ + "▁unreasonable", + -13.419795036315918 + ], + [ + "Funktion", + -13.419800758361816 + ], + [ + "▁prohibition", + -13.419904708862305 + ], + [ + "▁Prezent", + -13.419939041137695 + ], + [ + "boosted", + -13.419967651367188 + ], + [ + "▁chalet", + -13.420382499694824 + ], + [ + "▁tanar", + -13.420450210571289 + ], + [ + "Faktoren", + -13.420489311218262 + ], + [ + "▁Mozilla", + -13.420550346374512 + ], + [ + "▁Lambert", + -13.420760154724121 + ], + [ + "▁Cruci", + -13.420927047729492 + ], + [ + "▁Flugzeug", + -13.421198844909668 + ], + [ + "reassure", + -13.421205520629883 + ], + [ + "envisioned", + -13.421542167663574 + ], + [ + "Traditionally", + -13.421773910522461 + ], + [ + "▁parametri", + -13.42185115814209 + ], + [ + "▁unicorn", + -13.421891212463379 + ], + [ + "▁adéquat", + -13.421894073486328 + ], + [ + "▁Colonial", + -13.421915054321289 + ], + [ + "▁Kwa", + -13.422097206115723 + ], + [ + "▁SERV", + -13.422333717346191 + ], + [ + "tourism", + -13.422627449035645 + ], + [ + "▁Kiev", + -13.422974586486816 + ], + [ + "heightened", + -13.42309284210205 + ], + [ + "circulating", + -13.423099517822266 + ], + [ + "▁Kreditkarte", + -13.42310619354248 + ], + [ + "gedruckt", + -13.423110008239746 + ], + [ + "▁Depend", + -13.423120498657227 + ], + [ + "Style", + -13.423196792602539 + ], + [ + "▁Rettungs", + -13.42325496673584 + ], + [ + "wrongful", + -13.423418998718262 + ], + [ + "▁devour", + -13.423453330993652 + ], + [ + "▁manevr", + -13.423582077026367 + ], + [ + "carora", + -13.423628807067871 + ], + [ + "erfolgreichen", + -13.423723220825195 + ], + [ + "überwiegend", + -13.423942565917969 + ], + [ + "▁Sauvignon", + -13.423942565917969 + ], + [ + "händler", + -13.423944473266602 + ], + [ + "▁annotation", + -13.424009323120117 + ], + [ + "▁expans", + -13.424020767211914 + ], + [ + "▁recital", + -13.424080848693848 + ], + [ + "inhabited", + -13.424367904663086 + ], + [ + "OnePlus", + -13.424549102783203 + ], + [ + "Gästen", + -13.424588203430176 + ], + [ + "beliebig", + -13.424613952636719 + ], + [ + "▁Anonymous", + -13.424635887145996 + ], + [ + "▁Ansprechpartner", + -13.424635887145996 + ], + [ + "▁tamb", + -13.42464542388916 + ], + [ + "estimating", + -13.424670219421387 + ], + [ + "frequent", + -13.424769401550293 + ], + [ + "▁disciplin", + -13.425241470336914 + ], + [ + "▁plombier", + 
-13.425329208374023 + ], + [ + "▁teoretic", + -13.42533016204834 + ], + [ + "greift", + -13.425339698791504 + ], + [ + "▁Einschränkung", + -13.42537784576416 + ], + [ + "obscur", + -13.426115989685059 + ], + [ + "architecte", + -13.426233291625977 + ], + [ + "▁détour", + -13.42647647857666 + ], + [ + "▁spaghetti", + -13.426717758178711 + ], + [ + "croft", + -13.42693042755127 + ], + [ + "▁Grammar", + -13.426953315734863 + ], + [ + "▁investitii", + -13.427062034606934 + ], + [ + "▁glorif", + -13.427067756652832 + ], + [ + "architekt", + -13.427412033081055 + ], + [ + "Oricum", + -13.427451133728027 + ], + [ + "▁bruise", + -13.427692413330078 + ], + [ + "▁McCarthy", + -13.428107261657715 + ], + [ + "▁Uruguay", + -13.428107261657715 + ], + [ + "Produsele", + -13.428109169006348 + ], + [ + "▁Comparison", + -13.42811107635498 + ], + [ + "▁fondamental", + -13.42811107635498 + ], + [ + "▁stradă", + -13.428115844726562 + ], + [ + "▁Countries", + -13.428131103515625 + ], + [ + "▁guéri", + -13.42825698852539 + ], + [ + "▁bâti", + -13.428339004516602 + ], + [ + "▁blunt", + -13.428515434265137 + ], + [ + "▁Sistem", + -13.428645133972168 + ], + [ + "▁Betroffenen", + -13.428803443908691 + ], + [ + "efectuare", + -13.428823471069336 + ], + [ + "▁scharf", + -13.428899765014648 + ], + [ + "naps", + -13.429057121276855 + ], + [ + "▁plaid", + -13.429163932800293 + ], + [ + "▁investiții", + -13.429367065429688 + ], + [ + "evenimentele", + -13.42948055267334 + ], + [ + "▁Phuket", + -13.429499626159668 + ], + [ + "▁testosterone", + -13.429499626159668 + ], + [ + "▁scaffold", + -13.429500579833984 + ], + [ + "▁rasch", + -13.430022239685059 + ], + [ + "▁adânc", + -13.430076599121094 + ], + [ + "atteinte", + -13.430228233337402 + ], + [ + "▁educație", + -13.430320739746094 + ], + [ + "▁leopard", + -13.430893898010254 + ], + [ + "▁superioare", + -13.430893898010254 + ], + [ + "▁téléchargement", + -13.430893898010254 + ], + [ + "▁Weapon", + -13.431103706359863 + ], + [ + "favourable", + -13.431336402893066 + ], + [ + "nourishing", + -13.43143367767334 + ], + [ + "▁verfolgt", + -13.43160629272461 + ], + [ + "▁tablou", + -13.431633949279785 + ], + [ + "Algérie", + -13.431657791137695 + ], + [ + "Islam", + -13.431700706481934 + ], + [ + "faser", + -13.431825637817383 + ], + [ + "rhythm", + -13.432214736938477 + ], + [ + "▁Anthropolog", + -13.432291030883789 + ], + [ + "▁clôtur", + -13.432291030883789 + ], + [ + "spüren", + -13.432291984558105 + ], + [ + "▁Architectural", + -13.432294845581055 + ], + [ + "▁imaginary", + -13.432368278503418 + ], + [ + "cône", + -13.432456016540527 + ], + [ + "▁snuggl", + -13.432744026184082 + ], + [ + "disadvantaged", + -13.432745933532715 + ], + [ + "radically", + -13.4329195022583 + ], + [ + "Première", + -13.433011054992676 + ], + [ + "▁combinaison", + -13.433027267456055 + ], + [ + "▁Algeria", + -13.43303108215332 + ], + [ + "▁Wände", + -13.43317985534668 + ], + [ + "aesthetically", + -13.43336009979248 + ], + [ + "▁McKe", + -13.433368682861328 + ], + [ + "interroge", + -13.433473587036133 + ], + [ + "exclusive", + -13.433475494384766 + ], + [ + "▁Thomson", + -13.433688163757324 + ], + [ + "▁Gujarat", + -13.43368911743164 + ], + [ + "irgendwo", + -13.433690071105957 + ], + [ + "Severin", + -13.433767318725586 + ], + [ + "▁imitation", + -13.433926582336426 + ], + [ + "constructed", + -13.434194564819336 + ], + [ + "▁Montpellier", + -13.434388160705566 + ], + [ + "cedent", + -13.434539794921875 + ], + [ + "accelerating", + -13.434563636779785 + ], + [ + "dommages", + -13.4346284866333 
+ ], + [ + "lideri", + -13.434730529785156 + ], + [ + "▁Millennium", + -13.435089111328125 + ], + [ + "▁imprisonment", + -13.435089111328125 + ], + [ + "machining", + -13.435111999511719 + ], + [ + "▁anxiet", + -13.43521499633789 + ], + [ + "Contains", + -13.435298919677734 + ], + [ + "pleade", + -13.435563087463379 + ], + [ + "DOWN", + -13.43564510345459 + ], + [ + "geschehen", + -13.435797691345215 + ], + [ + "restaurant", + -13.435811996459961 + ], + [ + "Totusi", + -13.435839653015137 + ], + [ + "amintesc", + -13.436158180236816 + ], + [ + "▁Crisp", + -13.436233520507812 + ], + [ + "aduse", + -13.436278343200684 + ], + [ + "▁imposé", + -13.436351776123047 + ], + [ + "Jubiläum", + -13.436490058898926 + ], + [ + "▁Plaintiff", + -13.436491012573242 + ], + [ + "▁authoritative", + -13.436491966247559 + ], + [ + "▁rendition", + -13.436633110046387 + ], + [ + "Royce", + -13.436707496643066 + ], + [ + "1996)", + -13.436724662780762 + ], + [ + "Asociația", + -13.437192916870117 + ], + [ + "▁Gluten", + -13.437264442443848 + ], + [ + "feature", + -13.43741226196289 + ], + [ + "Behavioral", + -13.437454223632812 + ], + [ + "tearing", + -13.437763214111328 + ], + [ + "▁Entfernung", + -13.437894821166992 + ], + [ + "▁Responsibility", + -13.437894821166992 + ], + [ + "▁negligent", + -13.437894821166992 + ], + [ + "▁syllabus", + -13.437894821166992 + ], + [ + "▁Cycling", + -13.437895774841309 + ], + [ + "generell", + -13.438114166259766 + ], + [ + "customised", + -13.438392639160156 + ], + [ + "Management", + -13.43850326538086 + ], + [ + "▁timid", + -13.438518524169922 + ], + [ + "Tagged", + -13.438730239868164 + ], + [ + "▁susţinut", + -13.438809394836426 + ], + [ + "anchored", + -13.43892765045166 + ], + [ + "alternating", + -13.439055442810059 + ], + [ + "▁obligatoriu", + -13.439300537109375 + ], + [ + "▁reinstate", + -13.439456939697266 + ], + [ + "Können", + -13.43946361541748 + ], + [ + "▁Paol", + -13.439596176147461 + ], + [ + "öhr", + -13.439603805541992 + ], + [ + "▁Asociati", + -13.439876556396484 + ], + [ + "▁commenc", + -13.440285682678223 + ], + [ + "reinigt", + -13.440293312072754 + ], + [ + "commended", + -13.440350532531738 + ], + [ + "▁Proceed", + -13.440675735473633 + ], + [ + "beutel", + -13.440702438354492 + ], + [ + "▁Experimental", + -13.44070816040039 + ], + [ + "▁constellation", + -13.44070816040039 + ], + [ + "▁gepflegt", + -13.44070816040039 + ], + [ + "▁Ergänzung", + -13.440709114074707 + ], + [ + "Judith", + -13.440713882446289 + ], + [ + "▁Quartet", + -13.440720558166504 + ], + [ + "complemented", + -13.440742492675781 + ], + [ + "ausbildung", + -13.440750122070312 + ], + [ + "▁uncertainties", + -13.44077205657959 + ], + [ + "▁humiliat", + -13.440914154052734 + ], + [ + "luta", + -13.441121101379395 + ], + [ + "▁complexion", + -13.441482543945312 + ], + [ + "Serviciul", + -13.441612243652344 + ], + [ + "▁Toast", + -13.441722869873047 + ], + [ + "ummies", + -13.442425727844238 + ], + [ + "▁irit", + -13.442463874816895 + ], + [ + "producing", + -13.442585945129395 + ], + [ + "amenajare", + -13.442825317382812 + ], + [ + "▁béton", + -13.442828178405762 + ], + [ + "▁serpent", + -13.442851066589355 + ], + [ + "▁vizită", + -13.442996978759766 + ], + [ + "▁Beamte", + -13.443017959594727 + ], + [ + "▁Füße", + -13.443166732788086 + ], + [ + "▁Norwich", + -13.443531036376953 + ], + [ + "▁acronym", + -13.443531036376953 + ], + [ + "▁eradicate", + -13.443531036376953 + ], + [ + "▁solidarité", + -13.44353199005127 + ], + [ + "▁eggplant", + -13.443582534790039 + ], + [ + "▁sailors", + 
-13.443619728088379 + ], + [ + "waschen", + -13.444538116455078 + ], + [ + "Editura", + -13.444757461547852 + ], + [ + "▁erwerben", + -13.444944381713867 + ], + [ + "▁unconventional", + -13.444944381713867 + ], + [ + "▁boulder", + -13.444948196411133 + ], + [ + "Diplom", + -13.445013046264648 + ], + [ + "influx", + -13.446162223815918 + ], + [ + "▁Twelve", + -13.446361541748047 + ], + [ + "▁Sexual", + -13.44636344909668 + ], + [ + "numite", + -13.446369171142578 + ], + [ + "▁kontaktieren", + -13.446370124816895 + ], + [ + "▁strâns", + -13.44637680053711 + ], + [ + "▁précisément", + -13.446382522583008 + ], + [ + "empfindlich", + -13.446405410766602 + ], + [ + "▁divulg", + -13.446490287780762 + ], + [ + "▁delicat", + -13.446539878845215 + ], + [ + "compete", + -13.446542739868164 + ], + [ + "▁implique", + -13.446616172790527 + ], + [ + "implantation", + -13.44672966003418 + ], + [ + "frères", + -13.447328567504883 + ], + [ + "shedding", + -13.44758415222168 + ], + [ + "découvrez", + -13.447657585144043 + ], + [ + "rith", + -13.447735786437988 + ], + [ + "▁réglementation", + -13.447778701782227 + ], + [ + "▁transistor", + -13.447785377502441 + ], + [ + "inflated", + -13.447792053222656 + ], + [ + "▁Bluff", + -13.447887420654297 + ], + [ + "▁Aquarium", + -13.448526382446289 + ], + [ + "▁mananc", + -13.448638916015625 + ], + [ + "▁disinfect", + -13.448700904846191 + ], + [ + "tuft", + -13.448740005493164 + ], + [ + "Public", + -13.449081420898438 + ], + [ + "conceivabl", + -13.449197769165039 + ], + [ + "▁Cadillac", + -13.449197769165039 + ], + [ + "Assassin", + -13.449199676513672 + ], + [ + "issuance", + -13.449252128601074 + ], + [ + "▁Achtung", + -13.449287414550781 + ], + [ + "▁grundlegend", + -13.449909210205078 + ], + [ + "▁Băsescu", + -13.449910163879395 + ], + [ + "schaden", + -13.45014476776123 + ], + [ + "coached", + -13.450409889221191 + ], + [ + "▁betreffend", + -13.45046329498291 + ], + [ + "ergebnis", + -13.450541496276855 + ], + [ + "▁Lieutenant", + -13.4506196975708 + ], + [ + "WORLD", + -13.450620651245117 + ], + [ + "▁Moroccan", + -13.450620651245117 + ], + [ + "▁Butterfly", + -13.450621604919434 + ], + [ + "would", + -13.450737953186035 + ], + [ + "▁Metropol", + -13.451025009155273 + ], + [ + "lexic", + -13.451192855834961 + ], + [ + "comunitatea", + -13.45124340057373 + ], + [ + "vapeur", + -13.451456069946289 + ], + [ + "4.000", + -13.451559066772461 + ], + [ + "Pentru", + -13.451581954956055 + ], + [ + "üblichen", + -13.451613426208496 + ], + [ + "▁Général", + -13.451770782470703 + ], + [ + "▁Versailles", + -13.452046394348145 + ], + [ + "▁engraving", + -13.452046394348145 + ], + [ + "▁pédagogique", + -13.452192306518555 + ], + [ + "▁Policies", + -13.452759742736816 + ], + [ + "descending", + -13.453235626220703 + ], + [ + "stärkt", + -13.453349113464355 + ], + [ + "▁démocratie", + -13.453470230102539 + ], + [ + "▁granddaughter", + -13.453470230102539 + ], + [ + "▁buffalo", + -13.453474998474121 + ], + [ + "Datorita", + -13.45347785949707 + ], + [ + "hydroxy", + -13.453537940979004 + ], + [ + "▁ganduri", + -13.453566551208496 + ], + [ + "▁hijack", + -13.453624725341797 + ], + [ + "zahn", + -13.453699111938477 + ], + [ + "poziția", + -13.45406436920166 + ], + [ + "▁Zähne", + -13.454184532165527 + ], + [ + "▁grossesse", + -13.454296112060547 + ], + [ + "embassy", + -13.4548978805542 + ], + [ + "▁cérémonie", + -13.4548978805542 + ], + [ + "Rhône", + -13.454898834228516 + ], + [ + "▁Cabernet", + -13.454898834228516 + ], + [ + "▁Namibia", + -13.454902648925781 + ], + [ + 
"▁pedestal", + -13.454902648925781 + ], + [ + "▁Fighting", + -13.45490550994873 + ], + [ + "▁Threat", + -13.454962730407715 + ], + [ + "▁ideological", + -13.455047607421875 + ], + [ + "▁restitu", + -13.455183029174805 + ], + [ + "gelangt", + -13.455510139465332 + ], + [ + "Mitgliedern", + -13.455537796020508 + ], + [ + "acquérir", + -13.455613136291504 + ], + [ + "▁inferioar", + -13.45561695098877 + ], + [ + "Thierry", + -13.455619812011719 + ], + [ + "▁Entspannung", + -13.455638885498047 + ], + [ + "frequency", + -13.45566177368164 + ], + [ + "▁Fluid", + -13.455686569213867 + ], + [ + "▁betreut", + -13.455901145935059 + ], + [ + "Biological", + -13.455965995788574 + ], + [ + "▁Constanţa", + -13.456328392028809 + ], + [ + "▁beschäftigen", + -13.456328392028809 + ], + [ + "▁undesirable", + -13.456328392028809 + ], + [ + "▁protégé", + -13.456365585327148 + ], + [ + "▁nautical", + -13.456474304199219 + ], + [ + "▁sniff", + -13.456507682800293 + ], + [ + "Decizi", + -13.456510543823242 + ], + [ + "▁căldur", + -13.45706558227539 + ], + [ + "▁ideologi", + -13.457335472106934 + ], + [ + "Fraktion", + -13.457545280456543 + ], + [ + "collegiate", + -13.45776081085205 + ], + [ + "▁sănătos", + -13.45776081085205 + ], + [ + "▁Observatory", + -13.45776653289795 + ], + [ + "▁saturation", + -13.457769393920898 + ], + [ + "organizate", + -13.457771301269531 + ], + [ + "mergem", + -13.458321571350098 + ], + [ + "Publish", + -13.458451271057129 + ], + [ + "▁rattle", + -13.458460807800293 + ], + [ + "▁întâlniri", + -13.458663940429688 + ], + [ + "emporte", + -13.458741188049316 + ], + [ + "▁înscris", + -13.459046363830566 + ], + [ + "▁Patterson", + -13.459195137023926 + ], + [ + "▁ehrenamtlich", + -13.459195137023926 + ], + [ + "linux", + -13.459213256835938 + ], + [ + "conduire", + -13.45921802520752 + ], + [ + "▁absolven", + -13.459223747253418 + ], + [ + "▁einzigartig", + -13.459598541259766 + ], + [ + "▁_____", + -13.459803581237793 + ], + [ + "▁Beschäftigung", + -13.459912300109863 + ], + [ + "▁erfasst", + -13.459927558898926 + ], + [ + "▁Datum", + -13.459992408752441 + ], + [ + "raportul", + -13.460284233093262 + ], + [ + "ennemi", + -13.460460662841797 + ], + [ + "default", + -13.460643768310547 + ], + [ + "icillin", + -13.46066951751709 + ], + [ + "▁diamant", + -13.460671424865723 + ], + [ + "amerika", + -13.460684776306152 + ], + [ + "▁pescuit", + -13.46070384979248 + ], + [ + "▁grappl", + -13.460797309875488 + ], + [ + "▁Homeland", + -13.46082592010498 + ], + [ + "▁tromb", + -13.46112060546875 + ], + [ + "▁reduzieren", + -13.461349487304688 + ], + [ + "▁Statut", + -13.461593627929688 + ], + [ + "booming", + -13.461670875549316 + ], + [ + "fenced", + -13.461723327636719 + ], + [ + "measure", + -13.461888313293457 + ], + [ + "témoin", + -13.462069511413574 + ], + [ + "▁Inventory", + -13.462069511413574 + ], + [ + "▁circonstance", + -13.462069511413574 + ], + [ + "▁téléphonique", + -13.462069511413574 + ], + [ + "▁împiedic", + -13.46207046508789 + ], + [ + "▁Settlement", + -13.462072372436523 + ], + [ + "kannte", + -13.462076187133789 + ], + [ + "▁substantive", + -13.462385177612305 + ], + [ + "miterea", + -13.462642669677734 + ], + [ + "▁noştri", + -13.462790489196777 + ], + [ + "▁plăcere", + -13.462791442871094 + ], + [ + "▁eticheta", + -13.462823867797852 + ], + [ + "quickest", + -13.462993621826172 + ], + [ + "▁pasageri", + -13.463089942932129 + ], + [ + "▁Publi", + -13.463495254516602 + ], + [ + "▁Suzanne", + -13.463509559631348 + ], + [ + "▁bucătări", + -13.463509559631348 + ], + [ + 
"Regulatory", + -13.463510513305664 + ], + [ + "▁Mandarin", + -13.463647842407227 + ], + [ + "surgical", + -13.463947296142578 + ], + [ + "▁Smash", + -13.463950157165527 + ], + [ + "▁mândr", + -13.46403694152832 + ], + [ + "▁Unterkunft", + -13.464315414428711 + ], + [ + "moos", + -13.464374542236328 + ], + [ + "Camere", + -13.464510917663574 + ], + [ + "/03/", + -13.464651107788086 + ], + [ + "▁ethno", + -13.464677810668945 + ], + [ + "▁Eröffnung", + -13.46495246887207 + ], + [ + "▁Snyder", + -13.46495246887207 + ], + [ + "▁Wilmington", + -13.46495246887207 + ], + [ + "▁Canberra", + -13.464953422546387 + ], + [ + "▁Tahoe", + -13.464953422546387 + ], + [ + "▁slippery", + -13.464953422546387 + ], + [ + "▁Snake", + -13.464957237243652 + ], + [ + "▁turmeric", + -13.464963912963867 + ], + [ + "▁Cartoon", + -13.46499252319336 + ], + [ + "▁scrisoare", + -13.46500015258789 + ], + [ + "▁reprend", + -13.465425491333008 + ], + [ + "▁Konkurrenz", + -13.46567440032959 + ], + [ + "▁raisins", + -13.465693473815918 + ], + [ + "▁Werkstatt", + -13.465713500976562 + ], + [ + "▁agresiv", + -13.465795516967773 + ], + [ + "hugs", + -13.46615219116211 + ], + [ + "cazurile", + -13.46618938446045 + ], + [ + "spirited", + -13.466232299804688 + ], + [ + "▁britisch", + -13.466307640075684 + ], + [ + "spritz", + -13.466367721557617 + ], + [ + "auxiliary", + -13.46639633178711 + ], + [ + "interprétation", + -13.46639633178711 + ], + [ + "▁verbindet", + -13.46639633178711 + ], + [ + "▁fuzzy", + -13.466429710388184 + ], + [ + "▁turmoil", + -13.466432571411133 + ], + [ + "▁redefine", + -13.466819763183594 + ], + [ + "▁Kiwi", + -13.466890335083008 + ], + [ + "oiseaux", + -13.46712875366211 + ], + [ + "▁pamper", + -13.467146873474121 + ], + [ + "▁desfaso", + -13.46719741821289 + ], + [ + "▁pragu", + -13.467576026916504 + ], + [ + "prevenirea", + -13.467730522155762 + ], + [ + "▁convergence", + -13.467846870422363 + ], + [ + "tufted", + -13.467878341674805 + ], + [ + "brewed", + -13.467981338500977 + ], + [ + "villagers", + -13.468003273010254 + ], + [ + "▁Irving", + -13.468170166015625 + ], + [ + "nigsten", + -13.468660354614258 + ], + [ + "▁embod", + -13.468742370605469 + ], + [ + "Alicia", + -13.468938827514648 + ], + [ + "probably", + -13.469009399414062 + ], + [ + "divider", + -13.46904468536377 + ], + [ + "Attempt", + -13.469223022460938 + ], + [ + "▁Cognitive", + -13.469292640686035 + ], + [ + "▁Recognition", + -13.469292640686035 + ], + [ + "▁concierge", + -13.469292640686035 + ], + [ + "▁Semester", + -13.4692964553833 + ], + [ + "Economie", + -13.469417572021484 + ], + [ + "sortiment", + -13.469460487365723 + ], + [ + "shortest", + -13.46961498260498 + ], + [ + "üchtig", + -13.469650268554688 + ], + [ + "▁conveyanc", + -13.469978332519531 + ], + [ + "▁Ferdinand", + -13.470017433166504 + ], + [ + "▁permanence", + -13.470019340515137 + ], + [ + "▁incadr", + -13.470145225524902 + ], + [ + "▁estrogen", + -13.470290184020996 + ], + [ + "February", + -13.470661163330078 + ], + [ + "gedeckt", + -13.470704078674316 + ], + [ + "▁reagieren", + -13.470743179321289 + ], + [ + "▁meditate", + -13.470980644226074 + ], + [ + "simulated", + -13.471010208129883 + ], + [ + "▁supprimer", + -13.471468925476074 + ], + [ + "▁bumbac", + -13.47146987915039 + ], + [ + "▁vânzări", + -13.471477508544922 + ], + [ + "▁Kapitel", + -13.471478462219238 + ], + [ + "▁Weltkrieg", + -13.471513748168945 + ], + [ + "déposer", + -13.471674919128418 + ], + [ + "Asus", + -13.4718017578125 + ], + [ + "▁Communicat", + -13.471851348876953 + ], + [ + 
"Finished", + -13.47188949584961 + ], + [ + "▁Telegraph", + -13.472054481506348 + ], + [ + "▁Competitive", + -13.472196578979492 + ], + [ + "▁collectivités", + -13.472197532653809 + ], + [ + "▁protège", + -13.472199440002441 + ], + [ + "▁scallop", + -13.472219467163086 + ], + [ + "Happy", + -13.472335815429688 + ], + [ + "tehnică", + -13.472352981567383 + ], + [ + "▁Gestalt", + -13.47270393371582 + ], + [ + "▁benign", + -13.47295093536377 + ], + [ + "kraut", + -13.473149299621582 + ], + [ + "louer", + -13.473221778869629 + ], + [ + "▁Printr", + -13.47326946258545 + ], + [ + "mputation", + -13.473346710205078 + ], + [ + "▁dicke", + -13.473429679870605 + ], + [ + "▁Halifax", + -13.473650932312012 + ], + [ + "▁bounty", + -13.473650932312012 + ], + [ + "▁cauliflower", + -13.473650932312012 + ], + [ + "▁Survival", + -13.473654747009277 + ], + [ + "▁Chandler", + -13.473684310913086 + ], + [ + "▁bemüh", + -13.473760604858398 + ], + [ + "phro", + -13.473855972290039 + ], + [ + "Friday", + -13.474018096923828 + ], + [ + "particularly", + -13.474032402038574 + ], + [ + "arteries", + -13.474197387695312 + ], + [ + "Lösung", + -13.474771499633789 + ], + [ + "▁causal", + -13.474817276000977 + ], + [ + "▁recueilli", + -13.475075721740723 + ], + [ + "Stylish", + -13.47510814666748 + ], + [ + "schränke", + -13.47510814666748 + ], + [ + "▁francophone", + -13.47510814666748 + ], + [ + "▁limousine", + -13.47510814666748 + ], + [ + "▁statistiques", + -13.47510814666748 + ], + [ + "▁Kleider", + -13.475111961364746 + ], + [ + "▁dunkel", + -13.475127220153809 + ], + [ + "tätigkeit", + -13.475190162658691 + ], + [ + "▁punished", + -13.475257873535156 + ], + [ + "▁implică", + -13.475539207458496 + ], + [ + "▁inițial", + -13.475568771362305 + ], + [ + "▁Eminescu", + -13.475837707519531 + ], + [ + "▁expliqué", + -13.475837707519531 + ], + [ + "▁Eduard", + -13.475839614868164 + ], + [ + "▁psychologique", + -13.475870132446289 + ], + [ + "▁protejeaz", + -13.476580619812012 + ], + [ + "spül", + -13.476709365844727 + ], + [ + "▁Virtu", + -13.477021217346191 + ], + [ + "▁régulière", + -13.477044105529785 + ], + [ + "▁Outreach", + -13.477130889892578 + ], + [ + "▁Apprentice", + -13.47729778289795 + ], + [ + "▁compréhension", + -13.47729778289795 + ], + [ + "▁zwölf", + -13.47729778289795 + ], + [ + "Surgical", + -13.477315902709961 + ], + [ + "latéral", + -13.477417945861816 + ], + [ + "▁Ceremony", + -13.47803020477295 + ], + [ + "▁Shampoo", + -13.47803783416748 + ], + [ + "Global", + -13.478239059448242 + ], + [ + "▁paradis", + -13.478302955627441 + ], + [ + "Developed", + -13.478493690490723 + ], + [ + "▁figurine", + -13.478549003601074 + ], + [ + "sujets", + -13.478574752807617 + ], + [ + "▁Naomi", + -13.478772163391113 + ], + [ + "financed", + -13.478838920593262 + ], + [ + "forestry", + -13.478896141052246 + ], + [ + "▁Anregung", + -13.479494094848633 + ], + [ + "▁spectateur", + -13.479804039001465 + ], + [ + "▁exercitii", + -13.479815483093262 + ], + [ + "▁russisch", + -13.479888916015625 + ], + [ + "gefunden", + -13.479988098144531 + ], + [ + "schleunig", + -13.480225563049316 + ], + [ + "▁géographique", + -13.480225563049316 + ], + [ + "▁Delphi", + -13.480317115783691 + ], + [ + "Freddie", + -13.4806489944458 + ], + [ + "▁muzici", + -13.480958938598633 + ], + [ + "▁Edmund", + -13.48095989227295 + ], + [ + "finanzielle", + -13.481032371520996 + ], + [ + "(2003)", + -13.481319427490234 + ], + [ + "accentuate", + -13.481437683105469 + ], + [ + "overlapping", + -13.48151969909668 + ], + [ + "▁Pluto", + 
-13.481595993041992 + ], + [ + "românii", + -13.481683731079102 + ], + [ + "▁Timişoara", + -13.48169231414795 + ], + [ + "▁poivr", + -13.481754302978516 + ], + [ + "▁repris", + -13.481852531433105 + ], + [ + "▁Geschlecht", + -13.482426643371582 + ], + [ + "▁thieves", + -13.482426643371582 + ], + [ + "▁Transformer", + -13.482431411743164 + ], + [ + "▁shortcomings", + -13.482438087463379 + ], + [ + "▁aptitude", + -13.48244571685791 + ], + [ + "pitfalls", + -13.482468605041504 + ], + [ + "▁manicure", + -13.482577323913574 + ], + [ + "mystical", + -13.482723236083984 + ], + [ + "▁abolish", + -13.482833862304688 + ], + [ + "▁Zielgruppe", + -13.482873916625977 + ], + [ + "▁naţionale", + -13.483160972595215 + ], + [ + "▁trandafir", + -13.483160972595215 + ], + [ + "▁matematic", + -13.483193397521973 + ], + [ + "▁Hirsch", + -13.483257293701172 + ], + [ + "Fahr", + -13.483458518981934 + ], + [ + "connaissent", + -13.483476638793945 + ], + [ + "browned", + -13.483846664428711 + ], + [ + "▁bearbeitet", + -13.483881950378418 + ], + [ + "▁usturoi", + -13.483896255493164 + ], + [ + "▁Surprise", + -13.48389720916748 + ], + [ + "▁Tehran", + -13.483899116516113 + ], + [ + "▁BLACK", + -13.483901023864746 + ], + [ + "▁abonament", + -13.483904838562012 + ], + [ + "▁mêl", + -13.483972549438477 + ], + [ + "Angebot", + -13.484091758728027 + ], + [ + "ajungi", + -13.48410415649414 + ], + [ + "▁Woodland", + -13.48420524597168 + ], + [ + "▁gradini", + -13.484305381774902 + ], + [ + "▁Marilyn", + -13.48464584350586 + ], + [ + "kilometer", + -13.484880447387695 + ], + [ + "tempered", + -13.485230445861816 + ], + [ + "▁intimacy", + -13.485371589660645 + ], + [ + "▁thunderstorm", + -13.485373497009277 + ], + [ + "▁Uttar", + -13.485413551330566 + ], + [ + "▁varnish", + -13.485535621643066 + ], + [ + "opathie", + -13.485982894897461 + ], + [ + "▁școlar", + -13.48611068725586 + ], + [ + "▁raisonnable", + -13.486114501953125 + ], + [ + "proactively", + -13.486490249633789 + ], + [ + "▁gib", + -13.486536979675293 + ], + [ + "▁hospice", + -13.48684310913086 + ], + [ + "▁constă", + -13.486896514892578 + ], + [ + "▁Crescent", + -13.48690128326416 + ], + [ + "▁ambasad", + -13.486933708190918 + ], + [ + "hotărâre", + -13.486969947814941 + ], + [ + "▁fraîche", + -13.48709774017334 + ], + [ + "▁bundesweit", + -13.487581253051758 + ], + [ + "nsbesondere", + -13.487812042236328 + ], + [ + "▁intoarce", + -13.487863540649414 + ], + [ + "▁Schokolade", + -13.488319396972656 + ], + [ + "▁adjective", + -13.488319396972656 + ], + [ + "▁incalzire", + -13.488319396972656 + ], + [ + "▁Qualification", + -13.488320350646973 + ], + [ + "▁Bolivia", + -13.488324165344238 + ], + [ + "▁cruelty", + -13.488334655761719 + ], + [ + "pläne", + -13.48834228515625 + ], + [ + "▁solitude", + -13.488354682922363 + ], + [ + "▁Bosnia", + -13.488568305969238 + ], + [ + "rohr", + -13.488643646240234 + ], + [ + "▁regrette", + -13.48877239227295 + ], + [ + "zusammengestellt", + -13.48924732208252 + ], + [ + "▁Kardashian", + -13.489798545837402 + ], + [ + "▁Picasso", + -13.489798545837402 + ], + [ + "▁unverbindlich", + -13.489798545837402 + ], + [ + "▁Headquarters", + -13.489799499511719 + ], + [ + "métrage", + -13.4898099899292 + ], + [ + "▁Magento", + -13.489816665649414 + ], + [ + "▁exhibitors", + -13.489898681640625 + ], + [ + "utty", + -13.490381240844727 + ], + [ + "▁Fünf", + -13.490538597106934 + ], + [ + "▁Peugeot", + -13.490538597106934 + ], + [ + "▁verdienen", + -13.490538597106934 + ], + [ + "▁absolviert", + -13.49053955078125 + ], + [ + 
"schutzerklärung", + -13.490679740905762 + ], + [ + "sistemele", + -13.49089241027832 + ], + [ + "▁concrète", + -13.491279602050781 + ], + [ + "▁rhyme", + -13.491279602050781 + ], + [ + "▁Continuous", + -13.49128246307373 + ], + [ + "versprechen", + -13.491312026977539 + ], + [ + "▁Melanie", + -13.49202823638916 + ], + [ + "▁clienţi", + -13.492046356201172 + ], + [ + "luckily", + -13.492205619812012 + ], + [ + "▁counterfeit", + -13.492762565612793 + ], + [ + "▁locomotive", + -13.492889404296875 + ], + [ + "▁reacți", + -13.492908477783203 + ], + [ + "ampered", + -13.493005752563477 + ], + [ + "atenția", + -13.493011474609375 + ], + [ + "Suppose", + -13.493062973022461 + ], + [ + "hinweis", + -13.493464469909668 + ], + [ + "verletzung", + -13.493504524230957 + ], + [ + "▁mănânc", + -13.493504524230957 + ], + [ + "▁provoac", + -13.493507385253906 + ], + [ + "▁regizor", + -13.493511199951172 + ], + [ + "kundig", + -13.49352741241455 + ], + [ + "embarqu", + -13.493584632873535 + ], + [ + "Radio", + -13.493690490722656 + ], + [ + "Ministrul", + -13.493896484375 + ], + [ + "weakened", + -13.494214057922363 + ], + [ + "▁translucent", + -13.494247436523438 + ], + [ + "George", + -13.494380950927734 + ], + [ + "▁bacterii", + -13.494402885437012 + ], + [ + "intervalul", + -13.494803428649902 + ], + [ + "▁vizualiz", + -13.494832038879395 + ], + [ + "▁Feuchtigkeit", + -13.494991302490234 + ], + [ + "▁choisissez", + -13.494991302490234 + ], + [ + "▁plausible", + -13.494991302490234 + ], + [ + "▁perpetu", + -13.495122909545898 + ], + [ + "▁bucati", + -13.495194435119629 + ], + [ + "▁Giovanni", + -13.495735168457031 + ], + [ + "▁bluetooth", + -13.495736122131348 + ], + [ + "▁translating", + -13.49573802947998 + ], + [ + "▁Kyoto", + -13.495739936828613 + ], + [ + "▁homosexual", + -13.495745658874512 + ], + [ + "treabă", + -13.495820045471191 + ], + [ + "ntrepid", + -13.495983123779297 + ], + [ + "▁fachlich", + -13.496664047241211 + ], + [ + "Vaccin", + -13.496774673461914 + ], + [ + "▁Treib", + -13.497248649597168 + ], + [ + "varsity", + -13.497272491455078 + ], + [ + "▁Tavern", + -13.497278213500977 + ], + [ + "▁ensue", + -13.497330665588379 + ], + [ + "flexibel", + -13.497971534729004 + ], + [ + "retrieved", + -13.498102188110352 + ], + [ + "traditionellen", + -13.498230934143066 + ], + [ + "▁circulati", + -13.498546600341797 + ], + [ + "▁Diagnose", + -13.498717308044434 + ], + [ + "▁Strawberry", + -13.498717308044434 + ], + [ + "Societatea", + -13.49871826171875 + ], + [ + "expertise", + -13.498849868774414 + ], + [ + "▁naturii", + -13.499464988708496 + ], + [ + "▁4:1", + -13.499515533447266 + ], + [ + "Frequently", + -13.500210762023926 + ], + [ + "disproportionate", + -13.500210762023926 + ], + [ + "▁LIMITED", + -13.500210762023926 + ], + [ + "▁ancestral", + -13.500227928161621 + ], + [ + "▁Logistik", + -13.500237464904785 + ], + [ + "▁recolt", + -13.50042724609375 + ], + [ + "▁liebevoll", + -13.500436782836914 + ], + [ + "importing", + -13.500452041625977 + ], + [ + "aparatul", + -13.500458717346191 + ], + [ + "poziţia", + -13.500564575195312 + ], + [ + "facerilor", + -13.500658988952637 + ], + [ + "Submitted", + -13.50086784362793 + ], + [ + "ografia", + -13.501221656799316 + ], + [ + "onformément", + -13.50168228149414 + ], + [ + "▁dissemination", + -13.501708030700684 + ], + [ + "afli", + -13.501834869384766 + ], + [ + "luminous", + -13.502154350280762 + ], + [ + "▁draußen", + -13.502456665039062 + ], + [ + "▁Zauber", + -13.502535820007324 + ], + [ + "▁Ibrahim", + -13.503207206726074 + ], + [ + 
"▁eruption", + -13.503216743469238 + ], + [ + "écrite", + -13.50357723236084 + ], + [ + "avril", + -13.503898620605469 + ], + [ + "Increasing", + -13.504171371459961 + ], + [ + "hingeg", + -13.504411697387695 + ], + [ + "fidelity", + -13.504707336425781 + ], + [ + "étonnant", + -13.504707336425781 + ], + [ + "▁créativité", + -13.504707336425781 + ], + [ + "▁Required", + -13.504708290100098 + ], + [ + "▁Edison", + -13.504719734191895 + ], + [ + "▁Stuhl", + -13.504719734191895 + ], + [ + "outhwestern", + -13.506060600280762 + ], + [ + "▁Beschwerden", + -13.506210327148438 + ], + [ + "▁angajaţi", + -13.506210327148438 + ], + [ + "▁Currency", + -13.506211280822754 + ], + [ + "▁reagiert", + -13.506214141845703 + ], + [ + "Science", + -13.506229400634766 + ], + [ + "hospital", + -13.506253242492676 + ], + [ + "professionellen", + -13.50649356842041 + ], + [ + "▁Trouve", + -13.506768226623535 + ], + [ + "▁utopi", + -13.50683307647705 + ], + [ + "gypte", + -13.506928443908691 + ], + [ + "▁Konsequenz", + -13.506962776184082 + ], + [ + "▁pacienți", + -13.506962776184082 + ], + [ + "▁orizont", + -13.506988525390625 + ], + [ + "Corey", + -13.506999015808105 + ], + [ + "▁quartet", + -13.507009506225586 + ], + [ + "▁Sherlock", + -13.50710678100586 + ], + [ + "▁gagné", + -13.507237434387207 + ], + [ + "▁Jusqu", + -13.50732707977295 + ], + [ + "▁Clickfunnel", + -13.507465362548828 + ], + [ + "Survivor", + -13.507716178894043 + ], + [ + "▁Beethoven", + -13.507716178894043 + ], + [ + "▁Exemplar", + -13.507716178894043 + ], + [ + "▁Gonzalez", + -13.507716178894043 + ], + [ + "▁Illustrator", + -13.507716178894043 + ], + [ + "▁Verpflichtung", + -13.507718086242676 + ], + [ + "Possibly", + -13.507719993591309 + ], + [ + "Maintenant", + -13.507721900939941 + ], + [ + "▁incendiu", + -13.507721900939941 + ], + [ + "▁poêl", + -13.507747650146484 + ], + [ + "▁aşez", + -13.507757186889648 + ], + [ + "phenol", + -13.508248329162598 + ], + [ + "▁magician", + -13.508421897888184 + ], + [ + "éventuellement", + -13.508512496948242 + ], + [ + "▁amortiz", + -13.508736610412598 + ], + [ + "bouchage", + -13.50873851776123 + ], + [ + "▁Accommodation", + -13.509223937988281 + ], + [ + "▁Significant", + -13.509223937988281 + ], + [ + "▁rejoice", + -13.509223937988281 + ], + [ + "▁Lorraine", + -13.509224891662598 + ], + [ + "▁Necklace", + -13.509234428405762 + ], + [ + "▁hamburger", + -13.509273529052734 + ], + [ + "Enhanced", + -13.5095796585083 + ], + [ + "▁Audrey", + -13.509978294372559 + ], + [ + "▁considère", + -13.509986877441406 + ], + [ + "hafen", + -13.51050853729248 + ], + [ + "acordare", + -13.510509490966797 + ], + [ + "▁ediți", + -13.51075553894043 + ], + [ + "▁militia", + -13.510767936706543 + ], + [ + "captivate", + -13.510771751403809 + ], + [ + "▁rebellion", + -13.510777473449707 + ], + [ + "▁veranstalte", + -13.510844230651855 + ], + [ + "▁matelas", + -13.510859489440918 + ], + [ + "originating", + -13.510873794555664 + ], + [ + "Typical", + -13.51092529296875 + ], + [ + "▁législat", + -13.511360168457031 + ], + [ + "▁Kräfte", + -13.511488914489746 + ], + [ + "▁Eigentümer", + -13.511489868164062 + ], + [ + "▁gonfl", + -13.511608123779297 + ], + [ + "dispoziție", + -13.512028694152832 + ], + [ + "▁Fabulous", + -13.512246131896973 + ], + [ + "▁Guillaume", + -13.512246131896973 + ], + [ + "▁Genuine", + -13.512247085571289 + ], + [ + "selbe", + -13.512449264526367 + ], + [ + "(2002)", + -13.512616157531738 + ], + [ + "Einen", + -13.512908935546875 + ], + [ + "▁Snapdragon", + -13.513002395629883 + ], + [ + 
"▁plagiarism", + -13.513002395629883 + ], + [ + "▁Rendez", + -13.513019561767578 + ], + [ + "▁înregistrare", + -13.513033866882324 + ], + [ + "probiert", + -13.513081550598145 + ], + [ + "gestiegen", + -13.513153076171875 + ], + [ + "Teatrul", + -13.513370513916016 + ], + [ + "trove", + -13.513469696044922 + ], + [ + "ntsprechend", + -13.513566017150879 + ], + [ + "Städten", + -13.513691902160645 + ], + [ + "unforeseen", + -13.513760566711426 + ], + [ + "▁Meridian", + -13.513761520385742 + ], + [ + "▁Ministries", + -13.513763427734375 + ], + [ + "plaît", + -13.513769149780273 + ], + [ + "▁Telefonnummer", + -13.513772010803223 + ], + [ + "welded", + -13.513788223266602 + ], + [ + "pondere", + -13.513976097106934 + ], + [ + "▁funcţiona", + -13.514012336730957 + ], + [ + "▁politicieni", + -13.514187812805176 + ], + [ + "fleck", + -13.514240264892578 + ], + [ + "▁Nitro", + -13.514264106750488 + ], + [ + "wettbewerb", + -13.514518737792969 + ], + [ + "▁ingrijire", + -13.514518737792969 + ], + [ + "▁Gehirn", + -13.514521598815918 + ], + [ + "sigură", + -13.514904022216797 + ], + [ + "400,000", + -13.515237808227539 + ], + [ + "▁cataract", + -13.515277862548828 + ], + [ + "outskirt", + -13.515280723571777 + ], + [ + "▁Identification", + -13.515287399291992 + ], + [ + "▁imperfections", + -13.515317916870117 + ], + [ + "▁Dokumentation", + -13.515474319458008 + ], + [ + "Engine", + -13.515851974487305 + ], + [ + "extindere", + -13.516046524047852 + ], + [ + "bijoux", + -13.516797065734863 + ], + [ + "▁dărui", + -13.516802787780762 + ], + [ + "▁Moderator", + -13.516913414001465 + ], + [ + "biblio", + -13.517024040222168 + ], + [ + "енн", + -13.517024040222168 + ], + [ + "▁Relevan", + -13.51728630065918 + ], + [ + "ansprüche", + -13.517557144165039 + ], + [ + "épaisseur", + -13.517580032348633 + ], + [ + "▁emoţi", + -13.517677307128906 + ], + [ + "exacerbate", + -13.518318176269531 + ], + [ + "▁Wimbledon", + -13.518318176269531 + ], + [ + "▁Pandora", + -13.518319129943848 + ], + [ + "perhaps", + -13.518725395202637 + ], + [ + "certify", + -13.518762588500977 + ], + [ + "Strukturen", + -13.5189208984375 + ], + [ + "▁Kreativität", + -13.519079208374023 + ], + [ + "schlägt", + -13.51908016204834 + ], + [ + "▁certifié", + -13.51911735534668 + ], + [ + "/09/", + -13.519211769104004 + ], + [ + "▁suprafaţ", + -13.519493103027344 + ], + [ + "verständnis", + -13.519841194152832 + ], + [ + "presedintele", + -13.519842147827148 + ], + [ + "▁orthopedic", + -13.519842147827148 + ], + [ + "▁superioara", + -13.519843101501465 + ], + [ + "älteste", + -13.519903182983398 + ], + [ + "▁conducător", + -13.520153999328613 + ], + [ + "supplementary", + -13.520243644714355 + ], + [ + "wetlands", + -13.520438194274902 + ], + [ + "▁suprafete", + -13.520605087280273 + ], + [ + "▁aparțin", + -13.520951271057129 + ], + [ + "analiză", + -13.521014213562012 + ], + [ + "Uneori", + -13.52115535736084 + ], + [ + "Toujours", + -13.521368026733398 + ], + [ + "▁Nairobi", + -13.521368026733398 + ], + [ + "▁asparagus", + -13.521368026733398 + ], + [ + "▁crowdfunding", + -13.521368026733398 + ], + [ + "gutachten", + -13.521369934082031 + ], + [ + "smelling", + -13.521659851074219 + ], + [ + "▁elektrisch", + -13.521718978881836 + ], + [ + "begging", + -13.522055625915527 + ], + [ + "▁Renewable", + -13.522896766662598 + ], + [ + "▁Trouble", + -13.522896766662598 + ], + [ + "▁devastated", + -13.522896766662598 + ], + [ + "▁remplacé", + -13.522896766662598 + ], + [ + "▁schmeckt", + -13.522896766662598 + ], + [ + "▁exerciți", + 
-13.523005485534668 + ], + [ + "▁vermute", + -13.523650169372559 + ], + [ + "▁Constanța", + -13.523661613464355 + ], + [ + "expunere", + -13.523693084716797 + ], + [ + "▁Fitzgerald", + -13.52442741394043 + ], + [ + "▁Mechanism", + -13.524429321289062 + ], + [ + "▁underscore", + -13.524484634399414 + ], + [ + "poziţie", + -13.524901390075684 + ], + [ + "stöbern", + -13.525193214416504 + ], + [ + "▁littérature", + -13.525193214416504 + ], + [ + "▁împrumut", + -13.525193214416504 + ], + [ + "Vision", + -13.525771141052246 + ], + [ + "▁overwhelm", + -13.525773048400879 + ], + [ + "▁erweitern", + -13.525959968566895 + ], + [ + "skeletal", + -13.525960922241211 + ], + [ + "▁terrified", + -13.525960922241211 + ], + [ + "aggravate", + -13.525962829589844 + ], + [ + "▁Malawi", + -13.525969505310059 + ], + [ + "▁neuroscience", + -13.526009559631348 + ], + [ + "trecută", + -13.526097297668457 + ], + [ + "▁maestr", + -13.52634334564209 + ], + [ + "нов", + -13.526555061340332 + ], + [ + "▁Cobb", + -13.52667236328125 + ], + [ + "▁Schwangerschaft", + -13.526727676391602 + ], + [ + "▁internationaux", + -13.526727676391602 + ], + [ + "▁entspannen", + -13.526729583740234 + ], + [ + "▁Früchte", + -13.52676773071289 + ], + [ + "mâine", + -13.526805877685547 + ], + [ + "stützt", + -13.526938438415527 + ], + [ + "flipped", + -13.527076721191406 + ], + [ + "Palatul", + -13.527252197265625 + ], + [ + "▁Gérard", + -13.527496337890625 + ], + [ + "▁Kensington", + -13.527498245239258 + ], + [ + "chargée", + -13.52807331085205 + ], + [ + "iolo", + -13.528203964233398 + ], + [ + "▁excesiv", + -13.52904987335205 + ], + [ + "▁Gymnas", + -13.52962875366211 + ], + [ + "▁optimise", + -13.529678344726562 + ], + [ + "possibilités", + -13.529717445373535 + ], + [ + "▁periculoas", + -13.529810905456543 + ], + [ + "mechanical", + -13.529839515686035 + ], + [ + "▁confruntă", + -13.529868125915527 + ], + [ + "quatrième", + -13.530573844909668 + ], + [ + "▁Preservation", + -13.530573844909668 + ], + [ + "▁Juventus", + -13.530574798583984 + ], + [ + "vorsitzende", + -13.5305757522583 + ], + [ + "électora", + -13.530586242675781 + ], + [ + "▁fascinant", + -13.53061580657959 + ], + [ + "▁lagoon", + -13.530671119689941 + ], + [ + "referencing", + -13.53079605102539 + ], + [ + "appointed", + -13.530988693237305 + ], + [ + "Audible", + -13.531112670898438 + ], + [ + "sighted", + -13.531612396240234 + ], + [ + "▁gewünscht", + -13.532061576843262 + ], + [ + "▁Expedition", + -13.532115936279297 + ], + [ + "▁genunchi", + -13.532115936279297 + ], + [ + "▁PROVIDE", + -13.53211784362793 + ], + [ + "▁rosemary", + -13.532118797302246 + ], + [ + "▁cleanliness", + -13.532130241394043 + ], + [ + "commanded", + -13.53223991394043 + ], + [ + "ältere", + -13.532530784606934 + ], + [ + "ност", + -13.532547950744629 + ], + [ + "kühlen", + -13.532917976379395 + ], + [ + "mettez", + -13.533548355102539 + ], + [ + "connaitre", + -13.533661842346191 + ], + [ + "Qaeda", + -13.533662796020508 + ], + [ + "▁traumhaft", + -13.53366470336914 + ], + [ + "kommst", + -13.533666610717773 + ], + [ + "▁Abbott", + -13.533669471740723 + ], + [ + "▁Fool", + -13.533686637878418 + ], + [ + "▁médaill", + -13.533687591552734 + ], + [ + "▁genotyp", + -13.533693313598633 + ], + [ + "▁Fälle", + -13.53375244140625 + ], + [ + "▁actuator", + -13.533843994140625 + ], + [ + "CLASS", + -13.534042358398438 + ], + [ + "progressively", + -13.534421920776367 + ], + [ + "negative", + -13.53469467163086 + ], + [ + "bundled", + -13.535009384155273 + ], + [ + "▁dezbatere", + 
-13.535208702087402 + ], + [ + "kamagra", + -13.535237312316895 + ], + [ + "gardinen", + -13.535250663757324 + ], + [ + "unsecured", + -13.535271644592285 + ], + [ + "Assisted", + -13.535298347473145 + ], + [ + "Gymnasium", + -13.535386085510254 + ], + [ + "▁brusc", + -13.535591125488281 + ], + [ + "prinzip", + -13.535655975341797 + ], + [ + "Torrent", + -13.535964965820312 + ], + [ + "Presented", + -13.535967826843262 + ], + [ + "▁impressionnant", + -13.53628921508789 + ], + [ + "charakter", + -13.536758422851562 + ], + [ + "▁Acoustic", + -13.536762237548828 + ], + [ + "▁appartient", + -13.536763191223145 + ], + [ + "gesteuert", + -13.536879539489746 + ], + [ + "▁condiți", + -13.537089347839355 + ], + [ + "authentic", + -13.537313461303711 + ], + [ + "▁Erholung", + -13.537534713745117 + ], + [ + "▁Veranstalter", + -13.537534713745117 + ], + [ + "▁Filial", + -13.537665367126465 + ], + [ + "ruhigen", + -13.537714958190918 + ], + [ + "symptôme", + -13.538311004638672 + ], + [ + "▁Efficiency", + -13.538311004638672 + ], + [ + "▁stunned", + -13.538311004638672 + ], + [ + "▁sympathique", + -13.538311004638672 + ], + [ + "Uploaded", + -13.538352966308594 + ], + [ + "▁geistig", + -13.538453102111816 + ], + [ + "Pläne", + -13.538509368896484 + ], + [ + "▁Apartament", + -13.53855037689209 + ], + [ + "▁ușoar", + -13.539119720458984 + ], + [ + "▁locuinț", + -13.539122581481934 + ], + [ + "épouse", + -13.539166450500488 + ], + [ + "îngrijire", + -13.539215087890625 + ], + [ + "Obtain", + -13.539261817932129 + ], + [ + "Detect", + -13.539590835571289 + ], + [ + "▁Dumitru", + -13.539865493774414 + ], + [ + "▁refrigeration", + -13.539865493774414 + ], + [ + "ärztliche", + -13.539881706237793 + ], + [ + "efficiency", + -13.540032386779785 + ], + [ + "▁snail", + -13.540328979492188 + ], + [ + "gelände", + -13.540419578552246 + ], + [ + "expected", + -13.540620803833008 + ], + [ + "kompetenz", + -13.540643692016602 + ], + [ + "▁sfânt", + -13.540643692016602 + ], + [ + "océan", + -13.540685653686523 + ], + [ + "▁Plasma", + -13.540717124938965 + ], + [ + "▁vulgar", + -13.54075813293457 + ], + [ + "▁slump", + -13.541083335876465 + ], + [ + "autoimmune", + -13.541422843933105 + ], + [ + "▁Cynthia", + -13.541422843933105 + ], + [ + "▁dimineaţ", + -13.541422843933105 + ], + [ + "▁whimsical", + -13.541422843933105 + ], + [ + "▁evaporate", + -13.541488647460938 + ], + [ + "▁calorii", + -13.54186725616455 + ], + [ + "portion", + -13.54187297821045 + ], + [ + "crowned", + -13.5419282913208 + ], + [ + "▁întâmpin", + -13.54220199584961 + ], + [ + "▁Centenar", + -13.542620658874512 + ], + [ + "▁Genehmigung", + -13.54298210144043 + ], + [ + "▁Wahrscheinlich", + -13.54298210144043 + ], + [ + "▁accompaniment", + -13.54298210144043 + ], + [ + "▁Negoti", + -13.542984962463379 + ], + [ + "▁Vanilla", + -13.543000221252441 + ], + [ + "▁Receiv", + -13.543014526367188 + ], + [ + "▁bestseller", + -13.543052673339844 + ], + [ + "tendons", + -13.543069839477539 + ], + [ + "Reilly", + -13.543192863464355 + ], + [ + "▁refroidi", + -13.543731689453125 + ], + [ + "▁überrascht", + -13.543763160705566 + ], + [ + "Gitarre", + -13.543828964233398 + ], + [ + "wände", + -13.544173240661621 + ], + [ + "veniturile", + -13.544321060180664 + ], + [ + "▁portofoliu", + -13.54454517364502 + ], + [ + "▁temporaire", + -13.54454517364502 + ], + [ + "▁Dawson", + -13.544546127319336 + ], + [ + "foreseeable", + -13.544547080993652 + ], + [ + "▁Gastgeber", + -13.545344352722168 + ], + [ + "Access", + -13.545432090759277 + ], + [ + "▁Defender", + 
-13.545537948608398 + ], + [ + "▁Quarry", + -13.546109199523926 + ], + [ + "▁trolley", + -13.546110153198242 + ], + [ + "▁carburant", + -13.546111106872559 + ], + [ + "▁titluri", + -13.54631233215332 + ], + [ + "comparatively", + -13.546327590942383 + ], + [ + "nachfolgend", + -13.54659652709961 + ], + [ + "anfang", + -13.546740531921387 + ], + [ + "▁faszinieren", + -13.546891212463379 + ], + [ + "trăiesc", + -13.547082901000977 + ], + [ + "▁Travail", + -13.547159194946289 + ], + [ + "Contact", + -13.547235488891602 + ], + [ + "fashion", + -13.547245025634766 + ], + [ + "▁épais", + -13.547585487365723 + ], + [ + "plattform", + -13.547676086425781 + ], + [ + "ventricular", + -13.547677040100098 + ], + [ + "▁Portsmouth", + -13.547677993774414 + ], + [ + "▁împărat", + -13.54767894744873 + ], + [ + "▁vândut", + -13.547698020935059 + ], + [ + "▁evidenț", + -13.547708511352539 + ], + [ + "Purchasing", + -13.547877311706543 + ], + [ + "discerning", + -13.54804801940918 + ], + [ + "odonti", + -13.548080444335938 + ], + [ + "distilled", + -13.548316955566406 + ], + [ + "saveur", + -13.548447608947754 + ], + [ + "▁récompense", + -13.54845905303955 + ], + [ + "confortul", + -13.548552513122559 + ], + [ + "arbeitete", + -13.548787117004395 + ], + [ + "partenerii", + -13.549064636230469 + ], + [ + "mirrored", + -13.54908561706543 + ], + [ + "Dienstleister", + -13.549243927001953 + ], + [ + "▁Jakarta", + -13.549243927001953 + ], + [ + "▁WEBSITE", + -13.549243927001953 + ], + [ + "▁Acquisition", + -13.549262046813965 + ], + [ + "▁Miranda", + -13.549287796020508 + ], + [ + "Syndic", + -13.549356460571289 + ], + [ + "▁stadiu", + -13.549450874328613 + ], + [ + "▁Parchet", + -13.549498558044434 + ], + [ + "Générale", + -13.54954719543457 + ], + [ + "▁jpl", + -13.549579620361328 + ], + [ + "attainable", + -13.549949645996094 + ], + [ + "École", + -13.550041198730469 + ], + [ + "Sphere", + -13.550538063049316 + ], + [ + "obtainable", + -13.550592422485352 + ], + [ + "▁Sapphire", + -13.55081558227539 + ], + [ + "▁aérienne", + -13.55081558227539 + ], + [ + "▁bărbați", + -13.55081558227539 + ], + [ + "▁irritating", + -13.55081558227539 + ], + [ + "▁ultraviolet", + -13.550816535949707 + ], + [ + "untouched", + -13.550817489624023 + ], + [ + "▁Ramsey", + -13.550819396972656 + ], + [ + "titres", + -13.551087379455566 + ], + [ + "▁Coordinat", + -13.551218032836914 + ], + [ + "believable", + -13.551358222961426 + ], + [ + "▁Grundsätzlich", + -13.551602363586426 + ], + [ + "▁konsequent", + -13.551602363586426 + ], + [ + "▁Cerceta", + -13.551909446716309 + ], + [ + "dirigé", + -13.552116394042969 + ], + [ + "▁disturb", + -13.552151679992676 + ], + [ + "conciliation", + -13.552210807800293 + ], + [ + "▁gelöscht", + -13.552390098571777 + ], + [ + "▁sauvegarde", + -13.552391052246094 + ], + [ + "▁cavities", + -13.552393913269043 + ], + [ + "stunde", + -13.55241584777832 + ], + [ + "▁foloseasc", + -13.552430152893066 + ], + [ + "▁simpati", + -13.552873611450195 + ], + [ + "Chacun", + -13.553032875061035 + ], + [ + "adversaire", + -13.553178787231445 + ], + [ + "Eigentlich", + -13.55319881439209 + ], + [ + "defense", + -13.553593635559082 + ], + [ + "consider", + -13.553672790527344 + ], + [ + "▁Trinidad", + -13.553966522216797 + ], + [ + "▁strategist", + -13.553966522216797 + ], + [ + "distorted", + -13.553967475891113 + ], + [ + "▁hypothetical", + -13.553967475891113 + ], + [ + "▁ramburs", + -13.55396842956543 + ], + [ + "▁Mallorca", + -13.553970336914062 + ], + [ + "▁Domino", + -13.554018020629883 + ], + [ + 
"arrondissement", + -13.554756164550781 + ], + [ + "konferenz", + -13.554756164550781 + ], + [ + "▁Beleuchtung", + -13.554756164550781 + ], + [ + "aggregat", + -13.55484676361084 + ], + [ + "subsidize", + -13.554896354675293 + ], + [ + "shri", + -13.555503845214844 + ], + [ + "Kaufentscheidung", + -13.555545806884766 + ], + [ + "▁Hernandez", + -13.555545806884766 + ], + [ + "▁Upholster", + -13.555546760559082 + ], + [ + "atlantic", + -13.555614471435547 + ], + [ + "▁locuinte", + -13.555652618408203 + ], + [ + "integrates", + -13.55583381652832 + ], + [ + "ewusst", + -13.555878639221191 + ], + [ + "▁Avocado", + -13.556337356567383 + ], + [ + "Decorative", + -13.557014465332031 + ], + [ + "▁Corinthians", + -13.557127952575684 + ], + [ + "▁clădire", + -13.557127952575684 + ], + [ + "▁plomberie", + -13.557127952575684 + ], + [ + "vases", + -13.557143211364746 + ], + [ + "▁crippl", + -13.557247161865234 + ], + [ + "cluttered", + -13.557487487792969 + ], + [ + "departed", + -13.557807922363281 + ], + [ + "▁entscheidet", + -13.5579195022583 + ], + [ + "Certaine", + -13.558243751525879 + ], + [ + "honda", + -13.558294296264648 + ], + [ + "triggering", + -13.558527946472168 + ], + [ + "▁Erdogan", + -13.558712005615234 + ], + [ + "▁Widerstand", + -13.558712005615234 + ], + [ + "▁Bhutan", + -13.558713912963867 + ], + [ + "▁ascunde", + -13.558736801147461 + ], + [ + "▁shading", + -13.558748245239258 + ], + [ + "behavioural", + -13.559172630310059 + ], + [ + "▁transfér", + -13.55960750579834 + ], + [ + "versichert", + -13.559623718261719 + ], + [ + "▁vinovat", + -13.559646606445312 + ], + [ + "▁airfare", + -13.560142517089844 + ], + [ + "▁simplistic", + -13.56030559539795 + ], + [ + "▁Asigura", + -13.560320854187012 + ], + [ + "Chauffe", + -13.560480117797852 + ], + [ + "scrisă", + -13.560585975646973 + ], + [ + "trouvez", + -13.560702323913574 + ], + [ + "greasy", + -13.560709953308105 + ], + [ + "bottled", + -13.560809135437012 + ], + [ + "grouped", + -13.560934066772461 + ], + [ + "▁beeinflussen", + -13.561092376708984 + ], + [ + "▁chronological", + -13.561114311218262 + ], + [ + "(2000)", + -13.56127643585205 + ], + [ + "sheltered", + -13.561298370361328 + ], + [ + "Historically", + -13.561931610107422 + ], + [ + "piled", + -13.562012672424316 + ], + [ + "publicate", + -13.562378883361816 + ], + [ + "▁étudié", + -13.56268310546875 + ], + [ + "▁vertraut", + -13.562688827514648 + ], + [ + "▁Anpassung", + -13.562697410583496 + ], + [ + "cifra", + -13.562705993652344 + ], + [ + "▁recueil", + -13.562762260437012 + ], + [ + "enforceable", + -13.563183784484863 + ], + [ + "Distinguished", + -13.56347942352295 + ], + [ + "Empfänger", + -13.56347942352295 + ], + [ + "▁Acrylic", + -13.56347942352295 + ], + [ + "▁Encyclopedia", + -13.56347942352295 + ], + [ + "▁proaspete", + -13.56347942352295 + ], + [ + "▁unrealistic", + -13.56347942352295 + ], + [ + "▁Assignment", + -13.563481330871582 + ], + [ + "▁incubator", + -13.563491821289062 + ], + [ + "▁unilateral", + -13.563501358032227 + ], + [ + "elasticity", + -13.564398765563965 + ], + [ + "amintim", + -13.564475059509277 + ], + [ + "fournit", + -13.564553260803223 + ], + [ + "semblent", + -13.564763069152832 + ], + [ + "▁$69.", + -13.56496524810791 + ], + [ + "▁prominence", + -13.56507396697998 + ], + [ + "Übertragung", + -13.565075874328613 + ], + [ + "▁2014-11-", + -13.565075874328613 + ], + [ + "▁Giurgiu", + -13.565104484558105 + ], + [ + "étendue", + -13.565123558044434 + ], + [ + "ceputul", + -13.565187454223633 + ], + [ + "Schwierigkeiten", + 
-13.565872192382812 + ], + [ + "▁subtract", + -13.565881729125977 + ], + [ + "▁gesichert", + -13.56589126586914 + ], + [ + "▁uimit", + -13.565925598144531 + ], + [ + "▁mensuel", + -13.565967559814453 + ], + [ + "Vorgaben", + -13.566215515136719 + ], + [ + "▁legitimacy", + -13.566670417785645 + ], + [ + "▁Kendall", + -13.566673278808594 + ], + [ + "▁détach", + -13.566790580749512 + ], + [ + "▁kennenlernen", + -13.567469596862793 + ], + [ + "▁gewöhnlich", + -13.56747055053711 + ], + [ + "Octav", + -13.567917823791504 + ], + [ + "responsive", + -13.568169593811035 + ], + [ + "▁Mängel", + -13.568269729614258 + ], + [ + "▁mișcare", + -13.568269729614258 + ], + [ + "▁ludique", + -13.568270683288574 + ], + [ + "▁Exeter", + -13.568324089050293 + ], + [ + "▁respins", + -13.569114685058594 + ], + [ + "oraşului", + -13.569173812866211 + ], + [ + "▁sfârşit", + -13.56949520111084 + ], + [ + "BUSINESS", + -13.56987190246582 + ], + [ + "illustrating", + -13.56987190246582 + ], + [ + "▁Tottenham", + -13.56987190246582 + ], + [ + "▁pruning", + -13.569886207580566 + ], + [ + "▁Înainte", + -13.569904327392578 + ], + [ + "▁interesel", + -13.570096969604492 + ], + [ + "discovered", + -13.57031536102295 + ], + [ + "(0)", + -13.570572853088379 + ], + [ + "▁Bewerber", + -13.570673942565918 + ], + [ + "▁DESIGN", + -13.570673942565918 + ], + [ + "▁Orientierung", + -13.570686340332031 + ], + [ + "library", + -13.571041107177734 + ], + [ + "cheltuielile", + -13.571419715881348 + ], + [ + "▁Canterbury", + -13.571475982666016 + ], + [ + "▁intellectuelle", + -13.571477890014648 + ], + [ + "▁amalgam", + -13.571497917175293 + ], + [ + "▁Toledo", + -13.57150650024414 + ], + [ + "gezahlt", + -13.571531295776367 + ], + [ + "Veronica", + -13.571659088134766 + ], + [ + "deleting", + -13.571946144104004 + ], + [ + "▁Merlin", + -13.572442054748535 + ], + [ + "▁opérationnel", + -13.572554588317871 + ], + [ + "schmutz", + -13.572568893432617 + ], + [ + "hyroid", + -13.57279109954834 + ], + [ + "▁Compatible", + -13.57308292388916 + ], + [ + "▁Leopard", + -13.57308292388916 + ], + [ + "▁cylindrical", + -13.57308292388916 + ], + [ + "▁terrestrial", + -13.57308292388916 + ], + [ + "conferencing", + -13.573088645935059 + ], + [ + "▁Variety", + -13.573097229003906 + ], + [ + "▁Screw", + -13.573164939880371 + ], + [ + "character", + -13.573637962341309 + ], + [ + "shortened", + -13.573643684387207 + ], + [ + "▁întrerup", + -13.573736190795898 + ], + [ + "freude", + -13.573884010314941 + ], + [ + "▁dezbateri", + -13.573887825012207 + ], + [ + "viteză", + -13.574563026428223 + ], + [ + "formațiile", + -13.574600219726562 + ], + [ + "▁responsibly", + -13.574692726135254 + ], + [ + "Dimensiuni", + -13.574695587158203 + ], + [ + "Arrangement", + -13.57469654083252 + ], + [ + "▁Leisure", + -13.574712753295898 + ], + [ + "escaping", + -13.5750732421875 + ], + [ + "flexion", + -13.575104713439941 + ], + [ + "▁religieuse", + -13.575308799743652 + ], + [ + "crystalline", + -13.575457572937012 + ], + [ + "▁clasp", + -13.575520515441895 + ], + [ + "festigt", + -13.57554817199707 + ], + [ + "▁trouvai", + -13.57596206665039 + ], + [ + "cutaneous", + -13.576305389404297 + ], + [ + "▁carcinoma", + -13.576305389404297 + ], + [ + "▁juxtapos", + -13.576305389404297 + ], + [ + "assemblage", + -13.576306343078613 + ], + [ + "▁Messiah", + -13.576306343078613 + ], + [ + "▁Sleeve", + -13.576306343078613 + ], + [ + "▁șofer", + -13.576386451721191 + ], + [ + "/05/", + -13.57666301727295 + ], + [ + "▁expoziți", + -13.576703071594238 + ], + [ + "▁pătrun", + 
-13.577343940734863 + ], + [ + "▁Lydia", + -13.57739543914795 + ], + [ + "▁grădini", + -13.577919006347656 + ], + [ + "▁toothpaste", + -13.577919960021973 + ], + [ + "ordained", + -13.577921867370605 + ], + [ + "▁Renovation", + -13.577922821044922 + ], + [ + "voicing", + -13.578327178955078 + ], + [ + "président", + -13.578595161437988 + ], + [ + "▁gestartet", + -13.578728675842285 + ], + [ + "Multi", + -13.579121589660645 + ], + [ + "itinéraire", + -13.579537391662598 + ], + [ + "▁influenza", + -13.579537391662598 + ], + [ + "▁psychiatrist", + -13.579537391662598 + ], + [ + "▁schizophrenia", + -13.579537391662598 + ], + [ + "▁Magnolia", + -13.57953929901123 + ], + [ + "▁Scottsdale", + -13.579541206359863 + ], + [ + "▁interessieren", + -13.579548835754395 + ], + [ + "▁asfalt", + -13.579643249511719 + ], + [ + "▁Journalism", + -13.57977294921875 + ], + [ + "Multe", + -13.580089569091797 + ], + [ + "Westfalen", + -13.580347061157227 + ], + [ + "▁Vorschriften", + -13.580348014831543 + ], + [ + "Angleterre", + -13.58034896850586 + ], + [ + "sustainable", + -13.580354690551758 + ], + [ + "▁Retour", + -13.580589294433594 + ], + [ + "▁pâr", + -13.5809965133667 + ], + [ + "steigert", + -13.581120491027832 + ], + [ + "▁AMAZING", + -13.581157684326172 + ], + [ + "▁turbulent", + -13.581157684326172 + ], + [ + "costing", + -13.58155345916748 + ], + [ + "▁Carolyn", + -13.581634521484375 + ], + [ + "utti", + -13.581802368164062 + ], + [ + "dürftig", + -13.581968307495117 + ], + [ + "Keep", + -13.582038879394531 + ], + [ + "▁Théâtre", + -13.582780838012695 + ], + [ + "▁combustibil", + -13.582780838012695 + ], + [ + "▁halloween", + -13.582780838012695 + ], + [ + "▁emulator", + -13.582785606384277 + ], + [ + "▁povești", + -13.582785606384277 + ], + [ + "broyeur", + -13.582810401916504 + ], + [ + "▁émerg", + -13.582927703857422 + ], + [ + "overwhelmingly", + -13.583025932312012 + ], + [ + "regulă", + -13.583124160766602 + ], + [ + "goutte", + -13.583125114440918 + ], + [ + "▁Fertigung", + -13.583593368530273 + ], + [ + "constituted", + -13.584304809570312 + ], + [ + "▁QuickBooks", + -13.584406852722168 + ], + [ + "▁genealogy", + -13.584407806396484 + ], + [ + "▁laundering", + -13.584432601928711 + ], + [ + "▁échéan", + -13.584491729736328 + ], + [ + "Account", + -13.584601402282715 + ], + [ + "oyons", + -13.584792137145996 + ], + [ + "nitro", + -13.584905624389648 + ], + [ + "▁corespund", + -13.585219383239746 + ], + [ + "▁suggér", + -13.58527660369873 + ], + [ + "manipulated", + -13.585348129272461 + ], + [ + "deseori", + -13.585817337036133 + ], + [ + "permeabil", + -13.585912704467773 + ], + [ + "Australia", + -13.58594799041748 + ], + [ + "▁Erasmus", + -13.586034774780273 + ], + [ + "▁disrespect", + -13.586034774780273 + ], + [ + "▁trimestre", + -13.586038589477539 + ], + [ + "▁emanat", + -13.586103439331055 + ], + [ + "Schraub", + -13.58624267578125 + ], + [ + "distinctly", + -13.586319923400879 + ], + [ + "Germain", + -13.586637496948242 + ], + [ + "▁pedepse", + -13.5868501663208 + ], + [ + "réglage", + -13.5868558883667 + ], + [ + "făcute", + -13.587308883666992 + ], + [ + "▁garanteaz", + -13.587434768676758 + ], + [ + "▁unterlieg", + -13.587701797485352 + ], + [ + "▁cheddar", + -13.587712287902832 + ], + [ + "▁refugi", + -13.587756156921387 + ], + [ + "▁inférieur", + -13.587836265563965 + ], + [ + "dimension", + -13.588440895080566 + ], + [ + "▁erkennt", + -13.588570594787598 + ], + [ + "amitié", + -13.588632583618164 + ], + [ + "▁predominant", + -13.588680267333984 + ], + [ + "nourishe", + 
-13.588800430297852 + ], + [ + "exerce", + -13.588907241821289 + ], + [ + "▁disguise", + -13.589225769042969 + ], + [ + "▁traditi", + -13.589289665222168 + ], + [ + "▁Intellectual", + -13.5892972946167 + ], + [ + "▁imunitar", + -13.589299201965332 + ], + [ + "▁Cushion", + -13.589300155639648 + ], + [ + "▁erwachsene", + -13.589517593383789 + ], + [ + "▁Internațional", + -13.590115547180176 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ], + [ + "", + 0.0 + ] + ] + } +} \ No newline at end of file diff --git a/qa_mdt/checkpoints/flant5/tokenizer_config.json b/qa_mdt/checkpoints/flant5/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints.tar b/qa_mdt/checkpoints/hifi-gan/checkpoints.tar new file mode 100644 index 0000000000000000000000000000000000000000..7777069ac80c3e49ae706856e531795250c4ad9a --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints.tar @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:47ce7082877984370a40f9795ff02b26aeae48588c0c7586c8fc2d16b7f3ae63 +size 7815792640 diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints/.gitkeep b/qa_mdt/checkpoints/hifi-gan/checkpoints/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints/audiomae_16k_128bins.ckpt 
b/qa_mdt/checkpoints/hifi-gan/checkpoints/audiomae_16k_128bins.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..f83e27b52be7e830d6df92970030d47d3b32b5bb --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints/audiomae_16k_128bins.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a71507cf57513fd513c76c72c74b6396787c1e345bd93936ac154185a75dc29 +size 1639079507 diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints/clap_htsat_tiny.pt b/qa_mdt/checkpoints/hifi-gan/checkpoints/clap_htsat_tiny.pt new file mode 100644 index 0000000000000000000000000000000000000000..0c53b20c9543ecb8a59c284832dbf1e46f556637 --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints/clap_htsat_tiny.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b945c9550254099cb6ba871aa28c74e146defdd5483c11d58299d6a2d5175f4 +size 1863587645 diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints/clap_music_speech_audioset_epoch_15_esc_89.98.pt b/qa_mdt/checkpoints/hifi-gan/checkpoints/clap_music_speech_audioset_epoch_15_esc_89.98.pt new file mode 100644 index 0000000000000000000000000000000000000000..026b327c66328dcdec4ff32c5d58fe26f8551e19 --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints/clap_music_speech_audioset_epoch_15_esc_89.98.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:51c68f12f9d7ea25fdaaccf741ec7f81e93ee594455410f3bca4f47f88d8e006 +size 2352471003 diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_16k_64bins.ckpt b/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_16k_64bins.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..a908504c08184582c289529e0112d3f573a9bc80 --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_16k_64bins.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0938bbd2d0f3e610b05368afbb6b303bceb86bc340b8896a6d17bfa476695c2b +size 221242730 diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_16k_64bins.json b/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_16k_64bins.json new file mode 100644 index 0000000000000000000000000000000000000000..09fb7a56f705dc623537ced63c4a3352eb3b3c8e --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_16k_64bins.json @@ -0,0 +1,37 @@ +{ + "resblock": "1", + "num_gpus": 6, + "batch_size": 16, + "learning_rate": 0.0002, + "adam_b1": 0.8, + "adam_b2": 0.99, + "lr_decay": 0.999, + "seed": 1234, + + "upsample_rates": [5,4,2,2,2], + "upsample_kernel_sizes": [16,16,8,4,4], + "upsample_initial_channel": 1024, + "resblock_kernel_sizes": [3,7,11], + "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]], + + "segment_size": 8192, + "num_mels": 64, + "num_freq": 1025, + "n_fft": 1024, + "hop_size": 160, + "win_size": 1024, + + "sampling_rate": 16000, + + "fmin": 0, + "fmax": 8000, + "fmax_for_loss": null, + + "num_workers": 4, + + "dist_config": { + "dist_backend": "nccl", + "dist_url": "tcp://localhost:54321", + "world_size": 1 + } +} diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_48k_256bins.ckpt b/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_48k_256bins.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..97a11d70a7ad749f8906ed247787f33cc8d64708 --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_48k_256bins.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:56448c9f194fe73453e6839d1c3e045623682fb6e6d8bfeb4d795caab0acd0d8 +size 761419396 diff --git 
a/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_48k_256bins.json b/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_48k_256bins.json new file mode 100644 index 0000000000000000000000000000000000000000..06d618041aa2312a8d7e530f26895c69d4271b29 --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints/hifigan_48k_256bins.json @@ -0,0 +1,36 @@ +{ + "resblock": "1", + "num_gpus": 8, + "batch_size": 128, + "learning_rate": 0.0001, + "adam_b1": 0.8, + "adam_b2": 0.99, + "lr_decay": 0.999, + "seed": 1234, + + "upsample_rates": [6,5,4,2,2], + "upsample_kernel_sizes": [12,10,8,4,4], + "upsample_initial_channel": 1536, + "resblock_kernel_sizes": [3,7,11,15], + "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5], [1,3,5]], + + "segment_size": 15360, + "num_mels": 256, + "n_fft": 2048, + "hop_size": 480, + "win_size": 2048, + + "sampling_rate": 48000, + + "fmin": 20, + "fmax": 24000, + "fmax_for_loss": null, + + "num_workers": 8, + + "dist_config": { + "dist_backend": "nccl", + "dist_url": "tcp://localhost:18273", + "world_size": 1 + } +} diff --git a/qa_mdt/checkpoints/hifi-gan/checkpoints/vae_mel_16k_64bins.ckpt b/qa_mdt/checkpoints/hifi-gan/checkpoints/vae_mel_16k_64bins.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..1242baa9f6ad358923c68c456ba79e591a5921dd --- /dev/null +++ b/qa_mdt/checkpoints/hifi-gan/checkpoints/vae_mel_16k_64bins.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1879044a35c3c38600c70f2e70bee8f28706982af51be6dd7e00ec2e6788807 +size 977975678 diff --git a/qa_mdt/checkpoints/robertabase/config.json b/qa_mdt/checkpoints/robertabase/config.json new file mode 100644 index 0000000000000000000000000000000000000000..bdc75c44b327ae0bff83fe15a0590c45f3eb03fe --- /dev/null +++ b/qa_mdt/checkpoints/robertabase/config.json @@ -0,0 +1,21 @@ +{ + "architectures": [ + "RobertaForMaskedLM" + ], + "attention_probs_dropout_prob": 0.1, + "bos_token_id": 0, + "eos_token_id": 2, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "layer_norm_eps": 1e-05, + "max_position_embeddings": 514, + "model_type": "roberta", + "num_attention_heads": 12, + "num_hidden_layers": 12, + "pad_token_id": 1, + "type_vocab_size": 1, + "vocab_size": 50265 +} \ No newline at end of file diff --git a/qa_mdt/checkpoints/robertabase/dict.txt b/qa_mdt/checkpoints/robertabase/dict.txt new file mode 100644 index 0000000000000000000000000000000000000000..69d79faee0095496d053b243cdca80e8a320e2c0 --- /dev/null +++ b/qa_mdt/checkpoints/robertabase/dict.txt @@ -0,0 +1,50260 @@ +13 850314647 +262 800385005 +11 800251374 +284 432911125 +290 394899794 +286 386139013 +257 357878752 +287 311196488 +12 215156821 +329 155236946 +326 154060431 +319 147178919 +318 142591644 +447 130810923 +338 116498242 +351 114784681 +383 108664122 +373 100357189 +366 93880741 +379 93284459 +340 88803471 +355 85749070 +531 85009762 +247 82642284 +307 77095226 +82 76381845 +416 73380803 +422 71911149 +389 68628918 +423 67243391 +468 64317701 +25 63508661 +357 63001640 +339 61994245 +314 60989470 +465 56381137 +481 55817121 +281 55370942 +428 52404829 +8 49955136 +564 49278190 +407 49022194 +251 48828693 +345 46413707 +250 46095324 +511 42623671 +393 41629710 +484 41252315 +356 40985272 +475 40041980 +508 39889004 +517 36480426 +550 35941594 +587 34803895 +547 34523820 +546 33398226 +553 33091056 +543 32654778 +510 32035371 +663 32028126 +460 31691389 +530 31181535 +503 30862486 +635 30813519 +720 30660454 +607 
30374808 +477 29369504 +706 29183313 +526 29041171 +14 28893906 +561 27738361 +470 26738514 +614 25458253 +618 24232023 +717 23994060 +673 23817299 +734 23792701 +625 23376942 +661 23220442 +317 22862326 +674 22516011 +632 22500762 +640 22453472 +621 22170426 +656 21469936 +612 21420897 +83 21318775 +679 21314775 +649 21268970 +851 21092011 +938 20404401 +655 20375026 +554 20334200 +584 20320611 +523 20315428 +644 20012607 +40 19422652 +588 19096246 +64 18759122 +617 18693984 +50 18238046 +26689 18079440 +606 17992787 +812 17864313 +6 17843244 +466 17817361 +534 17796224 +532 17532111 +352 17384084 +1 17279082 +611 17091775 +714 17025679 +30 16939428 +645 16677856 +72 16553037 +76 16327061 +651 15971344 +471 15879338 +783 15823492 +683 15819244 +736 15197650 +887 15053172 +784 14786686 +616 14556795 +705 14539133 +691 14309272 +1115 14176045 +26 14145184 +362 14098304 +464 14083981 +16 14033199 +1411 13989417 +1028 13787695 +878 13752356 +1664 13470232 +78 13378307 +1301 13160863 +703 13034870 +780 12998354 +597 12928745 +749 12878804 +852 12866041 +787 12811365 +810 12810008 +1141 12785466 +832 12685151 +981 12676060 +830 12643489 +770 12491952 +1510 12485834 +278 12398432 +513 12345382 +925 12242439 +880 12187173 +838 12095675 +866 12088892 +572 12004582 +1139 11932599 +502 11810743 +347 11778080 +1016 11554835 +1074 11537640 +775 11416721 +883 11147387 +1230 11002697 +835 10997448 +1135 10975044 +867 10945123 +788 10941163 +670 10932097 +1297 10878979 +785 10826031 +17 10797378 +983 10787263 +843 10673666 +259 10657207 +1941 10592731 +279 10574822 +845 10526221 +1110 10523541 +1363 10476406 +1011 10465302 +1285 10446380 +1201 10423577 +968 10348378 +743 10336013 +772 10297739 +1622 10289758 +766 10280263 +2177 10243413 +1181 10234334 +642 10188179 +276 10110644 +815 10077564 +1088 10066642 +2864 10065012 +1218 10051426 +514 10027697 +991 9981059 +881 9920784 +604 9911573 +922 9903273 +892 9899715 +4 9886396 +311 9824882 +777 9788574 +1910 9785844 +360 9705921 +400 9632736 +467 9594120 +821 9549150 +884 9547397 +760 9515151 +1390 9514008 +836 9399858 +88 9399371 +1306 9350994 +350 9326094 +750 9317800 +739 9296956 +910 9295771 +268 9233925 +406 9191046 +1022 9185932 +583 9178621 +509 9120375 +327 9057596 +718 8963492 +995 8936939 +636 8882717 +399 8811370 +826 8792653 +765 8755010 +1440 8704406 +828 8681852 +1029 8668925 +761 8595175 +260 8550153 +68 8521820 +1026 8495454 +1037 8482028 +20 8478672 +18 8372937 +1499 8372574 +371 8359700 +1644 8353078 +32 8336893 +890 8325716 +1119 8309982 +886 8287585 +263 8275838 +309 8275603 +337 8268937 +84 8260458 +1111 8203580 +994 8202575 +272 8193169 +261 8189103 +767 8122128 +390 8069108 +1375 7988935 +1597 7984150 +989 7938818 +73 7922334 +364 7880395 +1107 7805773 +1992 7800278 +283 7777203 +402 7732480 +3217 7712997 +376 7593883 +1266 7589093 +976 7577523 +1194 7566250 +900 7556294 +727 7550625 +1320 7507098 +292 7495866 +77 7470439 +1282 7450955 +1641 7411391 +1171 7409099 +1114 7363929 +1081 7347040 +15 7312426 +367 7306406 +807 7279240 +1160 7272825 +1936 7262915 +274 7253218 +3431 7220578 +299 7218583 +3635 7152871 +3860 7135265 +71 7117156 +1353 7113713 +1392 7090351 +1204 7074121 +3321 7040732 +1043 7037776 +779 7012062 +370 6995545 +19 6983878 +3583 6974965 +898 6966637 +1864 6959506 +711 6952288 +905 6939061 +520 6938890 +582 6920647 +1364 6908579 +1578 6875577 +1105 6855797 +1295 6829761 +1002 6812464 +1256 6769157 +1966 6727321 +657 6641494 +737 6624794 +1104 6593949 +494 6588847 +2997 6571290 +256 6561739 +7303 6545519 +0 6525880 +89 
6511169 +74 6481093 +1812 6451687 +2173 6415053 +1448 6412987 +1524 6397024 +1321 6394244 +1584 6392878 +282 6358319 +81 6351445 +1592 6336754 +1705 6331987 +973 6315804 +1234 6313784 +1748 6306370 +449 6287674 +1318 6264091 +1271 6240689 +34 6237906 +1053 6231567 +1123 6220759 +1165 6216478 +1839 6189790 +306 6174365 +1227 6170682 +271 6158328 +2087 6143528 +804 6141406 +1365 6119704 +790 6119691 +1222 6097096 +1528 6093784 +860 6086478 +1718 6080393 +1755 6062347 +304 6058377 +1367 6044404 +418 6021291 +1178 5961986 +273 5944078 +2258 5884266 +921 5875428 +2368 5843037 +1049 5834912 +1444 5825616 +1550 5810506 +1613 5807417 +1625 5806489 +1933 5804564 +3909 5804009 +1315 5793456 +1263 5781210 +412 5780871 +1294 5756002 +1243 5755617 +440 5703257 +288 5696335 +923 5686348 +33 5683530 +4283 5662615 +1542 5657499 +1466 5655969 +2520 5626252 +1737 5617548 +1239 5613802 +1893 5604800 +3502 5592880 +1231 5592559 +805 5586720 +23 5579916 +1422 5562000 +1957 5554673 +21 5545853 +1223 5537815 +1339 5525740 +1439 5518540 +270 5516122 +22 5511292 +1406 5508724 +1751 5500821 +1497 5473031 +1310 5460960 +2237 5456445 +2254 5424602 +3418 5378471 +1366 5362221 +265 5361225 +1541 5359232 +67 5328572 +1637 5322515 +1903 5319711 +1973 5285283 +2938 5283708 +1057 5281356 +1568 5274805 +321 5273108 +2756 5236169 +1830 5223345 +1770 5222102 +65 5208299 +1244 5204410 +1180 5203669 +2098 5169445 +1730 5168645 +2056 5168496 +3349 5156053 +2055 5138614 +2807 5130949 +1101 5123031 +66 5103752 +1816 5093006 +1400 5076411 +1498 5071579 +1642 5055917 +1989 5044992 +1290 5034040 +2643 5023350 +2097 5022728 +1762 5015331 +44 5015012 +479 5008250 +1775 5005597 +2706 5005225 +1909 4997835 +1866 4990678 +1566 4981122 +1336 4950527 +757 4941775 +2063 4937397 +2648 4929257 +293 4925771 +1464 4911409 +2184 4905422 +75 4894914 +392 4889056 +1487 4877439 +1064 4865397 +24 4854296 +1080 4852641 +569 4850805 +1971 4849650 +1605 4847284 +1182 4846260 +1938 4828745 +857 4805157 +1535 4772487 +285 4766512 +1176 4764682 +966 4760238 +2277 4743915 +764 4731348 +1377 4728044 +1479 4720640 +1539 4714734 +1085 4700915 +1811 4696785 +2274 4657189 +869 4652756 +45 4649060 +1099 4644445 +1394 4638480 +1280 4637662 +3000 4618471 +1577 4618338 +544 4614094 +2805 4608260 +35 4606393 +2351 4602990 +1629 4586377 +1661 4584310 +2003 4567755 +49 4546496 +1478 4546419 +2795 4542206 +2828 4536638 +1248 4526225 +1593 4511113 +69 4502347 +2457 4489997 +1511 4484539 +1881 4472447 +47 4460868 +1708 4455874 +1097 4450969 +1551 4445924 +1660 4433465 +1785 4424736 +1627 4412038 +1445 4401529 +2594 4393865 +1719 4389889 +1649 4380458 +2444 4375390 +4287 4371881 +417 4370421 +716 4365322 +1168 4364296 +1735 4360877 +1621 4331683 +2233 4330036 +3249 4325097 +42 4320291 +1276 4314178 +1829 4314165 +1884 4312560 +38 4306679 +2555 4304404 +2084 4296432 +2151 4287967 +1688 4285173 +2831 4280062 +1342 4276707 +1270 4239023 +555 4236623 +1327 4234882 +2139 4227635 +1467 4219535 +2045 4208129 +2714 4198810 +303 4195216 +1771 4185532 +2901 4181081 +2077 4179380 +1863 4178336 +1965 4175165 +2067 4175032 +1716 4169486 +2651 4155215 +2267 4147302 +2607 4145837 +1964 4133545 +1462 4126396 +1978 4126179 +1972 4121510 +1410 4119035 +1679 4106021 +51 4105433 +1871 4105196 +2406 4102924 +2551 4097217 +2008 4097102 +1853 4093083 +70 4092580 +2293 4087207 +2324 4083845 +43 4083418 +1337 4082770 +1813 4079395 +1695 4077362 +960 4077221 +264 4076617 +2688 4072140 +1183 4070745 +1414 4064159 +1474 4057078 +2282 4053024 +3414 4042308 +1430 4037596 +3035 4031658 +1103 4018655 
+2059 4015508 +2080 4011218 +2969 4005397 +1919 4000144 +1969 3997080 +316 3993493 +1459 3984707 +1521 3978369 +37 3969953 +1675 3968119 +3009 3965772 +996 3965252 +1596 3943972 +2263 3941497 +3457 3938197 +1450 3937663 +86 3925608 +2058 3919358 +1636 3917053 +1804 3911665 +1429 3909332 +1757 3906464 +354 3891128 +405 3889614 +3176 3888837 +1877 3882885 +1576 3878715 +2893 3873729 +2252 3872753 +1281 3863057 +1254 3862011 +301 3858937 +1048 3852378 +3203 3851712 +2159 3847206 +1626 3842112 +324 3832573 +1760 3832298 +1169 3818301 +2739 3816048 +1687 3814765 +1595 3811813 +1517 3803324 +2260 3791037 +1693 3787455 +1262 3785788 +2102 3779927 +291 3777762 +1923 3777288 +1700 3776768 +2157 3771717 +1378 3757930 +2732 3755186 +79 3754633 +1854 3745778 +3269 3737875 +1502 3737616 +685 3723444 +4200 3719859 +1865 3719356 +128 3715003 +1402 3710786 +2168 3703789 +1986 3699485 +1867 3696280 +2026 3695382 +1683 3694355 +2961 3691575 +1842 3680139 +929 3678416 +2489 3665779 +1052 3661007 +396 3659796 +3329 3643884 +2669 3638778 +1862 3622381 +3452 3605249 +3794 3602291 +2111 3590156 +2046 3587528 +2957 3581398 +1913 3580361 +1441 3577048 +1241 3568130 +46 3565250 +2811 3562952 +2278 3561460 +1998 3550042 +461 3548528 +1744 3532659 +1975 3526648 +2291 3521569 +3056 3518738 +2904 3518633 +1752 3511388 +1900 3510560 +2626 3506312 +1654 3504513 +385 3502364 +2745 3487380 +2057 3487072 +3136 3485766 +7955 3485171 +4139 3481632 +2415 3480433 +2148 3480049 +1628 3467067 +2071 3466219 +2107 3463315 +940 3460357 +1598 3457691 +258 3455533 +1575 3454361 +2826 3452135 +2716 3449165 +3985 3445703 +85 3445461 +1987 3443747 +3598 3439871 +3352 3431656 +2478 3424520 +333 3421332 +246 3418219 +2620 3415717 +1212 3412196 +2450 3409727 +1247 3405540 +1912 3399689 +36 3395391 +346 3391001 +3426 3380470 +3298 3379545 +3292 3377200 +2250 3371380 +2440 3369691 +3061 3367419 +39 3363104 +978 3353736 +1802 3350527 +2431 3348906 +3071 3340128 +2253 3337972 +2494 3334848 +609 3333865 +2310 3329148 +986 3328812 +2635 3325356 +3437 3320853 +2292 3319741 +2823 3308131 +1588 3303360 +269 3302371 +275 3284415 +60 3282646 +2428 3276582 +1918 3276387 +2615 3273427 +2472 3272517 +1690 3267675 +410 3265844 +2678 3262749 +2106 3260749 +2354 3251238 +2717 3247356 +678 3244526 +1109 3242666 +3334 3241700 +3451 3238451 +320 3236458 +3230 3233294 +3389 3229315 +2166 3227294 +1611 3224985 +1994 3213613 +430 3209260 +2986 3199943 +1790 3194716 +1438 3193856 +4784 3192749 +1781 3170903 +302 3166428 +2227 3162561 +54 3145229 +2693 3138924 +1393 3138049 +2597 3137970 +2482 3137124 +3034 3122439 +1946 3121857 +2863 3119047 +3267 3115876 +2041 3113770 +1743 3107914 +2476 3105231 +388 3102434 +300 3100235 +3186 3098789 +1729 3098376 +2488 3094662 +5018 3092842 +4058 3079283 +2156 3078111 +52 3074167 +3096 3072323 +1468 3071877 +2497 3070835 +2793 3050336 +3427 3047066 +1630 3040837 +3284 3037800 +3624 3034708 +2650 3033943 +2785 3033180 +1807 3027961 +3645 3026379 +2691 3025436 +3106 3024747 +3037 3023165 +3759 3023164 +312 3020879 +1767 3018684 +2526 3018183 +666 3015679 +3139 3012306 +3085 3009667 +2223 3002610 +4041 3002353 +2712 3001744 +1838 2997522 +2048 2983869 +2854 2981556 +2534 2972131 +308 2969299 +2646 2967019 +3016 2965071 +3337 2960427 +3187 2957831 +4912 2956818 +3331 2956176 +1643 2956098 +2722 2953729 +2932 2951114 +2422 2950537 +2399 2948398 +500 2946582 +4039 2945677 +3961 2944538 +2222 2943764 +3078 2943739 +4275 2942029 +1724 2934719 +911 2931322 +3296 2930626 +384 2925764 +2319 2924706 +1238 2912540 +1911 2911206 +53 2910401 
+2005 2910213 +2923 2909079 +1303 2908146 +4536 2904452 +2921 2898494 +3530 2896507 +343 2894182 +575 2892577 +3058 2891202 +277 2889780 +323 2886056 +710 2881312 +660 2874230 +1949 2873478 +3250 2868743 +225 2861798 +41 2858852 +1808 2848588 +1021 2846040 +3773 2842914 +7713 2841920 +540 2838877 +2137 2837279 +2750 2836122 +3271 2833311 +2994 2832696 +397 2832081 +2174 2831245 +2630 2825882 +1073 2823768 +378 2822150 +2491 2819311 +403 2817676 +2540 2811122 +2060 2808168 +2214 2807667 +2242 2804699 +3554 2801970 +266 2800975 +3442 2799863 +5544 2795504 +1682 2795443 +1351 2777650 +297 2776601 +3155 2770111 +2050 2768526 +3466 2759754 +1544 2759525 +993 2754965 +3340 2752396 +8591 2751808 +1255 2750444 +1895 2750214 +3015 2746600 +3125 2744902 +3945 2744846 +6426 2744124 +2897 2740354 +1309 2739832 +959 2737933 +2822 2737646 +1368 2733555 +2042 2730078 +374 2728295 +3006 2714274 +2245 2700521 +2928 2694744 +2872 2687504 +4896 2686827 +4297 2685685 +2766 2685288 +444 2682283 +2888 2681984 +1200 2679658 +2975 2678829 +377 2675721 +1988 2675064 +2523 2673705 +1583 2671163 +1024 2667070 +415 2666262 +3576 2658993 +2119 2657291 +2647 2648808 +3227 2648233 +1997 2646862 +4081 2645756 +4094 2645293 +1633 2637801 +1917 2637232 +2276 2635825 +2492 2634522 +1312 2634263 +2839 2633915 +2592 2632902 +3662 2624861 +3224 2624698 +1766 2624083 +3663 2624035 +1745 2621047 +5 2620736 +2300 2619855 +4664 2619338 +3430 2619137 +2130 2618208 +6184 2618030 +3687 2611608 +13130 2607739 +2637 2602497 +2622 2597101 +3700 2596588 +2435 2591941 +2158 2587673 +2279 2584888 +2506 2577787 +3724 2574566 +2950 2573209 +2460 2568568 +2125 2566267 +2861 2562749 +1134 2549917 +5454 2544616 +3751 2536696 +1858 2535706 +2579 2530192 +1826 2529534 +2608 2528860 +2681 2527523 +56 2526960 +3814 2525489 +4332 2524158 +2735 2523828 +3367 2523419 +2272 2516165 +3756 2511014 +2585 2509794 +5041 2503584 +4248 2503218 +2802 2502456 +2180 2500659 +3482 2499158 +3899 2496197 +2666 2495174 +395 2490074 +368 2486179 +1976 2484836 +2773 2481413 +669 2475721 +448 2470404 +1314 2468787 +1175 2466968 +3052 2465830 +3491 2465693 +55 2458697 +305 2457793 +2496 2455621 +2241 2454504 +1210 2453199 +2031 2450641 +3111 2447239 +2568 2446982 +7781 2446876 +1635 2445064 +2582 2444049 +2613 2443100 +3195 2441989 +5079 2432713 +2211 2428055 +3234 2426818 +4037 2426428 +1549 2425595 +5991 2421705 +4495 2417713 +952 2416570 +267 2415840 +2458 2414786 +328 2414598 +3790 2413453 +2641 2411736 +1296 2408790 +3199 2407660 +3072 2407258 +763 2402573 +2742 2402326 +4640 2400825 +1907 2399123 +654 2395466 +2911 2394784 +3931 2392276 +3818 2385503 +4346 2385039 +3119 2384203 +31 2383468 +1492 2381026 +3397 2380456 +3484 2379083 +330 2378895 +4706 2377588 +2251 2376885 +2479 2374155 +3053 2371784 +5939 2368766 +1388 2368606 +1692 2366880 +1908 2363611 +4542 2356967 +3596 2356312 +1122 2352389 +5003 2349430 +2962 2349359 +1607 2349127 +3047 2347321 +2627 2346853 +3025 2342305 +2995 2337450 +2835 2335936 +1004 2333657 +3214 2332768 +2029 2332229 +13440 2330317 +1561 2324920 +3074 2315814 +380 2312070 +515 2311421 +365 2310632 +3382 2306041 +363 2305310 +3160 2304973 +296 2303562 +435 2300111 +3512 2297054 +3747 2295650 +1334 2294588 +4281 2294310 +2614 2292185 +2524 2284943 +3394 2281882 +3095 2281714 +2147 2281124 +2187 2279131 +2855 2275319 +2702 2272741 +3517 2271280 +325 2268773 +3520 2268531 +1065 2264589 +3215 2261296 +4395 2260201 +2652 2259754 +1120 2259547 +648 2258982 +4436 2257188 +2089 2255914 +2209 2255859 +4969 2249342 +3022 2248989 +4530 2248944 
+2330 2247322 +3261 2246707 +1532 2245244 +12042 2243318 +2270 2243237 +1657 2239438 +2846 2238923 +3999 2237519 +3845 2237334 +2271 2233955 +2126 2233086 +499 2231417 +1659 2229762 +5193 2228394 +3688 2226618 +1382 2226353 +446 2225805 +818 2224123 +2092 2222423 +3623 2220823 +5373 2220082 +2321 2219693 +5057 2219662 +1526 2219349 +5169 2218821 +2912 2217212 +3220 2216122 +3315 2215806 +2808 2214953 +2365 2214929 +2628 2212471 +3805 2211616 +2121 2211232 +1560 2204752 +3259 2204036 +3240 2203146 +2634 2202115 +3656 2200682 +2683 2199255 +3767 2197871 +2612 2193603 +1138 2190750 +3181 2189669 +4193 2189626 +3162 2186721 +2239 2185546 +2988 2185413 +2589 2185214 +1720 2185135 +2192 2184857 +4387 2184827 +4038 2180149 +4492 2178553 +1249 2178508 +3599 2177234 +3988 2176292 +4519 2175412 +2010 2173733 +3965 2173661 +4149 2170484 +3833 2170048 +3017 2169734 +1100 2168903 +4884 2168582 +349 2167597 +2035 2164690 +4040 2163330 +3407 2162686 +3415 2161099 +680 2159475 +1399 2158929 +4661 2157800 +1228 2155490 +551 2154941 +87 2154822 +4376 2154446 +559 2153394 +2392 2153055 +315 2150386 +2342 2150379 +3936 2150034 +1639 2148662 +2837 2143071 +358 2142250 +5153 2141503 +3793 2139387 +2940 2139279 +3126 2139153 +8358 2138790 +1477 2137623 +2720 2137372 +2138 2135247 +5085 2134103 +3957 2132520 +662 2131237 +3432 2125446 +2663 2125055 +8428 2121327 +2408 2121234 +1051 2121150 +3012 2120925 +3740 2120211 +3362 2118913 +2877 2111347 +820 2109304 +1195 2108223 +528 2107435 +2297 2104723 +1956 2104400 +2990 2101876 +5567 2098776 +62 2098771 +2312 2098237 +1570 2097507 +4086 2094834 +1738 2094385 +3142 2094215 +4505 2092752 +1031 2081290 +797 2079864 +2900 2079818 +1157 2079615 +3277 2079046 +3492 2078225 +1803 2078060 +4466 2077826 +4445 2076000 +5953 2074891 +4172 2072344 +2383 2070424 +3274 2062552 +2405 2061430 +474 2059389 +4539 2058329 +5371 2054600 +2753 2053126 +3377 2051234 +2884 2050465 +313 2048274 +1437 2048095 +1495 2047894 +1044 2047797 +1672 2046882 +4773 2046230 +3128 2045207 +4444 2043458 +439 2043235 +12637 2038256 +5692 2037358 +504 2035309 +3307 2032332 +2323 2031475 +2495 2028884 +4196 2027995 +341 2026813 +4176 2023899 +5834 2020359 +4744 2020302 +2563 2016917 +2882 2013222 +505 2010517 +3707 2009054 +1612 2007764 +4652 2006805 +2687 2006766 +5342 1994135 +3670 1992183 +4375 1990482 +2761 1988748 +4756 1987768 +3403 1986706 +2266 1985660 +3066 1985309 +129 1983048 +4481 1981282 +4354 1979172 +2033 1978125 +4576 1977385 +1943 1973375 +2370 1972674 +2486 1971043 +3090 1969680 +2810 1969527 +5401 1967900 +4381 1967895 +3800 1966998 +641 1966963 +2776 1966928 +3611 1965109 +6567 1965030 +3710 1963705 +803 1963552 +1332 1963452 +1600 1962893 +5442 1959629 +2936 1959617 +2723 1956891 +57 1956274 +2331 1955550 +2728 1955126 +4266 1954189 +3708 1952903 +4155 1951761 +3236 1951553 +2011 1949390 +3893 1944470 +3715 1943834 +3501 1942268 +3641 1942249 +3967 1941157 +5695 1939245 +3946 1939135 +1848 1938862 +2982 1935927 +3081 1935780 +2842 1935540 +3393 1929631 +4409 1927034 +533 1926581 +1208 1925558 +6123 1924601 +3328 1919903 +3073 1919157 +5478 1915260 +3690 1915130 +992 1914387 +3519 1914014 +3677 1913098 +4479 1909848 +5555 1908826 +353 1908475 +2952 1907503 +1374 1906721 +2972 1906704 +3151 1906344 +2298 1905945 +5047 1905773 +2407 1905690 +298 1904852 +80 1903294 +1040 1903278 +4590 1902254 +1833 1899229 +5595 1897899 +4251 1897347 +2610 1895735 +2724 1894339 +3626 1892750 +4244 1890732 +4318 1889199 +4957 1886758 +2775 1885004 +5095 1881967 +4721 1880372 +7198 1879205 +3892 1878736 
+1558 1875025 +3884 1874312 +3942 1869131 +3206 1868585 +2499 1868244 +2562 1867987 +1507 1866658 +4838 1862252 +289 1862164 +1645 1859225 +2700 1855499 +1754 1855200 +3772 1851086 +4888 1850666 +4045 1848988 +3867 1847435 +157 1843867 +4493 1842927 +2619 1838354 +4786 1836011 +1398 1834121 +3088 1832988 +4120 1831679 +2695 1831610 +5665 1829988 +3381 1828595 +2427 1828369 +4452 1825877 +4477 1823113 +2974 1821399 +6672 1821145 +1814 1821079 +5466 1821016 +4639 1820572 +1265 1819995 +2314 1818041 +1008 1816897 +437 1815421 +3423 1814262 +3245 1812767 +3650 1811355 +2503 1810984 +1728 1810732 +2116 1810704 +8872 1806001 +2332 1805620 +2968 1804542 +1061 1803718 +4581 1802885 +1525 1802513 +3888 1799212 +2925 1794822 +2727 1794109 +1302 1794082 +4560 1793094 +3114 1791585 +3513 1791504 +709 1790178 +4783 1789889 +4488 1789224 +2832 1789080 +2746 1788885 +5652 1787699 +331 1785106 +4865 1784176 +3739 1782930 +4586 1782099 +3877 1780375 +5059 1780266 +2869 1779960 +4485 1778625 +6130 1774579 +3940 1772616 +4271 1768597 +2985 1768596 +4918 1768371 +5030 1768004 +220 1767607 +280 1766245 +1872 1766073 +3706 1765293 +1545 1759778 +5267 1758636 +3761 1757768 +5491 1757560 +2104 1756508 +7313 1756194 +4054 1755775 +2574 1753800 +3667 1753746 +3651 1753739 +4405 1752238 +571 1751391 +4308 1751021 +5437 1750489 +1944 1749114 +1355 1748662 +4274 1746718 +8278 1743712 +578 1742168 +2694 1741205 +2656 1738365 +2636 1738002 +4513 1737002 +2185 1736027 +5449 1728990 +2813 1728933 +2993 1728096 +2587 1728026 +615 1727522 +4881 1726170 +4776 1724711 +2565 1724643 +2726 1722884 +4999 1718878 +3098 1718690 +3685 1717832 +2987 1717445 +1559 1716265 +10205 1715772 +3486 1713237 +427 1712990 +2583 1712089 +4995 1711510 +3871 1710985 +5134 1710681 +4902 1710660 +4602 1710486 +620 1710125 +5537 1709921 +3170 1709734 +1892 1706520 +3173 1705945 +2073 1704583 +5011 1703887 +2346 1703108 +786 1702332 +4059 1699595 +1844 1697348 +295 1696098 +4473 1694758 +4696 1694438 +4280 1692697 +3241 1692185 +3067 1691819 +5694 1688571 +563 1687198 +4987 1687182 +2000 1686158 +1982 1680689 +4317 1679080 +2800 1678829 +2743 1676341 +3782 1676203 +4136 1675550 +4141 1675316 +3011 1674431 +404 1671943 +4508 1671300 +3848 1671224 +6341 1671141 +1486 1670636 +3338 1670469 +4394 1670223 +3357 1669888 +3226 1669751 +6557 1668766 +1359 1668112 +2081 1664853 +2364 1664159 +3146 1663984 +1257 1663233 +2193 1661698 +3294 1659449 +1148 1658760 +411 1658023 +874 1655097 +2219 1654452 +1143 1654129 +4427 1653889 +5583 1651423 +3954 1651313 +2779 1651054 +3216 1650840 +5396 1650681 +2150 1649743 +3127 1647955 +4373 1645141 +3164 1644786 +1433 1644622 +5201 1644557 +2423 1643559 +2672 1641242 +598 1640066 +1869 1639116 +3926 1637219 +6956 1635903 +4390 1635697 +1485 1635611 +6180 1633463 +4186 1632566 +11033 1632005 +10575 1631124 +5398 1630774 +4152 1630130 +6523 1629814 +2708 1628267 +3371 1627540 +4842 1624633 +3807 1624226 +1415 1621531 +414 1620770 +3386 1620487 +5180 1618946 +4956 1618827 +3033 1618817 +4854 1618654 +1395 1617375 +4418 1616971 +3275 1614077 +496 1613939 +4367 1612988 +3115 1611458 +4138 1611246 +707 1609966 +3941 1609655 +943 1608211 +4689 1605650 +541 1602489 +1220 1600656 +5856 1600558 +3465 1599819 +2014 1599485 +2642 1597905 +361 1597790 +3804 1596713 +4930 1595012 +4656 1594232 +4032 1591856 +2094 1591768 +4486 1590377 +3850 1589189 +3417 1587932 +4068 1587210 +6484 1587158 +3573 1586303 +751 1584930 +5273 1584119 +1001 1582810 +4511 1582438 +1350 1582111 +5682 1581323 +5701 1579275 +3750 1578369 +1215 1578106 +1288 
1575560 +6443 1574356 +2456 1574032 +4305 1572178 +6786 1570930 +1722 1567616 +5070 1564834 +2391 1563465 +5229 1561859 +4992 1560941 +5828 1560570 +689 1560524 +1000 1560250 +3469 1559413 +3 1558890 +375 1558286 +4789 1557386 +3288 1557204 +5361 1556430 +829 1555089 +602 1553100 +6502 1552435 +3869 1550400 +482 1550363 +3769 1550026 +7 1548704 +695 1548541 +2221 1548436 +3615 1544415 +1891 1544125 +6186 1543528 +1129 1542997 +3741 1540855 +3412 1539970 +2198 1539205 +6193 1536595 +4009 1535736 +3923 1530812 +1879 1530386 +4137 1528607 +4502 1527678 +2513 1527637 +506 1527207 +5717 1527072 +4796 1526999 +5611 1525342 +4438 1524952 +7324 1524876 +5103 1524376 +3932 1523931 +4983 1523249 +2644 1522609 +3354 1522133 +4890 1521795 +3730 1521634 +42159 1521590 +1077 1521212 +1203 1518704 +5531 1515641 +4606 1515536 +3218 1515150 +6745 1513200 +5006 1513115 +5890 1512226 +5052 1511945 +4970 1510725 +6714 1508429 +4030 1506853 +1245 1505561 +3675 1504974 +4426 1504340 +3375 1503790 +3177 1503516 +382 1502511 +39711 1501599 +336 1500357 +1961 1499155 +3504 1498266 +5298 1496420 +732 1496386 +4752 1496009 +2879 1495099 +4260 1494843 +1041 1494107 +4746 1494014 +2556 1493772 +2074 1493385 +8734 1492850 +4979 1492643 +5510 1489374 +3521 1488249 +3812 1487987 +2989 1487864 +1537 1487614 +4219 1485195 +6686 1483477 +2504 1482854 +3584 1481993 +4568 1481365 +3421 1481356 +4237 1481211 +4814 1480648 +334 1479315 +3439 1479269 +3002 1477903 +1417 1477126 +930 1475368 +322 1474128 +1546 1473723 +4647 1473119 +488 1473092 +4073 1473078 +7024 1472792 +1531 1472346 +1797 1471634 +3625 1471454 +2409 1470321 +2196 1468500 +1663 1466759 +5672 1466160 +3689 1465868 +1981 1465495 +2858 1464549 +5199 1464208 +6916 1463903 +3574 1462903 +525 1461357 +3301 1459805 +3341 1459758 +1805 1458557 +2677 1458359 +6821 1458176 +3443 1458132 +4523 1457686 +1821 1456407 +8064 1455853 +2485 1453972 +7648 1453898 +8549 1453728 +3701 1452597 +344 1452384 +5471 1451483 +434 1451045 +624 1450197 +386 1449819 +719 1449119 +3859 1448585 +3092 1447520 +5166 1447492 +5323 1447286 +4585 1446385 +560 1444813 +4056 1444800 +7530 1442047 +3049 1440630 +5399 1440467 +1810 1439368 +39883 1438860 +4619 1438062 +6047 1437307 +4100 1435643 +4403 1435094 +6233 1434841 +4875 1431868 +2215 1430928 +518 1430530 +6798 1430070 +4379 1429963 +2191 1429101 +5212 1428466 +5926 1426982 +3726 1426439 +5127 1425739 +4608 1425400 +3770 1423320 +684 1423301 +3399 1423143 +549 1422979 +1471 1422620 +1326 1422545 +4034 1422191 +4414 1421600 +5260 1421418 +3285 1421044 +4671 1419983 +4388 1419806 +2329 1419508 +3117 1418665 +2512 1418078 +3614 1417918 +5296 1417278 +4380 1416914 +4691 1415979 +1242 1415603 +1042 1415013 +3094 1413700 +3059 1413465 +4019 1411855 +5658 1410605 +4101 1410072 +14420 1409152 +2576 1408914 +7229 1406591 +6095 1406131 +3729 1402106 +6168 1401550 +5535 1401292 +5348 1399444 +3131 1399224 +3050 1399104 +1108 1399004 +4769 1398370 +516 1395487 +5802 1392661 +671 1392187 +5175 1391611 +12216 1390973 +2141 1390969 +3764 1390483 +5158 1389181 +4504 1386980 +2546 1384925 +4301 1384339 +10767 1383934 +6011 1383767 +1793 1383582 +4809 1380537 +2443 1380319 +372 1379345 +2769 1378891 +3822 1378669 +4065 1378474 +7092 1378135 +576 1377793 +8060 1377625 +5150 1375001 +391 1374575 +6246 1374163 +5618 1372598 +4392 1372508 +442 1371294 +2372 1370549 +7636 1368619 +4734 1367530 +2679 1367016 +4811 1365243 +5068 1363461 +2751 1362395 +4986 1361444 +48 1361374 +7638 1360237 +3064 1359735 +9747 1358672 +5747 1358332 +2947 1357419 +2692 1356699 +3613 
1356588 +912 1355680 +359 1353287 +4268 1353229 +10395 1351988 +4650 1350702 +3368 1350513 +939 1350079 +452 1349984 +2465 1349409 +4006 1347932 +5710 1347611 +3420 1346666 +5410 1346311 +557 1346053 +2657 1345538 +3562 1344045 +4646 1343369 +1565 1343288 +791 1342812 +3841 1342808 +3244 1342552 +8301 1340601 +15069 1340247 +6290 1339930 +5780 1339030 +5928 1338936 +2231 1338201 +469 1338045 +5123 1337766 +4046 1337492 +1370 1337438 +5742 1336548 +6027 1336530 +3895 1335611 +463 1334739 +2099 1333298 +2763 1332891 +5094 1332269 +6592 1330873 +5096 1330694 +3434 1330495 +11421 1330487 +11092 1330119 +4133 1329427 +2611 1328981 +1701 1328763 +1703 1326543 +2362 1326290 +4574 1325119 +6136 1323601 +1954 1320496 +6334 1318454 +8464 1316365 +2910 1316215 +5093 1315678 +3158 1315478 +5156 1314587 +4314 1314066 +4341 1314003 +409 1313875 +5054 1312722 +4326 1312569 +3327 1311762 +4587 1310546 +7994 1310530 +3436 1310399 +1634 1309798 +4787 1308208 +3842 1307129 +3336 1303877 +1453 1303867 +527 1303388 +3819 1302492 +4870 1302035 +1711 1301722 +4736 1301545 +3280 1301512 +485 1300709 +3938 1300533 +988 1299815 +1023 1299461 +5197 1298056 +3691 1298022 +3785 1297957 +3265 1296350 +2398 1296348 +6729 1296290 +5033 1295463 +3409 1294973 +5913 1291479 +4654 1290537 +7630 1288623 +3709 1286675 +6376 1286193 +3781 1285807 +7756 1285579 +10171 1285322 +2818 1284996 +1014 1282901 +4097 1282603 +4813 1282045 +3038 1281458 +5341 1281418 +420 1281046 +4955 1280803 +8836 1279703 +1731 1278908 +3636 1278857 +3392 1278431 +7392 1277991 +3194 1276594 +4446 1276547 +4540 1275393 +4099 1274105 +4788 1273253 +4167 1272413 +459 1272249 +1948 1271480 +1640 1270835 +7176 1270571 +4429 1269217 +4042 1269107 +2740 1268901 +5866 1268234 +4737 1267826 +6299 1267725 +8150 1266632 +3099 1265853 +19398 1265684 +1878 1265115 +4893 1265091 +2318 1263652 +5627 1263447 +8909 1263276 +5174 1263166 +2829 1261919 +4688 1261278 +6289 1261136 +4765 1258870 +4525 1258646 +4940 1258077 +433 1257886 +4675 1257299 +3950 1257106 +4334 1255854 +4371 1255469 +4478 1255023 +6459 1254251 +3958 1253900 +5364 1253811 +2327 1253249 +119 1252957 +3264 1252530 +3588 1252368 +1624 1252185 +4885 1251484 +521 1251476 +3607 1251163 +7420 1251114 +3572 1250281 +536 1250114 +6325 1249281 +5140 1249176 +5284 1248712 +1828 1247137 +4978 1245733 +4708 1244437 +6825 1243796 +3968 1241799 +6280 1240647 +6835 1240422 +12052 1240210 +6023 1236586 +1283 1236065 +5804 1235634 +4762 1234227 +2095 1233277 +2447 1232432 +445 1231550 +3734 1231293 +3161 1231023 +3612 1230155 +2638 1229977 +1343 1228431 +6516 1227971 +5137 1227542 +7014 1227326 +6638 1226525 +2275 1225586 +5670 1225366 +15862 1224702 +5426 1222773 +1338 1222222 +4569 1222157 +12499 1221455 +4043 1221352 +2043 1221192 +2581 1221146 +622 1220070 +1547 1219481 +1096 1219401 +5461 1219355 +3657 1219079 +4966 1218169 +1137 1218103 +5366 1217930 +2866 1216988 +3356 1216655 +4641 1215786 +5163 1214894 +1656 1214167 +3825 1213380 +443 1212646 +3863 1212582 +6078 1212500 +2426 1212420 +425 1212037 +647 1211602 +7320 1211287 +1086 1211104 +3568 1210892 +4621 1210058 +5741 1209984 +4763 1209467 +4698 1209089 +2948 1208943 +2461 1208205 +1714 1207871 +7372 1205825 +5689 1205245 +4533 1205189 +3084 1205002 +5112 1204702 +3148 1204416 +4205 1203481 +3933 1202908 +11302 1202825 +7968 1201690 +4410 1200581 +6491 1199580 +6884 1199477 +9027 1199088 +6476 1197258 +2402 1196830 +3272 1196577 +4946 1196366 +1847 1195644 +2068 1195640 +2230 1194751 +5668 1194054 +6841 1193691 +6628 1192538 +5617 1192353 +3704 1192118 
+454 1190907 +4113 1190819 +3360 1188180 +3595 1188055 +6388 1186942 +3024 1186481 +5676 1186075 +2616 1185977 +5076 1185573 +6997 1185148 +1573 1184210 +2502 1184197 +3956 1183015 +3306 1182352 +4191 1182121 +2907 1181996 +3758 1181687 +2933 1181593 +12820 1181273 +3505 1181150 +2481 1181067 +4258 1180761 +5339 1179754 +4497 1178907 +4705 1178786 +8424 1178672 +6989 1178672 +7225 1178011 +519 1177463 +5613 1177032 +837 1176942 +4203 1176560 +2736 1176366 +7067 1176283 +726 1175669 +2926 1175583 +5839 1175102 +4028 1175019 +6898 1174845 +46444 1173449 +3518 1172575 +2176 1172215 +3344 1171044 +6682 1170042 +4286 1170039 +4075 1168934 +3925 1168605 +3516 1168434 +7915 1167772 +6314 1167088 +1666 1166890 +6586 1165644 +3896 1164453 +1205 1163886 +4315 1163570 +6717 1163432 +4048 1163167 +6301 1162084 +7281 1161824 +3589 1161761 +1045 1161449 +3335 1160825 +3840 1160664 +4406 1160456 +4330 1160159 +1340 1159444 +871 1158633 +5995 1158628 +3722 1158586 +4618 1158428 +5010 1157604 +4564 1157265 +3190 1156679 +4369 1156412 +4047 1154902 +6342 1154157 +5707 1154075 +4393 1153892 +3355 1153701 +1413 1152770 +6542 1152768 +6447 1152716 +7366 1152389 +5382 1152114 +4683 1151509 +2311 1150108 +2328 1149589 +5179 1149446 +3496 1149070 +4206 1148604 +3748 1146891 +4425 1146252 +5311 1145728 +6702 1145147 +4398 1144278 +3952 1144184 +4795 1143514 +4900 1141559 +6858 1140893 +3348 1140558 +4166 1140194 +7396 1138944 +1277 1138668 +5001 1137748 +3446 1137430 +10662 1136646 +4858 1136361 +3091 1136277 +8783 1135186 +2605 1135155 +3809 1134429 +3251 1134183 +6983 1133029 +4673 1132793 +6025 1131898 +473 1131860 +1582 1131777 +8900 1131533 +5942 1131397 +5448 1131115 +5845 1131004 +3665 1130444 +4153 1129425 +3580 1129156 +6612 1129095 +5394 1129005 +30494 1128381 +421 1128217 +2883 1128149 +6922 1128102 +7044 1128058 +3788 1126787 +591 1126267 +4067 1123860 +4632 1123334 +5871 1122772 +3653 1121714 +6265 1121635 +2061 1121351 +1709 1121162 +3648 1120782 +7172 1120292 +2112 1120153 +2566 1119761 +4165 1119543 +2039 1118103 +4077 1117991 +5213 1117193 +2939 1116899 +9952 1116227 +11214 1115457 +6294 1115417 +6182 1114988 +1236 1114768 +4676 1113847 +14018 1113731 +5818 1113054 +6568 1112793 +3649 1112551 +7945 1111652 +4290 1111506 +11063 1111148 +7478 1110529 +5664 1110127 +2561 1108755 +11419 1108318 +599 1108264 +7055 1107776 +5025 1107245 +4151 1106800 +6699 1106003 +3660 1105998 +5007 1105989 +4753 1105892 +4122 1103755 +3951 1103085 +4819 1103013 +7541 1102865 +6308 1101995 +5081 1101551 +3827 1101551 +6348 1101534 +7025 1101049 +5520 1100898 +4713 1100782 +8406 1099966 +5257 1099287 +3592 1099115 +9689 1098471 +3497 1097861 +4430 1097362 +9912 1097102 +8153 1097019 +6108 1096435 +6154 1096150 +5502 1094500 +7425 1094293 +7127 1093101 +3283 1093034 +4624 1092815 +7000 1092519 +6241 1091967 +17560 1091497 +512 1090181 +5403 1090022 +6081 1088193 +3359 1088146 +3221 1087980 +7082 1087741 +3290 1087438 +1921 1086770 +10169 1085547 +3717 1084848 +4963 1084155 +3487 1083889 +4512 1083842 +781 1083714 +3402 1082905 +1717 1082724 +3297 1082684 +7910 1082114 +6934 1081913 +4914 1080997 +5504 1080481 +7415 1079818 +4441 1079514 +4928 1079417 +1323 1078936 +5764 1078723 +13598 1078487 +5205 1078470 +4747 1078344 +9068 1077920 +18015 1077721 +7008 1077624 +9502 1077051 +3919 1076785 +7939 1076621 +4461 1076561 +6355 1076297 +3026 1074645 +6046 1074406 +453 1073993 +2128 1073157 +3594 1073046 +2149 1072456 +13520 1072257 +5290 1071604 +5675 1070845 +4050 1070287 +5451 1069447 +6643 1069445 +4336 1069141 +431 
[... several thousand further "+<value> <value>" diff lines of two-column numeric data, sorted in descending order of the second column, omitted ...]
284575 +13404 284454 +4982 284434 +6751 284376 +10008 284360 +6096 284343 +14999 284301 +10087 284270 +11918 284068 +11699 284065 +10701 283996 +9583 283931 +6291 283902 +15196 283850 +15416 283835 +14521 283648 +13723 283591 +7993 283539 +8559 283442 +7650 283209 +11552 283208 +7929 283037 +18106 283014 +11065 283009 +9793 282858 +10860 282804 +2189 282772 +8717 282753 +2849 282749 +4014 282718 +7900 282712 +7335 282598 +14225 282574 +20228 282555 +12633 282528 +10824 282517 +11455 282474 +552 282383 +11954 282283 +3911 282276 +6757 282252 +8840 282235 +8805 282201 +19375 282196 +8580 282166 +6738 282152 +10906 282124 +13325 282069 +12111 282057 +7193 282053 +13004 281987 +904 281961 +15579 281954 +11003 281952 +17608 281805 +8135 281776 +12308 281762 +5994 281742 +12689 281646 +13678 281637 +3400 281600 +17151 281552 +20176 281468 +6381 281450 +10420 281445 +16105 281417 +6283 281376 +16774 281323 +12865 281321 +9675 281161 +21439 281160 +10073 281140 +15012 281072 +14820 281032 +4420 281012 +13389 280931 +15278 280924 +14707 280864 +11693 280834 +20989 280800 +9061 280631 +7811 280622 +17702 280585 +19712 280517 +18085 280506 +15792 280423 +19747 280412 +8315 280313 +39683 280210 +4679 280163 +14674 280064 +23039 280029 +20707 280003 +21014 279940 +10250 279930 +11936 279904 +15062 279814 +13588 279790 +11975 279759 +16528 279751 +24687 279670 +15616 279652 +11119 279609 +20355 279492 +9458 279484 +2603 279343 +6335 279249 +2340 279229 +17423 279191 +23623 279109 +17325 279054 +3570 278931 +13551 278846 +12995 278839 +18168 278838 +5781 278820 +9986 278790 +4112 278768 +13184 278726 +15890 278588 +18678 278548 +14213 278519 +1979 278509 +16420 278503 +13111 278460 +3846 278452 +5643 278436 +872 278359 +11868 278353 +12688 278278 +14273 278220 +40466 278181 +20700 278103 +14888 277999 +10218 277984 +16940 277960 +2072 277862 +12348 277847 +35946 277842 +16029 277774 +1013 277769 +28840 277738 +2896 277683 +15505 277680 +6446 277613 +8691 277549 +1298 277514 +14752 277419 +5714 277364 +8058 277325 +20894 277293 +16434 277271 +32115 277243 +10667 277221 +5889 277187 +16210 277086 +19788 277074 +3533 277064 +23555 277023 +8177 276841 +10045 276692 +1712 276688 +1335 276680 +20139 276667 +19245 276644 +10652 276628 +12798 276600 +16332 276591 +1702 276565 +13466 276542 +7894 276539 +13750 276477 +28070 276466 +4801 276375 +8758 276319 +6121 276242 +15599 276225 +9216 276173 +18913 276162 +15257 276075 +15397 276033 +12079 275990 +9154 275941 +9066 275923 +10041 275851 +18685 275759 +19362 275648 +5748 275520 +4134 275490 +10968 275417 +8116 275403 +20337 275390 +9676 275296 +14523 275249 +10280 275230 +11829 275119 +9864 275070 +2313 275056 +11010 275033 +1967 274991 +11611 274774 +17836 274745 +7967 274431 +9080 274379 +13891 274378 +11069 274377 +9197 274377 +6487 274363 +48198 274323 +11164 274304 +14725 274301 +12868 274287 +977 274251 +18358 274249 +11072 274222 +8405 273995 +13110 273961 +1436 273939 +9812 273852 +38251 273850 +4976 273834 +37175 273792 +4279 273790 +5756 273779 +10637 273707 +10130 273685 +6080 273601 +12182 273585 +7202 273453 +8850 273393 +1763 273379 +10498 273327 +2387 273318 +14058 273304 +15360 273269 +5572 273260 +6300 273225 +9193 273201 +4873 273182 +18034 273131 +10643 273103 +18966 273048 +13338 273037 +17468 273016 +8942 272969 +10706 272949 +12553 272893 +18353 272878 +15623 272786 +13573 272782 +15901 272749 +9441 272650 +10813 272641 +8735 272558 +6721 272540 +8338 272518 +22905 272496 +9639 272495 +8500 272427 +8099 272380 +9148 272311 +5854 272243 
+10204 272231 +20222 272164 +10616 272112 +7036 272099 +12203 272005 +9927 271973 +2528 271969 +14672 271956 +20710 271955 +14481 271954 +13938 271929 +10776 271845 +17588 271844 +36309 271778 +6546 271746 +15296 271737 +9508 271628 +20522 271611 +2172 271603 +8202 271578 +12829 271575 +19800 271532 +26763 271446 +6752 271369 +43577 271245 +11256 271209 +19717 271164 +8095 271162 +11095 271143 +3849 271032 +8235 271029 +738 270906 +11559 270890 +20059 270852 +13408 270845 +3028 270826 +5700 270819 +2382 270815 +8033 270752 +9796 270711 +9707 270670 +15586 270602 +11263 270508 +13032 270474 +3982 270469 +18066 270444 +13059 270444 +14064 270365 +14085 270332 +28145 270328 +12911 270252 +11558 270112 +16239 270107 +15188 270048 +23761 270032 +5826 269980 +11032 269966 +20782 269958 +17838 269931 +13546 269902 +13041 269892 +16831 269852 +14779 269849 +5222 269801 +24711 269796 +36763 269737 +28125 269727 +11462 269718 +17913 269702 +5499 269615 +25335 269614 +14663 269604 +21730 269511 +14912 269493 +8711 269479 +12014 269429 +10436 269409 +14142 269313 +9888 269307 +18513 269211 +12651 269198 +13413 269143 +10261 269129 +1875 269121 +19101 269115 +20259 269074 +6581 268988 +11832 268914 +6224 268884 +11557 268878 +14667 268836 +9713 268811 +4943 268781 +16383 268725 +17704 268673 +20581 268654 +17642 268565 +15348 268558 +8798 268409 +9826 268357 +18772 268355 +13977 268336 +12436 268316 +11357 268262 +9305 268185 +11949 268141 +8509 268137 +10176 268091 +20529 268080 +14260 268054 +16246 268025 +7577 268019 +22635 267980 +12411 267878 +15220 267832 +16902 267772 +15726 267705 +17475 267693 +11997 267649 +9719 267643 +9966 267593 +11706 267584 +27993 267517 +9698 267510 +16769 267493 +12520 267425 +18939 267423 +6998 267352 +13714 267345 +7988 267328 +5432 267300 +9159 267270 +970 267247 +1883 267156 +12597 267130 +969 267116 +7851 267079 +11396 267044 +6319 267022 +7572 267019 +13487 267000 +13586 266983 +25945 266865 +11135 266858 +20426 266817 +22183 266781 +16334 266772 +20325 266757 +21494 266718 +5626 266678 +14033 266677 +2731 266637 +18212 266575 +12601 266521 +8396 266504 +10584 266501 +9829 266467 +5128 266439 +4718 266285 +8283 266232 +3163 266200 +18665 266165 +14043 266065 +2140 266017 +19553 265926 +12724 265897 +12908 265737 +22933 265703 +13990 265584 +15463 265520 +9162 265465 +14517 265285 +12336 265242 +5408 265206 +17167 265200 +15971 265164 +25651 265080 +8727 265079 +19017 265072 +33160 265025 +21147 264980 +16893 264915 +16797 264903 +13684 264899 +9849 264890 +16559 264831 +14718 264765 +12655 264748 +4023 264639 +9947 264581 +16155 264574 +31594 264544 +6104 264519 +15806 264392 +21221 264345 +1397 264227 +4083 264178 +23394 264177 +10083 264160 +33328 264146 +6156 264080 +12939 264080 +9950 264074 +8166 264059 +15030 264027 +32324 263999 +12346 263955 +13474 263950 +11874 263884 +19995 263882 +130 263839 +14634 263819 +5646 263795 +5421 263702 +7294 263697 +17165 263670 +6764 263622 +12503 263616 +6428 263585 +12225 263506 +4853 263495 +24161 263476 +7268 263464 +10829 263388 +8399 263381 +11694 263379 +8341 263349 +10689 263278 +17788 263200 +15832 263001 +10485 262951 +17053 262897 +98 262873 +15503 262806 +10393 262799 +21026 262785 +16089 262736 +7145 262696 +14329 262691 +10988 262667 +4938 262617 +6631 262431 +13094 262414 +4759 262370 +10713 262359 +722 262358 +6607 262294 +10338 262229 +27459 262199 +1985 262192 +8005 262074 +11160 262045 +20065 262043 +2530 262029 +9917 262001 +11107 261971 +5245 261877 +8842 261858 +13244 261844 +1756 261828 +18624 261697 
+18496 261651 +13640 261637 +13234 261637 +12460 261630 +13688 261554 +934 261540 +13998 261539 +3949 261532 +15591 261500 +10766 261405 +13463 261377 +3498 261340 +7853 261336 +13657 261333 +16565 261324 +11266 261295 +10050 261267 +17187 261200 +7468 261148 +6570 261101 +9112 261095 +12841 261041 +15465 260960 +19868 260941 +14742 260914 +12080 260898 +12008 260822 +9514 260792 +8968 260774 +18240 260745 +10453 260671 +20614 260614 +12162 260544 +13534 260539 +1207 260515 +19675 260494 +5211 260493 +7532 260462 +27116 260264 +18014 260241 +4272 260231 +12101 260002 +20055 260002 +18184 259947 +21742 259850 +8187 259823 +789 259784 +7073 259779 +11338 259732 +14710 259554 +13820 259548 +16918 259503 +18327 259488 +5354 259482 +1154 259371 +13112 259364 +5411 259218 +17413 259156 +7570 259098 +17028 259065 +3775 259024 +7576 258971 +10495 258913 +6127 258860 +3742 258785 +3524 258778 +29002 258748 +15438 258703 +4948 258702 +19804 258665 +5899 258622 +2197 258614 +6453 258581 +13293 258579 +24880 258460 +9850 258441 +6974 258388 +6488 258313 +13475 258303 +9637 258255 +20633 258235 +20920 258220 +12910 258106 +11326 258084 +15443 258081 +6577 258043 +11323 258035 +8300 258029 +5735 257973 +15415 257934 +16449 257901 +2531 257843 +6894 257785 +6968 257729 +11418 257714 +8449 257532 +7688 257450 +22257 257392 +13134 257380 +8807 257349 +11281 257236 +16076 257119 +14110 257102 +27061 257101 +20430 257077 +6098 257021 +12890 257009 +5779 257002 +6473 256775 +11236 256721 +10403 256692 +3943 256662 +20052 256644 +8441 256628 +18899 256570 +14067 256490 +11576 256482 +10884 256474 +12616 256472 +4111 256447 +16905 256399 +14903 256344 +11195 256341 +7780 256319 +1164 256273 +8967 256253 +3493 256114 +8752 256058 +2541 256032 +10392 256030 +10581 255868 +3378 255861 +1798 255786 +5629 255752 +15191 255708 +11204 255626 +5536 255621 +18473 255525 +28209 255401 +8797 255344 +28086 255308 +5935 255300 +9369 255253 +8781 255237 +14601 255157 +235 255145 +755 255125 +6715 255118 +12457 255082 +3779 255039 +21525 255029 +18193 254999 +4447 254987 +24577 254972 +13069 254847 +12598 254776 +2564 254772 +11274 254713 +9765 254691 +16849 254651 +23425 254644 +22016 254589 +348 254584 +8977 254497 +10190 254449 +20442 254376 +10627 254325 +10417 254323 +13982 254312 +11353 254285 +24538 254246 +13478 254180 +7660 254144 +11131 254100 +14285 254067 +7646 254033 +12201 253999 +11029 253849 +11513 253830 +17218 253821 +1291 253782 +22159 253746 +11847 253742 +11422 253730 +4980 253724 +19817 253712 +9526 253590 +8324 253576 +7439 253554 +3575 253498 +6915 253302 +18406 253209 +14559 253205 +11853 253138 +10724 253101 +15976 253017 +4335 252999 +18489 252895 +19323 252807 +11746 252702 +4117 252677 +16443 252637 +14954 252628 +16028 252566 +15224 252495 +9961 252455 +9300 252455 +840 252429 +5513 252399 +6485 252340 +13016 252335 +10647 252236 +12556 252163 +36103 252155 +5822 252148 +10574 252130 +14159 252092 +14103 251995 +3007 251941 +4868 251923 +11608 251871 +15543 251818 +12627 251743 +2632 251725 +17369 251625 +21605 251579 +23201 251546 +11614 251528 +14636 251438 +14113 251425 +10996 251405 +1059 251391 +17277 251368 +12325 251362 +17676 251307 +15760 251285 +1358 251238 +11925 251213 +35851 251159 +16179 251087 +9506 251020 +9405 251012 +14581 250999 +21714 250981 +15866 250957 +19292 250939 +13778 250936 +2721 250909 +16805 250843 +9609 250738 +2894 250728 +1481 250694 +14984 250681 +12187 250629 +16209 250606 +6667 250598 +12039 250592 +26351 250590 +19464 250589 +3847 250521 +10096 250519 +28143 
250431 +11561 250430 +18755 250425 +6831 250402 +5345 250399 +18303 250342 +12744 250205 +17645 250183 +472 250178 +13258 250161 +7889 250156 +13985 250150 +3100 250117 +12399 250031 +11842 250024 +6770 250012 +6028 249984 +19422 249883 +6185 249881 +16095 249875 +14442 249839 +18277 249820 +14008 249716 +4780 249716 +10564 249657 +7935 249650 +6524 249633 +14261 249603 +10942 249535 +8201 249530 +21872 249484 +12387 249481 +5159 249453 +11963 249430 +10852 249408 +20572 249303 +37985 249161 +26225 249133 +1428 249004 +23267 248991 +22347 248928 +542 248860 +28162 248806 +4226 248801 +11675 248800 +8947 248788 +1534 248786 +41224 248756 +9768 248730 +13677 248668 +10844 248660 +15123 248616 +37379 248570 +25672 248531 +9324 248483 +14196 248438 +12097 248435 +19687 248328 +8861 248293 +13279 248278 +13835 248278 +10975 248165 +25682 248143 +8532 248039 +11751 247945 +18524 247934 +19886 247924 +11483 247918 +29393 247899 +3003 247868 +21532 247858 +11886 247855 +9919 247809 +8579 247739 +7508 247717 +5178 247696 +10958 247675 +11760 247663 +8788 247636 +14000 247605 +17104 247525 +11574 247443 +16824 247389 +17551 247323 +2363 247278 +8117 247274 +15970 247271 +15401 247260 +19339 247215 +21336 247052 +5277 246999 +9905 246986 +15461 246929 +1426 246892 +12010 246866 +14665 246863 +7700 246845 +10825 246814 +1384 246762 +26360 246755 +14166 246713 +19786 246710 +1447 246566 +17871 246562 +4170 246466 +12802 246446 +17735 246443 +13863 246416 +9987 246399 +6771 246369 +12037 246276 +9922 246272 +11298 246270 +14071 246245 +12818 246200 +2002 246197 +2899 246184 +12034 246175 +14111 246152 +12287 246149 +18080 246097 +13892 246079 +6102 246042 +18158 246018 +14644 245998 +6158 245996 +12242 245993 +12058 245991 +1837 245897 +700 245844 +8554 245716 +3372 245711 +1287 245705 +1126 245700 +13237 245676 +2906 245653 +4472 245609 +14148 245596 +9715 245545 +11740 245521 +2320 245503 +24782 245428 +9721 245401 +822 245388 +23063 245383 +2416 245248 +44155 245172 +49443 245152 +19522 245137 +712 245074 +22297 245043 +10744 245042 +15395 244980 +9837 244959 +16674 244923 +9220 244912 +5532 244856 +10077 244828 +9686 244823 +14236 244822 +2676 244770 +12132 244758 +9050 244729 +14226 244674 +13157 244611 +17699 244469 +17038 244387 +6893 244356 +13504 244340 +10047 244337 +22867 244314 +13091 244305 +10435 244293 +33672 244254 +15600 244222 +21796 244154 +14971 244148 +20531 244110 +12170 244096 +24721 244062 +37129 244008 +18183 243979 +17559 243908 +15238 243887 +19436 243839 +811 243838 +15006 243778 +15097 243778 +18329 243649 +20536 243618 +4665 243601 +13969 243595 +11342 243536 +579 243534 +15013 243522 +6700 243506 +13524 243505 +7402 243497 +8280 243462 +10301 243456 +21291 243454 +19167 243434 +13472 243353 +20472 243313 +13011 243279 +16238 243267 +17996 243223 +16534 243174 +11811 243163 +1614 243152 +18704 243103 +9217 243076 +5949 243011 +19090 242932 +9923 242910 +12086 242909 +7461 242849 +12700 242795 +18373 242776 +17371 242768 +5563 242766 +47201 242738 +21070 242727 +17634 242709 +10425 242709 +8434 242578 +5305 242536 +13252 242499 +14348 242487 +9246 242471 +15150 242436 +12500 242426 +17528 242424 +18887 242414 +24116 242389 +23488 242387 +7101 242331 +12973 242273 +2617 242131 +7222 242040 +20330 241995 +15713 241800 +16787 241745 +19307 241718 +18884 241713 +14960 241706 +957 241671 +14748 241658 +7102 241557 +7006 241500 +6340 241483 +8946 241477 +17832 241462 +17905 241448 +26244 241403 +15812 241375 +11770 241368 +9661 241361 +17591 241249 +12729 241190 +26058 241190 
+6663 241181 +14923 241131 +20131 241093 +21668 241073 +18470 241045 +4852 241036 +24568 241000 +8835 240978 +31259 240907 +13214 240824 +1746 240812 +4597 240806 +11006 240802 +394 240783 +14713 240716 +13535 240664 +14702 240509 +10323 240409 +19025 240405 +15403 240391 +15470 240322 +10682 240296 +10111 240284 +18518 240277 +7160 240260 +15707 240246 +16452 240209 +14178 240201 +16032 240154 +19116 240139 +8113 240113 +13852 240086 +14022 240037 +12591 239957 +14649 239945 +14339 239916 +21474 239915 +17709 239869 +21952 239817 +8264 239800 +17285 239799 +15737 239731 +17205 239671 +12622 239654 +15444 239648 +16316 239634 +2096 239603 +22948 239581 +9988 239554 +15462 239547 +13167 239539 +3989 239496 +14347 239410 +7911 239385 +18950 239341 +16012 239331 +15779 239300 +13245 239217 +10939 239205 +43208 239145 +8044 239110 +32537 239109 +9109 239055 +16468 239053 +9831 239022 +12289 239015 +12473 239008 +18956 238967 +14034 238965 +32390 238949 +1603 238943 +14759 238899 +19568 238873 +11312 238863 +33448 238862 +12602 238848 +10863 238822 +11508 238722 +1721 238695 +15347 238681 +13473 238680 +14443 238591 +32482 238579 +21305 238555 +2412 238509 +24964 238499 +9433 238479 +12778 238475 +5183 238472 +9749 238441 +6707 238429 +13232 238307 +18410 238305 +9977 238236 +8167 238147 +6089 238087 +2262 238000 +23058 237971 +14009 237931 +14218 237929 +8856 237914 +21938 237904 +9865 237862 +23698 237825 +22261 237799 +12066 237794 +19614 237663 +21550 237644 +6503 237632 +21421 237611 +4407 237500 +18728 237407 +12585 237404 +18190 237370 +13913 237323 +6327 237314 +7091 237275 +7484 237239 +14807 237233 +19919 237104 +2240 237027 +12883 237006 +12672 236984 +17245 236981 +18613 236789 +16135 236789 +44510 236747 +35409 236737 +15031 236551 +12013 236527 +15386 236516 +17176 236497 +10815 236496 +10492 236470 +10911 236469 +13537 236392 +4243 236333 +6400 236316 +29007 236291 +7631 236276 +8314 236264 +20765 236214 +23559 236191 +39859 236188 +14123 236160 +113 236154 +5580 236071 +9257 236046 +10670 236036 +15773 236026 +20395 235988 +18889 235975 +3681 235975 +19136 235965 +14552 235898 +8768 235870 +17075 235844 +7031 235821 +18073 235812 +18311 235801 +4822 235768 +25158 235758 +17906 235743 +17016 235723 +17355 235718 +11170 235613 +7129 235589 +23128 235587 +10746 235529 +3960 235490 +19819 235460 +7177 235452 +8996 235421 +16659 235413 +1070 235407 +4157 235389 +8304 235384 +20604 235267 +7834 235243 +10048 235231 +11270 235182 +13701 235151 +16689 235144 +8718 235065 +39999 235029 +15530 235010 +10598 235006 +19477 234921 +22923 234913 +19834 234897 +19603 234886 +8604 234842 +22230 234835 +10312 234832 +13885 234797 +15280 234727 +13754 234708 +15841 234647 +13622 234596 +12412 234580 +7122 234532 +22874 234512 +11546 234510 +12123 234453 +31995 234438 +18623 234425 +10090 234380 +11506 234346 +16295 234317 +28282 234313 +9907 234307 +17030 234286 +15364 234266 +2781 234214 +15273 234174 +9425 234152 +8599 234104 +10912 234073 +19887 234033 +12278 234000 +8779 233995 +928 233978 +22410 233978 +37186 233977 +10552 233962 +17785 233866 +3137 233815 +2286 233801 +12588 233798 +11446 233668 +20209 233640 +11541 233540 +13118 233501 +20994 233463 +7299 233456 +20495 233430 +14615 233395 +12028 233375 +13771 233371 +19940 233078 +10715 233075 +11940 233049 +15744 233012 +14023 233010 +21298 232991 +10755 232962 +12445 232844 +15849 232811 +12161 232793 +13488 232737 +19627 232725 +14303 232713 +9970 232693 +17155 232615 +16293 232590 +18423 232583 +12917 232573 +13225 232560 +16816 
232513 +22137 232462 +2550 232439 +39970 232428 +1749 232423 +26053 232383 +7231 232347 +24540 232344 +12452 232280 +20890 232235 +5304 232190 +10861 232125 +5026 232123 +25763 232111 +15375 232100 +20882 232062 +29224 232056 +17051 232023 +6068 231967 +20377 231908 +15868 231882 +21588 231866 +478 231844 +9004 231835 +10025 231767 +3916 231764 +28058 231697 +6324 231683 +18039 231677 +14891 231669 +9009 231622 +9215 231608 +3515 231529 +12316 231500 +10225 231487 +13895 231467 +14790 231452 +1831 231443 +10537 231322 +7571 231276 +19790 231269 +16115 231247 +12990 231210 +7590 231116 +3699 231084 +3041 231078 +12566 231025 +12191 231001 +13613 230993 +8447 230991 +5770 230906 +45811 230849 +1557 230840 +12059 230775 +5120 230749 +6124 230744 +8595 230721 +9718 230711 +12872 230700 +10066 230682 +26280 230666 +14053 230646 +16126 230641 +20815 230603 +20903 230557 +916 230552 +241 230545 +17654 230421 +20202 230418 +5542 230400 +3109 230337 +17364 230305 +3174 230284 +17797 230253 +31561 230216 +13105 230200 +9656 230199 +16639 230184 +11835 230183 +6139 230117 +9082 230099 +14287 230049 +18280 230024 +25670 229987 +7430 229958 +18481 229950 +22671 229936 +11084 229910 +12174 229895 +11073 229858 +13756 229837 +17600 229795 +11447 229737 +8845 229737 +15534 229685 +21576 229658 +3522 229633 +19405 229593 +14381 229582 +18866 229566 +4033 229525 +12075 229520 +22536 229517 +19947 229507 +16280 229473 +6203 229472 +15491 229435 +8563 229365 +11680 229308 +20799 229304 +13705 229298 +18933 229208 +17266 229208 +14462 229158 +12742 229153 +23669 229130 +20382 229080 +10889 229078 +20565 229060 +12439 229004 +5686 228929 +16467 228914 +17542 228834 +11764 228816 +13693 228765 +10823 228749 +15892 228474 +30452 228448 +10472 228390 +16610 228364 +9236 228343 +24174 228330 +17476 228315 +15800 228213 +15981 228148 +7236 228132 +26199 228108 +2684 228107 +3103 228045 +12133 228039 +9672 228007 +4532 228006 +10900 227989 +8924 227958 +8799 227886 +10224 227883 +28986 227838 +10797 227833 +2782 227758 +33007 227718 +12463 227636 +29367 227597 +7221 227558 +12739 227509 +26597 227410 +12914 227402 +20656 227371 +22372 227348 +12954 227344 +4919 227333 +1996 227306 +14516 227304 +19584 227302 +33467 227285 +15549 227265 +14451 227254 +25176 227163 +8513 227110 +9265 227079 +5825 227077 +16916 227064 +8837 227058 +11304 227013 +16525 226927 +13881 226803 +10817 226749 +21513 226745 +48716 226692 +7218 226646 +5019 226635 +38526 226632 +15436 226625 +10175 226595 +8529 226551 +22167 226539 +1520 226496 +8173 226474 +8959 226444 +29411 226444 +10195 226443 +1465 226389 +1025 226359 +2265 226346 +2243 226311 +10592 226276 +12912 226248 +13819 226212 +10972 226199 +13043 226171 +21088 226141 +11620 226139 +22221 226136 +21233 226136 +2575 226078 +7729 226071 +11314 226060 +14845 225997 +5661 225991 +13273 225979 +2385 225975 +21512 225910 +5937 225907 +16330 225870 +4299 225870 +15662 225857 +22750 225820 +11478 225806 +22925 225769 +24033 225758 +13643 225757 +21097 225757 +1162 225739 +17058 225730 +17487 225677 +15077 225668 +6759 225659 +29293 225603 +17807 225584 +6181 225569 +8893 225560 +23547 225538 +17507 225495 +15928 225483 +5164 225472 +117 225463 +17195 225402 +15852 225393 +14431 225373 +19478 225368 +20695 225363 +15318 225341 +17494 225257 +12062 225251 +22908 225210 +15771 225198 +224 225168 +30958 225155 +12686 225055 +13638 225016 +19388 225000 +14703 224981 +16970 224976 +2454 224974 +17690 224952 +20409 224908 +20111 224898 +19765 224893 +11759 224893 +22081 224874 +9316 224841 +16517 
224830 +6306 224819 +1540 224790 +13156 224756 +14212 224670 +14554 224670 +9978 224653 +23889 224643 +9701 224604 +10451 224582 +8534 224555 +14900 224517 +6045 224382 +8476 224300 +17691 224299 +4768 224263 +21995 224248 +8901 224228 +15313 224200 +15567 224200 +26040 224190 +36753 224177 +30289 224128 +13053 224119 +8045 224114 +12581 224098 +10532 224031 +14471 224019 +10422 224014 +14129 223973 +10619 223972 +14643 223911 +17217 223859 +18169 223852 +13355 223803 +6742 223772 +4262 223693 +2809 223686 +14430 223638 +10001 223524 +20516 223522 +12974 223505 +16566 223488 +7989 223485 +11445 223472 +13044 223433 +3856 223428 +10349 223409 +14855 223408 +17113 223375 +13388 223352 +14738 223342 +5126 223298 +6684 223218 +13675 223215 +12092 223199 +11380 223127 +14608 223060 +8011 223015 +13453 223013 +3898 222961 +20492 222958 +12300 222874 +16400 222852 +22281 222839 +12218 222818 +15608 222811 +35253 222785 +5074 222748 +13641 222731 +49280 222731 +20769 222727 +6955 222709 +7481 222695 +19502 222671 +1386 222670 +8695 222613 +9344 222608 +237 222484 +21408 222483 +5972 222470 +7196 222462 +17967 222444 +26702 222349 +20940 222340 +16403 222307 +15425 222295 +14256 222285 +17374 222212 +10580 222212 +21776 222204 +13013 222189 +24573 222178 +14109 222164 +3300 222160 +4017 222053 +12330 222052 +13679 222039 +11384 222000 +13477 221999 +8169 221984 +8068 221968 +23647 221922 +9944 221915 +906 221889 +5934 221882 +26603 221847 +10276 221841 +11382 221793 +12971 221790 +3021 221779 +10945 221752 +9221 221735 +23720 221725 +10770 221698 +14305 221674 +23990 221645 +3225 221614 +13720 221600 +13334 221578 +1151 221567 +16378 221516 +1072 221490 +48160 221489 +31033 221484 +1412 221480 +12560 221416 +13670 221410 +14096 221391 +16002 221375 +16732 221374 +3383 221339 +13514 221325 +3481 221313 +8800 221265 +17668 221262 +10690 221256 +12247 221236 +2135 221229 +17716 221203 +3080 221187 +21205 221165 +15895 221164 +9618 221164 +40196 221132 +20541 221101 +9976 221070 +5796 221049 +11436 221034 +7972 221000 +21679 220976 +5060 220952 +18653 220948 +13823 220936 +10745 220919 +18103 220907 +9521 220896 +11492 220856 +10959 220757 +16825 220751 +7080 220723 +17025 220694 +15519 220606 +11873 220606 +15698 220593 +16117 220569 +18921 220522 +2490 220511 +15449 220499 +32777 220459 +12856 220431 +14042 220401 +2535 220336 +999 220320 +8748 220319 +22966 220306 +16615 220206 +30303 220200 +21724 220193 +15679 220149 +35912 220141 +16568 220080 +13995 220065 +23075 220052 +16413 220030 +13351 220009 +5534 220005 +14044 219957 +18675 219945 +6881 219871 +18751 219864 +123 219758 +21791 219714 +15210 219709 +773 219683 +10547 219682 +5757 219678 +15018 219671 +10524 219668 +10933 219637 +18828 219624 +16013 219618 +13133 219536 +10426 219525 +16757 219524 +3903 219524 +15893 219477 +3488 219405 +13528 219373 +3014 219331 +24287 219327 +16798 219323 +19994 219277 +14958 219254 +6865 219234 +24434 219228 +42623 219200 +5231 219155 +9039 219130 +18727 219118 +16937 219109 +27943 219103 +22724 219087 +11085 219087 +23794 219086 +9607 219056 +19660 219029 +4411 218968 +14090 218928 +17337 218915 +22693 218906 +9190 218872 +12297 218858 +5263 218728 +8655 218726 +49258 218675 +3684 218645 +27620 218613 +24712 218557 +9914 218554 +11110 218532 +20893 218523 +17830 218518 +4389 218498 +23400 218425 +11583 218403 +2768 218401 +24537 218396 +7343 218395 +17370 218395 +21406 218351 +15032 218351 +10181 218300 +8231 218226 +12030 218226 +20097 218153 +13428 218146 +18532 218134 +8823 218128 +12846 218114 +9455 
218081 +13209 218080 +16310 218054 +20464 218046 +11843 218044 +15203 218041 +10577 218034 +20955 217996 +8548 217983 +10849 217980 +12333 217976 +8630 217966 +18284 217888 +9740 217809 +1221 217776 +7899 217760 +10011 217740 +7626 217665 +13558 217631 +18914 217578 +15937 217549 +29611 217546 +15894 217527 +9030 217526 +18241 217517 +26315 217499 +11511 217481 +23696 217456 +11391 217437 +11067 217404 +1213 217394 +22861 217392 +12834 217369 +10283 217326 +40912 217323 +12716 217304 +14545 217260 +16381 217225 +15207 217189 +4703 217182 +13261 217111 +16050 217102 +17100 217085 +15171 217048 +12639 217029 +10960 217025 +22957 216931 +12950 216859 +18456 216850 +15757 216841 +12704 216839 +12702 216778 +17945 216772 +14342 216745 +23724 216736 +9028 216726 +18901 216711 +3543 216674 +21376 216672 +15734 216671 +3934 216663 +8722 216500 +27987 216482 +20138 216439 +14037 216414 +14491 216413 +2843 216352 +22421 216341 +13578 216333 +16862 216275 +12554 216271 +28537 216255 +5173 216251 +12549 216248 +21623 216176 +17456 216144 +16484 216142 +17659 216113 +17736 216061 +7079 216055 +21913 215938 +14764 215893 +11443 215865 +11776 215842 +31873 215831 +4738 215782 +18501 215701 +12694 215684 +19625 215672 +19003 215654 +6460 215618 +12127 215615 +16160 215607 +12094 215525 +10249 215518 +15045 215504 +10424 215428 +5605 215404 +12082 215401 +17092 215391 +13054 215323 +14288 215244 +9472 215228 +18711 215207 +16538 215140 +15074 215093 +19975 215087 +11816 215077 +28288 215071 +17884 215042 +3937 215038 +12797 214963 +22600 214939 +23475 214902 +5393 214860 +5979 214807 +10802 214804 +1801 214787 +967 214768 +10488 214663 +14263 214634 +10857 214609 +31157 214596 +8905 214546 +4714 214537 +17029 214532 +23368 214500 +15570 214476 +33496 214457 +17445 214346 +13206 214340 +24416 214331 +2487 214279 +18494 214232 +8577 214231 +22028 214210 +3617 214151 +12372 214126 +41805 214121 +13792 214118 +14869 214110 +12713 214089 +11219 214037 +9527 214012 +688 214000 +8627 213979 +3118 213865 +11022 213825 +18136 213766 +16749 213678 +2207 213676 +5167 213671 +12125 213643 +27227 213623 +6857 213550 +12864 213535 +1308 213529 +17397 213499 +23593 213362 +16035 213342 +11917 213329 +9124 213275 +13774 213224 +14183 213191 +22161 213177 +15427 213148 +15233 213144 +12055 213128 +971 213066 +3723 213027 +10640 213027 +13941 213025 +36128 213014 +11091 213006 +17034 212967 +23166 212933 +9370 212901 +10686 212857 +23751 212844 +31777 212803 +19138 212754 +24579 212728 +8858 212692 +5930 212671 +12476 212652 +6580 212651 +6972 212647 +13669 212629 +6383 212600 +18419 212599 +8284 212585 +23015 212571 +19582 212553 +8070 212508 +8855 212503 +9056 212502 +15727 212495 +5276 212469 +14833 212421 +9535 212418 +18152 212403 +9577 212394 +15240 212379 +15607 212375 +24323 212369 +11444 212362 +14973 212361 +17853 212358 +13937 212356 +17313 212347 +6808 212331 +24663 212323 +19033 212285 +11317 212254 +12296 212141 +20365 212058 +14284 212016 +15808 211962 +15955 211952 +16392 211948 +6297 211881 +1856 211881 +14186 211830 +2288 211802 +5837 211713 +2664 211706 +14604 211705 +14901 211645 +28190 211626 +22101 211577 +18330 211516 +19896 211500 +21523 211488 +11392 211463 +2618 211419 +14450 211415 +11904 211401 +11859 211384 +17444 211351 +9343 211266 +2770 211247 +20243 211246 +14659 211228 +9430 211228 +12077 211197 +14732 211181 +15704 211147 +21946 211139 +12269 211069 +11731 211062 +11672 211055 +17872 210994 +15668 210988 +14548 210950 +12969 210928 +1235 210923 +15215 210892 +10909 210879 +48798 210872 
+3428 210853 +1523 210844 +12916 210814 +13027 210809 +11728 210779 +29073 210714 +13888 210709 +16846 210705 +9612 210673 +10162 210657 +16085 210643 +14320 210584 +13619 210579 +23194 210418 +19077 210354 +8159 210295 +20118 210278 +24821 210256 +16651 210150 +22637 210124 +2709 210072 +19163 210068 +15254 210067 +44600 210015 +9419 210007 +13988 209990 +15550 209888 +13204 209858 +22280 209856 +17422 209842 +11697 209839 +12936 209801 +11186 209777 +35621 209746 +10164 209721 +18918 209712 +633 209613 +20776 209595 +13737 209449 +31506 209446 +17424 209442 +8226 209393 +24868 209374 +12624 209351 +12511 209343 +2927 209279 +8080 209270 +14740 209255 +5838 209214 +5959 209211 +22548 209176 +13536 209058 +19174 209012 +24144 209012 +18845 208893 +26402 208846 +1156 208837 +12594 208796 +2533 208779 +14358 208763 +36485 208754 +11888 208739 +7232 208733 +23526 208652 +3610 208564 +20178 208484 +43998 208460 +10926 208352 +3458 208345 +13816 208330 +38742 208301 +14081 208278 +11399 208260 +20085 208192 +13961 208187 +12805 208183 +14614 208174 +3883 208152 +1885 208139 +3008 208126 +23639 208079 +17070 208016 +5283 207990 +9341 207938 +5476 207931 +9684 207911 +19661 207893 +11564 207862 +19105 207862 +22847 207837 +16007 207836 +11524 207827 +15144 207818 +21468 207755 +16914 207689 +18404 207670 +12390 207654 +17840 207497 +16008 207494 +45871 207470 +2586 207461 +14628 207455 +18265 207430 +18614 207428 +7081 207359 +8770 207356 +5297 207336 +18798 207306 +15525 207287 +19808 207251 +14506 207241 +20400 207080 +12788 207060 +14741 207043 +41153 207022 +32922 207010 +8160 206987 +1529 206971 +9379 206951 +13721 206944 +10347 206925 +3247 206897 +13387 206828 +19043 206818 +1815 206782 +23894 206771 +16053 206753 +3852 206737 +18075 206716 +9941 206709 +24576 206705 +23174 206693 +12471 206688 +2153 206680 +3304 206669 +18476 206649 +43318 206638 +14293 206636 +4494 206621 +12319 206566 +29533 206556 +12120 206522 +7931 206512 +19284 206500 +1851 206486 +5594 206411 +4990 206402 +3618 206396 +3201 206381 +14401 206362 +19902 206336 +14321 206300 +17322 206286 +11941 206270 +20110 206225 +18222 206211 +17780 206209 +17536 206137 +1118 206127 +11496 206117 +23430 206105 +10932 206102 +17390 206059 +13646 206043 +6653 206006 +16494 205987 +23358 205935 +10438 205877 +8979 205865 +15281 205825 +6302 205821 +11678 205815 +3353 205776 +20819 205742 +13224 205690 +14499 205675 +23554 205671 +28113 205647 +9495 205646 +5576 205637 +16048 205569 +12459 205499 +33597 205435 +7645 205422 +13790 205322 +15542 205315 +46245 205291 +18895 205274 +12244 205269 +20081 205229 +7284 205225 +25869 205181 +24392 205170 +10565 205124 +13611 205121 +6500 205103 +16261 205096 +14147 205064 +6649 205025 +6819 205001 +17272 204957 +8822 204956 +39726 204889 +4400 204876 +9306 204876 +9229 204814 +6747 204801 +12842 204800 +9704 204776 +35747 204774 +9830 204741 +15500 204721 +9930 204720 +14502 204642 +11315 204596 +19732 204574 +9312 204415 +21146 204408 +14496 204374 +11875 204360 +15859 204334 +30917 204299 +8763 204243 +17023 204218 +1779 204140 +13407 204123 +21462 204106 +14802 204105 +18352 204097 +14556 204094 +11378 204029 +17252 203976 +14660 203975 +30102 203960 +18730 203909 +15107 203853 +6091 203824 +6911 203759 +17432 203643 +14393 203587 +19530 203585 +9578 203567 +25890 203504 +13872 203480 +25415 203476 +20533 203450 +19911 203416 +13879 203386 +12579 203381 +3545 203356 +19210 203348 +20163 203348 +14942 203332 +21371 203316 +20091 203230 +10362 203162 +15516 203101 +24836 203043 +14877 203030 
+18515 203027 +12985 203014 +11971 202922 +16490 202920 +14241 202830 +9113 202823 +17931 202801 +10862 202692 +21835 202678 +24175 202629 +8906 202566 +22799 202531 +24389 202517 +10620 202510 +23638 202485 +14946 202399 +29602 202361 +6021 202316 +15736 202310 +16078 202283 +1131 202258 +15869 202240 +13181 202236 +1311 202227 +3082 202222 +10920 202218 +24153 202190 +11173 202148 +21997 202148 +17304 202107 +18210 202094 +13803 202072 +7206 202055 +15953 202037 +13549 201990 +3558 201987 +22984 201969 +20953 201925 +13870 201922 +34630 201878 +11152 201875 +20821 201871 +18087 201855 +11815 201831 +17396 201827 +17112 201822 +26283 201750 +7565 201738 +8128 201738 +40055 201718 +22636 201689 +27883 201686 +28047 201679 +22643 201678 +15805 201639 +22207 201629 +15673 201585 +10717 201583 +16637 201556 +15089 201546 +14768 201433 +19304 201430 +25079 201428 +8940 201385 +15499 201358 +13055 201337 +23434 201293 +15035 201291 +22559 201281 +14232 201276 +5840 201267 +13467 201221 +6201 201203 +18319 201201 +18199 201115 +18078 201095 +36458 201045 +13179 201043 +34969 201035 +12555 201016 +24357 201015 +22489 200993 +19635 200941 +12294 200933 +22276 200926 +13205 200911 +31684 200896 +16055 200851 +10664 200848 +1673 200815 +5098 200813 +18322 200775 +5119 200771 +16636 200749 +29394 200726 +13627 200720 +10239 200697 +10716 200671 +8937 200664 +13215 200643 +26914 200631 +18617 200624 +4685 200606 +12822 200605 +5636 200549 +40319 200524 +16783 200520 +21335 200482 +21022 200464 +11299 200458 +27801 200441 +8254 200419 +7676 200363 +8178 200318 +26015 200274 +223 200265 +24527 200249 +2752 200218 +21819 200209 +7958 200165 +15843 200150 +11105 200111 +23524 200102 +10678 200093 +10866 200087 +21284 200061 +13471 200057 +6055 199960 +16736 199936 +18721 199855 +16518 199836 +11635 199796 +1915 199773 +13650 199720 +3828 199718 +13810 199695 +15193 199677 +9916 199675 +10081 199628 +12526 199625 +12647 199611 +25807 199610 +9664 199591 +15314 199455 +16842 199444 +11175 199437 +15531 199413 +34309 199402 +16508 199393 +11104 199386 +7730 199379 +18641 199320 +4173 199281 +8696 199241 +27928 199240 +6603 199220 +10232 199206 +28057 199161 +22553 199146 +26851 199116 +11424 199111 +18757 199088 +5289 199070 +25254 199069 +25125 199065 +14343 198992 +10365 198991 +17936 198991 +27469 198988 +17366 198972 +15154 198972 +4382 198876 +12198 198871 +16759 198834 +14337 198784 +7109 198771 +19740 198641 +17581 198607 +24379 198563 +3816 198537 +9693 198535 +15241 198532 +23817 198507 +18804 198432 +24255 198411 +6107 198349 +17362 198316 +10809 198310 +21116 198310 +5039 198291 +2308 198291 +26088 198236 +18260 198222 +14458 198213 +25389 198156 +21008 198155 +11470 198084 +21859 198070 +11283 198029 +16230 198019 +47809 197994 +13997 197990 +8267 197972 +27011 197967 +12197 197954 +12362 197946 +24131 197924 +10940 197885 +37628 197882 +8811 197822 +14509 197774 +15373 197762 +15515 197717 +16865 197711 +13815 197675 +7813 197661 +12381 197657 +3401 197647 +12273 197626 +21516 197617 +14384 197598 +103 197579 +19135 197563 +18409 197561 +39760 197544 +11634 197538 +20019 197411 +12887 197393 +5956 197370 +24668 197359 +15251 197327 +9342 197322 +27003 197303 +21050 197272 +25759 197268 +25640 197266 +9510 197262 +18204 197255 +9098 197246 +16868 197227 +21507 197214 +15882 197137 +14324 197103 +23827 197101 +9823 197072 +31340 197067 +7326 197066 +10385 196985 +18739 196964 +809 196944 +22319 196940 +19920 196910 +19688 196860 +10478 196853 +11571 196838 +12214 196815 +34058 196793 +16655 196733 
+17296 196724 +14611 196714 +14412 196691 +11667 196663 +19413 196620 +8875 196593 +13165 196540 +16734 196518 +24104 196511 +8364 196502 +2326 196500 +5985 196441 +12706 196438 +8208 196390 +5329 196290 +681 196210 +31389 196208 +17135 196207 +25186 196204 +10622 196197 +19843 196177 +9932 196088 +2875 196086 +3798 196083 +9032 196057 +21782 196019 +16083 196002 +8966 195986 +18134 195959 +740 195949 +16118 195920 +17455 195892 +17958 195875 +10326 195842 +40110 195837 +3123 195811 +13035 195796 +12158 195765 +27526 195763 +16551 195691 +24590 195680 +13238 195546 +9340 195533 +19376 195525 +17270 195514 +9048 195472 +13062 195464 +17098 195441 +4968 195439 +13767 195439 +23307 195310 +15221 195299 +5121 195294 +17496 195292 +9513 195226 +9858 195219 +1742 195203 +13910 195163 +10317 195147 +15441 195142 +27606 195130 +28797 195114 +14075 195031 +15819 194979 +17161 194971 +32190 194967 +48063 194956 +2228 194853 +11148 194826 +20569 194815 +10528 194814 +18544 194813 +16384 194776 +11262 194776 +36553 194753 +12838 194747 +12167 194707 +19089 194655 +9442 194643 +15804 194612 +7408 194589 +9714 194544 +13502 194485 +30094 194423 +20758 194418 +7130 194411 +14803 194409 +30683 194325 +3495 194295 +9723 194235 +10375 194199 +19198 194180 +34016 194170 +42897 194156 +16503 194153 +15135 194135 +19964 194121 +24304 194073 +5573 194073 +14307 194006 +12280 194000 +7742 193988 +7617 193977 +10971 193959 +16546 193948 +17628 193922 +22195 193916 +18062 193910 +22516 193894 +34761 193893 +16556 193874 +13212 193838 +13728 193837 +16054 193822 +20494 193814 +8397 193791 +2 193789 +14246 193784 +15872 193776 +11137 193773 +20117 193750 +9096 193748 +12660 193694 +14441 193657 +11607 193643 +26436 193642 +21750 193638 +12547 193620 +22540 193620 +9445 193578 +23941 193545 +30382 193517 +10949 193503 +19665 193474 +12239 193456 +22687 193447 +13975 193440 +22013 193425 +17812 193409 +19639 193409 +10257 193402 +27083 193366 +4378 193331 +25375 193276 +3702 193272 +25014 193255 +1809 193231 +4864 193160 +6561 193114 +15253 193107 +23888 193103 +12564 193103 +12407 193070 +11111 193052 +12847 193023 +97 193021 +8932 193008 +19223 193008 +13324 193005 +9479 192988 +29536 192979 +33888 192911 +11913 192848 +8450 192848 +18006 192801 +4128 192796 +9404 192771 +23502 192733 +9476 192630 +22233 192603 +19984 192576 +4867 192557 +16033 192554 +19579 192541 +22418 192522 +7679 192514 +14161 192475 +12446 192437 +17652 192394 +15039 192387 +13114 192384 +23995 192370 +15283 192312 +16287 192264 +18201 192232 +22099 192198 +22596 192180 +13427 192167 +21299 192163 +5819 192093 +6058 192084 +23445 192073 +23605 192046 +16180 191892 +25715 191891 +19889 191824 +17492 191797 +1458 191777 +12922 191773 +14693 191767 +17375 191734 +13446 191698 +14801 191695 +11202 191687 +16514 191672 +15487 191656 +13791 191645 +15242 191615 +14245 191581 +3801 191536 +12226 191534 +24238 191508 +20103 191476 +2371 191454 +13151 191441 +22578 191429 +23249 191414 +8965 191396 +17578 191380 +12384 191374 +13591 191368 +13395 191318 +5805 191296 +18010 191271 +23078 191243 +7013 191207 +11923 191207 +12934 191173 +11068 191129 +20282 191124 +27290 191101 +20534 191056 +10112 191047 +18024 191030 +6220 191015 +14422 190963 +20154 190925 +24743 190912 +27905 190886 +18153 190854 +12888 190844 +12653 190841 +14765 190815 +13980 190798 +14004 190783 +18652 190783 +8876 190748 +13666 190685 +14052 190683 +16172 190678 +15699 190672 +7536 190620 +7554 190609 +12755 190599 +15426 190597 +4005 190592 +18976 190577 +13454 190558 +20501 
190550 +14890 190507 +26355 190499 +14895 190386 +5012 190382 +10143 190362 +23382 190336 +13135 190315 +14143 190311 +8408 190282 +20200 190266 +15010 190236 +18732 190207 +16847 190178 +16109 190116 +24253 190092 +15264 190053 +17392 190030 +30774 190011 +13565 189985 +16499 189968 +20993 189957 +1604 189929 +9779 189922 +10795 189864 +35942 189857 +9386 189853 +25040 189836 +9174 189786 +19690 189778 +12245 189734 +20071 189723 +19574 189707 +18389 189707 +11779 189627 +26691 189621 +18198 189589 +12666 189586 +12509 189521 +10116 189504 +43967 189435 +11795 189420 +13275 189387 +15986 189374 +7057 189357 +11616 189356 +14528 189342 +14852 189333 +7037 189303 +20792 189288 +6660 189274 +13061 189255 +18786 189228 +23344 189217 +21612 189165 +13952 189163 +18394 189160 +14446 189141 +11254 189124 +10733 189118 +20669 189093 +16493 189039 +1408 189035 +17998 188970 +16861 188962 +10688 188950 +9518 188895 +16826 188888 +23040 188881 +17695 188866 +5189 188861 +7707 188860 +19550 188833 +13931 188821 +16929 188812 +11673 188801 +7788 188737 +16431 188706 +17428 188625 +12680 188619 +2964 188531 +8251 188521 +12649 188513 +19126 188486 +13614 188464 +12498 188434 +240 188366 +23960 188355 +16027 188339 +14736 188328 +8557 188257 +14504 188242 +12317 188216 +18928 188187 +13647 188177 +730 188141 +8499 188040 +25752 188019 +25799 188009 +4475 187984 +19705 187920 +242 187919 +9321 187901 +13971 187893 +17949 187866 +26546 187806 +4337 187792 +1574 187732 +16070 187679 +13314 187655 +16290 187605 +6390 187575 +32991 187551 +11080 187546 +6945 187488 +23139 187471 +23788 187464 +9365 187434 +11040 187433 +22381 187427 +18313 187380 +26085 187341 +14982 187323 +8574 187276 +19958 187261 +11414 187206 +1166 187206 +17537 187191 +15381 187178 +13385 187171 +21736 187169 +6530 187142 +7084 187139 +5528 187138 +26856 187128 +10757 187127 +13401 187114 +9991 187065 +12293 187060 +16477 187032 +4484 186968 +8790 186965 +3083 186903 +18033 186884 +17308 186848 +21609 186846 +4527 186814 +17997 186785 +17243 186783 +15114 186781 +15797 186778 +13927 186730 +10373 186727 +10874 186712 +7805 186676 +12698 186634 +14313 186602 +20320 186582 +19271 186577 +7715 186575 +16090 186565 +6236 186562 +13596 186556 +10119 186540 +27792 186538 +14668 186502 +12264 186476 +27837 186457 +32584 186446 +15897 186417 +2814 186411 +1191 186396 +12472 186389 +9173 186320 +20194 186304 +7749 186228 +7239 186223 +12044 186214 +12298 186188 +30851 186186 +10213 186182 +9427 186147 +31547 186096 +26056 186096 +5154 186083 +6548 186073 +1500 186064 +13153 186036 +21740 186013 +12450 186009 +17535 186007 +13187 185997 +15665 185967 +14772 185946 +22445 185931 +39073 185927 +28247 185912 +10671 185884 +13373 185871 +10335 185854 +9969 185840 +14892 185836 +8576 185825 +5614 185778 +5886 185753 +17332 185753 +10566 185745 +20515 185743 +12393 185692 +16300 185686 +26107 185651 +25872 185641 +7582 185618 +11050 185594 +7085 185589 +13266 185586 +10785 185573 +22931 185543 +13052 185522 +16355 185505 +17321 185499 +18563 185409 +6954 185351 +16532 185333 +9034 185322 +8631 185317 +7857 185290 +3642 185273 +13497 185252 +9353 185245 +38926 185244 +21505 185243 +32903 185225 +7785 185218 +12925 185211 +23489 185206 +121 185160 +17914 185159 +20252 185137 +6756 185118 +14333 185097 +15660 185070 +12227 185042 +8210 185040 +14727 185014 +21012 185010 +16968 185009 +30664 184980 +44144 184965 +23088 184951 +7038 184948 +22770 184841 +15980 184824 +18791 184788 +20829 184782 +46011 184765 +13457 184748 +17264 184706 +8363 184673 +5579 
184646 +33864 184641 +12134 184598 +28490 184582 +6090 184556 +18088 184536 +30693 184523 +2475 184522 +17901 184511 +12568 184491 +11012 184482 +18530 184461 +21029 184451 +2673 184416 +15453 184413 +23837 184409 +19121 184397 +14995 184396 +14247 184351 +26391 184333 +6648 184215 +17083 184126 +15398 184122 +24154 184099 +17466 184079 +23566 184073 +17057 184041 +1456 184024 +36107 184020 +11441 183972 +12892 183957 +29424 183938 +17728 183935 +17096 183913 +19358 183880 +19592 183868 +12135 183847 +15617 183769 +7947 183698 +22132 183693 +17921 183642 +14930 183631 +16373 183628 +14139 183577 +15339 183563 +20208 183553 +9434 183516 +17948 183511 +15282 183476 +22558 183422 +16245 183386 +20879 183384 +20760 183322 +15112 183317 +19037 183298 +2790 183225 +9334 183222 +12586 183218 +18655 183207 +19823 183187 +21361 183163 +17471 183140 +16633 183140 +35348 183095 +28422 183085 +15329 183035 +21681 183027 +23393 183018 +14365 183016 +26661 182986 +22103 182984 +17188 182968 +6582 182948 +6247 182823 +11128 182782 +14281 182774 +9683 182761 +16821 182757 +18687 182664 +11739 182625 +31996 182584 +11109 182582 +4605 182563 +9033 182516 +9091 182502 +6148 182479 +12365 182357 +16205 182328 +28102 182310 +10703 182278 +2206 182267 +19431 182240 +13020 182239 +19316 182231 +4933 182220 +16828 182198 +3500 182192 +23288 182185 +10491 182150 +15789 182145 +8505 182107 +4886 182055 +11613 181967 +19651 181962 +7266 181950 +12421 181938 +27463 181932 +5092 181911 +26159 181884 +12238 181842 +20754 181817 +13017 181767 +15321 181759 +31415 181722 +17777 181718 +10590 181707 +3557 181705 +22182 181685 +2283 181681 +26182 181679 +15382 181678 +20915 181642 +10281 181615 +11286 181602 +25450 181598 +13081 181592 +24692 181592 +15335 181586 +10245 181585 +35240 181555 +12419 181544 +7379 181476 +12625 181426 +107 181418 +46568 181398 +29911 181382 +11996 181319 +18842 181296 +13197 181295 +26683 181289 +10983 181248 +20143 181240 +9251 181201 +58 181192 +25013 181189 +24269 181184 +8016 181180 +15138 181178 +5246 181147 +10753 181123 +25279 181114 +49188 181107 +22235 181106 +10416 181102 +15962 181083 +858 181077 +10603 181041 +28345 181027 +17517 181012 +13685 181010 +13796 180966 +5962 180927 +10560 180906 +12720 180895 +23443 180844 +14179 180758 +5936 180697 +14774 180686 +13396 180683 +13963 180683 +11806 180666 +21990 180646 +18195 180618 +9111 180605 +3032 180476 +6931 180457 +19029 180423 +26291 180419 +4674 180417 +30096 180414 +12737 180368 +10437 180314 +22937 180290 +21569 180184 +39173 180170 +6477 180162 +8340 180155 +12186 180123 +19784 180123 +12172 180100 +8067 180079 +27686 180044 +22112 180031 +20351 180027 +12386 180017 +24533 180009 +18983 179914 +26013 179907 +2169 179890 +14505 179883 +12057 179881 +19196 179873 +12703 179836 +15692 179684 +10787 179678 +20788 179665 +22382 179642 +14800 179640 +20079 179626 +25496 179602 +23957 179584 +17262 179561 +13076 179553 +7963 179552 +14792 179535 +10431 179516 +19558 179512 +19624 179503 +14444 179480 +1379 179445 +3313 179441 +9385 179437 +9546 179322 +17869 179308 +7308 179274 +21274 179269 +5775 179268 +17675 179252 +7689 179189 +13343 179180 +24797 179178 +29040 179165 +2734 179158 +15638 179153 +13203 179151 +23943 179129 +13718 179096 +24078 179090 +8692 179077 +12051 179067 +42326 179058 +14157 179052 +20884 179044 +16223 179043 +23976 178992 +12200 178959 +16748 178947 +14083 178946 +19055 178945 +24459 178906 +16988 178836 +16219 178833 +48984 178785 +7500 178772 +14297 178751 +9993 178748 +8643 178746 +3010 178732 +22586 
178718 +36427 178703 +20158 178700 +18580 178676 +5574 178583 +16110 178569 +14013 178558 +30994 178554 +12157 178553 +28078 178528 +30739 178473 +17349 178470 +16427 178409 +29928 178401 +16522 178396 +16263 178391 +13734 178348 +15914 178347 +12114 178274 +8913 178254 +17774 178239 +17612 178210 +29560 178195 +14283 178152 +20169 178090 +42357 178076 +43461 178071 +22681 178031 +10398 177993 +24004 177926 +7759 177925 +15232 177918 +7440 177896 +18625 177828 +14201 177827 +12714 177772 +13246 177752 +12501 177747 +26809 177746 +1527 177730 +14720 177703 +15271 177600 +15820 177574 +17291 177547 +953 177535 +7980 177516 +16044 177494 +27639 177455 +18012 177379 +7300 177367 +16389 177354 +5783 177341 +15775 177334 +10442 177311 +21248 177251 +15945 177248 +11400 177211 +22490 177209 +16212 177168 +20432 177124 +16625 177111 +12122 177089 +12948 177075 +20312 177066 +26767 177058 +11762 177052 +6679 177034 +36061 177015 +27941 177013 +10388 177008 +8634 177003 +26685 176994 +2259 176811 +21504 176802 +21941 176770 +19051 176713 +15052 176707 +18365 176701 +22311 176694 +23629 176647 +7386 176630 +14645 176620 +19671 176614 +7442 176566 +908 176554 +963 176538 +6708 176535 +13092 176534 +21041 176525 +16584 176498 +2798 176462 +11578 176420 +11303 176408 +7273 176391 +22893 176380 +4614 176339 +6765 176307 +14590 176286 +16176 176282 +12260 176279 +13568 176271 +17986 176270 +13533 176231 +28040 176226 +3978 176223 +9627 176157 +20210 176137 +18996 176125 +21891 176057 +15678 176047 +12532 175994 +15925 175983 +12449 175949 +19925 175941 +16933 175937 +9786 175935 +16699 175919 +9390 175908 +21809 175905 +37108 175883 +12800 175860 +22962 175858 +14841 175811 +13378 175806 +14594 175747 +20353 175712 +11841 175708 +3196 175676 +8260 175670 +9424 175656 +20685 175654 +2473 175652 +12383 175641 +18141 175636 +12070 175606 +17214 175605 +23668 175599 +12878 175578 +6996 175575 +14434 175564 +5117 175538 +19137 175531 +13836 175530 +27748 175492 +24756 175452 +2876 175441 +12006 175423 +18633 175421 +11625 175394 +28111 175300 +17131 175299 +26101 175296 +4757 175293 +16190 175283 +4660 175281 +24499 175281 +13957 175252 +23592 175226 +14532 175208 +6422 175206 +9795 175192 +6320 175173 +21650 175161 +23676 175135 +7764 175042 +9290 174995 +14112 174966 +10904 174950 +10928 174887 +14735 174859 +5728 174841 +14543 174810 +22175 174807 +14530 174738 +1741 174726 +12113 174647 +27616 174644 +13028 174622 +18054 174599 +638 174583 +13182 174582 +15359 174563 +17431 174553 +43850 174526 +16319 174525 +24252 174506 +21198 174471 +20023 174399 +7063 174367 +19562 174366 +22779 174346 +4307 174344 +1769 174327 +14239 174325 +5375 174305 +24454 174256 +8023 174253 +3541 174227 +9088 174178 +12301 174161 +14655 174111 +12462 174104 +25157 174068 +27546 174066 +25995 174064 +19087 174060 +23955 174041 +22102 174021 +13703 174011 +12175 174004 +21204 173982 +17935 173962 +20412 173922 +12083 173901 +14953 173892 +13439 173877 +30175 173873 +11796 173872 +21884 173811 +27911 173805 +29311 173801 +951 173797 +7923 173778 +18526 173761 +12733 173710 +28891 173706 +13121 173684 +1548 173674 +26293 173655 +18632 173633 +29141 173580 +6385 173558 +3694 173557 +15518 173539 +12213 173531 +25225 173478 +10782 173464 +20550 173462 +7227 173435 +817 173425 +8343 173419 +16730 173409 +25147 173394 +33339 173376 +27541 173306 +26203 173301 +31167 173285 +11901 173265 +11623 173194 +14376 173160 +27042 173159 +16079 173133 +7858 173078 +12952 172985 +17110 172967 +9828 172956 +16611 172909 +14788 172908 +21455 172893 
+… [several thousand additional data lines of whitespace-separated integer pairs ("<index> <count>"), with counts descending from 172882 to 100356; the full file body is elided here]
+17085 100353 +30115 100343 +46965 100335 +9785 100334 +9766 100314 +33324 100280 +16046 100274 +5807 100274 +28528 100268 +21098 100265 +38762 100258 +10161 100256 +39268 100256 +36194 100251 +24607 100247 +29170 100235 +28800 100226 +2991 100180 +28515 100159 +26862 100146 +1316 100144 +23796 100138 +11046 100136 +19337 100127 +12608 100126 +7376 100114 +29732 100107 +17567 100098 +20991 100094 +12636 100093 +24130 100092 +45130 100092 +984 100092 +13851 100071 +33452 100071 +39410 100054 +22259 100046 +9967 100038 +30593 100023 +27548 100018 +10680 100008 +39030 100008 +29384 99993 +17724 99992 +25358 99984 +21080 99981 +3857 99981 +32719 99971 +7649 99960 +32787 99958 +36388 99950 +43708 99940 +28635 99937 +32705 99930 +13254 99925 +30776 99923 +35481 99922 +17234 99912 +15119 99899 +34908 99861 +13805 99861 +33609 99851 +22677 99843 +18276 99837 +26183 99827 +15309 99810 +20263 99799 +41405 99793 +18512 99793 +16839 99779 +13545 99766 +19239 99755 +41800 99754 +17862 99754 +1794 99749 +21647 99737 +39964 99736 +23383 99723 +31908 99720 +25999 99720 +11308 99720 +35956 99695 +5105 99693 +26207 99660 +30783 99655 +24450 99653 +27343 99648 +24842 99644 +24347 99634 +18954 99625 +20820 99621 +23461 99614 +29798 99593 +10279 99589 +25605 99582 +19910 99562 +26775 99561 +8597 99558 +14574 99545 +6269 99520 +22321 99518 +14195 99510 +48148 99507 +32098 99474 +23316 99455 +12340 99450 +26786 99446 +17572 99423 +19401 99415 +45395 99405 +2733 99392 +26670 99378 +31826 99377 +18630 99373 +22505 99372 +37388 99371 +17978 99364 +12366 99357 +18297 99352 +5315 99352 +23618 99350 +21235 99343 +13898 99335 +26345 99331 +16236 99327 +39140 99323 +38772 99314 +28038 99308 +16121 99306 +18975 99302 +23309 99263 +12434 99260 +19096 99255 +29297 99240 +31370 99229 +19318 99226 +12403 99216 +29721 99214 +14251 99213 +19268 99200 +34705 99187 +11999 99183 +13526 99183 +16717 99175 +26103 99152 +21883 99148 +19288 99131 +18953 99126 +20039 99125 +31833 99122 +23245 99121 +22692 99118 +29532 99112 +39810 99072 +21810 99069 +17640 99069 +34707 99067 +30489 99055 +33478 99054 +18229 99034 +28651 99027 +34930 99027 +26530 99022 +26555 99020 +43305 99001 +28444 99001 +38204 98999 +24638 98996 +22526 98983 +14796 98983 +19229 98967 +30399 98959 +18460 98956 +23099 98954 +17543 98953 +14627 98946 +27539 98941 +48023 98930 +27725 98918 +10508 98910 +24166 98907 +30654 98895 +27382 98894 +19321 98893 +16123 98889 +22644 98882 +17430 98881 +19605 98878 +27806 98875 +19123 98875 +23207 98824 +25987 98811 +26048 98805 +35801 98804 +11348 98796 +10944 98793 +10117 98787 +15104 98780 +25724 98770 +47804 98760 +37903 98756 +14299 98745 +22896 98739 +26211 98735 +9649 98716 +19604 98712 +14056 98710 +18370 98651 +35434 98644 +238 98630 +47317 98624 +20843 98621 +32987 98612 +34880 98609 +15387 98604 +7190 98579 +32370 98579 +22977 98578 +20611 98567 +33905 98561 +2036 98556 +31057 98553 +43265 98550 +45870 98549 +7217 98549 +16113 98546 +33997 98528 +11251 98527 +42484 98525 +18336 98522 +26409 98518 +19035 98512 +47251 98487 +15548 98478 +3630 98467 +21258 98454 +23730 98450 +16613 98449 +10793 98428 +26443 98414 +14570 98405 +4402 98405 +32364 98397 +30091 98392 +40944 98386 +12277 98362 +20947 98360 +23148 98355 +21135 98351 +42745 98344 +25185 98339 +7991 98331 +10265 98313 +28901 98292 +4593 98291 +33261 98290 +24644 98283 +13626 98282 +18587 98281 +11502 98276 +31437 98267 +26622 98265 +21764 98242 +45408 98214 +32330 98212 +15639 98198 +16777 98196 +27440 98189 +38278 98169 +24395 98167 +25051 98164 +39588 98158 
+31596 98157 +32536 98133 +24558 98122 +12765 98117 +28194 98102 +28437 98098 +24955 98087 +44211 98086 +41196 98079 +37536 98069 +21635 98058 +3628 98053 +48931 98053 +15377 98041 +10771 98034 +28118 98030 +32443 98013 +21923 98008 +23916 98006 +25718 98005 +16687 98002 +16215 98001 +46819 97997 +11404 97997 +3445 97990 +15750 97970 +8623 97967 +28892 97960 +19508 97960 +13231 97943 +42883 97942 +10705 97939 +28061 97919 +29014 97915 +14887 97908 +31974 97903 +6855 97903 +17765 97903 +14155 97900 +16614 97893 +26117 97891 +37154 97883 +30890 97880 +26746 97874 +23335 97858 +10022 97838 +32020 97828 +12678 97827 +45810 97821 +28752 97795 +30996 97783 +31949 97779 +12544 97767 +39143 97757 +25967 97745 +16418 97741 +19064 97735 +30612 97693 +31357 97686 +29529 97682 +11927 97679 +17031 97679 +32597 97675 +29357 97674 +10414 97665 +45051 97663 +34827 97662 +40197 97660 +25017 97655 +13555 97649 +18288 97648 +7619 97621 +14612 97621 +12456 97621 +24723 97618 +31661 97616 +5900 97605 +32183 97601 +18026 97599 +42693 97599 +16531 97596 +26520 97586 +29667 97584 +29596 97581 +19723 97575 +27439 97575 +30757 97553 +31473 97552 +26082 97544 +26328 97542 +20908 97540 +37085 97527 +18121 97515 +33140 97514 +15633 97513 +27396 97512 +19556 97510 +22030 97498 +22888 97492 +11014 97487 +20321 97485 +33685 97481 +28744 97463 +34068 97455 +18539 97452 +15663 97451 +16731 97434 +22147 97423 +23949 97421 +28514 97417 +49173 97414 +28193 97385 +22329 97366 +25659 97354 +41035 97343 +26860 97342 +44369 97324 +39452 97317 +12371 97315 +17300 97313 +40449 97290 +20410 97288 +17145 97282 +22710 97279 +40195 97271 +19552 97265 +4246 97248 +21815 97245 +33515 97229 +31558 97228 +22060 97228 +39802 97223 +16885 97204 +43004 97203 +6701 97196 +28076 97186 +25734 97186 +35807 97182 +20571 97179 +19187 97172 +11986 97157 +11822 97156 +22295 97144 +30236 97137 +30140 97126 +14301 97108 +21085 97100 +22870 97092 +28949 97089 +23327 97080 +14927 97073 +24892 97071 +9078 97070 +25779 97053 +17080 97048 +22181 97048 +24805 97047 +48206 97042 +14014 97036 +17694 97032 +28853 97030 +25223 97019 +21092 97015 +8042 97008 +14898 97007 +34681 97005 +18594 97001 +18834 96999 +46499 96989 +18176 96985 +45212 96983 +19922 96982 +44726 96970 +4126 96968 +34755 96962 +24784 96957 +25428 96943 +48460 96934 +40388 96923 +2825 96911 +21686 96907 +32265 96898 +23301 96894 +11862 96892 +23299 96891 +21124 96879 +5106 96872 +38276 96871 +19144 96865 +26197 96858 +17511 96854 +17531 96848 +16312 96845 +15053 96835 +16716 96830 +8421 96817 +18543 96812 +18118 96805 +32615 96803 +18411 96789 +3076 96780 +20888 96775 +19459 96769 +6018 96764 +24495 96759 +22051 96753 +26299 96752 +4604 96742 +23071 96741 +1106 96729 +34327 96727 +38619 96710 +38740 96708 +43237 96702 +46097 96692 +35234 96689 +19468 96678 +8923 96678 +30785 96671 +33750 96670 +24514 96666 +17207 96649 +20420 96646 +19433 96645 +19884 96635 +24350 96635 +6661 96624 +23165 96608 +20254 96601 +33094 96592 +22979 96591 +22623 96590 +23160 96579 +13522 96578 +9203 96556 +24048 96555 +9971 96528 +10780 96521 +34701 96520 +25863 96507 +36231 96504 +24942 96501 +34060 96479 +20040 96463 +14392 96453 +29015 96451 +9696 96441 +30257 96436 +31784 96430 +13543 96429 +19267 96422 +47967 96420 +13826 96420 +36889 96417 +4798 96406 +34440 96405 +25483 96376 +19133 96356 +34780 96356 +41348 96344 +33718 96343 +10378 96333 +17486 96328 +13309 96326 +9192 96311 +21754 96301 +29136 96290 +27278 96290 +26605 96273 +31269 96262 +26843 96260 +29815 96243 +4862 96231 +21219 96219 +20787 96213 +2856 
96205 +33130 96196 +28445 96191 +22604 96180 +27340 96178 +15544 96168 +25115 96151 +19165 96145 +10007 96142 +28036 96135 +20408 96116 +2725 96115 +22989 96083 +19143 96079 +27264 96072 +26077 96072 +9632 96053 +4063 96036 +24089 96036 +29239 96032 +23273 96026 +14661 96021 +35415 96012 +3043 96011 +27567 96010 +39447 96010 +9069 95993 +31171 95988 +27353 95985 +32137 95980 +40695 95952 +20750 95941 +40222 95937 +21100 95922 +21558 95913 +32410 95910 +27177 95908 +20056 95890 +31220 95888 +13905 95883 +35274 95871 +22668 95857 +18275 95855 +26594 95835 +25786 95822 +14564 95821 +27308 95810 +24281 95805 +19897 95803 +32490 95796 +14948 95762 +2017 95741 +19713 95740 +24387 95733 +25075 95724 +22474 95724 +18831 95719 +34886 95717 +27425 95714 +13315 95698 +28492 95692 +48395 95686 +30652 95683 +30651 95671 +39361 95668 +41114 95663 +35121 95658 +23867 95651 +31981 95650 +2021 95649 +21662 95645 +16632 95638 +25377 95637 +21461 95633 +28120 95633 +25646 95620 +16588 95617 +36799 95615 +9883 95607 +20925 95592 +18511 95588 +44168 95587 +39672 95576 +21164 95565 +18027 95562 +33972 95558 +31637 95548 +22365 95536 +35336 95531 +8825 95523 +22339 95522 +7608 95521 +24947 95515 +14411 95506 +1307 95505 +16989 95486 +18616 95482 +33902 95473 +25409 95465 +29488 95452 +21552 95448 +7474 95436 +32605 95425 +33147 95421 +15304 95409 +39124 95402 +30970 95402 +33859 95401 +21851 95396 +12589 95395 +25507 95358 +24262 95358 +24014 95356 +46366 95351 +4107 95324 +11491 95315 +17747 95299 +22582 95286 +33405 95282 +37066 95272 +38131 95270 +16264 95267 +16649 95265 +25729 95254 +15174 95230 +16509 95221 +23879 95218 +41688 95215 +29658 95204 +8356 95193 +17917 95190 +33352 95171 +31735 95167 +35983 95166 +8812 95147 +12196 95147 +10837 95146 +9873 95136 +20935 95134 +7469 95114 +14751 95109 +21984 95097 +19018 95075 +32714 95065 +29809 95057 +33777 95057 +25376 95057 +32678 95048 +17182 95037 +46281 95032 +39245 95014 +25303 95012 +37487 94994 +42748 94986 +31341 94966 +19923 94954 +33356 94942 +16917 94935 +1704 94933 +20299 94931 +29782 94925 +24066 94922 +20664 94915 +17973 94910 +10516 94908 +19701 94899 +9395 94892 +9346 94887 +36715 94880 +22912 94880 +21318 94874 +10788 94873 +31656 94867 +21223 94849 +34962 94832 +34462 94830 +12233 94826 +13548 94793 +36636 94775 +12202 94762 +26784 94758 +6421 94752 +27731 94750 +34710 94739 +32540 94736 +3057 94727 +11024 94710 +31565 94694 +49281 94689 +19001 94683 +31678 94677 +6545 94676 +35687 94673 +26316 94633 +21134 94630 +34050 94630 +18023 94626 +44572 94623 +44781 94623 +8743 94596 +7807 94590 +5633 94589 +13587 94580 +36978 94574 +23122 94574 +12341 94564 +36002 94562 +18142 94560 +47678 94555 +13018 94537 +27356 94533 +9937 94525 +682 94499 +21640 94495 +16243 94465 +36503 94462 +47660 94450 +33503 94441 +28303 94435 +4749 94429 +32831 94420 +8530 94420 +26487 94418 +18469 94417 +36251 94414 +29125 94408 +14091 94399 +25494 94399 +25455 94377 +14331 94376 +12248 94351 +6750 94335 +18808 94333 +21490 94320 +22733 94314 +11696 94312 +23149 94305 +17241 94299 +22185 94288 +28814 94273 +26502 94273 +14272 94272 +14700 94259 +24768 94253 +13481 94244 +6014 94234 +30279 94231 +29686 94225 +6703 94216 +17306 94216 +3323 94178 +14515 94178 +46820 94167 +3005 94160 +18364 94150 +29662 94130 +29208 94129 +10036 94128 +26253 94128 +28294 94121 +39728 94102 +18480 94091 +27821 94088 +18420 94085 +9855 94077 +41260 94067 +6897 94063 +32951 94062 +10444 94043 +13180 94043 +16619 94037 +26113 94031 +2064 94029 +16308 94028 +37401 94028 +28094 94019 +39125 94017 
+27373 94015 +11775 94014 +22378 94008 +16741 94001 +16630 93994 +33984 93982 +22609 93963 +18948 93957 +13282 93949 +44323 93934 +13464 93932 +12354 93930 +14362 93923 +20328 93920 +25234 93919 +33527 93918 +15889 93906 +21021 93906 +43870 93903 +26310 93900 +12488 93900 +11855 93898 +39039 93895 +16374 93866 +26002 93859 +16744 93856 +14856 93839 +14613 93838 +17500 93826 +15323 93817 +21366 93803 +22626 93795 +15887 93795 +31704 93784 +22529 93784 +44317 93779 +40057 93769 +19985 93767 +801 93764 +27839 93746 +25263 93723 +32253 93722 +23905 93721 +13443 93719 +25281 93709 +24623 93701 +36421 93695 +22007 93687 +46306 93686 +32853 93681 +25170 93666 +15209 93656 +24677 93641 +17575 93638 +11331 93636 +1172 93625 +38188 93622 +31099 93611 +40445 93606 +27321 93601 +24230 93597 +13199 93586 +18320 93570 +18025 93568 +28996 93564 +37033 93563 +15784 93561 +21639 93553 +30828 93548 +30618 93537 +43245 93534 +31201 93532 +19024 93527 +30456 93527 +2905 93518 +8007 93500 +20265 93497 +22125 93482 +1733 93482 +39508 93474 +19844 93473 +21127 93472 +43513 93459 +35689 93452 +37907 93450 +1562 93429 +5152 93413 +33519 93375 +12026 93372 +22085 93372 +19493 93369 +6713 93365 +19504 93352 +24683 93347 +27297 93340 +34906 93338 +33458 93338 +24006 93338 +20066 93336 +23689 93335 +30286 93335 +28499 93334 +13716 93325 +17873 93324 +44502 93309 +4370 93305 +31136 93289 +10981 93284 +38881 93278 +18548 93272 +10295 93266 +50048 93265 +24391 93248 +4489 93237 +11248 93237 +17852 93233 +11902 93222 +6919 93198 +42100 93192 +26102 93178 +20680 93163 +46719 93161 +26581 93156 +20054 93155 +26906 93147 +22385 93113 +8525 93108 +23746 93103 +10212 93102 +30839 93100 +23205 93097 +19294 93086 +24918 93085 +32938 93085 +20640 93076 +667 93075 +3004 93074 +36384 93063 +15255 93058 +30600 93056 +18272 93046 +20588 93041 +29483 93036 +2309 93024 +5132 93015 +40318 92991 +8931 92991 +25620 92989 +44075 92987 +8764 92985 +25305 92983 +25277 92980 +14879 92973 +29986 92951 +16876 92946 +44041 92944 +27571 92918 +10740 92916 +7918 92914 +36516 92907 +26548 92906 +27060 92905 +9532 92905 +23095 92897 +37490 92887 +35493 92884 +14686 92881 +34593 92863 +23057 92846 +46917 92840 +20042 92840 +29070 92838 +7633 92830 +12849 92825 +1490 92822 +45303 92813 +25860 92800 +36832 92793 +26916 92791 +22019 92786 +35591 92785 +31065 92776 +3927 92775 +41164 92760 +10459 92752 +23477 92737 +26057 92730 +29210 92720 +10973 92712 +5232 92711 +24359 92705 +16766 92705 +36812 92702 +17347 92685 +32413 92675 +19201 92674 +11887 92664 +35017 92664 +28646 92661 +10418 92641 +49762 92640 +27197 92636 +19882 92616 +23453 92613 +48607 92600 +19175 92599 +26727 92597 +5102 92590 +25629 92577 +252 92571 +5028 92570 +19845 92565 +6436 92559 +36611 92554 +40021 92550 +23968 92549 +13909 92544 +25339 92539 +11980 92536 +13227 92534 +28170 92528 +26645 92527 +42499 92522 +18043 92508 +16729 92503 +12106 92502 +22299 92483 +25937 92475 +31233 92465 +15568 92444 +17090 92443 +21051 92440 +28591 92440 +6171 92411 +26655 92397 +24584 92386 +14314 92384 +19297 92377 +12896 92376 +24433 92374 +36771 92372 +12862 92368 +21452 92367 +46459 92358 +30870 92352 +7352 92337 +7061 92328 +24325 92322 +22188 92321 +38838 92316 +16446 92309 +23557 92299 +18643 92298 +10916 92283 +35078 92260 +7322 92256 +27822 92251 +24656 92247 +18463 92237 +39661 92227 +9064 92217 +20278 92214 +9496 92212 +34881 92210 +23074 92204 +25449 92191 +19883 92186 +24219 92184 +12394 92182 +3652 92170 +6683 92164 +42630 92161 +22574 92161 +26610 92156 +22106 92156 +18059 92142 
+29654 92139 +22206 92133 +30325 92133 +25317 92121 +39309 92115 +10527 92106 +28393 92100 +23056 92099 +42150 92097 +21216 92095 +26800 92087 +17150 92081 +38478 92078 +20399 92072 +8048 92063 +27138 92063 +33892 92057 +7642 92046 +43326 92046 +14573 92043 +29786 92038 +11280 92028 +27662 92026 +35646 92025 +19102 92023 +7928 92018 +41345 92013 +19440 91973 +1093 91965 +7836 91956 +34405 91939 +23239 91929 +35497 91928 +6424 91916 +23217 91909 +19838 91909 +42350 91901 +26334 91895 +35816 91894 +20269 91892 +30718 91879 +37969 91869 +27397 91860 +34077 91858 +14151 91858 +9501 91855 +22423 91839 +11519 91834 +30462 91831 +14068 91822 +24402 91813 +10865 91811 +40132 91800 +48858 91799 +18221 91797 +12417 91795 +19862 91786 +46381 91781 +46260 91774 +6456 91765 +33318 91757 +25116 91754 +5200 91737 +21294 91735 +36904 91733 +20067 91721 +16288 91712 +16170 91709 +36574 91700 +30678 91699 +30437 91688 +20326 91683 +21814 91672 +45963 91662 +32659 91636 +41845 91629 +26938 91628 +20383 91621 +27957 91620 +28662 91619 +32979 91615 +30570 91607 +32002 91591 +26565 91588 +9827 91567 +34915 91566 +30012 91562 +34686 91552 +23084 91543 +17555 91541 +45229 91527 +34696 91525 +21256 91522 +4682 91516 +39303 91511 +34422 91500 +13958 91496 +22219 91493 +46422 91471 +32526 91470 +47775 91463 +6040 91460 +17909 91457 +13216 91449 +12796 91444 +27907 91443 +25173 91430 +42286 91423 +20693 91419 +16866 91415 +22765 91411 +11498 91403 +8677 91396 +8394 91383 +22730 91375 +18783 91374 +40474 91368 +29753 91348 +22889 91344 +24337 91343 +41525 91340 +22949 91334 +31890 91331 +23699 91322 +25971 91308 +26777 91308 +11291 91307 +7368 91305 +10798 91299 +18478 91272 +16641 91269 +12609 91244 +28983 91230 +15824 91227 +12562 91224 +12060 91223 +16369 91222 +23353 91220 +28411 91218 +21661 91211 +24569 91210 +20009 91210 +45371 91202 +25861 91202 +34474 91200 +25984 91189 +21879 91170 +10910 91168 +13834 91159 +19343 91156 +36588 91150 +20037 91146 +33749 91143 +22625 91131 +2509 91128 +34732 91098 +30880 91097 +25725 91070 +14821 91063 +19547 91062 +21674 91042 +31158 91034 +10028 91033 +30043 91027 +14426 91025 +17219 91024 +30046 91023 +25403 91020 +32342 91016 +39265 91015 +43664 91003 +27840 91001 +29359 90985 +19387 90978 +10505 90960 +21621 90959 +33242 90945 +13946 90944 +21915 90942 +24635 90941 +16132 90938 +9702 90936 +29489 90927 +3468 90926 +21649 90913 +20958 90900 +25923 90899 +15992 90897 +35633 90887 +6726 90881 +21971 90869 +29628 90833 +14731 90827 +44025 90826 +26695 90806 +20460 90802 +42185 90800 +35165 90794 +46561 90793 +20744 90788 +29475 90778 +8375 90769 +20905 90760 +11224 90757 +14805 90754 +20445 90749 +23401 90746 +25355 90745 +25154 90744 +11163 90741 +27523 90734 +37387 90731 +40412 90730 +3534 90717 +41346 90716 +25349 90716 +23348 90716 +20060 90705 +6571 90703 +31367 90700 +30926 90696 +24950 90694 +24346 90689 +24489 90677 +36029 90667 +12025 90667 +13058 90666 +9191 90664 +34051 90653 +16809 90649 +19322 90646 +21392 90646 +14701 90631 +17851 90631 +40644 90629 +11146 90629 +32632 90625 +12001 90620 +14874 90605 +47966 90604 +26593 90566 +24484 90565 +40288 90564 +21682 90555 +35671 90544 +39927 90542 +13569 90541 +32832 90540 +28661 90540 +6818 90494 +14843 90492 +28157 90474 +46832 90471 +22776 90466 +11814 90459 +34676 90459 +27325 90455 +35564 90433 +36985 90432 +19446 90431 +17246 90424 +14005 90400 +22332 90391 +12867 90389 +15919 90389 +28534 90385 +3054 90384 +27123 90380 +6179 90369 +15027 90348 +37874 90337 +17512 90325 +19221 90317 +26187 90309 +42284 90307 
+12050 90272 +29741 90260 +13992 90260 +44340 90258 +9123 90249 +31429 90243 +9094 90233 +6351 90232 +23167 90231 +31355 90228 +21486 90212 +7659 90210 +21438 90195 +38428 90192 +30231 90185 +35874 90179 +42873 90167 +23403 90166 +19704 90164 +29570 90155 +4808 90144 +28464 90134 +30307 90129 +16603 90115 +29060 90084 +38774 90082 +7230 90081 +27075 90071 +36467 90067 +21429 90066 +20128 90058 +29459 90052 +12782 90047 +30153 90043 +25930 90039 +17738 90032 +27449 90015 +22054 90013 +21611 90009 +30305 90007 +28088 90001 +24497 89973 +18485 89969 +10080 89960 +20618 89956 +15197 89950 +33286 89941 +13083 89940 +15492 89935 +6237 89910 +99 89908 +25331 89905 +27521 89900 +30700 89889 +21094 89876 +30706 89862 +32486 89832 +34584 89809 +24903 89790 +19528 89789 +11912 89787 +24375 89784 +17102 89776 +44452 89773 +19503 89772 +41900 89768 +35896 89762 +16900 89760 +18972 89751 +42052 89745 +1887 89743 +26924 89735 +19185 89734 +20795 89727 +6024 89723 +18952 89721 +43791 89710 +34766 89704 +34138 89697 +38018 89666 +37619 89649 +10235 89647 +33072 89646 +27529 89644 +4464 89642 +28558 89637 +34017 89631 +16405 89630 +17672 89628 +37123 89621 +35047 89619 +29362 89618 +34895 89610 +32982 89591 +14562 89589 +28488 89587 +22756 89582 +37584 89569 +18959 89566 +24599 89549 +29118 89544 +42208 89537 +11567 89483 +28984 89465 +31231 89449 +22129 89445 +39739 89445 +26740 89444 +24303 89429 +36025 89413 +6442 89413 +20038 89409 +42682 89400 +25794 89392 +25056 89364 +19813 89364 +10316 89363 +22020 89356 +26762 89352 +19893 89352 +23503 89344 +11489 89341 +26296 89337 +1217 89335 +19762 89326 +18596 89322 +26387 89322 +26871 89320 +4500 89317 +10977 89305 +40378 89302 +14507 89298 +27259 89297 +9347 89282 +37690 89276 +37090 89264 +27602 89254 +20435 89238 +9744 89232 +33190 89219 +18814 89214 +33886 89204 +18529 89202 +17047 89198 +15176 89195 +15354 89191 +20050 89188 +22554 89182 +20767 89175 +36633 89171 +25107 89167 +28823 89141 +19555 89120 +12599 89115 +37217 89112 +22331 89107 +20687 89097 +36069 89093 +34601 89090 +15735 89076 +20950 89069 +43174 89068 +12130 89067 +31402 89064 +20982 89060 +34281 89046 +44467 89038 +16772 89035 +35141 89033 +14062 89029 +28176 89026 +16186 89018 +13269 89016 +41826 89014 +30883 89010 +23987 89009 +36429 88992 +16439 88987 +10211 88981 +27508 88970 +37940 88969 +28999 88958 +26772 88951 +29816 88945 +38617 88939 +26311 88925 +20923 88924 +39113 88915 +41782 88906 +17669 88900 +24632 88869 +23221 88864 +32414 88862 +27015 88857 +9760 88848 +17000 88841 +28224 88835 +26527 88835 +31920 88827 +8051 88783 +37519 88779 +15180 88745 +26930 88742 +12543 88731 +29383 88705 +26553 88695 +33014 88691 +48812 88689 +19962 88667 +41904 88658 +40974 88658 +27980 88656 +41703 88655 +1179 88655 +48822 88632 +23027 88622 +41005 88616 +10137 88611 +36971 88610 +39045 88607 +43126 88606 +20625 88601 +35692 88599 +13248 88586 +49003 88585 +30848 88577 +38317 88556 +20339 88552 +17682 88545 +31683 88530 +18586 88516 +25407 88513 +36590 88511 +20673 88510 +13217 88506 +24374 88506 +23749 88504 +15212 88500 +17284 88500 +20313 88499 +32241 88490 +17221 88486 +13809 88465 +19434 88465 +38522 88456 +32438 88443 +21431 88423 +31213 88420 +32174 88415 +24685 88383 +15355 88376 +18581 88355 +13380 88347 +12391 88343 +23934 88340 +23714 88328 +16221 88324 +10521 88319 +27624 88319 +28803 88308 +41905 88301 +12204 88288 +25588 88280 +15056 88278 +30722 88274 +21787 88272 +29043 88266 +22987 88259 +49183 88255 +13617 88253 +26816 88252 +7070 88251 +21334 88247 +18628 88212 +3133 88198 
+24949 88197 +21082 88190 +4582 88188 +38758 88184 +19585 88179 +44535 88175 +35982 88163 +17480 88160 +20145 88159 +36245 88154 +15165 88151 +115 88146 +2016 88119 +34317 88106 +19186 88084 +7755 88080 +16859 88067 +18631 88056 +15420 88053 +2369 88051 +32601 88050 +27095 88048 +22837 88047 +17707 88046 +26149 88041 +30920 88040 +16064 88036 +9459 88029 +37332 88028 +18992 88026 +10379 88024 +17509 88021 +11203 88021 +16957 88014 +18285 88002 +27146 87994 +32176 87987 +23050 87981 +15801 87967 +39540 87967 +13652 87966 +25208 87947 +42605 87943 +33613 87937 +48955 87926 +11933 87924 +33375 87916 +29505 87903 +31893 87902 +27341 87889 +13848 87889 +16513 87886 +21546 87885 +22486 87883 +26898 87878 +34380 87877 +42065 87875 +28336 87874 +34136 87870 +36718 87867 +22875 87867 +23083 87850 +40766 87848 +23332 87846 +28180 87823 +19020 87823 +37685 87821 +21387 87810 +41137 87797 +20088 87795 +28431 87790 +46287 87790 +19710 87785 +4125 87779 +14356 87778 +3600 87767 +48125 87766 +17650 87762 +28775 87741 +33945 87736 +26973 87730 +30441 87726 +41578 87725 +12461 87723 +25359 87712 +45961 87709 +31618 87691 +44303 87688 +23456 87686 +9538 87672 +40782 87672 +9630 87667 +16790 87642 +14309 87637 +25290 87637 +4072 87629 +26944 87622 +18063 87615 +17299 87603 +21131 87601 +25891 87597 +49657 87597 +34916 87591 +14642 87588 +25845 87571 +38330 87569 +40833 87560 +20137 87551 +15906 87546 +22459 87541 +44824 87541 +45034 87527 +22513 87527 +6676 87523 +35917 87515 +29727 87509 +32099 87508 +5352 87502 +20439 87483 +17059 87474 +22350 87468 +39727 87467 +33742 87456 +29832 87455 +25168 87454 +24160 87451 +32919 87428 +20391 87422 +4029 87420 +23386 87417 +35062 87416 +27699 87413 +15601 87411 +11922 87411 +20466 87406 +14025 87405 +19072 87404 +30784 87397 +1113 87395 +19686 87389 +26671 87386 +8455 87381 +38442 87381 +36443 87363 +20667 87362 +19407 87355 +13483 87353 +41742 87350 +116 87319 +30174 87314 +29195 87309 +40214 87303 +7065 87302 +37756 87299 +27144 87283 +13251 87280 +32733 87276 +20080 87269 +12915 87263 +19979 87254 +25571 87253 +40735 87247 +5216 87243 +30925 87241 +23972 87238 +11979 87218 +24092 87199 +22570 87199 +32552 87193 +42644 87190 +29775 87186 +21929 87183 +33218 87179 +24749 87172 +19098 87166 +21156 87163 +21784 87162 +13277 87156 +23996 87153 +19766 87145 +21226 87139 +28160 87137 +31926 87128 +19907 87119 +30512 87116 +45446 87114 +4629 87109 +23427 87108 +18637 87107 +19878 87105 +8736 87087 +46552 87080 +16345 87068 +25351 87060 +4355 87050 +23505 87043 +41182 87032 +4124 87020 +22338 87016 +19812 87009 +21115 87005 +48855 87005 +31273 87005 +23422 87003 +19390 86988 +27768 86982 +21698 86976 +23116 86962 +20007 86959 +25566 86954 +21385 86952 +19536 86949 +8792 86943 +17968 86920 +8174 86919 +38536 86905 +6063 86901 +20703 86898 +36810 86894 +28126 86890 +16789 86885 +21853 86881 +30188 86871 +39651 86864 +33521 86862 +18182 86794 +10486 86793 +14454 86792 +44311 86779 +22363 86773 +21367 86765 +6944 86763 +6352 86755 +7155 86746 +40902 86745 +22000 86734 +33659 86734 +26157 86731 +17216 86730 +39734 86729 +18620 86726 +25273 86720 +5596 86708 +11743 86698 +29023 86698 +7059 86696 +41078 86692 +47249 86684 +21732 86675 +34899 86673 +28020 86666 +14319 86663 +38403 86650 +40208 86648 +34767 86648 +15974 86641 +16250 86637 +34662 86634 +47089 86629 +38928 86617 +30941 86595 +3374 86590 +21178 86585 +17546 86574 +11074 86569 +22287 86567 +35979 86561 +5681 86552 +9200 86549 +34914 86544 +20256 86537 +20149 86534 +20064 86533 +19506 86528 +38433 86524 +18437 86523 
+31239 86522 +17710 86521 +6587 86521 +22034 86519 +35105 86513 +20411 86510 +29025 86498 +10517 86485 +20479 86485 +46950 86480 +14453 86465 +24381 86458 +12913 86443 +16285 86440 +43248 86430 +16810 86427 +31863 86426 +27307 86426 +37956 86416 +19863 86405 +31742 86403 +24211 86401 +19733 86398 +33164 86396 +21854 86393 +11267 86390 +18102 86389 +29413 86385 +24621 86382 +42700 86378 +14848 86378 +2287 86373 +12466 86365 +22999 86360 +23665 86352 +22254 86342 +6039 86338 +50158 86337 +35805 86333 +35593 86330 +21882 86328 +42168 86324 +24406 86311 +15688 86305 +16227 86302 +40669 86301 +9700 86300 +29317 86295 +40233 86280 +31865 86275 +10466 86256 +17200 86255 +19607 86252 +14513 86248 +24659 86243 +19861 86240 +6658 86221 +48297 86217 +3532 86216 +14981 86192 +18710 86180 +24775 86178 +32456 86177 +15111 86175 +35680 86154 +39995 86152 +4565 86145 +30006 86119 +19342 86114 +22010 86111 +47594 86102 +18634 86096 +46187 86094 +33337 86078 +41145 86076 +23171 86067 +23992 86054 +32405 86043 +23978 86038 +23152 86035 +42602 86035 +13991 86026 +13360 86023 +44481 86020 +43586 85995 +30985 85990 +21779 85988 +36647 85984 +32145 85984 +19091 85979 +20723 85970 +10819 85966 +23173 85955 +25124 85952 +12423 85949 +18851 85940 +33664 85931 +16583 85923 +20063 85918 +15029 85918 +41839 85915 +41960 85914 +24998 85901 +17697 85901 +27355 85892 +6313 85891 +44749 85880 +17232 85876 +33514 85867 +16453 85860 +10407 85858 +14350 85856 +22849 85835 +38665 85818 +27973 85810 +28077 85807 +19341 85804 +34682 85799 +44170 85798 +20592 85797 +21079 85791 +47538 85790 +48848 85782 +24404 85771 +37003 85769 +12363 85756 +49315 85752 +32619 85743 +11201 85714 +37006 85709 +6794 85702 +19698 85702 +21391 85696 +39282 85682 +32175 85666 +21375 85666 +13152 85656 +24435 85645 +27618 85634 +17065 85633 +18659 85631 +26374 85628 +20514 85605 +20823 85599 +23077 85598 +27213 85593 +22289 85585 +44359 85571 +25909 85565 +24187 85554 +16638 85537 +1713 85534 +19409 85527 +30496 85519 +24352 85508 +26428 85501 +13259 85501 +17157 85501 +33814 85500 +40777 85492 +21214 85486 +17520 85481 +32716 85466 +22139 85463 +47574 85451 +1300 85447 +22072 85440 +12753 85438 +20584 85429 +43784 85427 +31225 85421 +13920 85414 +26312 85404 +17242 85403 +11438 85403 +14936 85397 +23202 85390 +46185 85375 +33590 85372 +15711 85369 +34041 85365 +33579 85355 +16102 85354 +17545 85354 +42766 85336 +29679 85326 +10223 85325 +38552 85319 +20558 85307 +17395 85297 +39086 85295 +44298 85291 +2360 85279 +42086 85229 +38343 85228 +29949 85222 +32961 85219 +29445 85218 +15587 85208 +19208 85208 +28213 85206 +28099 85203 +41505 85202 +24205 85202 +43596 85194 +15780 85194 +17438 85184 +20416 85168 +13631 85165 +27437 85162 +35663 85144 +39017 85136 +22714 85100 +36976 85098 +18013 85096 +36591 85091 +28438 85079 +24097 85067 +35386 85055 +7877 85048 +31483 85027 +24451 85024 +43002 85016 +23701 85016 +33347 84993 +41200 84983 +18927 84979 +20734 84970 +22959 84955 +18689 84945 +18723 84939 +21822 84921 +31840 84912 +26262 84907 +34233 84900 +38118 84899 +34118 84893 +4954 84883 +30845 84882 +28049 84881 +25044 84875 +36646 84852 +28943 84843 +42502 84843 +15410 84842 +31088 84829 +10475 84828 +26472 84822 +22546 84798 +25243 84792 +20428 84780 +41438 84769 +20089 84735 +20708 84727 +36229 84724 +47430 84724 +28595 84720 +32280 84715 +49220 84709 +10885 84706 +23757 84702 +6169 84700 +41406 84691 +19078 84686 +24694 84679 +26378 84668 +24636 84664 +45265 84637 +2517 84635 +32314 84617 +16635 84612 +41695 84611 +22562 84610 +19856 84609 +18440 
84601 +11222 84597 +14529 84591 +20770 84589 +32260 84583 +20132 84581 +42037 84578 +16189 84555 +27876 84552 +20122 84548 +35181 84541 +44113 84540 +19921 84539 +24751 84537 +13144 84535 +23373 84533 +27304 84530 +19565 84526 +8040 84520 +29324 84519 +48880 84511 +35397 84505 +34545 84501 +15611 84497 +38701 84493 +20396 84487 +5029 84481 +14866 84476 +35766 84462 +38623 84462 +16086 84456 +25569 84448 +42649 84440 +33195 84424 +10104 84420 +13713 84404 +24795 84401 +35034 84400 +39503 84396 +32022 84377 +14705 84368 +40993 84365 +19361 84360 +23436 84357 +14584 84352 +25027 84340 +49722 84326 +31067 84323 +40679 84318 +24592 84293 +30263 84286 +33881 84282 +29612 84277 +31812 84264 +26643 84257 +2600 84257 +759 84240 +36418 84236 +13949 84232 +26889 84227 +22712 84221 +15982 84216 +23399 84215 +114 84202 +10734 84196 +43575 84156 +24273 84156 +16812 84155 +31853 84150 +19908 84144 +23799 84141 +19419 84118 +31252 84118 +17598 84115 +35727 84094 +45577 84092 +29144 84089 +40626 84080 +16564 84076 +2204 84075 +32116 84069 +8444 84067 +20559 84066 +5651 84061 +23860 84058 +28064 84049 +28584 84041 +27647 84029 +14557 84024 +16104 84020 +39617 84009 +24167 83997 +3193 83990 +47995 83975 +18606 83975 +11718 83972 +37919 83962 +23218 83959 +42823 83952 +29395 83950 +21128 83946 +17892 83945 +12335 83944 +35329 83941 +45093 83930 +25711 83925 +34113 83915 +16498 83910 +9278 83906 +3093 83901 +24276 83874 +25430 83867 +32924 83857 +17519 83857 +31133 83847 +16082 83843 +34235 83842 +22430 83805 +26403 83802 +17513 83793 +22451 83789 +35464 83787 +37492 83774 +35242 83765 +46335 83757 +27645 83757 +13681 83746 +29314 83746 +28890 83724 +27810 83719 +28486 83717 +35673 83715 +4078 83714 +29777 83710 +41721 83708 +27322 83705 +35338 83703 +36694 83698 +30029 83690 +30090 83688 +39830 83687 +44145 83684 +38115 83678 +7158 83677 +23985 83676 +5634 83673 +7282 83661 +27940 83657 +35670 83649 +40599 83648 +8298 83623 +34005 83610 +20130 83610 +42525 83606 +28454 83553 +43192 83552 +44603 83550 +28910 83539 +20648 83524 +18876 83523 +26356 83521 +12146 83507 +27671 83493 +49484 83488 +8089 83484 +14514 83480 +26720 83479 +30750 83476 +45814 83473 +47989 83463 +21043 83449 +42178 83443 +18388 83431 +4741 83420 +29502 83417 +26925 83403 +18964 83394 +26140 83383 +13158 83378 +39278 83375 +47534 83374 +21415 83366 +18261 83361 +26131 83360 +17048 83359 +24655 83357 +27335 83356 +23851 83353 +13585 83348 +28136 83345 +26083 83343 +21601 83343 +19621 83339 +44024 83333 +1572 83327 +7814 83325 +27251 83321 +34852 83305 +22904 83303 +21715 83295 +18669 83293 +30927 83293 +3441 83288 +24401 83274 +25635 83268 +12349 83254 +17583 83254 +21804 83252 +21726 83251 +23734 83237 +10854 83231 +39751 83228 +19541 83222 +18290 83213 +29104 83211 +43761 83209 +25406 83202 +32189 83184 +23271 83184 +43772 83146 +19499 83145 +4147 83143 +32557 83142 +7140 83136 +8291 83130 +45646 83127 +21182 83112 +25246 83099 +27069 83097 +27170 83096 +21752 83090 +8656 83090 +32620 83075 +8827 83071 +9636 83067 +18227 83050 +23000 83046 +46508 83043 +37242 83042 +16794 83011 +36385 83008 +12881 83001 +32889 82994 +37529 82991 +13014 82987 +17211 82985 +18018 82981 +4677 82980 +9650 82977 +5683 82975 +34927 82961 +25300 82960 +27141 82953 +10019 82944 +49001 82942 +8843 82933 +7371 82932 +8321 82922 +15802 82912 +28300 82907 +18506 82899 +11630 82889 +23648 82887 +37427 82882 +20610 82882 +32365 82876 +9624 82853 +46518 82849 +15473 82831 +11351 82821 +35819 82808 +13469 82804 +42379 82793 +30595 82792 +27347 82787 +16628 82779 +39542 
82771 +23485 82756 +28405 82747 +39634 82742 +39151 82740 +3509 82734 +25727 82733 +22665 82730 +15950 82729 +6214 82724 +41835 82718 +8896 82715 +37866 82710 +9999 82701 +25329 82696 +11316 82687 +6192 82680 +25567 82674 +11241 82673 +19601 82670 +18346 82669 +2186 82665 +13740 82653 +20951 82649 +1469 82635 +32267 82625 +27103 82610 +35806 82602 +25023 82588 +30198 82585 +3906 82560 +17202 82555 +13719 82553 +40209 82550 +19408 82538 +20385 82535 +21514 82521 +12254 82513 +44291 82500 +28536 82492 +17082 82488 +9275 82469 +17421 82467 +3075 82450 +45518 82444 +29669 82442 +34968 82425 +33909 82424 +23690 82423 +20227 82418 +21036 82416 +7574 82416 +3253 82415 +15237 82415 +11034 82411 +25764 82394 +15034 82390 +24282 82387 +36546 82373 +35160 82370 +23710 82362 +18598 82356 +47297 82355 +48685 82349 +15353 82346 +20862 82346 +13219 82345 +22426 82344 +27067 82335 +11166 82333 +10585 82314 +27884 82313 +19162 82312 +28804 82309 +29256 82305 +12972 82300 +8802 82297 +33306 82287 +33404 82285 +23390 82282 +24619 82276 +24302 82269 +15539 82264 +29661 82247 +30701 82247 +47618 82247 +21078 82241 +24408 82240 +23677 82232 +20850 82229 +15447 82226 +19472 82220 +14413 82219 +20309 82196 +46879 82190 +41463 82189 +16143 82184 +16196 82183 +30972 82180 +1993 82173 +31440 82162 +41668 82157 +34238 82149 +10561 82128 +43412 82123 +15636 82117 +28146 82117 +11125 82115 +34015 82104 +15831 82104 +23432 82102 +42730 82088 +30047 82085 +23929 82079 +16586 82073 +18660 82067 +18759 82066 +41459 82064 +15292 82051 +18830 82046 +25046 82045 +7233 82042 +38990 82038 +13582 82032 +9226 81980 +21247 81977 +27376 81961 +25227 81957 +42342 81947 +24902 81940 +32887 81940 +36143 81930 +15219 81922 +919 81920 +24566 81914 +20870 81910 +20140 81909 +29586 81901 +22216 81900 +45103 81893 +19486 81878 +21983 81874 +32289 81870 +30809 81868 +37053 81866 +19399 81850 +15983 81845 +22563 81842 +23572 81842 +8139 81836 +9128 81835 +46397 81830 +20293 81801 +19586 81783 +23007 81782 +29961 81764 +43287 81756 +27929 81752 +29543 81746 +23575 81736 +13022 81724 +21761 81723 +13655 81719 +11045 81715 +12142 81710 +42429 81706 +23616 81703 +29103 81702 +7789 81700 +14146 81693 +32660 81672 +30037 81671 +29742 81662 +20121 81661 +19370 81638 +26706 81626 +14588 81618 +30319 81618 +50041 81613 +29707 81611 +19190 81607 +40751 81605 +47189 81603 +28318 81591 +23715 81585 +27638 81584 +33837 81583 +17201 81577 +23038 81572 +31787 81556 +16122 81552 +50083 81547 +29337 81542 +40365 81531 +35271 81528 +45202 81525 +20232 81502 +47630 81502 +18810 81484 +19643 81480 +23091 81478 +5444 81477 +20697 81477 +25126 81475 +22144 81474 +769 81474 +28269 81463 +39043 81461 +20887 81459 +23161 81458 +23026 81451 +39926 81451 +15049 81444 +23501 81440 +40650 81435 +25524 81408 +1646 81400 +15918 81400 +38455 81393 +8367 81384 +10993 81384 +32899 81381 +50133 81375 +3473 81360 +24345 81358 +27893 81353 +12760 81344 +15646 81342 +23670 81337 +14511 81326 +15940 81319 +19755 81319 +10887 81318 +28175 81314 +14439 81312 +30572 81306 +42576 81303 +39534 81290 +12770 81290 +14326 81277 +25087 81271 +13552 81268 +16060 81259 +22092 81247 +35002 81244 +24423 81243 +38180 81235 +29594 81225 +20992 81224 +26320 81221 +21140 81215 +44786 81212 +36882 81212 +26162 81179 +32675 81171 +1515 81170 +30527 81169 +28310 81165 +19397 81164 +33143 81161 +22760 81159 +21773 81149 +22517 81149 +35893 81147 +24103 81137 +34590 81135 +27070 81133 +20582 81129 +28577 81123 +24956 81121 +25314 81114 +36746 81114 +20721 81106 +24930 81104 +18537 81102 +5063 81101 
+26022 81100 +49396 81097 +30404 81074 +33728 81070 +3654 81068 +24115 81059 +41434 81057 +34520 81057 +17002 81054 +30666 81041 +31569 81037 +40130 81037 +21586 81032 +43682 81030 +29479 81029 +35603 81026 +25767 81025 +8984 80993 +4754 80989 +19594 80983 +20332 80979 +29702 80975 +23841 80974 +20003 80974 +20033 80972 +25692 80968 +35839 80965 +21289 80960 +7656 80949 +15060 80949 +6094 80930 +37544 80927 +37709 80924 +31451 80915 +25133 80906 +41401 80905 +16129 80903 +7999 80886 +41281 80883 +34086 80870 +39342 80864 +42257 80859 +40263 80859 +44365 80857 +23597 80856 +32720 80854 +21125 80848 +15269 80847 +24862 80845 +26174 80844 +37386 80843 +13175 80826 +21670 80822 +21137 80808 +17926 80808 +36867 80796 +16616 80794 +29379 80791 +30882 80789 +22408 80788 +16329 80788 +22998 80773 +20842 80757 +35877 80753 +22952 80752 +21073 80739 +30400 80735 +40775 80731 +40167 80729 +18405 80727 +17194 80723 +22296 80716 +28016 80706 +25708 80704 +9862 80691 +24523 80691 +9913 80690 +36085 80687 +23333 80685 +29843 80679 +45406 80664 +25664 80645 +18714 80643 +22612 80629 +20973 80628 +18181 80627 +46434 80609 +32153 80608 +30340 80606 +26681 80604 +14415 80598 +25011 80590 +19392 80588 +21293 80583 +24985 80568 +6360 80567 +16022 80562 +24162 80548 +32474 80544 +25073 80536 +34595 80528 +13605 80518 +17660 80516 +25495 80514 +31394 80512 +38572 80512 +29989 80509 +1999 80508 +31723 80501 +25680 80491 +12061 80482 +36831 80480 +21032 80478 +13421 80478 +22393 80463 +23371 80452 +31488 80447 +31164 80442 +26541 80433 +38717 80417 +48823 80416 +44008 80409 +7126 80385 +19981 80378 +24969 80373 +35270 80368 +31530 80362 +48114 80360 +20864 80352 +31867 80351 +30525 80350 +28856 80349 +40592 80346 +33990 80345 +31839 80344 +7833 80334 +47299 80329 +22120 80328 +31241 80322 +45788 80321 +6985 80318 +39261 80308 +21510 80308 +15770 80292 +18788 80285 +30370 80275 +48514 80274 +25827 80268 +24594 80258 +17376 80257 +20224 80253 +10888 80252 +23238 80248 +10163 80244 +17526 80238 +20374 80237 +27386 80229 +24801 80218 +2053 80217 +13915 80213 +26989 80212 +34133 80200 +20586 80200 +38672 80194 +25451 80193 +15671 80185 +23999 80169 +40886 80166 +15565 80166 +5082 80162 +30900 80156 +19769 80150 +19139 80150 +11951 80147 +21885 80143 +23844 80137 +6238 80121 +43083 80114 +30759 80114 +34443 80100 +21268 80099 +38847 80098 +21857 80073 +29900 80068 +10057 80068 +13632 80050 +19799 80048 +2132 80045 +37503 80041 +21824 80040 +25939 80038 +13987 80032 +8484 80023 +45208 80014 +21986 80012 +20678 80012 +11165 79976 +41774 79971 +44126 79970 +38388 79969 +4794 79968 +22726 79963 +8738 79960 +31868 79955 +26734 79954 +20284 79940 +19333 79939 +35618 79934 +28847 79927 +22014 79925 +21622 79923 +29423 79921 +36502 79919 +23974 79918 +29860 79915 +33173 79909 +27220 79907 +29399 79906 +35248 79906 +31314 79894 +10551 79889 +48471 79877 +16652 79863 +48053 79863 +40013 79862 +14625 79857 +21977 79829 +19581 79823 +19061 79817 +21994 79817 +32548 79813 +14003 79811 +38289 79810 +43109 79805 +41147 79756 +14508 79749 +38427 79744 +24703 79729 +19824 79727 +37321 79714 +36975 79713 +26900 79689 +33011 79682 +2527 79680 +32711 79679 +28921 79671 +35457 79666 +26441 79656 +20892 79652 +42124 79649 +47463 79637 +22782 79635 +14842 79622 +33978 79614 +32354 79610 +15883 79607 +16898 79604 +8041 79602 +17106 79601 +20484 79600 +20376 79597 +38562 79592 +21443 79587 +11260 79577 +22652 79575 +32862 79568 +41828 79563 +33126 79558 +25766 79548 +34861 79544 +14432 79544 +25420 79542 +22258 79540 +31796 79537 +22956 79516 
+21534 79497 +17987 79492 +34101 79489 +17129 79481 +26020 79469 +18129 79463 +20956 79450 +30794 79439 +1463 79429 +21843 79428 +45868 79418 +2794 79412 +23965 79408 +43323 79407 +13416 79407 +35695 79396 +24270 79394 +5736 79389 +23510 79380 +23744 79372 +29169 79356 +21982 79353 +38245 79352 +45411 79323 +15715 79321 +25893 79316 +11691 79313 +27366 79309 +37259 79303 +24340 79295 +45753 79292 +29396 79292 +28121 79292 +5722 79290 +20717 79278 +39847 79265 +31537 79264 +8152 79251 +22525 79235 +32363 79232 +29827 79230 +2958 79227 +29948 79223 +21148 79222 +30254 79214 +18333 79210 +17335 79206 +38957 79204 +18495 79200 +24563 79199 +23738 79198 +28615 79192 +35530 79192 +43463 79184 +35568 79179 +18360 79168 +31701 79156 +43017 79141 +43676 79130 +34760 79122 +26602 79116 +21834 79101 +18516 79095 +42148 79070 +14354 79062 +14969 79059 +26532 79054 +38911 79053 +21751 79048 +26680 79048 +20006 79046 +50040 79033 +31462 79033 +18564 79029 +43443 79028 +23137 79022 +9908 79020 +20508 79019 +39314 79019 +30356 79017 +27690 79010 +18465 78997 +27902 78989 +27716 78971 +11059 78971 +13871 78948 +48067 78943 +28320 78927 +11518 78916 +26672 78914 +49652 78904 +15738 78895 +20187 78894 +20811 78893 +13448 78892 +43531 78889 +35506 78886 +25072 78882 +6415 78879 +25687 78877 +28716 78870 +5864 78861 +43050 78855 +7976 78850 +23797 78837 +38732 78837 +44047 78835 +5309 78835 +45178 78834 +4510 78827 +44765 78811 +12193 78808 +43512 78806 +22992 78806 +45932 78801 +33860 78799 +15954 78779 +31251 78772 +20555 78770 +47661 78770 +4214 78769 +15774 78769 +12490 78767 +29520 78767 +35289 78758 +48976 78756 +26297 78753 +30885 78744 +21603 78739 +15834 78738 +20114 78736 +10262 78730 +24738 78719 +16376 78717 +34334 78706 +19514 78701 +23540 78695 +10557 78689 +21339 78686 +31143 78680 +3413 78673 +28201 78672 +20585 78672 +2830 78670 +15088 78661 +12993 78660 +16173 78652 +22839 78637 +10473 78636 +36562 78634 +35304 78618 +41999 78610 +26579 78595 +11225 78591 +21939 78585 +38660 78578 +34265 78575 +19070 78573 +30861 78564 +43090 78563 +24496 78562 +19115 78561 +1329 78558 +27538 78547 +8543 78546 +44185 78543 +31896 78543 +10049 78542 +40434 78536 +23302 78532 +20900 78532 +23528 78532 +34481 78531 +36239 78519 +32791 78518 +32687 78515 +9897 78500 +36634 78497 +42103 78487 +33436 78484 +45667 78482 +18561 78480 +26719 78479 +43450 78473 +31258 78462 +20690 78450 +17067 78447 +27930 78444 +29102 78442 +34798 78441 +10562 78431 +20302 78424 +24486 78417 +35829 78416 +47819 78411 +12900 78406 +31176 78396 +16758 78394 +18775 78388 +24353 78386 +35390 78385 +29685 78385 +22661 78381 +38338 78375 +37704 78362 +36520 78332 +9485 78326 +28215 78324 +20681 78323 +16563 78316 +21502 78314 +43991 78311 +17463 78301 +24699 78298 +18931 78293 +11744 78286 +31028 78286 +20394 78279 +27055 78278 +21629 78275 +37932 78270 +17359 78259 +29780 78254 +19257 78246 +15871 78245 +42225 78242 +29354 78232 +17798 78232 +25343 78231 +7809 78230 +30976 78228 +41363 78228 +31145 78217 +24020 78217 +16590 78196 +12138 78196 +37575 78195 +27185 78177 +19127 78171 +28706 78158 +31038 78127 +19262 78125 +7117 78118 +46620 78116 +40176 78116 +19916 78108 +20901 78106 +24101 78101 +23518 78086 +47857 78085 +12595 78083 +31293 78076 +37979 78074 +30658 78056 +27235 78055 +9516 78038 +29547 78031 +30284 78013 +112 78012 +41271 78007 +24059 77989 +18930 77971 +4965 77967 +27848 77965 +12758 77962 +29309 77958 +29946 77948 +27269 77932 +18797 77932 +17743 77928 +31535 77926 +28612 77911 +21678 77906 +40014 77902 +9059 77897 
+17908 77890 +12081 77885 +34989 77860 +17503 77857 +10511 77851 +46551 77849 +17929 77844 +11174 77839 +36395 77827 +20196 77820 +32036 77818 +43692 77817 +35221 77806 +22128 77799 +34305 77796 +23285 77795 +6144 77792 +28967 77783 +19634 77777 +17615 77773 +36374 77764 +25000 77762 +17314 77759 +36300 77753 +23703 77749 +28519 77743 +43405 77742 +7959 77740 +11048 77740 +13523 77734 +46216 77727 +19490 77726 +49490 77724 +48971 77721 +23612 77716 +44599 77715 +30036 77715 +12825 77712 +43162 77712 +30227 77693 +23762 77688 +32994 77681 +16388 77676 +23492 77674 +22461 77663 +9452 77659 +6178 77627 +27963 77625 +36805 77620 +21818 77615 +32319 77614 +40154 77613 +28970 77608 +9060 77599 +39592 77597 +13195 77580 +20594 77580 +12894 77579 +30044 77568 +21757 77563 +45056 77562 +44419 77558 +27628 77551 +15558 77546 +6788 77541 +23261 77538 +41272 77538 +34928 77528 +19782 77522 +12350 77518 +18391 77506 +44842 77504 +7905 77503 +31139 77501 +35652 77500 +35999 77499 +25875 77480 +25802 77478 +24519 77476 +8635 77474 +19441 77471 +30350 77468 +43455 77467 +17168 77462 +19997 77459 +8250 77458 +38434 77457 +20873 77449 +27668 77447 +16152 77446 +35169 77421 +36814 77420 +44544 77418 +21218 77409 +24822 77400 +13294 77395 +32450 77387 +25092 77387 +21487 77383 +40460 77380 +39211 77377 +13800 77367 +43932 77358 +18517 77355 +23866 77347 +22286 77340 +16867 77334 +25152 77324 +20553 77320 +22514 77320 +17793 77307 +33783 77303 +14586 77286 +10377 77281 +8918 77270 +25931 77259 +36419 77256 +15592 77256 +29682 77249 +15146 77246 +22444 77239 +4383 77225 +36988 77224 +33444 77218 +48983 77214 +31997 77212 +16644 77206 +11205 77205 +5998 77202 +25210 77200 +20996 77196 +48157 77195 +26415 77194 +11778 77192 +18149 77189 +34003 77187 +23474 77173 +8777 77171 +29518 77166 +17916 77163 +1152 77149 +21790 77145 +16901 77142 +20927 77138 +13541 77116 +15920 77113 +2774 77105 +20367 77099 +22265 77096 +38680 77094 +31121 77094 +39012 77089 +27677 77088 +41373 77083 +48729 77074 +21479 77073 +14998 77069 +24210 77061 +24081 77044 +35129 77042 +21102 77035 +22863 77031 +25352 77021 +43915 77018 +39869 77017 +44202 77010 +4942 77002 +41503 77001 +23470 77000 +27232 76985 +41388 76981 +29901 76961 +38044 76955 +16241 76950 +42868 76948 +19699 76941 +4592 76939 +23085 76930 +24596 76929 +26324 76928 +44289 76912 +17703 76910 +40780 76909 +24927 76889 +44748 76882 +22242 76860 +19372 76851 +19787 76843 +47457 76840 +47435 76836 +15246 76835 +24324 76834 +22420 76830 +12933 76830 +4514 76822 +33940 76814 +17745 76813 +24234 76808 +38203 76807 +23631 76806 +33363 76796 +1750 76791 +37602 76789 +39414 76789 +15173 76788 +28603 76788 +38759 76784 +32539 76783 +10234 76772 +38157 76762 +21322 76762 +28617 76760 +29525 76744 +33654 76743 +17732 76734 +41462 76728 +17923 76722 +22237 76718 +26196 76716 +27805 76710 +26839 76709 +19775 76705 +19509 76704 +37916 76704 +3789 76703 +50199 76702 +41064 76688 +29218 76679 +31368 76651 +32840 76651 +25709 76646 +41324 76644 +21692 76643 +43733 76642 +15490 76622 +20932 76622 +29808 76614 +16803 76606 +38464 76586 +23105 76585 +23255 76574 +48323 76572 +27615 76568 +33668 76568 +18304 76552 +27519 76532 +29350 76528 +47765 76527 +19435 76523 +32400 76522 +35535 76514 +18646 76508 +24384 76508 +33495 76507 +28569 76503 +42008 76499 +8393 76496 +48773 76493 +22471 76488 +25828 76481 +25232 76471 +41239 76469 +26464 76457 +34461 76455 +9728 76448 +31525 76447 +37335 76439 +40520 76426 +38500 76423 +23125 76416 +23306 76411 +21806 76410 +33358 76405 +32228 76401 +34141 76393 
+47412 76393 +31797 76392 +13422 76385 +10871 76382 +7762 76380 +16919 76375 +40742 76375 +29039 76370 +25238 76367 +27074 76359 +33254 76355 +15230 76351 +11633 76342 +29523 76337 +24666 76334 +35315 76331 +40811 76328 +23102 76326 +28838 76321 +26010 76317 +11220 76316 +29496 76311 +31369 76308 +44424 76306 +14478 76304 +7116 76304 +38154 76303 +22021 76298 +27484 76298 +16480 76292 +34240 76292 +29116 76292 +26669 76281 +6250 76280 +25315 76280 +12338 76268 +19214 76260 +39855 76257 +28370 76256 +33419 76250 +17484 76243 +22608 76243 +34001 76234 +15788 76229 +29876 76229 +50170 76224 +30269 76220 +34004 76218 +23675 76213 +23913 76210 +23439 76203 +39325 76184 +32878 76174 +25159 76166 +25301 76164 +25098 76160 +12531 76153 +48841 76142 +17637 76140 +31748 76139 +22384 76131 +31049 76122 +17179 76121 +34074 76121 +8432 76118 +33642 76114 +29351 76113 +29137 76111 +35925 76102 +22333 76098 +36411 76068 +20359 76060 +30451 76057 +38294 76054 +30509 76051 +35512 76026 +29902 76024 +8398 76021 +13429 76019 +21171 76018 +29781 76017 +7185 76015 +30278 75999 +3235 75996 +34092 75987 +9631 75975 +45933 75965 +21873 75954 +23500 75953 +49182 75947 +24240 75940 +1786 75938 +28928 75933 +45197 75926 +16855 75925 +2784 75921 +25119 75920 +7307 75919 +8189 75911 +12512 75903 +12727 75899 +35559 75894 +11347 75888 +36468 75879 +18843 75876 +46050 75875 +37392 75874 +48496 75857 +19232 75856 +26758 75854 +35823 75840 +44963 75831 +24007 75816 +33572 75812 +35276 75811 +46065 75804 +30939 75794 +21867 75794 +9302 75791 +46078 75784 +5272 75778 +35833 75769 +16691 75755 +13306 75748 +37298 75744 +11708 75730 +22983 75728 +29340 75721 +8353 75717 +30914 75715 +42954 75712 +22379 75707 +19460 75701 +19906 75698 +28084 75689 +21771 75686 +37480 75684 +13873 75684 +23944 75680 +24960 75675 +27465 75674 +41637 75669 +18694 75658 +30428 75655 +26757 75645 +18205 75638 +20363 75635 +34936 75633 +22485 75627 +15102 75617 +6357 75615 +29960 75600 +49209 75584 +8679 75577 +3546 75576 +30977 75569 +25632 75557 +25833 75549 +19977 75545 +33956 75544 +24444 75531 +24106 75524 +28862 75524 +25847 75505 +25542 75499 +33128 75495 +26982 75493 +32671 75485 +41838 75484 +32819 75476 +48438 75472 +32834 75463 +18926 75459 +23658 75455 +27455 75449 +20919 75449 +22565 75446 +31928 75443 +37641 75432 +30316 75432 +29437 75427 +37788 75419 +39457 75419 +33354 75418 +35425 75415 +26711 75396 +31411 75387 +39900 75380 +43544 75378 +20358 75374 +24419 75373 +28232 75372 +9940 75371 +24557 75366 +25771 75352 +14607 75344 +32027 75329 +5903 75304 +45951 75299 +27225 75295 +22353 75291 +14040 75284 +31726 75270 +38079 75262 +39762 75258 +25662 75255 +28576 75247 +38851 75237 +5168 75233 +21778 75228 +3540 75220 +35213 75213 +27098 75211 +12978 75206 +40768 75203 +30237 75202 +48503 75183 +37036 75164 +19338 75158 +20422 75157 +21694 75142 +36697 75138 +28579 75128 +27513 75113 +46708 75106 +28582 75102 +34254 75099 +34790 75099 +45674 75095 +22193 75095 +33214 75092 +39791 75091 +34064 75074 +32313 75073 +44492 75071 +20995 75066 +17895 75063 +26587 75046 +14857 75043 +13491 75040 +17013 75037 +8612 75035 +34856 75029 +38630 75028 +20090 75026 +32684 75022 +32890 75022 +38146 75021 +13299 75006 +11167 74991 +25950 74981 +22642 74971 +32119 74969 +25012 74955 +40089 74954 +30089 74943 +3995 74942 +18533 74941 +41788 74940 +49224 74936 +1564 74935 +9106 74934 +44692 74924 +18307 74920 +37191 74908 +34550 74892 +32623 74878 +3165 74870 +18599 74862 +16396 74848 +45276 74848 +5048 74839 +24971 74824 +28679 74812 +4132 74812 +43269 
74808 +27794 74802 +9893 74773 +32856 74770 +34132 74759 +24601 74750 +29963 74744 +18112 74741 +44585 74739 +35344 74730 +36047 74721 +37445 74712 +17859 74711 +38287 74711 +25555 74710 +25697 74705 +23290 74697 +44745 74696 +33179 74689 +20136 74683 +19274 74679 +23931 74675 +23359 74667 +6199 74664 +25849 74661 +21442 74654 +21491 74645 +26237 74637 +40526 74624 +18452 74624 +38632 74621 +34602 74616 +30529 74615 +26126 74614 +19656 74610 +40667 74599 +17504 74594 +25800 74593 +22377 74589 +29166 74588 +17794 74580 +26430 74573 +39898 74555 +31348 74553 +21196 74551 +5620 74551 +18917 74547 +28510 74546 +5427 74545 +18886 74536 +36200 74534 +40095 74533 +5823 74531 +15648 74530 +13434 74528 +46347 74527 +30921 74525 +16540 74524 +18607 74521 +30371 74515 +29004 74508 +29651 74506 +14134 74479 +19204 74475 +24165 74472 +19084 74468 +1170 74468 +34643 74465 +19866 74452 +15072 74450 +3879 74444 +18812 74444 +48312 74440 +21414 74438 +19234 74436 +21802 74415 +18807 74406 +25379 74399 +34638 74398 +11017 74394 +42247 74389 +48834 74384 +23880 74379 +36602 74360 +38367 74356 +2334 74356 +27669 74355 +14216 74348 +40243 74345 +21428 74345 +50203 74338 +41377 74335 +38083 74332 +42993 74331 +30320 74326 +8265 74316 +21210 74310 +21355 74300 +5606 74291 +40765 74288 +31486 74287 +31605 74283 +20008 74282 +19802 74277 +16562 74277 +26325 74277 +19100 74270 +42799 74267 +41662 74243 +38485 74229 +25810 74228 +40004 74225 +34603 74220 +21345 74218 +13621 74209 +39547 74207 +13480 74192 +24688 74191 +29339 74191 +29295 74172 +35380 74171 +30244 74161 +39836 74161 +11601 74158 +23805 74150 +13876 74150 +3799 74145 +15677 74116 +6053 74114 +32010 74112 +36359 74108 +20011 74103 +41010 74095 +35668 74092 +22056 74087 +6212 74082 +17734 74076 +42471 74073 +47793 74072 +43103 74069 +14836 74066 +23532 74065 +41398 74061 +4883 74055 +31901 74055 +32355 74046 +23822 74039 +37277 74031 +16269 74022 +46391 74015 +41059 74013 +28697 74010 +39979 74008 +8429 74002 +26170 74000 +37755 73997 +30605 73995 +21403 73992 +18816 73991 +21409 73984 +19147 73976 +29044 73968 +38109 73960 +37937 73956 +35985 73955 +46019 73951 +42266 73949 +35049 73945 +37762 73936 +23748 73913 +29692 73907 +45373 73899 +4550 73875 +32416 73858 +42445 73845 +33359 73842 +18858 73839 +2633 73835 +45377 73829 +16108 73829 +30497 73826 +30421 73826 +27096 73821 +24176 73820 +36945 73810 +48043 73804 +30915 73799 +31245 73798 +30288 73795 +11002 73795 +18466 73787 +33425 73777 +29804 73772 +43647 73770 +21020 73763 +42228 73761 +33263 73752 +11475 73752 +19396 73752 +21978 73750 +43159 73723 +32730 73711 +22946 73708 +46583 73704 +20962 73702 +13590 73700 +35373 73684 +27525 73684 +48009 73669 +27700 73666 +48371 73665 +30699 73664 +46210 73664 +17552 73662 +46530 73659 +26148 73653 +33798 73638 +23598 73632 +42603 73623 +35056 73612 +32883 73610 +33125 73606 +19075 73603 +29632 73594 +26145 73592 +29432 73590 +12968 73589 +17506 73586 +27464 73585 +19949 73585 +29831 73584 +35834 73580 +5990 73578 +26127 73576 +18194 73568 +20762 73559 +22816 73552 +42509 73551 +27184 73542 +26188 73540 +30107 73539 +38030 73536 +3233 73532 +34911 73528 +39660 73527 +9979 73519 +27059 73514 +32670 73514 +14291 73511 +8330 73508 +38282 73505 +44271 73500 +19606 73491 +42365 73489 +47882 73487 +2983 73484 +18578 73482 +34679 73479 +42884 73476 +34794 73474 +30302 73469 +39598 73466 +9890 73465 +30492 73458 +13802 73455 +40325 73443 +19269 73441 +29430 73437 +29539 73429 +24994 73416 +15262 73404 +23584 73401 +39644 73400 +44035 73400 +24390 73398 +25531 
73394 +16249 73377 +21234 73365 +24924 73362 +41204 73347 +38830 73342 +29131 73339 +5038 73335 +4018 73328 +15606 73321 +44919 73320 +37948 73316 +43280 73312 +26369 73309 +36556 73307 +5791 73306 +46353 73306 +15442 73304 +11650 73293 +24500 73292 +48314 73279 +27953 73272 +8091 73272 +28691 73269 +6326 73264 +17267 73262 +38555 73256 +29264 73251 +38620 73250 +25865 73248 +27390 73238 +21245 73235 +31044 73228 +26269 73226 +4516 73220 +18300 73218 +29696 73214 +19320 73212 +17947 73206 +48863 73205 +20190 73203 +14295 73198 +41595 73180 +25111 73175 +24030 73166 +27857 73160 +45673 73154 +28997 73150 +21278 73150 +28824 73147 +37723 73144 +27637 73139 +27752 73135 +29681 73135 +18435 73121 +3634 73106 +44684 73099 +30859 73096 +26223 73095 +32930 73092 +20665 73091 +27223 73089 +22884 73084 +43931 73077 +15001 73067 +19751 73061 +26570 73061 +29148 73056 +29811 73037 +15563 73029 +12643 73025 +40422 73024 +35134 73022 +36554 73021 +17770 73018 +23361 73007 +30901 73003 +17748 72999 +12529 72998 +36176 72980 +41783 72979 +26256 72979 +2674 72964 +38075 72946 +38253 72945 +8841 72943 +31712 72943 +37791 72938 +15996 72937 +1953 72932 +3542 72927 +15680 72922 +16975 72912 +29768 72912 +47040 72905 +36815 72904 +25924 72903 +22173 72899 +36548 72896 +40997 72886 +33895 72866 +35657 72865 +25311 72845 +9067 72839 +3213 72835 +37664 72832 +49425 72831 +26564 72831 +32256 72817 +38662 72814 +32166 72810 +31623 72802 +37651 72787 +43720 72786 +16147 72777 +10082 72773 +27485 72765 +40744 72757 +28908 72743 +8000 72730 +9717 72718 +18922 72709 +40351 72708 +6281 72696 +20510 72692 +45844 72690 +4823 72684 +14873 72678 +39193 72672 +4396 72658 +29916 72628 +11715 72627 +23476 72620 +20273 72616 +22538 72609 +14617 72601 +46458 72599 +22751 72578 +28753 72578 +21539 72572 +25508 72570 +35480 72570 +49063 72568 +19328 72567 +33989 72558 +8710 72553 +42722 72551 +7824 72542 +36955 72530 +28195 72523 +8400 72507 +26834 72505 +31864 72501 +30984 72480 +32411 72480 +16834 72479 +23389 72458 +34331 72455 +24177 72455 +24430 72447 +15793 72444 +39404 72440 +12603 72437 +41489 72434 +25888 72429 +12322 72428 +9135 72421 +25244 72417 +25070 72415 +22878 72408 +44398 72404 +26017 72400 +32839 72390 +20005 72387 +25625 72364 +29092 72356 +19349 72342 +31606 72321 +11644 72320 +5051 72318 +4098 72313 +22990 72310 +43640 72310 +12786 72308 +8493 72308 +28695 72300 +32359 72299 +21794 72290 +38977 72287 +36884 72287 +12991 72284 +30881 72260 +30711 72255 +37383 72251 +33073 72250 +12947 72241 +19430 72240 +20233 72232 +26014 72231 +44878 72217 +3579 72212 +18454 72211 +25573 72210 +23246 72203 +21033 72202 +40649 72186 +24730 72182 +45011 72178 +29186 72178 +21989 72176 +38681 72168 +6836 72156 +32758 72156 +46373 72155 +48007 72154 +26041 72153 +26958 72151 +17729 72145 +28918 72142 +24139 72140 +20712 72140 +16815 72138 +28307 72086 +20347 72078 +23840 72069 +33587 72055 +17790 72052 +27900 72051 +24107 72049 +22354 72048 +23374 72038 +18562 72036 +17062 72034 +21925 72026 +31285 72024 +17951 72016 +28819 72016 +4374 72011 +4573 72010 +36980 72007 +17514 71996 +46643 71986 +1922 71979 +6690 71978 +43703 71974 +23101 71974 +40498 71967 +11265 71961 +10642 71957 +32955 71942 +26285 71937 +9047 71934 +19104 71933 +13414 71929 +9100 71928 +39072 71921 +31138 71921 +15761 71915 +29292 71915 +27491 71913 +41736 71911 +40892 71907 +26021 71902 +38789 71898 +10051 71896 +19960 71888 +37260 71870 +27315 71863 +23060 71860 +40394 71857 +25082 71849 +17283 71841 +23195 71835 +34286 71835 +49521 71822 +18366 71820 +39016 
71820 +46625 71816 +49618 71805 +34269 71799 +39576 71799 +17382 71795 +24901 71787 +47489 71781 +15631 71777 +37717 71774 +28110 71766 +14880 71764 +12195 71755 +24887 71753 +43731 71748 +16921 71740 +49117 71738 +45856 71731 +11258 71724 +22326 71717 +47813 71700 +1980 71685 +30323 71681 +36041 71670 +6106 71670 +31256 71669 +24790 71665 +16305 71650 +40899 71647 +36671 71646 +8821 71639 +48018 71638 +33948 71636 +38809 71632 +26665 71631 +8571 71628 +42920 71621 +15024 71618 +17084 71609 +40846 71601 +27873 71601 +4057 71593 +17972 71585 +30022 71585 +45301 71581 +35018 71576 +24883 71549 +25717 71546 +26073 71546 +37968 71542 +49528 71504 +43816 71495 +11719 71494 +11277 71488 +34659 71487 +46936 71484 +21690 71480 +22441 71474 +46321 71472 +40065 71470 +38297 71469 +32655 71468 +27580 71466 +33807 71442 +21159 71439 +21453 71437 +19826 71429 +25520 71426 +7277 71426 +19062 71420 +39958 71414 +27574 71403 +22349 71388 +13932 71386 +39236 71384 +22721 71365 +23324 71362 +28594 71358 +24429 71354 +42389 71352 +29779 71351 +23693 71343 +23766 71342 +42990 71339 +46502 71337 +27113 71334 +41763 71319 +42620 71318 +32691 71317 +34009 71310 +13085 71295 +7596 71293 +20248 71292 +45749 71290 +24098 71289 +28461 71286 +36679 71281 +38817 71277 +35383 71271 +10890 71269 +21325 71253 +9921 71251 +34214 71246 +11071 71244 +23281 71240 +20542 71240 +48807 71238 +29263 71236 +27942 71233 +28284 71225 +24114 71218 +21636 71218 +38981 71216 +38687 71215 +21042 71211 +49808 71207 +21528 71201 +29365 71200 +18973 71191 +39109 71190 +11973 71187 +19810 71186 +30055 71185 +29499 71183 +28148 71183 +23213 71174 +21213 71149 +4985 71145 +21522 71134 +30886 71129 +39116 71127 +21710 71121 +43792 71116 +33376 71113 +20936 71108 +3373 71094 +27476 71092 +21083 71068 +28763 71062 +36192 71061 +17319 71038 +37399 71036 +17208 71034 +22972 71024 +16036 71024 +37257 71012 +43152 71009 +47712 70998 +37443 70993 +29745 70981 +12885 70978 +33557 70969 +45913 70959 +26657 70959 +33996 70951 +26051 70948 +27457 70943 +10334 70943 +23642 70932 +48912 70930 +37938 70918 +11439 70912 +20102 70906 +12718 70905 +25638 70900 +18348 70896 +2521 70892 +17571 70886 +41584 70886 +44436 70880 +22267 70872 +38286 70867 +12129 70857 +15367 70854 +30879 70846 +27336 70843 +16504 70842 +42373 70836 +24241 70830 +20567 70830 +30054 70825 +24329 70824 +40680 70823 +41227 70820 +35703 70819 +22576 70814 +42281 70809 +23530 70803 +34860 70795 +45606 70782 +40250 70782 +41749 70770 +49194 70768 +37742 70766 +23798 70763 +17601 70762 +10450 70755 +29178 70752 +8368 70747 +26275 70735 +24808 70734 +15065 70731 +35840 70723 +20838 70716 +25747 70709 +2062 70701 +41803 70700 +26251 70693 +25815 70675 +48467 70673 +28517 70672 +22318 70667 +40942 70666 +23158 70655 +40875 70652 +29535 70652 +22855 70635 +9774 70630 +45026 70622 +12674 70621 +29996 70619 +25413 70596 +39953 70596 +34660 70595 +32829 70593 +36126 70588 +39365 70580 +16817 70579 +9868 70578 +22660 70577 +28507 70577 +23388 70576 +26931 70576 +17427 70574 +24600 70568 +37595 70567 +22100 70565 +23791 70561 +14697 70556 +19155 70555 +16506 70537 +36525 70535 +13610 70529 +1206 70521 +27648 70515 +27328 70514 +15649 70511 +25416 70502 +20616 70494 +23809 70485 +2553 70479 +32164 70470 +11420 70470 +19300 70468 +23776 70465 +16315 70464 +19898 70464 +41082 70455 +41396 70443 +32767 70430 +254 70422 +18742 70420 +40764 70418 +19821 70413 +26790 70412 +11493 70412 +24447 70406 +19774 70400 +35301 70391 +990 70390 +37634 70379 +28372 70378 +25740 70374 +21363 70373 +21473 70372 +43235 
70368 +39607 70364 +26228 70359 +24216 70351 +27767 70349 +4123 70346 +16252 70339 +48633 70332 +46608 70328 +7447 70321 +17834 70313 +8462 70308 +32121 70306 +25972 70300 +26234 70298 +38201 70296 +38971 70292 +14619 70278 +40814 70275 +21503 70267 +25137 70263 +3581 70263 +24306 70260 +21540 70244 +21000 70244 +12833 70237 +27105 70234 +44406 70234 +22036 70232 +32466 70231 +49404 70224 +22169 70222 +30160 70214 +19225 70193 +21613 70192 +21805 70190 +18763 70186 +4520 70181 +42355 70181 +8939 70179 +1836 70177 +580 70171 +18868 70166 +36233 70163 +12468 70157 +29591 70156 +22364 70156 +7609 70155 +41390 70148 +6513 70148 +28538 70146 +22107 70144 +28546 70143 +27063 70143 +22084 70142 +40011 70134 +26027 70132 +29278 70131 +45181 70127 +24317 70124 +28533 70122 +36782 70115 +22520 70109 +28583 70109 +42746 70097 +26081 70085 +29885 70083 +14675 70079 +35281 70074 +36348 70072 +30840 70067 +29699 70066 +24853 70057 +21296 70056 +4782 70038 +24068 70032 +12482 70030 +31353 70017 +17226 70016 +6556 70011 +1825 70008 +28412 69996 +18070 69988 +24628 69971 +39209 69959 +13856 69952 +34904 69950 +40840 69948 +15932 69944 +30242 69937 +19032 69933 +20234 69924 +20262 69918 +33188 69907 +39136 69891 +21048 69889 +31976 69883 +31481 69881 +18127 69880 +34203 69878 +752 69878 +6610 69871 +3455 69861 +18904 69854 +32461 69854 +40269 69845 +16781 69841 +18892 69828 +18878 69827 +4541 69827 +9487 69827 +6806 69819 +43346 69816 +14353 69798 +25030 69794 +49626 69792 +26838 69791 +45549 69787 +21869 69774 +23683 69763 +30183 69759 +33174 69758 +41979 69756 +12762 69751 +21354 69736 +43047 69736 +16270 69734 +50026 69732 +22632 69732 +14747 69731 +17338 69723 +38643 69717 +32902 69712 +23110 69705 +27549 69700 +9277 69694 +3507 69686 +44805 69681 +30534 69678 +24865 69678 +6784 69672 +36900 69668 +34250 69664 +32205 69664 +28717 69662 +26954 69656 +28509 69654 +42038 69649 +23932 69648 +21719 69646 +12515 69640 +24681 69637 +44416 69624 +28831 69615 +37042 69608 +38982 69601 +28726 69584 +33329 69576 +29454 69562 +1888 69560 +31990 69558 +49798 69544 +32629 69541 +24208 69540 +23845 69536 +36267 69535 +21934 69530 +16823 69514 +6307 69514 +17952 69508 +22898 69507 +38921 69502 +45341 69498 +46579 69491 +26426 69491 +38057 69486 +148 69485 +19571 69469 +35189 69467 +37377 69461 +30822 69455 +17657 69455 +16920 69447 +19238 69442 +43428 69439 +48588 69438 +14258 69432 +20318 69428 +44373 69421 +17384 69421 +22343 69416 +42273 69407 +36073 69393 +44099 69386 +34558 69382 +2182 69372 +44122 69362 +8958 69360 +15987 69355 +36721 69351 +14001 69349 +15236 69341 +32168 69336 +44799 69329 +30259 69321 +45754 69314 +23082 69307 +34344 69301 +43561 69281 +11425 69264 +38700 69258 +37729 69256 +27384 69253 +41071 69239 +29931 69229 +21573 69226 +18907 69225 +25274 69223 +27859 69222 +36389 69219 +31470 69218 +41325 69218 +10573 69208 +38152 69199 +25572 69189 +31291 69186 +11688 69169 +15708 69166 +47847 69162 +39068 69161 +47431 69152 +41555 69145 +47711 69141 +45044 69135 +13099 69132 +33413 69129 +21059 69125 +29650 69123 +17596 69119 +38872 69117 +13223 69117 +32308 69116 +34939 69107 +24657 69092 +31212 69088 +27518 69087 +36425 69083 +45293 69073 +43537 69071 +27361 69066 +42764 69047 +13311 69045 +16544 69045 +37448 69037 +48876 69037 +31288 69035 +35359 69023 +8585 69014 +11898 69008 +10236 69006 +38952 69005 +26861 68998 +30154 68997 +15596 68994 +17149 68989 +38839 68989 +8401 68987 +27330 68980 +8043 68980 +50090 68980 +37013 68972 +16592 68972 +28969 68964 +45294 68963 +5347 68961 +27817 68960 +23563 
68958 +28098 68954 +31515 68953 +29849 68952 +30282 68946 +20186 68945 +19828 68944 +4892 68940 +25690 68935 +32268 68934 +16436 68934 +5769 68933 +20912 68927 +17265 68920 +40056 68920 +29055 68908 +19760 68907 +7753 68904 +1935 68903 +7076 68901 +20167 68891 +27432 68890 +46828 68889 +24910 68885 +20547 68875 +3567 68863 +24425 68852 +8654 68852 +23223 68835 +11197 68830 +35421 68824 +8910 68812 +33217 68811 +28493 68809 +45863 68800 +18052 68798 +46535 68794 +47925 68792 +26722 68790 +3089 68790 +28325 68788 +31316 68779 +28206 68778 +40946 68772 +18225 68769 +22328 68767 +41769 68761 +17670 68748 +15595 68747 +33005 68742 +19181 68738 +22091 68727 +33632 68725 +2567 68720 +9641 68707 +9195 68698 +23367 68695 +26963 68694 +42953 68682 +17437 68678 +48757 68677 +48847 68674 +28021 68673 +23873 68672 +42629 68663 +45347 68658 +32448 68654 +43154 68647 +26902 68647 +19792 68633 +34748 68631 +27409 68630 +33533 68623 +36508 68621 +42999 68618 +22094 68611 +42113 68606 +16879 68604 +33423 68602 +26831 68566 +29534 68559 +36498 68554 +14098 68548 +49495 68547 +22200 68542 +32331 68535 +28991 68534 +37558 68531 +44349 68530 +28652 68526 +11020 68522 +18215 68522 +39036 68522 +40770 68514 +44911 68509 +6514 68491 +31123 68490 +32980 68488 +2234 68488 +20866 68486 +38261 68481 +35798 68480 +42574 68479 +46388 68479 +7933 68469 +23498 68452 +40072 68449 +26510 68442 +43623 68440 +17388 68438 +6389 68436 +24257 68423 +38059 68421 +42736 68418 +45936 68413 +26128 68413 +39313 68412 +35323 68403 +35594 68392 +39025 68389 +42094 68389 +17847 68386 +16631 68385 +15328 68375 +32043 68370 +19849 68366 +26490 68365 +22066 68357 +18046 68351 +2303 68337 +9876 68336 +13137 68333 +23823 68332 +15652 68332 +35080 68330 +23323 68327 +43890 68326 +46732 68320 +19103 68319 +27939 68311 +27234 68306 +27391 68301 +29140 68300 +26137 68300 +36333 68299 +8183 68290 +31189 68284 +36576 68283 +16187 68280 +27568 68277 +15538 68276 +44018 68273 +47717 68253 +39905 68252 +24436 68249 +27407 68240 +21591 68238 +40015 68237 +19345 68231 +20226 68230 +21894 68221 +38372 68212 +30151 68212 +31568 68190 +16858 68186 +18664 68180 +35249 68168 +46578 68143 +33915 68142 +34093 68118 +20437 68114 +37946 68102 +40317 68102 +40543 68098 +18741 68094 +21709 68085 +36662 68083 +42705 68059 +8789 68055 +16703 68053 +40696 68051 +49293 68048 +12291 68045 +41971 68045 +47349 68038 +18815 68034 +38511 68033 +49894 68021 +26547 68021 +34536 68018 +28207 68017 +41086 68006 +47822 68005 +1970 67984 +24028 67974 +20934 67971 +36987 67966 +10113 67960 +22951 67949 +22142 67944 +21035 67942 +36353 67938 +20780 67936 +47074 67928 +25705 67927 +21916 67919 +8230 67917 +44050 67910 +21365 67885 +19371 67884 +25240 67878 +26835 67875 +22601 67874 +35432 67872 +16056 67863 +27298 67860 +48711 67849 +23865 67829 +22827 67823 +30974 67809 +27989 67792 +21344 67777 +38992 67776 +36472 67776 +14786 67766 +39026 67764 +31861 67757 +43814 67744 +21792 67733 +18510 67733 +41540 67732 +23490 67731 +40528 67730 +13087 67730 +9580 67729 +26240 67727 +1472 67727 +33601 67721 +47265 67721 +38919 67714 +20986 67710 +13384 67707 +49338 67694 +12828 67690 +30957 67687 +19454 67685 +18278 67654 +33513 67647 +14398 67646 +22594 67641 +11786 67639 +11876 67631 +2965 67624 +21427 67616 +20959 67612 +18376 67612 +26566 67609 +7201 67606 +27878 67604 +35933 67590 +13174 67587 +13839 67580 +4210 67579 +25112 67570 +46533 67567 +15967 67567 +9288 67558 +39154 67553 +39091 67544 +19917 67544 +20474 67543 +23774 67539 +38058 67537 +15299 67536 +19082 67534 +3795 67530 
+26329 67512 +30455 67511 +41422 67509 +37016 67505 +32741 67502 +30062 67498 +24362 67497 +19206 67497 +36755 67486 +8457 67486 +26071 67485 +38495 67483 +21769 67476 +40983 67460 +41618 67451 +19325 67443 +14904 67441 +27088 67432 +32952 67427 +36507 67406 +29180 67403 +27947 67402 +30438 67398 +19930 67392 +20307 67391 +34187 67386 +45899 67384 +13033 67380 +26821 67373 +36933 67370 +7199 67368 +36592 67367 +34430 67365 +38080 67336 +3153 67334 +17688 67334 +32404 67333 +49369 67330 +20718 67327 +25327 67321 +43487 67314 +43229 67311 +10044 67310 +31277 67305 +29684 67302 +19903 67297 +22590 67291 +27183 67288 +15754 67287 +19222 67287 +22744 67282 +39046 67281 +38798 67277 +10613 67264 +39690 67260 +29711 67255 +21407 67245 +6207 67241 +19074 67237 +27029 67230 +28889 67226 +28267 67224 +12483 67216 +35335 67215 +49118 67214 +47097 67210 +16796 67207 +4233 67207 +16011 67191 +12458 67184 +22115 67183 +48831 67181 +19040 67180 +5570 67179 +4140 67176 +17983 67174 +35060 67167 +40688 67167 +26573 67160 +16365 67158 +24332 67149 +17756 67147 +35022 67146 +13042 67146 +2205 67137 +35127 67135 +40399 67133 +31147 67131 +24769 67131 +31931 67100 +27252 67100 +19909 67095 +19954 67094 +29630 67093 +141 67090 +19118 67089 +28332 67086 +45269 67082 +13824 67082 +36709 67080 +23594 67072 +27498 67061 +34922 67056 +44421 67043 +43765 67042 +27681 67037 +3314 67021 +31096 67016 +25250 67005 +17759 67001 +18571 66993 +9553 66992 +20179 66988 +29426 66977 +39944 66971 +19939 66965 +13624 66964 +9690 66958 +25772 66950 +5943 66948 +914 66937 +27208 66937 +38707 66931 +17010 66916 +8196 66915 +20868 66914 +42849 66913 +33021 66909 +23959 66906 +16354 66884 +26191 66878 +29595 66874 +9426 66872 +32800 66871 +25192 66866 +20100 66864 +21770 66862 +22050 66851 +24715 66851 +31422 66848 +46574 66843 +28105 66838 +14925 66831 +49858 66830 +15137 66829 +36131 66822 +11825 66818 +24530 66807 +13924 66806 +24716 66805 +26590 66799 +29451 66789 +16326 66788 +20836 66786 +30065 66785 +24796 66784 +7060 66782 +30060 66781 +30849 66779 +17585 66776 +24905 66776 +7680 66766 +6271 66765 +41037 66764 +46355 66764 +44272 66760 +29793 66742 +26500 66741 +8480 66739 +20917 66730 +23311 66727 +43861 66724 +20124 66724 +40563 66706 +41665 66705 +40524 66696 +21777 66695 +21536 66694 +23717 66689 +19609 66688 +46039 66687 +31879 66686 +24507 66676 +26888 66675 +34037 66674 +26810 66672 +40752 66671 +29290 66670 +25721 66667 +10363 66663 +40376 66660 +10419 66658 +34143 66645 +15274 66641 +21549 66633 +22427 66614 +37878 66586 +50236 66581 +42540 66581 +40645 66581 +22231 66577 +42847 66574 +45277 66567 +26432 66561 +24561 66549 +19485 66549 +34245 66545 +23834 66544 +40030 66540 +28459 66539 +7916 66538 +19507 66528 +20975 66527 +16552 66525 +36290 66524 +36254 66523 +45109 66517 +35696 66517 +20796 66516 +23771 66513 +26636 66502 +34639 66497 +4680 66491 +41913 66486 +28233 66484 +45327 66484 +37521 66479 +20048 66470 +25878 66469 +4804 66460 +5792 66455 +49398 66448 +21240 66426 +43757 66408 +27861 66400 +47515 66397 +15417 66396 +29105 66386 +37131 66383 +35459 66374 +4600 66368 +27415 66365 +22256 66364 +12012 66358 +28962 66357 +28225 66354 +15997 66351 +24386 66347 +21225 66346 +24012 66345 +26596 66344 +35449 66338 +47447 66338 +25858 66334 +41514 66330 +42542 66326 +29287 66319 +36398 66313 +32019 66307 +26206 66303 +26928 66297 +44294 66297 +21697 66296 +49767 66295 +33078 66293 +27718 66291 +30466 66290 +23134 66287 +39370 66268 +25433 66267 +25988 66261 +24506 66244 +30555 66234 +20578 66233 +37339 66227 +21718 
66226 +24922 66224 +1937 66223 +17402 66221 +20615 66220 +21433 66216 +35032 66210 +23586 66204 +17824 66200 +33171 66198 +37556 66196 +30888 66190 +37499 66186 +21712 66183 +45177 66183 +31846 66165 +13381 66156 +25809 66154 +19088 66145 +19180 66144 +39288 66144 +33769 66140 +24578 66140 +26936 66139 +32826 66138 +41946 66127 +28123 66126 +49783 66126 +24049 66125 +42439 66123 +36524 66121 +37951 66112 +19645 66109 +36031 66104 +23275 66104 +17393 66104 +35466 66102 +18104 66083 +3527 66079 +20729 66079 +15155 66078 +21377 66075 +23570 66072 +33628 66058 +23938 66048 +47699 66043 +14990 66037 +34937 66035 +31453 66030 +19445 66030 +19348 66026 +45346 66016 +15079 66007 +35715 66005 +38548 66000 +38153 65991 +37517 65991 +30166 65984 +45603 65971 +9562 65967 +24777 65964 +25424 65959 +23667 65959 +25103 65957 +40935 65951 +35754 65946 +40701 65945 +27561 65943 +40274 65937 +44107 65934 +20675 65933 +38826 65927 +25318 65925 +26043 65925 +34268 65910 +48041 65908 +48117 65899 +28079 65896 +40633 65893 +31302 65891 +33280 65891 +36696 65891 +39745 65887 +35043 65887 +24291 65887 +36963 65885 +38262 65885 +40691 65883 +29050 65883 +34299 65881 +38111 65877 +23654 65875 +20834 65872 +18159 65871 +17400 65869 +27697 65862 +41611 65849 +46669 65845 +18558 65830 +35724 65825 +27851 65810 +29107 65807 +27262 65806 +13040 65802 +35845 65801 +23263 65798 +38459 65796 +30171 65791 +43774 65789 +20511 65786 +31320 65784 +41223 65776 +16996 65766 +22947 65764 +48309 65762 +40863 65761 +6785 65758 +38222 65757 +6804 65742 +33071 65740 +37276 65739 +33122 65739 +18629 65736 +23818 65735 +27475 65734 +32860 65727 +8622 65724 +23442 65717 +32358 65707 +27474 65707 +23824 65698 +36386 65698 +5577 65688 +10496 65681 +41943 65678 +11454 65675 +35966 65675 +19672 65672 +4807 65671 +13513 65669 +32969 65668 +28756 65657 +33568 65656 +35953 65653 +19244 65645 +29569 65643 +25425 65639 +31757 65634 +18273 65628 +6015 65625 +23009 65625 +22670 65621 +19972 65621 +15424 65619 +14192 65616 +35954 65615 +32445 65608 +29202 65608 +48164 65606 +16980 65604 +42475 65604 +32596 65603 +17118 65600 +44801 65600 +46406 65590 +32735 65589 +28011 65586 +27284 65582 +7583 65578 +43207 65573 +16931 65572 +11991 65561 +19233 65546 +29155 65542 +18298 65537 +33262 65533 +21154 65530 +43741 65528 +15456 65524 +39718 65508 +32569 65507 +15705 65504 +20350 65502 +24587 65498 +38164 65495 +29398 65495 +43393 65493 +24562 65489 +19114 65489 +20668 65477 +30538 65466 +34700 65463 +22049 65448 +34548 65447 +34014 65440 +27182 65432 +41702 65430 +21669 65427 +23372 65425 +35446 65425 +38178 65413 +21144 65412 +17749 65408 +34892 65405 +29232 65404 +43742 65401 +42151 65399 +25834 65394 +34315 65393 +14837 65392 +40685 65390 +16654 65389 +21607 65389 +35297 65378 +21816 65366 +19023 65364 +26852 65347 +27807 65347 +19736 65337 +21850 65336 +18574 65335 +21172 65322 +16833 65304 +31532 65302 +30973 65298 +29168 65274 +32762 65273 +9057 65269 +29450 65265 +8164 65261 +2760 65256 +35485 65254 +19865 65248 +46051 65245 +12984 65244 +43226 65242 +30172 65239 +18955 65239 +6084 65232 +31381 65230 +29614 65225 +31747 65202 +13369 65200 +38863 65198 +46653 65189 +37796 65188 +41140 65183 +12295 65181 +22547 65180 +39479 65177 +1506 65172 +25212 65167 +41902 65161 +39148 65157 +30793 65151 +29235 65150 +1361 65149 +6874 65148 +31001 65148 +2552 65138 +48484 65133 +13202 65132 +29242 65132 +37030 65130 +47154 65121 +21374 65119 +28526 65113 +44119 65112 +37180 65104 +28491 65102 +33368 65100 +44143 65096 +44381 65091 +17890 65088 +31956 65082 
+18668 65067 +31781 65053 +18345 65041 +40992 65037 +9647 65032 +42125 65030 +35351 65029 +42498 65016 +9357 65005 +33522 64997 +30510 64996 +29065 64992 +30889 64992 +16966 64975 +26614 64971 +33936 64956 +40807 64948 +27819 64940 +27995 64930 +17301 64924 +31647 64923 +41310 64920 +30713 64919 +1491 64917 +11837 64914 +39360 64913 +24755 64910 +12552 64907 +25547 64904 +9822 64903 +34369 64900 +23981 64897 +34623 64895 +5493 64885 +49042 64883 +32914 64880 +26647 64878 +25241 64876 +19281 64872 +32753 64865 +26139 64864 +44386 64859 +27281 64851 +24739 64846 +44918 64838 +37566 64837 +22406 64833 +29844 64824 +42824 64805 +38063 64802 +14189 64796 +19664 64796 +5226 64782 +45463 64775 +47735 64766 +24780 64765 +22033 64742 +27048 64735 +28010 64734 +20083 64723 +27044 64718 +5458 64715 +37097 64709 +35971 64708 +3308 64701 +46811 64664 +34295 64662 +41896 64661 +27200 64652 +9062 64637 +26016 64631 +21890 64630 +26886 64628 +25980 64626 +33663 64621 +38719 64620 +3948 64618 +22780 64614 +22739 64601 +45556 64598 +44626 64582 +45125 64580 +40308 64579 +27899 64569 +24311 64568 +30375 64567 +32232 64565 +23591 64564 +49872 64553 +4323 64552 +6978 64549 +29382 64548 +19727 64540 +3231 64538 +30449 64533 +48906 64531 +21016 64531 +22248 64528 +16184 64525 +27276 64518 +21643 64514 +48769 64514 +39203 64511 +28530 64511 +23176 64508 +31332 64504 +31290 64502 +28654 64500 +7919 64500 +25397 64494 +28527 64492 +47595 64484 +33052 64481 +26215 64471 +18396 64466 +32607 64461 +21825 64459 +5360 64453 +16669 64450 +44179 64446 +28197 64443 +10286 64436 +19836 64433 +33806 64431 +25293 64419 +48361 64416 +43419 64408 +20108 64400 +38355 64398 +39358 64397 +15202 64381 +30256 64355 +36740 64355 +34998 64354 +36304 64343 +38320 64341 +18971 64336 +33081 64334 +34518 64329 +32083 64325 +30000 64322 +22701 64318 +19818 64306 +37313 64295 +40978 64294 +26730 64290 +24846 64271 +36121 64270 +27558 64261 +36009 64260 +18977 64259 +41926 64244 +26609 64238 +46779 64238 +28387 64238 +21826 64235 +40907 64224 +29671 64224 +24989 64214 +22656 64210 +25592 64206 +20743 64200 +25784 64193 +42392 64193 +24939 64189 +13398 64180 +45399 64171 +14635 64156 +26001 64155 +32029 64152 +48677 64145 +29927 64140 +42288 64135 +48302 64135 +4515 64131 +2101 64129 +38230 64125 +25655 64125 +14076 64121 +28779 64117 +5647 64115 +15217 64114 +36092 64113 +41967 64112 +16161 64107 +16487 64103 +22494 64102 +25063 64100 +33041 64096 +27759 64090 +24488 64083 +28973 64075 +46889 64075 +17515 64063 +31725 64060 +23769 64050 +35675 64047 +20161 64046 +26688 64040 +26112 64036 +28153 64031 +26470 64024 +44210 64015 +34571 64013 +42754 64007 +3921 64003 +21230 63999 +24356 63996 +46949 63996 +49860 63995 +43442 63995 +35128 63992 +2384 63991 +32033 63990 +9182 63984 +16274 63982 +15225 63979 +4070 63978 +31639 63975 +22465 63974 +41651 63973 +9448 63964 +35198 63962 +32504 63961 +45187 63958 +13103 63957 +12960 63950 +38360 63945 +13880 63937 +9804 63930 +11313 63921 +19772 63918 +47592 63915 +29333 63914 +20546 63914 +41073 63912 +37924 63907 +34821 63904 +24916 63903 +35444 63900 +44352 63891 +25076 63890 +4648 63889 +43985 63881 +6719 63881 +7803 63878 +35377 63878 +38523 63877 +38285 63866 +33899 63865 +6929 63864 +40352 63860 +46539 63858 +22303 63842 +36437 63837 +30061 63837 +35756 63836 +18041 63835 +41533 63833 +30987 63821 +22967 63820 +18722 63818 +34744 63812 +22745 63805 +25997 63798 +49577 63798 +17510 63796 +25519 63785 +41892 63784 +41965 63770 +19538 63757 +33702 63754 +25368 63752 +16096 63747 +45732 63743 +11362 
63741 +38174 63739 +25776 63730 +36302 63728 +31101 63726 +24643 63725 +25699 63722 +46994 63713 +4636 63712 +47693 63712 +14838 63705 +10300 63703 +20646 63699 +28784 63696 +20634 63694 +22358 63685 +20144 63683 +28477 63682 +11184 63680 +24000 63673 +21261 63668 +32885 63667 +9819 63666 +16773 63664 +29020 63660 +37134 63652 +31477 63638 +46729 63633 +45555 63632 +11866 63631 +15903 63631 +14646 63631 +19130 63628 +18542 63623 +33753 63623 +25963 63620 +17385 63616 +29207 63608 +37461 63607 +19265 63594 +21508 63593 +22160 63584 +26536 63580 +42628 63580 +14254 63575 +43376 63574 +36762 63562 +34796 63561 +38315 63559 +31104 63552 +33861 63550 +18951 63549 +34351 63535 +25268 63532 +45064 63530 +29353 63529 +5659 63528 +35201 63526 +20853 63522 +19811 63520 +37043 63510 +12535 63506 +25976 63503 +30705 63500 +43868 63497 +11176 63484 +31352 63473 +39387 63471 +25493 63470 +45067 63470 +18768 63469 +30186 63465 +32249 63463 +31480 63457 +39011 63446 +22335 63443 +7861 63443 +24793 63441 +14602 63437 +17641 63429 +31498 63427 +25134 63426 +17297 63425 +29494 63420 +16486 63417 +21060 63409 +30663 63408 +35296 63401 +32967 63395 +33228 63392 +21729 63392 +17140 63374 +5317 63374 +30292 63371 +38234 63366 +37286 63360 +10141 63355 +19734 63350 +11804 63348 +19474 63340 +22247 63340 +12944 63322 +24018 63317 +38558 63316 +44936 63315 +33675 63309 +26224 63307 +26732 63300 +18058 63298 +27965 63296 +26684 63290 +27887 63277 +27221 63276 +1193 63271 +33453 63265 +21585 63258 +33578 63258 +40959 63243 +18720 63239 +24995 63234 +27004 63227 +27127 63224 +30647 63222 +20334 63219 +12997 63206 +24231 63200 +35554 63196 +25557 63194 +46989 63192 +48316 63191 +40074 63184 +38606 63184 +10994 63179 +47894 63176 +19442 63173 +15714 63170 +11284 63168 +46411 63167 +38129 63165 +24478 63158 +21255 63158 +49482 63158 +1407 63144 +21388 63141 +24069 63136 +34990 63135 +38646 63131 +25678 63125 +34596 63123 +45189 63122 +34040 63116 +34231 63112 +18905 63097 +11857 63093 +38358 63092 +36806 63091 +28776 63083 +37931 63081 +31892 63075 +29439 63072 +33290 63072 +47005 63070 +26424 63069 +25560 63058 +40430 63056 +48158 63054 +17464 63051 +22113 63047 +35065 63031 +50075 63023 +21667 63021 +20034 63019 +29943 63018 +28092 63017 +2108 63016 +41640 63013 +1132 63012 +20624 63009 +36876 63005 +27305 63002 +20381 63002 +39523 62996 +20478 62995 +49250 62994 +14797 62994 +13060 62992 +44310 62991 +23574 62990 +45476 62967 +26830 62965 +11296 62964 +9362 62963 +32507 62960 +31379 62955 +45033 62949 +23284 62947 +35938 62944 +22131 62944 +29135 62942 +25077 62938 +21578 62927 +25773 62927 +39372 62925 +3990 62921 +43341 62916 +23816 62916 +31091 62911 +33075 62909 +32442 62896 +18248 62881 +38014 62879 +26304 62872 +22352 62867 +8081 62866 +12976 62864 +50231 62857 +24921 62842 +18636 62837 +19846 62837 +41155 62821 +26574 62810 +42923 62806 +20844 62801 +31198 62794 +19650 62783 +17240 62778 +16856 62777 +11555 62777 +18049 62755 +25381 62751 +20043 62746 +36903 62744 +15624 62742 +23853 62741 +43678 62733 +6435 62733 +42952 62723 +14967 62723 +13049 62721 +19052 62720 +31351 62705 +23145 62704 +40596 62682 +27065 62678 +32805 62667 +33745 62661 +23417 62652 +27408 62651 +25071 62648 +35037 62647 +49067 62645 +38534 62633 +30712 62631 +42838 62626 +37629 62624 +25654 62620 +4142 62620 +44760 62618 +15845 62603 +3447 62602 +34809 62595 +7004 62593 +46137 62589 +45329 62587 +42862 62584 +26421 62583 +13696 62574 +15835 62573 +34529 62558 +25544 62533 +42345 62518 +23183 62516 +29987 62514 +25789 62501 +38162 62501 
+39854 62496 +26397 62485 +20822 62484 +36839 62474 +31682 62473 +31443 62465 +32135 62459 +47955 62453 +37095 62453 +43097 62450 +34061 62442 +33633 62434 +28340 62429 +24786 62427 +17103 62419 +27327 62417 +40359 62413 +28234 62412 +36997 62408 +7254 62406 +8593 62406 +25580 62406 +31086 62406 +6652 62405 +21965 62398 +2445 62396 +28246 62395 +37075 62394 +31720 62392 +28767 62388 +11458 62385 +18534 62382 +36391 62380 +24648 62377 +31163 62375 +32385 62372 +22679 62367 +42328 62360 +24475 62355 +23741 62350 +36354 62339 +24672 62336 +29817 62332 +33782 62331 +19531 62331 +26218 62328 +17470 62306 +30050 62304 +43415 62300 +16985 62293 +38487 62277 +28364 62268 +47304 62257 +23421 62252 +10458 62249 +20260 62247 +25543 62245 +18869 62239 +36735 62235 +43256 62231 +28574 62228 +25562 62227 +42521 62222 +36768 62221 +230 62217 +30040 62215 +33880 62212 +35104 62207 +21489 62201 +13001 62195 +35256 62193 +24986 62193 +49018 62190 +9338 62183 +27855 62171 +31603 62153 +12721 62149 +34685 62149 +46977 62149 +41969 62142 +37732 62140 +20316 62132 +21888 62131 +43611 62127 +28671 62123 +23180 62123 +38137 62122 +12480 62120 +32628 62118 +35024 62117 +27495 62108 +49145 62102 +30250 62091 +24734 62064 +30913 62064 +34429 62054 +46327 62051 +11531 62033 +13154 62028 +19418 62024 +34475 62022 +38087 62021 +231 62021 +15396 62008 +34199 62003 +1715 61999 +48674 61998 +46409 61995 +30339 61994 +36897 61990 +42261 61989 +42621 61987 +29573 61985 +13377 61984 +31513 61978 +29042 61962 +15605 61953 +48734 61946 +24203 61944 +38725 61937 +10901 61934 +30868 61934 +29772 61921 +18175 61918 +31689 61915 +41804 61914 +25943 61906 +26519 61903 +29837 61901 +15810 61896 +15813 61893 +23117 61892 +17776 61889 +46227 61866 +35357 61862 +35685 61857 +43431 61855 +40129 61852 +40808 61849 +44014 61847 +13096 61846 +42554 61846 +39292 61842 +49002 61837 +22121 61814 +47891 61810 +32925 61807 +26394 61798 +40587 61798 +30721 61796 +15630 61788 +26951 61786 +23336 61766 +32302 61755 +22531 61745 +25781 61738 +23755 61736 +22236 61733 +12043 61731 +36743 61729 +18188 61723 +46072 61722 +39123 61718 +30755 61718 +7829 61705 +41718 61704 +43296 61704 +20010 61702 +34677 61701 +14046 61698 +16991 61697 +37553 61697 +3560 61695 +20676 61692 +42198 61685 +31934 61682 +26879 61678 +15785 61673 +24308 61668 +42356 61666 +22859 61660 +36858 61655 +45402 61650 +31810 61638 +47688 61619 +22703 61619 +17481 61615 +35517 61613 +44410 61596 +35228 61593 +12003 61583 +40349 61581 +29026 61576 +48273 61569 +28218 61562 +38920 61559 +25731 61552 +22655 61546 +32945 61545 +28161 61541 +46611 61534 +45022 61528 +13193 61514 +21056 61506 +47554 61503 +15357 61502 +32757 61495 +31238 61489 +30391 61486 +42518 61482 +24407 61479 +32046 61475 +39244 61451 +24246 61448 +10533 61447 +28082 61441 +35733 61435 +44252 61429 +21868 61424 +31517 61419 +17758 61418 +23236 61418 +12283 61412 +29301 61405 +24925 61402 +45996 61401 +8001 61395 +47727 61390 +36605 61372 +19177 61372 +39094 61368 +26386 61359 +10531 61355 +34166 61355 +32533 61342 +29259 61333 +33917 61333 +33652 61327 +44838 61322 +11977 61317 +41930 61315 +30349 61314 +28357 61311 +34081 61307 +17886 61305 +20266 61299 +6990 61299 +28344 61296 +34264 61290 +29036 61289 +34282 61284 +32202 61283 +14756 61281 +35003 61281 +36246 61280 +19915 61265 +37307 61264 +24312 61257 +32985 61253 +18289 61249 +12332 61245 +17766 61244 +34845 61239 +45369 61233 +30082 61222 +21652 61216 +23695 61211 +33585 61199 +15405 61190 +16081 61188 +15015 61185 +41476 61183 +19590 61181 +23859 61170 +18863 
61167 +18490 61161 +11187 61159 +42786 61148 +44843 61148 +27346 61147 +35406 61139 +33867 61134 +44840 61129 +36773 61125 +36432 61123 +50070 61122 +32028 61117 +45562 61116 +12938 61113 +14831 61105 +20942 61104 +13335 61100 +48405 61090 +34226 61088 +8481 61085 +15478 61083 +49748 61083 +21207 61079 +25144 61077 +46286 61073 +31469 61072 +43130 61062 +39822 61061 +25907 61048 +36045 61046 +47561 61037 +22769 61036 +34838 61036 +5424 61034 +38633 61033 +11007 61028 +31083 61025 +20759 61025 +30224 61022 +44914 61021 +22483 61020 +16869 61019 +31788 61019 +19798 61014 +50198 61001 +7355 60988 +21580 60971 +23855 60969 +27135 60966 +48654 60963 +38931 60960 +29194 60955 +40390 60945 +30877 60939 +36405 60929 +42073 60929 +40542 60924 +39772 60914 +30084 60911 +18392 60902 +38822 60895 +46277 60890 +4298 60888 +14049 60881 +32972 60873 +27962 60865 +29716 60851 +11249 60842 +15537 60840 +45005 60835 +26124 60831 +42291 60828 +29461 60826 +26717 60826 +28607 60825 +39161 60824 +36964 60824 +34207 60824 +26457 60816 +5592 60812 +15194 60805 +9158 60805 +21581 60802 +26370 60800 +48937 60796 +18032 60795 +31604 60791 +26340 60789 +1405 60786 +37525 60786 +22320 60783 +38502 60765 +39952 60762 +25738 60756 +32064 60753 +33677 60751 +30873 60749 +22828 60744 +46744 60739 +24217 60738 +29370 60732 +41645 60726 +39656 60717 +48535 60715 +26194 60715 +44288 60714 +23098 60713 +45155 60711 +42226 60701 +46898 60673 +23625 60672 +29862 60671 +48857 60666 +25276 60654 +33681 60648 +28836 60647 +15266 60643 +41852 60638 +13966 60622 +25219 60620 +31600 60617 +31549 60616 +48639 60615 +23089 60611 +47319 60606 +25121 60605 +38554 60603 +29199 60601 +30544 60598 +25221 60595 +17436 60594 +19659 60593 +20022 60593 +28641 60590 +17115 60590 +43599 60589 +15584 60589 +27692 60585 +42183 60566 +15947 60565 +42196 60562 +32392 60560 +25704 60558 +22448 60555 +21932 60549 +24458 60547 +39330 60543 +29537 60537 +32528 60527 +29635 60520 +44997 60518 +45073 60514 +28837 60511 +47802 60499 +46766 60497 +45475 60497 +17020 60484 +14977 60483 +42619 60480 +25906 60478 +30771 60467 +2658 60460 +19814 60460 +47552 60455 +41886 60448 +36106 60444 +47956 60433 +15634 60428 +11577 60425 +20343 60420 +18680 60417 +49984 60413 +28142 60403 +4681 60401 +44790 60400 +40636 60397 +23895 60390 +32745 60388 +46151 60384 +9372 60384 +32874 60383 +32964 60374 +26844 60369 +46193 60365 +18823 60359 +26653 60359 +30942 60355 +37816 60352 +22705 60342 +45194 60327 +44111 60323 +23440 60322 +44358 60319 +27211 60315 +33024 60303 +40917 60302 +35771 60302 +39092 60300 +40769 60294 +19554 60291 +29738 60290 +47403 60284 +30909 60271 +46191 60271 +25546 60261 +32484 60261 +27452 60257 +38965 60250 +13704 60250 +31022 60249 +30063 60249 +46134 60246 +35855 60243 +39829 60238 +4668 60237 +18218 60237 +29542 60233 +37684 60224 +48403 60210 +37470 60207 +12645 60205 +21926 60196 +13764 60195 +16558 60195 +4348 60194 +31939 60194 +27492 60183 +34608 60180 +37159 60179 +48423 60162 +47485 60161 +29556 60161 +39097 60161 +34664 60152 +49200 60151 +37914 60142 +25500 60137 +18549 60126 +28443 60112 +20713 60101 +13894 60095 +25094 60095 +22740 60094 +42441 60086 +43898 60085 +28816 60073 +44949 60068 +45585 60067 +23556 60066 +16983 60058 +28415 60056 +19974 60049 +33511 60047 +44443 60044 +39480 60041 +24250 60038 +30874 60037 +21630 60035 +21831 60035 +18777 60032 +29787 60031 +30393 60023 +38426 60015 +40588 60009 +37240 60004 +2755 59997 +28188 59996 +25996 59996 +29255 59994 +20895 59989 +34146 59983 +41953 59981 +32625 59979 +17248 59979 
+5104 59972 +48283 59969 +37391 59969 +37119 59967 +15429 59965 +35020 59965 +30954 59962 +31309 59954 +27782 59938 +6138 59937 +37897 59935 +35122 59932 +26781 59931 +21208 59929 +25639 59924 +39671 59923 +23133 59922 +15120 59920 +33760 59907 +31782 59900 +5080 59899 +44391 59896 +20757 59894 +47148 59891 +35142 59889 +46684 59885 +17792 59884 +32727 59878 +20802 59870 +19679 59863 +36800 59857 +33636 59854 +15716 59853 +2090 59853 +24758 59852 +17806 59850 +32715 59848 +27173 59847 +8490 59846 +27951 59844 +41407 59843 +47919 59843 +20785 59838 +31432 59834 +28214 59832 +30855 59825 +4278 59824 +35647 59823 +32609 59822 +35984 59820 +27312 59812 +11532 59807 +35734 59805 +3752 59802 +22909 59794 +33421 59791 +43923 59788 +22253 59785 +30503 59777 +44525 59772 +34164 59772 +19548 59770 +26258 59768 +16667 59767 +25062 59765 +41161 59755 +17007 59751 +43728 59748 +48269 59743 +7890 59742 +22186 59740 +15627 59733 +20867 59729 +44437 59723 +22017 59722 +31195 59722 +25457 59721 +33775 59719 +44192 59717 +22415 59711 +22631 59708 +42614 59705 +8146 59693 +6564 59688 +22366 59684 +25181 59682 +3692 59681 +11469 59681 +37216 59677 +20403 59676 +36788 59673 +13051 59672 +9777 59668 +39777 59660 +38979 59657 +35101 59653 +31829 59642 +30490 59641 +34311 59634 +33588 59631 +39042 59629 +15086 59629 +42351 59619 +22127 59613 +20446 59613 +32193 59611 +22487 59610 +24696 59607 +23691 59598 +2355 59598 +22470 59594 +30632 59593 +16066 59586 +22994 59578 +18611 59575 +16206 59575 +30720 59574 +19524 59571 +42997 59570 +13759 59568 +44871 59566 +23759 59565 +39909 59558 +21967 59549 +30814 59546 +28416 59542 +15409 59539 +25453 59537 +21444 59536 +41282 59530 +23662 59528 +30447 59521 +42386 59519 +36506 59517 +43084 59516 +31889 59513 +6310 59502 +30860 59487 +43771 59483 +45023 59478 +7019 59472 +44354 59465 +33983 59464 +41790 59457 +6263 59445 +25530 59441 +31321 59441 +32468 59441 +24233 59440 +48130 59438 +19685 59431 +5919 59426 +25904 59423 +29912 59421 +9214 59415 +26412 59413 +31392 59409 +15308 59408 +30030 59404 +24938 59402 +25177 59394 +30113 59390 +39421 59380 +29895 59379 +637 59379 +29731 59377 +25024 59376 +23760 59371 +4924 59370 +47312 59370 +19107 59368 +7978 59362 +26219 59359 +38351 59358 +10015 59354 +49737 59352 +2122 59351 +32814 59351 +9242 59350 +21958 59347 +11086 59347 +35831 59334 +8307 59333 +1425 59327 +36219 59316 +15046 59308 +23752 59307 +18640 59302 +16696 59300 +29491 59298 +21228 59290 +17533 59289 +15625 59288 +42692 59287 +48421 59280 +31831 59278 +23387 59264 +4284 59263 +7896 59262 +24707 59254 +16962 59245 +17963 59238 +34102 59236 +22370 59236 +34365 59235 +44677 59233 +34967 59231 +20476 59229 +21047 59228 +24221 59226 +37604 59224 +10110 59224 +35311 59223 +16331 59221 +24080 59216 +12140 59213 +18287 59204 +28087 59203 +36745 59201 +19933 59201 +46527 59199 +1579 59198 +24873 59187 +11327 59179 +8453 59171 +17280 59162 +38462 59156 +27430 59155 +26029 59149 +32480 59149 +26289 59148 +39720 59144 +25525 59137 +39337 59135 +43675 59128 +37484 59115 +44691 59114 +19943 59106 +32295 59101 +39733 59099 +24544 59095 +43855 59091 +50024 59086 +4288 59084 +4699 59083 +33198 59083 +42541 59070 +46488 59067 +30473 59066 +31922 59065 +28347 59052 +39566 59035 +14255 59033 +42167 59029 +26221 59028 +24314 59027 +34123 59026 +20057 59025 +36866 59020 +14915 59014 +33163 59014 +30519 59013 +13806 59001 +31365 59000 +27091 58999 +29417 58999 +43775 58997 +42290 58994 +27869 58990 +26648 58982 +22025 58979 +37438 58979 +42845 58975 +18756 58971 +46997 58969 +10611 
58969 +12494 58967 +40425 58967 +50251 58964 +47686 58963 +20323 58958 +33976 58945 +49486 58945 +37899 58943 +24848 58943 +44660 58932 +15451 58930 +42207 58925 +46089 58925 +30829 58919 +30130 58917 +26855 58915 +33779 58912 +28476 58907 +33009 58904 +34161 58898 +23405 58896 +21185 58889 +41524 58885 +35774 58884 +50089 58880 +43945 58873 +23482 58867 +26241 58853 +24090 58852 +4055 58849 +25751 58830 +21617 58829 +36761 58820 +24889 58820 +42599 58819 +38028 58818 +9809 58816 +47220 58809 +35577 58807 +41556 58807 +27410 58805 +27870 58805 +36608 58803 +24501 58798 +28902 58792 +45083 58791 +38879 58785 +18792 58785 +22579 58780 +22618 58773 +19577 58770 +38223 58764 +41870 58746 +17731 58745 +34818 58744 +25616 58740 +42436 58738 +29237 58738 +35110 58734 +20918 58733 +39459 58725 +28738 58715 +37463 58713 +39812 58709 +46767 58707 +14306 58705 +28783 58701 +20733 58699 +9962 58697 +44236 58696 +23278 58694 +29898 58694 +35209 58689 +34854 58685 +36438 58681 +25804 58680 +46007 58680 +26698 58677 +26875 58672 +22503 58672 +3140 58670 +40894 58669 +16793 58662 +44970 58661 +28844 58652 +34988 58638 +40442 58638 +33269 58635 +15147 58634 +29327 58633 +21695 58622 +20368 58617 +30485 58613 +40934 58611 +40389 58609 +21935 58608 +32141 58606 +31155 58593 +46463 58590 +35374 58589 +41701 58588 +30588 58588 +7556 58582 +35651 58581 +48014 58574 +10951 58573 +22775 58569 +20839 58564 +42994 58559 +41643 58555 +21887 58552 +22217 58545 +39291 58543 +26031 58542 +2455 58537 +25966 58536 +33596 58532 +46123 58530 +45633 58527 +35719 58524 +36830 58521 +41480 58517 +20058 58513 +23303 58513 +31095 58511 +40950 58509 +28317 58508 +18050 58492 +44108 58487 +36642 58468 +19655 58465 +41457 58463 +27562 58462 +33085 58453 +39871 58444 +19296 58443 +30937 58443 +7162 58440 +26877 58436 +23182 58431 +44848 58423 +9228 58421 +21529 58418 +44493 58418 +24040 58414 +36457 58410 +40929 58405 +3639 58404 +26880 58403 +3490 58403 +12282 58393 +30726 58389 +36514 58387 +42293 58387 +42928 58378 +26658 58374 +32215 58370 +10199 58369 +31698 58368 +37824 58366 +25432 58364 +35717 58359 +24490 58347 +20644 58343 +29493 58341 +47328 58335 +26061 58334 +49457 58333 +3796 58323 +14164 58318 +30002 58318 +36586 58317 +26986 58314 +28606 58312 +22040 58310 +43989 58306 +14019 58297 +24421 58293 +7483 58288 +18850 58283 +17627 58279 +35014 58255 +32881 58255 +12137 58253 +39708 58249 +20632 58244 +27010 58234 +49318 58218 +9896 58209 +19924 58190 +33315 58190 +40665 58189 +49537 58186 +28305 58184 +6020 58183 +23694 58181 +50135 58179 +41213 58174 +30275 58170 +44559 58165 +36944 58162 +40264 58158 +30853 58143 +41034 58141 +30051 58128 +25108 58126 +21326 58123 +40424 58118 +47665 58118 +26100 58118 +26186 58117 +33824 58116 +21002 58116 +19567 58105 +19420 58099 +31818 58099 +47928 58097 +31403 58095 +34851 58094 +17259 58085 +23649 58080 +4612 58079 +49622 58074 +27306 58073 +23961 58067 +17213 58060 +23112 58048 +41326 58037 +26668 58033 +32872 58029 +38302 58027 +32333 58025 +27931 58021 +37915 58020 +47689 58020 +45239 58013 +21290 58011 +28429 58011 +16841 58007 +26582 58002 +46451 58002 +32483 58001 +24178 57999 +30607 57998 +38493 57996 +40915 57983 +38099 57982 +48453 57976 +43474 57970 +37051 57967 +24260 57964 +25798 57964 +32095 57962 +34056 57956 +29861 57953 +21554 57950 +23179 57940 +8448 57938 +23810 57932 +36162 57927 +10483 57927 +40977 57926 +44150 57920 +28502 57918 +45587 57910 +21422 57909 +30592 57908 +32613 57895 +13623 57890 +37599 57890 +42127 57890 +26969 57887 +18368 57885 +21265 57877 
+27268 57871 +29345 57862 +20709 57862 +45775 57858 +46278 57852 +16969 57849 +16799 57847 +37718 57845 +1859 57835 +39257 57835 +32677 57812 +23258 57812 +36896 57812 +15710 57809 +34177 57809 +14721 57805 +41429 57804 +19591 57799 +30239 57797 +37976 57793 +33871 57788 +35654 57781 +33555 57780 +24725 57779 +20477 57769 +29652 57766 +2966 57764 +37562 57760 +15898 57751 +24228 57750 +13010 57750 +11159 57746 +9851 57740 +48591 57739 +40228 57735 +16475 57733 +34552 57731 +46691 57726 +35589 57719 +37538 57717 +3333 57717 +46905 57712 +24521 57712 +32453 57712 +29361 57708 +36292 57703 +23706 57697 +48500 57690 +24560 57689 +24082 57684 +37356 57680 +34010 57679 +5507 57677 +31449 57675 +36583 57673 +21239 57664 +25260 57662 +5656 57660 +43310 57659 +45031 57659 +39828 57655 +39619 57654 +26192 57654 +7540 57648 +27163 57642 +39722 57639 +8675 57634 +30176 57634 +28451 57630 +42417 57625 +20730 57612 +38175 57608 +31522 57604 +29405 57604 +31421 57592 +38242 57585 +49165 57573 +46507 57569 +30415 57568 +33611 57531 +9493 57527 +30300 57527 +41648 57526 +21606 57519 +30775 57514 +29254 57513 +33285 57511 +29955 57497 +47458 57491 +17701 57484 +28882 57476 +25913 57473 +28885 57472 +26798 57469 +10641 57466 +46368 57462 +49325 57455 +34356 57454 +39616 57454 +28539 57452 +14322 57451 +21169 57445 +25631 57441 +34029 57439 +20450 57438 +41045 57437 +30524 57435 +7956 57430 +43701 57424 +36644 57420 +31265 57420 +40718 57412 +5663 57408 +34034 57406 +17855 57404 +32521 57402 +29151 57392 +45108 57388 +36541 57386 +28506 57372 +37751 57368 +19200 57366 +6540 57363 +13102 57358 +23244 57352 +9956 57349 +20752 57346 +2796 57338 +21960 57327 +21593 57325 +27684 57319 +34954 57316 +28602 57313 +30255 57309 +9503 57289 +92 57268 +35622 57267 +34763 57264 +23603 57258 +39654 57255 +20151 57254 +33265 57249 +47879 57242 +10841 57235 +45496 57232 +24070 57230 +33658 57225 +34262 57220 +28521 57218 +35113 57216 +33835 57211 +32906 57205 +7213 57201 +11417 57197 +14270 57194 +31977 57187 +17950 57178 +38045 57170 +42368 57164 +4799 57161 +26042 57158 +40837 57146 +29265 57144 +14455 57141 +48714 57140 +21976 57126 +23601 57117 +25541 57116 +24494 57115 +15327 57113 +43999 57107 +7640 57103 +35437 57099 +36327 57091 +37352 57086 +10055 57082 +14269 57078 +44978 57077 +44333 57074 +44042 57065 +34735 57059 +31734 57056 +32679 57052 +32132 57050 +38814 57042 +5233 57039 +2131 57037 +23349 57036 +23713 57034 +38935 57024 +7549 57023 +39255 57022 +36205 57015 +23181 57007 +12392 57000 +4611 56997 +47697 56991 +10445 56985 +46884 56981 +30132 56975 +15412 56974 +16899 56965 +7757 56961 +30042 56960 +43349 56955 +35006 56948 +45966 56948 +25004 56945 +14274 56929 +29267 56926 +31027 56918 +23164 56918 +24182 56915 +25292 56902 +24928 56902 +29360 56900 +44343 56899 +42476 56895 +32701 56892 +36995 56889 +34428 56886 +25509 56886 +41430 56885 +2571 56885 +45774 56882 +18911 56874 +14603 56874 +43748 56867 +45247 56860 +29229 56854 +29466 56847 +30761 56839 +44128 56834 +35786 56825 +43379 56824 +24338 56822 +23666 56814 +28980 56814 +24760 56811 +45658 56806 +42627 56806 +20779 56803 +30777 56791 +37610 56790 +38244 56789 +21688 56787 +8467 56787 +47609 56787 +47518 56786 +17539 56785 +23292 56784 +25677 56778 +6331 56778 +12410 56776 +4182 56760 +22580 56753 +23153 56752 +46429 56749 +37439 56748 +2715 56743 +23848 56742 +25565 56740 +27605 56739 +39490 56731 +22156 56730 +18459 56725 +26497 56719 +41898 56715 +25325 56711 +41252 56710 +18967 56709 +44483 56703 +35658 56701 +44019 56697 +35362 56684 +38435 
56682 +43479 56681 +47186 56681 +45644 56679 +27762 56677 +37884 56675 +30935 56659 +25309 56658 +6959 56658 +29456 56655 +28639 56646 +31731 56635 +49627 56635 +32774 56628 +10855 56627 +21450 56620 +25266 56612 +30508 56607 +37990 56606 +30179 56602 +14920 56602 +29653 56601 +26718 56596 +48988 56590 +26744 56589 +41778 56586 +37373 56586 +11944 56581 +21584 56570 +37761 56569 +13978 56568 +22395 56567 +21795 56564 +14565 56563 +9610 56556 +45795 56554 +16465 56554 +40421 56554 +38423 56551 +28625 56551 +22595 56550 +45209 56538 +28903 56537 +20938 56528 +6003 56526 +49197 56525 +20467 56516 +26980 56515 +44718 56514 +37318 56513 +26274 56511 +25398 56509 +18111 56505 +26771 56503 +44500 56502 +36791 56501 +15078 56500 +39845 56496 +8443 56486 +45400 56483 +16571 56481 +31395 56477 +36096 56475 +15157 56474 +36656 56459 +20694 56450 +25421 56435 +15170 56430 +24913 56430 +27479 56430 +27592 56429 +9444 56424 +39406 56421 +23097 56418 +11970 56414 +38608 56412 +33340 56409 +43425 56406 +15729 56403 +19197 56400 +15721 56391 +1783 56378 +27719 56373 +43995 56372 +37954 56368 +28109 56358 +35803 56358 +33482 56352 +35337 56350 +32882 56344 +4815 56338 +39034 56337 +47608 56334 +33181 56327 +20806 56323 +21295 56321 +28593 56319 +44067 56314 +16909 56311 +49037 56310 +22986 56305 +29452 56304 +24662 56297 +9682 56294 +47015 56286 +24831 56285 +7349 56285 +36074 56282 +33973 56279 +32736 56278 +22638 56278 +11645 56278 +26379 56277 +12663 56274 +18846 56271 +38398 56269 +42158 56265 +42395 56261 +41120 56258 +49842 56258 +48781 56253 +32254 56248 +47012 56247 +22196 56247 +5944 56245 +21755 56232 +30033 56219 +47164 56219 +20798 56212 +28633 56207 +19363 56204 +39451 56202 +40559 56195 +20807 56192 +20831 56189 +37198 56188 +21783 56184 +33736 56183 +32472 56179 +38583 56177 +28916 56177 +38820 56176 +40088 56171 +30668 56136 +20281 56136 +47586 56130 +18271 56125 +9685 56124 +25769 56120 +11190 56116 +33946 56112 +39630 56109 +32578 56106 +27079 56102 +12632 56092 +11402 56091 +29957 56084 +16199 56073 +29406 56069 +36448 56063 +45942 56060 +26271 56059 +19932 56054 +40870 56054 +27217 56052 +7015 56046 +33600 56044 +33255 56038 +11865 56033 +25280 56033 +26444 56031 +25696 56031 +2065 56019 +45897 56008 +17459 56005 +6008 56004 +11671 56003 +44366 56001 +24041 55998 +1488 55996 +32530 55992 +35923 55988 +11218 55985 +31599 55983 +30080 55976 +21693 55973 +43811 55970 +40745 55968 +33247 55956 +33907 55953 +30069 55944 +11194 55942 +14036 55942 +25466 55940 +36610 55939 +37121 55938 +3391 55932 +21074 55932 +28286 55931 +39965 55927 +26384 55926 +12752 55921 +16814 55920 +22613 55915 +15096 55898 +16670 55896 +25190 55889 +29871 55885 +18603 55880 +27134 55878 +15051 55871 +17878 55869 +43859 55861 +7315 55860 +35326 55852 +11522 55852 +5378 55849 +27364 55837 +27107 55833 +35210 55831 +19311 55830 +18879 55824 +5955 55818 +23523 55816 +26009 55814 +32848 55813 +5015 55811 +26509 55809 +23159 55806 +44250 55802 +3346 55802 +29802 55802 +24369 55802 +40855 55798 +28090 55795 +39205 55793 +26974 55791 +28359 55790 +13984 55790 +37714 55786 +29663 55782 +24993 55780 +20768 55775 +19334 55762 +49112 55756 +37084 55748 +42408 55740 +10570 55739 +27845 55732 +37329 55724 +16927 55723 +23729 55720 +4311 55715 +17622 55707 +42760 55700 +37325 55691 +7293 55688 +20981 55688 +16340 55686 +34632 55682 +36070 55679 +14500 55678 +50066 55675 +25148 55670 +14652 55668 +20436 55668 +25390 55658 +29385 55652 +39670 55647 +24718 55646 +23243 55641 +23208 55638 +26012 55637 +18993 55632 +37514 55622 +46049 
55621 +29527 55618 +30038 55617 +21054 55616 +19770 55610 +24984 55608 +11572 55606 +32710 55597 +36512 55593 +23604 55588 +39343 55587 +39150 55583 +41069 55580 +28829 55578 +28034 55573 +28046 55565 +41950 55564 +41328 55563 +24420 55562 +32151 55558 +43653 55551 +25068 55547 +29093 55545 +32560 55544 +38808 55541 +45851 55541 +45982 55540 +43308 55535 +21579 55532 +25036 55520 +34110 55510 +47261 55508 +41764 55506 +10482 55505 +33627 55504 +20937 55497 +6720 55495 +48968 55483 +22228 55479 +20073 55476 +24651 55475 +41046 55474 +43858 55473 +2023 55471 +42221 55463 +19630 55459 +40776 55457 +23678 55455 +25411 55446 +28713 55443 +43042 55438 +22853 55436 +38400 55433 +33025 55431 +49108 55431 +18676 55426 +24588 55426 +35106 55418 +44923 55414 +24720 55411 +30276 55404 +43688 55403 +27675 55399 +12668 55398 +40801 55396 +37708 55386 +33839 55382 +19008 55378 +5547 55375 +25749 55373 +24706 55368 +10333 55361 +18239 55361 +26917 55359 +26452 55350 +33710 55348 +26494 55342 +38927 55341 +26521 55341 +36132 55339 +30423 55339 +30912 55336 +42573 55334 +36989 55319 +1159 55317 +26785 55317 +25179 55316 +6254 55315 +9886 55315 +37322 55315 +41360 55313 +32084 55306 +4215 55303 +21093 55295 +41817 55292 +42431 55288 +35924 55287 +36440 55283 +37254 55279 +36778 55270 +41683 55269 +28203 55268 +21836 55268 +28580 55267 +19569 55260 +33848 55259 +28563 55259 +36358 55240 +39350 55237 +38102 55229 +29981 55228 +34840 55222 +37728 55218 +49375 55217 +12582 55217 +28385 55203 +44385 55201 +9606 55200 +27903 55194 +42334 55191 +34395 55188 +13349 55188 +29089 55184 +24773 55181 +34629 55179 +30020 55178 +38651 55178 +3380 55177 +24051 55176 +35935 55175 +38191 55172 +27224 55154 +17866 55153 +46107 55151 +37206 55147 +37314 55142 +28295 55130 +27414 55129 +39206 55129 +46362 55127 +12613 55119 +24013 55119 +37465 55110 +46849 55105 +46132 55100 +43911 55096 +30204 55092 +44470 55088 +33388 55088 +14854 55084 +43881 55084 +15227 55080 +28450 55073 +44922 55068 +24815 55067 +21478 55067 +49696 55063 +19402 55060 +28273 55059 +24119 55057 +31375 55056 +26646 55054 +30013 55048 +29009 55044 +50106 55032 +22153 55030 +35130 55022 +47545 55019 +49006 55019 +39019 55013 +31878 54992 +37745 54990 +9731 54987 +2786 54984 +1237 54980 +15084 54978 +18908 54970 +37808 54965 +38283 54963 +32327 54961 +36066 54961 +37752 54960 +35860 54960 +40931 54957 +33517 54955 +37768 54955 +21748 54952 +22584 54951 +27362 54948 +49570 54941 +23507 54936 +21192 54934 +34012 54933 +3604 54928 +49407 54927 +39834 54919 +32111 54909 +46921 54896 +35546 54890 +38730 54887 +33291 54885 +14578 54872 +30623 54867 +34905 54864 +13402 54851 +25365 54845 +45747 54842 +41352 54842 +24053 54840 +43997 54836 +33466 54832 +36274 54831 +28423 54820 +27704 54818 +24071 54814 +28616 54814 +5554 54807 +1945 54798 +29914 54797 +41493 54795 +19851 54794 +32502 54786 +31087 54777 +24673 54771 +49247 54770 +27043 54766 +24039 54762 +19980 54751 +30902 54750 +40513 54747 +49195 54742 +44282 54741 +6962 54740 +33446 54738 +42397 54735 +49185 54735 +31501 54730 +35368 54728 +34573 54726 +41454 54719 +16417 54718 +16422 54702 +42239 54701 +38337 54695 +18681 54683 +1442 54682 +45813 54682 +6329 54675 +11965 54674 +41875 54669 +42803 54656 +47692 54656 +14906 54654 +18349 54651 +29579 54648 +7046 54648 +30696 54642 +30229 54641 +37113 54638 +28568 54637 +47470 54636 +29689 54632 +23917 54627 +38031 54626 +44117 54623 +17848 54622 +49873 54615 +15655 54615 +43052 54614 +158 54613 +7106 54607 +6395 54603 +22995 54602 +30522 54599 +30827 54589 +35233 
54585 +35573 54579 +32281 54578 +31174 54577 +3787 54571 +34782 54569 +33201 54556 +44864 54556 +31413 54556 +6474 54555 +50219 54539 +22699 54535 +2210 54524 +4163 54521 +30357 54521 +27142 54520 +24646 54519 +28572 54516 +29673 54514 +23135 54512 +32387 54512 +16279 54498 +43251 54489 +6775 54488 +24571 54483 +33799 54482 +22963 54472 +16661 54472 +46116 54471 +38796 54465 +15851 54456 +37305 54454 +27101 54452 +33630 54448 +42857 54446 +43593 54445 +18148 54437 +29128 54422 +27721 54421 +17046 54415 +26210 54413 +20483 54411 +48994 54409 +31884 54408 +16945 54405 +23452 54403 +36232 54402 +19839 54401 +30413 54400 +11227 54394 +37551 54379 +13875 54376 +26637 54372 +13691 54362 +17458 54359 +21626 54349 +26808 54348 +19880 54341 +25806 54340 +10687 54336 +27695 54334 +34958 54334 +42971 54330 +42718 54327 +29850 54320 +20279 54312 +28682 54304 +38964 54284 +42358 54281 +31998 54278 +31531 54265 +29983 54262 +19179 54261 +40153 54257 +33124 54257 +3145 54254 +10845 54253 +25229 54249 +2418 54241 +25288 54238 +13649 54236 +29243 54236 +1684 54235 +27864 54231 +30386 54227 +41758 54226 +17008 54222 +46194 54218 +26819 54216 +17693 54216 +9633 54216 +40457 54213 +12830 54211 +49862 54210 +29704 54209 +13268 54209 +22405 54207 +40514 54205 +6780 54204 +25695 54200 +8600 54185 +42626 54183 +17380 54181 +25360 54178 +15581 54176 +35082 54174 +39585 54163 +30953 54160 +16371 54148 +28849 54147 +16653 54143 +10867 54143 +16491 54137 +38716 54134 +27888 54133 +49637 54129 +19780 54129 +13409 54124 +6718 54118 +8570 54118 +23048 54115 +25501 54100 +37408 54097 +31075 54093 +13436 54090 +39095 54089 +30064 54085 +33584 54078 +20931 54076 +16272 54070 +36188 54070 +42658 54069 +31003 54068 +39543 54067 +47404 54064 +16360 54062 +23375 54058 +25478 54054 +38886 54052 +6800 54049 +28729 54035 +7537 54035 +43968 54033 +45488 54033 +41262 54032 +22819 54032 +25006 54032 +49179 54032 +45159 54028 +42813 54028 +14951 54027 +15385 54026 +20609 54023 +24888 54021 +46346 54011 +25473 54010 +32595 54009 +35605 54006 +22792 54005 +46924 53997 +15003 53994 +49980 53992 +19004 53991 +9037 53986 +40727 53986 +18861 53980 +41314 53971 +22965 53970 +44953 53968 +32807 53966 +30740 53952 +47137 53950 +26903 53934 +27996 53933 +42157 53933 +40595 53933 +30690 53932 +22220 53931 +12496 53928 +5560 53928 +41440 53923 +36847 53922 +24046 53919 +19216 53915 +37264 53915 +38510 53911 +24810 53909 +24124 53906 +16283 53900 +37476 53899 +26143 53896 +45524 53883 +29889 53882 +35606 53878 +14599 53877 +22373 53874 +31465 53866 +31670 53855 +27395 53855 +44348 53848 +23011 53847 +14618 53847 +46081 53846 +34208 53839 +2916 53837 +24785 53828 +47358 53827 +45861 53821 +28285 53818 +40570 53809 +12907 53808 +37079 53803 +42569 53792 +18882 53788 +29729 53785 +18444 53782 +34047 53777 +28746 53774 +35159 53772 +23989 53771 +16702 53767 +34470 53762 +9767 53761 +21875 53758 +36909 53757 +42077 53751 +19641 53747 +40497 53744 +30767 53737 +15106 53732 +30220 53732 +37009 53730 +29154 53726 +13039 53723 +693 53718 +23069 53718 +29959 53714 +42211 53714 +26359 53711 +3655 53710 +30048 53694 +28790 53688 +28701 53687 +42427 53682 +33773 53681 +35196 53676 +42916 53673 +8544 53661 +37063 53660 +26893 53655 +44633 53654 +19156 53654 +35019 53645 +40355 53644 +23408 53643 +21378 53639 +29109 53638 +21252 53638 +16984 53632 +27078 53629 +39712 53628 +24896 53624 +17017 53619 +26788 53616 +38870 53615 +12072 53614 +35730 53613 +34216 53613 +43459 53609 +20247 53600 +19145 53600 +5738 53598 +28023 53598 +11505 53582 +30545 53572 +21949 
53572 +22974 53571 +11802 53566 +26600 53561 +28932 53556 +23419 53553 +43022 53552 +26300 53550 +22711 53549 +1932 53546 +32650 53542 +36892 53536 +33846 53534 +34527 53531 +9453 53529 +24343 53528 +2133 53526 +26408 53521 +19482 53516 +40598 53513 +34858 53513 +29944 53509 +23499 53509 +40332 53509 +13653 53506 +30135 53503 +29724 53501 +19648 53498 +49383 53497 +45483 53495 +4344 53489 +26837 53478 +35413 53477 +30630 53469 +25025 53464 +40924 53457 +30686 53455 +34292 53453 +29932 53445 +32693 53445 +40447 53439 +44886 53438 +25607 53437 +47461 53435 +46233 53432 +45680 53429 +45777 53428 +20225 53424 +25805 53422 +42568 53421 +34491 53420 +19708 53419 +39028 53418 +33506 53415 +23304 53414 +27804 53410 +36747 53397 +29219 53395 +28678 53393 +26910 53391 +35583 53388 +38264 53387 +45517 53387 +27926 53385 +7181 53382 +45088 53382 +48854 53378 +28805 53378 +31390 53376 +41027 53375 +22890 53369 +28898 53368 +29576 53368 +44178 53364 +21654 53362 +24764 53356 +49019 53352 +24977 53350 +49134 53349 +14120 53348 +48886 53346 +45263 53346 +29117 53341 +24502 53339 +20107 53335 +37105 53333 +33565 53329 +24691 53326 +21870 53320 +47756 53316 +40377 53314 +37896 53313 +42645 53308 +22910 53306 +38934 53284 +26770 53282 +38039 53278 +12398 53275 +37406 53273 +29481 53269 +10244 53259 +35138 53258 +25596 53250 +22464 53242 +48209 53237 +14344 53233 +46269 53231 +45676 53229 +16149 53216 +15200 53214 +6689 53213 +27433 53210 +26074 53207 +47216 53205 +29762 53200 +15952 53198 +7249 53195 +26306 53194 +26827 53185 +28554 53185 +22495 53185 +2226 53182 +35023 53182 +31645 53181 +23199 53180 +27482 53178 +34599 53178 +20191 53177 +33067 53174 +44404 53171 +34666 53171 +30792 53170 +47974 53170 +16886 53170 +32150 53168 +50151 53164 +34070 53163 +39133 53148 +22577 53145 +43588 53145 +25503 53145 +24556 53143 +26270 53130 +37890 53124 +12514 53120 +17379 53106 +13200 53101 +25610 53099 +34671 53093 +140 53091 +45078 53087 +27654 53083 +21910 53078 +42404 53064 +18232 53057 +28669 53054 +46929 53051 +19329 53049 +25336 53042 +41746 53036 +34304 53034 +43092 53028 +22478 53022 +20465 53014 +32420 53012 +22042 53003 +12067 53001 +41535 53000 +27601 52999 +36364 52989 +39127 52985 +5569 52983 +28436 52975 +13450 52973 +35287 52972 +36601 52968 +28947 52967 +29028 52961 +47714 52961 +21467 52957 +244 52954 +29624 52945 +20845 52944 +34502 52941 +35980 52941 +30392 52936 +28005 52935 +26926 52930 +23536 52926 +48320 52918 +40471 52914 +29030 52908 +44870 52900 +45183 52897 +47499 52897 +2009 52895 +26133 52884 +29883 52883 +31445 52877 +25754 52873 +34579 52856 +43809 52853 +40583 52852 +41074 52852 +36444 52850 +31405 52844 +15087 52843 +43657 52840 +21905 52838 +31363 52837 +48498 52832 +42453 52829 +21129 52825 +39730 52824 +16843 52823 +32104 52822 +16072 52816 +44588 52815 +31541 52815 +12154 52809 +20219 52802 +34383 52800 +16021 52799 +5343 52799 +14355 52789 +39083 52788 +24109 52785 +39569 52775 +20029 52769 +46133 52765 +41731 52761 +12628 52748 +40753 52744 +10489 52743 +24235 52736 +34234 52736 +28518 52736 +22659 52725 +43738 52724 +27360 52720 +33667 52719 +35918 52717 +6826 52717 +30301 52717 +22817 52715 +426 52710 +34260 52700 +35782 52699 +43524 52693 +7289 52692 +26708 52692 +32325 52690 +39465 52687 +44610 52686 +1391 52686 +22624 52685 +32162 52667 +46034 52666 +40847 52662 +42805 52661 +18896 52661 +14400 52660 +37928 52659 +18673 52648 +23146 52640 +27429 52639 +22416 52632 +40212 52625 +33719 52625 +33305 52619 +21616 52619 +27544 52595 +21940 52589 +28251 52588 +9076 52588 +40789 
52577 +28007 52574 +42058 52572 +33906 52563 +25138 52560 +33050 52558 +25879 52557 +47535 52556 +39983 52546 +22629 52543 +25304 52537 +40302 52527 +31609 52523 +21937 52520 +14849 52518 +18754 52518 +39435 52515 +48378 52513 +21874 52510 +21696 52510 +32673 52504 +45215 52500 +35510 52499 +45910 52497 +27834 52496 +33731 52489 +35118 52483 +28308 52479 +33757 52476 +30500 52475 +28610 52464 +41559 52463 +24680 52451 +44663 52449 +32932 52449 +17472 52447 +31177 52443 +43934 52431 +16442 52430 +48115 52429 +20348 52427 +40851 52424 +28131 52421 +33364 52420 +21103 52419 +43348 52417 +19184 52416 +27152 52410 +22672 52409 +45059 52406 +40151 52402 +4443 52397 +49270 52393 +36000 52391 +29541 52391 +23444 52390 +13147 52384 +46837 52383 +29622 52383 +23609 52379 +35239 52370 +36317 52362 +2255 52360 +2162 52359 +11275 52357 +49362 52356 +30791 52352 +27406 52346 +31825 52344 +42152 52339 +24982 52336 +22124 52336 +28739 52334 +19545 52332 +32783 52327 +41772 52322 +24813 52321 +31638 52319 +35146 52312 +10624 52311 +20032 52310 +50050 52305 +45867 52302 +48249 52302 +32631 52298 +8506 52297 +33158 52291 +32063 52290 +20370 52286 +19948 52285 +3172 52285 +19473 52284 +49661 52284 +23656 52278 +32262 52270 +23908 52268 +9943 52263 +33093 52257 +21311 52257 +32784 52257 +32554 52256 +37358 52253 +32911 52252 +41077 52248 +31969 52246 +26749 52244 +33348 52244 +37370 52235 +42724 52234 +23127 52234 +41546 52229 +9741 52228 +24869 52222 +29574 52220 +45594 52219 +47905 52217 +41594 52214 +22234 52206 +34180 52205 +24297 52203 +40476 52201 +23871 52200 +14885 52199 +15245 52190 +7086 52188 +24727 52183 +28581 52180 +6845 52180 +21379 52176 +24354 52173 +11042 52171 +30645 52170 +11712 52166 +14656 52159 +40879 52156 +22209 52156 +47318 52152 +46212 52149 +42433 52143 +21600 52139 +11883 52139 +21657 52137 +25928 52133 +36912 52131 +38252 52126 +35025 52123 +22450 52118 +36846 52117 +29617 52111 +42710 52110 +30993 52108 +26339 52103 +48334 52095 +20135 52089 +19058 52087 +47058 52085 +21671 52085 +21972 52077 +41085 52076 +43276 52074 +42236 52065 +36455 52063 +38479 52063 +34762 52062 +31967 52061 +26760 52058 +24653 52055 +24551 52052 +31420 52051 +32598 52048 +13034 52046 +20549 52045 +10743 52042 +16455 52038 +1905 52035 +8172 52034 +21717 52032 +42888 52026 +34172 52023 +13240 52016 +17164 52015 +27124 52015 +9083 52013 +32857 52002 +39732 52001 +25090 52000 +32837 51993 +17310 51987 +27089 51983 +28939 51981 +21095 51980 +35107 51967 +28139 51965 +35865 51963 +43423 51955 +23462 51949 +46584 51949 +26179 51944 +28030 51942 +40084 51942 +5240 51939 +20764 51937 +46595 51936 +31328 51936 +35849 51934 +45415 51932 +35031 51930 +37089 51917 +43867 51914 +25441 51904 +19918 51896 +13660 51895 +27394 51894 +42640 51893 +48576 51891 +24652 51885 +47647 51872 +34537 51870 +20123 51868 +19973 51868 +25049 51866 +38458 51864 +32931 51863 +33332 51860 +30821 51858 +38309 51853 +29853 51842 +44194 51831 +25237 51830 +21440 51828 +40676 51823 +44703 51823 +25209 51822 +19617 51821 +28504 51820 +22440 51819 +11858 51811 +38207 51809 +35690 51803 +22583 51802 +31665 51800 +23395 51795 +38695 51794 +36134 51789 +47983 51788 +30170 51788 +41675 51786 +17550 51784 +40500 51782 +9994 51782 +22521 51779 +34477 51778 +21963 51777 +47972 51777 +37237 51767 +25917 51765 +33473 51765 +22850 51763 +31933 51763 +19835 51763 +37789 51761 +31874 51757 +36780 51755 +1599 51750 +37468 51740 +29366 51740 +36289 51731 +34392 51731 +47290 51722 +33259 51719 +16658 51714 +32384 51713 +17941 51713 +14390 51703 +23904 51703 
+19955 51700 +38736 51694 +42609 51694 +33865 51693 +40426 51673 +7005 51663 +11590 51655 +38597 51652 +20418 51644 +14020 51643 +33401 51643 +44956 51638 +32543 51626 +48722 51623 +36426 51621 +31888 51616 +4728 51615 +48946 51612 +19971 51611 +5809 51604 +34426 51599 +19619 51599 +43976 51593 +15288 51591 +31378 51588 +33841 51587 +27073 51583 +45552 51582 +28719 51582 +33275 51581 +23169 51580 +33134 51573 +27978 51572 +18942 51572 +49068 51570 +28178 51569 +33575 51569 +20389 51560 +8140 51560 +39246 51558 +26942 51555 +1991 51554 +12405 51541 +33002 51539 +34358 51531 +46795 51531 +4555 51518 +36283 51518 +31236 51518 +11600 51509 +42729 51506 +9171 51503 +25579 51502 +814 51501 +49408 51493 +13341 51489 +41575 51489 +41698 51484 +23469 51467 +47680 51463 +15612 51463 +27979 51458 +50054 51457 +44459 51457 +37557 51452 +18738 51450 +27795 51448 +8010 51447 +30580 51441 +21174 51434 +45978 51434 +30878 51425 +32283 51425 +44537 51420 +42179 51416 +30059 51408 +29567 51407 +25643 51404 +24976 51403 +26507 51402 +45316 51399 +36775 51395 +47706 51393 +27635 51393 +45744 51392 +17831 51391 +39561 51387 +17993 51384 +13358 51383 +49474 51382 +32379 51378 +40124 51370 +26466 51368 +21386 51368 +13563 51360 +49470 51355 +26666 51353 +31935 51352 +30594 51351 +10455 51345 +31663 51333 +46413 51331 +27001 51329 +45047 51329 +24826 51328 +36224 51313 +27062 51308 +47451 51304 +44882 51300 +36168 51297 +38290 51296 +46184 51291 +36129 51291 +22818 51284 +46892 51280 +18347 51273 +20106 51271 +3983 51270 +25326 51268 +38916 51261 +16724 51260 +46685 51260 +14059 51259 +31822 51256 +25337 51255 +13776 51255 +36464 51245 +45962 51245 +47474 51241 +16961 51238 +10293 51234 +25829 51227 +32959 51227 +44413 51224 +47437 51223 +16240 51217 +21786 51209 +9844 51209 +29754 51205 +45651 51197 +19970 51179 +22134 51177 +9339 51165 +34996 51164 +37319 51155 +24431 51155 +17489 51153 +47225 51146 +20809 51141 +28942 51141 +33733 51140 +30274 51139 +30346 51138 +36856 51134 +19365 51129 +26405 51124 +28053 51104 +29227 51103 +13247 51102 +24555 51100 +25080 51088 +18486 51085 +23983 51085 +24900 51081 +39386 51079 +21526 51079 +25761 51078 +35294 51075 +28621 51071 +30137 51062 +34557 51061 +34501 51061 +41715 51057 +15370 51052 +30436 51050 +31582 51049 +41088 51049 +39774 51047 +50214 51040 +25916 51040 +44322 51029 +38656 51025 +40794 51024 +37194 51021 +27627 51020 +16939 51015 +24931 51014 +29440 51010 +32306 51008 +37803 51007 +34246 51006 +41135 51002 +35949 50997 +29296 50997 +1066 50992 +2738 50991 +36250 50981 +38026 50975 +48133 50974 +40603 50972 +22308 50965 +36433 50954 +17328 50953 +9843 50953 +32090 50951 +5251 50944 +44851 50934 +23044 50932 +28750 50925 +34130 50924 +11141 50919 +28241 50917 +20706 50916 +3590 50913 +24953 50912 +24417 50908 +38000 50905 +44044 50905 +35864 50898 +7146 50894 +27826 50894 +8021 50891 +48694 50888 +48756 50887 +35595 50885 +36312 50870 +25291 50855 +15547 50852 +31459 50850 +44998 50845 +41191 50841 +9219 50826 +16437 50813 +32792 50812 +14878 50805 +7512 50797 +25037 50796 +30158 50787 +23534 50784 +46760 50782 +12215 50775 +40304 50772 +45926 50771 +14535 50770 +48470 50762 +33586 50761 +8715 50761 +34678 50761 +34531 50760 +39636 50758 +149 50753 +14587 50750 +24570 50749 +15701 50748 +48684 50747 +38494 50732 +34933 50731 +45221 50726 +28471 50726 +29097 50723 +41329 50723 +39554 50718 +48664 50712 +32766 50697 +28210 50694 +6519 50692 +7053 50688 +17966 50686 +28895 50684 +39517 50683 +33836 50671 +14753 50663 +35280 50660 +8422 50654 +18424 50653 +7489 
50648 +21766 50645 +49554 50637 +34890 50631 +14677 50628 +41598 50626 +24550 50623 +40975 50623 +14250 50622 +22334 50622 +18407 50613 +7860 50598 +26298 50597 +38336 50596 +17139 50591 +34723 50591 +19260 50591 +32244 50586 +19950 50581 +32740 50577 +48506 50573 +37843 50572 +23568 50571 +15513 50571 +27351 50564 +30778 50556 +37671 50554 +21081 50550 +21973 50548 +8927 50545 +23016 50544 +26633 50537 +2519 50537 +2452 50533 +46985 50530 +29176 50521 +11217 50514 +24140 50505 +45683 50504 +26434 50502 +42908 50502 +22881 50500 +37005 50484 +49411 50476 +25762 50476 +27318 50473 +28866 50470 +38236 50468 +23053 50467 +7528 50466 +30862 50464 +45808 50462 +12832 50455 +43943 50448 +44833 50448 +47930 50444 +7983 50440 +46117 50435 +37069 50427 +27150 50422 +12935 50421 +31132 50418 +37315 50417 +29387 50414 +28548 50413 +33091 50413 +36816 50412 +36497 50409 +16142 50409 +28754 50402 +27014 50395 +36494 50392 +46234 50389 +21044 50387 +29480 50380 +43183 50377 +27338 50373 +18833 50371 +31679 50370 +31109 50367 +27168 50366 +22654 50362 +33474 50355 +25047 50354 +30865 50354 +34291 50353 +33343 50348 +26250 50345 +36994 50342 +41379 50341 +26929 50336 +24464 50328 +35515 50322 +26477 50320 +33955 50315 +14633 50314 +47908 50314 +24218 50313 +36628 50313 +37953 50310 +22924 50309 +41494 50309 +28655 50296 +26881 50289 +28135 50289 +43081 50275 +30810 50272 +30959 50261 +20566 50259 +21592 50255 +22008 50255 +27164 50249 +44127 50245 +49939 50241 +44216 50237 +40996 50230 +19291 50228 +22463 50225 +31295 50220 +47528 50218 +14007 50214 +46106 50210 +31354 50208 +26413 50206 +36569 50204 +28993 50199 +21072 50194 +36701 50191 +22969 50191 +32697 50186 +10915 50183 +28806 50178 +1796 50175 +22150 50168 +28560 50164 +16601 50150 +6975 50149 +6315 50149 +17651 50140 +43268 50139 +37930 50136 +20458 50134 +34154 50133 +27460 50129 +43029 50127 +6728 50123 +18217 50117 +28368 50114 +29835 50107 +44209 50105 +29642 50098 +40947 50092 +38625 50091 +48195 50079 +35259 50077 +29000 50077 +22524 50075 +20402 50071 +43070 50067 +10228 50059 +25474 50054 +22960 50054 +26934 50051 +39538 50048 +14298 50043 +23560 50040 +45773 50032 +29358 50031 +3678 50028 +22029 50028 +25505 50024 +36195 50015 +24757 50015 +28498 50006 +9492 50005 +20812 50005 +49523 49984 +39604 49984 +25985 49971 +14414 49966 +27367 49965 +4850 49956 +50224 49950 +29098 49944 +44038 49938 +46008 49937 +32909 49937 +11871 49933 +29997 49933 +7497 49929 +20647 49911 +15520 49911 +34252 49911 +30408 49909 +49653 49906 +34321 49904 +23313 49903 +37197 49902 +34929 49901 +29974 49898 +16704 49893 +27793 49884 +41777 49884 +9132 49873 +34342 49867 +20473 49864 +15951 49864 +18294 49863 +28009 49861 +19149 49861 +19833 49859 +36272 49857 +17003 49853 +17961 49851 +33307 49843 +2145 49840 +27114 49840 +12303 49838 +19730 49834 +35372 49831 +24641 49823 +24987 49820 +24298 49815 +33331 49815 +28002 49811 +19142 49808 +24650 49805 +43581 49805 +26783 49800 +28685 49797 +43543 49791 +30980 49791 +27499 49784 +46864 49783 +29076 49783 +25885 49779 +33941 49777 +22980 49777 +36665 49775 +21139 49763 +29380 49762 +27735 49761 +46562 49759 +30747 49759 +31555 49758 +45711 49758 +28545 49747 +21738 49746 +44654 49734 +36071 49732 +21632 49723 +39916 49723 +45132 49721 +18527 49720 +47479 49718 +39533 49714 +47246 49714 +18231 49708 +13560 49705 +10684 49703 +28389 49700 +5709 49699 +20916 49692 +43178 49690 +36714 49689 +11365 49683 +48064 49679 +44379 49675 +26332 49674 +27736 49672 +2037 49665 +30261 49665 +47725 49661 +30353 49656 +39567 49653 
+33019 49650 +12418 49640 +27623 49639 +32984 49638 +23378 49635 +26279 49634 +20781 49629 +32724 49624 +49266 49623 +22603 49622 +25386 49617 +46988 49608 +41471 49598 +15140 49597 +31994 49596 +44579 49595 +45264 49589 +20728 49587 +47681 49584 +35223 49584 +28796 49577 +35454 49574 +36540 49572 +34069 49565 +31247 49565 +30367 49565 +21956 49560 +20596 49555 +25330 49548 +39620 49547 +33381 49545 +27625 49542 +24604 49522 +41987 49519 +7279 49518 +8170 49515 +31186 49508 +49769 49496 +28733 49492 +37648 49490 +43590 49484 +34355 49484 +28774 49483 +14435 49476 +4264 49476 +5786 49473 +27317 49473 +36240 49472 +42687 49470 +25783 49463 +42251 49460 +40973 49454 +44466 49448 +25110 49444 +29765 49442 +34613 49442 +30310 49441 +27997 49432 +36054 49431 +20909 49427 +43244 49427 +32520 49414 +30967 49413 +21212 49412 +38748 49408 +34597 49406 +38085 49403 +25856 49402 +46305 49400 +25439 49400 +10122 49386 +46834 49384 +29894 49383 +10341 49374 +13190 49373 +25743 49369 +45614 49367 +31331 49365 +46882 49365 +16994 49363 +48487 49361 +28887 49360 +23674 49356 +43039 49352 +31250 49350 +44135 49348 +36483 49342 +26467 49341 +12324 49341 +23385 49336 +5494 49330 +43974 49327 +24183 49324 +23059 49319 +22808 49316 +3385 49311 +39159 49311 +36197 49310 +24147 49304 +6924 49302 +8930 49301 +33045 49292 +19429 49288 +38591 49285 +25739 49285 +39476 49283 +29723 49269 +25069 49269 +36270 49268 +38376 49265 +42478 49265 +46855 49263 +42070 49262 +13208 49255 +30834 49251 +1784 49245 +33427 49243 +31719 49240 +14346 49240 +15885 49235 +46903 49229 +12428 49225 +35866 49223 +4872 49218 +13363 49213 +35135 49211 +12604 49209 +41391 49206 +34485 49198 +39735 49197 +24063 49190 +3097 49186 +36682 49185 +43144 49184 +19065 49181 +32344 49177 +25744 49174 +22557 49169 +33168 49166 +17482 49164 +32510 49162 +42815 49158 +25986 49154 +36686 49154 +43106 49152 +21045 49150 +36293 49150 +44188 49148 +17946 49128 +44636 49125 +28324 49121 +15783 49119 +32195 49119 +22716 49115 +18351 49113 +37726 49108 +29778 49103 +31070 49101 +16441 49100 +37731 49095 +45094 49089 +21152 49085 +28381 49076 +28068 49076 +41334 49076 +9044 49068 +16200 49064 +36110 49063 +5089 49060 +26006 49059 +24483 49044 +43400 49042 +24582 49038 +47755 49036 +28596 49032 +27843 49027 +48253 49023 +34205 49021 +50209 49013 +19742 49006 +42734 49005 +32600 49004 +39614 49000 +29373 48996 +29604 48993 +19694 48991 +47170 48989 +45759 48988 +28027 48981 +46930 48975 +23660 48974 +19796 48973 +30574 48968 +41716 48956 +42737 48956 +45588 48956 +26580 48950 +38408 48948 +47978 48944 +21201 48944 +36717 48942 +37632 48938 +14709 48933 +36294 48933 +28704 48933 +12212 48926 +29016 48916 +36212 48912 +46854 48910 +35495 48905 +25702 48904 +2867 48903 +36114 48901 +40343 48900 +34715 48895 +35183 48886 +31015 48884 +21123 48884 +16596 48880 +19112 48876 +30587 48871 +25859 48867 +27631 48852 +25791 48847 +29887 48846 +12920 48846 +16620 48844 +44043 48839 +24470 48838 +49132 48837 +38674 48837 +27441 48831 +23094 48828 +17721 48826 +10903 48825 +31274 48824 +22511 48824 +19620 48819 +21892 48813 +47817 48808 +18651 48808 +27045 48804 +19427 48799 +23334 48799 +31261 48798 +17336 48796 +48199 48791 +9787 48786 +37221 48772 +43579 48768 +36351 48764 +36242 48761 +43127 48760 +14253 48757 +35199 48750 +32772 48748 +25922 48745 +28854 48743 +30930 48743 +32219 48728 +28144 48724 +20198 48721 +19753 48720 +32793 48715 +48039 48715 +26664 48713 +25644 48711 +29708 48709 +26110 48708 +13972 48703 +39397 48694 +38914 48689 +19794 48689 +21680 48676 +37116 
48673 +39005 48672 +22073 48657 +31608 48655 +29403 48647 +25242 48646 +33154 48641 +28202 48637 +26535 48633 +35094 48632 +37184 48631 +28952 48630 +35862 48624 +46516 48622 +29091 48620 +33764 48618 +33226 48617 +41220 48613 +4428 48613 +19542 48612 +33541 48610 +38034 48608 +31983 48606 +22057 48597 +22746 48593 +27241 48574 +25533 48572 +4300 48571 +40611 48569 +46395 48566 +18949 48559 +29332 48557 +27630 48555 +37813 48554 +31597 48550 +43274 48548 +25602 48546 +34541 48540 +28025 48538 +24611 48538 +12415 48533 +13262 48531 +25459 48527 +49553 48527 +34874 48527 +46057 48527 +49372 48523 +38866 48509 +24778 48508 +34224 48505 +22437 48495 +45468 48490 +17975 48490 +24256 48482 +29031 48476 +37882 48475 +39163 48473 +40255 48472 +26811 48466 +36450 48465 +1852 48465 +27121 48464 +19367 48463 +19961 48459 +44518 48455 +44655 48454 +40793 48454 +12464 48453 +34384 48452 +34902 48450 +36172 48441 +16339 48441 +29079 48437 +10760 48437 +33469 48435 +27587 48432 +36776 48426 +35208 48425 +19054 48425 +25516 48424 +34078 48422 +41973 48414 +11122 48412 +38173 48400 +4584 48399 +34882 48397 +26277 48391 +34434 48390 +29683 48382 +7752 48377 +45983 48376 +40910 48374 +31848 48373 +39769 48366 +12222 48364 +49752 48364 +23726 48363 +17019 48362 +26366 48359 +37091 48357 +36991 48356 +49851 48341 +22374 48341 +47572 48339 +21309 48338 +23337 48332 +28262 48329 +29246 48326 +49598 48325 +26130 48321 +18690 48315 +38594 48314 +23487 48314 +12573 48312 +31017 48310 +45736 48307 +21273 48305 +8648 48300 +29355 48295 +41089 48293 +7672 48292 +28714 48291 +20661 48276 +34912 48270 +45568 48263 +8671 48262 +44485 48260 +2690 48257 +27912 48256 +45494 48256 +21645 48246 +36559 48242 +46567 48239 +30622 48235 +26557 48229 +44523 48227 +36960 48227 +19867 48223 +10461 48221 +17723 48219 +29549 48218 +44862 48218 +24904 48218 +45142 48216 +23100 48213 +37279 48209 +9887 48209 +46470 48205 +18230 48202 +45511 48202 +48850 48202 +39037 48199 +33119 48197 +24814 48195 +42792 48194 +9042 48189 +39685 48187 +23253 48186 +34276 48182 +43373 48177 +43120 48174 +36193 48173 +5090 48171 +49902 48167 +24894 48161 +36833 48160 +43663 48159 +37248 48150 +28799 48150 +42898 48146 +32656 48145 +39339 48140 +47199 48135 +31068 48133 +35455 48131 +21725 48119 +47916 48119 +41336 48115 +33271 48114 +33977 48108 +35008 48107 +49432 48106 +30383 48106 +24975 48102 +47773 48099 +23506 48097 +33133 48091 +36612 48090 +5306 48090 +36998 48065 +37073 48064 +20206 48062 +11934 48060 +32728 48056 +45425 48054 +16341 48052 +36281 48044 +17363 48035 +35465 48033 +15880 48030 +16357 48025 +21527 48023 +22275 48023 +33502 48015 +13672 48014 +41679 48012 +31986 48012 +28867 48011 +33755 48009 +24776 48006 +8357 48004 +37292 47998 +34746 47994 +37027 47992 +40519 47990 +36279 47980 +6417 47979 +36083 47978 +28456 47977 +31114 47970 +38527 47969 +34567 47968 +39302 47965 +45504 47964 +25600 47960 +32377 47960 +27712 47958 +33424 47957 +10811 47948 +28771 47935 +35262 47924 +26539 47917 +29282 47917 +3643 47915 +39412 47915 +18255 47913 +14750 47910 +38088 47905 +6398 47904 +40470 47903 +45246 47901 +40662 47899 +24504 47899 +41229 47898 +49292 47898 +46160 47896 +35309 47896 +47329 47892 +40453 47890 +26352 47889 +22682 47885 +21262 47885 +39441 47884 +27238 47884 +28378 47879 +14132 47878 +21969 47877 +31533 47876 +33851 47875 +30582 47875 +22342 47874 +15075 47874 +40955 47871 +35332 47870 +27244 47864 +26181 47863 +22832 47858 +43956 47853 +29846 47852 +2697 47844 +20666 47844 +26281 47843 +18268 47843 +9928 47836 +2083 47836 +32350 
47834 +37342 47831 +21315 47829 +27132 47825 +15809 47819 +38987 47819 +34375 47811 +47668 47809 +41836 47806 +25284 47803 +17273 47802 +39862 47800 +16311 47791 +1476 47789 +47853 47786 +39027 47783 +48213 47783 +8012 47782 +33063 47765 +27445 47759 +34849 47745 +41970 47743 +42170 47742 +42876 47742 +1161 47739 +36742 47733 +47151 47729 +13115 47725 +18301 47725 +35272 47723 +27219 47708 +3416 47707 +28177 47699 +38738 47699 +29770 47687 +26465 47686 +38383 47685 +22069 47680 +21456 47679 +14174 47674 +23539 47667 +46247 47665 +34459 47663 +22179 47659 +22449 47658 +42791 47654 +39603 47653 +50169 47652 +26667 47651 +44498 47645 +44868 47642 +28876 47640 +26701 47638 +5109 47637 +19327 47637 +31064 47634 +30573 47633 +35905 47625 +45232 47624 +43616 47621 +28217 47618 +48642 47617 +27086 47617 +42992 47611 +42327 47611 +23688 47609 +26282 47598 +36099 47596 +40852 47596 +43766 47594 +33297 47593 +49466 47591 +17209 47585 +43670 47582 +24748 47581 +32353 47571 +48342 47569 +38757 47561 +32905 47561 +32085 47556 +9250 47542 +35729 47542 +37663 47540 +16112 47538 +33694 47537 +31556 47535 +28277 47531 +25104 47526 +38429 47525 +39074 47524 +40404 47523 +46739 47521 +28940 47519 +27329 47518 +23753 47514 +29041 47510 +31267 47506 +35163 47506 +18150 47505 +33159 47503 +24811 47492 +22148 47488 +25342 47487 +38812 47487 +39283 47486 +25808 47483 +15486 47483 +12808 47482 +30103 47481 +19159 47481 +32572 47477 +30430 47475 +25881 47464 +37486 47459 +27506 47446 +28334 47432 +21561 47418 +18801 47417 +30180 47414 +39383 47411 +25587 47410 +28091 47408 +41626 47405 +47228 47403 +29419 47402 +1480 47400 +30363 47399 +33319 47394 +43558 47393 +27946 47391 +16623 47391 +37749 47390 +31271 47390 +18619 47388 +44003 47374 +21458 47373 +29181 47371 +38119 47362 +31807 47362 +10963 47356 +28060 47355 +46922 47354 +37331 47351 +27577 47345 +32884 47333 +22515 47327 +19853 47322 +19354 47319 +40431 47314 +16275 47311 +5915 47310 +44339 47303 +35489 47299 +43398 47294 +35096 47293 +18799 47293 +46577 47288 +29005 47287 +25534 47286 +27657 47281 +20298 47279 +42299 47272 +27613 47271 +29341 47270 +45723 47268 +38908 47263 +37164 47253 +38260 47252 +1251 47243 +47848 47234 +35817 47231 +28453 47230 +28878 47227 +5065 47223 +20214 47217 +13220 47216 +25649 47214 +38186 47210 +46822 47209 +34693 47207 +17827 47207 +19520 47204 +19756 47202 +24249 47201 +32514 47200 +2844 47198 +49416 47195 +36874 47186 +39962 47184 +37800 47183 +30669 47181 +43100 47180 +25671 47175 +15450 47174 +28888 47160 +34079 47156 +19484 47156 +42274 47154 +6278 47154 +37793 47153 +33816 47153 +31693 47149 +47218 47148 +33858 47146 +40689 47137 +21287 47137 +40105 47133 +30571 47133 +32402 47132 +38009 47131 +24294 47129 +22930 47120 +19707 47115 +34941 47114 +42425 47112 +19498 47111 +28609 47109 +34134 47104 +44056 47100 +19976 47094 +17678 47084 +47562 47083 +15310 47073 +16458 47068 +37228 47064 +23756 47063 +15590 47060 +19355 47056 +42906 47056 +45512 47056 +32529 47052 +11593 47052 +31528 47052 +21713 47051 +28880 47045 +20800 47043 +22291 47033 +40511 47032 +48569 47016 +48060 47011 +43799 47003 +20113 47001 +36579 46998 +36748 46996 +34263 46996 +49520 46994 +38686 46986 +35177 46971 +42967 46970 +14336 46967 +45184 46965 +47413 46965 +34789 46965 +45139 46964 +3065 46962 +23921 46961 +28687 46955 +32654 46950 +44738 46946 +35153 46942 +32761 46937 +17647 46935 +46150 46934 +38761 46931 +35355 46921 +12454 46913 +30876 46903 +3552 46902 +27458 46902 +23277 46901 +10465 46898 +35662 46897 +16643 46895 +41794 46895 +39524 46892 
+42899 46892 +39164 46887 +36101 46879 +11370 46878 +12221 46873 +32293 46870 +29641 46869 +28424 46863 +28309 46861 +13775 46860 +39767 46856 +29343 46855 +34155 46853 +35932 46852 +46875 46850 +50173 46846 +35426 46843 +7615 46840 +42004 46840 +46447 46837 +28786 46834 +37048 46829 +32122 46829 +29280 46827 +1091 46827 +10709 46824 +42571 46820 +30468 46812 +35526 46807 +28807 46786 +6059 46785 +9313 46784 +43652 46779 +39640 46776 +48568 46764 +5957 46763 +25194 46763 +15432 46761 +50186 46760 +26135 46759 +36360 46755 +11019 46751 +30097 46749 +27952 46749 +32808 46748 +3935 46742 +25584 46738 +47377 46725 +26996 46725 +24134 46725 +24709 46720 +23585 46717 +36865 46715 +31910 46710 +16854 46708 +36093 46703 +48853 46698 +19900 46698 +43104 46694 +31349 46692 +41238 46686 +21521 46678 +49412 46678 +44026 46677 +45332 46677 +14406 46675 +25371 46664 +43257 46655 +33656 46655 +46861 46652 +27201 46649 +24944 46646 +29101 46642 +32343 46636 +17999 46634 +21306 46629 +32294 46629 +49104 46628 +23581 46624 +30875 46614 +48794 46612 +45645 46611 +45516 46610 +32396 46608 +20922 46608 +38406 46606 +11321 46605 +49612 46603 +35441 46600 +49190 46592 +9805 46592 +48701 46584 +30968 46582 +30066 46577 +46340 46575 +31377 46575 +35901 46567 +22208 46565 +21232 46562 +19108 46557 +45542 46557 +21194 46551 +35631 46546 +42580 46546 +29619 46542 +40943 46540 +44614 46537 +26957 46535 +32393 46532 +27377 46527 +47219 46524 +26975 46524 +24415 46523 +19680 46517 +40530 46513 +30811 46500 +30264 46491 +37054 46491 +18011 46489 +23250 46486 +21499 46482 +37818 46472 +22032 46469 +19831 46467 +34498 46463 +38833 46459 +12732 46458 +25710 46456 +29433 46452 +31207 46449 +47272 46447 +34569 46444 +33018 46438 +5530 46432 +18040 46428 +39891 46423 +28033 46423 +47353 46423 +33471 46415 +16678 46414 +35941 46414 +34191 46402 +18970 46402 +15183 46398 +17220 46397 +35180 46394 +13665 46383 +23126 46383 +32556 46382 +23087 46378 +47295 46377 +47196 46366 +31043 46364 +8110 46360 +13494 46358 +40698 46356 +40363 46355 +31168 46353 +35182 46351 +11143 46351 +34935 46346 +34122 46340 +27791 46338 +31172 46333 +40702 46330 +6172 46330 +16682 46322 +49597 46319 +23564 46319 +28590 46318 +31894 46316 +32542 46310 +24299 46303 +23214 46302 +6202 46299 +25598 46299 +13350 46299 +30548 46293 +49825 46288 +40062 46287 +34862 46283 +21163 46283 +20637 46283 +47560 46276 +30381 46274 +11599 46272 +27717 46270 +6120 46269 +21242 46269 +20025 46268 +33431 46263 +23447 46262 +36319 46260 +46980 46258 +32806 46258 +15008 46253 +29950 46251 +33570 46249 +14894 46239 +26761 46237 +19270 46234 +32921 46233 +42818 46228 +7671 46225 +37644 46222 +27254 46222 +47984 46221 +32729 46216 +2909 46212 +40657 46210 +29621 46209 +33713 46208 +14106 46206 +26034 46200 +22204 46196 +14265 46195 +11035 46193 +42437 46189 +44302 46189 +49967 46178 +44301 46177 +22633 46176 +40490 46165 +36930 46164 +8320 46159 +18093 46152 +11528 46151 +37026 46151 +35045 46149 +35370 46147 +30168 46146 +4772 46145 +43747 46137 +41130 46132 +34076 46130 +35088 46124 +16505 46123 +37428 46120 +20638 46115 +43007 46112 +43944 46109 +16258 46106 +22567 46098 +36214 46098 +23807 46093 +46972 46091 +9939 46091 +25384 46088 +29283 46087 +37293 46082 +7296 46079 +26707 46079 +36513 46072 +26173 46070 +32794 46069 +37891 46068 +42886 46058 +28328 46053 +44131 46048 +29397 46040 +42232 46039 +40741 46034 +26000 46034 +12205 46032 +37457 46032 +20964 46031 +19252 46030 +26753 46030 +32479 46029 +39078 46024 +25419 46023 +27642 46023 +45827 46017 +42097 46016 +32773 
46015 +24227 46013 +35842 46009 +49300 46004 +32204 46002 +37554 46000 +23589 45991 +45046 45989 +48742 45987 +30355 45987 +34038 45986 +15307 45982 +39784 45980 +39987 45978 +39956 45976 +28388 45976 +22823 45976 +26473 45974 +22024 45969 +48437 45964 +38960 45958 +39716 45952 +31545 45952 +18458 45949 +45344 45949 +33561 45942 +34875 45941 +15861 45939 +32120 45939 +35639 45935 +32769 45930 +26990 45927 +43953 45925 +28954 45923 +38540 45916 +50172 45916 +40033 45912 +31448 45909 +39067 45905 +44769 45904 +29094 45900 +44768 45900 +18638 45900 +47244 45896 +29722 45892 +39960 45887 +46330 45887 +24372 45887 +16761 45865 +28107 45865 +8154 45864 +28031 45863 +36111 45861 +47950 45860 +32894 45859 +13908 45858 +47513 45857 +34151 45856 +27404 45855 +20453 45853 +14214 45848 +20751 45847 +43066 45844 +48901 45838 +20454 45834 +28271 45830 +35988 45827 +17134 45825 +11669 45820 +34243 45820 +47722 45818 +26471 45818 +39131 45806 +28448 45805 +46033 45802 +34316 45800 +20300 45799 +37676 45798 +34195 45795 +7550 45791 +26063 45787 +39420 45776 +30232 45771 +28183 45769 +26641 45765 +44818 45764 +40192 45764 +43176 45763 +36145 45762 +12942 45758 +26463 45756 +45878 45751 +26153 45749 +12192 45748 +37347 45747 +20979 45745 +44846 45740 +24843 45739 +42324 45735 +44220 45734 +28858 45732 +28351 45729 +27151 45728 +35011 45726 +26592 45726 +39235 45724 +27198 45724 +21721 45721 +37975 45715 +29442 45714 +17860 45699 +36383 45699 +23268 45698 +38754 45697 +37900 45696 +48709 45695 +37261 45695 +9926 45687 +45238 45687 +34152 45684 +2397 45683 +26333 45682 +37550 45681 +40966 45675 +31151 45673 +27769 45670 +8687 45664 +30185 45662 +27412 45661 +15787 45653 +44820 45649 +33536 45648 +33900 45647 +38705 45646 +25417 45642 +21767 45642 +22362 45640 +39678 45636 +50220 45628 +35073 45627 +41523 45625 +14065 45624 +24344 45624 +43153 45623 +31749 45622 +44652 45618 +36058 45615 +45413 45615 +39453 45611 +40641 45609 +27126 45601 +40136 45601 +36910 45601 +49628 45598 +32920 45597 +43492 45595 +49323 45589 +18005 45585 +29211 45582 +21762 45581 +49796 45577 +23831 45574 +36308 45571 +27758 45570 +40835 45569 +34486 45557 +37234 45553 +11720 45552 +34885 45548 +31629 45546 +46644 45537 +27253 45531 +26919 45525 +22136 45522 +29924 45521 +31489 45520 +49987 45520 +22564 45510 +29546 45510 +11537 45509 +10415 45508 +19259 45507 +24572 45499 +12304 45497 +40606 45494 +47748 45493 +42329 45492 +33790 45490 +15854 45490 +24170 45487 +45958 45477 +18076 45476 +41740 45473 +23550 45470 +38854 45468 +16183 45461 +41113 45458 +11055 45456 +35940 45455 +36394 45452 +21064 45451 +46544 45449 +9673 45449 +31059 45449 +9548 45448 +36769 45446 +28737 45442 +40307 45440 +23672 45439 +34663 45437 +23162 45434 +38689 45431 +42139 45426 +45200 45424 +40900 45424 +17620 45423 +38878 45422 +47657 45421 +39493 45401 +33182 45401 +19467 45398 +30795 45397 +16260 45391 +32367 45390 +35550 45386 +49341 45385 +40461 45380 +18219 45374 +8638 45371 +26484 45361 +21447 45360 +19941 45353 +10554 45352 +1823 45347 +29369 45345 +24702 45344 +13255 45341 +15098 45339 +29462 45334 +39373 45334 +41529 45333 +36941 45332 +23945 45328 +16010 45325 +50028 45324 +41928 45319 +21396 45318 +27162 45318 +49655 45310 +37222 45308 +26526 45303 +19852 45300 +27742 45296 +33105 45293 +4247 45291 +47410 45289 +37327 45285 +22717 45285 +46958 45282 +41053 45278 +20109 45272 +25514 45268 +6425 45263 +23272 45241 +21856 45239 +27036 45239 +24168 45230 +43939 45229 +15379 45223 +21352 45223 +25903 45223 +26949 45221 +30019 45218 +31130 45216 +41484 
45215 +15125 45210 +32993 45207 +25685 45203 +38048 45200 +16911 45199 +45049 45195 +38932 45191 +9143 45188 +35794 45186 +37743 45183 +42186 45178 +33464 45174 +19172 45170 +17594 45164 +30551 45164 +36288 45164 +17133 45162 +27371 45162 +26093 45154 +48335 45154 +24309 45147 +28747 45147 +31991 45146 +38862 45145 +28978 45145 +29062 45144 +26887 45143 +25591 45143 +8145 45141 +29735 45139 +35906 45138 +23426 45132 +42850 45127 +36098 45122 +22035 45122 +35822 45120 +47573 45116 +26692 45113 +2361 45108 +15878 45107 +48687 45101 +24513 45096 +33270 45094 +22458 45092 +5145 45089 +26704 45087 +38378 45077 +26822 45071 +21515 45070 +43533 45069 +25830 45065 +42753 45063 +6940 45049 +28192 45048 +34683 45045 +32123 45042 +10035 45039 +44486 45035 +5042 45034 +29051 45032 +40859 45030 +7580 45025 +40687 45024 +30506 45013 +28959 45012 +42802 45012 +32816 45007 +22826 44996 +9474 44990 +31955 44989 +33187 44989 +49232 44988 +31202 44986 +9781 44978 +39076 44977 +39308 44974 +34965 44971 +24094 44964 +228 44960 +30549 44958 +23175 44953 +35613 44952 +17727 44938 +29985 44937 +11369 44930 +48604 44924 +22210 44923 +47877 44920 +10698 44909 +25797 44905 +20414 44904 +34030 44904 +27666 44903 +40904 44898 +39652 44897 +42828 44896 +27573 44894 +24288 44891 +49498 44886 +20485 44883 +24158 44882 +14558 44881 +17237 44879 +6293 44877 +22245 44876 +39679 44873 +13921 44870 +42490 44862 +38041 44860 +22911 44854 +46690 44854 +39128 44852 +39826 44851 +27109 44840 +23055 44839 +18875 44838 +19678 44838 +41520 44837 +5135 44836 +35584 44835 +30246 44828 +45222 44827 +24143 44827 +39411 44818 +49901 44817 +30559 44817 +48592 44815 +33963 44813 +23191 44808 +45892 44797 +30621 44795 +32154 44793 +25964 44793 +7949 44790 +34019 44787 +9243 44787 +1420 44786 +36703 44785 +42587 44782 +13131 44771 +46825 44767 +26820 44766 +9745 44766 +2498 44761 +42606 44758 +17817 44754 +18487 44752 +29524 44752 +32160 44750 +38189 44750 +39085 44745 +36587 44742 +10858 44739 +42378 44730 +31142 44728 +42495 44724 +19461 44719 +16806 44718 +15452 44716 +35873 44714 +39778 44711 +28801 44710 +26782 44709 +34480 44705 +33224 44703 +1269 44703 +32435 44702 +16364 44699 +36712 44699 +44913 44696 +11318 44691 +23537 44690 +11787 44689 +34903 44688 +21599 44683 +33896 44682 +27746 44679 +30568 44673 +30627 44670 +26576 44668 +43273 44658 +18107 44654 +29805 44652 +35429 44650 +49668 44648 +15411 44640 +47169 44633 +32581 44631 +37324 44631 +33344 44627 +25245 44625 +20084 44614 +33612 44606 +32167 44606 +19251 44605 +23684 44597 +7316 44597 +33732 44594 +23234 44593 +29086 44588 +12998 44588 +37378 44584 +31428 44584 +35212 44582 +47747 44581 +41630 44577 +26524 44575 +30086 44574 +16578 44571 +26978 44571 +47162 44567 +30844 44566 +4717 44565 +46662 44563 +14002 44556 +21259 44556 +9295 44554 +21441 44549 +33097 44546 +25498 44544 +30982 44543 +7921 44543 +27393 44541 +20914 44540 +27829 44539 +48073 44537 +22981 44534 +41909 44533 +232 44532 +34966 44528 +48028 44524 +31103 44522 +31450 44521 +22154 44521 +3924 44512 +17045 44511 +47830 44509 +43171 44509 +29697 44507 +46505 44502 +32049 44502 +21395 44500 +25307 44497 +45760 44496 +26774 44494 +22763 44486 +38094 44484 +34336 44481 +33852 44479 +46598 44478 +35278 44478 +36915 44475 +23454 44472 +6549 44471 +31648 44466 +22047 44464 +47533 44455 +48127 44454 +36582 44453 +38728 44453 +1184 44453 +23051 44443 +33189 44442 +38590 44435 +48965 44433 +32225 44432 +33418 44428 +41545 44427 +47399 44426 +25129 44425 +22194 44423 +9139 44423 +25929 44422 +32954 44421 +21062 44419 
+48368 44416 +18206 44413 +15650 44407 +37829 44404 +27326 44404 +10441 44401 +26818 44398 +30416 44397 +1955 44397 +42917 44396 +37748 44394 +27688 44394 +38777 44394 +10487 44393 +41007 44390 +42610 44378 +35889 44373 +4607 44372 +15815 44371 +41218 44363 +28815 44362 +41980 44359 +38760 44357 +26243 44355 +20177 44355 +30004 44351 +24088 44348 +10121 44346 +45998 44341 +21282 44341 +48279 44340 +22651 44339 +45817 44335 +12754 44332 +23341 44328 +29262 44327 +26217 44324 +36619 44321 +18944 44321 +40459 44320 +50232 44317 +32235 44315 +28875 44314 +22790 44313 +46547 44313 +38176 44310 +8086 44307 +38962 44305 +40891 44299 +30796 44295 +25882 44295 +28826 44294 +35075 44292 +29183 44291 +33935 44290 +38868 44289 +16429 44288 +14418 44287 +43285 44280 +2249 44278 +24705 44274 +41985 44271 +25760 44270 +32630 44269 +14280 44268 +7390 44265 +41819 44261 +26422 44260 +22229 44256 +43150 44253 +28417 44252 +37437 44249 +46474 44245 +39835 44244 +42546 44238 +35471 44234 +10832 44232 +29643 44231 +25921 44227 +17042 44226 +48184 44221 +21497 44212 +25393 44210 +38883 44207 +27166 44207 +23109 44206 +14650 44198 +46673 44197 +40199 44184 +8866 44184 +22481 44183 +28994 44182 +20221 44181 +20305 44181 +27892 44179 +35173 44176 +23891 44174 +34290 44173 +28265 44166 +24487 44164 +32662 44164 +19822 44164 +43493 44160 +14922 44159 +18415 44158 +31137 44154 +29244 44153 +31706 44152 +28629 44152 +22068 44145 +35158 44145 +20766 44142 +36880 44140 +22413 44135 +23801 44135 +10539 44131 +13782 44130 +23671 44125 +23467 44124 +43842 44112 +35879 44109 +20969 44104 +40581 44101 +37692 44100 +31487 44096 +42575 44095 +35238 44095 +35245 44093 +36710 44091 +30530 44088 +41954 44085 +44795 44078 +13420 44076 +39167 44074 +36183 44073 +24857 44067 +9218 44066 +41719 44065 +12194 44063 +45830 44062 +38316 44057 +40772 44057 +26826 44049 +32481 44036 +37371 44030 +39857 44029 +33964 44028 +38794 44027 +46339 44023 +38951 44019 +37278 44017 +18938 44012 +40790 44012 +14902 44011 +33879 44009 +32114 44002 +19854 44002 +29962 44001 +41566 43997 +27405 43980 +33082 43978 +48695 43976 +19501 43972 +45024 43971 +19129 43971 +33260 43971 +43644 43970 +46218 43963 +29791 43962 +47041 43962 +32270 43961 +11285 43960 +44490 43956 +4276 43950 +39675 43948 +39438 43943 +39041 43943 +37029 43941 +26364 43936 +44752 43934 +29830 43928 +48918 43924 +21254 43920 +38291 43916 +46593 43910 +12009 43909 +30471 43901 +47600 43900 +18372 43897 +35424 43887 +37316 43881 +47096 43881 +38368 43877 +39917 43875 +25735 43874 +31283 43872 +28944 43871 +2338 43871 +25334 43867 +18761 43862 +22401 43856 +22507 43853 +32913 43850 +22172 43849 +46221 43847 +19700 43846 +27191 43844 +42458 43838 +28433 43838 +36263 43835 +24818 43828 +35501 43825 +19394 43824 +38775 43821 +35968 43820 +40568 43802 +38003 43798 +30247 43788 +24414 43787 +21533 43786 +29623 43783 +45129 43777 +37973 43774 +34624 43773 +38181 43771 +18016 43766 +31906 43758 +15546 43750 +31012 43748 +32240 43744 +49565 43741 +37660 43741 +46496 43736 +7944 43735 +36151 43734 +40045 43730 +49676 43728 +39824 43726 +42998 43725 +23163 43724 +16972 43722 +30365 43718 +16302 43716 +35534 43711 +36184 43711 +8102 43710 +33589 43709 +32776 43708 +48271 43698 +35473 43696 +16912 43694 +27064 43694 +32237 43692 +31210 43686 +27937 43684 +49906 43684 +38299 43677 +13583 43672 +15312 43671 +44180 43669 +41249 43662 +34099 43659 +37965 43652 +32222 43650 +48292 43647 +32813 43645 +28097 43641 +16185 43641 +28748 43636 +28541 43636 +25183 43632 +36324 43631 +36051 43631 +5280 43627 
+31229 43626 +46982 43625 +36708 43624 +34221 43622 +33184 43621 +14868 43619 +20784 43609 +36334 43608 +45527 43607 +44200 43601 +15526 43601 +47766 43599 +24027 43598 +33593 43589 +34759 43587 +32018 43585 +19842 43584 +24605 43582 +36622 43577 +32309 43575 +13926 43563 +14421 43561 +12776 43559 +29563 43556 +31804 43553 +32469 43546 +16018 43542 +15559 43541 +36811 43540 +33729 43534 +48660 43533 +30502 43527 +25703 43522 +23457 43519 +5960 43519 +26956 43499 +36266 43496 +26578 43485 +29637 43484 +34740 43484 +24498 43483 +43457 43482 +35222 43480 +45354 43479 +28179 43476 +28765 43469 +40621 43464 +32067 43462 +18701 43462 +39770 43457 +20296 43457 +17841 43455 +18824 43455 +40677 43453 +16698 43452 +37416 43446 +42657 43444 +15092 43443 +41711 43441 +31856 43438 +25088 43436 +33875 43434 +42409 43433 +45789 43431 +16242 43428 +27936 43426 +45262 43425 +16706 43425 +24747 43423 +46301 43422 +48417 43418 +29899 43417 +43350 43416 +22619 43414 +27895 43410 +46427 43408 +17879 43406 +30477 43403 +29956 43397 +50146 43390 +47443 43389 +45235 43385 +34188 43384 +34127 43381 +42438 43381 +38553 43380 +7314 43380 +43715 43380 +30786 43377 +34884 43376 +49235 43376 +44792 43372 +33680 43370 +40856 43364 +41878 43363 +34170 43363 +31165 43361 +17323 43360 +18328 43358 +48257 43358 +27693 43351 +41893 43349 +34020 43343 +33884 43341 +28032 43339 +28403 43335 +29935 43334 +37317 43333 +32956 43329 +39552 43326 +28965 43322 +27773 43320 +34419 43317 +21333 43317 +22388 43311 +29217 43310 +8433 43307 +15597 43302 +18871 43296 +43833 43295 +38132 43288 +27472 43285 +44441 43285 +31182 43282 +44699 43282 +34837 43276 +35254 43274 +12789 43274 +20589 43271 +22141 43268 +26096 43267 +37247 43262 +26263 43259 +22922 43259 +29338 43255 +31669 43255 +44866 43252 +12305 43251 +37871 43250 +28006 43244 +42504 43242 +29482 43235 +48303 43232 +44972 43232 +47977 43222 +25527 43220 +6361 43218 +48614 43217 +18443 43212 +18228 43212 +26067 43212 +45391 43209 +27400 43206 +50058 43205 +26723 43204 +30184 43202 +33805 43197 +38096 43188 +41225 43184 +16890 43181 +31860 43173 +33516 43172 +36062 43170 +17011 43165 +34994 43163 +39673 43155 +39381 43152 +41756 43151 +32311 43150 +34454 43149 +35405 43148 +42841 43147 +45707 43144 +32995 43137 +22411 43133 +27257 43129 +44429 43129 +8333 43128 +31772 43127 +33292 43123 +42749 43121 +47143 43120 +41175 43119 +28003 43117 +38065 43116 +49462 43116 +45691 43112 +22555 43110 +44595 43110 +32721 43109 +45375 43107 +27780 43106 +26481 43105 +45715 43102 +31780 43099 +21684 43096 +43383 43096 +37432 43093 +34397 43093 +47129 43092 +25948 43087 +20791 43084 +48705 43082 +24322 43079 +33177 43076 +40586 43074 +24823 43067 +25674 43063 +7089 43061 +33763 43060 +43175 43053 +42048 43049 +31779 43046 +24339 43044 +17685 43044 +37451 43044 +40221 43043 +11429 43041 +47770 43039 +28680 43034 +18536 43032 +42116 43026 +7248 43023 +23005 43016 +33877 43012 +13789 43012 +38561 43010 +49756 43006 +40958 43006 +23777 43005 +25900 43004 +44268 43004 +28699 43001 +25135 43001 +27827 42996 +31505 42994 +4546 42994 +23875 42990 +50081 42989 +44941 42985 +27921 42970 +17937 42964 +35947 42961 +33619 42959 +44255 42956 +32423 42955 +31588 42954 +33655 42954 +43075 42953 +28168 42952 +37291 42950 +41269 42950 +21701 42949 +24698 42944 +25550 42944 +9401 42937 +37727 42928 +17119 42924 +37493 42919 +42689 42916 +25470 42915 +28384 42914 +46664 42912 +29368 42904 +44243 42900 +29464 42893 +44874 42892 +34087 42891 +26937 42890 +42410 42886 +33802 42883 +9660 42879 +27000 42870 +42694 42869 
+13544 42864 +20636 42857 +22391 42854 +34147 42853 +23588 42852 +26731 42848 +40898 42848 +48256 42844 +18809 42842 +28846 42840 +42142 42835 +38091 42834 +26765 42828 +36641 42826 +29225 42825 +44316 42824 +29138 42819 +44717 42816 +36175 42816 +10155 42813 +26084 42813 +17072 42812 +24111 42810 +31866 42808 +40171 42806 +37625 42804 +16262 42804 +48784 42803 +15054 42801 +29877 42801 +50217 42796 +26460 42794 +49241 42794 +40502 42791 +39511 42791 +6583 42786 +24827 42784 +35456 42783 +29063 42783 +30445 42783 +38225 42779 +19248 42776 +31372 42769 +27798 42769 +38504 42768 +48666 42768 +28963 42766 +42020 42764 +24893 42754 +30079 42754 +27617 42753 +31534 42740 +22389 42738 +35581 42737 +35463 42737 +37367 42734 +36557 42731 +24409 42729 +33720 42727 +36589 42724 +21173 42721 +26832 42714 +47971 42712 +7115 42708 +23120 42706 +20977 42702 +29867 42701 +25713 42697 +40913 42690 +7878 42688 +43770 42687 +25669 42686 +21260 42680 +44630 42677 +37343 42676 +43259 42670 +48516 42669 +37040 42668 +34031 42666 +33068 42665 +23065 42665 +16756 42663 +48222 42657 +23355 42657 +23963 42656 +13441 42655 +49076 42653 +3566 42647 +28909 42644 +32024 42642 +23045 42629 +46699 42624 +42890 42623 +36004 42623 +36566 42622 +23297 42621 +22607 42621 +49852 42618 +29407 42618 +19213 42617 +26607 42616 +13169 42606 +47018 42602 +25993 42602 +44756 42599 +16676 42594 +32692 42591 +37245 42590 +27188 42589 +19071 42588 +37784 42586 +39408 42585 +44554 42585 +25156 42581 +29874 42578 +39536 42578 +44932 42570 +40936 42567 +42774 42565 +36630 42564 +40854 42561 +28055 42556 +31464 42554 +44147 42548 +30011 42548 +32415 42547 +10210 42541 +36355 42541 +44524 42539 +34259 42538 +3922 42538 +30253 42534 +1279 42534 +34940 42533 +27877 42532 +1926 42531 +8633 42531 +36808 42529 +22645 42528 +32234 42528 +38900 42525 +29739 42520 +27462 42509 +29718 42504 +42511 42503 +25402 42497 +36706 42493 +22269 42489 +14452 42483 +33833 42482 +29514 42479 +40982 42469 +10369 42469 +47364 42468 +44731 42466 +22732 42465 +37000 42461 +34202 42455 +28731 42452 +5759 42450 +39676 42448 +44945 42448 +3453 42445 +17193 42445 +24195 42441 +38698 42441 +26115 42436 +15956 42435 +8898 42432 +29789 42431 +18084 42430 +27420 42429 +37565 42429 +45510 42425 +48113 42424 +39631 42424 +29404 42421 +45807 42420 +47741 42418 +42852 42417 +18338 42416 +50073 42414 +28158 42411 +28848 42409 +37100 42409 +3548 42407 +20570 42407 +14423 42403 +35505 42402 +39473 42401 +26571 42398 +30481 42395 +37626 42391 +27591 42389 +33992 42388 +37753 42387 +29605 42385 +24278 42380 +28668 42376 +11440 42372 +29648 42371 +44299 42366 +25578 42360 +23064 42358 +40648 42357 +2803 42357 +39935 42351 +30149 42343 +39080 42343 +22337 42333 +44574 42328 +43074 42324 +26814 42317 +27041 42314 +13323 42314 +29615 42308 +22087 42302 +45928 42297 +48233 42296 +48750 42291 +1427 42285 +28874 42278 +33264 42276 +22886 42272 +36167 42261 +39795 42260 +31581 42246 +29376 42245 +31016 42244 +38682 42241 +16708 42237 +37560 42236 +31474 42234 +47507 42227 +23298 42222 +23188 42222 +30852 42221 +34483 42220 +33233 42214 +17708 42214 +46941 42212 +11984 42209 +30507 42207 +27756 42201 +32553 42198 +37589 42197 +23922 42195 +29608 42194 +33863 42191 +22587 42188 +46714 42184 +23262 42183 +27621 42182 +18940 42179 +48120 42178 +16694 42175 +36160 42173 +33599 42168 +49314 42168 +49157 42166 +26511 42160 +37301 42151 +48138 42149 +22852 42147 +10679 42144 +48066 42143 +32947 42139 +48111 42138 +43573 42133 +26118 42132 +28127 42131 +24137 42130 +7418 42123 +49030 42123 +16091 
42120 +36189 42119 +33752 42116 +43542 42116 +34702 42114 +34225 42112 +38769 42110 +43014 42105 +41439 42099 +21405 42099 +25862 42091 +44615 42090 +15126 42090 +30850 42086 +37862 42079 +31025 42078 +26050 42078 +44312 42074 +24286 42070 +35765 42067 +47508 42067 +41151 42067 +26554 42066 +37935 42065 +14477 42065 +20848 42065 +37299 42064 +43086 42063 +26195 42063 +31296 42061 +41354 42054 +38999 42049 +35268 42048 +20356 42039 +1344 42033 +13847 42032 +26114 42031 +35969 42026 +50062 42024 +36684 42023 +40544 42022 +14220 42021 +24804 42016 +35927 42015 +22023 42012 +17875 42006 +12491 42002 +14382 42000 +22327 41999 +41957 41998 +28827 41988 +40795 41986 +26741 41982 +39725 41979 +29172 41978 +35576 41976 +33417 41974 +36415 41973 +47438 41973 +7167 41970 +8430 41967 +34731 41967 +28742 41967 +49377 41966 +41437 41961 +42843 41960 +13500 41957 +35250 41957 +37099 41956 +33970 41954 +31987 41953 +35154 41952 +42450 41948 +44341 41944 +44564 41944 +47122 41943 +41773 41936 +24761 41935 +41126 41935 +25460 41934 +46009 41933 +43190 41933 +28870 41916 +28964 41912 +30023 41911 +32433 41910 +33546 41906 +41318 41902 +48268 41899 +24549 41899 +46587 41893 +4658 41878 +28946 41877 +38068 41876 +46742 41876 +36939 41875 +30648 41873 +32726 41869 +28637 41867 +31085 41866 +49829 41861 +30124 41849 +4194 41845 +27118 41844 +43261 41842 +44158 41841 +21155 41839 +23362 41833 +29592 41827 +44775 41825 +643 41823 +46703 41822 +30161 41821 +49431 41819 +42085 41812 +35197 41807 +29503 41805 +29300 41804 +43965 41801 +36783 41797 +31692 41794 +38616 41791 +49774 41791 +47181 41788 +45450 41784 +42209 41781 +44992 41780 +16350 41778 +37876 41773 +49101 41773 +26795 41767 +33914 41764 +25803 41764 +34372 41762 +43520 41759 +42140 41752 +37620 41742 +40815 41737 +30990 41734 +28467 41730 +37872 41728 +22190 41728 +49589 41727 +18155 41725 +29933 41722 +46412 41720 +40181 41719 +21269 41714 +37993 41709 +22697 41707 +23237 41706 +37764 41706 +24368 41701 +37267 41700 +43381 41699 +24907 41699 +47547 41699 +44160 41698 +20443 41697 +28511 41696 +49595 41696 +49088 41693 +31430 41690 +27256 41689 +35981 41683 +47444 41682 +33662 41680 +31060 41678 +14204 41678 +35650 41677 +47095 41675 +30550 41673 +36550 41670 +49744 41667 +6736 41667 +10187 41666 +40448 41665 +5239 41661 +35547 41660 +38421 41659 +5234 41658 +46609 41649 +16163 41647 +29691 41646 +29038 41645 +22883 41645 +25096 41640 +44284 41637 +14579 41628 +24574 41625 +11993 41622 +47952 41619 +41189 41618 +40280 41617 +40690 41615 +23220 41606 +35701 41605 +35205 41604 +45058 41604 +14304 41602 +18683 41602 +31468 41601 +26247 41600 +49839 41596 +17353 41592 +38631 41591 +37429 41590 +42047 41590 +38801 41590 +26204 41589 +11268 41586 +28658 41584 +42123 41579 +16074 41575 +27487 41572 +46175 41572 +25445 41564 +36953 41562 +41273 41559 +43851 41558 +29633 41556 +15763 41554 +30517 41553 +5324 41552 +36034 41551 +12110 41544 +31113 41543 +27368 41542 +23685 41538 +48590 41530 +35698 41527 +1403 41526 +25010 41525 +28279 41521 +33687 41517 +35308 41516 +47500 41515 +49634 41515 +24181 41513 +47100 41508 +35672 41507 +31598 41504 +29145 41504 +21699 41491 +35616 41490 +46361 41489 +49297 41482 +14522 41481 +33878 41481 +20016 41478 +38005 41473 +29473 41473 +34228 41470 +29952 41470 +28778 41469 +29655 41467 +7569 41467 +45800 41462 +43499 41461 +27176 41460 +6816 41454 +37128 41454 +30041 41453 +17223 41449 +46157 41449 +40185 41446 +34824 41446 +32927 41445 +31999 41437 +43157 41437 +46515 41434 +15315 41431 +28703 41425 +31056 41421 +40372 41419 
+45318 41414 +32747 41414 +45615 41414 +13290 41400 +25521 41397 +40066 41394 +30144 41393 +48501 41391 +50210 41390 +23619 41390 +18146 41386 +36256 41384 +32910 41382 +40252 41381 +26427 41381 +43902 41378 +28386 41377 +29906 41375 +34592 41371 +19176 41367 +41353 41365 +49739 41364 +26591 41362 +34781 41361 +18897 41349 +41565 41346 +50017 41345 +49994 41342 +32421 41342 +27596 41340 +29187 41337 +36015 41337 +46907 41336 +38019 41334 +32960 41334 +20597 41333 +32946 41318 +31235 41318 +28705 41316 +46073 41308 +41779 41307 +27263 41305 +46264 41303 +32576 41299 +35785 41299 +30674 41299 +24767 41292 +37462 41292 +35784 41287 +27761 41285 +48155 41283 +29517 41282 +47380 41282 +41738 41273 +47579 41271 +37984 41270 +31289 41268 +36213 41268 +27199 41266 +43018 41265 +40960 41262 +49468 41260 +33238 41252 +44763 41246 +13730 41245 +48015 41244 +23800 41239 +36049 41238 +39053 41236 +14945 41236 +43554 41236 +28408 41234 +23815 41220 +15739 41219 +22709 41217 +31685 41217 +27228 41209 +24909 41205 +41860 41197 +31843 41193 +41188 41192 +35525 41189 +15350 41187 +29915 41178 +46946 41176 +24399 41174 +49759 41170 +2992 41169 +14125 41168 +14437 41163 +15365 41162 +31641 41162 +37303 41159 +28069 41159 +37886 41159 +43255 41153 +46013 41152 +17253 41151 +18100 41150 +22749 41149 +28845 41148 +35683 41146 +34797 41137 +35341 41137 +28711 41131 +17561 41129 +29013 41124 +6732 41122 +22985 41112 +37812 41100 +20072 41098 +33826 41097 +7942 41096 +21911 41096 +30963 41094 +40323 41091 +41483 41088 +29206 41086 +41404 41078 +20595 41077 +20827 41075 +35261 41075 +5248 41073 +20509 41072 +5630 41067 +20797 41065 +21437 41060 +24403 41046 +49683 41046 +30649 41046 +42474 41043 +26337 41041 +35951 41041 +37114 41040 +28226 41036 +25675 41034 +34963 41032 +23628 41032 +45261 41025 +38560 41010 +49690 41008 +14988 41003 +37668 41003 +34819 41002 +15995 40999 +47031 40998 +28494 40998 +34173 40992 +34528 40991 +19404 40990 +37489 40987 +26863 40987 +48741 40985 +27205 40985 +44447 40979 +2424 40976 +27481 40973 +12272 40969 +31360 40968 +1594 40962 +34815 40960 +22572 40956 +42549 40955 +30252 40955 +22189 40954 +40018 40952 +24825 40952 +48119 40948 +7144 40943 +40551 40943 +46773 40942 +30125 40941 +25321 40937 +16941 40932 +38413 40932 +46131 40924 +47061 40923 +35097 40917 +25429 40915 +49059 40913 +20724 40912 +31875 40906 +43671 40898 +39271 40887 +23209 40882 +21338 40881 +49536 40878 +42040 40877 +31586 40876 +42618 40876 +39915 40874 +27230 40871 +16232 40868 +9409 40868 +39178 40866 +23700 40866 +39001 40860 +36090 40859 +34150 40858 +28933 40856 +30943 40848 +46896 40845 +24277 40842 +48258 40841 +33631 40835 +3456 40830 +23716 40825 +46450 40824 +31824 40821 +3606 40820 +34006 40819 +27710 40817 +18995 40808 +32752 40805 +40338 40804 +32696 40798 +41597 40785 +40827 40784 +18784 40782 +41837 40780 +29420 40779 +34551 40766 +47342 40762 +48171 40761 +24602 40749 +31193 40749 +40661 40747 +21731 40736 +32213 40734 +18568 40733 +33230 40732 +39047 40730 +18308 40724 +34614 40723 +4440 40723 +43414 40722 +37357 40710 +20133 40708 +43754 40706 +40224 40701 +39262 40700 +34863 40698 +30299 40695 +27969 40689 +10520 40689 +30412 40688 +44146 40684 +46426 40681 +20215 40679 +17477 40678 +38411 40673 +29979 40673 +33657 40671 +44516 40671 +22791 40668 +43188 40661 +17815 40659 +16647 40658 +19752 40656 +23673 40655 +27649 40651 +44473 40650 +28211 40648 +46919 40647 +4949 40647 +23315 40645 +34654 40644 +43726 40642 +28186 40638 +37172 40636 +35629 40634 +22523 40633 +32181 40631 +27017 40630 
+33107 40629 +33822 40626 +20816 40625 +29252 40622 +45687 40622 +38193 40618 +33904 40617 +44096 40612 +22117 40611 +45566 40610 +27143 40603 +33595 40601 +36138 40600 +38645 40597 +41246 40593 +27416 40592 +15433 40592 +26350 40591 +31149 40590 +20199 40586 +8039 40584 +37880 40575 +32170 40570 +21286 40569 +19891 40568 +48967 40564 +28239 40556 +33598 40555 +40183 40555 +43933 40547 +37459 40545 +21426 40544 +26506 40544 +39240 40541 +14137 40540 +43288 40534 +29165 40532 +28304 40532 +24136 40527 +16432 40526 +47997 40522 +37001 40522 +24265 40515 +43972 40513 +24096 40506 +28982 40504 +23347 40500 +26897 40497 +18145 40494 +44076 40493 +35797 40490 +12919 40490 +44285 40488 +44620 40484 +29580 40482 +40771 40479 +25270 40479 +28552 40479 +35303 40477 +23142 40459 +43988 40456 +37546 40454 +36190 40452 +27644 40451 +39088 40450 +40773 40447 +35055 40446 +33294 40441 +12306 40439 +40144 40437 +36820 40431 +13989 40429 +9806 40429 +26754 40428 +27776 40428 +39864 40426 +41994 40425 +29130 40418 +17006 40414 +33229 40413 +40617 40411 +20068 40407 +18377 40403 +30745 40401 +24197 40394 +8954 40393 +21898 40391 +17340 40391 +36660 40386 +45443 40386 +35520 40381 +45678 40381 +37885 40381 +1822 40380 +41720 40379 +20924 40378 +16529 40371 +39281 40369 +40906 40345 +50180 40342 +33256 40339 +38308 40329 +12487 40328 +44715 40323 +40986 40323 +19079 40320 +47014 40317 +36691 40309 +39221 40308 +39160 40308 +43003 40307 +22829 40300 +19230 40298 +19564 40292 +35492 40288 +26208 40285 +36489 40283 +17579 40282 +28263 40279 +45378 40274 +48390 40274 +38639 40273 +46902 40273 +28446 40272 +39273 40268 +31564 40268 +31343 40268 +40000 40266 +25319 40265 +30407 40265 +35934 40258 +40048 40255 +42545 40254 +34297 40253 +1396 40249 +44456 40245 +31089 40245 +12234 40236 +13758 40229 +12747 40228 +27774 40228 +22932 40226 +33550 40225 +31975 40223 +40634 40214 +49882 40211 +49952 40208 +32647 40206 +32566 40194 +26995 40192 +31580 40191 +42837 40182 +40614 40181 +35275 40180 +15206 40172 +29972 40168 +48226 40167 +39002 40165 +25447 40165 +28598 40164 +1094 40157 +31031 40157 +16191 40155 +48434 40151 +32094 40150 +13376 40144 +26411 40140 +48759 40139 +29303 40137 +29909 40137 +36672 40136 +25728 40136 +46629 40132 +37341 40125 +23516 40122 +31278 40122 +34725 40120 +41415 40119 +44771 40116 +13260 40112 +23721 40110 +28551 40108 +34644 40108 +37271 40105 +39796 40101 +27192 40099 +20851 40097 +45030 40097 +28715 40097 +27778 40096 +42613 40096 +31454 40091 +40909 40090 +42892 40089 +47149 40089 +31434 40088 +39663 40082 +35939 40080 +30748 40076 +42119 40074 +41903 40074 +13550 40071 +32497 40070 +9545 40066 +21390 40065 +35856 40063 +15786 40060 +34839 40060 +28930 40058 +29776 40056 +17465 40056 +32699 40055 +19706 40054 +22243 40050 +46190 40050 +17288 40042 +32798 40040 +12149 40040 +33950 40033 +46295 40031 +37140 40026 +47624 40026 +41522 40025 +25960 40024 +48367 40022 +36529 40021 +37986 40020 +26977 40020 +35878 40019 +29947 40013 +28103 40013 +45548 40010 +47492 40003 +42858 40002 +41448 40002 +47042 40001 +47140 39987 +49870 39984 +37607 39979 +31792 39970 +28700 39963 +48218 39958 +33039 39958 +29119 39956 +13564 39956 +43200 39953 +21992 39948 +43314 39941 +19638 39941 +29559 39931 +17823 39928 +49290 39927 +12795 39925 +50045 39923 +49866 39916 +24492 39909 +32933 39906 +42371 39904 +13164 39903 +29268 39899 +6746 39896 +32476 39893 +19936 39893 +48456 39893 +38537 39889 +33237 39888 +40678 39885 +25035 39883 +27948 39877 +8890 39877 +36158 39876 +25059 39864 +19815 39864 +48153 39863 
+[… data file continues: several thousand additional rows of two integers per line ("+<id> <count>"), with the second column sorted in descending order from 39863 down to roughly 26337; the raw rows were collapsed onto a few lines during extraction and are condensed here into this placeholder …]
26337 +42132 26336 +41207 26336 +32128 26335 +133 26334 +27411 26333 +38887 26332 +44832 26329 +46557 26328 +41024 26327 +38568 26323 +49889 26319 +12401 26319 +34450 26317 +33538 26316 +35068 26314 +8818 26313 +26869 26313 +33569 26313 +24446 26312 +7751 26310 +46101 26309 +36905 26308 +48122 26307 +48242 26306 +32746 26305 +41084 26304 +25905 26302 +21781 26301 +49861 26301 +7826 26301 +47696 26295 +20591 26292 +49588 26291 +35804 26291 +45553 26290 +49159 26289 +38151 26288 +44092 26282 +45188 26281 +28915 26280 +15905 26279 +47442 26276 +37151 26274 +43607 26273 +28841 26273 +31026 26272 +44983 26271 +3238 26268 +13635 26268 +22005 26265 +17457 26264 +34339 26264 +23339 26264 +44680 26256 +38714 26255 +45812 26255 +46750 26253 +37580 26252 +41672 26251 +22508 26244 +30390 26238 +46275 26236 +47263 26235 +40715 26233 +12722 26231 +18251 26230 +26550 26227 +42352 26223 +26769 26223 +37513 26223 +26556 26222 +44314 26221 +36373 26219 +45418 26218 +42970 26214 +15151 26211 +13882 26205 +21931 26203 +49631 26202 +41885 26200 +29203 26197 +38784 26196 +31076 26192 +5820 26191 +15838 26190 +15457 26187 +45905 26186 +47436 26186 +47391 26185 +29108 26179 +34140 26178 +34229 26175 +36149 26174 +20415 26166 +31216 26165 +22968 26164 +25396 26162 +24327 26161 +42149 26157 +39340 26154 +39548 26153 +40080 26147 +37939 26146 +35404 26146 +1782 26145 +45499 26144 +33961 26141 +46782 26136 +43936 26135 +11755 26133 +8730 26129 +42679 26127 +46952 26126 +35264 26124 +39761 26118 +15594 26112 +16296 26110 +35126 26109 +40571 26109 +32285 26107 +22246 26107 +30657 26106 +48755 26105 +28399 26103 +40427 26098 +44986 26096 +11178 26096 +40670 26095 +40622 26093 +43330 26092 +49913 26092 +37193 26091 +48152 26089 +11713 26089 +42683 26087 +20375 26087 +31952 26086 +32494 26085 +1580 26085 +47589 26084 +48299 26082 +30410 26078 +20017 26074 +27111 26068 +22138 26067 +45014 26066 +33985 26066 +40016 26066 +40194 26063 +29386 26063 +24125 26062 +8497 26061 +37171 26061 +25328 26060 +42049 26059 +38333 26057 +40086 26057 +1038 26050 +45102 26050 +41021 26048 +43500 26047 +23398 26046 +48916 26044 +42465 26044 +15258 26044 +27814 26043 +49208 26039 +14824 26035 +32361 26032 +46698 26031 +28173 26031 +30162 26030 +48247 26029 +35679 26027 +23768 26023 +48517 26021 +39535 26020 +17969 26019 +33137 26014 +48151 26013 +35586 26012 +28573 26007 +31491 26007 +25055 25998 +40467 25997 +14140 25997 +38603 25994 +42713 25994 +28341 25992 +3637 25991 +19527 25991 +38210 25988 +41193 25988 +41212 25987 +27749 25986 +13940 25984 +46423 25982 +46938 25981 +40567 25980 +49192 25977 +28555 25977 +36460 25977 +37853 25977 +42067 25972 +49596 25963 +18844 25960 +21417 25959 +47964 25959 +39527 25959 +29703 25959 +49455 25959 +49272 25957 +29143 25954 +31471 25952 +19242 25951 +29675 25949 +48077 25949 +19652 25946 +39625 25945 +28155 25945 +45584 25944 +37639 25943 +32997 25940 +27646 25938 +19663 25934 +27898 25933 +9402 25932 +42914 25932 +35074 25930 +34179 25924 +25801 25923 +32056 25923 +49085 25919 +46232 25919 +41283 25916 +25324 25915 +39928 25913 +46069 25909 +35536 25908 +47300 25908 +22743 25907 +30960 25903 +17611 25903 +15431 25903 +27796 25893 +35667 25892 +49673 25891 +48356 25891 +36618 25888 +22224 25886 +32622 25882 +9515 25880 +17071 25878 +20366 25877 +26620 25875 +18540 25874 +40580 25873 +32093 25872 +9165 25870 +22122 25869 +20944 25863 +25132 25863 +44757 25851 +44186 25851 +42801 25850 +43395 25848 +46308 25848 +45471 25846 +41139 25845 +38248 25843 +24958 25841 +22801 25840 +40085 25839 +40335 25834 
+38683 25834 +24670 25833 +15659 25831 +40492 25829 +44511 25827 +27446 25823 +9187 25820 +40398 25819 +36410 25809 +11158 25807 +49393 25805 +34626 25803 +1519 25803 +36392 25802 +15361 25800 +45785 25800 +50243 25799 +49761 25796 +34347 25792 +42415 25792 +17995 25791 +43426 25789 +27202 25787 +33933 25783 +38766 25779 +3890 25779 +34830 25779 +21907 25775 +25603 25774 +25816 25774 +23864 25773 +35904 25770 +44100 25765 +22588 25761 +25548 25754 +34752 25752 +49014 25750 +31334 25749 +11724 25739 +30129 25733 +44006 25733 +36597 25726 +39212 25726 +41498 25723 +34565 25722 +39506 25722 +15159 25722 +34938 25718 +29936 25718 +16204 25718 +9304 25716 +22891 25715 +15946 25714 +11120 25711 +26710 25709 +42810 25708 +39624 25707 +49311 25706 +35150 25705 +39901 25703 +27770 25703 +49385 25702 +45957 25702 +36966 25699 +46322 25691 +35987 25689 +21283 25689 +38652 25683 +41687 25680 +27190 25679 +34201 25677 +44938 25676 +39195 25672 +28283 25670 +31011 25669 +20871 25666 +24062 25666 +26733 25662 +34510 25658 +49609 25654 +41465 25653 +42224 25652 +38077 25652 +40615 25649 +30790 25648 +25883 25647 +38944 25647 +48341 25647 +40694 25646 +38393 25646 +40090 25646 +47284 25644 +29757 25641 +43888 25638 +34896 25638 +28702 25636 +38324 25633 +31222 25632 +39646 25619 +36766 25618 +45469 25614 +42716 25613 +43478 25611 +33156 25608 +47406 25605 +10853 25603 +44813 25597 +35412 25595 +26634 25590 +37196 25584 +20521 25578 +40932 25575 +49636 25572 +40358 25570 +37887 25568 +19781 25568 +37982 25565 +47388 25565 +43574 25560 +31406 25559 +44570 25553 +30200 25551 +30433 25549 +23104 25538 +43871 25537 +49310 25535 +7658 25534 +40336 25532 +41041 25532 +40509 25531 +39054 25527 +35042 25526 +32457 25526 +41617 25523 +48961 25519 +37809 25519 +14684 25518 +33540 25518 +43340 25516 +6566 25516 +47059 25514 +42969 25513 +33965 25513 +39272 25512 +45934 25510 +4012 25505 +41168 25502 +30389 25499 +32842 25498 +35342 25497 +49639 25497 +30841 25496 +29478 25496 +39038 25496 +37710 25490 +41799 25487 +42354 25485 +39376 25483 +50112 25482 +38409 25480 +20303 25479 +22616 25477 +27863 25477 +21999 25476 +41613 25474 +4087 25473 +38239 25473 +48071 25471 +48581 25468 +45048 25467 +42325 25466 +28626 25465 +44548 25463 +8194 25463 +50150 25463 +46439 25462 +37161 25461 +11703 25459 +46591 25458 +19715 25458 +35551 25457 +38311 25453 +47455 25452 +22975 25452 +42457 25450 +49175 25448 +30682 25447 +43555 25445 +34784 25433 +24075 25430 +13327 25429 +8803 25428 +46969 25426 +25864 25421 +12966 25417 +28374 25413 +31527 25411 +17060 25404 +29192 25403 +26659 25401 +18003 25400 +44730 25399 +42083 25395 +32102 25394 +35120 25392 +48962 25392 +42865 25391 +37330 25391 +42387 25389 +41083 25388 +38366 25388 +31298 25387 +46465 25384 +33437 25377 +27739 25376 +48243 25374 +47043 25373 +30999 25373 +49265 25369 +22509 25369 +43924 25365 +14221 25364 +47774 25362 +24597 25358 +31054 25358 +46365 25357 +37230 25355 +47872 25353 +44662 25353 +46586 25352 +36023 25351 +36244 25347 +47912 25345 +32912 25345 +42733 25339 +47902 25337 +45480 25335 +47075 25333 +32334 25330 +49441 25326 +14375 25325 +28971 25324 +33022 25318 +40829 25315 +3030 25314 +24316 25314 +24812 25314 +49723 25312 +36087 25306 +45042 25304 +38450 25303 +41014 25300 +37593 25299 +6321 25297 +15071 25296 +27266 25296 +11808 25295 +43737 25295 +41724 25293 +41639 25291 +31260 25290 +24289 25289 +49830 25289 +41593 25283 +39930 25282 +25197 25279 +18402 25279 +37287 25278 +34126 25274 +41087 25273 +32218 25273 +7904 25271 +37572 25270 +40472 25267 
+43528 25265 +41131 25265 +13009 25264 +40362 25260 +30563 25260 +48579 25252 +42982 25247 +43928 25246 +34637 25245 +49025 25244 +38328 25243 +7470 25240 +37309 25240 +38229 25232 +28998 25232 +50087 25230 +44993 25229 +25886 25225 +46859 25223 +46603 25218 +38922 25218 +41028 25212 +44777 25211 +43694 25211 +29334 25210 +15014 25209 +20682 25209 +7321 25209 +35965 25206 +40923 25203 +11834 25202 +11591 25200 +23066 25199 +8981 25196 +22876 25195 +36681 25194 +26961 25194 +20049 25192 +46797 25192 +21302 25190 +45837 25188 +48280 25187 +39264 25179 +39710 25175 +31953 25174 +48550 25173 +35310 25171 +22083 25171 +36676 25171 +46172 25170 +43773 25170 +3959 25168 +42203 25167 +49069 25166 +33459 25166 +40169 25165 +39591 25161 +32173 25161 +30566 25156 +47869 25156 +21855 25153 +36509 25152 +47842 25150 +36793 25148 +45714 25147 +46469 25140 +23308 25139 +38541 25138 +38322 25135 +12522 25129 +44737 25124 +36094 25124 +36207 25123 +23466 25121 +36705 25118 +42426 25118 +39006 25115 +49391 25114 +22178 25114 +32000 25113 +28380 25112 +25364 25110 +21720 25108 +38611 25107 +46122 25105 +35305 25103 +43114 25097 +12992 25096 +37635 25096 +8143 25089 +45921 25083 +38915 25083 +23351 25083 +35155 25083 +38150 25079 +41786 25078 +30283 25072 +31037 25071 +43354 25069 +42156 25069 +11121 25063 +47855 25062 +47309 25060 +35907 25057 +30752 25056 +42076 25056 +10072 25055 +25100 25054 +24016 25053 +35267 25053 +48418 25052 +46951 25051 +13814 25050 +46487 25048 +12502 25048 +46040 25048 +25975 25047 +42421 25046 +38446 25045 +8327 25044 +26595 25043 +25045 25040 +24074 25040 +19731 25037 +48606 25031 +38163 25029 +31418 25028 +37145 25027 +36536 25027 +10394 25023 +20679 25023 +24580 25021 +25252 25017 +33206 25017 +32079 25017 +42765 25016 +34980 25016 +39056 25015 +8323 25014 +49897 25012 +44103 25009 +48315 25006 +48149 25005 +49564 25003 +38259 25000 +49805 24999 +47716 24998 +41638 24998 +46029 24994 +33573 24994 +32140 24991 +42925 24990 +33646 24989 +36108 24989 +19128 24988 +18089 24988 +38212 24986 +29991 24984 +14767 24984 +38997 24980 +40540 24979 +38712 24976 +49483 24975 +33580 24974 +49593 24974 +33123 24968 +24151 24966 +47142 24966 +41864 24966 +35709 24963 +47362 24962 +11683 24961 +33342 24959 +48981 24957 +38076 24955 +13095 24953 +33548 24952 +39677 24951 +19608 24949 +38329 24949 +37008 24949 +26222 24948 +47487 24945 +29647 24944 +18096 24940 +23549 24939 +28690 24939 +36080 24938 +17094 24937 +33958 24937 +27186 24932 +41290 24929 +45186 24928 +27106 24925 +46943 24924 +25774 24919 +48990 24918 +33525 24917 +45110 24914 +32436 24913 +21038 24911 +33869 24911 +46594 24910 +40314 24910 +38506 24908 +17593 24908 +14519 24908 +39497 24906 +27034 24906 +33827 24904 +15501 24904 +23129 24903 +20966 24903 +28191 24902 +45741 24901 +49629 24901 +47481 24901 +21912 24900 +41121 24899 +38171 24898 +18123 24893 +36898 24893 +44491 24892 +46592 24891 +27037 24891 +45245 24888 +44496 24887 +35993 24885 +32908 24885 +14374 24884 +43576 24878 +28693 24876 +49619 24875 +47291 24874 +34620 24870 +37098 24867 +43281 24866 +42294 24864 +16549 24861 +46734 24855 +49154 24854 +18318 24854 +45420 24854 +40488 24850 +25755 24846 +38859 24845 +46087 24832 +23567 24829 +39318 24829 +4363 24828 +40862 24826 +39595 24825 +49380 24816 +48921 24812 +38089 24809 +32158 24808 +31384 24804 +41890 24801 +45780 24801 +47007 24794 +22433 24793 +13046 24793 +38524 24792 +38054 24792 +10259 24789 +10149 24784 +39416 24783 +45320 24783 +17570 24777 +28281 24775 +18829 24774 +46291 24773 +34403 24771 +36366 24768 
+20012 24767 +43250 24766 +14829 24765 +21370 24762 +31115 24759 +5470 24758 +36864 24755 +38483 24753 +36306 24751 +27314 24750 +33788 24749 +42662 24749 +48348 24748 +47843 24745 +15158 24743 +44689 24743 +10534 24734 +48766 24734 +26959 24732 +41285 24732 +20563 24732 +34559 24728 +47851 24722 +47221 24722 +43161 24720 +18186 24713 +44201 24711 +29438 24701 +2273 24701 +5790 24699 +30443 24696 +49440 24695 +35324 24690 +39468 24689 +49105 24689 +30021 24687 +44951 24685 +10818 24684 +15067 24682 +48651 24682 +42526 24680 +31407 24680 +28045 24679 +48135 24678 +38721 24677 +12276 24677 +47462 24676 +37833 24672 +35247 24666 +42505 24666 +43370 24663 +44841 24660 +35708 24660 +48025 24659 +42446 24656 +24508 24656 +46529 24655 +11235 24655 +39064 24649 +37229 24648 +44089 24648 +15480 24643 +36627 24642 +45731 24641 +24148 24641 +24044 24636 +30671 24634 +38657 24632 +45141 24630 +45629 24628 +39744 24626 +34817 24623 +43603 24623 +41180 24621 +45423 24620 +48430 24619 +30398 24619 +47243 24617 +27808 24616 +19151 24614 +42108 24610 +42137 24604 +43963 24603 +6274 24601 +46273 24600 +33215 24599 +29774 24597 +44088 24597 +30964 24597 +45002 24595 +34625 24595 +29977 24591 +36480 24589 +47390 24588 +35699 24585 +39837 24583 +24467 24583 +47780 24581 +38084 24577 +43486 24576 +14311 24574 +47337 24573 +47352 24572 +29969 24571 +41544 24567 +50147 24554 +37102 24551 +27093 24549 +32321 24549 +37570 24548 +41755 24548 +20719 24546 +35740 24545 +4085 24542 +49599 24541 +20639 24537 +24204 24535 +9205 24529 +34490 24525 +11928 24520 +32226 24517 +42707 24513 +33669 24512 +45578 24509 +27569 24507 +40383 24506 +29810 24506 +42315 24505 +31150 24503 +46042 24503 +37251 24500 +46674 24494 +43658 24494 +41061 24491 +35649 24490 +43151 24490 +24432 24488 +34370 24487 +40791 24486 +39686 24485 +23802 24482 +11964 24482 +29603 24481 +36144 24481 +28770 24478 +39009 24474 +32627 24472 +44514 24469 +47153 24469 +24629 24468 +33088 24468 +33192 24466 +44526 24463 +19879 24462 +29185 24461 +49246 24457 +32062 24452 +30539 24448 +36657 24447 +39158 24445 +40286 24444 +47833 24444 +40812 24444 +33938 24442 +43166 24441 +12626 24441 +48024 24439 +29408 24438 +28151 24430 +28124 24427 +49848 24426 +47208 24421 +7654 24421 +48957 24419 +46282 24419 +33317 24416 +7499 24416 +9291 24410 +37614 24408 +41472 24408 +26714 24408 +1968 24407 +29839 24407 +8921 24406 +46462 24405 +49932 24404 +26797 24403 +33227 24402 +29346 24402 +37466 24402 +27750 24397 +36777 24390 +38172 24388 +36551 24385 +27825 24382 +46580 24379 +45874 24379 +40464 24377 +2602 24376 +38675 24373 +32427 24373 +45185 24371 +30214 24369 +16935 24368 +21321 24365 +49447 24363 +43122 24359 +44022 24359 +49975 24358 +22155 24357 +44958 24353 +36629 24352 +16877 24351 +28390 24350 +45956 24348 +38875 24348 +44631 24340 +43935 24339 +26151 24338 +37542 24335 +41258 24333 +29501 24333 +49919 24328 +36397 24324 +10186 24323 +42744 24318 +22812 24314 +19677 24313 +24631 24313 +19466 24312 +48474 24311 +1284 24310 +31882 24310 +37076 24309 +44966 24306 +29400 24302 +49968 24301 +37068 24301 +38974 24299 +44695 24298 +28343 24294 +35615 24287 +164 24283 +27380 24283 +20507 24282 +49956 24281 +46098 24279 +45607 24279 +34310 24278 +34287 24277 +45343 24277 +34535 24276 +32277 24273 +49895 24271 +43224 24268 +30643 24267 +39682 24266 +40206 24263 +6525 24261 +29506 24260 +40787 24258 +40305 24256 +44979 24255 +38232 24253 +45000 24250 +21597 24249 +30152 24242 +46899 24242 +48244 24241 +30028 24240 +33971 24240 +35970 24239 +48225 24239 +16733 24235 +42651 
24235 +32217 24235 +44789 24230 +23640 24229 +29173 24228 +36680 24228 +28722 24218 +18900 24215 +38746 24215 +45478 24215 +27784 24214 +40609 24213 +44681 24205 +43677 24201 +36979 24194 +37992 24194 +39100 24193 +29616 24193 +5880 24189 +44065 24187 +42364 24186 +35403 24184 +27514 24183 +41959 24181 +43138 24177 +6463 24174 +27871 24174 +24899 24174 +45976 24172 +43885 24167 +46074 24167 +47709 24165 +36393 24160 +37169 24156 +31964 24155 +35846 24155 +32562 24153 +21436 24148 +41515 24147 +3593 24147 +31304 24146 +36749 24140 +22845 24138 +33698 24136 +26586 24134 +38909 24134 +49191 24132 +37002 24132 +9531 24128 +45969 24121 +34407 24119 +30546 24118 +47378 24117 +46164 24115 +41132 24109 +45355 24108 +39721 24108 +23779 24104 +42227 24104 +36247 24102 +33220 24101 +39380 24100 +43957 24098 +50127 24097 +19946 24096 +16804 24096 +24809 24096 +8053 24096 +30578 24095 +21179 24095 +37528 24095 +48412 24091 +45016 24089 +22702 24087 +15580 24086 +45477 24086 +25666 24084 +38595 24083 +47840 24080 +48996 24075 +41797 24073 +12757 24070 +48083 24066 +23279 24057 +36760 24056 +4616 24056 +38873 24054 +33981 24050 +40076 24049 +46628 24047 +44822 24047 +46641 24043 +35602 24042 +20980 24041 +42363 24038 +38380 24037 +5561 24037 +39706 24035 +17839 24031 +48787 24029 +5918 24029 +23317 24027 +23495 24027 +1289 24026 +37469 24026 +34583 24023 +23449 24020 +47753 24016 +36570 24013 +36323 24012 +43329 24012 +33102 24009 +44530 24009 +41526 24007 +46323 24007 +22293 24006 +30484 24002 +22214 24002 +34467 24000 +32139 23999 +36434 23999 +31035 23998 +46794 23995 +25492 23994 +22764 23993 +46436 23988 +48036 23982 +27550 23977 +43220 23970 +29457 23965 +12337 23961 +35638 23960 +18068 23958 +29601 23957 +26750 23957 +41367 23955 +37396 23955 +35010 23955 +40739 23954 +48793 23954 +47923 23954 +19400 23946 +23582 23946 +40569 23945 +47953 23945 +37282 23945 +42937 23943 +36298 23942 +49048 23942 +33832 23940 +11604 23939 +45460 23938 +24667 23933 +41656 23931 +45660 23929 +49745 23929 +48196 23928 +49163 23928 +28605 23923 +25189 23919 +21924 23918 +18124 23917 +38339 23917 +43836 23913 +28029 23913 +44093 23913 +30377 23912 +19913 23911 +14527 23905 +13581 23903 +49741 23903 +48643 23902 +38431 23902 +43495 23899 +26679 23897 +49854 23893 +49941 23893 +38989 23892 +47906 23888 +42570 23886 +37682 23885 +25265 23883 +33302 23880 +48055 23878 +5244 23876 +31072 23872 +38976 23871 +42319 23870 +4118 23869 +16795 23867 +47740 23867 +18736 23864 +49904 23862 +21727 23862 +26468 23855 +38547 23854 +47673 23853 +34822 23852 +163 23851 +45176 23849 +39882 23848 +44664 23848 +26255 23847 +18305 23840 +37730 23836 +38525 23832 +48710 23831 +31162 23827 +37998 23821 +31835 23819 +48255 23817 +47944 23817 +26141 23811 +42648 23811 +37431 23810 +35286 23807 +33872 23807 +40125 23807 +45564 23806 +33921 23803 +26650 23802 +46601 23801 +40103 23801 +33665 23800 +43759 23799 +16469 23797 +36907 23796 +41029 23793 +29458 23791 +38107 23791 +21659 23791 +21238 23786 +47873 23785 +40758 23777 +46138 23775 +45326 23775 +31497 23774 +44206 23773 +46396 23771 +49378 23769 +39472 23766 +12124 23764 +8349 23761 +33079 23761 +50117 23759 +26713 23756 +15189 23755 +43788 23752 +41174 23751 +22177 23749 +35978 23748 +29422 23745 +39215 23745 +25962 23744 +29258 23744 +49566 23743 +13866 23740 +23450 23738 +45543 23731 +42305 23730 +44865 23729 +16735 23729 +41564 23728 +38737 23727 +45679 23726 +43469 23721 +42584 23719 +33610 23717 +26404 23712 +40437 23711 +22820 23711 +36462 23710 +48459 23709 +32953 23709 +15576 23705 
+16140 23704 +39256 23701 +49225 23699 +33819 23698 +49716 23695 +14163 23694 +5546 23694 +40600 23693 +46324 23686 +29922 23685 +50229 23684 +42795 23682 +38147 23681 +49992 23678 +40887 23673 +4972 23666 +7568 23664 +39058 23663 +41882 23662 +49077 23661 +35092 23661 +31466 23660 +38871 23658 +32583 23658 +23899 23658 +20649 23656 +32672 23655 +49695 23653 +45727 23652 +22090 23650 +43743 23649 +39305 23649 +32703 23646 +40022 23644 +39549 23643 +48675 23643 +39609 23642 +33969 23640 +43149 23638 +39623 23638 +44263 23637 +10292 23633 +14117 23632 +44002 23629 +34085 23624 +44290 23622 +34728 23620 +22802 23616 +27603 23615 +43538 23614 +42206 23610 +42051 23610 +46731 23606 +42537 23605 +45289 23604 +20207 23604 +40198 23599 +39139 23595 +43831 23593 +48112 23591 +40732 23590 +44027 23590 +12448 23589 +38539 23589 +24113 23587 +11097 23583 +50082 23581 +26175 23580 +26295 23579 +36330 23575 +23925 23573 +47132 23570 +47674 23570 +48898 23570 +42155 23570 +3882 23569 +43147 23569 +35066 23568 +49148 23568 +36052 23567 +40660 23567 +10956 23566 +48862 23564 +28688 23564 +37923 23564 +49124 23563 +43711 23562 +16554 23561 +36357 23557 +20530 23551 +47687 23551 +35716 23545 +43964 23545 +39773 23538 +42489 23534 +35910 23533 +42582 23532 +46983 23531 +43980 23530 +34645 23529 +38249 23529 +40354 23524 +32904 23520 +37132 23519 +41081 23517 +20470 23515 +37622 23514 +37071 23510 +44471 23509 +26417 23508 +19080 23508 +47398 23507 +47836 23505 +40321 23495 +48275 23493 +38440 23489 +9186 23489 +44915 23489 +44682 23483 +37225 23476 +33042 23474 +31191 23474 +28081 23472 +41859 23466 +30746 23465 +12866 23465 +32535 23465 +13128 23464 +30106 23462 +19881 23458 +39003 23457 +34131 23448 +27500 23447 +38706 23447 +16595 23444 +35848 23442 +32663 23439 +13283 23437 +43984 23436 +43951 23434 +12610 23433 +48903 23430 +41983 23428 +16837 23423 +20896 23423 +20926 23420 +6830 23419 +38275 23415 +40882 23413 +28435 23413 +30764 23413 +34376 23412 +44551 23412 +39119 23412 +36024 23411 +43011 23410 +43218 23400 +17821 23400 +12814 23397 +43918 23394 +9184 23392 +39252 23389 +38224 23385 +30193 23384 +8038 23383 +46789 23377 +50035 23376 +35085 23374 +39382 23373 +46099 23372 +32477 23365 +15022 23362 +18923 23361 +31823 23360 +38159 23360 +45158 23356 +41634 23356 +28708 23353 +34717 23351 +36891 23350 +24102 23350 +49951 23349 +35366 23344 +43482 23342 +22938 23341 +3928 23337 +42590 23337 +24518 23337 +36914 23334 +10567 23333 +41412 23333 +37308 23332 +11360 23331 +17320 23331 +42608 23330 +43334 23326 +23986 23320 +12975 23320 +49623 23319 +31847 23318 +43234 23314 +39108 23313 +42272 23313 +15540 23312 +26087 23306 +34917 23306 +40893 23301 +41303 23297 +38205 23293 +37364 23287 +30488 23287 +49392 23286 +5058 23282 +38473 23280 +2572 23280 +40455 23280 +24383 23279 +19056 23273 +17015 23272 +37474 23272 +28812 23271 +37249 23269 +45428 23269 +32320 23268 +26789 23265 +8247 23264 +45514 23263 +41447 23262 +46443 23260 +44465 23260 +11953 23259 +47230 23255 +41632 23255 +28260 23253 +41619 23252 +47829 23249 +48389 23249 +37703 23247 +47446 23246 +45244 23246 +47234 23245 +37020 23243 +29629 23243 +26121 23238 +12207 23230 +42297 23228 +26499 23226 +24206 23224 +46844 23224 +36488 23222 +19492 23222 +40740 23220 +45806 23219 +47669 23219 +39997 23213 +29022 23211 +42762 23210 +17116 23206 +11929 23204 +49433 23201 +35697 23199 +18960 23198 +47452 23196 +37021 23193 +42661 23193 +26288 23193 +37515 23185 +38733 23185 +48827 23185 +37825 23182 +10208 23181 +17802 23178 +24393 23178 +48943 23174 
+38346 23164 +35990 23163 +45095 23153 +6328 23153 +45317 23152 +34420 23152 +37603 23149 +42936 23147 +38569 23145 +44816 23143 +38575 23142 +42611 23141 +28989 23141 +5130 23141 +13382 23136 +45822 23135 +38402 23134 +45799 23133 +21705 23133 +27595 23130 +27959 23128 +40047 23121 +49317 23118 +47626 23117 +36931 23116 +43921 23115 +33241 23115 +41677 23115 +39409 23114 +42013 23114 +47938 23113 +37153 23113 +46416 23112 +42989 23110 +27280 23109 +38741 23106 +33759 23100 +37823 23100 +39328 23098 +11051 23096 +24637 23091 +36809 23090 +36913 23089 +34578 23089 +46318 23087 +39352 23084 +29688 23080 +47134 23076 +22037 23074 +22914 23073 +26523 23072 +43096 23072 +41775 23072 +26955 23065 +28391 23064 +36859 23064 +40821 23063 +46755 23062 +46565 23060 +43407 23059 +27850 23053 +48520 23051 +162 23050 +32863 23047 +30273 23046 +32126 23044 +5418 23043 +19005 23042 +38022 23041 +41167 23040 +48197 23034 +49846 23033 +30677 23029 +47049 23029 +30816 23029 +36996 23028 +31234 23025 +47910 23022 +41322 23022 +43187 23022 +43132 23018 +35394 23017 +45207 23014 +25456 23013 +13781 23011 +36974 23008 +28675 23008 +27740 23008 +6815 23007 +49473 23006 +22628 23005 +36505 23002 +35549 22999 +48154 22998 +30085 22997 +34776 22996 +47803 22995 +38313 22985 +35467 22983 +47627 22977 +24095 22973 +45657 22971 +40965 22969 +19873 22965 +46408 22962 +37869 22960 +48934 22957 +23433 22957 +45604 22956 +36951 22949 +27413 22948 +29693 22948 +46338 22945 +43366 22944 +23441 22935 +47057 22933 +45631 22931 +39667 22930 +38254 22929 +38472 22927 +35623 22926 +32962 22925 +48577 22922 +26699 22919 +24547 22919 +45191 22917 +39466 22916 +48461 22916 +47047 22916 +45621 22913 +38533 22910 +25066 22910 +47104 22909 +42279 22907 +34427 22906 +29978 22903 +27226 22902 +34251 22894 +11456 22893 +43072 22887 +32725 22882 +35962 22879 +39320 22879 +9282 22875 +8186 22873 +49867 22871 +45534 22868 +35391 22868 +37895 22866 +3209 22864 +46493 22861 +39448 22861 +18745 22858 +41785 22856 +25528 22854 +31705 22853 +33616 22852 +39518 22852 +43156 22849 +43705 22847 +32488 22847 +42449 22841 +38959 22839 +27294 22838 +31218 22837 +47045 22835 +34765 22825 +48542 22824 +43539 22822 +22941 22821 +47112 22819 +40881 22816 +44457 22816 +31452 22815 +31516 22812 +33962 22812 +49820 22811 +34343 22810 +41923 22806 +38831 22806 +36678 22805 +40328 22805 +34416 22805 +45001 22805 +10563 22804 +34574 22804 +39799 22803 +41338 22803 +39545 22802 +37060 22801 +40681 22801 +39495 22801 +25262 22800 +15620 22799 +45974 22799 +22398 22796 +49721 22795 +21464 22789 +46671 22789 +40141 22788 +24717 22781 +48913 22781 +38666 22780 +41649 22776 +48883 22773 +12334 22773 +36729 22773 +33887 22772 +48426 22767 +33697 22767 +48811 22767 +28407 22765 +25257 22764 +44455 22762 +35876 22761 +25184 22761 +46831 22759 +47194 22755 +23569 22753 +40408 22751 +49914 22746 +37737 22740 +29152 22739 +37618 22739 +33808 22737 +30296 22735 +20021 22735 +29910 22733 +30610 22733 +36734 22733 +31751 22731 +45331 22731 +4779 22722 +37711 22720 +41569 22719 +42872 22716 +32082 22709 +41589 22708 +50007 22705 +41093 22702 +37864 22701 +32322 22697 +48817 22697 +6346 22692 +46735 22690 +36723 22684 +41612 22684 +50193 22684 +33104 22684 +15358 22683 +37831 22679 +39563 22679 +46642 22678 +35171 22677 +44361 22670 +33791 22666 +31024 22665 +49326 22665 +39610 22664 +50078 22661 +38967 22653 +40554 22652 +30631 22644 +35369 22640 +48868 22633 +45650 22633 +35885 22632 +39101 22631 +47883 22630 +39709 22629 +34153 22627 +46186 22625 +20120 22623 +30731 22622 
+50208 22621 +43485 22621 +41491 22621 +44582 22620 +27427 22617 +37857 22615 +34439 22613 +32060 22610 +40819 22606 +12339 22601 +36699 22599 +33700 22599 +46376 22599 +43570 22598 +37576 22594 +8586 22594 +36785 22577 +2981 22577 +24061 22576 +37893 22571 +42725 22571 +27543 22568 +50105 22568 +49453 22567 +42794 22566 +42050 22561 +19697 22561 +46241 22560 +39985 22558 +11716 22558 +47449 22553 +46199 22552 +46094 22552 +43565 22552 +43337 22551 +22806 22551 +42081 22550 +35291 22549 +40231 22547 +37972 22545 +16344 22543 +24675 22541 +40652 22541 +39512 22538 +45068 22535 +47102 22534 +40149 22534 +27964 22534 +35562 22533 +49177 22527 +41190 22526 +32263 22526 +46446 22525 +30342 22524 +50023 22522 +48345 22522 +49079 22522 +35700 22518 +48559 22518 +33924 22517 +39925 22517 +34185 22516 +37856 22516 +47372 22515 +48324 22514 +48513 22512 +17077 22512 +13654 22511 +47314 22510 +47480 22509 +48872 22507 +31242 22507 +46350 22506 +36260 22502 +36088 22501 +40861 22499 +36561 22496 +41453 22496 +48950 22492 +36118 22485 +33246 22484 +36048 22483 +48683 22483 +35475 22481 +21263 22479 +43101 22477 +21760 22474 +23286 22473 +24244 22471 +39093 22470 +45070 22469 +35544 22467 +25261 22466 +46309 22464 +38422 22462 +16541 22457 +39779 22457 +21595 22455 +45019 22449 +34961 22447 +46001 22445 +49920 22443 +47302 22441 +42069 22434 +37028 22434 +15428 22434 +47356 22433 +18946 22432 +15390 22429 +2976 22426 +50046 22426 +44784 22425 +47684 22423 +48079 22421 +41172 22419 +46141 22419 +8885 22417 +16023 22416 +59 22415 +48419 22413 +48792 22412 +42122 22410 +46682 22409 +16515 22408 +21480 22407 +18332 22405 +9542 22404 +28764 22404 +46389 22395 +42516 22395 +36368 22395 +4413 22392 +5712 22390 +33932 22388 +14673 22384 +46082 22384 +44550 22383 +42398 22380 +22443 22379 +30374 22378 +44641 22377 +38298 22373 +39174 22371 +23607 22368 +35739 22367 +40956 22365 +22539 22364 +30908 22356 +47599 22355 +48370 22354 +42846 22354 +42043 22351 +27489 22348 +32146 22348 +47355 22348 +42163 22347 +48359 22347 +43355 22346 +18588 22345 +26625 22344 +47159 22340 +25954 22339 +32617 22339 +42666 22338 +7143 22338 +13900 22334 +46804 22334 +50021 22333 +34346 22331 +23637 22330 +47067 22330 +30117 22327 +16367 22326 +31144 22321 +40619 22320 +49624 22318 +7917 22316 +32676 22315 +42330 22313 +20498 22308 +36801 22305 +21582 22305 +29680 22300 +44074 22300 +44228 22300 +30206 22297 +16297 22296 +30077 22296 +21706 22292 +40749 22291 +20686 22288 +2914 22286 +41425 22284 +35012 22282 +20737 22281 +41734 22279 +5138 22278 +28860 22265 +44382 22261 +44665 22261 +31992 22259 +50197 22259 +40985 22258 +22104 22256 +47679 22255 +15095 22253 +42061 22252 +26644 22249 +45029 22249 +19797 22247 +49799 22240 +47475 22240 +49801 22239 +39299 22233 +34812 22232 +32867 22230 +38986 22225 +30243 22224 +46836 22224 +42487 22220 +32538 22219 +42111 22218 +47459 22213 +39018 22210 +37334 22209 +10525 22208 +22438 22203 +22006 22202 +30314 22200 +20041 22199 +47324 22198 +48427 22198 +29162 22192 +34907 22189 +3045 22187 +20099 22186 +38371 22183 +40190 22177 +29783 22177 +42695 22174 +41144 22173 +39483 22172 +24358 22171 +47354 22169 +42399 22166 +49647 22160 +32005 22156 +49456 22154 +34053 22153 +33076 22150 +40438 22143 +31206 22140 +14957 22140 +22610 22138 +38549 22134 +48005 22133 +36203 22132 +47557 22131 +27816 22130 +34463 22128 +32751 22127 +49093 22127 +45279 22124 +38374 22123 +33602 22122 +42714 22121 +40499 22119 +50012 22114 +21914 22114 +39949 22114 +47363 22113 +39743 22113 +40163 22112 +40333 22107 +37272 
22106 +40762 22104 +34330 22102 +33310 22101 +38356 22100 +45984 22100 +27483 22097 +12756 22095 +35282 22093 +30277 22092 +18238 22092 +13185 22090 +34158 22090 +29581 22090 +1609 22088 +30924 22087 +18793 22086 +49754 22085 +28645 22083 +22899 22080 +37615 22077 +44230 22075 +37636 22073 +31754 22072 +9063 22070 +24867 22067 +22939 22061 +29865 22061 +30846 22057 +46255 22057 +48624 22053 +19011 22052 +48284 22051 +48480 22046 +36530 22044 +48248 22043 +41165 22040 +30694 22039 +42366 22037 +45382 22034 +22404 22032 +11690 22031 +45787 22030 +27021 22021 +45737 22017 +32516 22017 +24326 22016 +42144 22012 +24132 22007 +25174 22007 +35642 22004 +29059 22003 +42947 22003 +13518 22003 +39243 22002 +25392 22000 +49172 21998 +34950 21998 +44106 21997 +42107 21997 +45757 21997 +30863 21997 +36956 21995 +41289 21995 +26178 21994 +47850 21991 +12715 21991 +35973 21987 +15879 21985 +37794 21984 +31800 21980 +3040 21979 +41211 21979 +25545 21979 +45173 21979 +25835 21978 +28911 21977 +24449 21977 +48201 21972 +45314 21970 +6781 21969 +32165 21968 +38903 21961 +48332 21959 +37567 21958 +33794 21957 +17377 21954 +37414 21954 +49990 21950 +45792 21945 +34096 21942 +48632 21941 +29266 21939 +47241 21936 +37735 21934 +31982 21933 +49306 21933 +16557 21932 +37338 21930 +39544 21928 +49329 21926 +19154 21925 +2510 21924 +31891 21922 +32648 21922 +29249 21921 +45971 21918 +49508 21917 +39653 21917 +49514 21916 +21401 21916 +29054 21916 +9556 21915 +47929 21912 +48721 21901 +48512 21899 +19117 21897 +30464 21895 +45572 21892 +35071 21891 +37776 21888 +35735 21887 +41075 21887 +43704 21887 +45012 21886 +40841 21881 +34530 21877 +16138 21875 +43231 21871 +39122 21871 +36620 21871 +38310 21870 +38629 21862 +42380 21860 +36249 21856 +43434 21852 +38352 21852 +41304 21850 +29299 21850 +42712 21849 +49231 21849 +6925 21848 +44569 21847 +17833 21843 +35681 21843 +45213 21842 +17417 21841 +49586 21840 +32430 21836 +47124 21836 +21619 21833 +49706 21833 +47814 21833 +34959 21832 +42534 21829 +46431 21827 +45259 21821 +37747 21820 +26846 21819 +39553 21816 +39004 21816 +33374 21814 +48004 21811 +34546 21811 +2765 21810 +28718 21809 +16353 21808 +29792 21808 +27279 21807 +27379 21802 +43315 21801 +47128 21801 +21280 21800 +5604 21798 +34828 21797 +23206 21795 +40876 21790 +16501 21788 +47909 21786 +38092 21786 +43184 21785 +21243 21785 +35133 21784 +18166 21781 +38703 21780 +49986 21780 +45654 21779 +26302 21777 +47885 21777 +37434 21773 +39918 21773 +47023 21772 +41308 21771 +10878 21768 +44539 21768 +35678 21765 +27629 21765 +34137 21764 +41423 21761 +23203 21760 +39558 21759 +27672 21758 +14933 21757 +48008 21755 +40885 21752 +36314 21752 +39586 21751 +40081 21751 +41580 21748 +49888 21748 +37613 21745 +33398 21744 +7399 21743 +26776 21740 +48949 21739 +27836 21736 +33345 21736 +27398 21735 +35815 21733 +24198 21733 +42905 21733 +17069 21731 +8917 21731 +42704 21731 +38995 21729 +37094 21729 +43134 21729 +44815 21728 +50064 21727 +38456 21726 +44860 21725 +36299 21723 +10198 21722 +41789 21722 +39689 21722 +19895 21722 +18790 21719 +34302 21717 +40275 21717 +36201 21716 +41813 21715 +48070 21714 +4994 21713 +30732 21711 +17746 21706 +43862 21704 +36496 21701 +44903 21700 +29700 21698 +46472 21697 +31383 21694 +35664 21692 +44461 21688 +41287 21687 +35809 21686 +44430 21685 +37777 21682 +6753 21678 +32869 21678 +12909 21678 +28037 21676 +41291 21674 +24047 21673 +36271 21671 +46492 21670 +17148 21669 +48783 21667 +33608 21663 +40234 21662 +45661 21659 +35721 21654 +44593 21653 +40184 21651 +47076 21651 +37804 21651 
+27291 21650 +49714 21649 +49010 21643 +36643 21638 +28694 21638 +25155 21637 +41548 21636 +1707 21636 +45409 21634 +24156 21630 +49983 21621 +44251 21621 +35890 21614 +45722 21614 +41660 21613 +24225 21612 +40877 21608 +43509 21605 +47411 21604 +27904 21604 +10139 21598 +19539 21595 +45090 21592 +42780 21589 +41275 21589 +49386 21586 +48091 21583 +16153 21583 +38466 21581 +39771 21580 +32567 21577 +41822 21577 +29052 21576 +26008 21575 +34820 21574 +13036 21573 +46962 21571 +47326 21570 +46960 21563 +40618 21562 +32970 21562 +43536 21559 +16907 21557 +48311 21550 +38783 21550 +39526 21548 +46526 21547 +45551 21546 +49719 21544 +35472 21541 +16005 21540 +47336 21537 +25118 21536 +26330 21536 +46197 21533 +13047 21533 +42529 21532 +26738 21532 +22742 21529 +43666 21523 +20523 21522 +26305 21520 +40823 21513 +32439 21511 +27537 21511 +44149 21509 +39117 21504 +39942 21500 +40405 21498 +43289 21497 +41477 21497 +27891 21497 +41542 21496 +39880 21494 +28957 21493 +46663 21491 +40385 21489 +13874 21486 +30521 21485 +22667 21480 +4442 21479 +40058 21475 +46290 21475 +46762 21473 +18835 21472 +17354 21469 +46724 21465 +46606 21464 +44439 21464 +41577 21462 +48924 21461 +10954 21455 +7041 21454 +37505 21449 +40495 21448 +46661 21444 +47205 21442 +24318 21442 +49555 21442 +32058 21436 +41810 21432 +45649 21431 +33987 21430 +27743 21423 +29840 21423 +37055 21421 +14863 21420 +46298 21418 +39530 21417 +40276 21416 +44857 21415 +7494 21413 +26092 21413 +39801 21406 +33979 21401 +36664 21396 +42194 21395 +43624 21391 +42309 21390 +28073 21388 +40860 21388 +49747 21382 +48649 21381 +38856 21381 +38605 21377 +43780 21376 +29796 21374 +32824 21374 +2281 21373 +43860 21373 +49688 21371 +28402 21368 +46044 21362 +46885 21356 +40128 21355 +16820 21355 +20847 21354 +48176 21352 +26922 21351 +48250 21351 +45025 21350 +37483 21348 +49809 21346 +29733 21345 +45521 21338 +42921 21335 +42055 21334 +18635 21332 +21389 21329 +44994 21328 +20378 21327 +10138 21324 +49184 21322 +46916 21322 +48425 21321 +36376 21319 +40671 21317 +41557 21315 +16692 21312 +48661 21310 +42634 21306 +28041 21301 +46509 21301 +30986 21300 +49276 21299 +44048 21299 +33462 21296 +26261 21293 +49640 21293 +49102 21292 +6618 21288 +45193 21287 +40954 21287 +44137 21286 +43125 21285 +47101 21282 +5771 21281 +33484 21277 +14889 21276 +38576 21275 +41101 21271 +42533 21269 +46293 21266 +31938 21266 +37475 21266 +49692 21264 +18038 21260 +48200 21260 +29067 21259 +40785 21258 +44987 21258 +40889 21256 +39357 21251 +43523 21248 +45993 21247 +19006 21245 +30931 21241 +1116 21237 +30606 21237 +43803 21227 +45505 21225 +13967 21223 +43168 21223 +46254 21223 +36208 21222 +12065 21221 +28677 21221 +49478 21220 +46585 21219 +41292 21216 +39578 21216 +39344 21216 +43293 21212 +42280 21207 +21301 21206 +33621 21206 +41666 21201 +30662 21201 +35875 21201 +31199 21201 +30650 21201 +31120 21200 +17483 21196 +32243 21191 +35418 21188 +43403 21182 +39201 21176 +42960 21173 +22805 21171 +39736 21171 +37304 21171 +32200 21168 +42472 21168 +44281 21168 +13903 21167 +39312 21167 +40051 21166 +4184 21164 +10683 21163 +39251 21157 +32371 21154 +39166 21154 +33883 21153 +32968 21152 +49973 21151 +21101 21148 +41066 21146 +26155 21146 +23709 21141 +41590 21140 +39111 21138 +31495 21137 +41460 21137 +48987 21127 +13821 21127 +38073 21123 +46005 21123 +23155 21122 +47988 21116 +2868 21115 +38906 21115 +48564 21112 +40245 21112 +40173 21103 +31880 21103 +44162 21100 +27403 21100 +134 21099 +34865 21094 +11156 21093 +36981 21089 +43016 21089 +21871 21086 +27559 21086 
+45682 21085 +31350 21083 +43874 21082 +39023 21078 +46847 21076 +38505 21074 +46067 21071 +48182 21066 +21687 21066 +28730 21057 +50189 21056 +42875 21054 +34335 21052 +11229 21050 +10922 21050 +37348 21049 +36927 21046 +42726 21046 +46550 21045 +26588 21044 +13065 21040 +13090 21037 +22834 21036 +13082 21035 +42268 21030 +45630 21029 +8451 21029 +47792 21024 +28757 21022 +34312 21022 +49461 21022 +49233 21019 +38407 21018 +16742 21017 +39186 21016 +35140 21014 +20949 21013 +45458 21008 +49979 21004 +37323 21001 +43626 21001 +30420 21001 +32915 20999 +43253 20992 +45136 20991 +40700 20990 +1889 20989 +26123 20988 +33553 20987 +38546 20983 +46271 20981 +5853 20980 +42015 20974 +25283 20974 +39216 20973 +30565 20972 +43820 20971 +44957 20971 +35145 20971 +37472 20971 +43610 20970 +36445 20969 +49381 20968 +47283 20968 +14975 20966 +34178 20966 +32704 20965 +35720 20963 +36902 20961 +34647 20959 +35030 20959 +46612 20958 +5877 20957 +35378 20956 +32975 20954 +35755 20947 +14238 20947 +41166 20946 +46378 20944 +27556 20942 +38846 20941 +18521 20938 +35388 20935 +40116 20935 +36549 20933 +48399 20928 +37531 20927 +39449 20923 +47303 20919 +12999 20917 +25235 20916 +27701 20916 +42005 20914 +46146 20913 +33483 20912 +37117 20907 +39327 20907 +30234 20907 +43633 20902 +18122 20898 +45493 20895 +6933 20893 +7845 20884 +33723 20877 +24418 20874 +39904 20872 +48736 20870 +8660 20868 +26307 20867 +49122 20866 +33449 20863 +30270 20862 +30893 20861 +48726 20861 +45072 20860 +38636 20860 +49282 20859 +42010 20859 +49418 20858 +34359 20856 +37716 20855 +24543 20853 +33284 20852 +40157 20851 +17156 20851 +48809 20847 +49697 20843 +40957 20842 +34742 20840 +42747 20839 +41858 20836 +37769 20835 +22058 20835 +43363 20833 +48680 20831 +5927 20830 +13228 20826 +33856 20826 +37780 20825 +48230 20824 +40375 20819 +48212 20819 +3712 20814 +45719 20809 +26267 20809 +23072 20809 +41043 20808 +33647 20804 +49940 20803 +47287 20798 +3138 20795 +27129 20794 +32599 20788 +22012 20787 +30018 20780 +49024 20775 +39118 20775 +30317 20775 +36337 20773 +28004 20773 +47551 20773 +47904 20770 +43690 20770 +39513 20765 +18448 20763 +9790 20762 +28022 20761 +45325 20759 +33056 20758 +31409 20757 +39940 20757 +25983 20755 +40979 20754 +45485 20753 +45704 20752 +36059 20752 +4552 20746 +34288 20745 +40285 20744 +29878 20744 +35156 20742 +37902 20741 +48263 20741 +29593 20740 +43510 20740 +41195 20739 +49542 20738 +40523 20738 +43221 20737 +2400 20736 +39895 20734 +40182 20732 +49625 20732 +8778 20731 +50032 20729 +32276 20725 +40564 20723 +24441 20722 +28352 20717 +6261 20716 +33797 20715 +34618 20715 +49707 20715 +45929 20713 +4878 20713 +43116 20713 +47009 20712 +28833 20712 +38066 20708 +16946 20707 +16575 20707 +45693 20706 +48052 20704 +34435 20703 +46460 20703 +26817 20700 +22266 20699 +39738 20697 +27068 20694 +13561 20693 +35997 20692 +37806 20691 +45128 20690 +39831 20689 +24661 20687 +47861 20686 +20098 20686 +44004 20680 +44261 20678 +39242 20675 +41148 20674 +11586 20671 +45118 20671 +4832 20671 +40043 20667 +44184 20659 +22797 20659 +25033 20657 +43322 20654 +22454 20653 +33258 20653 +34793 20649 +38220 20648 +40248 20648 +36722 20646 +43996 20645 +48189 20643 +10079 20642 +43540 20641 +47913 20635 +16709 20626 +43193 20622 +37765 20614 +39768 20612 +45028 20607 +47484 20606 +12743 20606 +45632 20606 +13962 20604 +21250 20597 +38082 20597 +28667 20596 +42740 20595 +49561 20587 +15535 20587 +43367 20585 +41364 20585 +43410 20584 +40987 20584 +10672 20583 +49109 20581 +38055 20580 +40179 20578 +42583 20574 +25642 
20573 +24753 20570 +34124 20569 +49944 20568 +49351 20563 +49288 20562 +31041 20560 +34456 20556 +41019 20556 +32918 20553 +50000 20552 +37845 20550 +33239 20549 +41344 20546 +19738 20545 +35926 20544 +43391 20542 +36265 20542 +47799 20541 +37349 20539 +45900 20534 +47408 20528 +35474 20526 +42105 20524 +49853 20523 +42885 20521 +41473 20519 +40005 20515 +39179 20514 +46984 20512 +2201 20512 +36424 20512 +12843 20509 +29508 20507 +23909 20504 +39378 20503 +46415 20502 +30691 20502 +47276 20501 +45668 20495 +42175 20493 +41017 20493 +17982 20491 +50213 20491 +48062 20490 +37860 20490 +40175 20489 +23661 20489 +1508 20487 +37224 20484 +14761 20479 +48159 20478 +10294 20477 +44817 20476 +47426 20473 +37258 20473 +41847 20473 +34046 20471 +18009 20470 +38586 20470 +29715 20470 +47702 20469 +43169 20466 +33993 20462 +44218 20460 +41745 20459 +34400 20459 +44925 20458 +32059 20458 +5965 20458 +20456 20448 +39731 20446 +45758 20446 +48866 20445 +39188 20445 +43912 20444 +11062 20440 +34813 20437 +42117 20437 +45421 20436 +33758 20435 +46827 20433 +34739 20431 +34524 20429 +15578 20427 +6019 20426 +40638 20426 +25228 20426 +24008 20424 +37289 20423 +11415 20422 +42778 20422 +47666 20421 +4456 20417 +37680 20416 +29984 20416 +35736 20416 +38053 20415 +45538 20413 +45940 20413 +47897 20412 +37827 20412 +5084 20411 +17665 20411 +24461 20410 +34300 20409 +41495 20409 +23346 20408 +42918 20407 +28613 20406 +31674 20405 +43420 20404 +45590 20403 +46387 20402 +27288 20401 +44927 20400 +47969 20400 +45840 20399 +42215 20396 +21470 20396 +41136 20392 +16366 20388 +43685 20387 +39403 20386 +41302 20384 +46759 20383 +45850 20380 +40147 20379 +24882 20376 +35396 20376 +41232 20373 +30139 20372 +33272 20369 +48429 20369 +43707 20367 +45366 20365 +35200 20365 +37802 20363 +18688 20363 +41143 20362 +19633 20361 +24019 20356 +40486 20356 +47818 20354 +21197 20353 +9832 20347 +35908 20344 +42650 20342 +45089 20342 +28664 20341 +33854 20341 +22274 20340 +38907 20338 +35810 20337 +45720 20336 +26939 20333 +25271 20332 +24334 20332 +19317 20331 +34169 20330 +33367 20329 +48229 20326 +4977 20324 +42819 20321 +31589 20317 +46352 20316 +44576 20312 +50181 20307 +43631 20301 +41486 20301 +48466 20298 +36603 20298 +41295 20298 +40820 20297 +29988 20295 +49125 20294 +46023 20294 +41706 20293 +35964 20293 +45280 20292 +46435 20287 +32238 20286 +37453 20284 +32069 20276 +42199 20275 +45842 20274 +46169 20268 +48294 20266 +31080 20265 +40940 20264 +32739 20262 +37424 20257 +32923 20256 +35828 20256 +20655 20255 +26794 20254 +39315 20254 +36928 20254 +40433 20252 +33967 20246 +5217 20240 +45131 20238 +42213 20237 +21319 20237 +33402 20235 +24531 20235 +45339 20232 +39368 20222 +30813 20221 +21618 20219 +40797 20219 +38885 20215 +16634 20204 +33186 20201 +31634 20199 +50140 20197 +29734 20196 +38628 20195 +48544 20192 +20397 20187 +33112 20184 +13749 20183 +3962 20183 +39096 20181 +45009 20180 +37852 20180 +46147 20178 +46341 20176 +32475 20171 +4575 20170 +42353 20170 +36076 20170 +33549 20167 +24981 20167 +46235 20166 +47106 20165 +49898 20165 +30641 20163 +48909 20161 +17488 20159 +25746 20155 +47623 20154 +36600 20154 +48136 20151 +30192 20145 +18541 20142 +31148 20142 +31633 20136 +29884 20131 +48782 20129 +30518 20125 +15289 20115 +32795 20114 +32738 20113 +38312 20109 +44793 20108 +47957 20103 +23012 20098 +33463 20089 +49089 20089 +49921 20088 +41065 20086 +48380 20083 +49674 20079 +47985 20078 +16714 20077 +42871 20077 +30189 20077 +49644 20075 +43875 20074 +14141 20073 +25990 20073 +39697 20073 +25099 20071 +30969 
20069 +26266 20064 +18765 20063 +28358 20063 +49934 20062 +45385 20060 +18156 20060 +37988 20059 +17992 20059 +40035 20051 +44736 20050 +44260 20047 +13515 20045 +40423 20044 +45797 20043 +36870 20042 +49012 20035 +44400 20032 +48386 20028 +33939 20026 +25506 20025 +23928 20024 +42411 20022 +13651 20022 +39120 20021 +45427 20021 +41198 20018 +22528 20017 +48463 20014 +47642 20012 +8416 20009 +21918 20007 +40311 20005 +22809 20002 +25979 20002 +39664 19999 +30087 19998 +36961 19997 +37826 19996 +43697 19995 +3229 19992 +39210 19990 +43057 19984 +15609 19981 +35490 19980 +48295 19977 +27787 19976 +38697 19972 +21860 19971 +40393 19970 +46180 19968 +49244 19964 +36226 19964 +45106 19961 +30396 19959 +49995 19957 +47675 19953 +32362 19947 +18314 19946 +25679 19945 +37601 19944 +47578 19944 +32318 19943 +13628 19943 +42276 19941 +48291 19940 +45401 19938 +13887 19933 +36725 19932 +41600 19930 +44136 19929 +41948 19928 +47069 19926 +33366 19923 +45252 19920 +46636 19911 +37015 19908 +13073 19906 +42428 19904 +38918 19903 +43901 19901 +30322 19900 +39878 19899 +47330 19892 +33199 19890 +46617 19889 +27968 19889 +41938 19887 +41752 19886 +42091 19885 +44123 19885 +39628 19882 +40465 19881 +48057 19881 +44639 19880 +18380 19879 +1060 19878 +45472 19877 +11988 19875 +4971 19874 +17910 19872 +22690 19867 +42939 19864 +47468 19863 +23651 19858 +20112 19858 +42430 19856 +34487 19851 +46201 19845 +34437 19840 +41257 19837 +37775 19833 +50085 19831 +38438 19831 +42074 19827 +38237 19826 +45107 19822 +46314 19820 +49503 19820 +29401 19820 +22157 19819 +25117 19815 +42413 19815 +34792 19812 +20448 19811 +46758 19806 +43600 19805 +33101 19805 +32852 19803 +44908 19800 +37203 19799 +45111 19798 +12658 19791 +25661 19788 +32941 19784 +40573 19783 +39107 19777 +44705 19772 +42250 19772 +49488 19770 +9622 19769 +45721 19762 +40256 19761 +34964 19761 +42601 19761 +43876 19761 +32001 19760 +46792 19757 +38802 19755 +48376 19752 +41722 19750 +27894 19748 +48177 19747 +45113 19746 +40848 19745 +11877 19744 +12235 19744 +36276 19743 +18048 19742 +42790 19741 +48408 19736 +34631 19736 +47601 19734 +15514 19733 +36104 19727 +42769 19726 +44881 19726 +39872 19725 +44617 19724 +36693 19721 +48745 19720 +32316 19720 +38654 19717 +9872 19716 +23172 19712 +43808 19710 +48882 19709 +43580 19707 +44578 19705 +39696 19701 +42962 19698 +30235 19698 +27992 19698 +44928 19697 +43673 19697 +49650 19696 +40654 19695 +45531 19695 +17751 19694 +19007 19692 +41647 19691 +41261 19690 +3121 19689 +16998 19689 +38970 19688 +45288 19686 +32616 19685 +42789 19684 +38070 19683 +46356 19683 +48584 19681 +46438 19679 +23812 19678 +48108 19678 +10803 19678 +47824 19675 +30620 19674 +12359 19674 +41604 19669 +41895 19668 +31184 19667 +27880 19665 +45124 19664 +47070 19661 +46883 19660 +46302 19656 +44667 19652 +23093 19650 +16684 19649 +43432 19646 +44969 19644 +16530 19643 +49097 19643 +6854 19643 +17573 19642 +7992 19637 +31000 19634 +44977 19633 +40682 19631 +28631 19629 +38457 19628 +15507 19626 +49196 19625 +35438 19624 +47576 19619 +26694 19617 +46020 19615 +47308 19615 +41458 19612 +43571 19610 +28761 19608 +40572 19606 +41914 19605 +48739 19605 +20280 19603 +47491 19602 +32928 19601 +25346 19599 +46258 19598 +43451 19592 +47493 19590 +46028 19584 +32454 19583 +10600 19583 +43813 19580 +47482 19574 +31094 19573 +37167 19571 +42690 19569 +42343 19568 +42463 19564 +44363 19556 +28869 19555 +42776 19547 +40417 19545 +33505 19545 +44893 19544 +43026 19544 +49216 19542 +20335 19541 +28254 19539 +40747 19538 +30409 19536 +31551 19536 +6694 
+[… data file continues: several thousand additional lines, each a pair of integers ("+<index> <value>"), with the second value decreasing monotonically from 19534 down to 8408 over this span …]
+49876 8407 +42246 8405 +48495 8398 +24146 8397 +28636 8396 +50245 8395 +42372 8393 +31092 8393 +28929 8391 +38958 8386 +28501 8386 +48333 8386 +48923 8385 +41762 8384 +12561 8380 +39051 8380 +34094 8379 +39234 8378 +25676 8376 +44863 8376 +39605 8373 +35566 8372 +18800 8370 +21373 8370 +44873 8370 +29694 8366 +46304 8366 +28642 8363 +38108 8362 +35179 8360 +38401 8359 +39782 8357 +32352 8356 +35886 8354 +18924 8354 +32491 8353 +45979 8351 +43164 8349 +43430 8348 +46915 8346 +39014 8342 +32194 8337 +45258 8333 +35125 8331 +38983 8331 +33194 8330 +37372 8329 +13511 8328 +23377 8325 +31330 8324 +29034 8323 +19793 8321 +34836 8321 +44482 8319 +39218 8310 +38334 8305 +40341 8304 +40239 8300 +27005 8298 +32061 8297 +35313 8296 +27442 8293 +38800 8291 +21564 8291 +35610 8290 +47345 8290 +35252 8287 +47245 8283 +23227 8279 +45464 8278 +34075 8275 +34306 8273 +9237 8272 +40828 8268 +41518 8267 +12190 8261 +33141 8259 +30414 8259 +43219 8259 +46537 8256 +41771 8253 +34777 8249 +39400 8247 +39335 8247 +44669 8246 +45561 8242 +31652 8239 +35900 8238 +40482 8234 +46214 8229 +37181 8227 +34648 8227 +41743 8226 +3358 8225 +33240 8223 +37231 8222 +40952 8221 +17050 8221 +44308 8220 +33722 8217 +13250 8216 +42731 8216 +37253 8215 +34220 8213 +25442 8212 +39228 8208 +3485 8202 +35580 8202 +26469 8202 +19261 8201 +35215 8201 +45236 8200 +41694 8198 +42663 8193 +28196 8190 +19668 8187 +27726 8186 +33313 8184 +13743 8183 +40484 8183 +25817 8179 +42344 8170 +26751 8167 +46104 8163 +21893 8163 +28316 8162 +10621 8158 +46513 8156 +16553 8156 +28859 8156 +44258 8155 +45796 8150 +34367 8149 +14706 8145 +7581 8140 +34913 8140 +46207 8139 +30725 8139 +47217 8136 +32821 8135 +6111 8134 +23119 8133 +639 8131 +39914 8128 +23893 8127 +31979 8123 +2076 8121 +39057 8118 +44837 8116 +47685 8111 +42861 8111 +46135 8109 +40407 8107 +36156 8102 +49031 8099 +49044 8095 +4824 8092 +49316 8091 +43710 8088 +38897 8087 +42383 8085 +2459 8082 +16271 8082 +32125 8082 +22838 8078 +33891 8076 +39227 8075 +21188 8075 +42229 8075 +37659 8075 +41420 8074 +42181 8072 +41873 8070 +19749 8070 +44783 8068 +33497 8068 +30476 8068 +3766 8068 +45270 8067 +42950 8065 +48504 8061 +20722 8060 +41798 8059 +25078 8058 +49648 8058 +26841 8051 +45192 8051 +44606 8051 +40708 8051 +17327 8049 +6722 8048 +47477 8045 +44449 8044 +48087 8042 +29631 8041 +29701 8040 +43546 8039 +37596 8035 +42842 8035 +48121 8035 +28398 8034 +28473 8034 +22519 8033 +35498 8032 +39296 8031 +25811 8029 +40612 8027 +43982 8027 +35479 8027 +29938 8026 +35565 8024 +24021 8022 +44380 8021 +15400 8011 +12977 8010 +23946 8007 +45875 8007 +38544 8003 +35943 8003 +39241 8002 +23620 7999 +38447 7998 +11494 7998 +33615 7996 +36950 7990 +22882 7987 +25601 7986 +34983 7986 +30313 7984 +26362 7983 +19053 7982 +20988 7974 +47723 7974 +47306 7973 +25120 7970 +25668 7969 +46284 7968 +16207 7963 +36211 7959 +27866 7957 +18267 7950 +40550 7947 +35612 7946 +24966 7941 +46084 7939 +35888 7938 +36018 7936 +43436 7935 +16411 7932 +8614 7928 +41620 7925 +38676 7923 +13352 7919 +40374 7915 +19119 7915 +33711 7911 +49413 7910 +25938 7910 +35013 7909 +32594 7909 +42338 7905 +24708 7904 +30108 7902 +31141 7893 +44601 7891 +35937 7886 +48589 7880 +18036 7877 +40079 7877 +46014 7875 +48620 7874 +43689 7874 +46680 7867 +43618 7866 +40483 7866 +31762 7864 +35070 7855 +46203 7850 +44800 7846 +18445 7844 +16124 7841 +47298 7840 +34389 7839 +41032 7838 +20147 7836 +39170 7830 +26437 7828 +48699 7828 +36837 7824 +25513 7823 +38882 7821 +20287 7816 +28480 7814 +15512 7813 +45772 7811 +49864 7811 +47409 7805 
+43730 7804 +49578 7802 +47889 7792 +38587 7792 +14763 7790 +49158 7789 +32640 7788 +48890 7773 +38978 7769 +37410 7766 +1606 7765 +38638 7763 +8934 7763 +45532 7762 +32901 7760 +49833 7760 +32981 7759 +24598 7755 +33567 7753 +43655 7745 +39301 7743 +45154 7740 +14575 7736 +26654 7736 +34867 7735 +45081 7734 +44791 7734 +29566 7734 +49467 7732 +29261 7727 +29531 7725 +44567 7725 +34447 7725 +29271 7724 +27732 7723 +46842 7720 +35991 7718 +26072 7715 +32155 7713 +21833 7710 +30027 7709 +29821 7702 +38453 7702 +25537 7701 +38293 7700 +17633 7699 +44196 7698 +43397 7698 +90 7697 +42452 7694 +18718 7694 +476 7692 +50250 7690 +32458 7690 +27245 7689 +49302 7685 +36990 7675 +33377 7672 +35929 7670 +49493 7667 +48844 7664 +38454 7661 +33765 7661 +21121 7659 +33952 7659 +48288 7654 +32888 7654 +44061 7653 +42942 7644 +48762 7640 +44503 7640 +41490 7640 +27261 7638 +24200 7636 +37025 7629 +30608 7627 +46616 7625 +31851 7622 +36899 7622 +25211 7621 +35319 7621 +24640 7619 +27273 7616 +37767 7616 +26201 7613 +36077 7613 +43642 7610 +45681 7608 +29888 7604 +40767 7603 +35574 7602 +34695 7601 +15263 7601 +49930 7600 +17373 7599 +41127 7594 +32875 7594 +37957 7593 +48027 7589 +33574 7585 +41628 7583 +19959 7582 +19256 7580 +25039 7576 +11730 7576 +167 7576 +48494 7574 +30394 7567 +2532 7564 +33249 7562 +37548 7561 +37189 7558 +36868 7554 +38288 7553 +34866 7551 +48836 7547 +37362 7542 +50047 7542 +25831 7540 +49632 7540 +47663 7537 +25354 7537 +15023 7535 +34270 7535 +49279 7534 +38767 7533 +48570 7527 +39103 7526 +29676 7523 +42940 7522 +45746 7519 +41274 7517 +49454 7513 +49942 7509 +46075 7509 +41025 7508 +27419 7507 +24328 7507 +32602 7506 +48549 7505 +11157 7501 +14566 7497 +16813 7496 +37384 7492 +26425 7491 +41681 7490 +38002 7487 +39087 7485 +36008 7484 +8767 7479 +10268 7479 +6879 7477 +50207 7476 +39427 7473 +48561 7472 +44644 7470 +26652 7468 +27421 7467 +20998 7466 +12851 7465 +38391 7464 +49121 7464 +37047 7459 +35244 7454 +36840 7450 +38231 7449 +4857 7443 +25977 7441 +31129 7436 +42455 7436 +40548 7434 +39225 7433 +30328 7432 +12951 7431 +21004 7429 +50126 7425 +30659 7424 +49580 7424 +38415 7423 +36295 7422 +31508 7421 +29668 7414 +11935 7407 +15205 7407 +37065 7406 +26003 7404 +39230 7403 +30281 7401 +41356 7400 +38837 7396 +36500 7394 +16229 7389 +34204 7389 +41206 7388 +38141 7386 +10479 7385 +44920 7382 +34554 7381 +46126 7375 +49970 7373 +30306 7373 +42670 7373 +40842 7370 +26696 7370 +2595 7366 +41341 7365 +44742 7362 +47213 7359 +38143 7359 +45509 7354 +13773 7354 +18806 7352 +6976 7352 +41570 7349 +35255 7348 +32441 7347 +30240 7344 +44374 7340 +39197 7334 +46604 7333 +33161 7330 +12631 7327 +35590 7321 +45282 7319 +34586 7313 +21620 7313 +18408 7311 +21357 7310 +34953 7306 +18161 7306 +49686 7300 +14624 7297 +33383 7296 +15330 7295 +38482 7291 +31272 7290 +34926 7287 +35570 7287 +23958 7286 +44563 7284 +26953 7280 +37365 7279 +49869 7277 +31493 7275 +49492 7273 +24042 7273 +48799 7273 +43892 7267 +48451 7265 +37834 7261 +42296 7260 +28732 7259 +17563 7259 +32771 7256 +7641 7254 +48272 7254 +24259 7253 +7426 7252 +15582 7250 +28923 7247 +37404 7246 +29294 7245 +47133 7244 +40506 7242 +32949 7236 +45898 7234 +41079 7233 +20086 7230 +47516 7229 +36284 7228 +45627 7225 +34048 7224 +13498 7223 +40368 7219 +5881 7218 +37504 7216 +44724 7216 +29006 7215 +46728 7213 +42006 7213 +49572 7209 +49712 7206 +23843 7202 +49008 7201 +26747 7199 +42771 7198 +21215 7195 +41931 7194 +19573 7193 +32105 7192 +49379 7189 +20468 7188 +44679 7182 +37821 7176 +40097 7176 +34947 7176 +28039 7175 +44292 
7173 +11861 7172 +25294 7171 +7790 7168 +20740 7167 +25106 7161 +35119 7159 +39156 7156 +39478 7155 +38020 7154 +44812 7152 +42466 7148 +37697 7147 +36648 7145 +46251 7145 +24396 7145 +39861 7142 +27447 7141 +18065 7136 +1054 7134 +25306 7131 +44360 7130 +9833 7126 +48593 7125 +31400 7124 +47937 7123 +48582 7122 +25919 7121 +37139 7120 +37740 7119 +20438 7118 +3680 7114 +41208 7112 +33433 7105 +43894 7104 +28721 7102 +30542 7101 +44387 7094 +16950 7091 +46933 7089 +47585 7088 +43404 7087 +36772 7086 +27477 7084 +33010 7083 +32645 7079 +8243 7078 +41488 7077 +24583 7074 +8700 7067 +47294 7066 +42317 7064 +43827 7063 +30769 7063 +50025 7059 +36822 7058 +48631 7054 +17664 7051 +39881 7050 +49144 7049 +48485 7040 +15742 7040 +19746 7039 +29205 7039 +36883 7037 +45853 7034 +17679 7032 +31755 7030 +36881 7027 +32797 7025 +43878 7022 +42550 7022 +37945 7021 +27958 7021 +26629 7018 +41499 7015 +45166 7003 +45336 7001 +39839 6998 +9770 6994 +44054 6993 +41298 6992 +23493 6991 +31968 6990 +44227 6987 +19312 6987 +8191 6983 +43320 6982 +34379 6981 +49516 6981 +36803 6976 +32550 6976 +44759 6975 +27536 6973 +40139 6971 +36526 6970 +31919 6969 +23228 6968 +49095 6968 +40988 6967 +33357 6966 +22621 6966 +43387 6964 +29492 6964 +18994 6963 +46546 6963 +17733 6963 +43441 6960 +49828 6955 +10773 6955 +2315 6955 +46975 6953 +11477 6951 +46683 6950 +42979 6950 +34455 6948 +36834 6946 +7559 6944 +43456 6943 +49138 6942 +24834 6941 +31590 6941 +19615 6939 +31122 6939 +39902 6938 +46102 6937 +44276 6937 +17043 6937 +26964 6935 +36754 6932 +40331 6931 +39200 6930 +46002 6930 +26062 6926 +41744 6924 +14472 6918 +15380 6916 +41633 6916 +37269 6916 +44991 6916 +18202 6912 +44159 6911 +20026 6910 +31419 6910 +49370 6910 +48124 6909 +29307 6908 +24342 6901 +29223 6893 +29221 6891 +33786 6883 +39458 6882 +50188 6882 +38241 6880 +22585 6876 +3836 6870 +29017 6868 +49562 6865 +28018 6862 +41748 6859 +33349 6859 +49802 6859 +33028 6853 +5970 6851 +40853 6847 +48791 6844 +49709 6842 +14629 6836 +13078 6836 +36607 6834 +38027 6832 +50178 6831 +49176 6830 +43437 6830 +42696 6829 +37674 6827 +31053 6814 +42171 6814 +34242 6813 +49670 6812 +30603 6812 +40478 6812 +36843 6811 +22927 6810 +49422 6806 +43740 6805 +12962 6805 +16776 6802 +18453 6795 +40107 6791 +40397 6791 +22955 6788 +47172 6788 +28745 6781 +38035 6781 +13576 6781 +41862 6779 +12629 6779 +46120 6776 +33684 6775 +10387 6773 +19593 6769 +31815 6763 +46025 6760 +33077 6759 +18615 6759 +17505 6756 +46022 6753 +48567 6753 +14950 6753 +36122 6748 +17922 6745 +18743 6744 +28298 6743 +40091 6743 +32916 6740 +43828 6736 +20684 6732 +16444 6731 +26257 6731 +28100 6730 +8077 6728 +48895 6728 +10166 6727 +13263 6726 +41008 6725 +25947 6725 +35608 6724 +46477 6723 +32184 6722 +36894 6719 +46421 6719 +44591 6717 +25385 6715 +40981 6714 +31442 6712 +48084 6712 +43517 6709 +38899 6709 +43572 6709 +24361 6708 +14126 6708 +30642 6708 +31798 6706 +43614 6705 +21206 6705 +26447 6703 +16951 6700 +47781 6700 +24835 6696 +47117 6693 +40360 6691 +43612 6691 +49790 6689 +20613 6683 +50016 6681 +35645 6676 +41901 6675 +32665 6675 +35750 6670 +32851 6669 +46369 6669 +33485 6669 +38998 6666 +40901 6666 +37436 6663 +38688 6663 +23935 6660 +46077 6658 +19576 6654 +33650 6652 +12904 6652 +15732 6646 +2333 6644 +42310 6644 +45038 6643 +28019 6642 +26152 6638 +35802 6636 +44501 6635 +19209 6633 +16764 6632 +40044 6631 +27619 6630 +41242 6629 +19578 6629 +45685 6627 +15145 6622 +23767 6617 +10623 6615 +37870 6614 +10677 6613 +12095 6612 +31116 6611 +39462 6610 +36733 6608 +43777 6608 +10699 6607 
+44819 6602 +4812 6600 +13556 6600 +28101 6595 +42503 6595 +139 6594 +37150 6591 +40920 6590 +35835 6585 +47537 6584 +46610 6583 +31945 6582 +29942 6579 +26599 6578 +23806 6578 +16594 6576 +35630 6574 +45271 6573 +49884 6573 +2358 6572 +18400 6571 +44798 6568 +42481 6559 +42685 6557 +50015 6552 +7113 6550 +31857 6550 +32138 6548 +29347 6548 +48765 6547 +46076 6544 +32429 6542 +49728 6540 +33080 6540 +45869 6534 +17649 6533 +42948 6532 +26663 6526 +48529 6526 +35220 6525 +24388 6525 +16873 6521 +47206 6518 +15973 6517 +47918 6517 +25200 6516 +42135 6516 +46613 6516 +10118 6513 +20602 6509 +10126 6508 +49811 6508 +15341 6508 +25423 6507 +41202 6507 +38095 6500 +28465 6498 +48103 6497 +25163 6496 +46374 6496 +42485 6493 +31358 6492 +45119 6492 +29918 6491 +27856 6489 +40322 6489 +13942 6488 +40914 6488 +25636 6486 +19314 6486 +20689 6485 +13194 6482 +33373 6480 +27024 6480 +35742 6478 +7419 6477 +48852 6477 +49918 6473 +23520 6471 +31436 6468 +6668 6467 +27195 6464 +31809 6462 +16943 6454 +5515 6450 +39841 6448 +47050 6448 +36533 6446 +40059 6446 +37835 6443 +33314 6440 +48954 6439 +41251 6438 +47705 6437 +45337 6437 +14302 6436 +7107 6434 +28820 6430 +40481 6427 +2979 6425 +20863 6424 +44478 6423 +26272 6423 +43975 6421 +10786 6421 +49028 6420 +43331 6419 +44910 6418 +16947 6418 +32113 6417 +24469 6417 +17302 6413 +39788 6411 +47524 6410 +8419 6406 +15583 6402 +48413 6401 +44935 6400 +14279 6399 +46311 6398 +40864 6397 +27956 6396 +41767 6396 +46228 6394 +14823 6393 +25941 6390 +30520 6390 +47116 6389 +44213 6384 +35726 6382 +33333 6381 +31627 6369 +25486 6369 +46850 6354 +37340 6353 +23903 6350 +41591 6349 +40387 6347 +42440 6347 +12453 6343 +36604 6342 +15256 6342 +8892 6339 +40230 6339 +49110 6338 +38835 6337 +8371 6334 +4927 6331 +15103 6329 +49096 6327 +46780 6326 +27634 6324 +49126 6322 +3480 6319 +18853 6318 +46315 6318 +20168 6316 +48493 6316 +38452 6310 +7798 6309 +34000 6309 +35746 6309 +38936 6309 +43020 6308 +33283 6308 +36515 6306 +34674 6305 +49770 6304 +45234 6299 +32485 6298 +6432 6296 +47150 6296 +29027 6295 +28663 6293 +40705 6293 +17203 6292 +48379 6289 +15864 6281 +33410 6279 +47065 6274 +42887 6273 +46253 6271 +29880 6270 +6077 6269 +36133 6259 +41614 6254 +26129 6253 +42797 6249 +40339 6249 +48545 6248 +14061 6247 +30014 6246 +33716 6245 +27745 6244 +42243 6239 +42501 6236 +17739 6232 +13968 6232 +33353 6229 +35283 6229 +27608 6228 +42721 6225 +21053 6213 +32223 6211 +48790 6210 +31915 6208 +45565 6206 +46634 6204 +28698 6204 +21117 6201 +7727 6200 +29356 6199 +13499 6197 +34617 6194 +20557 6192 +45783 6188 +48613 6187 +21496 6186 +28066 6185 +44758 6183 +43749 6182 +39431 6181 +45582 6180 +47852 6177 +33276 6175 +33480 6172 +47242 6165 +29291 6161 +35790 6160 +22961 6160 +8273 6155 +46521 6153 +39707 6151 +43099 6145 +21878 6143 +17224 6141 +31688 6139 +43714 6139 +32555 6131 +47285 6129 +28410 6128 +45977 6128 +25302 6127 +34556 6125 +21106 6123 +1457 6123 +15324 6122 +43641 6122 +8269 6119 +17258 6118 +44336 6117 +45841 6114 +14399 6112 +49045 6108 +40825 6106 +7551 6106 +21330 6102 +24881 6101 +45353 6100 +36046 6096 +34104 6094 +29610 6093 +48468 6092 +22079 6089 +26687 6089 +20301 6086 +16136 6084 +31932 6083 +15118 6079 +48659 6078 +31614 6075 +28497 6075 +49999 6072 +17663 6069 +49563 6068 +48814 6067 +45352 6066 +49780 6065 +28735 6065 +45677 6065 +35381 6065 +34539 6062 +46127 6060 +35460 6054 +50237 6048 +31801 6048 +28813 6047 +28873 6047 +20257 6044 +43036 6041 +26504 6040 +32394 6037 +26522 6033 +49621 6033 +41470 6033 +6624 6031 +23966 6030 +17606 6030 
+33365 6029 +45751 6029 +16681 6027 +45833 6026 +13253 6026 +3547 6022 +22360 6015 +46428 6015 +30676 6014 +38727 6014 +26563 6013 +46700 6012 +42784 6011 +25961 6010 +13007 6009 +16906 6008 +33170 6005 +6828 6003 +10310 6002 +37235 6001 +41076 6001 +33299 6000 +32072 5999 +42788 5998 +47625 5993 +32103 5993 +48749 5992 +35955 5991 +36787 5990 +43904 5986 +46840 5981 +16604 5979 +49515 5978 +146 5978 +45529 5976 +6384 5973 +42814 5970 +40640 5969 +32323 5968 +48433 5967 +36346 5966 +46786 5965 +47190 5965 +45901 5963 +35503 5960 +30710 5957 +34280 5952 +46231 5952 +39787 5949 +37656 5946 +38344 5945 +31559 5942 +41394 5941 +25199 5941 +12614 5941 +27520 5937 +16547 5936 +43844 5935 +35302 5932 +43108 5932 +43392 5932 +49277 5931 +31695 5931 +50175 5930 +39792 5926 +38618 5921 +28452 5921 +29800 5921 +46080 5919 +47658 5919 +35843 5919 +39456 5917 +46785 5913 +43368 5913 +46297 5911 +30337 5911 +30894 5908 +47427 5908 +28851 5907 +40890 5903 +15437 5903 +47288 5901 +38103 5900 +43990 5893 +30956 5892 +8864 5892 +18716 5891 +15333 5885 +39286 5884 +14182 5883 +50051 5879 +39919 5878 +42881 5874 +46294 5871 +50144 5870 +37393 5864 +38114 5860 +46144 5857 +33494 5854 +40692 5852 +33476 5849 +44278 5846 +37736 5844 +37573 5835 +23028 5835 +24546 5834 +45430 5834 +33380 5833 +27038 5832 +34350 5825 +41581 5823 +48867 5822 +5477 5818 +14490 5815 +45592 5815 +31502 5814 +44968 5811 +38183 5808 +32706 5808 +42254 5807 +49946 5806 +36732 5806 +28482 5805 +23054 5803 +20331 5799 +26517 5795 +45854 5793 +36343 5792 +20659 5791 +29146 5788 +42133 5781 +39429 5778 +30486 5776 +47255 5775 +44199 5773 +46538 5773 +39938 5772 +26801 5771 +49485 5768 +43198 5766 +48789 5765 +37725 5763 +18114 5762 +24954 5762 +39888 5760 +27097 5756 +26604 5753 +8911 5748 +48034 5745 +14369 5744 +13173 5744 +31859 5743 +47173 5739 +30543 5737 +24321 5735 +24840 5735 +23869 5735 +8552 5733 +33245 5732 +10541 5730 +46599 5729 +47471 5729 +36727 5726 +35628 5723 +5218 5721 +28644 5720 +45608 5714 +150 5711 +46219 5710 +47522 5708 +40632 5705 +30854 5700 +46383 5698 +41358 5696 +29515 5693 +48941 5693 +27553 5692 +27072 5684 +42098 5683 +30003 5682 +27915 5682 +12730 5675 +41057 5675 +32345 5672 +26495 5672 +42419 5669 +44566 5669 +48902 5665 +37672 5658 +37942 5658 +24243 5657 +48536 5654 +33853 5652 +27148 5649 +34227 5647 +32978 5645 +17844 5643 +40874 5642 +37865 5642 +37960 5637 +23252 5636 +12895 5636 +22027 5635 +31391 5631 +44270 5628 +18709 5628 +36341 5624 +49687 5624 +39908 5623 +49691 5619 +44596 5615 +9933 5614 +21665 5613 +41667 5613 +37770 5612 +29121 5608 +40653 5605 +20946 5602 +24912 5601 +42014 5596 +18439 5596 +21789 5592 +49494 5588 +49406 5588 +35977 5585 +36064 5580 +8572 5578 +45912 5578 +36147 5575 +30474 5573 +43532 5573 +49081 5572 +45198 5572 +41915 5568 +35320 5566 +4026 5566 +26848 5565 +22915 5564 +46748 5562 +47834 5560 +37418 5560 +42975 5559 +34198 5558 +40925 5558 +47730 5555 +48838 5555 +31205 5555 +9898 5552 +44319 5551 +26621 5550 +31396 5550 +28561 5544 +35738 5539 +29513 5538 +32833 5537 +41317 5536 +47454 5530 +18203 5530 +26416 5529 +13236 5527 +27389 5527 +47772 5527 +25753 5523 +19380 5521 +13074 5520 +39575 5519 +36027 5517 +33815 5516 +26109 5515 +36198 5513 +46710 5511 +16743 5510 +31626 5508 +50177 5508 +25554 5506 +24524 5502 +43925 5500 +46727 5499 +30698 5492 +31971 5491 +13305 5490 +1824 5490 +28861 5489 +42182 5483 +15239 5483 +36347 5483 +17576 5482 +44709 5479 +13930 5478 +34042 5475 +28914 5475 +10789 5474 +47419 5471 +42809 5470 +31441 5469 +34634 5461 +4945 5454 +50248 
5452 +8360 5450 +38113 5446 +24333 5446 +47357 5445 +50092 5443 +23010 5440 +47402 5436 +27694 5433 +27193 5433 +29320 5432 +30892 5424 +32635 5424 +33582 5421 +27313 5420 +40164 5418 +47960 5415 +2515 5412 +46639 5411 +46510 5406 +23599 5406 +26287 5405 +35676 5403 +26858 5403 +25901 5403 +46183 5402 +41622 5396 +21149 5391 +13890 5388 +38270 5386 +10894 5385 +35575 5382 +46566 5379 +28956 5379 +3204 5377 +42031 5374 +27031 5374 +20180 5371 +46085 5368 +35393 5368 +8293 5368 +49357 5367 +49239 5367 +43406 5361 +38615 5361 +13149 5359 +40710 5358 +45201 5355 +46452 5353 +30934 5353 +43085 5352 +23416 5351 +30493 5347 +33634 5345 +42036 5343 +22468 5341 +31253 5340 +36204 5339 +14387 5339 +24426 5328 +45530 5326 +44831 5324 +42114 5324 +44225 5322 +48562 5320 +30156 5320 +40759 5319 +49057 5318 +42895 5316 +11921 5308 +29672 5307 +48652 5306 +44531 5306 +34924 5306 +9602 5304 +44604 5303 +9924 5301 +16478 5301 +9350 5300 +40546 5298 +45417 5293 +33092 5292 +50125 5292 +46360 5289 +4358 5288 +25624 5288 +49574 5285 +30230 5282 +28614 5280 +40627 5277 +28122 5274 +28268 5272 +7131 5270 +46897 5270 +38032 5269 +29302 5268 +44515 5267 +23391 5265 +35780 5263 +47783 5263 +44156 5262 +14168 5262 +43651 5260 +36428 5260 +2578 5254 +29945 5251 +27830 5246 +24378 5244 +33855 5244 +25454 5237 +39007 5235 +42547 5232 +45133 5230 +41995 5230 +10615 5229 +45647 5227 +23969 5223 +13233 5221 +46455 5221 +15036 5220 +26357 5219 +34769 5215 +44859 5214 +39407 5210 +36632 5210 +47911 5209 +30345 5209 +32465 5207 +26870 5203 +26828 5198 +35895 5196 +31711 5189 +37855 5187 +47892 5187 +46177 5186 +34442 5182 +43866 5179 +33898 5173 +45300 5166 +50157 5165 +20837 5165 +38948 5164 +3239 5161 +29934 5159 +17495 5158 +48657 5151 +40232 5150 +21656 5150 +23793 5148 +24015 5148 +18925 5145 +30581 5144 +45941 5139 +45285 5137 +33762 5136 +31898 5135 +34877 5134 +49742 5134 +44727 5131 +35781 5131 +24516 5130 +38518 5127 +46911 5120 +24364 5117 +42507 5115 +44296 5114 +29660 5112 +11924 5111 +12485 5111 +11788 5107 +49702 5105 +44562 5104 +45140 5102 +27925 5098 +31659 5097 +7203 5095 +30218 5094 +19016 5093 +39934 5090 +2500 5088 +19763 5086 +17056 5084 +42588 5081 +16107 5081 +40577 5080 +5320 5080 +22077 5078 +33172 5073 +21413 5071 +34371 5071 +44683 5064 +48473 5061 +20231 5059 +9881 5059 +46367 5058 +23314 5057 +23758 5057 +25444 5055 +36487 5054 +39668 5053 +49140 5051 +43054 5049 +15924 5048 +14978 5048 +46384 5048 +12859 5046 +46037 5045 +35913 5044 +49451 5039 +40009 5037 +42042 5035 +36125 5033 +15782 5032 +45547 5031 +27586 5030 +44451 5029 +28653 5024 +46329 5022 +48278 5022 +44086 5022 +27448 5020 +9315 5020 +26802 5019 +18559 5018 +27282 5017 +1880 5016 +39219 5012 +6349 5012 +32091 5011 +44330 5011 +38799 5007 +19160 5006 +15966 5006 +16372 5005 +8029 5002 +16358 5002 +21351 5000 +24515 5000 +46808 4995 +18097 4995 +42901 4993 +31127 4993 +16671 4992 +28839 4990 +26365 4989 +17964 4980 +21800 4977 +49444 4977 +41908 4974 +26737 4973 +43869 4973 +47763 4972 +6560 4971 +15848 4971 +40809 4967 +35346 4966 +19722 4963 +20701 4963 +43260 4962 +19563 4961 +48267 4957 +33825 4956 +45368 4952 +23838 4947 +44032 4945 +37124 4945 +13371 4943 +41296 4941 +39551 4941 +32096 4940 +43661 4939 +26294 4937 +49912 4936 +40480 4935 +26163 4935 +14605 4934 +38715 4933 +14749 4932 +38197 4930 +33054 4929 +48477 4928 +40083 4926 +21906 4926 +31611 4925 +47639 4924 +22880 4923 +22369 4920 +41848 4919 +47020 4919 +33129 4918 +11239 4916 +28935 4914 +25757 4914 +38566 4909 +17595 4908 +48738 4908 +33132 4906 +48948 4904 
+43810 4899 +39455 4896 +30589 4895 +10791 4891 +41387 4891 +28258 4888 +28508 4885 +16580 4884 +20891 4884 +22646 4883 +22400 4882 +33734 4881 +35170 4880 +14910 4878 +30016 4876 +17097 4874 +17861 4871 +47466 4869 +29352 4869 +11995 4868 +37120 4866 +3784 4861 +21465 4860 +43418 4857 +46514 4855 +24455 4849 +45101 4845 +23686 4845 +25391 4844 +45403 4843 +21231 4842 +39650 4840 +43762 4835 +39559 4834 +45175 4834 +45243 4832 +31811 4832 +48191 4832 +38821 4827 +6222 4824 +47333 4817 +19109 4816 +46047 4814 +46615 4810 +40281 4810 +13442 4805 +27772 4801 +43660 4798 +30150 4796 +31382 4792 +33248 4790 +41411 4789 +32966 4772 +47025 4769 +17491 4769 +36982 4766 +44231 4764 +35236 4764 +46111 4757 +27566 4749 +15865 4747 +20369 4746 +33744 4743 +29639 4741 +31425 4739 +30001 4736 +38521 4736 +25101 4734 +42995 4733 +46602 4733 +49094 4730 +25169 4730 +33196 4728 +43167 4726 +44120 4725 +42384 4723 +21962 4717 +17143 4715 +8285 4712 +41424 4712 +44239 4711 +41233 4710 +10267 4710 +41446 4706 +24201 4703 +40133 4701 +16040 4699 +14631 4699 +42553 4698 +5192 4698 +1189 4696 +32608 4696 +6009 4695 +4368 4694 +45299 4694 +39555 4694 +18932 4693 +36476 4690 +40347 4690 +19031 4690 +33234 4689 +42223 4687 +41097 4682 +43118 4679 +40850 4679 +9169 4676 +40651 4676 +33518 4672 +46995 4665 +39622 4665 +49475 4665 +30071 4664 +48301 4663 +43377 4661 +43628 4659 +13361 4659 +40258 4652 +43258 4650 +6354 4648 +37712 4643 +39833 4643 +4183 4642 +28228 4637 +36504 4625 +19801 4622 +43471 4621 +20885 4619 +46659 4617 +42711 4616 +43194 4615 +48056 4613 +49438 4612 +45729 4612 +32101 4609 +33766 4608 +22552 4600 +26572 4600 +40402 4599 +20593 4596 +45914 4596 +37214 4596 +34749 4596 +24908 4594 +40799 4591 +23646 4587 +10353 4587 +20574 4587 +35779 4584 +23814 4582 +16254 4577 +36471 4576 +49604 4576 +41205 4576 +22015 4569 +44190 4568 +23821 4562 +32356 4561 +21109 4559 +20898 4559 +48538 4557 +42477 4556 +46155 4554 +29868 4552 +35442 4551 +48131 4549 +34562 4549 +41359 4548 +49788 4545 +33152 4545 +47109 4545 +16497 4544 +50200 4541 +10895 4538 +34149 4537 +19778 4535 +38365 4533 +48860 4532 +41966 4527 +24179 4525 +29507 4523 +33789 4522 +30736 4518 +33951 4515 +26865 4514 +25249 4512 +34293 4511 +19212 4511 +46021 4511 +31718 4510 +20688 4508 +27286 4506 +10103 4504 +40668 4500 +21575 4496 +44425 4496 +30388 4495 +15944 4494 +46824 4493 +33772 4488 +48682 4488 +19871 4486 +48885 4485 +38941 4483 +14116 4480 +22866 4478 +15414 4474 +9523 4473 +36942 4473 +45035 4467 +38110 4466 +35952 4462 +14224 4460 +46722 4459 +44850 4457 +3479 4456 +37273 4454 +19724 4453 +27955 4452 +35414 4450 +37155 4448 +43815 4447 +25461 4442 +46955 4442 +19359 4441 +33406 4439 +39347 4438 +32023 4436 +44538 4432 +7453 4430 +24517 4429 +31871 4427 +33661 4427 +34577 4425 +6038 4424 +48142 4424 +49646 4420 +30535 4418 +7098 4416 +34496 4414 +11306 4414 +46343 4413 +36571 4413 +36320 4410 +49511 4405 +3462 4404 +28266 4402 +29428 4401 +35632 4399 +17440 4399 +18847 4398 +42822 4397 +44642 4394 +33717 4392 +20360 4391 +43008 4391 +24214 4390 +36084 4386 +9625 4383 +50118 4383 +43597 4375 +49217 4375 +29323 4373 +38644 4373 +37773 4373 +47598 4368 +37012 4366 +44191 4366 +38148 4365 +36387 4363 +3749 4362 +17474 4358 +41185 4356 +47473 4347 +35741 4345 +36510 4334 +44166 4331 +17236 4329 +44409 4328 +15038 4327 +23795 4324 +38215 4320 +166 4320 +48003 4312 +36613 4307 +28961 4305 +16368 4304 +40247 4303 +38399 4300 +28712 4296 +50204 4290 +31680 4288 +37188 4287 +26545 4287 +33274 4285 +42432 4285 +45675 4282 +33678 4281 +32086 
4278 +20015 4278 +23002 4278 +46217 4273 +39945 4272 +38564 4271 +13994 4270 +35813 4268 +37815 4268 +14459 4267 +38274 4267 +20129 4266 +44613 4262 +28376 4261 +11093 4255 +29720 4253 +48875 4252 +42359 4250 +28074 4242 +49347 4241 +37500 4240 +50109 4233 +18919 4232 +47935 4230 +30540 4229 +8193 4226 +24586 4223 +37766 4218 +23735 4218 +42903 4217 +46775 4217 +36117 4216 +29736 4214 +46733 4213 +39582 4209 +38043 4209 +7969 4208 +33968 4204 +18074 4203 +31895 4202 +32603 4199 +46320 4199 +14621 4196 +33062 4195 +14844 4194 +36911 4191 +3821 4190 +7678 4187 +27358 4187 +27050 4184 +3880 4183 +34795 4181 +38891 4180 +49751 4180 +38499 4172 +5435 4165 +4770 4165 +20826 4164 +8809 4163 +43316 4158 +49749 4155 +49531 4153 +9462 4149 +36781 4149 +32419 4147 +45077 4143 +43790 4135 +10168 4132 +34303 4132 +37351 4130 +28119 4129 +37148 4129 +11709 4128 +9152 4127 +42059 4126 +4256 4126 +43721 4126 +43270 4125 +38038 4124 +26639 4122 +32065 4121 +32495 4121 +49222 4120 +5754 4120 +30221 4120 +37920 4117 +28252 4117 +28474 4117 +45988 4115 +33321 4108 +41020 4108 +28333 4101 +25538 4097 +29453 4092 +44808 4089 +48608 4083 +42197 4083 +22773 4079 +10558 4077 +43654 4076 +29064 4075 +39223 4067 +41982 4067 +48082 4063 +34457 4061 +43094 4059 +38641 4059 +37344 4059 +30998 4058 +31069 4054 +9547 4052 +39275 4051 +25267 4049 +43987 4048 +48625 4045 +21945 4042 +26111 4042 +9783 4039 +10777 4037 +36259 4031 +28147 4031 +40203 4026 +41940 4026 +8594 4023 +50149 4023 +30564 4020 +45367 4020 +30950 4020 +22885 4018 +37655 4017 +38790 4017 +49041 4011 +28769 4011 +46672 4010 +44707 4010 +43525 4003 +44898 4001 +25576 4000 +36558 3996 +1832 3995 +47252 3991 +31775 3988 +49836 3987 +15005 3984 +24036 3984 +43384 3983 +3658 3983 +28428 3981 +15575 3978 +34635 3974 +45242 3974 +131 3972 +41781 3971 +36291 3969 +43233 3968 +33606 3968 +32032 3962 +11183 3962 +47924 3961 +28373 3959 +34703 3956 +40637 3956 +38815 3955 +37250 3953 +32252 3953 +42166 3952 +26317 3951 +19083 3950 +33236 3947 +49061 3946 +48558 3946 +32782 3945 +38640 3944 +19076 3943 +47823 3942 +34764 3938 +36598 3936 +24564 3934 +19275 3934 +22719 3934 +4412 3932 +32634 3931 +36770 3930 +46430 3929 +23772 3927 +34168 3927 +15179 3925 +49899 3924 +26396 3922 +46697 3922 +28789 3919 +1253 3918 +35767 3913 +45205 3912 +35967 3911 +41560 3910 +11452 3910 +44592 3909 +39266 3908 +24941 3907 +22522 3905 +16844 3905 +49128 3904 +48447 3902 +33501 3899 +30952 3899 +43895 3892 +26214 3890 +29230 3888 +7304 3887 +30326 3881 +12103 3881 +45027 3878 +37022 3877 +41671 3877 +39069 3875 +25255 3869 +46100 3868 +38807 3866 +30672 3863 +42592 3858 +47113 3856 +10263 3855 +22683 3855 +42009 3854 +49501 3854 +18401 3853 +41609 3850 +47638 3849 +26180 3847 +6353 3846 +39126 3846 +47365 3842 +44946 3839 +23803 3837 +8816 3836 +38491 3834 +41741 3824 +38855 3824 +18498 3824 +44028 3823 +27933 3819 +15322 3818 +40211 3816 +36616 3816 +45835 3815 +40034 3815 +32147 3814 +33800 3813 +28766 3808 +32169 3808 +25637 3808 +34148 3808 +2783 3804 +47798 3803 +42539 3801 +28338 3801 +31909 3799 +41482 3798 +19622 3798 +2827 3797 +18270 3793 +38874 3793 +45254 3792 +3273 3789 +41026 3789 +28404 3788 +37713 3787 +28611 3787 +38417 3783 +19776 3780 +10024 3778 +36463 3777 +5619 3776 +48754 3775 +32846 3774 +33486 3769 +26344 3769 +8912 3768 +33176 3767 +36850 3766 +45700 3762 +46220 3762 +48330 3758 +41989 3758 +44458 3757 +22170 3755 +41219 3754 +49926 3754 +29486 3749 +49151 3746 +17518 3746 +31929 3746 +44223 3744 +45684 3742 +30871 3741 +49908 3741 +29870 3740 +33796 3739 
+26367 3736 +12208 3731 +36258 3730 +47279 3726 +19449 3720 +49933 3718 +23241 3713 +28622 3712 +10367 3711 +35395 3711 +35178 3708 +46799 3708 +10929 3708 +38637 3705 +31696 3702 +21849 3701 +30609 3698 +47071 3697 +48181 3696 +42473 3696 +41104 3694 +17256 3692 +18610 3691 +13757 3690 +49256 3687 +41552 3684 +15466 3678 +12351 3676 +46182 3675 +49560 3674 +37403 3673 +22929 3670 +43087 3664 +35791 3664 +37146 3661 +8935 3660 +34999 3660 +16301 3659 +43782 3657 +39967 3657 +18167 3656 +30265 3655 +18172 3655 +46676 3655 +31954 3651 +39345 3650 +37647 3650 +38663 3646 +32685 3642 +41162 3640 +46358 3633 +41000 3632 +23877 3631 +36674 3631 +42195 3624 +44747 3622 +48547 3622 +49849 3620 +39825 3619 +26037 3618 +26440 3616 +22810 3615 +37510 3614 +28883 3613 +43582 3611 +19754 3611 +23114 3607 +38601 3607 +45037 3602 +37017 3602 +33547 3600 +44435 3597 +47718 3593 +29046 3591 +47815 3591 +45798 3591 +32830 3590 +12145 3589 +28222 3589 +31410 3589 +39849 3587 +38296 3587 +37477 3587 +13751 3586 +40517 3586 +36977 3585 +10046 3583 +26060 3578 +48310 3574 +47348 3574 +42180 3574 +26198 3568 +27614 3567 +44318 3566 +33439 3564 +42840 3563 +41392 3562 +33539 3561 +33714 3560 +19488 3560 +50145 3558 +18717 3557 +24335 3556 +43907 3554 +17386 3554 +35279 3553 +40106 3546 +24741 3545 +36124 3545 +45890 3545 +35620 3544 +23826 3544 +44706 3541 +29085 3541 +47531 3538 +49388 3537 +34445 3530 +19618 3527 +28312 3525 +38481 3523 +42652 3522 +5649 3522 +2660 3518 +41512 3514 +36623 3510 +31417 3510 +17125 3510 +36436 3505 +26656 3504 +25205 3500 +33087 3498 +41055 3498 +46701 3497 +19557 3497 +14815 3495 +32070 3495 +27908 3490 +18337 3490 +4707 3490 +20336 3487 +26638 3487 +44464 3485 +36542 3482 +7555 3476 +43271 3474 +15109 3473 +48319 3470 +37996 3469 +39815 3468 +40025 3467 +39590 3466 +46326 3461 +39531 3459 +23513 3459 +37187 3458 +49139 3457 +39470 3456 +11430 3455 +3390 3454 +49794 3453 +45809 3452 +49026 3452 +5196 3451 +33637 3450 +48686 3449 +40832 3449 +10493 3448 +21677 3448 +26193 3448 +33931 3447 +43319 3447 +35770 3442 +46912 3442 +30331 3437 +20275 3437 +36851 3436 +35347 3436 +47026 3435 +46893 3435 +44267 3433 +29435 3432 +34248 3430 +46143 3430 +37077 3427 +33293 3425 +47322 3422 +47361 3419 +43158 3416 +43160 3413 +37481 3410 +36037 3409 +41090 3409 +41506 3408 +43477 3407 +40709 3406 +38841 3404 +35974 3403 +22492 3403 +18249 3400 +30448 3400 +10587 3398 +47209 3397 +7333 3396 +36028 3396 +26238 3396 +27002 3393 +49155 3393 +49601 3393 +31743 3392 +15340 3391 +5480 3388 +18589 3387 +33784 3384 +42165 3384 +35792 3383 +43212 3383 +32815 3381 +23380 3379 +37010 3377 +44942 3377 +24844 3376 +19364 3373 +11151 3370 +22630 3369 +47227 3368 +33707 3366 +26693 3366 +35202 3366 +10545 3365 +1518 3363 +44754 3361 +26158 3359 +34364 3357 +23480 3357 +19293 3355 +14257 3354 +38321 3347 +21251 3344 +44807 3341 +25064 3340 +50093 3339 +41707 3336 +48289 3329 +30818 3329 +7990 3323 +4228 3322 +27849 3320 +21744 3319 +43806 3316 +11127 3315 +22252 3313 +32341 3312 +42082 3309 +14012 3308 +44954 3304 +34258 3301 +18872 3299 +43216 3296 +44154 3295 +40778 3295 +26501 3294 +28012 3294 +44334 3294 +49945 3293 +44238 3293 +38460 3293 +21075 3291 +20621 3289 +44836 3288 +33409 3285 +49036 3284 +34805 3282 +39658 3281 +48522 3281 +35760 3275 +18604 3274 +33420 3274 +35265 3273 +45755 3267 +33447 3263 +35102 3258 +38040 3253 +33792 3252 +27983 3251 +31073 3250 +46257 3247 +18245 3245 +15759 3243 +40806 3237 +35500 3235 +3322 3234 +31254 3229 +20480 3229 +40781 3228 +41808 3227 +20255 3224 +35420 3223 
+35147 3223 +8370 3220 +49893 3212 +41709 3209 +17643 3208 +39070 3207 +48402 3207 +33120 3207 +39232 3207 +20184 3206 +22873 3206 +13600 3203 +40063 3201 +45872 3200 +47118 3200 +48933 3196 +23111 3194 +35058 3190 +35488 3184 +28938 3184 +42976 3183 +19237 3182 +36356 3178 +49608 3177 +26104 3175 +46932 3174 +43303 3174 +49850 3174 +18865 3173 +43979 3173 +44090 3171 +32368 3167 +22935 3167 +49506 3166 +44217 3164 +28525 3162 +44177 3162 +35538 3160 +40310 3157 +30444 3156 +44450 3153 +35433 3152 +18441 3151 +32229 3147 +45160 3144 +47120 3143 +23652 3141 +29485 3140 +30467 3138 +6552 3135 +31008 3134 +46331 3131 +45866 3131 +6876 3130 +41436 3130 +22314 3125 +38542 3125 +32247 3121 +15485 3121 +16891 3119 +48641 3118 +38764 3117 +7047 3113 +35172 3110 +42289 3109 +42184 3108 +23321 3107 +34032 3103 +32963 3102 +36196 3102 +37046 3101 +3062 3100 +49496 3095 +37653 3093 +48650 3091 +13577 3080 +9681 3079 +40135 3078 +16818 3065 +28326 3064 +49116 3064 +48346 3060 +29565 3060 +36567 3058 +3179 3056 +44825 3056 +25850 3055 +31467 3050 +43214 3047 +24400 3047 +44632 3047 +10697 3047 +24852 3044 +19012 3043 +11639 3039 +24728 3038 +37778 3037 +12063 3034 +27914 3034 +29335 3032 +20329 3030 +4921 3030 +36248 3028 +46512 3025 +38128 3024 +45528 3022 +2225 3022 +41080 3022 +39184 3021 +32818 3020 +48975 3013 +12343 3012 +5697 3011 +45169 3010 +7449 3010 +37359 3010 +38012 3010 +42646 3009 +22428 3008 +33209 3007 +37395 3006 +13003 3000 +20859 2999 +35277 2996 +28811 2996 +13668 2995 +46010 2993 +42660 2993 +47445 2991 +41780 2991 +35001 2990 +17527 2986 +35528 2984 +26715 2983 +22305 2981 +16151 2980 +46679 2978 +39986 2978 +36767 2975 +2103 2974 +22417 2974 +36857 2973 +5144 2969 +41911 2968 +35533 2967 +33643 2967 +32275 2964 +41382 2964 +9981 2963 +13086 2962 +19306 2959 +46115 2958 +45311 2952 +23839 2948 +17174 2947 +38052 2942 +28865 2940 +44195 2940 +40792 2934 +47046 2933 +27950 2933 +38106 2932 +31579 2930 +25102 2921 +169 2921 +22678 2920 +39688 2920 +37799 2919 +26941 2918 +36970 2915 +19351 2912 +19857 2908 +30715 2907 +39355 2904 +35494 2902 +23360 2902 +45195 2899 +32467 2898 +41357 2895 +32197 2895 +11549 2894 +37693 2894 +14501 2891 +41111 2890 +22506 2887 +47064 2886 +43834 2884 +31619 2882 +27575 2881 +12467 2879 +39789 2876 +45147 2875 +14095 2875 +5319 2874 +20660 2872 +44779 2872 +48845 2872 +35041 2871 +10864 2871 +25490 2870 +22453 2869 +32418 2866 +38277 2862 +38577 2858 +46332 2857 +16166 2856 +23414 2849 +26327 2845 +40927 2843 +23289 2842 +32570 2841 +49883 2841 +38865 2839 +37597 2838 +11573 2837 +46757 2836 +33746 2834 +38816 2833 +45490 2833 +49885 2833 +29863 2832 +21828 2831 +44012 2829 +36112 2827 +49806 2821 +37422 2819 +48728 2817 +42120 2816 +39354 2815 +27945 2814 +47384 2810 +24503 2810 +19282 2807 +26232 2807 +49046 2805 +16589 2804 +38392 2803 +38354 2801 +3330 2800 +48700 2799 +45522 2798 +30205 2792 +37262 2790 +48107 2788 +9762 2784 +34582 2782 +19580 2780 +24087 2778 +30661 2776 +33870 2776 +38292 2774 +39516 2773 +47558 2772 +34889 2764 +38067 2759 +26623 2758 +43753 2758 +25925 2753 +27169 2749 +25714 2746 +50235 2745 +43217 2744 +37978 2742 +28457 2742 +27292 2742 +43695 2741 +29719 2737 +44608 2730 +37157 2729 +21384 2727 +35029 2727 +35558 2726 +48938 2726 +22367 2725 +40010 2725 +23025 2722 +42173 2719 +41068 2715 +23745 2715 +47145 2712 +44952 2711 +46954 2708 +23269 2708 +38281 2705 +32689 2705 +30487 2705 +14980 2703 +16141 2698 +38904 2694 +31966 2693 +37031 2693 +41203 2690 +49615 2690 +27510 2686 +30753 2684 +18566 2683 +19324 2677 +35470 
2677 +1792 2677 +33114 2676 +42522 2674 +39323 2668 +47084 2666 +49763 2665 +17414 2664 +38448 2664 +49513 2664 +20869 2663 +4181 2663 +4209 2662 +47006 2662 +8709 2661 +37177 2655 +17022 2654 +21974 2651 +26142 2650 +21340 2647 +22203 2644 +34111 2640 +40099 2638 +34787 2636 +31077 2635 +28167 2633 +47030 2629 +37612 2628 +47174 2628 +15817 2626 +18699 2626 +48443 2626 +40257 2625 +35841 2625 +37156 2623 +6367 2622 +34727 2622 +40613 2621 +42216 2621 +49427 2617 +30293 2616 +44110 2614 +38269 2614 +16897 2613 +45537 2613 +37200 2612 +38235 2611 +40895 2610 +5541 2609 +49071 2608 +35693 2608 +45824 2606 +45135 2606 +39759 2604 +41807 2603 +11610 2603 +48162 2601 +8723 2595 +33910 2594 +4052 2584 +22887 2584 +49832 2584 +44711 2580 +34095 2577 +31301 2576 +16273 2575 +48147 2570 +37511 2568 +980 2568 +41776 2566 +35476 2563 +21471 2561 +46554 2560 +47469 2555 +19526 2555 +37411 2553 +19875 2549 +41972 2548 +32713 2547 +41256 2547 +1591 2545 +18748 2544 +25180 2543 +43706 2542 +30249 2541 +34876 2540 +23081 2540 +44163 2539 +47506 2539 +46471 2538 +9146 2537 +18796 2532 +16516 2529 +49337 2528 +22755 2528 +43583 2524 +30585 2519 +26759 2514 +4840 2510 +42974 2509 +42064 2508 +4766 2507 +44453 2506 +48658 2505 +31266 2505 +31447 2504 +39994 2501 +50030 2499 +28620 2499 +36379 2495 +38571 2493 +18022 2493 +43691 2492 +33986 2490 +44189 2487 +5525 2485 +34181 2480 +19411 2479 +36321 2474 +49505 2474 +758 2471 +36361 2470 +27528 2468 +32785 2467 +38925 2467 +49228 2467 +19382 2464 +44930 2463 +26011 2462 +31181 2462 +41375 2460 +49342 2458 +45492 2457 +30728 2455 +41033 2454 +42451 2453 +22180 2453 +18109 2452 +46495 2452 +31542 2445 +30599 2444 +47951 2441 +36670 2437 +41993 2437 +36621 2434 +34142 2433 +23157 2430 +36017 2425 +40117 2425 +35887 2424 +50163 2424 +47651 2424 +30216 2421 +48744 2419 +31512 2418 +35040 2416 +35847 2416 +23118 2414 +44046 2409 +48469 2408 +47764 2406 +21317 2405 +7168 2404 +41729 2404 +29510 2402 +49137 2400 +21055 2399 +30553 2396 +39194 2393 +22501 2390 +11792 2390 +38946 2385 +33768 2384 +29904 2383 +42558 2380 +25472 2376 +30312 2376 +48900 2375 +25380 2374 +39175 2373 +49795 2372 +34495 2371 +45454 2369 +20004 2369 +49174 2369 +17410 2368 +45672 2366 +39814 2364 +8707 2363 +30782 2363 +11884 2362 +21689 2362 +23870 2359 +21920 2356 +34804 2352 +22093 2347 +5208 2346 +9065 2345 +43145 2344 +14671 2342 +19515 2340 +13783 2337 +14961 2335 +45689 2335 +31631 2334 +46156 2333 +31838 2332 +29905 2331 +45724 2328 +41372 2325 +34720 2320 +47472 2317 +34932 2316 +36690 2316 +40452 2316 +33583 2314 +45519 2314 +24871 2310 +48505 2309 +43786 2309 +47708 2306 +46479 2306 +46121 2303 +40574 2302 +47010 2299 +37199 2298 +22022 2298 +24319 2296 +40246 2294 +44974 2290 +30347 2288 +45803 2286 +47086 2283 +39724 2283 +28618 2280 +39065 2278 +5641 2276 +30604 2275 +10142 2272 +21476 2272 +23257 2270 +40155 2269 +40259 2268 +28968 2265 +24771 2263 +46136 2261 +41947 2260 +36658 2257 +13557 2252 +27985 2250 +34354 2250 +48499 2249 +27600 2246 +27167 2246 +44448 2245 +8964 2243 +17750 2242 +16495 2239 +22542 2239 +47701 2239 +46947 2237 +49358 2237 +42624 2235 +49226 2234 +28198 2231 +48321 2230 +40485 2230 +41100 2230 +10778 2229 +23979 2228 +34196 2227 +21864 2226 +35705 2226 +43702 2225 +31844 2223 +39890 2221 +33725 2221 +40971 2220 +19858 2218 +36370 2217 +33912 2214 +10967 2210 +159 2209 +25178 2208 +8682 2204 +12731 2204 +31179 2204 +48760 2201 +47720 2200 +45876 2196 +42855 2195 +36127 2195 +34340 2193 +39910 2190 +28365 2187 +22330 2183 +6527 2182 +846 2179 +50249 2178 
+17635 2177 +38155 2173 +32212 2170 +37859 2169 +28656 2165 +37633 2163 +45662 2163 +39971 2161 +40541 2161 +28762 2160 +48956 2160 +30856 2159 +49896 2157 +48806 2157 +21412 2155 +48869 2155 +49630 2154 +27703 2151 +32230 2150 +42986 2148 +49214 2145 +49665 2142 +43000 2141 +24861 2141 +31660 2141 +44405 2140 +42719 2140 +48369 2139 +39021 2136 +7879 2135 +49905 2132 +44208 2132 +37195 2131 +3679 2129 +47068 2128 +27275 2127 +42758 2124 +37665 2123 +47835 2120 +20290 2116 +42169 2115 +43049 2111 +45286 2111 +41052 2109 +11774 2107 +48523 2100 +32667 2100 +46237 2100 +45351 2098 +49504 2097 +17269 2092 +48720 2091 +36253 2089 +21017 2089 +42867 2089 +23004 2088 +26047 2088 +32734 2081 +46890 2080 +28624 2079 +36408 2078 +40413 2074 +42530 2074 +33044 2069 +32573 2068 +29250 2066 +45008 2065 +49374 2064 +15931 2062 +45908 2062 +47649 2061 +37910 2058 +19890 2057 +44971 2052 +49050 2050 +48362 2047 +10882 2047 +47063 2045 +49925 2036 +12126 2035 +5785 2034 +41444 2034 +46165 2034 +46582 2029 +40591 2028 +11771 2024 +40053 2022 +47732 2022 +9454 2020 +38667 2019 +36044 2019 +48285 2019 +34460 2019 +33920 2018 +46787 2017 +13715 2016 +25529 2015 +4322 2014 +25224 2013 +9201 2012 +39500 2012 +11139 2008 +14188 2005 +33111 2005 +26575 2000 +41753 1998 +19469 1998 +9959 1996 +48452 1995 +30640 1995 +39377 1994 +23595 1993 +24376 1992 +14092 1991 +43717 1990 +28337 1989 +48183 1987 +30167 1986 +25348 1985 +11405 1984 +41099 1983 +48375 1982 +16791 1982 +22046 1980 +23320 1978 +44010 1977 +28794 1974 +36916 1974 +22759 1974 +21876 1974 +28758 1973 +48046 1972 +16883 1967 +20560 1965 +30626 1965 +16878 1964 +45356 1960 +15798 1960 +31364 1959 +15566 1959 +30475 1958 +19182 1956 +20748 1953 +12376 1952 +13291 1952 +22793 1951 +18883 1950 +40356 1949 +50247 1947 +35523 1945 +46621 1944 +18237 1943 +29881 1941 +37430 1940 +41519 1938 +49434 1936 +39044 1936 +47656 1934 +31327 1933 +21583 1933 +28134 1933 +26947 1933 +39000 1931 +38860 1929 +44015 1923 +45507 1923 +26945 1922 +44797 1921 +28707 1920 +13726 1916 +9418 1914 +29829 1913 +40158 1912 +137 1911 +33000 1910 +4907 1909 +38649 1909 +42381 1908 +49834 1907 +35039 1904 +21058 1902 +32014 1901 +45364 1899 +45163 1899 +36918 1896 +36406 1895 +48617 1894 +5512 1894 +23907 1893 +43037 1890 +19667 1890 +32892 1889 +2559 1886 +38696 1882 +32080 1880 +22833 1877 +21756 1876 +11805 1875 +32259 1872 +35975 1872 +30201 1871 +45249 1867 +13328 1867 +22402 1864 +13292 1864 +36545 1863 +49322 1862 +1147 1857 +34013 1857 +44140 1856 +27809 1856 +35307 1855 +26410 1852 +43857 1849 +42973 1848 +34098 1848 +44867 1847 +19990 1845 +11967 1843 +46386 1843 +23381 1836 +31264 1832 +47292 1829 +42866 1827 +33030 1826 +35540 1819 +20608 1818 +39149 1818 +44625 1817 +46238 1814 +48763 1812 +26292 1811 +35453 1810 +43034 1807 +29886 1806 +24957 1806 +47768 1804 +10063 1804 +35611 1802 +46846 1800 +22130 1799 +37093 1797 +28899 1796 +43789 1795 +12676 1795 +38528 1790 +47160 1788 +33566 1787 +37498 1785 +40558 1785 +45554 1776 +1421 1775 +5294 1775 +17358 1774 +18253 1773 +27444 1772 +44728 1769 +19028 1768 +48344 1763 +12567 1762 +27670 1758 +2235 1757 +42915 1753 +35861 1752 +43475 1751 +25895 1750 +7479 1750 +22973 1742 +43267 1739 +38776 1738 +45170 1737 +50227 1736 +46651 1736 +42929 1734 +16892 1733 +49346 1731 +37133 1729 +37350 1729 +21943 1728 +22766 1726 +4011 1725 +49865 1725 +39621 1725 +35656 1721 +7450 1720 +49213 1719 +49877 1715 +25748 1715 +41115 1713 +29743 1711 +23193 1707 +43422 1705 +31347 1705 +33810 1704 +25275 1703 +27674 1702 +15090 1701 +39157 
1697 +33648 1695 +42636 1694 +42063 1694 +20746 1694 +31913 1693 +24807 1691 +14859 1690 +44402 1689 +14908 1689 +40261 1688 +13736 1685 +49168 1683 +40267 1683 +11907 1683 +36655 1681 +39643 1681 +16410 1679 +39316 1677 +49819 1674 +19463 1673 +40507 1673 +31246 1673 +46448 1673 +42316 1671 +31687 1671 +9466 1671 +49603 1670 +7726 1667 +34156 1665 +25471 1663 +15020 1663 +34301 1661 +1947 1661 +36181 1660 +31326 1657 +30266 1656 +43458 1656 +41964 1650 +38469 1650 +44293 1649 +33692 1647 +43380 1643 +26678 1643 +43433 1642 +31758 1641 +47400 1639 +2477 1638 +36307 1638 +48991 1637 +10549 1636 +32332 1634 +16268 1633 +28966 1632 +30656 1632 +47226 1629 +46979 1629 +19373 1628 +35036 1626 +38929 1624 +6312 1624 +42615 1622 +45819 1621 +47863 1620 +47195 1619 +26933 1617 +46695 1615 +27711 1614 +18686 1613 +28394 1613 +33386 1612 +42524 1612 +48205 1608 +32360 1605 +9318 1604 +45626 1603 +48526 1601 +29710 1601 +25874 1601 +14610 1599 +3472 1597 +44309 1594 +39922 1594 +19510 1590 +38381 1590 +34121 1588 +28253 1587 +49660 1586 +29773 1585 +32048 1582 +34219 1582 +45664 1581 +46464 1579 +5271 1578 +48911 1578 +22110 1573 +42720 1572 +34018 1569 +38335 1569 +44704 1569 +15149 1568 +45137 1567 +42172 1566 +35339 1563 +43910 1562 +45250 1560 +13392 1560 +23022 1559 +33012 1559 +48277 1557 +16333 1553 +23764 1553 +19202 1553 +19060 1551 +30049 1550 +43993 1546 +32432 1541 +41657 1541 +12374 1540 +31905 1536 +45032 1536 +43408 1534 +39224 1531 +42717 1531 +44423 1528 +19876 1528 +12054 1524 +13412 1523 +34816 1522 +28383 1518 +30295 1516 +39298 1514 +21142 1513 +30327 1513 +19953 1512 +39850 1511 +21563 1505 +17653 1504 +5099 1502 +48123 1499 +30268 1498 +39181 1497 +45297 1495 +36854 1494 +21947 1494 +32639 1494 +21271 1493 +18200 1493 +45748 1491 +43961 1488 +7847 1488 +48534 1487 +43073 1486 +29826 1485 +49991 1483 +42492 1482 +3603 1481 +40704 1480 +43024 1476 +45739 1475 +35585 1471 +41222 1470 +3728 1468 +30360 1465 +40489 1465 +37455 1464 +29712 1463 +30136 1462 +28112 1462 +21052 1459 +33699 1459 +31676 1457 +44036 1454 +43729 1454 +39310 1453 +43078 1453 +33250 1451 +15926 1446 +44506 1442 +29869 1440 +28156 1440 +44520 1438 +47690 1435 +142 1433 +33223 1432 +48761 1432 +34775 1431 +11900 1431 +39187 1430 +47746 1429 +29316 1428 +42468 1428 +36365 1427 +27823 1424 +30562 1424 +31263 1423 +40031 1422 +3429 1421 +30159 1420 +44415 1418 +11039 1418 +47066 1415 +47090 1415 +47212 1412 +20491 1409 +34402 1408 +5008 1408 +28924 1406 +37706 1404 +26227 1404 +45384 1401 +47991 1401 +11411 1399 +20972 1399 +41049 1399 +19662 1399 +38211 1399 +46858 1397 +21314 1391 +13780 1391 +42360 1391 +33705 1387 +49736 1386 +36563 1386 +44013 1384 +37337 1383 +46923 1379 +49312 1379 +23242 1377 +46763 1375 +31490 1375 +49929 1373 +35944 1370 +11482 1370 +38300 1369 +50155 1366 +32590 1363 +43825 1362 +47881 1360 +46349 1360 +48585 1360 +40156 1359 +47797 1359 +37141 1358 +32326 1355 +41305 1355 +12630 1351 +45767 1350 +34945 1350 +29698 1349 +19438 1348 +25182 1344 +24457 1341 +43444 1340 +23473 1339 +24935 1339 +27422 1338 +49035 1338 +40403 1336 +41818 1334 +35600 1334 +6284 1331 +7665 1331 +36171 1328 +3343 1326 +45887 1325 +40092 1324 +23511 1321 +47215 1320 +23626 1319 +11833 1317 +16080 1317 +49267 1316 +46581 1313 +33756 1308 +45251 1308 +25787 1307 +16041 1304 +27901 1302 +36382 1300 +33885 1300 +37906 1296 +19529 1295 +45907 1294 +37239 1293 +3282 1290 +38834 1290 +29613 1290 +18170 1289 +45533 1288 +31221 1285 +6852 1285 +36698 1285 +19395 1285 +40406 1285 +42519 1285 +23106 1283 +47947 1282 +33180 
1281 +46570 1281 +37508 1278 +42300 1277 +27813 1275 +8485 1274 +39391 1274 +45750 1272 +27730 1271 +42680 1270 +45999 1270 +23305 1269 +36695 1267 +47546 1266 +11641 1263 +28401 1263 +50084 1263 +26825 1258 +45302 1254 +14524 1254 +33295 1253 +12100 1253 +38953 1253 +34697 1252 +39222 1252 +38021 1252 +36560 1250 +36180 1249 +3422 1248 +40456 1246 +16100 1245 +38543 1242 +37390 1240 +11037 1238 +25216 1235 +42479 1234 +26866 1234 +48835 1233 +34708 1232 +47393 1231 +43727 1229 +39827 1228 +29557 1227 +40069 1227 +26796 1226 +19455 1226 +44197 1225 +34640 1225 +10748 1224 +2155 1224 +27765 1216 +29241 1215 +43117 1215 +47795 1212 +47738 1211 +49694 1211 +20046 1211 +47864 1209 +46344 1208 +33623 1206 +43763 1204 +14060 1200 +40161 1199 +24539 1196 +16648 1195 +29281 1193 +17912 1193 +22065 1188 +43971 1188 +37863 1186 +35061 1186 +25618 1184 +12927 1183 +18125 1181 +37523 1179 +37101 1179 +41333 1177 +160 1176 +12241 1175 +14804 1175 +17720 1174 +27370 1173 +41248 1170 +17816 1169 +46746 1167 +39520 1165 +38271 1163 +34255 1163 +11727 1162 +33232 1162 +43879 1162 +21950 1162 +39098 1161 +22944 1161 +36750 1161 +10779 1160 +34519 1160 +25918 1158 +45281 1157 +36461 1154 +7503 1153 +36297 1152 +29572 1152 +37650 1150 +7180 1149 +30351 1149 +36764 1147 +23046 1145 +32756 1144 +48472 1143 +29472 1143 +5227 1142 +14865 1140 +12813 1140 +28243 1137 +8943 1137 +37848 1136 +19969 1136 +47527 1135 +34261 1132 +21903 1129 +35260 1127 +14560 1126 +42030 1126 +41592 1124 +47171 1124 +12426 1122 +46956 1122 +12071 1121 +20662 1119 +21711 1117 +36713 1117 +48227 1116 +22781 1115 +30660 1115 +43551 1113 +43328 1111 +41504 1109 +18163 1108 +31431 1106 +48873 1103 +31051 1102 +38184 1102 +40816 1097 +37345 1096 +35103 1095 +34704 1094 +27764 1092 +41096 1088 +47429 1088 +35561 1087 +41468 1086 +49551 1085 +43089 1083 +27570 1082 +47945 1078 +18477 1077 +49146 1077 +6847 1076 +16208 1073 +16934 1072 +16281 1067 +37420 1067 +24239 1065 +50143 1065 +2698 1064 +39786 1063 +37168 1061 +47082 1060 +45340 1059 +38124 1058 +16922 1056 +46310 1054 +11568 1053 +42527 1053 +32131 1053 +29460 1053 +44082 1052 +32849 1052 +18083 1052 +46796 1049 +49056 1049 +13486 1045 +30719 1041 +46966 1039 +28255 1036 +23138 1035 +22273 1035 +12486 1030 +45145 1028 +49070 1027 +37380 1027 +39695 1025 +41661 1024 +47611 1019 +48144 1019 +22345 1018 +44204 1017 +45640 1017 +45115 1017 +42752 1016 +31944 1016 +40674 1015 +41343 1014 +8762 1014 +45148 1013 +44575 1011 +33843 1011 +30667 1010 +43266 1008 +44948 1006 +33252 1005 +26911 1005 +35930 1004 +42654 1003 +42245 1002 +21424 999 +33151 998 +45865 997 +44052 996 +40822 996 +23294 993 +31386 993 +12640 989 +9705 989 +20786 986 +16471 985 +29495 984 +49535 984 +12953 982 +25915 982 +9020 978 +11647 976 +1209 976 +13211 972 +47139 966 +43966 966 +22174 961 +41002 960 +46976 959 +39258 959 +46086 958 +39611 958 +43922 956 +48080 956 +39231 955 +35407 955 +19820 951 +21324 950 +19421 950 +47826 949 +45442 949 +33031 946 +48997 943 +24714 941 +33221 940 +23001 940 +6043 939 +41585 939 +26076 938 +27156 935 +47715 934 +40561 931 +29325 931 +48313 931 +48364 929 +47340 928 +12045 927 +37605 925 +36853 922 +30751 920 +12675 919 +47671 915 +40041 914 +38165 911 +9497 911 +36752 911 +10790 908 +7948 906 +15697 905 +44165 904 +33213 903 +28544 903 +35707 902 +44917 901 +29848 900 +12736 898 +26475 896 +31965 892 +37096 892 +25195 892 +48958 891 +45992 889 +38963 888 +38218 887 +27632 887 +42655 880 +36119 879 +32403 878 +25001 873 +37771 873 +17845 872 +18670 872 +29312 869 +41726 862 +15139 
862 +25597 862 +43488 859 +21481 858 +25081 858 +40223 857 +45474 856 +22718 853 +27972 853 +46497 850 +32003 849 +21737 847 +24029 847 +47676 846 +31023 846 +37695 843 +21477 843 +37955 842 +42669 840 +32092 840 +43549 838 +39666 836 +39482 833 +19682 833 +46764 832 +7535 832 +6927 832 +14367 831 +28165 830 +25745 830 +34222 828 +31724 827 +143 824 +42000 823 +48670 823 +17158 821 +11839 820 +33524 817 +39105 816 +25902 815 +29554 815 +41791 815 +22316 814 +44646 813 +32291 809 +49426 805 +37456 803 +41309 803 +43015 803 +38551 800 +42565 799 +2549 797 +44829 796 +9263 796 +38461 795 +43889 795 +35269 794 +29994 794 +36003 794 +31917 793 +22270 791 +12424 791 +46274 791 +33470 790 +33778 789 +46092 788 +46036 785 +41335 784 +31456 784 +21775 783 +15211 783 +15081 782 +3786 781 +29841 781 +16116 780 +43297 777 +43903 776 +34976 772 +19587 772 +4431 772 +40391 771 +49201 771 +21763 769 +21091 768 +45298 767 +45990 766 +48265 763 +37374 760 +29847 760 +42924 759 +39763 756 +7961 755 +41150 753 +37014 753 +30298 751 +30838 750 +144 748 +175 748 +40664 746 +9202 745 +15362 745 +38626 745 +39415 744 +36875 741 +15886 741 +5392 741 +15306 741 +23497 739 +25410 738 +45398 736 +14030 735 +38430 734 +28133 734 +22758 733 +18189 733 +34284 730 +30138 729 +46741 728 +23991 728 +29940 727 +36584 727 +37423 727 +49568 726 +20379 725 +47903 722 +8795 721 +6141 719 +31813 719 +48678 717 +22341 716 +46202 715 +27802 714 +41293 713 +27352 711 +44148 709 +37961 708 +29785 706 +40477 705 +29412 705 +21948 704 +43900 701 +40367 699 +22446 699 +43718 697 +42684 692 +37772 690 +45911 688 +46866 686 +30831 685 +8728 685 +36685 685 +10108 684 +33918 684 +39618 682 +48746 681 +39317 681 +34585 680 +3556 679 +21955 678 +15790 674 +6336 673 +9603 673 +47505 672 +46189 668 +25226 668 +23719 667 +49518 665 +37083 664 +39691 661 +43238 658 +25295 656 +6480 653 +23884 653 +48688 652 +20645 650 +36310 650 +48972 650 +31424 647 +48702 646 +34534 642 +41216 642 +30916 640 +33084 639 +37588 639 +45358 638 +40265 637 +49007 636 +30929 636 +8815 635 +44274 634 +8955 633 +38145 630 +43357 628 +37581 627 +29705 626 +43962 626 +33116 626 +22934 624 +46506 623 +42376 622 +36142 622 +11966 622 +32284 620 +36278 619 +9841 617 +29966 616 +38105 616 +17433 615 +19779 614 +170 613 +25060 613 +7280 612 +36737 612 +29415 612 +24679 611 +22784 611 +47762 610 +982 608 +48416 606 +9468 604 +46083 603 +42348 602 +43023 599 +29047 599 +46532 598 +29582 598 +34237 597 +24442 597 +22263 596 +41146 595 +12712 594 +45617 593 +27643 593 +47078 593 +39893 591 +49704 589 +49339 589 +32288 588 +49969 587 +37176 586 +43913 586 +24032 585 +15474 583 +44557 583 +2468 581 +43055 580 +34580 580 +37190 576 +8773 576 +19841 575 +46118 573 +35266 573 +43291 571 +43491 569 +22906 568 +13700 567 +37266 567 +28657 566 +10523 566 +38593 566 +6710 565 +31175 565 +13562 562 +40717 561 +23854 555 +38574 555 +48553 554 +34758 554 +29740 553 +28235 552 +9882 552 +50101 549 +39747 547 +37527 547 +29953 545 +45228 545 +43095 543 +44627 542 +40748 540 +40948 539 +14077 535 +41092 535 +42314 535 +43636 531 +37763 530 +29212 529 +42757 527 +46777 526 +42249 524 +28350 524 +23224 523 +27032 522 +41215 521 +15083 519 +14726 519 +28360 518 +44051 518 +16148 517 +16792 515 +13298 515 +36338 514 +48393 513 +16150 512 +31854 512 +15661 511 +43648 511 +31817 511 +36470 510 +21316 507 +47581 506 +35430 506 +38463 505 +48774 505 +49135 504 +48801 503 +27246 502 +32015 502 +21364 501 +19785 498 +45520 498 +24122 497 +35067 497 +5622 497 +15961 496 +48557 493 +23287 492 +10144 491 
+10097 491 +26867 487 +41888 487 +49409 484 +24336 484 +39488 483 +25248 482 +20398 481 +38892 479 +20322 479 +16626 477 +35508 476 +26791 476 +40290 475 +49546 475 +16253 474 +43768 473 +43197 473 +14099 470 +47418 469 +9287 468 +14692 468 +25053 467 +45227 466 +42164 466 +14512 466 +28632 465 +45986 464 +30134 462 +38519 462 +28955 460 +29636 458 +33489 457 +44057 457 +20036 457 +23376 456 +15272 455 +36166 452 +29577 450 +32541 449 +24373 448 +24689 448 +39737 447 +5571 447 +10160 446 +49582 446 +49015 446 +44755 444 +23711 442 +42804 441 +28013 441 +40672 440 +37143 440 +35439 439 +8346 439 +35050 437 +15116 436 +36447 436 +44926 435 +16323 434 +31768 434 +44174 431 +14777 430 +15853 430 +21451 428 +41340 428 +47232 428 +46999 427 +35799 425 +35555 425 +25131 424 +19927 423 +41349 423 +8983 421 +34991 417 +20213 416 +34949 415 +33682 415 +44332 414 +29589 413 +49771 413 +49129 412 +24186 412 +33803 411 +48893 410 +25645 408 +19415 407 +44547 407 +35235 405 +39321 404 +40786 403 +37389 401 +34650 401 +45717 399 +20590 397 +8994 396 +26054 393 +48031 393 +13697 391 +2955 390 +40266 386 +1977 385 +17405 384 +2115 382 +3207 382 +45082 381 +3156 381 +32517 381 +43102 381 +40537 380 +43375 378 +47932 378 +30109 377 +48737 376 +16165 373 +19629 371 +30072 370 +37485 368 +3286 368 +48995 367 +24493 367 +25465 366 +43848 366 +21253 365 +24440 364 +49607 359 +37227 359 +48541 358 +41573 358 +40386 353 +7601 352 +29851 352 +22133 352 +29659 352 +49011 351 +45218 351 +18004 350 +40720 349 +37467 346 +42783 345 +32398 343 +49960 342 +35306 342 +34345 341 +27007 341 +12404 340 +42543 339 +42638 338 +49167 335 +31208 334 +45784 333 +33296 333 +20512 332 +32742 332 +18356 331 +44587 331 +30165 329 +41433 327 +40364 327 +9049 326 +2918 326 +17683 325 +17681 325 +24077 324 +25611 324 +6166 323 +39681 323 +49855 323 +44256 322 +39333 322 +49073 320 +31937 320 +37509 317 +43077 316 +15842 316 +33384 315 +25497 315 +30965 315 +48497 315 +16193 315 +47987 313 +22725 313 +46140 313 +2804 312 +32524 311 +20532 310 +47939 310 +28542 310 +39192 309 +10125 308 +17773 305 +47423 305 +46268 301 +24363 301 +20513 300 +26498 300 +50179 299 +36469 298 +34543 297 +20115 297 +28696 297 +36786 296 +26712 294 +31090 293 +42062 290 +23131 287 +38326 287 +34373 287 +26095 286 +25626 286 +46424 285 +15506 285 +9000 285 +31936 283 +30800 282 +49058 281 +27551 280 +44648 276 +12240 276 +18803 275 +7589 274 +41641 274 +40109 274 +1896 273 +47000 273 +32518 272 +45379 272 +45635 272 +33529 272 +13765 270 +40420 270 +12845 269 +16201 268 +33465 268 +24001 267 +49249 267 +41853 267 +4010 266 +8418 265 +49813 265 +25581 264 +43481 263 +46788 262 +41629 262 +24710 261 +35144 261 +39907 260 +47021 260 +20801 260 +25128 259 +30812 259 +48304 258 +18115 257 +48404 256 +33829 256 +23846 255 +6112 254 +5997 254 +6987 253 +7677 253 +26503 253 +44161 253 +46491 252 +48231 251 +11910 251 +37669 250 +35702 249 +49464 248 +41323 248 +37233 247 +26998 246 +36828 246 +24806 244 +32207 243 +30762 242 +42637 242 +16103 242 +172 242 +49472 242 +25084 241 +7105 239 +19791 238 +41974 237 +45335 237 +31052 236 +37426 236 +13177 235 +38894 235 +37495 234 +25549 234 +25795 234 +43801 234 +43282 233 +13296 231 +38149 231 +24466 230 +28264 230 +34525 230 +48448 228 +36581 227 +36521 227 +49082 225 +48318 225 +8755 225 +36792 223 +43353 223 +19039 223 +43302 222 +20677 220 +47307 220 +32310 220 +31732 220 +19021 219 +17401 219 +36922 219 +12717 218 +33426 217 +46904 217 +33309 216 +33795 215 +29993 214 +4242 214 +32546 212 +35818 208 +47175 208 +11896 
208 +34934 208 +48908 208 +42367 208 +43769 207 +50159 207 +35514 205 +45495 204 +43213 203 +2590 203 +4895 203 +27542 202 +39996 201 +35063 201 +47569 200 +40283 198 +37817 198 +1543 197 +37757 197 +14592 196 +27733 195 +39394 195 +13979 194 +12662 193 +2887 193 +42287 191 +22241 191 +32891 189 +11548 189 +26642 188 +11480 187 +15755 186 +10060 186 +15913 186 +45144 185 +42026 185 +19227 184 +33761 184 +25207 182 +38122 182 +44916 181 +29113 181 +22831 179 +13945 179 +6681 178 +34901 178 +22322 178 +50108 177 +29164 176 +23544 174 +46545 174 +15351 173 +22496 173 +39742 173 +11606 173 +36796 173 +26229 171 +39115 171 +35572 170 +16959 169 +30716 168 +37642 167 +44651 167 +27852 167 +40073 167 +37913 167 +45975 166 +46968 166 +38362 165 +29446 165 +7359 164 +17473 163 +40012 163 +47258 163 +46677 162 +49390 162 +45150 162 +24022 161 +24898 160 +32239 160 +45434 159 +27509 159 +39703 159 +3587 157 +11974 157 +36726 156 +15685 155 +38653 154 +24618 154 +17532 153 +24973 151 +41386 151 +48944 151 +41678 151 +7804 149 +33991 148 +21959 146 +14531 145 +41441 144 +37412 144 +41658 143 +6399 142 +2941 142 +33893 141 +41849 140 +11919 140 +44912 139 +46541 138 +32088 137 +35604 137 +48610 137 +50033 137 +19476 137 +25926 136 +23330 136 +49843 136 +50001 135 +48999 134 +48458 134 +36301 133 +16646 132 +47271 132 +40111 132 +23329 132 +41313 131 +23728 130 +33207 128 +47542 127 +37082 127 +10269 126 +39434 126 +48204 125 +43667 124 +6533 122 +49273 122 +40361 122 +49324 121 +25589 121 +8438 120 +2432 120 +17761 120 +10052 120 +37435 119 +47981 118 +12869 116 +34607 115 +41006 113 +47596 113 +45123 112 +43246 110 +40719 110 +26700 110 +41945 109 +152 109 +16098 109 +6598 109 +47253 107 +46249 107 +29021 106 +45563 106 +47147 105 +45539 105 +35343 105 +43241 105 +50216 105 +43734 105 +35922 104 +29226 104 +44431 104 +221 104 +25970 104 +27293 103 +1841 103 +14468 103 +42311 103 +47559 102 +49643 102 +35069 101 +8115 101 +44233 100 +16303 100 +47282 99 +40629 99 +39008 99 +32509 98 +32917 98 +42396 98 +176 97 +36169 96 +48753 96 +10253 94 +40516 94 +3523 93 +48185 93 +49086 93 +44104 92 +31204 92 +43839 91 +27924 90 +17933 90 +49476 89 +36596 89 +46745 89 +44167 87 +30531 87 +47486 87 +40549 86 +35992 85 +34832 85 +33468 85 +46996 84 +40493 84 +43242 84 +7134 83 +11585 80 +22640 80 +37811 80 +42869 80 +39860 79 +25719 78 +23984 77 +11273 76 +41481 76 +20554 75 +13198 75 +28725 74 +42210 74 +155 73 +48727 73 +35318 72 +41365 71 +32511 70 +40345 70 +31708 69 +44872 69 +41832 69 +9968 69 +28670 69 +30478 67 +10298 67 +11885 66 +11737 66 +17787 66 +41939 65 +41868 65 +39467 65 +41538 64 +44546 63 +20503 62 +47682 61 +4060 61 +16068 60 +48457 59 +36473 59 +45449 59 +50113 58 +15040 58 +46110 58 +47614 58 +50116 57 +43313 57 +18945 57 +29795 57 +37858 56 +26534 55 +18433 55 +39886 55 +33490 54 +40415 54 +36704 53 +44785 53 +43899 51 +32865 51 +47530 51 +39364 50 +42535 49 +25887 49 +37981 49 +49527 49 +33937 48 +36940 48 +22315 47 +48874 47 +4204 47 +23596 47 +31478 47 +42035 46 +9286 46 +38016 46 +11689 46 +42785 45 +46222 45 +33994 45 +47794 45 +43897 45 +42877 45 +48953 44 +44686 44 +37545 44 +45435 43 +34386 42 +7260 42 +13171 42 +23926 41 +24307 40 +31820 40 +44392 39 +48600 38 +41230 38 +38390 38 +13150 37 +154 37 +38243 36 +29447 36 +44326 36 +29646 35 +19073 35 +15041 35 +45433 34 +42382 32 +38377 32 +47540 32 +47936 32 +47432 31 +49149 31 +30432 31 +39280 30 +48366 30 +12943 29 +37922 29 +14695 29 +13426 29 +16782 28 +49997 28 +36926 28 +23613 28 +22675 28 +35098 28 +6408 28 +33153 28 +31739 28 
+43518 27 +7782 27 +29752 27 +23363 26 +45823 26 +43053 26 +40236 24 +37787 24 +49029 24 +33023 24 +20804 24 +34103 24 +27013 23 +37991 23 +42943 23 +44033 23 +41380 22 +25698 22 +42750 22 +25362 22 +44555 22 +22039 20 +42983 20 +45762 20 +27006 20 +12677 19 +48219 19 +43394 19 +37662 19 +17553 18 +45422 18 +30439 18 +14341 18 +11592 17 +33929 17 +17629 17 +42889 17 +153 17 +14223 17 +43010 17 +32843 16 +48193 16 +35793 16 +36475 16 +31161 15 +16822 15 +47182 15 +44112 14 +34604 14 +31881 14 +46402 13 +36937 12 +28500 12 +24731 12 +27584 12 +43796 11 +36481 11 +40703 11 +19049 10 +44444 10 +4690 10 +42470 9 +41977 9 +36917 9 +46948 9 +10658 9 +38250 9 +43298 8 +45953 8 +36929 8 +42234 7 +38160 7 +47490 7 +40235 7 +5808 7 +45786 7 +36490 6 +5367 6 +27534 6 +21807 5 +36886 5 +39693 5 +30684 5 +37226 5 +15243 5 +34633 5 +22997 5 +25658 4 +45321 4 +8980 4 +47648 4 +34206 4 +43569 4 +36862 3 +49778 3 +45392 3 +42066 3 +36130 3 +46939 3 +6438 3 +34842 2 +48527 2 +38370 2 +34473 2 +40278 2 +20174 2 +5815 1 +9364 1 +39142 1 +47703 1 +49074 1 +31536 1 +14827 1 +23090 1 +43735 1 +24847 1 +40219 1 +32437 1 +31727 1 +124 0 +125 0 +173 0 +174 0 +177 0 +178 0 +179 0 +180 0 +181 0 +182 0 +183 0 +184 0 +185 0 +186 0 +187 0 +188 0 +189 0 +190 0 +191 0 +192 0 +193 0 +194 0 +195 0 +196 0 +197 0 +198 0 +199 0 +200 0 +201 0 +202 0 +203 0 +204 0 +205 0 +206 0 +207 0 +208 0 +209 0 +210 0 +211 0 +212 0 +213 0 +214 0 +215 0 +216 0 +217 0 +218 0 +219 0 +628 0 +1849 0 +4603 0 +5624 0 +8828 0 +11504 0 +12781 0 +17811 0 +17900 0 +18472 0 +22686 0 +22757 0 +23282 0 +23614 0 +23785 0 +24293 0 +24934 0 +25193 0 +25502 0 +25992 0 +28666 0 +29342 0 +29372 0 +30202 0 +30208 0 +30209 0 +30210 0 +30211 0 +30212 0 +30213 0 +30897 0 +30898 0 +30899 0 +30905 0 +30906 0 +31032 0 +31538 0 +31573 0 +31576 0 +31666 0 +31765 0 +31783 0 +31886 0 +31957 0 +32047 0 +32406 0 +33434 0 +33454 0 +33477 0 +33813 0 +34027 0 +34448 0 +34504 0 +34516 0 +35207 0 +35496 0 +35579 0 +36173 0 +36174 0 +36935 0 +36938 0 +37444 0 +37574 0 +37579 0 +37631 0 +37842 0 +38214 0 +39165 0 +39172 0 +39177 0 +39253 0 +39374 0 +39446 0 +39655 0 +39714 0 +39749 0 +39752 0 +39753 0 +39755 0 +39756 0 +39757 0 +39803 0 +39811 0 +39820 0 +39821 0 +39906 0 +40240 0 +40241 0 +40242 0 +41297 0 +41383 0 +41551 0 +42089 0 +42090 0 +42202 0 +42424 0 +42496 0 +42586 0 +42728 0 +43038 0 +43065 0 +43177 0 +43361 0 +43453 0 +44320 0 +45003 0 +45199 0 +45544 0 +45545 0 +45706 0 +46600 0 +47198 0 +47571 0 +47654 0 +47934 0 +48069 0 +48396 0 +49731 0 +49781 0 +50009 0 +50256 0 +madeupword0000 0 +madeupword0001 0 +madeupword0002 0 \ No newline at end of file diff --git a/qa_mdt/checkpoints/robertabase/merges.txt b/qa_mdt/checkpoints/robertabase/merges.txt new file mode 100644 index 0000000000000000000000000000000000000000..6ab4032dacb06f495710b1fdabd1e14281fed6d3 --- /dev/null +++ b/qa_mdt/checkpoints/robertabase/merges.txt @@ -0,0 +1,50001 @@ +#version: 0.2 +Ġ t +Ġ a +h e +i n +r e +o n +Ġt he +e r +Ġ s +a t +Ġ w +Ġ o +e n +Ġ c +i t +i s +a n +o r +e s +Ġ b +e d +Ġ f +in g +Ġ p +o u +Ġa n +a l +a r +Ġt o +Ġ m +Ġo f +Ġ in +Ġ d +Ġ h +Ġan d +i c +a s +l e +Ġt h +i on +o m +l l +en t +Ġ n +Ġ l +s t +Ġ re +v e +Ġ e +r o +l y +Ġb e +Ġ g +Ġ T +c t +Ġ S +i d +o t +Ġ I +u t +e t +Ġ A +Ġ is +Ġ on +i m +a m +o w +a y +a d +s e +Ġth at +Ġ C +i g +Ġf or +a c +Ġ y +v er +u r +Ġ u +l d +Ġs t +Ġ M +' s +Ġ he +Ġ it +at ion +it h +i r +c e +Ġy ou +i l +Ġ B +Ġw h +o l +Ġ P +Ġw ith +Ġ 1 +t er +c h +Ġa s +Ġw e +Ġ ( +n d +i ll +Ġ D +i f +Ġ 2 +a g +er s +k e +Ġ " +Ġ H +e m +Ġc on +Ġ W +Ġ R +he r +Ġw 
as +Ġ r +o d +Ġ F +u l +at e +Ġa t +r i +p p +o re +ĠT he +Ġs e +u s +Ġp ro +Ġh a +u m +Ġa re +Ġd e +a in +an d +Ġo r +ig h +es t +is t +a b +r om +Ġ N +t h +Ġc om +Ġ G +u n +o p +0 0 +Ġ L +Ġn ot +es s +Ġe x +Ġ v +re s +Ġ E +e w +it y +an t +Ġb y +e l +o s +or t +o c +q u +Ġf rom +Ġha ve +Ġs u +i ve +ou ld +Ġs h +Ġth is +n t +r a +p e +igh t +ar t +m ent +Ġa l +u st +en d +- - +al l +Ġ O +ac k +Ġc h +Ġ le +i es +re d +ar d +â Ģ +ou t +Ġ J +Ġa b +e ar +i v +al ly +ou r +o st +g h +p t +Ġp l +as t +Ġc an +a k +om e +u d +T he +Ġh is +Ġd o +Ġg o +Ġh as +g e +' t +Ġ U +r ou +Ġs a +Ġ j +Ġb ut +Ġw or +Ġa ll +e ct +Ġ k +am e +Ġw ill +o k +Ġw he +Ġthe y +id e +0 1 +f f +ic h +p l +t her +Ġt r +. . +Ġin t +i e +u re +ag e +Ġn e +i al +a p +in e +ic e +Ġm e +Ġo ut +an s +on e +on g +ion s +Ġwh o +Ġ K +Ġu p +Ġthe ir +Ġa d +Ġ 3 +Ġu s +at ed +ou s +Ġm ore +u e +o g +ĠS t +in d +i ke +Ġs o +im e +p er +. " +b er +i z +a ct +Ġon e +Ġsa id +Ġ - +a re +Ġyou r +c c +ĠT h +Ġc l +e p +a ke +ab le +i p +Ġcon t +Ġwh ich +i a +Ġ im +Ġab out +Ġwe re +ver y +u b +Ġh ad +Ġ en +Ġcom p +, " +ĠI n +Ġu n +Ġa g +i re +ac e +a u +ar y +Ġw ould +as s +r y +Ġ âĢ +c l +o ok +e re +s o +Ġ V +ig n +i b +Ġof f +Ġt e +v en +Ġ Y +i le +o se +it e +or m +Ġ2 01 +Ġre s +Ġm an +Ġp er +Ġo ther +or d +ul t +Ġbe en +Ġl ike +as e +an ce +k s +ay s +ow n +en ce +Ġd is +ct ion +Ġan y +Ġa pp +Ġs p +in t +res s +ation s +a il +Ġ 4 +ic al +Ġthe m +Ġhe r +ou nt +ĠC h +Ġa r +Ġ if +Ġthe re +Ġp e +Ġy ear +a v +Ġm y +Ġs ome +Ġwhe n +ou gh +ac h +Ġth an +r u +on d +ic k +Ġo ver +ve l +Ġ qu +Ċ Ċ +Ġs c +re at +re e +ĠI t +ou nd +p ort +Ġal so +Ġp art +f ter +Ġk n +Ġbe c +Ġt ime +en s +Ġ 5 +op le +Ġwh at +Ġn o +d u +m er +an g +Ġn ew +-- -- +Ġg et +or y +it ion +ing s +Ġj ust +Ġint o +Ġ 0 +ent s +o ve +t e +Ġpe ople +Ġp re +Ġit s +Ġre c +Ġt w +i an +ir st +ar k +or s +Ġwor k +ad e +o b +Ġs he +Ġo ur +w n +in k +l ic +Ġ1 9 +ĠH e +is h +nd er +au se +Ġh im +on s +Ġ [ +Ġ ro +f orm +i ld +at es +ver s +Ġon ly +o ll +Ġs pe +c k +e ll +am p +Ġa cc +Ġb l +i ous +ur n +f t +o od +Ġh ow +he d +Ġ ' +Ġa fter +a w +Ġat t +o v +n e +Ġpl ay +er v +ic t +Ġc ould +it t +Ġa m +Ġf irst +Ġ 6 +Ġa ct +Ġ $ +e c +h ing +u al +u ll +Ġcom m +o y +o ld +c es +at er +Ġf e +Ġbe t +w e +if f +Ġtw o +oc k +Ġb ack +) . +id ent +Ġu nder +rou gh +se l +x t +Ġm ay +rou nd +Ġp o +p h +is s +Ġd es +Ġm ost +Ġd id +Ġad d +j ect +Ġin c +f ore +Ġp ol +on t +Ġag ain +cl ud +ter n +Ġkn ow +Ġne ed +Ġcon s +Ġc o +Ġ . 
+Ġw ant +Ġse e +Ġ 7 +n ing +i ew +ĠTh is +c ed +Ġe ven +Ġin d +t y +ĠW e +at h +Ġthe se +Ġp r +Ġu se +Ġbec ause +Ġf l +n g +Ġn ow +ĠâĢ ĵ +c om +is e +Ġm ake +Ġthe n +ow er +Ġe very +ĠU n +Ġse c +os s +u ch +Ġe m +Ġ = +ĠR e +i ed +r it +Ġin v +le ct +Ġsu pp +at ing +Ġl ook +m an +pe ct +Ġ 8 +ro w +Ġb u +Ġwhe re +if ic +Ġyear s +i ly +Ġd iff +Ġsh ould +Ġre m +T h +I n +Ġe v +d ay +' re +ri b +Ġre l +s s +Ġde f +Ġr ight +Ġs y +) , +l es +00 0 +he n +Ġth rough +ĠT r +_ _ +Ġw ay +Ġd on +Ġ , +Ġ1 0 +as ed +Ġas s +ub lic +Ġre g +ĠA nd +i x +Ġ very +Ġin clud +ot her +Ġim p +ot h +Ġsu b +ĠâĢ Ķ +Ġbe ing +ar g +ĠW h += = +ib le +Ġdo es +an ge +r am +Ġ 9 +er t +p s +it ed +ation al +Ġb r +Ġd own +Ġman y +ak ing +Ġc all +ur ing +it ies +Ġp h +ic s +al s +Ġde c +at ive +en er +Ġbe fore +il ity +Ġwe ll +Ġm uch +ers on +Ġth ose +Ġsu ch +Ġ ke +Ġ end +ĠB ut +as on +t ing +Ġl ong +e f +Ġth ink +y s +Ġbe l +Ġs m +it s +a x +Ġo wn +Ġpro v +Ġs et +if e +ment s +b le +w ard +Ġsh ow +Ġp res +m s +om et +Ġo b +Ġs ay +ĠS h +t s +f ul +Ġe ff +Ġg u +Ġin st +u nd +re n +c ess +Ġ ent +ĠY ou +Ġgo od +Ġst art +in ce +Ġm ade +t t +st em +ol og +u p +Ġ | +um p +Ġhe l +ver n +ul ar +u ally +Ġa c +Ġm on +Ġl ast +Ġ2 00 +1 0 +Ġst ud +u res +ĠA r +sel f +ar s +mer ic +u es +c y +Ġm in +oll ow +Ġc ol +i o +Ġm od +Ġc ount +ĠC om +he s +Ġf in +a ir +i er +âĢ Ķ +re ad +an k +at ch +e ver +Ġst r +Ġpo int +or k +ĠN ew +Ġs ur +o ol +al k +em ent +Ġus ed +ra ct +we en +Ġs ame +ou n +ĠA l +c i +Ġdiff ere +Ġwh ile +---- ---- +Ġg ame +ce pt +Ġs im +.. . +Ġin ter +e k +Ġre port +Ġpro du +Ġst ill +l ed +a h +Ġhe re +Ġwor ld +Ġth ough +Ġn um +ar ch +im es +al e +ĠS e +ĠI f +/ / +ĠL e +Ġre t +Ġre f +Ġtr ans +n er +ut ion +ter s +Ġt ake +ĠC l +Ġcon f +w ay +a ve +Ġgo ing +Ġs l +u g +ĠA meric +Ġspe c +Ġh and +Ġbet ween +ist s +ĠD e +o ot +I t +Ġe ar +Ġagain st +Ġh igh +g an +a z +at her +Ġex p +Ġo p +Ġin s +Ġg r +Ġhel p +Ġre qu +et s +in s +ĠP ro +is m +Ġf ound +l and +at a +us s +am es +Ġp erson +Ġg reat +p r +Ġs ign +ĠA n +' ve +Ġs omet +Ġs er +h ip +Ġr un +Ġ : +Ġt er +ire ct +Ġf ollow +Ġd et +ic es +Ġf ind +1 2 +Ġm em +Ġc r +e red +e x +Ġex t +ut h +en se +c o +Ġte am +v ing +ou se +as h +at t +v ed +Ġsy stem +ĠA s +d er +iv es +m in +Ġle ad +ĠB l +c ent +Ġa round +Ġgo vern +Ġc ur +vel op +an y +Ġc our +al th +ag es +iz e +Ġc ar +od e +Ġl aw +Ġre ad +' m +c on +Ġre al +Ġsupp ort +Ġ1 2 +.. .. 
+Ġre ally +n ess +Ġf act +Ġd ay +Ġb oth +y ing +Ġs erv +ĠF or +Ġth ree +Ġw om +Ġm ed +od y +ĠThe y +5 0 +Ġex per +t on +Ġe ach +ak es +Ġc he +Ġc re +in es +Ġre p +1 9 +g g +ill ion +Ġg rou +ut e +i k +W e +g et +E R +Ġm et +Ġs ays +o x +Ġd uring +er n +iz ed +a red +Ġf am +ic ally +Ġha pp +ĠI s +Ġch ar +m ed +v ent +Ġg ener +i ent +p le +i et +re nt +1 1 +v es +pt ion +Ġ2 0 +form ation +Ġc or +Ġoff ic +ie ld +Ġto o +is ion +Ġin f +Ġ Z +t he +o ad +Ġp ublic +Ġpro g +r ic +* * +Ġw ar +Ġp ower +v iew +Ġf ew +Ġl oc +Ġdiffere nt +Ġst ate +Ġhe ad +' ll +Ġp oss +Ġst at +re t +ant s +Ġv al +Ġis s +Ġc le +i vers +an c +Ġex pl +Ġan other +Ġ Q +Ġa v +th ing +n ce +W h +Ġch ild +Ġs ince +i red +l ess +Ġl ife +Ġde velop +itt le +Ġde p +Ġp ass +ã ĥ +Ġt urn +or n +Th is +b ers +ro ss +ĠA d +Ġf r +Ġres p +Ġsec ond +o h +Ġ / +Ġdis c +Ġ & +Ġsomet hing +Ġcomp le +Ġ ed +Ġf il +Ġmon th +a j +u c +Ġgovern ment +Ġwith out +Ġle g +Ġd ist +Ġp ut +Ġqu est +an n +Ġpro t +2 0 +Ġne ver +i ence +Ġle vel +Ġar t +Ġth ings +Ġm ight +Ġeff ect +Ġcont ro +Ġc ent +Ġ1 8 +Ġall ow +Ġbel ie +ch ool +ot t +Ġinc re +Ġfe el +Ġres ult +Ġl ot +Ġf un +ot e +Ġt y +ere st +Ġcont in +Ġus ing +Ġb ig +2 01 +Ġas k +Ġb est +Ġ ) +I N +Ġo pp +3 0 +Ġnum ber +in ess +S t +le ase +Ġc a +Ġm ust +Ġd irect +Ġg l +Ġ < +Ġop en +Ġp ost +Ġcom e +Ġse em +ord ing +Ġwe ek +ate ly +it al +Ġe l +ri end +Ġf ar +Ġt ra +in al +Ġp ri +ĠU S +Ġpl ace +Ġfor m +Ġto ld +" : +ain s +at ure +ĠTr ump +Ġst and +Ġ # +id er +ĠF r +Ġne xt +Ġs oc +Ġp ur +Ġle t +Ġl ittle +Ġh um +Ġ i +r on +1 5 +Ġ1 5 +Ġcomm un +Ġm ark +ĠThe re +Ġw r +ĠTh at +Ġin formation +w ays +Ġb us +a pp +Ġinv est +m e +Ġh ard +ain ed +e ad +Ġim port +Ġapp ro +Ġt est +Ġt ri +Ġre st +os ed +Ġf ull +Ġc are +ĠS p +Ġc ase +O N +Ġs k +Ġl ess +Ġ + +Ġpart ic +ĠP l +ab ly +u ck +is hed +ch n +b e +Ġl ist +at or +Ġto p +Ġad v +ĠB e +ru ct +Ġd em +r ation +l ing +g y +re en +g er +Ġh ome +Ġle ft +Ġbet ter +Ġd ata +Ġ1 1 +Ġatt ack +Ġpro ble +l ine +ard s +Ġbe h +r al +ĠH ow +ĠS he +ar ge +Ġ -- +: // +Ġb ro +ĠP h +at s +Ġbu ild +w w +id ed +a im +as es +en cy +Ġm ain +in ed +Ġinclud ing +Ġ { +Ġg ot +Ġint erest +Ġke ep +Ġ X +Ġe as +ain ing +Ġcl ass +âĢ ¦ +ĠN o +Ġv ar +Ġsm all +amp le +A T +Ġ ide +ĠS o +Ġre ce +Ġpol it +Ġm ov +Ġpl an +Ġper cent +iv ing +Ġc amp +Ġp ay +1 4 +s c +is ed +Ġu nt +one y +pl oy +== == +Ġdid n +ĠI nd +el s +ert ain +Ġp os +__ __ +i ver +Ġpro cess +Ġprog ram +if ied +ĠR ep +1 6 +u ro +olog y +at ter +in a +Ġn ame +ĠA ll +Ġf our +Ġret urn +v ious +b s +Ġcall ed +Ġm ove +ĠS c +ir d +Ġgrou p +Ġb re +Ġm en +Ġc ap +t en +e e +Ġd ri +le g +he re +uth or +Ġp at +Ġcur rent +id es +Ġp op +t o +ent ion +Ġal ways +Ġm il +Ġwom en +Ġ1 6 +Ġo ld +iv en +ra ph +ĠO r +r or +ent ly +Ġn ear +ĠE x +re am +s h +Ġ1 4 +Ġf ree +iss ion +st and +ĠC on +al ity +us ed +1 3 +Ġdes ign +Ġch ange +Ġch ang +Ġb o +Ġv is +em ber +Ġb ook +read y +Ġk ill +2 5 +pp ed +Ġa way +Ġab le +Ġcount ry +Ġcon st +ar n +Ġor der +A R +i or +i um +or th +1 8 +ail able +Ġs w +Ġm illion +Ġ1 3 +at ic +t ed +ĠG o +Ġo per +en g +Ġth ing +aj or +con om +ĠCom m +Ġwh y +u red +ur al +Ġs chool +b y +ĠM ar +Ġa ff +Ġd ays +Ġan n +us h +an e +I f +e g +Ġpro f +Ġhe alth +ou th +B ut +ion al +. 
, +Ġs ol +Ġal ready +Ġ3 0 +Ġchar act +H e +Ġf riend +E S +i ans +ic le +' d +ĠO n +Ġle ast +Ġp rom +Ġd r +Ġh ist +it her +Ġ est +i qu +1 7 +s on +Ġte ll +Ġt alk +oh n +o int +le ction +A N +Ġunt il +au gh +Ġl ater +Ġ ve +Ġv iew +end ing +iv ed +Ġwor d +w are +Ġc ost +Ġen ough +Ġg ive +ĠUn ited +Ġte chn +are nt +O R +Ġp ar +ĠD r +Ġ201 6 +r ist +er ing +Ġ  +Ġl arge +s ide +ac y +cc ess +Ġw in +Ġimport ant +Ġ19 9 +Ġdoes n +Ġ1 7 +Ġbus iness +Ġcle ar +Ġre se +" , +ur y +Ġe qu +as ter +al f +ĠAmeric an +n ect +Ġex pect +ivers ity +Ġo cc +ĠF l +Ġk ind +Ġme an +Ġp ast +Ġde v +Ġb as +le t +ra ft +Ġor gan +Ġde l +Ġper form +Ġst ory +Ġse ason +ĠC ol +Ġcl aim +Ġc ame +Ġwith in +Ġl ine +Ġpro ject +ĠA t +Ġcontro l +end ed +ĠS y +Ġa ir +iz ation +Ġ * +le y +Ġm oney +id d +Y ou +f or +Ġfam ily +Ġm aking +Ġb it +Ġpol ice +Ġhapp en +Ġ vers +on y +u ff +ĠW hen +Ġs it +ide o +l f +is on +Ġsu re +g in +Ġapp ear +Ġl ight +Ġ es +o f +Ġw ater +Ġt imes +n ot +Ġg row +Ġcomp any +ĠT e +ow s +Ġm ar +our ce +i ol +ar m +b r +Ġex ample +Ġcon c +Ġf ore +ĠT o +p ro +E N +ri es +Ġ2 5 +ĠC an +ne y +Ġact ually +Ġe ver +ur ity +ak en +ap s +Ġt ax +Ġm ajor +am a +Ġof ten +er al +Ġhum an +Ġj ob +is ter +Ġav ailable +oc r +en n +a id +iv id +Ġrec ord +? " +Ġs ing +ĠA m +id ence +Ġnew s +st er +Ġe conom +Ġfollow ing +ĠB r +is ing +Ġh our +m ost +um ent +Ġse x +Ġdes c +Ġbec ome +ĠE d +Ġto ok +Ġha ving +Ġprodu ct +a ult +A s +ar ing +Ġme ans +Ġh op +un e +Ġch o +Ġc ertain +Ġn on +Ġde al +2 4 +le ment +oc i +en e +Ġs ide +ĠP r +ĠM ay +Ġre ason +u ed +c hed +ul ation +Ġe lect +Ġoffic ial +Ġposs ible +Ġh old +and s +ot s +Ġc ity +or ies +Ġse ver +Ġchild ren +Ġon ce +Ġact iv +l er +Ġn ight +it ions +ĠJ ohn +a pe +pl ay +Ġd one +Ġl im +Ġwork ing +ĠP res +or ld +e b +ĠC o +Ġb ody +ail s +ut es +ĠM r +Ġwhe ther +Ġa uthor +ro p +Ġpro per +Ġse en +) ; +Ġf ac +ĠS u +Ġcon d +it ing +Ġcour se +Ġ } +-------- -------- +a ign +Ġev ent +Ġen g +Ġp ot +Ġin tern +i am +Ġsh ort +em pt +ã Ĥ +ĠG od +il ar +8 0 +Ġor ig +I S +our n +ab ility +it ive +Ġd am +Ġ1 00 +Ġp ress +Ġdo ing +Ġprot ect +r ing +Ġthough t +Ġquest ion +re w +ĠW ar +Ġsever al +ĠSt ate +Ġg iven +Ġf und +ĠT w +Ġw ent +an ces +w ork +p or +m y +4 0 +Ġar g +art ment +ust om +Ġpol ic +Ġme et +Ġc reat +2 2 +ĠSt ates +Ġg ames +ra w +ut ure +Ġunder stand +ur s +ĠO b +l ish +s y +Ġm akes +Ġw on +ag on +Ġh tt +Ġl ove +ent ial +Ġcomple te +p ar +ĠI m +A L +Ġacc ount + ł +ore d +ver t +Ġ ident +Ġ201 5 +Ġother s +ĠM in +i ber +ver age +The re +ition al +d d +Ġpro b +Ġyou ng +Ġal ong +Ġacc ording +Ġy et +Ġmem bers +ĠWh at +o id +ĠM an +A nd +Ġam ong +a i +Ġem ploy +ĠR es +Ġ > +Ġinv ol +Ġl ow +a f +ĠC ar +Ġh ig +ĠO ne +ĠS ec +in ation +Ġlike ly +Ġan t +ag ed +ĠR uss +Ġb en +Ġre le +F or +b ack +ĠN ot +Ġpres ident +b all +Ġacc ess +ivid ual +ĠD em +ĠE uro +6 0 +Ġkn own +ir l +ĠG r +Ġear ly +u se +iet y +âĢ ĵ +Ġf ight +Ġs ent +Ġto day +Ġmark et +" . 
+Ġb ased +Ġstr ong +ur ther +Ġde b +m ber +Ġproble m +Ġde ath +Ġsoc ial +im ate +A S +ort un +Ġcamp aign +er y +C h +Ġe y +i ally +Ġm us +w h +p os +Ġ er +Ġsa f +Ġmonth s +ir on +Ġv iol +Ġf ive +Ġst re +Ġplay ers +in c +al d +y ear +a un +Ġsu ccess +Ġpres ent +ere nce +Ġ201 4 +Ġsu gg +Ġpartic ular +Ġtr y +Ġsugg est +ĠCh rist +on es +Ġpri v +2 3 +Ġc rit +Ġl and +Ġloc al +if y +2 9 +Ġa ut +E D +ĠG u +Ġm ult +Ġpolit ical +Ġask ed +Ġfor mer +it ter +ri pt +Ġcl ose +Ġp ract +ĠY ork +Ġget ting +Ġac ross +Ġcom b +Ġbelie ve +Ġ z +Ġto get +Ġtoget her +ĠC ent +ir c +Ġind ividual +ĠM c +2 7 +is k +ĠE ng +Ġf ace +Ġ2 4 +Ġval ue +Ġare a +e v +Ġw rit +ĠPres ident +Ġv ot +Ġke y +Ġm om +p ut +Ġany thing +Ġexper ience +att le +Ġm ind +a ff +om m +Ġf uture +g ed +Ġc ut +Ġto t +it ch +Ġv ideo +Ġinvest ig +Ġn et +ĠM y +r ict +i en +. ) +Ġimp ro +th ough +ward s +Ġcon nect +ĠM ed +sel ves +ens ive +m b +o ber +at ors +A n +Ġ5 0 +Ġre du +res ent +Ġab ove +Ġf re +ĠEuro pe +s w +Ġam ount +ĠA pp +Ġe ither +Ġmil it +Ġan al +Ġf ail +ĠE n +al es +Ġspec ial +Ġbl ack +I T +c her +Ġlook ing +Ġf ire +y n +Ġal most +o on +Ġstud y +Ġm iss +c hes +ro wn +Ġt re +Ġcommun ity +Ġmed ia +Ġf ood +Ġcom es +ĠUn iversity +Ġsing le +Wh at +u ly +Ġh alf +ag ue +h od +ĠRep ublic +Ġstart ed +Ġqu ick +ot o +b ook +Ġiss ue +it or +Ġel se +Ġcons ider +2 6 +ro du +Ġt aken +2 8 +9 9 +ĠW ith +Ġtr ue +Ġw a +Ġtr ad +Ġag o +Ġm ess +ie f +Ġadd ed +o ke +Ġb ad +Ġf av +3 3 +Ġsim ilar +as k +ĠD on +Ġcharact er +ort s +ĠH ouse +Ġreport ed +Ġty pe +v al +i od +ĠHow ever +Ġt arg +Ġent ire +pp ing +Ġhist ory +Ġl ive +ff ic +.... .... +ed eral +Ġtr ying +Ġdisc uss +ĠH ar +ac es +l ished +Ġse lf +os p +re st +Ġro om +el t +Ġf all +ol ution +Ġe t +Ġ x +Ġis n +Ġide a +b o +Ġs ound +ĠD ep +Ġsome one +ci ally +ull y +Ġf oc +Ġob ject +if t +ap er +Ġplay er +Ġr ather +Ġserv ice +as hing +ĠD o +ĠP art +ru g +m on +p ly +Ġm or +Ġnot hing +Ġprov ide +I C +un g +Ġpart y +Ġex ist +Ġm ag +7 0 +Ġr ul +Ġh ouse +Ġbeh ind +Ġhow ever +ĠW orld +Ġs um +Ġapp lic +Ġ ; +Ġfun ction +g r +ĠP ol +Ġfr ont +2 00 +Ġser ies +Ġt em +Ġty p +ill s +Ġo pt +Ġpoint s +Ġbel ow +itt ed +Ġspec ific +Ġ201 7 +um b +Ġr a +Ġpre vious +Ġpre t +re me +Ġc ustom +Ġcour t +ĠM e +Ġre pl +Ġwho le +g o +c er +Ġt reat +ĠA ct +Ġprob ably +Ġle arn +end er +ĠA ss +Ġvers ion +n ow +Ġche ck +ĠC al +R E +min ist +O n +our ces +Ġben ef +Ġd oc +Ġdet er +Ġen c +Ġsu per +Ġadd ress +Ġv ict +Ġ201 3 +Ġme as +t r +Ġf ield +W hen +Ġsign ific +u ge +Ġfe at +Ġcomm on +l oad +Ġbe gin +Ġbr ing +Ġa ction +er man +Ġdesc rib +Ġind ust +Ġwant ed +ri ed +m ing +Ġatt empt +4 5 +f er +Ġd ue +ress ion +# # +Ġsh all +Ġs ix +o o +Ġst ep +Ġp ub +Ġhim self +Ġ2 3 +Ġc op +Ġd est +Ġst op +A C +ib ility +Ġl ab +ic ult +Ġhour s +Ġcre ate +Ġf urther +ĠAmeric a +ĠC ity +Ġd ou +he ad +S T +ĠN orth +c ing +Ġn ational +u le +ĠIn st +Ġt aking +ĠQ u +ir t +Ġre d +Ġrese arch +v iron +ĠG e +Ġbre ak +an a +Ġsp ace +ater ial +Ġrec ent +ĠA b +Ġgener al +Ġh it +Ġper iod +Ġevery thing +ive ly +Ġph ys +Ġsay ing +an ks +Ġc ou +Ġc ult +ac ed +e al +u ation +Ġc oun +l u +Ġinclud e +Ġpos ition +ĠA fter +ĠCan ad +ĠE m +Ġim m +ĠR ed +Ġp ick +Ġcom pl +Ġm atter +re g +e xt +ang u +is c +o le +a ut +Ġcomp et +e ed +f ect +Ġ2 1 +ĠS en +ĠThe se +as ing +Ġcan not +Ġin it +Ġrel ations +ac hed +Ġb ar +Ġ4 0 +ĠT H +Ġ201 2 +Ġv ol +Ġg round +Ġsec urity +Ġup d +il t +3 5 +Ġconc ern +ĠJ ust +Ġwh ite +Ġseem s +ĠH er +pe cially +i ents +Ġann oun +Ġf ig +ight s +Ġst ri +l ike +id s +Ġs us +Ġw atch +Ġ â +Ġw ind +ĠC ont +Ġit self +Ġm ass +A l +y le +iqu e +ĠN ational +Ġab s +Ġp 
ack +Ġout side +Ġan im +Ġp ain +et er +Ġman ag +du ct +og n +Ġ ] +ĠSe pt +se c +o ff +ĠJ an +Ġf oot +ad es +Ġth ird +Ġm ot +Ġev idence +int on +Ġth reat +a pt +pl es +c le +Ġl o +Ġde cl +Ġit em +med i +Ġrep resent +om b +am er +Ġsignific ant +og raph +s u +Ġc al +i res +00 00 +I D +A M +Ġsim ply +Ġlong er +Ġf ile +O T +c he +S o +ate g +or g +ĠH is +Ġen er +Ġd om +Ġup on +il i +": " +Ġthem selves +Ġcom ing +Ġqu ite +Ġdiff icult +ĠB ar +il ities +re l +end s +c ial +6 4 +Ġwom an +ra p +y r +Ġne cess +ip s +Ġte xt +Ġrequ ire +Ġmilit ary +Ġre view +Ġresp ons +7 5 +Ġsub ject +Ġinst ead +Ġiss ues +Ġg en +" ," +Ġmin utes +Ġwe ap +r ay +am ed +t ime +b l +H ow +Ġc ode +ĠS m +Ġhig her +ĠSt e +r is +Ġp age +Ġstud ents +ĠIn tern +Ġmet hod +ĠA ug +ĠP er +ĠA g +Ġpolic y +ĠS w +Ġex ec +Ġac cept +um e +rib ut +Ġword s +Ġfin al +Ġchang es +ĠDem ocr +Ġfriend s +Ġres pect +Ġe p +Ġcomp an +iv il +Ġdam age +** ** +og le +viron ment +Ġne g +ent al +Ġa p +Ġtot al +iv al +! " +l im +Ġneed s +Ġag re +Ġdevelop ment +Ġa ge +ip le +2 1 +Ġresult s +ĠA f +S h +Ġg un +ĠOb ama +ro ll +Ġ @ +Ġright s +ĠB rit +Ġrun ning +Ġwas n +Ġp ort +Ġr ate +Ġpret ty +Ġtarg et +Ġsa w +Ġc irc +Ġwor ks +ic ro +al t +o ver +ww w +Th at +l ier +Ġevery one +ud e +Ġp ie +idd le +ra el +Ġr ad +Ġbl ock +Ġw alk +T o +ã ģ +n es +ĠA ust +a ul +ro te +ĠS outh +ess ion +op h +Ġshow s +Ġs ite +Ġj o +Ġr isk +cl us +l t +Ġin j +id ing +ĠS pe +Ġch all +ir m +Ġ2 2 +itt ing +st r +Ġh y +L E +ke y +Ġbe gan +at ur +ashing ton +l am +ĠD av +b it +Ġs ize +ĠP ar +3 8 +ourn al +f ace +Ġdec ision +Ġl arg +Ġj ud +re ct +Ġcontin ue +ĠO ct +ove red +ĠI nt +==== ==== +Ġp arent +ĠW ill +Ġeas y +Ġd rug +ang er +Ġs ense +Ġd i +id ay +Ġener gy +ist ic +Ġass oci +ar ter +ob al +e ks +ĠE l +ur ch +Ġg irl +o e +it le +Ġ2 8 +ĠC he +Ġrequ est +Ġso on +Ġh ost +k y +Ġst ates +om es +Ġm aterial +le x +Ġmom ent +Ġan sw +on se +Ġes pecially +Ġn orm +Ġserv ices +p ite +r an +Ġro le +4 4 +) : +Ġc red +C l +____ ____ +Ġm at +Ġl og +ĠCl inton +O U +Ġoff ice +Ġ2 6 +Ġch arg +Ġtr ack +m a +Ġhe art +Ġb all +Ġperson al +Ġbuild ing +n a +s et +b ody +ĠBl ack +Ġincre ase +itt en +Ġneed ed +3 6 +3 2 += " +Ġl ost +Ġbec ame +Ġgrou ps +ĠM us +Ġw rote +ĠP e +Ġpro p +j oy +à © +ĠWh ite +Ġde ad +. ' +Ġhtt p +Ġwe bs +O S +Ġins ide +Ġwr ong +Ġstat ement +Ġ ... 
+y l +Ġfil m +Ġmus ic +Ġsh are +ific ation +Ġre lease +Ġfor ward +Ġst ay +Ġcomp ut +it te +s er +Ġorig inal +Ġc ard +Ġc and +Ġd iv +at ural +Ġfav or +O M +Ġc ases +us es +Ġse ction +Ġle ave +g ing +ov ed +ĠW ashington +3 9 +ĠG l +Ġrequ ired +act ion +ap an +o or +it er +ĠK ing +Ġcount ries +ĠG erman +ll ing +Ġ2 7 +3 4 +Ġquest ions +Ġpr im +Ġc ell +Ġsh oot +Ġany one +ĠW est +Ġaff ect +ep end +Ġon line +ĠIs rael +ĠSept ember +Ġab ility +Ġcont ent +is es +Ġre ve +Ġl aun +Ġind ic +Ġfor ce +c ast +Ġso ld +av ing +f l +Ġso ft +Ġcompan ies +ce ed +Ġart icle +Ġa ud +Ġre v +Ġed uc +Ġplay ing +0 5 +Ġhe ld +ct or +Ġrele ased +Ġf ederal +3 7 +Ġad minist +Ġinter view +Ġinst all +Ġrece ived +Ġs ource +u k +P h +Ġser ious +Ġcre ated +Ġc ause +Ġim medi +Ġdef in +u el +ĠDep artment +ct ions +ĠC our +ĠN ow +z e +it es +it ution +Ġl ate +Ġspe ak +n ers +Ġleg al +ar i +ĠC or +Ġwe eks +Ġmod el +Ġp red +Ġex act +B C +ĠB y +IN G +os ing +Ġt akes +Ġreg ard +Ġopp ortun +Ġpr ice +Ġ19 8 +ĠA pr +f ully +Ġor d +Ġproble ms +ru ction +h am +ĠC ount +le ge +Ġlead ers +E T +le v +Ġde ep +olog ical +es e +h aps +ĠS ome +Ġp ers +Ġcont ract +Ġrelations hip +s p +ou d +Ġb ase +4 8 +m it +A d +anc ial +Ġcons um +Ġpot ential +Ġl angu +re m +et h +Ġrel ig +ress ed +6 6 +Ġl ink +Ġl ower +ay er +ĠJ une +Ġf em +un t +er c +ur d +Ġcont act +Ġ ill +Ġm other +Ġest ab +h tt +ĠM arch +ĠB ro +ĠCh ina +Ġ2 9 +Ġs qu +Ġprov ided +Ġa verage +as ons +Ġ201 1 +Ġex am +l in +5 5 +n ed +Ġper fect +Ġt ou +al se +u x +Ġbu y +Ġsh ot +Ġcol lect +Ġph ot +Ġplay ed +Ġsur pr +Ġofficial s +Ġsim ple +av y +Ġindust ry +Ġhand s +g round +Ġp ull +Ġr ound +Ġus er +Ġr ange +u ary +Ġpriv ate +op s +e es +Ġw ays +ĠM ich +Ġve h +Ġex cept +Ġter ms +im um +pp er +I ON +ore s +ĠDr agon +ou l +Ġd en +Ġperform ance +Ġb ill +c il +4 7 +Ġen vironment +Ġex c +ad d +Ġwor th +Ġp ict +Ġch ance +Ġ201 8 +b or +Ġspe ed +ict ion +Ġal leg +ĠJ apan +at ory +re et +Ġm atch +ĠI I +Ġst ru +ord er +Ġst e +Ġl iving +Ġst ruct +in o +Ġse par +her n +Ġresp onse +Ġen joy +Ġv ia +A D +um ents +ace book +Ġmem ber +ib r +iz ing +Ġto ol +ĠM on +ĠWh ile +h ood +ĠA ng +ĠD ef +Ġoff er +T r +a ur +Ġturn ed +ĠJ uly +d own +an ced +Ġrec ently +ĠE ar +Ġc e +ĠSt ar +ĠC ong +rough t +Ġbl ood +Ġhop e +Ġcom ment +ain t +Ġar ri +il es +Ġpartic ip +ough t +ri ption +0 8 +4 9 +Ġg ave +Ġse lect +Ġkill ed +sy ch +Ġgo es +i j +Ġc oll +Ġimp act +at ives +ĠS er +0 9 +ĠAug ust +Ġb oy +d e +ĠD es +Ġf elt +U S +Ġexpect ed +Ġim age +ĠM ark +cc ording +o ice +E C +ĠM ag +en ed +h old +ĠP ost +Ġpre vent +N o +Ġinvol ved +Ġey es +Ġquick ly +A t +un k +Ġbeh av +Ġ ur +Ġl ed +c ome +e y +Ġcand id +Ġear lier +Ġfoc us +et y +P ro +led ge +ix ed +ill ed +Ġpop ular +A P +Ġset t +l ight +Ġvar ious +in ks +Ġlevel s +Ġro ad +ell ig +ab les +he l +itte e +ĠG ener +y pe +Ġhe ard +ic les +Ġm is +Ġus ers +ĠS an +Ġimpro ve +Ġf ather +Ġse arch +The y +v il +Ġprof ess +Ġkn ew +Ġl oss +Ġev ents +6 5 +Ġb illion +0 7 +0 2 +ĠNew s +ĠA M +Ġco ver +w here +ens ion +Ġb ott +Ġare as +en ces +op e +ĠTw itter +a el +Ġget s +ĠGo ogle +Ġs n +i ant +Ġv ote +Ġnear ly +Ġinclud ed +Ġrec ogn +z z +m m +al ed +Ġhappen ed +0 4 +Ġh ot +Ġwho se +Ġc ivil +Ġsu ff +o es +it iz +ĠSy ri +Ġresp ond +Ġh on +Ġfeat ures +Ġeconom ic +ĠApr il +r im +Ġtechn ology +Ġo ption +ag ing +Ġpur ch +R e +Ġl at +ch ie +is l +Ġrec omm +u f +Ġtr aining +Ġeffect s +Ġf ast +Ġ201 0 +Ġocc ur +Ġwebs ite +Ġem ail +Ġs ens +e ch +Ġo il +Ġinf lu +Ġcurrent ly +ĠS ch +ĠAd d +Ġgo al +Ġsc ient +Ġcon v +1 00 +em y +Ġdec ided +Ġtra vel +Ġm ention +L L +0 3 +Ġe lection +Ġph one +Ġlook s +Ġsit 
uation +Ġc y +Ġh or +b ed +ĠCour t +a ily +av es +Ġqu ality +ĠCom p +w ise +Ġt able +Ġst aff +ĠW ind +et t +Ġtri ed +ide red +Ġadd ition +Ġb ox +Ġl ack +ar ily +Ġw ide +Ġm id +Ġbo ard +ys is +Ġant i +h a +Ġd ig +en ing +Ġd ro +C on +6 8 +Ġsl ow +b ased +se qu +Ġp ath +E x +ak er +Ġwork ed +Ġp en +Ġeng ine +Ġlook ed +ĠSu per +ĠS erv +Ġvict im +U n +Ġproper ty +Ġint rodu +Ġexec ut +ĠP M +L e +Ġcol or +ĠM ore +Ġ6 0 +Ġnet work +Ġd ate +c ul +id ge +Ġext ra +3 1 +Ġs le +6 7 +Ġw ond +Ġreport s +j ust +ĠAust ral +Ġcap ital +Ġen s +Ġcomm and +Ġallow ed +Ġpre p +Ġca pt +h ib +Ġnum bers +ch an +Ġf air +m p +om s +Ġre ach +W ith +t ain +Ġbro ad +Ġcou ple +ec ause +ly ing +ĠF eb +Ġsc reen +Ġl ives +Ġpri or +ĠCong ress +A r +Ġappro ach +Ġe mer +ar ies +ĠD is +s erv +ĠN e +Ġbu ilt +c ies +Ġre pe +Ġrul es +for ce +ĠP al +Ġfin ancial +Ġcons idered +ĠCh ar +n ces +ĠI S +Ġb rought +Ġb i +i ers +ĠS im +O P +Ġproduct s +Ġvis it +Ġdoc ument +Ġcon duct +Ġcomplete ly +in ing +ĠCal if +ib ly +Ġwr itten +ĠT V +em ents +Ġd raw +O ne +Ġpub lished +Ġsec ret +r ain +he t +ĠF acebook +ond ay +ĠU p +Ġsex ual +Ġth ous +ĠP at +Ġ ess +Ġstand ard +Ġar m +g es +ect ion +Ġf ell +Ġfore ign +an i +ĠFr iday +Ġreg ular +in ary +Ġincre ased +Ġus ually +Ġdem on +Ġd ark +Ġadd itional +ro l +ĠO f +Ġprodu ction +! ! +und red +Ġintern ational +id ents +ĠF ree +rou p +Ġr ace +Ġm ach +Ġh uge +A ll +le ar +ove mber +Ġto wn +Ġatt ention +ĠO ff +y ond +ĠThe n +f ield +Ġter ror +ra z +ĠB o +Ġmeet ing +ĠP ark +Ġar rest +Ġf ear +Ġa w +ĠV al +or ing +' , +Ġext reme +ar r +Ġwork ers +A fter +Ġ3 1 +n et +am ent +Ġdirect ly +Ġpop ulation +ub e +ĠOct ober +ĠI N +ĠJan uary +5 9 +ĠDav id +Ġc ross +ce mber +ĠF irst +Ġmess age +ir it +Ġn ation +Ġp oll +is ions +Ġansw er +n y +is ode +Ġcar ry +ĠRuss ia +Ġhe ar +eng th +ro y +Ġn atural +in ally +Ġdo g +m itted +Ġtr ade +Ġsub st +Ġmult iple +ĠAf ric +Ġf ans +Ġs ort +Ġgl obal +ic ation +ĠW ed +ar a +Ġa chie +Ġlangu age +ve y +Ġt al +Ġnecess ary +Ġdet ails +Ġs en +ĠS und +ĠRe g +ĠR ec +0 6 +Ġs il +ress ive +Ġmed ical +un ch +orn ia +Ġu nd +f ort +oc ks +ĠM onday +ues day +c raft +7 7 +ur t +Ġ ver +ĠH ill +Ġrece ive +Ġmor ning +es tern +Ġb ank +Ġs at +ir th +ĠH igh +Ġdev ice +ĠTH E +ĠCent er +Ġsaf e +Ġp le +ĠCanad a +Ġsystem s +Ġass ist +Ġsur v +Ġb attle +ĠS oc +vert is +S he +Ġp aper +Ġgrow th +Ġc ast +S c +Ġpl ans +ll ed +Ġpart s +Ġw all +Ġmove ment +Ġpract ice +im ately +Ġdis play +Ġsomet imes +om p +ĠP aul +ĠY es +k ing +5 8 +o ly +Ġs on +Ġav oid +ok es +ĠJ ew +Ġto wards +as c +Ġ // +ĠK ore +Ġtalk ing +Ġcor rect +Ġsp ent +ic ks +i able +e ared +Ġter m +Ġwant s +om ing +Ġ ut +Ġdou b +Ġfor ces +Ġp lease +6 9 +ĠN ovember +at form +ond on +Ġon es +Ġimmedi ately +ĠRuss ian +ĠM et +Ġde g +Ġparent s +C H +ĠAmeric ans +al y +ĠM od +Ġsh own +Ġcond itions +Ġst uff +Ġre b +ĠY our +Ġinclud es +n own +ĠS am +Ġexper ien +m ission +ĠE ven +augh t +Ġannoun ced +ĠRepublic an +Ġdeter min +Ġdescrib ed +ĠCount y +( ) +Ġdo or +Ġchang ed +Ġne igh +ĠH ere +Ġcle an +Ġp an +ĠDe cember +ĠEurope an +ir ing +ap ter +Ġcl ub +ĠT uesday +Ġp aid +ĠN et +Ġattack s +Ġcharact ers +Ġal one +Ġdirect or +d om +Ġ3 5 +Ġl oad +Ġr out +ĠCalif ornia +Ġfin ally +Ġr ac +Ġcont r +Ġexact ly +res h +p ri +ĠIs lam +Ġn ature +Ġcare er +Ġlat est +Ġcon vers +ĠS l +p ose +ci ent +ĠIn c +iv ity +8 8 +ĠA tt +ĠM or +nes day +Ġwe ight +k en +Ġnot e +Ġteam s +Ġ \ +air s +ĠG reen +Ġh undred +on ent +Ġstre ng +Ġcons ist +ic ated +Ġreg ul +Ġl ic +ast ic +Ġt en +urs day +ellig ence +ous ly +ĠU K +B I +Ġcost s +Ġind epend +ĠA P +Ġnorm al +Ġh om +Ġob vious +Ġs we 
+Ġst ar +Ġread y +ac her +Ġimp lement +g est +Ġs ong +ĠG et +ĠL ab +Ġinterest ing +us ing +Ġg iving +ĠSund ay +Ġet c +Ġm iddle +Ġrem ember +r ight +os ition +ut ions +Ġm ax +4 6 +Ġyour self +Ġdem and +Ġtreat ment +Ġd anger +ĠC ons +Ġgu y +ĠBrit ish +Ġphys ical +Ġrel ated +Ġrem ain +Ġcould n +Ġref er +Ġc itiz +b ox +EN T +bo ard +Ġin n +I G +er o +ĠSt reet +osp ital +ren ch +cher s +Ġst ra +O L +ag er +ĠA N +Ġeas ily +I A +en ge +in y +Ġcl os +ock ed +Ġus es +ĠC oun +I m +u ild +? ? +m ore +Ġan g +Ġwr ite +ol ute +5 7 +Ġlead er +Ġread ing +< / +Ġaut om +est s +4 3 +Ġleg isl +ĠG old +Ġdesign ed +ĠS T +ĠLe g +a res +Ġbe aut +ĠT ex +Ġappear s +Ġstru gg +ĠR om +Ġ 00 +Ġcho ice +Ġparticular ly +ĠF rom +op er +ĠL ondon +ann ed +Ġallow s +ob ile +Ġdiffere nce +âĢ ¢ +ĠV iew +ĠWed nesday +Ġal though +Ġrel ative +Ġapplic ation +ate ver +Ġare n +Ġmy self +Ġim ag +Ġdis e +Ġsoc iety +Ġfre qu +ĠEng lish +Ġpo or +ĠD ay +Ġwrit ing +Ġse ven +Ġstart ing +Ġb ud +Ġpr int +ĠTr ans +uf act +ĠSt ud +n ew +Ġcr im +Ġg ives +Ġco ol +a e +i ance +ĠGener al +Ġthink ing +Ġsa ve +Ġlim ited +ĠPart y +Ġmean ing +p en +ow ers +ĠJ ack +E M +Ġn ice +ru pt +Ġg as +Ġe ight +Ġfe et +Ġeff ort +Ġ ign +ic it +B l +co in +Ġop in +Ġbr ain +Wh ile +he st +ĠTh ursday +Ġwould n +augh ter +Ġtou ch +le ments +Ġstud ies +Ġcent er +c ont +or ge +Ġcomput er +Ġinvestig ation +P l +or ks +Ġ200 8 +Ġincre asing +Ġst ore +Ġcom ments +Ġb al +m en +Ġdo ll +Ġl iber +Ġw ife +Ġlaw s +atur day +it ness +Ġmod ern +ĠS k +Ġadminist ration +Ġopportun ity +Ġs al +Ġpower ful +M y +Ġclaim s +ĠEar th +ord s +Ġt itle +Ġes c +n ame +N ot +om en +Ġbe yond +Ġc amer +Ġse ll +it ute +ear ch +Ġapp l +im ent +4 2 +ĠAr t +Ġun f +Ġviol ence +ur g +ĠE ast +Ġcomp ared +Ġopt ions +Ġthrough out +Ġv s +ig r +. [ +ac hes +7 8 +Ġfil es +F L +E L +ar ian +ĠJ ames +ĠA ir +an ch +Ġdet ail +Ġpie ce +P S +Ġn amed +Ġeduc ation +Ġdri ve +Ġitem s +Ġstud ent +ic ed +: : +ic o +Ġth row +Ġsc ene +Ġcomple x +Ġ200 9 +Ġpre c +ĠB re +7 9 +Ġcon cept +Ġstat us +am ing +Ġd ied +Ġknow ledge +Ġbegin ning +O D +ru ary +Ġcertain ly +Ġgu ys +Ġsl ight +in n +ound s +Ġf ine +Ġf at +ic ations +Ġper haps +ĠA nt +Ġinc ome +Ġhtt ps +Ġmajor ity +port s +st on +Ġgreat er +Ġfe ed +ent ially +Ġsaf ety +Ġun ique +and om +Ġg one +Ġshow ed +Ġhist or +Ġcoun ter +i us +id a +Ġlead ing +i pe +Ġs end +ĠDon ald +er ve +Ġdef ense +ines e +Ġy es +ĠF ire +ĠMus lim +ra q +Ġcontin ued +os h +Ġprov ides +Ġpr ison +ĠP re +Ġhapp y +Ġeconom y +Ġtr ust +ag s +ĠG ame +Ġweap ons +um an +ĠC le +it ation +Ġanal ysis +ĠT imes +Ġsc ience +- > +Ġfig ure +Ġdis app +ent y +Ġsoft ware +Ġu lt +Ġoffic ers +N ew +I s +Ġrem ains +ĠInd ia +Ġp sych +ri ef +Ġc at +es c +Ġob serv +Ġst age +ĠD ark +Ġent er +ch ange +Ġpass ed +Ġdes pite +ĠO ut +Ġmov ie +r s +Ġv oice +m ine +ĠPl ay +Ġto ward +ĠT er +Ġreg ion +Ġval ues +or ters +Ġm ount +Ġoffic er +ĠO ther +b an +Ġh ous +w ood +ro om +I V +ĠS un +se e +ĠO ver +ro g +9 0 +Ġl ay +ĠT ur +a wn +Ġpress ure +ĠS ub +Ġbook s +ed om +ĠS and +A A +ag o +Ġre asons +f ord +Ġactiv ity +U T +N ow +ĠSen ate +ce ll +n ight +Ġcall s +in ter +Ġlet ter +ĠR ob +ĠJ e +Ġcho ose +ĠL aw +G et +B e +Ġro b +Ġtyp es +Ġpl atform +Ġqu arter +R A +ĠT ime +Ġmay be +ĠC r +9 5 +p re +Ġmov ing +Ġl if +Ġgo ld +Ġs om +Ġpat ients +Ġtr uth +ĠK e +ur ance +ant ly +m ar +Ġchar ge +ĠG reat +Ġce le +---------------- ---------------- +Ġro ck +ro id +an cy +Ġcred it +a ud +B y +ĠE very +Ġmov ed +ing er +rib ution +Ġn ames +Ġstra ight +ĠHe alth +ĠW ell +Ġfe ature +Ġr ule +Ġsc he +in ated +ĠMich ael +ber g +4 1 +il ed +b and +Ġcl ick +ĠAng el 
+on ents +Â Ń +ĠI raq +ĠS aturday +Ġa ware +p art +Ġpat tern +O W +ĠL et +Ġgr ad +ign ed +Ġassoci ated +Ġst yle +n o +i ation +a ith +il ies +Ġst ories +ur ation +Ġindividual s +ĠâĢ ¦ +m iss +ĠAss oci +ish ing +ab y +Ġsum mer +ĠB en +Ġ3 2 +Ġar ch +ut y +ĠTex as +h ol +Ġfull y +Ġm ill +Ġfollow ed +ĠB ill +ĠInd ian +ĠSec ret +ĠB el +ĠFeb ruary +Ġjob s +Ġseem ed +ĠGo vern +i pped +Ġreal ity +Ġl ines +Ġp ark +Ġmeas ure +ĠO ur +I M +Ġbro ther +Ġgrow ing +Ġb an +Ġest im +Ġc ry +ĠS chool +Ġme chan +ĠO F +ĠWind ows +Ġr ates +ĠO h +Ġpos itive +Ġcult ure +ist ics +ic a +Ġh ar +y a +ite ly +i pp +Ġm ap +en cies +ĠWill iam +I I +ak ers +5 6 +ĠM art +ĠR em +Ġal tern +it ude +Ġco ach +row d +D on +Ġk ids +Ġj ournal +Ġcor por +Ġf alse +Ġwe b +Ġsle ep +Ġcont ain +Ġst o +Ġb ed +iver se +ĠR ich +ĠCh inese +Ġp un +Ġme ant +k nown +Ġnot ice +Ġfavor ite +a ven +Ġcond ition +Ġpur pose +) ) +Ġorgan ization +Ġchall eng +Ġman ufact +Ġsus p +ĠA c +Ġcrit ic +un es +uc lear +Ġm er +vent ion +Ġ8 0 +Ġm ist +ĠU s +ĠT or +htt p +ol f +Ġlarg er +Ġadv ant +Ġrese ar +Ġact ions +m l +Ġke pt +Ġa im +, ' +c ol +Ġbenef its +if ying +Ġact ual +ĠIntern ational +Ġveh icle +Ġch ief +Ġeff orts +ĠLe ague +ĠM ost +Ġwa it +Ġad ult +Ġover all +Ġspe ech +Ġhigh ly +Ġfem ale +Ġer ror +Ġeffect ive +5 4 +Ġenc our +w ell +Ġfail ed +Ġcons erv +Ġprogram s +Ġt rou +Ġa head +5 00 +vertis ement +I P +ĠF ound +p ir +Ġ % +Ġcr ime +and er +Ġloc ation +ĠI ran +Ġbehav ior +az ing +Ġr are +Ġem b +Ġca used +Ġsh ip +Ġact ive +Ġcont ribut +Ġg reen +Ġac qu +Ġref lect +ven ue +Ġf irm +Ġb irth +] . +Ġclear ly +Ġem ot +Ġag ency +ri age +Ġmem ory +9 8 +S A +ĠSe e +ac ing +C C +Ġbig gest +Ġr ap +Ġbas ic +Ġb and +e at +Ġsus pect +ĠM ac +Ġ9 0 +m ark +ist an +Ġsp read +am s +k i +as y +ra v +ĠR ober +Ġdemon str +r ated +Ġabs olute +Ġpl aces +Ġim pl +ibr ary +Ġc ards +Ġdest roy +Ġv irt +ve re +Ġapp eared +y an +p oint +Ġbe g +Ġtem per +s pe +ant ed +ear s +ĠD irect +Ġl ength +Ġbl og +am b +Ġint eg +Ġres ources +ac c +if ul +Ġsp ot +Ġfor ced +Ġthous ands +ĠMin ister +Ġqu al +ĠF rench +at ically +Ġgener ally +Ġdr ink +Ġth us +I L +od es +Ġappro pri +ĠRe ad +Ġwh om +Ġey e +Ġcol lege +Ġ4 5 +ire ction +Ġens ure +Ġapp arent +id ers +Ġrelig ious +Ġmin or +ol ic +Ġt ro +ĠWh y +rib ute +m et +Ġprim ary +Ġdevelop ed +Ġpe ace +Ġsk in +st e +av a +Ġbl ue +Ġfam ilies +Ġ ir +Ġapp ly +Ġin form +ĠSm ith +C T +i i +Ġlim it +Ġres ist +........ ........ 
+um n +Ġconf lic +Ġtw e +ud d +ĠT om +Ġl iter +qu e +b on +Ġha ir +Ġevent ually +Ġp us +Ġhelp ed +Ġag g +or ney +ĠApp le +Ġf it +ĠS ur +Ġpre m +Ġs ales +Ġsecond s +Ġstreng th +Ġfeel ing +¿ ½ +Ġt our +Ġknow s +o om +Ġex erc +Ġsom ew +ï ¿½ +> > +Ġsp okes +Ġide as +Ġreg ist +so ft +ĠD el +ĠP C +Ġpro pos +Ġlaun ch +Ġbott om +T H +ĠP lease +v est +it z +ĠIn ter +Ġsc ript +Ġr at +ar ning +Ġ il +ĠJ er +ĠA re +Ġwh atever +ok en +ci ence +Ġmod e +Ġag ree +Ġs ources +Ġinit ial +Ġrest rict +Ġwond er +us ion +## ## +ĠS il +vil le +Ġb urn +t w +as ion +Ġ £ +Ġn or +u ing +Ġre ached +Ġs un +Ġc ateg +ig ration +Ġc ook +Ġprom ot +Ġm ale +Ġcl imate +Ġf ix +Ġalleg ed +U R +all ed +Ġim ages +C ont +ot a +Ġschool s +i os +Ġd rop +Ġst ream +ĠM o +Ġprevious ly +al ing +Ġp et +Ġdou ble +Ġ( @ +ann el +Ġdef ault +t ies +Ġr ank +ĠD ec +ĠCoun cil +Ġweap on +Ġst ock +Ġanal y +ĠSt r +Ġpict ure +ĠPol ice +f erence +Ġcent ury +Ġcitiz ens +Ġon to +Ġexp and +Ġhe ro +ĠS ol +Ġw ild +Ġupd ate +Ġcustom ers +r ont +d ef +Ġl ik +Ġcrim inal +ĠChrist ian +S P +7 6 +Ġle aving +Ġother wise +ĠD ist +Ġbas is +5 2 +5 3 +ic ip +ĠB er +Ġrecomm end +Ġfl oor +Ġc rowd +ol es +Ġ7 0 +Ġcent ral +ĠE v +Ġd ream +Ġdown load +Ġconf ir +ĠTh om +Ġwind ow +Ġhapp ens +Ġun it +Ġt end +Ġs pl +Ġbec omes +Ġfight ing +Ġpred ict +ĠP ress +ĠP ower +Ġhe avy +ak ed +Ġf an +or ter +ate gy +B A +iz es +Ġsp end +H ere +Ġ200 7 +Ġad op +ĠH am +Ġfoot ball +ĠP ort +od ay +5 1 +amp ions +Ġtrans fer +h t +Ġ3 8 +ter m +ac ity +Ġb ur +] , +tern al +r ig +b ut +Ġthere fore +ĠB ecause +res p +re y +Ġm ission +S ome +Ġnot ed +Ġass um +Ġdise ase +Ġed it +Ġprog ress +r d +ĠB rown +oc al +Ġadd ing +Ġra ised +ĠAn y +Ġt ick +Ġsee ing +ĠPe ople +Ġagre ement +Ġser ver +Ġw at +Ġdeb ate +Ġsupp osed +il ing +Ġlarg est +Ġsuccess ful +ĠP ri +ĠDemocr atic +Ġj ump +ĠSyri a +Ġown ers +Ġoff ers +Ġshoot ing +Ġeff ic +se y +Ġha ven +ver se +te red +ĠL ight +im al +ĠB ig +Ġdef end +Ġbe at +Ġrecord s +% ) +Ġsc en +Ġemploy ees +Ġdev ices +he m +Ġcom mer +ĠM ex +Ġbenef it +ĠPro f +Ġil leg +Ġsur face +ĠAl so +Ġh arm +ing ly +w ide +ĠA lex +Ġsh ut +ĠC ur +Ġl ose +p m +Ġchall enge +se mb +Ġst ation +Ġint elligence +Ġacc ur +ĠFl or +Ġrequ ires +ĠM al +b um +Ġh ospital +Ġsp irit +Ġoff ered +Ġprodu ce +ĠComm un +Ġcreat ing +Ġcr is +s pect +Ġend ed +Ġd aily +Ġvot ers +land s +i as +i h +on a +Ġsm art +ĠOff ice +ĠL ord +ri al +ĠIntern et +Ġcirc um +Ġextreme ly +' . 
+Ġopin ion +ĠM il +Ġg ain +B S +ĠF in +y p +Ġuse ful +Ġbud get +Ġcom fort +is f +Ġback ground +el ine +Ġep isode +Ġen emy +Ġtri al +Ġestab lish +d ate +ĠC ap +Ġcontin ues +Ġshow ing +ĠUn ion +w ith +Ġpost ed +ĠSy stem +Ġe at +ri an +Ġr ise +ĠGerman y +il s +Ġsign ed +Ġv ill +Ġgr and +m or +ĠEng land +Ġproject s +um ber +Ġconf erence +z a +Ġrespons ible +ĠAr ab +Ġlearn ed +âĢĶ âĢĶ +i pping +ĠGe orge +O C +Ġreturn ed +ĠAustral ia +Ġb rief +Q u +Ġbr and +ill ing +ab led +Ġhig hest +Ġtr ain +ĠComm ission +wh ile +Ġn om +cept ion +Ġm ut +ĠBl ue +Ġinc ident +v ant +8 6 +ĠI D +Ġn uclear +7 4 +ĠL ike +ĠR E +ĠM icro +l i +m ail +Ġcharg es +8 9 +Ġad just +ad o +Ġear th +N A +Ġpr ices +P A +Ġd raft +Ġrun s +Ġcandid ate +ens es +Ġmanag ement +ĠPh il +ĠM iss +Ġte ach +g ram +Ġunderstand ing +a it +ic ago +A dd +ĠE p +sec ut +Ġsepar ate +Ġinst ance +Ġe th +Ġun less +**** **** +ĠF ore +in ate +Ġoper ations +S p +Ġf aith +g ar +ĠCh urch +ron ic +Ġconf ig +os ure +Ġactiv ities +Ġtrad itional +Ġ3 6 +Ġd irection +Ġmach ine +Ġsur round +Ġp ush +un ction +ĠE U +Ġeas ier +Ġarg ument +G B +Ġm icro +Ġsp ending +iz ations +Ġthe ory +ad ow +Ġcall ing +ĠL ast +Ġd er +Ġinflu ence +Ġcomm it +Ġph oto +Ġun c +ist ry +g n +ast e +ack s +Ġdis p +ad y +d o +ĠG ood +Ġ ` +Ġw ish +Ġreve aled +Âł Âł +l ig +Ġen force +ĠComm ittee +Ġche m +Ġmil es +Ġinterest ed +Ġsol ution +ic y +in ct +Ġ- > +ĠD et +Ġrem oved +Ġcomp ar +e ah +Ġpl ant +ĠS ince +Ġachie ve +Ġadvant age +Ġslight ly +b ing +Ġpl aced +u nder +201 5 +ĠM ad +Ġt im +os es +Ġc ru +ĠR ock +Ġmost ly +Ġneg ative +Ġset ting +Ġprodu ced +Ġm ur +Ġconnect ion +ĠM er +Ġdri ver +Ġexecut ive +Ġass ault +Ġb orn +ĠV er +t ained +Ġstruct ure +Ġredu ce +Ġdec ades +Ġd ed +u ke +ĠM any +idd en +Ġle ague +S e +Ġjo in +Ġdis co +Ġd ie +c ks +act ions +Ġass ess +ag n +Ġgo als +our s +I R +Ġsen ior +ill er +m od +ip ment +oc ol +u y +ĠQ ue +Ġpart ies +ir gin +Ġle arning +it able +Ġstre et +Ġcamer a +A pp +Ġsk ills +b re +c ious +Ġcele br +ĠFr anc +Ġexist ing +Ġwill ing +l or +Ġ id +ĠSp ace +Ġcrit ical +ĠL a +ortun ately +Ġser ve +Ġc old +Ġspec ies +T S +Ġanim als +ĠB ay +Ġold er +ĠU nder +est ic +ĠT re +Ġte acher +Ġpre fer +v is +Ġth read +ĠM att +Ġmanag er +ãĥ » +Ġprofess ional +ĠV ol +Ġnot es +The se +ul a +Ġf resh +ent ed +u zz +ed y +clus ion +ĠR el +Ġdoub t +E O +Ġopen ed +ĠB it +Ad vertisement +Ġgu ess +ĠU N +Ġse qu +Ġexpl ain +ott en +Ġatt ract +ak s +Ġstr ing +Ġcont ext +oss ible +ĠRepublic ans +Ġsol id +Ġc ities +Ġask ing +Ġr andom +u ps +ur ies +ar ant +dd en +g l +ĠFlor ida +Ġdep end +ĠSc ott +Ġ3 3 +Ġi T +ic on +Ġmention ed +Ġ2 000 +Ġclaim ed +Ġdefin itely +ul f +Ġc ore +Ġopen ing +ĠCon st +wh ich +ĠT ra +A G +7 2 +Ġbelie ved +ad a +Ġ4 8 +ĠSec urity +yr ight +ĠP et +ĠL ou +Ġhold ing +======== ======== +Ġ ice +Ġb row +Ġauthor ities +h ost +w ord +Ġsc ore +ĠD iv +Ġcell s +Ġtrans l +Ġneigh bor +Ġrem ove +u ct +Ġdist rict +ĠA ccording +Ġwor se +Ġconcern s +Ġpresident ial +Ġpolic ies +ĠH all +7 3 +Ġh us +A Y +Ġ200 6 +ĠJ ud +Ġindepend ent +ĠJust ice +ili ar +pr int +igh ter +Ġprotect ion +z en +Ġsu dden +h ouse +ĠJ es +P R +ĠIn f +Ġb ul +Ġ _ +ĠServ ice +ĠP R +Ġstr ategy +ff ect +Ġgirl s +Ġmiss ing +oy al +ĠTe am +ul ated +Ġd at +Ġpolit ics +ab or +A ccording +Ġspe ll +Ġg raph +ort hern +T C +A b +Ġlab or +is her +Ġk ick +ĠiT unes +Ġstep s +pos es +Ġsmall er +E n +ber t +Ġro ll +Ġresear chers +Ġcl osed +Ġtrans port +Ġlaw y +________ ________ +ĠCh icago +Ġas pect +Ġn one +Ġmar riage +9 6 +Ġe lements +ĠF re +ĠS al +Ġd ram +F C +t op +e qu +Ġhe aring +Ġsupport ed +Ġtest ing +co hol +Ġmass 
ive +Ġst ick +Ġgu ard +is co +ph one +F rom +How ever +Ġb order +Ġcop y +ograph y +l ist +7 1 +Ġown er +cl ass +ru it +r ate +ĠO nce +Ġdig ital +Ġt ask +ER S +Ġinc red +t es ++ + +ĠFr ance +Ġb reat +ow l +Ġiss ued +ĠW estern +Ġdet ect +Ġpart ners +Ġsh ared +ĠC all +Ġcan cer +ac he +rib e +Ġexpl ained +Ġhe at +{ " +Ġinvest ment +ĠB ook +Ġw ood +Ġtool s +ĠAl though +Ġbelie f +Ġcris is +Ġg e +ĠM P +Ġoper ation +ty pe +~ ~ +g a +Ġcont ains +ant a +Ġexp ress +ĠG roup +ĠJ ournal +k a +Ġam b +ĠUS A +Ġfind ing +Ġfund ing +h ow +Ġestab lished +ide os +Ġdeg ree +Ġdanger ous +ang ing +Ġfre edom +pp ort +out hern +Ġch urch +Ġc atch +ĠTw o +Ġpres ence +ĠGu ard +U p +Ġauthor ity +ĠPro ject +Ġbut ton +Ġcon sequ +Ġval id +Ġwe ak +Ġstart s +Ġref erence +ĠM em +" ) +U N +or age +ĠO pen +Ġcol lection +y m +g ency +Ġbeaut iful +ro s +Ġtell s +Ġwa iting +n el +Ġprov iding +ĠDemocr ats +Ġd aughter +Ġm aster +Ġpur poses +ĠJapan ese +Ġequ al +Ġturn s +Ġdoc uments +Ġwatch ing +R es +Ġr an +201 4 +Ġre ject +ĠKore a +Ġvictim s +Le vel +ere nces +Ġw itness +Ġ3 4 +Ġre form +com ing +Ġocc up +Ġc aught +Ġtra ffic +ad ing +Ġmod els +ar io +Ġserv ed +Ġb atter +u ate +ĠSecret ary +Ġagre ed +Ġtr uly +yn am +ĠR et +Ġun its +ĠRes earch +h and +az ine +ĠM ike +Ġvar iety +ot al +Ġam azing +Ġconfir med +Ġentire ly +Ġpurch ase +Ġe lement +Ġc ash +Ġdeter mine +D e +Ġc ars +ĠW all +â ĸ +Ġview s +Ġdrug s +Ġdep artment +ĠSt ep +u it +Ġ3 9 +as ure +ĠCl ass +Ġc overed +ĠB ank +Ġme re +u ana +Ġmult i +Ġm ix +Ġun like +lev ision +Ġsto pped +Ġs em +ĠG al +ul es +Ġwe l +ĠJohn son +l a +Ġsk ill +Ġbec oming +ri e +Ġappropri ate +f e +ell ow +ĠPro t +ul ate +oc ation +Ġweek end +od ies +Ġsit es +Ġanim al +ĠT im +Ġsc ale +Ġcharg ed +Ġinst ruct +ill a +Ġmethod s +Ġc ert +Ġjud ge +ĠH el +Ġdoll ars +Ġstand ing +ĠS qu +Ġdeb t +l iam +Ġdri ving +ĠS um +ĠEd ition +Ġal bum +and on +I F +ĠU k +6 3 +ad er +Ġcommer cial +es h +ĠGovern ment +Ġdisc overed +Ġout put +ĠHill ary +ĠCar ol +Ġ200 5 +Ġab use +anc ing +Ġsw itch +Ġann ual +T w +Ġst ated +ag ement +in ner +Ġdem ocr +Ġres idents +Ġallow ing +Ġfact ors +od d +Ġf uck +em ies +Ġoccur red +ot i +Ġn orth +ĠP ublic +Ġinj ury +Ġins urance +C L +oll y +ã Ģ +Ġrepe ated +Ġar ms +ang ed +Ġconst ruction +Ġf le +P U +ic ians +Ġfor ms +ĠMc C +ant ic +Ġm ental +p ire +Ġequ ipment +Ġf ant +Ġdiscuss ion +Ġregard ing +k in +ar p +Ġch air +og ue +Ġpro ceed +ĠI d +O ur +Ġmur der +M an +Ġ4 9 +as p +Ġsupp ly +Ġin put +Ġwe alth +liam ent +Ġpro ced +or ial +ĠSt at +ĠN FL +hen s +ĠInst itute +Ġput ting +ourn ament +et ic +Ġloc ated +Ġk id +er ia +r un +Ġpr inc +Ġ ! 
+go ing +ĠB et +Ġcl ot +Ġtell ing +Ġprop osed +i ot +or ry +Ġfund s +g ment +ĠL ife +Ġb aby +ĠB ack +Ġsp oke +Im age +Ġear n +ĠA T +g u +Ġex change +ĠL in +ov ing +Ġp air +M ore +az on +Ġarrest ed +Ġkill ing +c an +ĠC ard +y d +Ġident ified +Ġm obile +Ġthan ks +ony m +ĠF orm +Ġhundred s +ĠCh ris +ĠC at +Ġtre nd +h at +ĠA v +om an +Ġelect ric +ĠW il +S E +O f +Ġrest aur +ot ed +Ġtr ig +Ġn ine +Ġb omb +Wh y + ¯ +Ġco verage +Ġapp eal +ĠRober t +ĠS up +Ġfin ished +Ġfl ow +Ġdel iver +Ġcal cul +Ġphot os +Ġph il +Ġpie ces +Ġapp re +k es +Ġr ough +D o +Ġpart ner +Ġconcern ed +Ġ3 7 +ĠG en +C ol +ct ors +Ġ= > +st ate +Ġsuggest ed +ĠFor ce +C E +Ġher self +ĠPl an +w orks +o oth +ren cy +Ġcor ner +Ġhus band +Ġintern et +ĠA ut +em s +os en +ĠAt l +g en +Ġbal ance +6 2 +Ġsound s +te xt +Ġar r +ov es +Ġmill ions +Ġrad io +Ġsat isf +ĠD am +M r +G o +S pe +Ġcomb at +r ant +ĠG ree +Ġf uel +Ġdist ance +Ġtest s +Ġdec re +ĠE r +Ġman aged +D S +Ġt it +Ġmeas ures +ĠL iber +Ġatt end +as hed +ĠJ ose +ĠN ight +d it +ĠN ov +ĠE nd +out s +Ġgener ation +Ġadv oc +y th +Ġconvers ation +ĠS ky +act ive +ce l +ri er +ĠFr ank +Ġg ender +Ġcon cent +Ġcar ried +and a +ĠV irgin +Ġarri ved +ic ide +ad ed +Ġfail ure +Ġmin imum +le ts +Ġwor st +Ġkeep ing +Ġint ended +Ġilleg al +Ġsub sc +Ġdetermin ed +Ġtri p +Y es +Ġra ise +Ġ ~ +Ġfeel s +Ġpack age +ĠJ o +h i +201 6 +re al +Ġf ra +Ġsy mb +M e +uck y +p ret +ĠK h +ĠEd it +ĠWe b +em ic +ĠCol or +Ġjust ice +I nt +Ġfar m +ck now +" > +el ess +Ġredu ced +Ġ5 00 +x x +ĠR ad +ĠW ood +Ġcl in +Ġhy p +il er +ur a +k ins +8 5 +6 1 +ĠThe ir +ĠM ary +Ġs an +Ġno vel +ĠWh o +Ġcap acity +Ġimp ossible +Ġpl ays +Ġmin ister +ij uana +ic ate +ĠS et +Ġf ram +Ġ ing +Ġcommun ities +ĠF BI +it a +Ġb on +Ġstr ateg +Ġinterest s +l ock +g ers +m as +ĠAN D +Ġconflic t +Ġrequire ments +Ġs ac +Ġoper ating +in i +rel ated +Ġcomm itted +Ġrelative ly +Ġs outh +¯ ¯ +Ġaff ord +Ġident ity +Ġdec isions +Ġacc used +pl ace +Ġvict ory +o ch +i at +N ame +C om +t ion +ed s +Ġsee k +Ġt ight +ĠIm ages +Ġinit i +Ġhum ans +Ġfam iliar +Ġaud ience +Ġintern al +vent ure +Ġs ides +ĠT O +Ġd im +Ġcon clud +Ġapp oint +Ġenforce ment +ĠJ im +ĠAssoci ation +Ġcircum st +ĠCanad ian +Ġjo ined +Ġdiffere nces +ĠL os +Ġprot est +Ġtw ice +w in +Ġgl ass +ars h +ĠAr my +Ġexp ression +Ġdec ide +Ġplan ning +an ia +Ġhand le +ĠMicro soft +ĠN or +Ġmax imum +ĠRe v +Ġse a +Ġev al +Ġhel ps +re f +Ġb ound +Ġm outh +Ġstand ards +Ġcl im +ĠC amp +ĠF ox +cl es +Ġar my +ĠTe chn +ack ing +x y +S S +Ġ4 2 +Ġbu g +ĠUk rain +ĠM ax +ĠJ ones +ĠSh ow +l o +Ġplan et +Ġ7 5 +Ġwin ning +Ġf aster +Ġspe ct +Ġbro ken +T R +Ġdef ined +Ġhealth y +Ġcompet ition +htt ps +ĠIs land +ĠF e +Ġannoun ce +ĠC up +ĠInst ead +Ġcl ient +Ġposs ibly +se ction +ock et +l ook +Ġfin ish +Ġcre w +Ġres erv +Ġed itor +Ġh ate +Ġs ale +Ġcontro vers +Ġp ages +w ing +Ġnum er +Ġopp osition +Ġ200 4 +Ġref uge +Ġfl ight +Ġap art +ĠL at +A meric +ĠAfric a +Ġapplic ations +ĠPal est +ĠB ur +Ġg ar +ĠSoc ial +Ġup gr +Ġsh ape +Ġspe aking +ans ion +a o +ĠS n +Ġwor ry +ĠBrit ain +P lease +rou d +Ġh un +Ġintrodu ced +Ġd iet +I nd +ĠSec ond +Ġfun ctions +ut s +ĠE ach +ĠJe ff +Ġst ress +Ġaccount s +Ġgu arant +ĠAn n +ed ia +Ġhon est +Ġt ree +ĠAfric an +ĠB ush +} , +Ġs ch +ĠOn ly +Ġf if +ig an +Ġexerc ise +ĠEx p +Ġscient ists +Ġlegisl ation +ĠW ork +ĠS pr +à Ĥ +ĠH uman +Ġ è +Ġsur vey +Ġr ich +ri p +Ġmain tain +Ġfl o +Ġleaders hip +st ream +ĠIslam ic +Ġ 01 +ĠCol lege +Ġmag ic +ĠPr ime +Ġfig ures +201 7 +ind er +x ual +ĠDe ad +Ġabsolute ly +Ġfour th +Ġpresent ed +resp ond +rib le +Ġal cohol +at o +ĠD E +por ary +Ġgr ab 
+Ġvar i +Ġqu ant +ĠPh oto +Ġpl us +r ick +ar ks +Ġaltern ative +Ġp il +Ġappro x +th at +Ġobject s +ĠR o +ĠAnd roid +Ġsignificant ly +ĠR oad +k ay +R ead +av or +Ġa cknow +ĠH D +ĠS ing +O r +ĠM ont +Ġun s +pro f +Ġneg oti +ĠAr ch +ik i +Ġte levision +ĠJew ish +Ġcomm ittee +Ġmot or +Ġappear ance +Ġs itting +Ġstri ke +ĠD own +com p +ĠH ist +Ġf old +ac ement +ĠLou is +Ġbel ong +ĠâĢ ¢ +Ġm ort +Ġprep ared +Ġ6 4 +ĠM aster +Ġind eed +ĠD en +Ġre nt +T A +our ney +ar c +S u +9 7 +Ġadv ice +Ġchang ing +Ġlist ed +Ġlaun ched +is ation +ĠP eter +is hes +Ġl ived +ĠM el +ĠSup reme +ĠF ederal +Ġ) ; +ruct ure +Ġset s +Ġphil os +u ous +Ġ ł +Ġappl ied +ĠN OT +Ġhous ing +ĠM ount +Ġo dd +Ġsu st +D A +ffic ient +Ġ ? +ol ved +Ġp owers +Ġth r +Ġrem aining +ĠW ater +L C +Ġca uses +ãģ ® +Ġman ner +ad s +Ġsuggest s +Ġend s +stand ing +f ig +ĠD un +id th +Ġg ay +Ġter min +ĠAngel es +M S +Ġscient ific +Ġco al +ap ers +b ar +ĠThom as +Ġsy m +ĠR un +th is +P C +igr ants +Ġmin ute +ĠDist rict +cell ent +Ġle aves +Ġcomple ted +am in +Ġfoc used +Ġmon itor +Ġveh icles +M A +ĠM ass +ĠGr and +Ġaffect ed +itution al +Ġconst ruct +Ġfollow s +Ġt on +re ens +Ġh omes +ĠE xt +ĠLe vel +r ast +ĠI r +Ġel im +Ġlarge ly +ĠJ oe +Ġvot es +all s +Ġbusiness es +ĠFound ation +ĠCent ral +Ġy ards +Ġmaterial s +ul ner +Ġgu ide +Ġclos er +um s +Ġsp orts +ed er +J ust +Ġtax es +8 4 +ĠO ld +Ġdec ade +ol a +Ġv ir +Ġdro pped +Ġdel ay +it ect +Ġsec ure +ste in +le vel +Ġtre ated +Ġfil ed +ain e +Ġv an +Ġm ir +Ġcol umn +ict ed +e per +Ġro t +Ġcons ult +Ġent ry +Ġmar ijuana +ĠD ou +Ġapparent ly +ok ing +clus ive +Ġincre ases +an o +Ġspecific ally +Ġte le +ens ions +Ġrelig ion +ab ilities +Ġfr ame +ĠN ote +ĠLe e +Ġhelp ing +Ġed ge +ost on +Ġorgan izations +à ĥ +ĠB oth +hip s +Ġbig ger +Ġbo ost +ĠSt and +Ġro w +ul s +ab ase +Ġr id +L et +are n +ra ve +Ġst ret +P D +Ġv ision +Ġwe aring +Ġappre ci +Ġa ward +ĠU se +Ġfact or +w ar +ul ations +) ( +Ġg od +Ġter rit +Ġpar am +ast s +8 7 +Ġen emies +ĠG ames +F F +Ġacc ident +W ell +ĠMart in +T ER +Ġat h +ĠHe ll +Ġfor g +Ġve ter +ĠMed ic +f ree +Ġst ars +Ġexp ensive +Ġac ad +ra wn +ĠW he +Ġl ock +Ġform at +Ġsold iers +s m +Ġag ent +Ġrespons ibility +or a +ĠS cience +Ġrap id +Ġt ough +ĠJes us +Ġbelie ves +M L +Ġwe ar +le te +Ãĥ ÃĤ +ĠD ri +Ġcomm ission +ĠB ob +O h +ap ed +Ġwar m +ÃĥÃĤ ÃĥÃĤ +Ġ200 3 +ort ion +Ġhas n +ust er +Ġun ivers +ĠI ll +Ġk ing +olog ies +9 4 +ĠT em +ĠM os +Ġpat ient +ĠMex ico +ce an +ĠDe ath +ĠSand ers +y ou +ĠC ast +ĠComp any +pt y +Ġhappen ing +F P +ĠB attle +Ġb ought +A m +M od +U s +ut ers +ĠC re +ĠTh ose +Ġ4 4 +is er +Ġs oul +ĠT op +ĠHar ry +ĠA w +Ġse at +ff ee +Ġrev olution +Ġ( " +ĠD uring +et te +Ġr ing +Ġoff ensive +Ġreturn s +Ġv ideos +Ġdis cl +Ġfam ous +en ced +ĠS ign +ĠR iver +Ġ3 00 +P M +ĠB us +ĠC H +Ġcandid ates +ard en +Ġpercent age +Ġvis ual +Ġthan k +Ġtrou ble +ner gy +Ġ200 1 +Ġpro ve +ash ion +Ġen h +ĠL ong +U M +Ġconnect ed +Ġposs ibility +O ver +Ġexper t +Ġl ibrary +art s +ĠDirect or +Ġfell ow +9 2 +ir ty +Ġd ry +Ġsign s +ĠL ove +Ġqu iet +f oot +Ġp ure +ĠH un +Ġf illed +ph as +ĠE lect +end ment +ĠEx pl +Ġun able +n s +m o +Ġv ast +ob e +Ġident ify +app ing +ĠCarol ina +g ress +Ġpro te +Ġf ish +Ġcircumst ances +raz y +ĠPh ot +Ġb odies +ĠM ur +Ġdevelop ing +ĠA R +Ġexperien ced +Ġsubst ant +ĠBo ard +es ome +Ġdom estic +Ġcomb ined +ĠP ut +Ġchem ical +ĠCh ild +Ġpo ol +ĠC y +Ġe gg +c ons +st ers +Ġh urt +Ġmark ets +Ġconserv ative +Ġsupp orters +Ġag encies +id el +O b +ur b +Ġ4 3 +ĠDef ense +y e +ĠA p +du le +Ġtemper ature +Ġconduct ed +ĠCh ief +Ġpull ed +Ġf ol +L ast +ont o +os is 
+V ER +D es +ĠP an +F irst +Ġadv ance +Ġlic ense +r ors +ĠJ on +Ġimag ine +Ġhe ll +Ġf ixed +Ġinc or +os ite +ĠL og +ick en +] : +Ġsurpr ise +h ab +Ġc raft +ol t +ĠJ ul +Ġd ial +Ġrele vant +Ġent ered +Ġlead s +ĠA D +ĠCle an +Ġpict ures +ess or +Ġal t +Ġpay ing +P er +ĠMark et +Ġupd ates +am ily +ĠT ype +ĠH ome +Ġ5 5 +semb ly +rom e +8 3 +Ġgreat est +Ġhe ight +Ġhe av +ain ts +Ġlist en +as er +ĠS H +Ġcap able +ac le +Ġpers pect +in ating +Ġoff ering +ry pt +ĠDe velop +ab in +r c +Ġbr ight +al ty +ar row +Ġsupp l +ind ing +ack ed +gy pt +ĠAn other +p g +ĠVirgin ia +ĠL u +Ġpl anned +Ġp it +Ġswe et +T ype +ĠD i +Ġtyp ically +ĠFranc isco +Ġpro spect +ĠD an +Ġte en +re es +Ġsc hed +Ġh ol +Ġsc r +Ġlot s +l ife +Ġnews p +Ġfor get +ĠN one +ĠM iddle +ĠR yan +ed d +Ġse vere +Ġsu it +ll er +9 3 +Ġcor respond +Ġexpl os +u ations +Ġfl ag +g ame +r id +Ġpr in +ĠD ata +Ġde ploy +ĠEn ter +su it +gh an +ĠM en +Ġthough ts +Ġmat ters +Ġad apt +ĠA ri +Ġf ill +Ġfor th +Ġs am +Ġ4 1 +Ġpay ment +ĠH or +Ġsp ring +du c +Ġl osing +Ġbring ing +F O +al a +Ġdist ribution +he red +b our +ĠIsrael i +om a +Ġcomb ination +Ġpl enty +V E +C an +ĠH aw +Ġper man +ĠSpe cial +Ġto w +Ġsee king +Ġexam ples +Ġclass es +c r +Ġbe er +Ġmov es +ĠI P +ĠK n +Ġpan el +E ven +Ġproper ly +Ġr is +Ġpl ug +Ġestim ated +E very +Ġdef ensive +ag raph +Ġpre gn +Ġinst it +ĠV ict +Ġvol ume +Ġpos itions +Ġl inks +ĠPro gram +ĠWe ek +ag ues +Ġtrans form +k er +ĠC EO +Ġc as +Ġopp onent +Ġtwe et +ĠC ode +Ġsh op +Ġf ly +Ġtal ks +Ġb ag +Ph one +Ġa id +Ġpl ants +Ġ6 5 +Ġatt orney +ar ters +qu est +ĠMag ic +Ġbeg ins +Ġmy ster +Ġenvironment al +Ġst orage +N N +Ġm arg +Ġs ke +Ġmet al +ell y +Ġord ered +Ġrem ained +Ġl oved +Ġprom pt +Ġupd ated +Ġexper ts +Ġwalk ing +Ġan cient +Ġperform ed +AT E +Ġne ither +i ency +Ġmanufact ure +ĠP ak +Ġselect ed +Ġm ine +Ġult imately +Ġexpl an +Ġlab el +ĠServ ices +ribut ed +Tr ump +Ġsy n +ĠU lt +S C +Ġme at +Ġg iant +ĠW ars +ĠO N +Ġad m +Ġinter pret +Ġeven ing +Ġev il +ĠB oston +ĠW ild +Ġ à +ĠBit coin +ĠAm azon +D r +ĠIn formation +Ġobvious ly +Ġadv anced +Ph oto +ol ar +Ġwe ather +Ġsymb ol +Ġso le +Ġpot entially +ost er +Ġorig inally +m un +3 00 +az e +ess ions +Ġde ck +Ġst ood +Ġyou th +ĠB ern +R ep +ĠT est +Ġbas ically +ot ic +Ġinvol ve +ol it +ly n +S ee +Ġair craft +Ġconf irm +E W +Ġmess ages +ĠRich ard +Ġk it +Ġpro hib +Ġv ulner +is ters +Ġexist ence +Ġturn ing +ĠS P +Ġdes ire +Ġfl at +Ġm ent +se ason +ang es +Ġneighbor hood +ĠL ake +AT ION +Ġpoint ed +b ur +Ġinn ov +uc ks +U L +Ġprofess or +Ġexp ressed +A B +ic ious +Ġ200 2 +ĠDe v +Ġs ession +Ġb are +s en +Ġdis s +ĠC ath +ĠP ass +ĠP oint +Ġdo ctor +or row +ail ed +ĠR ub +ĠD C +ĠChar l +p erson +Ġwrit er +igh ters +ure au +Ġob lig +Ġrecord ed +Ġbro ke +Ġord ers +il ty +Ġmot ion +in ity +l aw +ad ium +Ġimm igration +Ġcontr ast +Ġb att +Ġex cellent +Ġtechn ical +am i +Ġt un +Ġcl oud +ĠY ear +ge on +Ġcre ation +Ġstr ange +Ġa uth +Ġfor t +b orn +Ġext ent +ĠT oday +ĠCl ub +Ġr ain +Ġs ample +Ġaccept ed +Ġt act +Ġf ired +ĠS on +Ġstand s +Ġb oot +Ġ4 7 +Ġstat ements +Ġvers ions +Ġse lling +ound ed +Ġ199 0 +Ġwere n +ĠW atch +Ġexper iment +P ost +Ġret ail +ul ed +In st +un te +ãĥ ¼ +Ġdep art +Ġb ond +i very +om pl +Ġre action +ĠSyri an +ĠP ac +app ed +ani el +D P +Ġres olution +Ġre act +Ġappro ved +on om +m ond +ĠO ffic +-- - +Ġrepl ace +Ġt ack +Ġsp ort +Ġch ain +Ġemer gency +r ad +ĠPalest in +Ġ4 6 +Ġautom atically +Ġrout e +Ġp al +Ġb anks +ĠPar is +ĠMed ia +ro ad +ic ing +i xt +ist ed +Ġg rew +Ġco ord +ĠW here +om in +Ġsub s +� � +Ġ ± +Ġcorpor ate +Ġse lection +n oon +ĠRep ort +c s 
+clud ing +ord ers +anc he +ĠIt s +Ġslow ly +ĠE gypt +ĠA cc +Ġcol le +iqu es +E X +Ġattempt s +ur l +ĠC ross +Ġfind ings +ĠS C +ĠO R +Ġind ex +ens ity +ĠW ay +ĠL and +Ġsh ock +d is +Ġd ynam +Ġc art +m osp +S ince +i est +ĠB oy +Ġst orm +ĠCont in +201 3 +he w +il it +Ġess ential +iqu id +O ther +ive red +Ġreason able +A ct +Ġsub sequ +ĠP ack +ĠF ort +Ġconsider ing +Ġun iversity +l og +Ġmar ried +Ġill ust +ĠTr ue +£ ı +Ġnumer ous +rast ructure +Ġserious ly +Ġrefer red +u a +Ġconsist ent +on na +ĠRe al +ru ption +ci ples +Ġfact s +9 1 +ot es +er g +The n +Ġacc ompl +N ote +Ġre venue +Ġpass ing +Ġm al +e en +ĠY et +Ġg ather +ter day +ew ork +ĠA uthor +P e +Ġopt im +Ġr ub +Ġè £ı +Ġun known +st one +Ġun ion +ol ve +Ġopportun ities +Ġbrow ser +ĠW al +ĠC ost +Ġreport ing +st s +p et +Ġs and +Ġsudden ly +Ġsurpr ising +ĠV R +Ġsomew hat +ĠB as +ult ure +iz z +ĠC D +Ġchalleng es +Ġsett ings +Ġexperien ces +ĠF ull +Ġcan n +Ġrece iving +ES T +Ġj oint +Ġcult ural +Ġa st +8 2 +as tern +ce ived +ĠC ru +Ġb ull +p ired +am m +Ġfac ing +p ower +Ġb oss +ĠH ol +Ġinst r +Ġincreasing ly +Ġsh ift +Ġstre ets +ĠWilliam s +ab b +Ġl ie +Ġl augh +ĠC a +P L +Ġadult s +Ġcustom er +Ġob tained +Ġsupport ing +ht ml +f ire +Ġdetail ed +Ġpick ed +ĠR ight +ld er +E E +st ood +ĠK im +Ġw ire +Ġs ight +Ġdevelop ers +Ġpers ons +Ġs ad +Ġc up +Ġwar ning +Ġboy s +l ong +Ġb ird +f o +Ġw al +Ġobserv ed +Ġz one +iven ess +Ġch annel +c ript +Ġref used +ĠAg ain +Ġsu c +Ġspokes man +ĠRe f +r ite +ou ston +ãĥ ³ +ĠS her +Ġact s +ĠN ame +Ġstrugg le +ar ry +omet imes +Ġdisc rim +H T +Ġcateg ory +Ġreal ize +Ġemploy ee +ĠAf ghan +en ger +Ġgun s +ĠSte ve +ĠM ot +ĠO l +ok ed +Ġth ick +Ġfair ly +ill y +Ġsur ve +ĠM at +we ight +â Ķ +Ġtro ops +Ġag ents +Ġbatter y +Ġmot iv +à ¡ +S ec +d en +o very +L S +Ġfl u +Ġconf ident +ĠO per +Ġem pty +Ġp hen +Ġse ctor +Ġexc ited +Ġrem ote +ap h +o en +Ġdestroy ed +Ġmor al +ĠH P +ĠR on +Ġd ress +ĠB at +Ġl it +ĠM S +Ġa f +H L +r um +is ms +Ġshould n +Ġsym pt +ĠTor onto +het ic +Ġcar bon +Ġinstall ed +Ġviol ent +Ġsol ar +j a +Ġpract ices +Ġr ide +ĠP enn +Ġimpro ved +Ġaud io +Ġbehav i +ĠP S +Ġe ating +D ata +ĠRe view +p ass +cl aim +u ated +ang ers +c hen +Ġproper ties +Ġany where +An other +Ġbl ow +ĠJack son +Ġp roud +Ġplan e +l ines +Ġsqu are +Ġpro of +ans as +Ġtalk ed +m akers +Ġs ister +Ġhold s +Ġres ident +Ġ= = +Ġresist ance +Ġspl it +Ġpro secut +Ġconf idence +res ents +Ġcut s +Ġexcept ion +Ġz ero +Get ty +Ġcop yright +Ġtot ally +orm al +ific ations +ĠAustral ian +Ġs ick +Ġ1 50 +Ġhouse hold +Ġfe es +Ġdri vers +og en +ĠN Y +Ġnecess arily +Ġregul ations +ear ing +s l +Ġperspect ive +c are +ic ial +H is +Ġesc ape +Ġsurpr ised +ĠV an +ur rent +Ġv ac +8 1 +ĠTh us +Ġem phas +ĠCh ampions +ĠI ce +Ġn arr +Ġhead s +Ġca using +b el +f ortunately +ĠM a +Ġtarg ets +ci pl +Ġafter noon +Ġadd s +ĠMay be +ĠF our +ess ed +ple te +Ġus ual +ch o +ing u +Ġwith d +ĠE nergy +ĠE conom +O O +Ġart icles +Ġinj ured +Ġman age +Ġexpl ains +Ġdi agn +R ec +at ures +Ġlink ed +Ġdiscuss ed +Ġexpl o +Ġocc asion +ath an +Ġopp osite +Ġfac es +Ġden ied +ĠK night +Ġn ut +Ġapprox imately +Ġdisapp oint +onym ous +ĠB est +ĠL o +ĠH y +ĠA ff +Ġvot ing +an while +ĠII I +Ġinstit utions +ag ram +ĠD aily +Ġdr ag +Ġnear by +Ġgu ilty +Ġcon ver +P re +s hip +Ġre ward +Ġphilos oph +ĠS S +u gh +Ġapp s +f riend +Ġu pper +Ġad vert +Ġs now +Ġfr ust +Ġour selves +F r +ĠD ie +amp ion +Ġdis miss +Ġc ere +Ġsign al +f rom +Ġ ). 
+Ġ5 2 +Ġcr imes +it ors +est ival +use um +Ġcoun cil +ĠS aud +M ay +ĠG un +ic ian +et her +Ġsu fficient +ĠH en +so le +Ġhistor ical +ĠF ar +ĠT urn +Ġp in +Ġsuc ceed +m at +ly mp +Ġtrad ition +ĠO k +Ġc ro +Ġdesc ription +al le +Ġsk y +T e +Ġwide ly +Ġw ave +Ġdefin ition +ĠJew s +Ġcy cle +Ġref ere +Ġbr ings +us al +Ġal ive +Ġfrequ ently +Ġint ention +ĠCont rol +l v +y stem +Ġpriv acy +g ent +ren ce +ĠQu est +ĠChrist mas +Ġr ail +Ġco oper +Ġtest ed +ĠC apt +as ks +Ġcomfort able +Ġdel ivered +sc ape +Ġdep th +ĠG OP +Ġwrit es +Ġass ets +Ġsa v +im ents +Ġtrans ition +Ġart ist +ĠL ook +Ġl ob +Ġcomp onents +ar ity +Ġwalk ed +Ġro ot +Ġparticip ants +Ġnot iced +Ġres c +Ġn av +ĠAd minist +d a +ut ral +pl ate +Ġimport ance +Ġass ert +ious ly +c ription +Ġinj uries +ĠChe ck +Ġregist ered +Ġint ent +Ġmiss ed +ograph ic +Ġsent ence +oun ter +Ġassist ance +ev in +Ġdat abase +Ġbuild ings +Ġclass ic +Ġth inks +ĠOh io +P r +ug g +Ġfe e +p an +Ġeffect ively +Ġfac ility +Ġbe ar +Ġch apter +Ġdog s +ĠCol umb +Ġl atter +it ial +Ġad mitted +T V +ĠGe org +Ġpost s +\ \ +Ġlawy er +Ġequ ival +Ġm and +Ġcontro lled +ĠW alk +ĠAnd rew +Ġmen u +am ental +Ġprotect ed +v a +Ġadminist r +or al +Ġre in +ĠS ar +Ġamount s +Ġn ative +ĠM oon +Ġrep resents +Ġab andon +Ġcarry ing +Ġt ank +m ary +Ġdecl ared +T ube +Ġh at +Ġpun ish +el lect +m es +Ġun iverse +ĠR od +ph y +Ġinf rastructure +Ġ5 1 +Ġopp osed +ow nt +c a +ĠM ake +Ġhard ware +Ġco ffee +R el +b al +w orld +ĠS af +ĠSe a +in als +Ġown ed +Ġh all +ers ion +Ġdescrib e +ĠP ot +Ġport ion +Ġat mosp +Ġgovern ments +Ġdep ending +Ġoff ense +Ġtr ick +aw a +ĠL ine +ĠV is +ĠH ard +ĠOr ig +ĠCl ick +Ġdes k +ĠVal ley +ĠS ov +Ġmov ies +Ġrem ark +Ġm ail +Ġcons cious +Ġrul ing +ĠR ights +Ġmed ic +he nt +ĠW omen +> < +Ġrepl aced +ĠP rem +ĠTh anks +Ġre new +ĠB all +if orm +Ġsh ots +C omm +Ġar med +Ġconst ant +Ġt aste +Ġreal ized +Ġbu ff +Ġm o +Ġeffic ient +M ost +or ation +if ies +Ġcommun ication +Ġfl ood +Ġconsequ ences +Ġany way +ig g +ĠG M +ĠTh ank +Ġ iron +Ġev olution +ĠC op +tw itter +Ġ9 5 +Ġrelationship s +ad el +ĠYou ng +Ġpropos al +ay ers +uild ing +ĠH ot +OR E +c os +Ġcoll abor +P G +ax y +Ġknow ing +Ġsupport s +ow ed +Ġcontrol s +Ġmere ly +um er +Ġath let +Ġf ashion +p ath +Ġg ift +Ġer a +AN D +Ġkind s +ĠKore an +Ġleg it +ul ous +Ġess entially +Ġthe rap +n ic +Ġsuff ered +Ġh ur +Ġprom ise +Ġex cess +Ġover w +Ġpr ime +ĠH ouston +er ry +ĠM s +R S +201 2 +Ġst ores +ĠO lymp +Ġj ourney +Al though +S ub +ĠE duc +ĠCh apter +Ġrequest s +Ġconsum ers +Ġt iny +Ġis ol +ĠF air +b a +ĠY OU +Ġcr ash +ce ler +Ġemot ional +Ġgood s +Ġelect ed +Ġmod er +ĠLin ux +Ġbl ocks +Ġis land +ĠSoc iety +Ġelect ions +Ġbroad cast +Ġche ap +Ġn ations +Ġse asons +4 00 +Ġwas te +ĠS at +Ġfield s +em ploy +Ġprof ile +Ġauth ors +AL L +ĠG ra +w est +ĠT y +Ġdeath s +Ġv acc +Ġfor med +Ġd u +Ġon going +ĠMuslim s +el f +ig ure +Ġass ume +ĠUkrain e +w ater +Ġco ast +Ġvot ed +g or +ĠA S +ĠMich igan +az a +ĠAr m +i ro +Ġf lex +as ters +' ' +Ġwel come +ar l +Ġloc ations +ig ation +ĠF il +Ġbu ying +Ġarch itect +Ġhard er +ĠC ub +Ġinter face +Ġrestaur ant +Ġdisco ver +Ġex ceed +Ġfav our +ger y +Ġd uty +Ġp itch +ad or +ĠM ach +b oy +Ġrespond ed +Ġext ended +her s +M any +ra id +if er +ĠIn s +S er +Ġmed ium +s he +ĠS ports +Ġmag azine +ut ation +Ġlim its +ĠG all +Ġex ternal +raz il +Ġyoung er +t le +Ġrem ind +ĠC ON +Ġimmedi ate +Ġh idden +Ġvol unte +Ġsim pl +od cast +Ġph ase +d r +Ġpl ot +Ġexp osure +R I +og rap +v in +an ish +ĠAc ad +ĠEng ine +Ġexp ansion +ĠP ay +Y our +Ġpus hed +ĠE ll +ĠHe ad +Ġmarket ing +ĠA C +k et +Ġh its +Ġg ro 
+ĠA ge +ĠSc ot +] [ +Ġst im +Ġi Phone +Ī Ĵ +Ġn arrow +ĠGet ty +ĠTur key +Ġperfect ly +Ġen able +ut ch +Ġprec ise +Ġreg ime +Ġsh if +Ġcomp ens +g un +d iv +Ġch osen +ĠK en +An y +Ġtre es +Ġrecomm ended +ĠR en +u able +ĠH T +F ollow +E G +ĠH and +ĠK enn +Ġarg uments +Ġex ists +Ġb ike +ĠCons erv +Ġbre aking +ĠG ar +Ġc razy +Ġvirt ual +ay lor +ix el +Ġ19 80 +Ġper mission +ĠSer ies +Ġconsum er +Ġclose ly +c alled +Ġ5 4 +Ġhop es +Ġar ray +ĠW in +ĠLab our +Ġsp ons +ĠI re +Ġp ow +Ġread ers +Ġemploy ment +Ġcreat ure +Ġresult ing +Ġaccur ate +Ġmom ents +Ġarg ued +Ġp ed +D uring +Ġ5 3 +ĠT al +Ġs ought +Ġsuff ering +Ġ icon +le e +Ġ( $ +al ian + ° +Ġp ra +Ġbon us +( " +k o +Ġact ing +D E +f all +Ġcompar ison +Ġsm ooth +ĠN AS +u pp +ĠJose ph +ep ing +ĠT ake +ĠM id +Ġs ending +f ast +ĠF all +Ġdeal ing +us er +ĠOr gan +C o +Ġatt ached +Ġse es +% . +Ġtyp ical +AR T +Ġfind s +ĠAs ia +um in +ĠC ore +ĠE nt +in ent +u ce +ĠBl ood +ĠN ever +Ġem ails +Ġhigh light +Ġconf ront +at us +ut ed +Ġun us +Ġtop ic +ĠAd am +Ġb le +at i +Ġunder stood +S et +st ruct +T P +Ġm ob +a a +ĠSt art +pect ed +se ll +Ġded icated +ĠC A +u an +Ġsong s +esc ription +Ġte ch +Ġr ape +Ġas ide +Ġgr ant +Ġ5 6 +s ub +Ġarg ue +Ġcont aining +Ġsche dule +Ġliber al +Ġpublic ly +Ġheav ily +ĠU t +in er +ĠS ection +ĠC are +we et +l s +D is +âĶ Ģ +ĠF ollow +B ack +ĠI T +Ġb es +j i +ĠH it +est ed +Ġevery body +ĠSw ed +Ġfem in +Ġfac ilities +Ġcon ven +C omp +ĠO S +c ore +Ġan x +Ġdiv ision +ĠC am +ĠSt an +m ates +Ġexpl ore +pl om +Ġsh ares +pl oad +an es +Ġide al +et ers +ĠB ase +Ġpl astic +Ġdist inct +ĠNet work +ĠSe attle +Ġtrad ing +ens us +int end +Ġex hib +Ġinit ially +ĠF ood +Ġthous and +ĠBus iness +act er +Ġpar agraph +Ġrough ly +Ġw ww +Ġcreat ive +ĠCon f +Ġconsum ption +Ġfil ms +ag an +Ġob tain +Ġt all +Ġt or +Ġacknow led +Ġg rown +al o +K E +Ġ4 00 +end ers +t aining +U G +Ġsu icide +Ġwat ched +ĠL ist +al i +re hens +Ġsurround ing +Ġp ip +Ġf lying +ĠJ ava +ord an +Ġserv ing +in ations +p ost +Ġsh o +A v +Ġj ail +z y +Ġ199 9 +Ġ< / +Ġliter ally +ĠS ir +Ġexp osed +Ġl ies +st ar +Ġb at +Ġear ned +ĠD ig +Ġspec ified +ĠSe ason +Ġdeg rees +Don ald +Ġcent re +Ġsh aring +Ġwin ter +ĠC O +C he +Ġ Î +M P +Ġun w +Ġfew er +ĠM ir +Ġsomew here +ĠK ey +Ġattack ed +ĠK ir +Ġdom ain +Ġstrong er +Ġ9 9 +Ġpen alty +I d +Sc ript +Ġdecl ined +Ġne ck +Ġfra ud +Ġcur rency +Ġr ising +R C +âĢ¦ âĢ¦ +H z +Ġt ab +Ġtal ent +n am +ĠN BA +Ġvill age +Ġleg s +ĠN ext +E d +Ġac id +Ġhy d +8 00 +Ġinvol ving +ĠIm age +ĠBe fore +F l +Ġyes terday +S ource +Ġterror ist +Ġsu p +Ġsy nt +ĠSaud i +Ġw est +Ġr u +b urg +Ġvis ible +Ġstru ck +r ison +Ġaw esome +Ġd rawn +Ġansw ers +ĠG irl +ĠR am +Ġthreat s +Ġdef eat +os it +Ġv ent +atur ally +Americ an +end a +ĠH oly +Ġr um +% , +c ase +ĠHist ory +ĠYou Tube +Ġsit uations +ĠD NA +S te +Ġsa ved +It em +Ġrec ip +olog ist +Ġfac ed +Ġel ig +O nce +ĠL i +u h +Ġmist ake +ĠDiv ision +ĠB ell +Ġsympt oms + ® +Ġdom in +Ġfall ing +Ġend ing +as hes +Ġmat ches +ĠOn line +Ġexplan ation +D ef +red it +Ġany more +ĠT otal +ĠF OR +us hed +Ġlet ters +Ġris ks +ĠO K +Ġreported ly +: \ +Ġpl ate +Ġsubject s +Ġattempt ed +if ier +ian a +Ġunlike ly +ĠTh ough +um a +ĠIn vest +ĠPr in +ic an +ĠD ar +ĠColor ado +au g +Ġve get +a os +ri a +Ġshe l +Ġmark ed +Ġ( ) +Ġsp r +p o +ĠL ink +Ġdef e +ĠJ r +Ġthem e +Ġpass ion +ĠP en +Ġinf o +iz er +Ġsh it +ĠC ivil +ap se +c re +Ġpo ly +Ġcomp onent +ĠChar les +ĠIre land +ĠPro v +Ġdo ctors +Ġgr anted +Ġpain t +Ġhon or +Ġsm oke +Ġpay ments +Ġprim arily +ĠKing dom +r ich +ate ll +Ġde als +Ġsched uled +Ġfund amental +Ġprote in +Ġnewsp aper 
+Ġcl ients +yth on +ĠD ate +h us +Ġfeed back +Ġstret ch +Ġc ock +Ġhot el +ĠQue en +Ġsu gar +Ġj u +Ġmil k +Ġappro val +ĠL ive +Ġequival ent +ef ully +Ġins ert +z ona +Ġext ension +d ri +J ohn +Ġacc omp +S m +ĠF und +Ġconst antly +Ġ` ` +Ġgener ated +ĠA ction +ĠP sych +ĠT ri +Ġrecogn ize +Ġv ary +ph a +ĠR a +d f +et ch +ĠSov iet +Tw o +Ġpattern s +Ġprof ession +an ing +T ime +ĠL im +Ġcol ors +ĠA z +ĠT R +Ġinf ect +Ġphen omen +Ġshe ll +Al so +Ġput s +Ġdel ivery +Ġbro wn +Ġprocess ing +Ġlight s +ess age +ĠBro ok +ĠA ud +l ation +Ġindust rial +L ike +ĠB razil +rou s +ES S +ĠL uc +Ġsome how +Ġ8 5 +Ġpro port +Ġpolit icians +Ġindic ate +Ġh ole +Ġtechn iques +Ġcompet itive +Ġph r +Ġv o +ist ent +ĠD ream +Ġcamp us +Ġaspect s +Ġhelp ful +Ġsh ield +or se +Ġtrig ger +m al +Ġ5 8 +Ġt ort +Ġperson ally +Ġt ag +Ġkeep s +ĠV ideo +Ġben ch +Ġg ap +a ire +Ġe ast +Ġrec overy +per ial +Ġprof it +ĠM ic +Ġ5 7 +Ġcol on +Ġstrong ly +st yle +Ġalleg ations +h an +Ġrep orters +j o +r ine +arg et +and al +Ġ0 3 +Ġfl ash +tr ans +Ġstr ict +Ġpark ing +ĠPak istan +Ġl i +Ġwe ird +ĠE ric +Ġreg ions +ĠJ un +Ġint ellect +ĠW H +od ing +rib utes +up id +ĠT it +Ġf inger +or ia +Ġe lev +ĠF ield +Ġcon clusion +; ; +Ġfeel ings +Ġext ensive +Ġm ixed +Ġne uro +v y +Ġhar ass +ĠC irc +ou ch +Ġterrit ory +Ġsuccess fully +M ar +Ġing red +Ġoverw hel +Ġl ayer +V iew +Ġall ies +ill ance +ĠTh ree +Ġb unch +Ġnorm ally +Ġnet works +Ġsac r +ĠC IA +b les +Ġch ose +Ġopp onents +Ġregard less +Ġfr anch +Ġpre f +ĠP o +Ġbr idge +ann a +ĠSil ver +Ġw age +p age +ri or +Ġrad ical +ĠL ittle +Ġman ip +Ġsecret ary +Ġg ang +D R +F A +Ġdec ent +ĠSp irit +Ġun cle +ĠDevelop ment +Ġinvest ors +Ġwall s +Ġpub lish +Ġgener ate +iss ions +c ar +Ġprom ote +Ġcut ting +Ġche st +Ġdrink ing +Ġcollect ed +Ġ7 2 +Ġhop ing +Ġem br +gor ith +Ġwar ned +Ġinstruct ions +O G +ĠD id +ĠAg ency +Ġg ear +Ġcritic ism +ĠF urther +Ġut il +ann y +R ed +Ġcoun sel +ĠAs ian +Ġredu ction +p ool +Ġteach ing +Ġdeep ly +i y +Ġestim ates +Ġcho ices +Ġperman ent +in em +ke l +Ġf asc +p se +f ile +ĠL ow +ĠP erson +Ġt ournament +st al +Ġm el +U ST +ĠR ay +az i +V al +Ġcont ained +ĠH olly +Ġw ake +Ġreve al +Ġprocess es +ĠIS IS +Ġ0 9 +Ġbl ind +Ġste el +ĠB ad +Ġcare fully +app y +ro it +Ġg aming +Ġhous es +ĠC oll +Ġtr uck +er m +Ġsc ored +Ġocc as +ret urn +b ound +v ar +Ġsh arp +Ġaf raid +ĠE X +am ber +c ific +Ġsche me +N C +ĠPol it +Ġdecl ine +Ġ199 8 +Ġpus hing +Ġposs ession +Ġpriv ile +Ġteacher s +Ġy ield +H A +ĠDav is +it led +#### #### +Ġr ig +ĠD aniel +ac on +Ġh ide +ut en +Ġcolle agues +Ġprin ciples +Ġl oud +Ġs in +ĠDem on +Ġst one +Ġ0 2 +Ġt aught +Ġter rible +Ġst uck +ĠPol icy +te en +Ġimplement ation +ĠB BC +ĠAP I +Ġwhe el +all as +Ġch ampions +ol ars +play er +Ġrepeated ly +ĠSt ill +Ġlik es +ast y +es ter +ĠCath olic +R L +Ġb ath +Ġno ise +t itle +Ġn orthern +P art +Ġmag n +Ġf ab +ĠAs h +Ġdis pl +Ġtick et +Ġm urd +Ġalong side +ĠMus ic +Ġr iver +ĠSte el +ĠC L +ĠPl ayer +ĠM ult +ow ing +re p +s ize +Ġt ur +ĠGeorg ia +isc al +ra ction +Ġc able +Ġ5 9 +Ġw ins +Ġup coming +Ġsurv ive +Ġins pired +ĠEduc ation +Ġstat istics +ĠF oot +iam i +Ġy ellow +ĠP age +. 
- +ĠH as +Ġur ban +Ġa x +es sel +\ " +Ġquarter back +Ġreg ister +ĠLab or +Ġab ilities +ĠF amily +Ġvar iable +ĠPr ice +Ġcont em +Ġth in +ĠE qu +d ata +Ġg otten +Ġconst it +Ġas ks +Ġt ail +Ġexc iting +ĠE ffect +ĠSp anish +Ġencour age +ins on +ĠA h +Ġcommit ment +C S +Ġr ally +Ġ: : +Ġsubs id +Ġsp in +Ġcapt ured +201 8 +Ġinn oc +Ġalleged ly +ĠC ome +Ġart ists +ĠN umber +Ġelect ronic +Ġreg ional +ap es +Ġw ra +Ġmy th +pr ise +ĠM iller +ĠC reat +ĠEp isode +b ell +Ġdirect ed +Ġext ract +Ġs orry +Ġv ice +ag ger +ĠSu pport +Ġ6 6 +ĠI ron +Ġwonder ful +Ġg ra +N et +ion e +E ng +Ġsh ips +ik es +ĠK evin +it ar +Ġactiv ists +tr ue +ĠAri zona +ent h +ĠDes pite +ĠS E +Ġha bit +ern el +Ġin qu +Ġab ortion +Ġv oid +Ġexpl icit +Ġeng aged +Ġang ry +Ġr ating +Ġfr ag +b ro +ick ing +d ev +Ġwor ried +Ġob ser +Ġap artment +ĠG T +Ġest ate +ĠConst itution +em on +ĠS now +Ġcount y +Ġdis ag +ĠStep hen +Ġimm igrants +w ind +ĠN ations +Ġfol ks +O ut +Ġg all +Ġtarget ed +Ġst ead +ĠB on +ĠL ib +Ġinform ed +Ġ12 0 +ch ain +idel ines +or ough +Ġdri ven +Ġregular ly +Ġbas ket +Ġprinc iple +oc ument +Ġst un +ib ilities +ĠRom an +ĠAb out +Ġal ert +Ġdemocr acy +Ġrepresent ed +H S +c ers +p arent +Ar t +p ack +Ġdi plom +re ts +ĠN O +Ġcapt ure +ĠAd v +Ħ ¢ +Ġannounce ment +ĠL ear +Ġh ook +Ġpur s +ĠS uch +ĠC amer +Ġrefuge es +ĠV e +P ol +Ġrecogn ized +l ib +Ġhad n +A ss +Ġpil ot +us hing +Ġreturn ing +Ġtra il +ĠSt one +Ġrout ine +Ġcour ts +Ġdes per +Ġfriend ly +ĠIt aly +Ġpl ed +Ġbreat h +Ġstud io +N S +Ġimp ressive +ĠAfghan istan +Ġf ing +Ġd ownt +ink ing +ĠR og +i ary +col or +se x +ar on +Ġf ault +ĠN ick +D own +ĠR ose +ĠS outhern +X X +is odes +L ist +6 00 +Ġout come +er r +Ġelse where +Ġret ire +Ġp ounds +ĠGl obal +Pe ople +Ġcommun ications +Ġlo an +Ġrat io +ĠEm pire +Ġg onna +Ġinv ent +D F +Ġ19 70 +ĠComm on +p at +Ġprom ised +Ġd inner +ĠH om +Ġcreat es +Ġoper ate +ver ty +ĠJ ordan +et ime +Ġsust ain +R eg +Ġincred ible +im a +Ġwar rant +Ġm m +A tt +Ġlaw suit +Ġreview s +it ure +ĠS ource +l ights +ĠF ord +Ġ6 3 +g roup +st ore +Ġfeat ured +Ġfore ver +Ġpo verty +ĠP op +ĠC NN +az z +ab is +ach ing +Ġl aid +ĠSu pp +Ġfil ter +en a +ĠCommun ity +Ġcreat ures +u ction +ĠR oyal +Ġassoci ation +ĠCon nect +ĠBr ad +âĸ Ī +l ers +the re +ĠG i +Ġval uable +AC K +ĠT aylor +Ġl iquid +ĠAtt orney +ĠCar l +ĠF inal +ag a +ĠWil son +B ecause +ĠProf essor +ak a +Ġincred ibly +r ance +! ) +R ef +s k +Ġsol utions +Ġatmosp here +Ġbl ame +um es +ĠN ob +C A +um ps +r ical +ĠPut in +ĠD est +or ic +ĠP A +Ġrespect ively +w an +Ġfif th +â Ħ¢ +ĠC ry +Ġgovern or +res ident +Ġpurch ased +Ġh ack +Ġint ense +ob s +Ġorig in +Ġdef ine +Ġcare ful +** * +Ġshould er +Cl ick +Ġt ied +Ġdest ruction +ou red +Ġno body +Ġh o +ĠEx per +Ġt ip +" ; +Ġtechn ique +Ġj ur +ĠP ok +b ow +Ġleg end +Ġacc ord +Ġbus y +ĠInt el +Ġh ang +ak i +. 
] +âĢĶâĢĶ âĢĶâĢĶ +Ġsur gery +Ġrep rodu +Ġun iform +Ġscen es +c ode +Ġ6 2 +l isher +ĠH ave +ph ia +Ġcry pt +Ġrec on +Ġsc ream +Ġadop ted +Ġsc ores +N e +ĠIt alian +in cluding +B O +Ġindic ated +Ġent ertain +G u +T ext +i el +Ġtw enty +Ġeng age +off s +ĠPac ific +Ġsm ile +Ġperson nel +Ġto ler +Ġdo ors +Ġt one +Ġmach ines +Ġent ering +ten ance +C O +ĠJer sey +Ġfore st +Ġhor se +Ġcompl aint +ĠSpr ing +y o +ĠPl us +ed ing +ĠRet urn +qu arters +ial s +c ow +Ġacad emic +Ġf ruit +Ġ199 6 +og ether +Ġw ine +Ġpur su +ĠSte ven +Ġlic ens +Wh o +Ġclot hes +re ction +Ġsqu ad +Ġst able +Ġr aw +z ens +St ar +ut ies +anc er +Ġke ys +ĠM u +Ġcompl icated +ig er +ĠTe xt +Ġabs or +Ġ6 8 +Ġfun ny +Ġrel ief +ĠL ew +ĠC ook +Ġch art +Ġdraw ing +G E +Ġmod ule +ĠB ull +I LL +Ġs alt +0000 0000 +il le +Ġres ource +aw ay +adel phia +ĠB ru +Ġ6 7 +Ġsome body +Ġparticip ate +Ġro se +we red +Ġmus cle +Ġcons ent +Ġcontin uing +ĠGuard ian +ĠOr der +reg on +Ġre ar +Ġprov ision +Ġlik ed +ri ent +Ġb ra +Tr ans +Ġmeet ings +Ġto x +Ġcon vent +Ġaut o +Ġrec ording +ĠSo ft +00 1 +ĠR oll +Ġprogram ming +Ġp ic +Ġprov ed +Ġst ab +ĠA st +Ġca ption +ul ating +ĠAtt ack +Ġnew ly +Ġ199 7 +f r +Ġdis cipl +ĠGree k +Ġed ition +ĠDo es +ĠB ox +if le +ack et +Ġpass es +Ġgu est +Ġac celer +it als +U D +Ġaut hent +ĠR est +ov al +t a +u ine +Ġarm or +ĠT own +Ġcomp at +Ġinc hes +Des pite +Ġass ign +he rent +Ġprep are +ĠM eg +oc key +Ġdep ends +Ġtrack s +w atch +Ġl ists +ĠN orthern +Ġal ter +re c +ĠE astern +Ġcond em +Ġevery where +? ' +Ġaff ili +Ġf ought +": {" +Ġm ac +it arian +Ġsc ope +ĠA L +aw s +ar ms +Ġqu e +Ġenjoy ed +nes ota +Ġagg ressive +ĠSt ory +ĠI V +Ġrec ipe +Ġrare ly +ĠMed ical +val ue +ang el +ay ing +omet hing +Ġsub section +Ġs outhern +Ġfrequ ency +re te +roll ed +ult s +ĠN ic +Ġbeh alf +Ġsequ ence +ab et +Ġcontrovers ial +Ġcomp rom +Ġwork er +Ġmain ly +Ġal gorith +ĠM ajor +or ce +g ender +Ġorgan ized +Ġf ake +Ġconclud ed +ĠE D +ĠEx ec +r age +Ġch ances +ber ry +ĠTr ad +Ġconfig uration +Ġwithd raw +Ġf ro +ud es +ĠBro ther +ĠB rian +Ġtri es +Ġsam ples +Ġb id +ĠGold en +Ġphot ograph +if est +ĠD O +ĠPar liament +******** ******** +R em +Ġcont est +Ġsign ing +p x +ĠZ eal +âĶĢ âĶĢ +E ar +Ġex it +Be fore +ĠCor por +n ull +mon th +Ġrac ial +ott ed +ĠV eg +ĠRe uters +Ġsw ord +ps on +ĠRom ney +a ed +Ġt rib +Ġin ner +Ġprot ocol +ĠB i +ĠM iami +ever al +p ress +Ġsh ipping +ĠAm endment +ĠHow ard +con nect +ĠD isc +ĠJ ac +iam ond +ĠThere fore +s es +ĠPrin cess +ĠUS B +ĠAn th +Ġsurve illance +Ġap olog +Ġ6 1 +ow a +Ġf ulf +j s +Ġl uck +ust ed +Ġ § +n i +Ġant icip +em an +Ġwin ner +Ġsil ver +ll a +ic ity +Ġunus ual +Ġcr ack +Ġt ies +e z +Ġpract ical +Ġprov ince +ĠPl ace +Ġprior ity +IC E +Ġdescrib es +Ġbr anch +F orm +ask a +miss ions +b i +Ġp orn +ĠTur k +Ġent hus +Ġf ighters +Ġ0 8 +ĠDet roit +Ġfound ation +av id +A re +Ġjud gment +cl ing +Ġsol ve +ĠDes ign +W here +hes is +ĠT ro +a fter +Ġne utral +ĠPalestin ian +ĠHolly wood +Ġadv is +ĠN on +y es +ol is +Ġrep utation +Ġsm ell +Ġb read +ĠB ul +ĠBe ach +Ġclaim ing +Ġgen etic +Ġtechn ologies +Ġupgr ade +row s +Ġdevelop er +ĠJ osh +ĠDis ney +erv ed +ip al +Ġun ex +Ġbare ly +t hen +ĠP ub +Ġill ness +et ary +ĠB al +Ġp atch +Ġbut t +Ġst upid +ĠD og +ĠD allas +f ront +ie ce +Ġprot ests +Ġch at +oen ix +Ġw ing +Ġpar liament +Ġ7 7 +ose xual +Ġre nder +pt ions +ĠCo ast +os a +ĠG reg +h op +ĠMan agement +Ġbit coin +Ġrec over +Ġincor por +or ne +ĠUs ing +Ġpre ced +Ġthreat ened +Ġspirit ual +ĠE vent +ĠF red +Ġadvert ising +Ġimprove ments +ĠC ustom +Ġer rors +Ġsens itive +ĠN avy +Ġcre am +L ook +Ġex clusive +Ġcomp 
rehens +Ġde leg +Ġcon ce +Ġrem em +Ġstruct ures +Ġst ored +N D +Ġ1 000 +U P +ĠB udd +A F +w oman +ĠAcad emy +ð Ł +se a +Ġtem porary +Ab out +es ters +Ġtick ets +Ġposs ess +in ch +o z +Ġl a +Ġcontract s +Ġun p +Ġc ig +ĠK at +ult ural +as m +Ġmount ain +ĠCapt ain +St ep +m aking +ĠSp ain +Ġequ ally +Ġl ands +at ers +Ġreject ed +er a +im m +ri x +C D +Ġtrans action +g ener +less ly +Ġ| | +Ġc os +ĠHen ry +Ġprov isions +Ġg ained +Ġdirect ory +Ġra ising +ĠS ep +ol en +ond er +Ġcon sole +in st +Ġb om +Ġunc ertain +1 50 +ock ing +Ġmeas ured +Ġpl ain +Ġse ats +Ġd ict +S L +af e +Ġest imate +iz on +at hered +Ġcontribut ed +Ġep isodes +omm od +G r +AN T +Ġ6 9 +G ener +Ġ2 50 +vious ly +rog en +Ġterror ism +Ġmove ments +ent le +oun ce +ĠS oul +Ġpre v +ĠT able +act s +ri ors +t ab +Ġsuff er +Ġn erv +Ġmain stream +ĠW olf +Ġfranch ise +b at +Ġdem ands +Ġag enda +Ġdo zen +Ġclin ical +iz ard +ĠO p +t d +Ġvis ited +ĠPer haps +Ġact or +Ġde lic +Ġcont ribute +Ġin ject +ĠE s +ac co +Ġlist ening +Ġcon gress +epend ent +Ġprem ium +Ġ7 6 +ĠIr ish +Ġass igned +ĠPh ys +Ġworld wide +Ġnarr ative +ot ype +m ont +b ase +ĠB owl +ĠAdminist ration +Ġrel ation +ĠE V +C P +Ġco vers +Ġ7 8 +Ġcert ific +Ġgr ass +Ġ0 4 +pir acy +ir a +Ġengine ering +ĠM ars +Ġun employ +ĠFore ign +st ract +Ġv en +Ġst eal +Ġrepl ied +Ġult imate +Ġtit les +d ated +Ġj oy +a us +Ġhy per +ak u +Ġoffic ially +ĠPro duct +Ġdifficult y +per or +Ġresult ed +rib ed +l ink +wh o +~~ ~~ +ĠSpe ed +ĠV iet +W ind +ĠBar ack +Ġrestrict ions +ĠSh are +Ġ199 5 +ition ally +Ġbeaut y +op t +Ġm aps +ĠC R +ĠN ation +ĠCru z +W ill +Ġelectric ity +Ġor g +Ġb urd +Ġviol ation +Ġus age +Ġper mit +ĠCh ron +ĠF ant +Ġn aturally +Ġ0 7 +Ġth rown +ĠAw oken +Ġal ien +ĠHer o +ĠK ent +ĠR ick +ri ke +Ġp ace +}, {" +G L +Ġpo ison +ĠT ower +Ġform al +al ysis +Ġgen uine +Ġk il +a ver +Ġproced ure +ĠPro p +intend o +ĠM ain +as ant +Ġtr ained +G ame +ĠL oad +ĠM A +Ġcru cial +Ġle ts +ĠF R +Ġch ampion +1 01 +ĠCon ference +Ġwrit ers +Ġconnect ions +Ġo kay +ir ms +ĠR and +Ġenc ounter +ĠB uff +Ġachie ved +Ġche cks +isc ons +Ġassist ant +Ġwhen ever +ĠA ccess +ĠU r +b in +Ġcl ock +is p +op her +Ġb orrow +Ġm ad +Ġperson ality +on ly +IS T +ab ama +Ġg ains +Ġcommon ly +Ġter r +Ġhyp ot +Ġre ly +Ġt iss +iscons in +Ġrid ic +f unction +ĠO regon +Ġun com +r ating +el and +ĠN C +Ġm oon +ann on +Ġvulner able +ut ive +³³ ³³ +ĠRad io +Ġw estern +se ct +ĠT ony +Ġocc urs +ĠO s +ĠH on +Ã Ń +Ġv essel +ĠScot land +Ġdiscrim ination +Ġsubsequ ent +st ring +Ġfant asy +ĠSh adow +Ġtest im +W E +it i +r as +Ġbo at +Ġmar ks +Ġord inary +Ġre n +Ġrepresent ative +Ġpet ition +Ġ7 3 +Ġad venture +Ġign ore +ĠPhil adelphia +ĠS av +V P +Ġfact ory +Ġt asks +Ġdep ression +z ed +................ ................ 
+ĠSt orm +Ġc ogn +Ġelig ible +Ġredu cing +v ia +Ġ0 5 +Ġstri king +Ġdoll ar +h o +O V +Ġinstr ument +Ġphilosoph y +ĠMo ore +ĠA venue +Ġrul ed +ĠFr ont +IN E +ĠM ah +Ġscen ario +ĠNAS A +Ġen orm +Ġdeb ut +Ġte a +T oday +Ġabs ence +S im +Ġh am +le ep +Ġt ables +ĠHe art +M I +K e +re qu +V D +m ap +Ġchair man +Ġp ump +Ġrapid ly +v i +Ġsubstant ial +E P +d es +ch ant +ili pp +ĠS anta +ri ers +anche ster +L oad +ĠC ase +Ġsa ving +Ġ7 4 +ĠA FP +er ning +oun ced +ĠMin nesota +ĠW as +Ġrec ru +Ġassess ment +ĠB ron +U E +Ġdynam ic +Ġf urn +ul ator +Ġprop ag +h igh +Ġacc ommod +Ġst ack +ĠS us +w rit +Ġre ven +ĠGod d +ĠZeal and +ab s +Ġbr ut +Ġper pet +h ot +Ġhard ly +ĠB urn +ãĤ ¹ +Ġst y +Ġtrans actions +Ġg ate +Ġsc reens +Ġsub mitted +Ġ1 01 +Ġlangu ages +ugh t +em en +Ġfall s +Ġc oc +Ĥ ¬ +Ġstri kes +p a +Ġdel iber +ĠI M +Ġrel ax +ann els +ĠSen ator +Ġext rem +Ġ} , +ĠDe b +Ġbe ll +Ġdis order +c ut +Ġi OS +Ġl ocked +Ġem issions +Ġshort ly +" ] +ĠJud ge +ĠS ometimes +Ġr ival +Ġd ust +Ġreach ing +F ile +¯¯ ¯¯ +ino is +ĠJ ason +Ġs atell +are t +Ġst ations +Ġag ric +ĠTechn ology +com es +ĠUn fortunately +ĠChild ren +Ġappl ies +ast ed +Ġan ger +ail ability +ĠDam age +Ġcomp are +ĠStand ard +Ġaim ed +ĠB a +angu age +Ġreg ulation +Ġj ury +Ġair port +Ġse ctions +ĠPr ince +em ed +Ġmedic ine +Ġh itting +Ġsp ark +ol ves +Ġad s +St ate +Ġfood s +Ġrepl acement +Ġch icken +Ġlow est +Ġmind s +Ġinvol ves +u i +Ġarr ang +Ġproced ures +ĠWh ich +ivers ary +Ġb ills +Ġimprove ment +Ġin ev +Ġexpect ations +Ġintellect ual +Ġsp aces +Ġmechan ism +2 50 +bre ak +ĠZ e +ĠT enn +ĠB alt +Ġbar rel +Ġstat ic +man n +Pol ice +Ġt ips +Ġhand ling +c us +od ed +il ton +ir y +Ġjournal ists +our se +Ġcom ic +Ġnom ine +IT Y +Ġvers us +Ġlo op +Ġsur f +ĠInd ust +ĠHun ter +Ġbelief s +is an +Ġset up +Ġbre w +im age +Ġcomput ers +f ol +} ," +ĠMed al +Ġtax p +Ġdisplay ed +Ġg rav +Ġf iscal +M on +ĠMos cow +ĠK ong +ĠCent re +Ġcamer as +ĠMr s +ĠH ay +Ġa ver +ĠK elly +p y +Ġrequire ment +Ġent itled +omb ie +Ġsh adow +ag ic +ĠA k +Ġel ite +Ġdiv ided +Ġhead ing +Ġcop ies +Ġloss es +Ġv it +k ed +ĠB ry +Ġan s +ĠSte am +Ġrep orter +he im +ĠIt em +Ġsuper ior +d on +ere nt +à ¶ +Ġtherap y +Ġpe ak +ĠMod el +Ġl ying +Ġg am +z er +r itten +Ġrespons es +Ġconsider ation +ĠB ible +Ġl oyal +Ġinst ant +Ġp m +ĠFore st +à ¼ +Ġext end +Ġconv icted +Ġfound er +Ġconv in +ĠO ak +che ck +Ġsch olars +p ed +Ġover se +T op +c ount +ĠAr k + · +Ġ0 6 +ĠL A +m d +ĠLat in +im ental +ĠC PU +Ġsubst ance +Ġminor ity +Ġmanufact uring +E r +ocol ate +Ġatt ended +ĠMan ager +r ations +Ġappreci ate +om y +GB T +id ency +B L +Ġguarant ee +pos ition +Ġo cean +clud e +Ġhead ed +Ġt ape +Ġlo ose +Ġlog ic +Ġpro ven +Ġsp ir +Ġad mit +is a +Ġinvestig ate +Ġ199 4 +sy lv +ĠL ost +c est +Ġ7 1 +Ġrequest ed +Ġwind ows +ĠPok é +ĠWith out +M et +Ġbehavi our +Ġread er +Ġh ung +ĠKe ep +Ġro les +Ġimplement ed +Ġbl ank +Ġserv es +ĠJ ay +Ġc ited +ĠF riend +prof it +ap on +Ġrep air +it em +arr ass +Ġcrit ics +ad i +ĠF ather +Ġsh out +Ġf ool +Ġ8 8 +Ġprodu cing +Ġl ib +Ġround s +Ġcirc le +Ġpre par +Ġsub mit +Ġn ic +mor row +ãĥ « +U nder +Ġv ital +ater n +Ġpass word +Ġpublic ation +Ġprom inent +Ġspeak s +Ġb ars +Ġde eper +ĠM ill +port ed +Ġw id +Ġbut ter +Ġsm oking +Ġindic ates +K ey +rop ri +ĠF ile +all ing +ast ing +ĠR us +Ġad j +Ġ7 9 +av al +Ġpres um +bur gh +on ic +Ġf ur +Ġpoll s +ik a +Ġsecond ary +Ġmon ster +ig s +ĠCur rent +E vent +Ġowners hip +end ar +Ġarri ve +ĠT ax +Ġn ull +ĠPri v +Ġth ro +Ġk iss +c at +Ġup set +ang le +it ches +ect or +olog ists +ĠGal axy +Ġcor ruption +Ġh int +ent er +ĠH ospital +Ġgreat 
ly +Ġbeg un +es y +Ġso il +ĠAnt on +Ġmain tenance +ãĥ © +Ġdo zens +Ġhuman ity +ĠAl abama +Ġr om +w orth +ap ing +sylv ania +l ah +Ġg athered +G A +Ġattack ing +f ound +ĠSqu are +Ġar bit +ict ions +ĠW isconsin +Ġd ance +ĠS aint +arch y +Ġbase ball +Ġcontribut ions +Ġliter ature +Ġex ha +per ty +t est +Ġb ab +Ġcontain er +let ter +Ġfall en +Ġwebs ites +Ġbott le +ĠS ac +Ġbre ast +ĠP L +Ġveter an +Ġinterview s +ĠA le +Ġb anned +eng ers +ĠRev olution +in th +Ġconc erning +IV E +Ġexp enses +ĠMatt hew +ĠColumb ia +d s +ist ance +Ġent ity +.. ." +Ġrel iable +Ġpar alle +ĠChrist ians +Ġopin ions +Ġin du +l ow +Ġcompet e +Ġth orough +Ġemploy ed +Ġestablish ment +ig en +ĠC ro +Ġlawy ers +ĠSt ation +T E +ĠL ind +ĠP ur +it ary +Ġeffic iency +âĢ IJ +ĠL y +Ġm ask +Ġdis aster +Ġag es +ER E +es is +ĠH old +Ġcas ual +b led +Ġen abled +ĠEn vironment +ĠInt elligence +i per +ĠM ap +ĠB E +Ġemer ged +is dom +Ġc abin +Ġregist ration +Ġfing ers +Ġro ster +Ġfram ework +ĠDo ctor +et ts +Ġtransport ation +Ġaware ness +H er +Ġattempt ing +O ff +ĠSt ore +ÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤ +ĠK now +Ġdef ence +Ġsc an +ĠT en +ĠCh air +ĠP H +ĠAtl anta +Ġfuck ing +Ġans wered +b n +ĠK ar +Ġcateg ories +Ġr ational +Ġc ust +Ġrob ot +Ġcorrect ly +Ġg if +Ġgraph ics +m ic +Ġground s +ĠO pp +i ate +Ġdist ributed +Ġsan ctions +Ġchalleng ing +ut o +Ġingred ients +Ġinv ited +Ġfound ed +ĠRe qu +d ed +Ġb owl +Ġbrother s +ĠH a +I O +Ġw ages +im ore +oc ial +Ġse ed +ative ly +Ġaddress es +ĠI owa +ab eth +Ġatt itude +is d +ch ild +Ġm ole +Ġdisco very +y ard +B r +Ġ8 2 +Ġsuppl ies +ell ing +Ġdist ingu +C R +Ġre cept +Ġ vert +Ġsw im +b ec +d oor +ĠY eah +Ġg al +Ġinter act +ĠE SP +ĠC S +amp s +Ġconvin ced +Ġobject ive +Ġdis h +ĠPhot os +l ad +Ġdownt own +o il +in ction +Ġto morrow +ĠC OM +Ġsurv ival +sh ot +Ġsett lement +C ons +ĠX box +int erest +ĠS M +arg o +en ess +Ġeth nic +b ered +M in +ĠT ok +Ġinc ent +ĠComm and +Ġmain tained +Ġbreak s +br idge +at ar +ag g +ĠF inally +un icip +ĠO nt +le ft +Ġrecogn ition +Ġ* / +ĠP ers +Ġwe lf +Ġaddress ed +ĠK ansas +Ġvir us +Ġwhere as +Ġp apers +ram s +ĠMin istry +Ġple asure +Ġacqu ired +Ġd uration +j pg +Ġcal m +ĠN HL +Ġburn ing +Ġfold er +ick ed +ĠP y +ĠIll inois +Cl ass +ĠGodd ess +Ġperform ing +Ġwelf are +j ar +In ter +Ġl in +Ġenh ance +Ġnot ion +f are +yp es +ĠAre a +Ġcann abis +ĠDie go +f s +ĠM anchester +com m +in ite +Ġcover ing +ĠS ound +Ġ19 60 +Ġ8 4 +e lect +z ing +Ġcitiz en +Ġph ones +Ġr aid +Ġign ored +ĠOb ject +Ġu pload +c ard +Ġmod ified +Ġroom s +ia h +r ange +he ast +ach us +Ġsuggest ing +âĢ ĭ +gr ade +E l +Ġclot hing +Ġr h +ĠH an +un ity +en cing +ĠAust in +sec ution +t ra +d em +ĠQ ual +Ġhe aven +Ġst ages +Ġw edd +pl us +ific ial +ĠIm m +ĠH o +iet ies +Ġphr ase +Ġbr ill +act ory +Ġprov iders +Ġsil ence +Ġa er +ĠA I +ĠAd venture +Ġplatform s +Ġdemonstr ated +Ġinter f +ing ton +Ġr aces +Ġgr ade +ult ane +ĠTh rough +f alse +Ġb ow +ĠA B +Ġfl avor +Ġhistor ic +g ov +Ġcol our +Ġview ed +ĠEm ail +el come +Ġinter vention +Ġd iversity +Ġperiod s +Ġre verse +ĠV ery +Ġqu ote +ĠLe ft +th rough +Ġsc rew +Ġland ing +Ġp ill +Ġw et +Ġprot esters +Ġrepe at +av ed +er k +Ġsal ary +ĠPenn sylvania +St ill +Ġmay or +Ġkit chen +Ġfeat uring +ĠM useum +ĠT ournament +ĠF al +Ġser vers +U C +Ġany body +im g +ĠTr ade +ixt ure +the less +Ġfin ance +Ġcl osing +ĠPat ri +i ac +ab el +Ġ> > +or ous +Ġf irms +sc reen +un a +Ġemb arrass +ul se +Ġlet ting +Ġth rew +ile y +Ġch annels +l an +ĠVeg as +Ġse ar +Ġfant astic +ar re +uzz le +ĠD er +Th ose +Ġsw ing +Ġshe et +ind ex +co ver +og an +Ġvari ables +ĠTe ch +Ġsp oken +ac hel +ĠD a 
+ĠMount ain +Ġload ed +Ġfoot age +vers ion +Ġun l +ĠPh oenix +Ġthrow ing +Ġf iring +Ġtrack ing +Ġw idth +Ġstrugg ling +ro oms +ot ion +Ġmonth ly +ĠSer ver +Ġegg s +op en +M C +Ġ199 3 +Ġh ired +Ġstay ed +ĠAll en +Ġst ro +Ġ9 8 +st ep +ĠTurk ish +Ġfab ric +ist ing +ĠD om +Ġd ates +Ġpr on +Ġbasket ball +Ġl ucky +ĠArab ia +Ġassum ed +est y +Ġaff airs +Ġgl ad +ĠInd eed +ĠF A +ĠW ord +Ġjo ining +if ice +p read +ir ts +ĠSe lect +Ġpop ulations +aw are +Ġn ose +Ġcompl aints +st art +Ġsc oring +Th anks +Ġmin ing +Ġvisit ors +S H +Ġdam aged +Ġcharacter istics +ĠP ent +D C +Ġ8 3 +ĠS ix +r ates +Ġfl ags +ĠB rew +d og +M ark +// // +Ġexec ution +Ġj oke +ph ones +Ġtestim ony +Ġob st +Q L +ĠC ut +Ġstud ied +ĠN intendo +ick et +ĠN BC +Ġl ad +ĠB ra +ĠM oh +Ġk ernel +Ġoverwhel ming +Ġag ed +Ġapplic able +ĠC ond +Ġroad s +ĠBl ock +m ade +od ge +Ġcomm ands +Ġoff ices +vel and +Ġt ut +Ġrece iver +ĠF ro +Ġsho pping +Ġi P +ĠSt re +ĠA BC +Ġentertain ment +ĠB ow +ort ed +M c +Ġread s +gr ad +ĠCol lect +Ġâ ĪĴ +ĠCap ital +eder ation +Ġemploy er +Ġinvolve ment +Ġanx iety +al ia +Ġro of +ĠAm ong +ĠDemocr at +Ġstat s +ĠV ill +Ġconst itutional +Ġrefer ring +itt y +Ġtack le +out ube +Ġback ed +ĠH ong +ĠBro ad +Ġe le +ĠO tt +Ġ199 2 +h our +achus etts +C al +Ġdefe ated +Ġ8 1 +es p +Ġseem ingly +w as +ĠJ enn +ĠK urd +Ġg ene +Ġdisc ount +R et +EC T +( ); +Ġclub s +Ġs id +ĠM arsh +Che ck +Ġp p +ĠE ag +ides pread +Ġbe ings +F T +Ġintrodu ction +ĠCh ange +AR D +Ġ1 10 +ad ows +ier ce +Ġme al +a uthor +ĠB ang +lah oma +Ġr anks +201 1 +?? ?? +m ax +Ġcoll apse +Ġop ens +Ġe cho +Ġs oph +Ġrac ist +Ġenorm ous +Ġw aves +Ġt ap +Ġcomprehens ive +. -- +ĠR oy +Ġfarm ers +Rel ated +a ired +ron es +ĠC rim +Ġproport ion +Ġdesign s +Ġnegoti ations +Ġvirt ually +ĠBat man +Ġwar n +Ġlegit imate +m ate +Ġcon vention +, , +net ic +ĠS D +Ġconsist ently +Ġcompens ation +Ġpunish ment +Ġy e +Ġt ie +ĠB ureau +ir lf +ĠB u +ĠA ren +ĠPh ilipp +Ġkn ife +Ġmem ories +ĠR oss +Ġang le +Ġ8 6 +ĠTh under +Ġre nd +ĠT our +Ġcount s +s ung +ĠIm p +Ġeduc ational +Ġaccess ible +C OM +Ġd rew +y er +G l +am ine +OR T +O B +I B +m aster +Ġtri als +og y +h ar +ĠTr ust +Ġprefer red +irlf riend +ĠN ev +Ġb in +Ġc ow +P age +Ġsign ature +ĠB L +7 00 +Ġret ired +Ġby tes +Ġneigh b +ĠLeg end +Ġdev ast +Ġsuspect ed +is ons +ĠPoké mon +sc ale +Ġcap abilities +Ġre vel +Ġche ese +d y +igr ant +Ġfail ing +b its +ĠHer oes +ĠG host +ĠS cient +Ġappoint ed +ur i +Ġinst itution +Ġexpand ed +g reg +Ġmonitor ing +Ġp odcast +Ġcoal ition +Ġ9 6 +J o +Ġst olen +ĠS ab +Ġstop s +Ġhol iday +Ġint r +C ar +Bl ack +ĠL GBT +Ġwar ming +ĠAnd erson +Ġ8 9 +Ġprodu cer +M ed +Ġaccur acy +ĠMar vel +iz abeth +ĠPat rick +m ony +Ġmin i +ac les +Ġover t +the y +Ġmembers hip +ĠV en +Ġex ch +Ġrem oval +ĠD ave +T Y +m ad +ĠF ind +Ġad equ +Ġe c +Ġte eth +Ġemot ion +Ġper m +Ġsole ly +d b +Ġextra ord +IG HT +c al +Ġgu idelines +Ġd ying +Ġsusp ended +ĠPrem ier +ĠAnth ony +el ve +Ġd ad +ĠE th +ĠFoot ball +Ġabandon ed +Ġ< < +Ġm arch +Ġhor ror +âĢ¦ " +Ġchild hood +Ġcampaign s +Ġl unch +ĠAl bert +bl ock +âĸĪ âĸĪ +ound ing +Ġb one +or gan +ad ers +ĠFl ash +ĠDri ve +Ġton ight +Ġw ars +ĠF L +Ġform ation +con st +New s +Ġcom pe +or ious +ĠSt aff +Ġdiscuss ions +ĠProt ection +ĠJ am +Ġcrit eria +Ġinstall ation +Ġaccompl ish +iz za +Ġpub lisher +Ġresc ue +ĠT ry +U LL +ĠS om +ĠH op +ore t +th s +ord on +Ġp ocket +ĠIn v +Down load +ĠCr ime +Ġb ene +ĠGu ide +ĠAs sembly +Ġparam eters +I E +ĠAlex ander +Ġconc ert +ĠSc he +Ġsh oes +Ġvis iting +Ġrec all +Ġb ub +Ġr ural +Ġconc rete +ĠR os +N ext +R uss +Ġlo ans +ĠSh ield +Ġtre m +hem at +k g 
+ĠHar ris +is ition +ĠM ove +ĠF C +Ġf ate +ĠCh o +Ġt ired +Ġprinc ipal +h ist +ien ces +ath y +Ġse vent +Ġm ood +Ġstrateg ic +Ġdise ases +Ġfor um +Ġtem por +Ġhead quarters +P ar +ig e +fl ix +Ġgu itar +Ġ9 4 +On ly +Ġrele ases +ro ph +================ ================ +Ġ6 00 +ĠContin ue +ig ate +ĠC rit +sy stem +Ġdis abled +Ġunex pected +ith ub +Ġuncle ar +ĠE st +Ġcontr ad +Ġstrateg ies +vent ures +Ġpass age +AM E +Ġimpro ving +Ġreve als +Ġdecre ase +ov a +Ġann oy +ĠSh ort +ĠL ibrary +Ġcy ber +n ell +ĠH ur +ĠC B +Ġphot ograp +U I +Ġs ed +G e +Ġ8 7 +Ġd iverse +Ġencour aged +Ġcons piracy +Ġbird s +Ġoper ator +Ġhand ful +Ġclass ified +? ) +Ġdram atic +Ġinvestig ators +it o +Ġw idespread +ĠR oom +-------------------------------- -------------------------------- +Ġcollect ive +Ġjournal ist +St ring +Ġtemper atures +il a +Ġgu id +Ġins pect +Ġmiss ile +ĠMay or +Ġman ual +Ġsim ultane +Ġrat ings +Ġsu ck +Ġ9 7 +Ġunivers al +Ġph arm +Ġdis rupt +ian o +A V +Ġf t +Ġstat ist +old s +ĠWalk er +ph p +Ġunder t +ĠL as +ish op +nt il +res hold +ĠWhe ther +M s +Ġden y +ĠCl oud +Ġprov ider +Ġsurv iv +ĠUp date +h as +Ġmist akes +ch arge +pl ed +r ity +Ġn ode +ĠMass achusetts +ool s +lic ation +Ġf ails +em ale +or i +back s +Ġsh irt +Ġ' ' +ĠN AT +Ġwat ers +els on +Ġe ase +Ġsc ar +Ġcont ents +m ind +Ġcont ribution +Ġsh r +Ġhand ed +Ġst ability +Ġtra ve +E m +Ġmir ror +12 3 +Ġwe igh +Ġf iction +ou ver +ist ant +r ition +ĠF ed +Ġphys ically +Ġst ake +ĠArt icle +ĠAr c +ĠLew is +ĠM ind +Ġdemonstr ate +Ġprof its +v ision +om ic +ol id +Ġbatt les +Ġdri ves +Ġeas tern +ĠS ony +!! ! +ar ation +v ard +ĠG L +port ation +Ġ9 2 +Ġlaw makers +Ġprotect ing +ĠE PA +Ġy eah +Ġsh ame +ol ph +e ven +x it +Ġatt ach +Ġrepresent ing +Ġob s +ĠUt ah +iff s +ĠFre edom +à ³ +A K +Ġinc idents +it age +Ġview ers +c d +Ġm ouse +Ġcl ar +Ġaccord ance +Ġb ot +c or +ĠSum mer +he ld +Ġinnoc ent +Ġiniti ative +ol s +________________ ________________ +Ġsp ots +p ace +Ġconvent ional +Ġcorpor ations +Ġblock ed +H D +at tered +Ġref ers +Ġbu ck +ĠDig ital +12 0 +Ġtop ics +T F +Ä ģ +br id +re ement +Ġunder lying +ĠM ember +Ġinvestig ating +Ġpregn ancy +Ġtouch down +ĠB and +ĠCall er +Ġinst ances +P P +w a +G ood +Ġ199 1 +ĠC old +Ġfear s +Ġrem arks +Ĩ Ĵ +at al +Ġm it +Ġexper iments +i pt +Col or +ind u +Up date +Ġ9 3 +A g +Ġ å +anc ouver +B oth +Ġjud ges +Ob ject +Ġst ere +umb n +Ġparticip ation +ĠSt ars +ĠJ ere +Ġweek ly +ĠB an +Ġconvers ations +ĠP itt +u z +ĠIndian a +ĠK ick +Ġinf ection +Ġhero es +Ġsett led +Ġstri p +Ġh al +Ġd ump +ĠS ci +Ġl es +Ġref erences +ĠU RL +ĠBr idge +Ġwant ing +For ce +Ġex clus +Me anwhile +m n +Ġg entle +m aker +sen al +ĠG ro +ou ri +ĠR ain +ĠAll iance +Ġl ift +el a +S D +ĠCle veland +Ġrank ed +Ġst adium +Ġdead ly +ä ¸ +Ġr iding +ar ia +ĠAr mor +Ġdocument ation +ĠGree ce +ree k +Ġl ens +ĠS a +Ġg ross +ĠE mer +ag ers +ĠD ub +ĠR h +ĠAM D +Ġarri val +Ġdes ert +Ġsupp lement +ĠRes p +Ġkn ee +Ġmarg in +f ont +og g +201 0 +ĠP ir +ĠP rom +iv als +Ġint ake +Ġdifferent ly +ug s +Ġb its +clud ed +Ġsearch ing +ĠD u +um ble +Ġfunction al +ĠBalt imore +ĠC ould +Ġdes ired +Ġcirc uit +ĠL yn +ĠG O +ĠF alse +re pre +' : +alt ies +Ġmin im +Ġdro ve +ĠSh ould +Ġh ip +Ġpro s +Ġut ility +ĠN ature +ĠM ode +P resident +o pp +r at +form ance +Ġconcent ration +Ġf ont +ĠB ud +Ġam id +Ġre vers +ĠM L +B ar +Ġinter action +Ġjur isd +Ġspell s +d ep +f il +Ġcivil ians +ut ter +ĠCo oper +ĠBel ow +Ġent rance +Ġcon vert +Ġcontrovers y +ow ered +Ġcontr ary +Ġar c +ĠExec utive +ĠOffic er +Ġpack ages +Ġprog ressive +w idth +Ġreserv ed +v ol +ĠSam sung +Ġprint ed 
+Ġcent ers +Ġintrodu ce +ĠKenn edy +Ġodd s +Ġsure ly +Ġindepend ence +Ġpass engers +repre ne +ĠBe h +Ġl oves +ĠESP N +Ġfac ilit +Ġident ical +Ġdo ct +Ġpartners hip +con f +ĠH ide +Ġconf used +ĠC ow +M en +Ġw rest +ĠIraq i +Ġh oles +ĠStud ies +Ġpregn ant +h ard +Ġsign als +I X +Ġpull ing +Ġgrad uate +Ġnomine e +D ate +Ġper mitted +Ġâ Ĥ¬ +ĠOk lahoma +St art +Ġauthor ized +Ġal arm +ĠC os +v an +Ġgener ations +c ular +Ġdr agon +ĠSoft ware +ĠEd ward +Ġcontro ller +S en +ge red +ĠV ik +Ġappro ached +Th ank +Ġcan ce +Ġform ula +ĠSm all +Ġweak ness +Ġr amp +it udes +j ud +Ġbrill iant +Ġacc us +s ource +Ġ8 00 +ĠE vil +S w +Ġhom eless +we ek +i ens +r ics +ĠTh ird +T O +Ġorgan ic +Ġpresent ation +ag h +ĠDown load +v ation +Ġas sembly +or able +hold ers +ĠBern ie +ĠHel p +Ġt ong +ĠF ight +Ġbe ach +B ook +ĠL ic +Ġr ush +ĠR ound +ou p +ĠMar x +Ġcalcul ated +ĠDe vil +ĠSar ah +Ġoccasion ally +Ġbul let +Av ailable +g ate +Ġ9 1 +Ġh osp +Ġprom ises +ĠH IV +ĠSt adium +ĠSt ock +ĠCorpor ation +g age +N G +ĠC redit +Ġs ne +ib l +Ġacc um +s uch +Ġterror ists +Ġconscious ness +ĠZ h +Ġdram a +ool a +pir ation +Ġlab our +ĠN in +Ġut ter +Ġdemocr atic +Ġass ass +il ation +Ġg est +Ġab road +Ġmet ab +Ġs orts +Ġfl av +U B +Ġm g +ĠNot hing +ĠO d +Ġmus ical +200 9 +Ġdro ps +oc ated +ater al +0000 00 +Ġg re +Ġequ ality +Ġburd en +Ġv ig +ĠLe ader +-------- ---- +Ġcere mony +Ġf ighter +Ġact ors +Ġ æ +am an +F i +Ġal ign +put er +Ġe lder +ĠN SA +Ġrepresent ation +ĠOnt ario +IT H +usal em +Ġharass ment +itz er +Ġsy mp +Ġbox es +ĠD R +Ġman ifest +at re +Ġ ^ +Ġd ies +le ton +Ġmiss ions +et he +Ġres olve +Ġfollow ers +Ġas c +Ġk m +l ord +am med +Ġsil ent +ĠAssoci ated +Ġtim ing +Ġprison ers +ĠK ings +ĠF ive +Ġtow er +Ġappro aches +Ġprecise ly +Ġb ureau +ĠM other +ĠI ss +Ġkey board +it ual +Ġfund ed +Ġstay ing +Ġpsych ological +Ġm ile +ĠLe on +ĠBar b +w ill +Ġw ider +ĠAtl antic +Ġt ill +ĠR ome +ro t +Ġaccomp an +Ġfl our +ac o +W orld +ĠExp ress +ĠY u +C or +Ġple ased +part y +Ġpoint ing +Ġinf lation +Ġro y +Ġ ), +ain er +Ġwedd ing +orm on +Ġrequ iring +Ġqual ified +Ġse gment +EN D +Ġs izes +e als +Ġcor rupt +ass ador +Ġcele b +Ġdream s +ĠM ess +Ġcheck ing +ĠV ersion +Ġprep aring +Ġact ively +ĠD iff +Ġl ux +ĠW inter +act eria +ĠN E +Ġdep uty +Ġtrans gender +Ġsum mary +Ġin her +er ies +ch ar +ĠY an +Ġkn ock +ĠP ath +Ġl ip +roll er +Ġimp ression +Ġcelebr ate +Ġsl ide +Ġgu ests +Ġcl ip +F S +Ġsav ings +Ġcapt ain +Ġleg acy +ĠDen ver +Ġw ounded +tab oola +AC T +Ġpurs ue +Ġo xy +Ġ q +Ġsem i +ĠN eed +ĠAff airs +Ġob sc +Ġcheck ed +Ġd ual +C ode +ĠM D +le m +ult y +Ġ © +ĠEl izabeth +Ġcent uries +ard ed +s rc +Ġev ident +enn is +at in +Ġunemploy ment +ĠMar io +Ġint im +Ch rist +Ġbi ological +Ġsold ier +ĠAdd ed +Ġm ath +ĠG il +Ġbi as +Ġd ating +ĠO cean +Ġm ice +M us +h ire +ĠT es +Ser ver +lim ited +S ize +Ġmet ers +Ġrock et +es see +Ġcertific ate +ĠIran ian +AS S +Ġgr id +D ec +Ġro lling +com mun +ĠSwed en +b ury +Ġtiss ue +Ġrac ism +ĠL ocal +Ġmyster y +Ġexam ine +Ġst em +Ġs its +Ġhop ed +ot ing +Ġdial ogue +Ġpers u +W atch +l ay +M AN +Ġch ronic +ĠPort land +mark et +ĠS EC +Ġparalle l +Ġsc andal +Ġcar ries +Ġphenomen on +h uman +ack er +ĠO x +Ġretire ment +tain ment +ov ie +ĠG ear +Ġd uties +Ġdo se +Ġsc roll +M B +in f +Ġsa uce +Ġland scape +red dit +ĠChampions hip +ĠRed dit +al id +Ġco in +Ġover s +Ġpost ing +ab out +Ġf el +and y +Ġb old +Ġfocus ing +e ffect +G R +Ġde emed +Ġrecommend ations +Ġste pped +Ġvot er +ĠDe ep +ĠInst agram +Ġmoder ate +ĠMary land +Ġrestrict ed +ĠM B +ĠCh all +Ġto b +Ġc ir +ĠO cc +ĠE ver +Ġcoll aps +IN FO += - +ĠP 
ict +ĠAcc ount +n c +Ġo ught +Ġex port +Ġdr unk +( ' +Ġw ise +ĠM ort +ne cess +Ġan cest +ĠInc re +Ġfrequ ent +m ir +Ġinterpret ation +Ġdepend ent +Ġco ins +ĠB ol +V ideo +ĠJust in +Ġfat al +Ġcook ing +Ġconf usion +ip her +Ġcust ody +ĠMor gan +om ach +ĠGovern or +Ġrestaur ants +el ing +Ġacknowled ged +Ġthe r +Ġgen es +ch ing +He y +Ġtact ics +ĠMex ican +Ġv end +Ġhe s +qu er +Ġnot ing +ĠCamer on +Ġtarget ing +ro ck +Ġcred its +Ġemot ions +Ġrepresent atives +new s +Ġlegisl ative +Ġrem oving +Ġtweet ed +ĠCar ter +ĠF ixed +Ġfor cing +Ġspeak er +Ġm ales +ĠViet nam +l ined +Ġconcept s +Ġvo ices +o ir +ĠT rib +W he +ĠJer usalem +ĠS ant +Ġc ul +Ġl ady +ĠHaw ai +Ġar ts +ĠIn n +ĠMach ine +ĠEm peror +Ġsl ot +g ly +ĠPro cess +II I +Ġathlet es +ĠTem ple +ĠRep resent +Ġpres c +Ġt ons +Ġgold en +Ġp unch +ĠG R +iver pool +Ġen act +Ġlob by +Ġm os +Ġpick ing +Ġlif etime +Ġcogn itive +E ach +z o +Ġd ub +Ġcons ists +ol n +Ġf estival +am ous +Ġint ellig +w ords +ĠSm art +Ġde le +Ġl apt +Ġmag ical +ĠS in +b us +ur ities +igh th +ĠRub y +ĠS ure +ol ving +Ġj un +O ST +Ġimp osed +Ġast ron +Ġcor rel +ĠN S +ĠK it +ĠF uture +b urn +Ġimm une +oc us +Ġcour ses +ĠSt ring +Ġle an +Ġg host +Ġout comes +Ġexp ense +Ġevery day +Ġaccept able +A h +Ġequ ipped +Ġor ange +F R +ĠD utch +Th ough +ĠR ank +Q U +ĠRober ts +wh at +re nd +Ġdisapp ear +Ġsp awn +ĠL am +o is +Ġdes erve +Ġmin imal +Ġnerv ous +ĠW ould +Ġro ok +ĠV ancouver +Ġres ign +sh ire +ĠW orks +ĠB uild +Ġafford able +ĠG ary +ĠAren a +Ġh anging +Ġimpl ications +ĠS ong +Ġmain taining +Ġgu ards +C ON +Ġder ived +Ġexecut ed +Ġthe ories +Ġqu oted +ĠAnd re +og a +sel ess +in fo +ĠBel g +Ġt ears +ĠSur v +Ġbirth day +ig ious +im mer +Ġspect rum +Ġarchitect ure +Ġrec ruit +arm a +T able +Ġmon sters +ĠG ov +Ġdest ination +Ġattract ive +Ġf oss +ĠMore over +Ġpres ents +TH E +Ġrep ly +pt on +Ġc um +Ġdel ight +Ġaffect s +Ġdon ations +ĠT oy +ĠH im +M ENT +Ġover come +it ched +ĠFant asy +ĠH at +ĠBe ast +b ott +Ġinvestig ations +R un +Ġhun ting +d i +f und +Ġs essions +est yle +Ġport ray +oid s +Y eah +Ġcommun icate +Ġcom edy +ĠY ang +Ġbel t +ĠMar ine +Ġpredict ed +Pl ay +Ġimportant ly +Ġremark able +Ġelim inate +D avid +Ġb ind +V ID +Ġadvoc ates +ĠG aza +im p +D B +ĠN a +ĠSim ilar +I ES +Ġchar ity +v as +m ath +Ġâ ĸ +ok er +nd um +Ġcap s +ĠH al +2 000 +e an +Ġfle et +Ġrec re +R ight +Ġsleep ing +ij ing +k ind +Ġdesign ated +à ¤ +Ġanim ation +ke e +ĠInt rodu +Ġ/ > +Ġdelay ed +Ġtrem end +Ġcur ious +U se +Ġle ct +d am +Ġinnov ation +ĠPoint s +Ġload ing +Ġdisp ute +ct ic +ird s +ĠB Y +Ġn urs +ĠVal ue +ION S +ĠH um +Ġtem plate +m ers +Ġappear ances +ĠEnter tainment +Ġtransl ation +Ġsa ke +Ġbene ath +Ġin hib +Ġe uro +abet es +Ġstud ying +ĠM as +Ġper ceived +Ġexam ined +Ġe ager +Ġco aches +Ġim per +ch i +Ġprodu ces +" ). 
+ĠEvery one +Ġm unicip +Ġg irlfriend +Ġh ire +ĠV ice +Ġsu itable +op y +Ġin equ +ĠD uke +f ish +f irst +ĠO bs +Ġinter ior +ĠBru ce +ĠR y +Ġanal ys +Ġconsider able +Ġfore cast +Ġf ert +ors hip +ĠD rug +ĠA LL +: " +th ur +ĠM ail +Ġball ot +Ġinst antly +ĠCh annel +Ġp icks +Ġ198 9 +Ġt ent +ol i +Ġcivil ian +b ling +ell o +b u +Ġin ch +Ġlog o +Ġcooper ation +Ġwal ks +Ġinvest ments +Ġimp rison +ĠF estival +ĠK y +Ġleg ally +Ġg ri +ch arg +S l +Ġthreat ening +du ction +fl ow +Ġdismiss ed +ibr aries +c ap +e le +ĠMc G +ĠHar vard +ĠConserv ative +ĠC BS +p ng +Ġro ots +ĠH aving +umb led +ĠF un +\ / +ĠS earch +ple x +Ġdiscuss ing +Ġcontin u +ĠT ai +ĠW ik +F ree +f it +Ġref use +Ġmanag ing +Ġsy nd +ip edia +w alk +Ġprofession als +Ġguid ance +Ġunivers ities +Ġas semb +unt u +F inally +AS E +ĠAut o +ĠH ad +Ġann iversary +L D +ĠD ur +ĠUlt imate +ih ad +pro duct +Ġtrans it +Ġrest ore +Ġexpl aining +Ġass et +Ġtransfer red +Ġbur st +ap olis +ĠMag azine +ĠC ra +ĠB R +gg ed +ĠH E +M ich +b et +ĠL ady +yl um +erv es +Ġme ets +wh ite +L og +Ġcorrespond ing +Ġins isted +G G +Ġsurround ed +Ġt ens +Ġl ane +Ġco inc +h ome +Ġexist ed +ect ed +ĠDou ble +lam m +Ġske pt +ex p +Ġper ception +ie v +ĠBe ing +o ft +Ġadop t +. : +] ; +Wind ows +Ġsatell ite +AS H +Ġinf ant +d escription +ĠMe anwhile +c m +oc a +ĠT reat +act or +Ġtob acco +ĠN orm +em ption +Ġfl esh +Ġj e +o op +ĠHe aven +Ġbe ating +an im +Ġgather ing +Ġcult iv +G O +ab e +ĠJon athan +ĠSaf ety +Ġbad ly +pro t +Ġcho osing +Ġcontact ed +Ġqu it +Ġdist ur +Ġst ir +Ġto ken +D et +ĠP a +Ġfunction ality +00 3 +s ome +Ġlimit ations +Ġmet h +b uild +con fig +N T +re ll +ble m +ĠM om +Ġveter ans +ĠH u +Ġtrend s +are r +ĠG iven +ĠCa ption +m ay +AS T +Ġwond ering +ĠCl ark +n ormal +Ġsepar ated +Ġdes p +st ic +b rew +Ġrel ating +ĠN ik +ĠF arm +Ġenthus i +g ood +d eb +Ġactiv ist +Ġm art +Ġexplos ion +ĠEconom ic +L ink +Ġins ight +Ġconven ient +Ġcounter part +su pport +ĠV irt +ag en +ĠTenn essee +ĠSim on +ĠA ward +OC K +ĠF igure +Ġoverse as +Ġpr ide +ĠC as +n ote +m g +C urrent +Ġdispl ays +cont ent +Ġtravel ing +Ġhosp itals +ĠFin ancial +ĠP ast +Ġdefend ant +Ġstream ing +m ble +ĠBer lin +uk i +Ġdist ribut +Ġant ib +Ġch ocolate +ĠCast le +Ġinter rupt +ĠR ow +Ġconvers ion +Ġbug s +ĠR ather +li est +L Y +ĠJe an +com mon +ak h +Ġ1 30 +ot ton +ĠDe an +Ġam endment +Ġgame play +ĠWar ren +od a +Ġhigh lights +Ġir re +ĠNAT O +Ġball s +Ġdemand ing +U RE +ĠL uke +F igure +st op +on ia +z one +iz ers +ĠW R +Ġaward ed +Ġregul atory +ĠH art +ĠS N +pl ing +Ġs our +ĠP ixel +us ive +Ġf et +ĠS ent +Ġautom atic +Ġf er +vern ment +ĠKh an +T ON +f ather +Ġextraord inary +th rop +ĠP ython +ĠG PU +Ġsex ually +Ġdesk top +it ivity +ĠAnton io +Ġo rient +Ġe ars +ob by +ous es +vertis ements +Ġmanufacture rs +ic ient +min ute +Ġconv iction +Ġg arden +p ublic +Ġsatisf ied +f old +O K +Ġin hab +ĠTh ink +Ġprogram me +Ġst omach +Ġcoord in +Ġh oly +Ġth reshold +Ġr het +Ġser ial +Ġemploy ers +ĠEvery thing +ra h +Ġb other +Ġbr ands +Val ue +ĠT ed +ĠPlan et +Ġp ink +ĠFurther more +s a +P E +re ck +ĠUS D +ot te +Ġ& & +Ġland ed +g ets +Ġprodu cers +Ġhealth care +Ġdomin ant +Ġdest ro +Ġam ended +ch ron +Ġf its +ĠSy d +ĠAuthor ity +AT CH +Ġfight s +ĠL LC +Ġ-- - +ĠCor p +Ġtox ic +spe cific +ĠC orn +ĠChe l +Ġtele phone +ĠP ant +Ġmyster ious +aun ch +od ox +med ia +Ġwitness es +ag u +Ġquestion ed +ĠBre xit +ĠRem ember +ene z +Ġend orse +iat ric +ĠId ent +Ġridic ulous +1 10 +Ġpr ayer +Ġscient ist +Ġ19 50 +ĠA qu +Ġunder ground +ĠU FC +m are +ĠL ater +w ich +Ġsubsc rib +Ġhost s +Ġer r +Ġgr ants +ant om +Ġsum mon +ear 
ly +ĠC lear +ĠPr im +Ġsusp ension +Ġguarant eed +app er +Ġr ice +ĠSe an +ĠSh in +Ġrefere ndum +Ġfl ed +r ust +Ġ3 60 +ter y +Ġsh ocked +B R +ĠO il +ĠAll ah +Ġpart ly +Ġign or +Ġtrans mission +Ġhom osexual +ivers al +Ġhop efully +ãĤ ¤ +Ġless on +L eg +Ġ .. +Y et +t able +app ropri +re tt +Ġbo ards +Ġincor rect +Ġb acteria +ar u +am ac +Ġsn ap +.' " +Ġpar ad +t em +he art +Ġav ailability +Ġw isdom +Ġ( + +Ġpri est +ĠÂł ĠÂł +O pen +Ġsp an +Ġparam eter +Ġconv ince +Ġ( %) +r ac +Ġf o +Ġsafe ly +Ġconver ted +ĠOlymp ic +Ġres erve +Ġhe aling +ĠM ine +M ax +Ġin herent +ĠGra ham +Ġinteg rated +D em +Ġpip eline +Ġapp lying +Ġem bed +ĠCharl ie +Ġc ave +200 8 +Ġcons ensus +Ġre wards +P al +ĠHT ML +Ġpopular ity +look ing +ĠSw ord +ĠAr ts +' ) +Ġelect ron +clus ions +Ġinteg rity +Ġexclus ively +Ġgr ace +Ġtort ure +Ġburn ed +tw o +Ġ18 0 +P rodu +Ġent reprene +raph ics +Ġg ym +ric ane +ĠT am +Ġadministr ative +Ġmanufacture r +Ġ vel +ĠN i +Ġisol ated +ĠMedic ine +Ġback up +Ġpromot ing +Ġcommand er +Ġfle e +ĠRus sell +Ġforg otten +ĠMiss ouri +Ġres idence +m ons +Ġrese mb +Ġw and +Ġmeaning ful +P T +Ġb ol +Ġhe lic +Ġwealth y +Ġr ifle +str ong +row ing +pl an +as ury +âĢ¦ . +Ġexpand ing +ĠHam ilton +Ġrece ives +S I +eat ures +ĠAn im +RE E +P ut +Ġbrief ly +ri ve +Ġstim ul +Ġ`` ( +Ġ __ +Ġch ip +Ġha z +Ġpri ze +ĠTh ings +AC E +ul in +d ict +ok u +Ġassoci ate +ock ets +y outube +St ory +ateg ory +Ġm ild +ail ing +ĠY e +O rig +ĠK a +or ig +Ġpropag anda +Ġan onymous +Ġstrugg led +Ġout rage +AT ED +ĠBe ijing +r ary +Ġle ather +Ġworld s +Ġbroad er +12 5 +id al +ĠBet ter +Ġt ear +E xt +Ġpropos als +Ġit er +ĠSqu ad +Ġvol unt +m i +D id +ĠP u +p in +Ġspeak ers +Ġb orders +Ġfig ured += ' +Ġsimultane ously +aed a +Ġcharg ing +Ġur ged +Ġcon j +25 6 +ĠG ordon +mer ce +Ġdocument ary +Sh are +it ol +ON E +ĠG arden +h att +ĠThom pson +ane ous +ap ore +Ġt anks +Ġless ons +tr ack +Ġout standing +Ġvolunte ers +Ġsp ray +Ġmanag ers +l arge +Ġcamp s +Ġart ificial +ĠR u +Ġb ags +th al +Ġcompat ible +ĠBl ade +Ġf ed +Ġarg ues +F I +Ġunf air +Ġcor n +Ġoff set +Ġdirect ions +Ġdisappoint ed +ĠCon vention +Ġview ing +M E +oc ity +Ġtown s +Ġlay ers +Ġro lled +Ġjump ed +Ġatt ribute +Ġun necess +inc oln +Ġsupp ose +ĠNet her +ch a +Ġbur ied +Ġsix th +B en +ress ing +OU R +Ġw ound +Ġcy cl +Ġmechan isms +Ġcongress ional +ĠE lement +Ġagre ements +Ġdec or +Ġclos est +ĠM it +Go ogle +} } +Ġm ixture +Ġflu id +S ign +ĠSch olar +Ġp ist +ask et +ab ling +Ġrac ing +he ro +ri el +ass y +Ġche aper +b en +Ġvert ical +amac are +ĠRead ing +g ments +Ġhelic op +Ġsacr ifice +ay a +p aren +V A +ĠL es +ĠStud io +Ġviol ations +ĠAn na +ac er +é ¾ +ĠR at +ĠBe ck +ĠD ick +ĠA CT +Ġcomp osition +Ġtext ure +ĠO wn +Ġsmart phone +ĠN A +Ġfor b +im port +Ġdef ending +il st +re r +Ġo h +ĠJere my +Ġbank ing +cept ions +Ġrespect ive +/ . 
+Ġdr inks +ĠW i +Ġb ands +ĠL iverpool +Ġg rip +ĠB uy +Ġopen ly +Ġreview ed +per t +Ġver ify +ĠCo le +ĠW ales +M O +Ġun pre +Ġshel ter +ĠIm perial +Ġgu i +ĠD ak +Ġsuggest ions +Ġexplicit ly +Ġsl ave +Ġblock chain +Ġcompet ing +Ġprom ising +S ON +Ġsoc cer +Ġconst itution +4 29 +Ġdist ract +ĠU ser +es ides +ĠMet hod +ĠTok yo +Ġaccompan ied +Cl ient +s ur +al og +Ġident ification +Ġinv asion +as ma +Ġindust ries +pp ers +Ġsub tle +ĠUn it +n atural +Ġsurv ived +Ġfl aw +ĺ ħ +ĠH oll +Ġdef icit +Ġtut orial +ĠCh ance +Ġarg uing +Ġcontem porary +Ġinteg ration +for ward +Ġt um +it is +Ġh iding +ĠD omin +ĠT an +ĠB uilding +ĠV in +Ġspokes person +ĠNot es +Ġemer ging +Ġprepar ation +Ġpro st +Ġsuspect s +Ġaut onom +D escription +Ġdeal t +ĠP ear +Ġstead y +Ġdecre ased +Ġso vere +ĠCl in +Ġgrad ually +ors es +ĠW AR +S erv +ãĤ ¢ +h r +Ġd irty +ĠB arn +ĠB C +Ġd il +Ġcal endar +Ġcompl iance +Ġch amber +b b +Ġpass enger +ate ful +ĠT itle +ĠSyd ney +ĠG ot +Ġdark ness +Ġdef ect +Ġpack ed +ass ion +Ġgod s +Ġh arsh +IC K +le ans +Ġalgorith m +Ġoxy gen +Ġvis its +Ġbl ade +Ġkil omet +ĠKent ucky +Ġkill er +P ack +enn y +Ġdiv ine +Ġnom ination +be ing +Ġeng ines +Ġc ats +Ġbuff er +ĠPh ill +Ġtra ff +AG E +Ġtong ue +Ġrad iation +ere r +m em +ĠExpl icit +é¾ į +Ġcou ples +Ġphys ics +ĠMc K +Ġpolit ically +aw ks +ĠBl oom +Ġwor ship +e ger +ut er +ĠF O +Ġmat hemat +Ġsent enced +Ġdis k +ĠM arg +Ġ/ * +P I +Ġoption al +Ġbab ies +Ġse eds +ĠScott ish +Ġth y +] ] +ĠHit ler +P H +ng th +Ġrec overed +ing e +Ġpow der +Ġl ips +Ġdesign er +Ġdis orders +Ġcour age +Ġch aos +" },{" +Ġcar rier +b ably +H igh +ĠR T +es ity +l en +Ġrout es +u ating +F il +N OT +w all +s burgh +Ġeng aging +ĠJava Script +ore r +li hood +Ġun ions +ĠF ederation +ĠTes la +Ġcomple tion +ĠT a +Ġprivile ge +ĠOr ange +Ġne ur +paren cy +Ġb ones +Ġtit led +Ġprosecut ors +ĠM E +Ġengine er +ĠUn iverse +ĠH ig +n ie +o ard +Ġheart s +ĠG re +uss ion +Ġmin istry +Ġpen et +ĠN ut +ĠO w +ĠX P +in stein +Ġbul k +S ystem +ic ism +ĠMarket able +Ġpre val +Ġpost er +Ġatt ending +ur able +Ġlicens ed +ĠG h +et ry +ĠTrad able +Ġbl ast +à ¤ +ĠTit an +ell ed +d ie +H ave +ĠFl ame +Ġprof ound +Ġparticip ating +Ġan ime +ĠE ss +Ġspec ify +Ġregard ed +ĠSpe ll +Ġs ons +own ed +Ġm erc +Ġexper imental +land o +h s +ĠDun geon +in os +Ġcomp ly +ĠSystem s +ar th +Ġse ized +l ocal +ĠGirl s +ud o +on ed +ĠF le +Ġconstruct ed +Ġhost ed +Ġsc ared +act ic +ĠIs lands +ĠM ORE +Ġbl ess +Ġblock ing +Ġch ips +Ġev ac +P s +Ġcorpor ation +Ġo x +Ġlight ing +Ġneighb ors +ĠU b +ar o +Ġbe ef +ĠU ber +F acebook +ar med +it ate +ĠR ating +ĠQu ick +Ġoccup ied +Ġaim s +ĠAdd itionally +ĠInt erest +Ġdram atically +Ġhe al +Ġpain ting +Ġengine ers +M M +ĠM ust +Ġquant ity +P aul +Ġearn ings +ĠPost s +st ra +ãĥ¼ ãĥ +Ġst ance +Ġdro pping +sc ript +Ġd ressed +M ake +Ġjust ify +ĠL td +Ġprompt ed +Ġscr ut +Ġspeed s +ĠGi ants +om er +ĠEd itor +Ġdescrib ing +ĠL ie +ment ed +Ġnow here +oc aly +Ġinst ruction +fort able +Ġent ities +Ġc m +ĠN atural +Ġinqu iry +Ġpress ed +iz ont +for ced +Ġra ises +ĠNet flix +ĠS ide +Ġout er +Ġamong st +im s +ows ki +Ġclim b +ne ver +Ġcomb ine +d ing +Ġcomp r +Ġsignific ance +Ġremem bered +ĠNev ada +ĠT el +ĠSc ar +ĠWar riors +ĠJ ane +Ġcou p +b as +Ġtermin al +, - +O H +Ġt ension +Ġw ings +ĠMy ster +�� �� +ĠUn like +val id +viron ments +ĠAl i +Ġn aked +book s +ĠM un +ĠG ulf +Ġd ensity +Ġdim in +Ġdesper ate +Ġpres idency +Ġ198 6 +h y +IN D +Ġun lock +im ens +Ġhand led +ĠE b +Ġdisapp eared +Ġgen re +Ġ198 8 +Ġdetermin ation +St ream +ik o +ap ters +Ġacknow ledge +J an +Ġcapital ism +P at +Ġ20 20 +Ġpain 
ful +Ġcur ve +Ġbom bs +st orm +ĠMet al +en cer +ĠF ig +ĠA aron +anc hes +Ġins piration +Ġexha ust +t ains +ash i +Ġdesc ript +Ġr itual +ĠChel sea +Ġpromot ion +ĠH ung +ĠW ard +iv a +ĠE T +Ġto ss +all ow +ĠFranc is +D ep +Ġhapp iness +ĠGl ass +Ġbet a +Ġstreng then +N E +o a +Ġbutt ons +ĠMur ray +Ġkick ed +Qu est +ĠT alk +ĠS everal +ĠZ ero +Ġdr one +ul k +Ġc am +ĠM obile +Ġprevent ing +Ġret ro +ĠA x +Ġcru el +Ġflo at +. ), +Ġfil ing +ĠGr ant +ĠB or +Ġr ib +Ġchampions hip +ĠM erc +Ġsty les +Ġc ake +Ġbuild s +ĠS elf +io x +Ġep ic +oy d +B el +ĠSt ew +. ( +ah u +ĠBe yond +Ġout s +Ġsol o +ĠT ree +Ġpres erve +Ġt ub +AR E +ro c +ĠIm pro +ĠW right +Ġbu nd +Ġtr aged +Ġoccas ional +b ian +Sec ond +r ons +Ġinter actions +form ed +s ing +Ġown s +Ġh ockey +Gener al +Ġlog ical +Ġexp end +Ġesc al +ĠGr iff +ĠC rown +ĠRes erve +Ġsto pping +Ġexc use +sec ond +Ġoper ated +Ġre aches +ĠMal ays +Ġpoll ution +ĠBrook lyn +Ġde lete +Ġhas h +Bl ock +ah a +âĢ ³ +Ġsh orter +p iece +> >> +ĠM ormon +t or +Ġpartic les +ĠB art +ry ption +Ġad min +Ġsqu ee +VID IA +Ġcreat or +iam eter +ic ular +N BC +Ġgrab bed +Ġn odd +Ġr ated +Ġrot ation +Ġgr asp +Ġexcess ive +ĠE C +ĠWh it +Ġinvent ory +ault s +ĠF B +Ġe cosystem +Ġbill ions +Ġvent ure +n amed +Ġdef ender +out e +Inst ead +ir able +W ar +Ġassum ption +Ġb ite +Ġearth qu +t ail +sp ace +Ġgif ts +boy s +Ġinev itable +Ġstruct ural +Ġbenef icial +Ġcompe lling +h ole +erv ation +Ġco at +o j +inc arn +ĠY ears +Ġdetermin ing +Ġrhet oric +Ġbound aries +Ġwh ites +A nt +add y +) - +ra ham +eter min +Ġhar vest +ĠCon c +Ġlapt op +ĠM atch +Ġenjoy ing +cc a +oll ar +Ġtri ps +Ġadd iction +ĠS ak +Ġpow ered +Ġc ous +ĠRuss ians +ie re +Ġret rie +qu ality +Ġdiff er +Ġking dom +ĠL aur +ĠCap itol +Ġcon clusions +ĠAl tern +ĠN av +Ġtrans parent +B ER +G roup +ĠCom plete +Ġinf er +Ġint rig +Ġins ane +R O +oph ob +is en +qu al +Mich ael +Ġm useum +ĠP ope +Ġres et +r ative +f ive +Ġagg reg +itte es +osit ory +Ġcar b +ĠRec ord +Ġdec ides +ĠF ix +Ġexcept ions +ĠCommission er +un s +ĠEnvironment al +Ġlegend ary +ist ence +Ġtun nel +k m +Ġins ult +Ġt roll +Ġsh ake +Ġdet ention +qu es +ĠCh rome +ĠF iles +Ġsub t +Ġprospect s +Ġpro l +re nder +pro of +Ġperform ances +St r +Ġh ref +ern ame +Ġachieve ment +Ġf ut +F ull +ĠLe ban +go ogle +ãĥ Ī +amp a +May be +Ġproject ed +ĠE mb +Ġcol leg +Ġa wards +Ġâ Ķ +G old +ĠBl ake +ĠR aj +if ting +Ġp ending +Ġinst inct +Ġdevelop ments +Con nect +ĠM and +ĠW ITH +ĠPhilipp ines +prof ile +Ġalt ogether +ĠB und +ĠT D +oo oo +amp ed +ip h +Ġste am +Ġold est +Ġdet ection +ul pt +Ġ ç +ĠWay ne +200 6 +f a +Ġcir cles +ĠF u +Ġdon ors +appropri ate +ĠDak ota +j amin +Ġmotiv ated +Ġpurch ases +ĠLouis iana +ĠS pl +Ġgl obe +Ġ10 5 +z ip +c all +Ġdepart ments +Ġsustain able +10 5 +ĠO P +if iers +Ġprevent ed +Ġinc omp +ĠComm ander +Ġdom inated +Ġ » +Ġinvest ed +Ġcomplex ity +Ġin cl +Ġens uring +Ġreal m +yn c +ĠInd ependent +r ained +ĠJ en +ĠFl ight +Ġat he +Ġspec ulation +ĠT E +oc ate +t ic +Ġpl aint +her ry +Ġto y +Ġ1 11 +Ġpl ates +st atus +ĠIs a +Ġdev oted +C op +ĠE S +25 5 +ur rency +M ain +Ġsl aves +Ġpe pper +Ġqu otes +Ġce iling +ĠF ish +Ġtrans formation +Ġfra ction +Ġadvant ages +Ġto ile +Ġstun ning +Ġmo ist +bre aking +s i +ĠL ocation +ĠMed ium +Ġtext s +Ġu gly +Ġb io +. 
âĢĶ +ĠB ased +Ġtr ains +ĠW ing +ĠAn cient +ĠRec ords +ĠH ope +Spe cial +ades h +ob i +[ / +Ġtempor arily +V er +h u +os er +Ġover night +Ġm amm +ĠTre asury +ĠV enezuel +ĠMeg a +Ġt ar +Ġexpect s +bl ack +or ph +\\ \\ +Ġaccept ance +Ġrad ar +s is +Ġjun ior +Ġfram es +Ġobserv ation +ac ies +P ower +ĠAdv anced +M ag +olog ically +ĠMe chan +Ġsent ences +Ġanaly sts +augh ters +force ment +Ġv ague +Ġcl ause +Ġdirect ors +Ġeval uate +Ġcabin et +M att +ĠClass ic +A ng +Ġcl er +ĠB uck +Ġresear cher +Ġ16 0 +Ġpoor ly +Ġexperien cing +ĠP ed +ĠMan hattan +Ġfre ed +Ġthem es +ad vant +Ġn in +Ġpra ise +10 4 +ĠLib ya +b est +Ġtrust ed +Ġce ase +Ġd ign +D irect +Ġbomb ing +Ġm igration +ĠSci ences +Ġmunicip al +ĠA verage +Ġgl ory +Ġreve aling +Ġare na +Ġuncertain ty +Ġbattle field +ia o +G od +Ġc inem +ra pe +el le +ap ons +Ġlist ing +Ġwa ited +Ġsp otted +ke ley +ĠAud io +e or +ard ing +idd ing +ig ma +ĠN eg +Ġl one +Ġ ---- +ex e +d eg +Ġtrans f +Ġwas h +Ġsl avery +Ġexpl oring +ĠW W +ats on +Ġen cl +l ies +ĠC reek +Ġwood en +Man ager +ĠBr and +um my +ĠAr thur +Ġbureau cr +Ġbl end +ar ians +F urther +Ġsupposed ly +Ġwind s +Ġ19 79 +Ġgrav ity +Ġanalys es +ĠTra vel +ĠV eter +Ġd umb +Ġaltern ate +g al +Ġconsum ed +Ġeffect iveness +.' ' +Ġpath s +ond a +L A +ĠStr ong +Ġen ables +Ġesc aped +Ġ" " +Ġ1 12 +Ġ198 3 +Ġsm iled +Ġtend ency +F ire +Ġp ars +ĠR oc +Ġl ake +Ġf itness +ĠA th +ĠH orn +Ġh ier +Ġimp ose +m other +Ġp ension +ic ut +bor ne +ic iary +. _ +ĠS U +Ġpol ar +is y +eng u +itial ized +AT A +w rite +Ġexerc ises +ĠD iamond +ot ypes +Ġharm ful +on z +Ġprint ing +st ory +Ġexpert ise +ĠG er +Ġtraged y +ĠF ly +Ġd ivid +amp ire +st ock +M em +Ġre ign +Ġun ve +Ġam end +ĠProp het +Ġmut ual +ĠF ac +Ġrepl acing +H ar +ĠCirc uit +Ġthro at +ĠSh ot +Ġbatter ies +Ġto ll +Ġaddress ing +ĠMedic aid +Ġp upp +ĠN ar +ol k +Ġequ ity +M R +ĠHis pan +ĠL arge +m id +D ev +Ġexp ed +Ġdem o +ĠMarsh all +erg us +Ġf iber +Ġdiv orce +ĠCre ate +Ġsl ower +ĠPark er +ĠStud ent +ĠTr aining +Ret urn +ĠT ru +Ġc ub +ĠRe ached +Ġpan ic +Ġqu arters +Ġre ct +Ġtreat ing +Ġr ats +ĠChristian ity +ol er +Ġsac red +Ġdecl are +ul ative +et ing +Ġdeliver ing +est one +Ġt el +ĠL arry +Ġmet a +ac cept +art z +ĠRog er +hand ed +Ġhead er +Ġtra pped +ĠCent ury +Ġkn ocked +ĠOx ford +Ġsurviv ors +b ot +Ġdemon stration +Ġd irt +Ġass ists +OM E +ĠD raft +ortun ate +fol io +pe red +ust ers +g t +ĠL ock +Ġjud icial +ver ted +Ġsec ured +out ing +ĠBook s +Ġhost ing +Ġlif ted +l ength +Ġj er +Ġwhe els +ĠR ange +umbn ails +Ġdiagn osis +te ch +ĠStew art +ĠP ract +Ġnation wide +Ġde ar +Ġoblig ations +Ġgrow s +Ġmand atory +Ġsusp icious +! 
' +A pr +G reat +Ġmort gage +Ġprosecut or +Ġeditor ial +ĠK r +Ġprocess ed +ung le +Ġflex ibility +Ear lier +ĠC art +ĠS ug +Ġfoc uses +Ġstart up +Ġbre ach +ĠT ob +cy cle +ãĢ Į +ro se +Ġb izarre +ãĢ į +Ġveget ables +$ $ +Ġret reat +osh i +ĠSh op +ĠG round +ĠSt op +ĠHawai i +ĠA y +Per haps +ĠBe aut +uff er +enn a +Ġproduct ivity +F ixed +cont rol +Ġabs ent +ĠCamp aign +G reen +Ġident ifying +Ġreg ret +Ġpromot ed +ĠSe ven +Ġer u +ne ath +aug hed +ĠP in +ĠL iving +C ost +om atic +me ga +ĠN ig +oc y +Ġin box +Ġem pire +Ġhor izont +Ġbr anches +Ġmet aph +Act ive +ed i +ĠFil m +ĠS omething +Ġmod s +inc ial +ĠOrig inal +G en +Ġspir its +Ġear ning +H ist +Ġr iders +Ġsacr ific +M T +ĠV A +ĠS alt +Ġoccup ation +ĠM i +Ġdis g +lic t +Ġn it +Ġn odes +e em +ĠP ier +Ġhat red +ps y +ãĥ ī +Ġthe ater +Ġsophistic ated +Ġdef ended +Ġbes ides +Ġthorough ly +ĠMedic are +Ġbl amed +arent ly +Ġcry ing +F OR +pri v +Ġsing ing +ĠI l +Ġc ute +o ided +olit ical +ĠNe uro +å ¤ +Ġdon ation +ĠEag les +ĠG ive +T om +Ġsubstant ially +ĠLic ense +ĠJ a +Ġg rey +ĠAn imal +ĠE R +ĠU nd +Ġke en +Ġconclud e +ĠMississ ippi +Eng ine +ĠStud ios +P ress +o vers +ll ers +Ġ3 50 +ĠR angers +Ġr ou +ert o +E p +iss a +iv an +Ġse al +ĠReg ist +dis play +Ġwe aken +u um +ĠComm ons +ĠS ay +Ġcult ures +Ġl aughed +Ġsl ip +Ġtreat ments +iz able +m art +ĠR ice +Ġbe ast +Ġob esity +ĠLa ure +ig a +Wh ich +hold er +Ġelder ly +Ġp ays +Ġcompl ained +Ġc rop +Ġpro c +Ġexplos ive +ĠF an +ĠAr senal +A uthor +ef ul +Ġme als +Ġ( - +id ays +Ġimag ination +Ġann ually +Ġm s +as ures +H ead +ik h +m atic +Ġboy friend +ĠCom puter +Ġb ump +Ġsur ge +ĠCra ig +ĠKir k +D el +medi ate +Ġscen arios +ĠM ut +ĠSt ream +Ġcompet itors +Ù Ħ +ĠStan ford +ĠRes ources +az ed +b age +Ġorgan is +ĠRe lease +Ġsepar ately +Ġha bits +Ġmeasure ments +ĠCl ose +Ġaccomp any +Ġg ly +Ġt ang +ĠR ou +Ġplug in +Ġcon vey +ĠChall enge +oot s +j an +Ġcur s +ĠRel ations +ke eper +Ġapproach ing +p ing +Spe aking +Ġarrang ement +ĠV I +are ttes +Ġaffect ing +Ġperm its +b ecause +Ġu seless +ĠH us +!! !! +Ġdestro ying +Un fortunately +Ġfasc inating +S em +Ġelect oral +Ġtrans parency +ĠCh aos +Ġvolunte er +Ġstatist ical +Ġactiv ated +ro x +We b +H E +ĠHamp shire +is ive +M ap +Ġtr ash +ĠLaw rence +st ick +C r +Ġr ings +EX T +Ġoper ational +op es +D oes +ĠEv ans +Ġwitness ed +P ort +Ġlaunch ing +ec onom +w ear +ĠPart icip +um m +cul es +ĠR AM +ĠT un +Ġass ured +Ġb inary +Ġbet ray +Ġexpl oration +ĠF el +Ġad mission +it ated +S y +Ġav oided +ĠSim ulator +Ġcelebr ated +ĠElect ric +¥ ŀ +Ġcl uster +itzer land +he alth +L ine +ĠN ash +at on +Ġsp are +Ġenter prise +ĠD IS +clud es +Ġfl ights +Ġreg ards +ĠÃ Ĺ +h alf +Ġtr ucks +Ġcontact s +Ġunc ons +ĠCl imate +Ġimm ense +N EW +oc c +ect ive +Ġemb od +Ġpat rol +Ġbes ide +Ġv iable +Ġcre ep +Ġtrig gered +ver ning +Ġcompar able +q l +Ġg aining +ass es +Ġ( ); +ĠG rey +ĠM LS +s ized +Ġpros per +" ? 
+Ġpoll ing +Ġsh ar +ĠR C +Ġfire arm +or ient +Ġf ence +Ġvari ations +g iving +ĠP i +osp el +Ġpled ge +Ġc ure +Ġsp y +Ġviol ated +Ġr ushed +Ġstro ke +ĠBl og +sel s +ĠE c +,' ' +Ġp ale +ĠColl ins +ter ror +ĠCanad ians +Ġt une +Ġlabor atory +Ġn ons +t arian +Ġdis ability +ĠG am +Ġsing er +al g +ĠSen ior +Ġtrad ed +ĠWar rior +Ġinf ring +ĠFrank lin +Ġstr ain +ĠSwed ish +Ġsevent h +ĠB enn +ĠT ell +Ġsynd rome +Ġwond ered +id en +++ ++ +ig o +Ġpur ple +Ġjournal ism +Ġreb el +Ġf u +bl og +Ġinv ite +ren cies +ĠCont act +Is rael +ĠCont ent +Ġche er +Ġbed room +ĠEngine ering +ĠQue ens +Ġd well +ĠPlay Station +ĠD im +ĠCol on +l r +Ġoper ates +Ġmotiv ation +US A +ast ered +C ore +ĠTr uth +ol o +OS E +ĠMem ory +Ġpred ec +Ġan arch +Ġ19 20 +ĠY am +à ¨ +b id +Ġgr ateful +Ġexc itement +Ġtre asure +Ġlong est +ct ive +Ġdes erves +Ġreserv es +Ġcop s +ĠOtt awa +ĠEgypt ian +ank ed +Ġart if +Ġhypot hesis +: / +Ġpurch asing +Ġlove ly +H P +Ġdiv ide +Ġstrict ly +Ġquestion ing +Ġtaxp ayers +ĠJ oy +Ġroll s +ĠHe avy +Ġp orts +Ġmag netic +Ġinf lamm +Ġbr ush +t ics +â ĪĴ +Ġbott les +pp y +Ġp add +ãĤ ¯ +m illion +Ġdevast ating +Ġcomp iled +Ġmed ication +Ġtw elve +ĠPer ry +Sp ace +im b +y our +Ġle aked +ĠT ar +Ġun ity +Ġinfect ed +Ġtravel ed +ID E +ĠMc Donald +t xt +ĠPr inc +Ġinter ven +ĠTai wan +ĠP ow +Ġbe aring +ĠTh read +Ġz ones +iz ards +un ks +Ch apter +ll or +Ġ · +Ġw ounds +Ġdisc retion +Ġsucceed ed +ik ing +Ġicon ic +C all +Ġscreen ing +ĠM is +ict s +Ġmin isters +Ġsepar ation +Pl ayer +Ġb ip +Ġbel oved +Ġcount ing +ĠE ye +ar ound +ing ing +Ġtable t +Ġoff ence +in ance +h ave +ĠInf o +ĠNin ja +Ġprotect ive +ĠC ass +M ac +ĠQual ity +N orth +Ġ ic +ĠCub a +ĠChron icle +ĠPro perty +Ġfast est +ot os +ĠG erm +OW N +Ġbo om +ĠStan ley +ergus on +Ġcle ver +Ġent ers +m ode +ter ior +ĠS ens +Ġlin ear +AR K +Ġcomp aring +Ġpure ly +Ġsaf er +ĠPot ter +Ġc ups +R T +Ġgl uc +Ġatt ributed +Ġdu pl +ĠP ap +Ġprec ious +Ġp a +iction ary +ĠT ig +ĠTo o +ol utions +st an +Ġrob ots +Ġlob b +Ġstat ute +Ġprevent ion +w estern +16 0 +ĠAct ive +ĠMar ia +h al +N one +ell ar +ĠK B +ĠPart ners +ĠSing le +ĠFollow ing +ang o +ac ious +Ġth ou +Ġk g +Ġinflu ential +ĠFriend s +S ur +ain ted +Ġfor ums +Ġst arter +Ġcitizens hip +ĠE lection +on ge +ot ation +os ph +;; ;; +ut ical +p ur +ere n +Ġaccus ations +bit ious +ab bit +ĠOr d +Post ed +ir k +Ġsens itivity +ic he +ĠAm y +ĠF ab +Ġsum mit +Ġped est +Ġrub ber +Ġagric ultural +Ġcan cel +A E +Ġin aug +Ġcont am +Ġfirm ly +i w +st age +ĠK an +Ġt ier +Ġinv ention +Ġtransl ated +ĠR ules +B ox +Tw itter +ID S +Ġp izza +Ġdeb ug +ĠD rop +v s +Ġh orses +b ig +Ġb oring +Ġh ood +ĠMcC ain +at ched +ĠBro s +Ġsk ip +Ġess ay +st at +ĠLeg ends +Ġam munition +au c +Ġshoot er +Ġun h +Ġsuppl ied +Ġgener ic +ĠS K +ib an +yr ics +Ġ25 5 +Ġclim bing +Form er +Ġfl ip +Ġjump ing +Ġfrust ration +ĠTer ry +Ġneighborhood s +Ġmed ian +be an +Ġbr ains +Follow ing +Ġsh aped +Ġdraw s +Ġal tered +J ack +Ġrecip es +Ġsk illed +we alth +ach i +e lection +Ġbehavi ors +de als +ĠU ntil +F e +Ġdecl aration +mar ks +ĠBet ween +cel ona +Ġres on +Ġbub ble +Am ong +Ġim perial +G S +Ġfemin ist +200 5 +ĠK yle +Ġaccount ing +ĠTe le +ĠT yr +Ġconnect ing +Ġre hab +ĠP red +s im +Ġmeant ime +Ġphys ician +M W +ĠCamp bell +ĠBr andon +Ġcontribut ing +ĠR ule +ĠWe ight +ĠN ap +Ġinter active +Ġv ag +Ġhel met +ĠCom b +f our +Ġsh ipped +Ġcomple ting +ĠP D +PD ATE +Ġspread ing +Ġsc ary +erv ing +ĠG as +Ġfr ank +s chool +Ġrom antic +Ġstab il +R ob +Ġaccur ately +Ġac ute +ĠH ann +Ġsymbol s +Ġcivil ization +ĠA W +Ġlight ning +Ġcons iders +Ġven ue +Ġ × +Ġo ven +ĠS F +h 
is +Ġn u +ĠLear n +Ġpe oples +Ġst d +Ġsle e +Ġs lic +ĠStat istics +Ġcor ners +ĠB aker +Ġ: ) +ment ation +ol ver +Ġlaugh ing +ĠT odd +ond e +ĠH ills +Ġn uts +ĠW oman +pl ane +Ġl iver +ĠIn side +S orry +Ġagre es +Ġfund ament +ĠF isher +Ġa uction +Ġthread s +gl as +ĠBas ic +ĠN at +Ġlack ing +Ġceleb ration +j u +Ġs illy +E uro +Ġt att +ight y +cont rolled +T est +ĠSing h +Ġr age +Ġrh yth +o ffic +ĠPh antom +Ġhead lines +Ġrespond ing +ĠMor ning +Ġvit amin +Ġboot s +ĠS ite +al in +p i +Ġvir al +ĠU C +D ER +ĠSe x +Ġst ocks +c urrent +Ġch urches +ĠR are +ĠMur phy +Ġden ial +ĠG aming +Ġtou g +Ġn ick +Ġm akers +ĠRon ald +Ġgener ous +ĠD oc +ĠMor ris +Ġtransform ed +ĠN ormal +Ġ10 4 +ĠKick starter +ĠUp on +On line +ĠI RS +Ġw rap +Ġl oving +Ġarri ves +ĠD ue +Ġhe ter +ĠM ade +Ġrent al +Ġbelong s +Ġatt orneys +Ġcro ps +Ġmat ched +ul um +ol ine +10 9 +Ġdis par +Ġbuy ers +ĠCam bridge +Ġeth ics +rou ps +Ġjust ified +Ġmarg inal +Ġrespect ed +win ning +Ġnodd ed +ĠSer ge +ĠForm er +C raft +######## ######## +ĠWar ner +Ġd ash +et e +Ġent ert +ĠE scape +out heast +Ġkn ees +ĠB omb +Ġr ug +P ass +Ġatt itudes +go vernment +ĠPri or +Ġqual ities +Ġnot ification +ĠPh one +l ie +Ġanticip ated +ĠCom bat +ĠBar ry +Ġ198 2 +Us ers +on er +Ġcomput ing +ĠConnect icut +Ġless er +Ġpe ers +ĠC u +Ġtechn ically +Ġsub mission +ĠUn iversal +Ġman ually +our ge +Ġrespond ents +ĠB TC +ĠH ost +Ġf are +ĠB ird +Ġrece ipt +al so +Ġj ack +Ġagric ulture +Ġsk ull +Ġ! = +Ġpass ive +ĠC I +Ġsoc ieties +Ġremind ed +Ġinter ference +B uy +Ġâ ľ +g on +Ġscrut iny +ĠW itch +Ġconduct ing +Ġ ãĥ +Ġexch anges +ĠMit chell +Ġinhab it +Ġtw ist +B D +Ġwhere ver +group on +Ġj okes +ĠBen jamin +ĠR andom +fr ame +ĠL ions +Ġhighlight ed +ĠArk ansas +E nt +Ġp ile +Ġpre lim +g s +mind ed +Ġfel ony +ĠG A +ĠL uck +Ġpract ically +ĠB os +Ġact ress +D am +ĠB ou +Ġvis a +Ġembed ded +Ġhy brid +Ġear liest +Ġsoon er +s ocial +ĠH A +Ġste ep +Ġdis advant +Ġexplo it +ĠE gg +ĠUlt ra +Ġnecess ity +L ocal +ie ge +Ġd ated +Ġmass es +Ġsubsc ription +pl ess +Ġan onym +Ġpresum ably +Bl ue +The ir +asket ball +ĠPhil ip +Ġcom ed +load ed +r ane +Ġref lection +Ch ina +Ġext ends +Ġform ing +Ġund ers +200 1 +Ġgr at +Ġconcent rations +Ġins ulin +Ġsec ular +Ġwh ilst +Ġwin ners +Ad vertisements +Ġdeliber ately +ĠWork ing +Ġs ink +et ics +d ale +Ġmand ate +Ġg ram +Ġvac ation +Ġwarn ings +ri pp +ĠTH AT +Ġcomment ary +Ġint u +Ġa est +Ġreason ing +Ġbreak down +ĠZ ombie +Ġ-- > +ĠPolit ical +c ott +Ġthr ust +Ġtechn ological +Ġdec iding +Ġtraff icking +L ong +W elcome +pr ising +ĠCommun ications +Ġend ors +Ġsw ift +Ġmetab ol +co ins +res a +ĠHT TP +Ġen roll +ĠH appy +us r +int age +Ġ[ " +u ably +ĠM aterial +Ġrepe al +Se pt +k h +ĠMod i +Ġunder neath +ĠI L +sh ore +Ġdiagn osed +ace utical +Ġsh ower +au x +ĠSw itch +ĠStre ngth +Ġj ihad +n ational +Ġtra uma +uss y +on i +Ġcons olid +Ġcal ories +ĠF lynn +ag ged +16 8 +ĠP ink +Ġfulf ill +Ġch ains +Ġnot ably +ĠA V +L ife +ĠCh uck +m us +ĠUr ban +ĠH end +Ġdep osit +ĠS ad +Ġaff air +OR K +ie val +ĠF DA +Ġt rop +ĠOver all +Ġvirt ue +Ġsatisf action +au nd +Ġl un +ĠSw itzerland +ĠOper ation +pro cess +Ġsh ook +Ġcount ies +le ased +ĠCharl otte +1 12 +Ġtrans cript +Ġre dd +p ush +ĠHe y +ĠAn alysis +[ " +Ġaltern atives +ard less +Ġele ph +Ġpre jud +ĠLe af +H aving +ĠH ub +Ġexpress ions +ĠVol ume +Ġshock ing +ĠRed s +Ġread ily +Ġplan ets +ad ata +Ġcollaps ed +ĠMad rid +Ġir rit +i pper +ĠEn c +ĠW ire +Ġbu zz +ĠG P +ash a +Ġaccident ally +ur u +Ġfrust rated +ĠS A +Ġhung ry +ĠH uff +Ġlab els +ant o +ĠE P +Ġbar riers +) | +ĠBer keley +ĠJ ets +Ġp airs +ĠL an +J ames 
+ĠB ear +Ġhum or +ĠLiber ty +Ġmagn itude +Ġag ing +ĠM ason +Ġfriends hip +umb ling +Ġemer ge +Ġnewsp apers +Ġam bitious +ĠRich ards +atern al +Ġ198 1 +Ġcook ies +Ġsc ulpt +Ġpur suit +L ocation +Ġscript s +p c +Ġarrang ements +Ġd iameter +Ġl oses +am ation +Ġl iqu +ĠJ ake +aret te +Ġunderstand s +ĠZ en +v m +Ġappro ve +Ġw ip +Ġult ra +Ġint end +ĠD I +asc ular +Ġst ays +ĠK or +ĠK l +Ġinvest ing +L a +Ġbelie ving +b ad +m outh +Ġtaxp ayer +ãĥ ĥ +ĠQue bec +Ġl ap +ĠSw iss +d rop +Ġdr ain +ir i +et c +ft en +ĠN ex +Ġst raw +Ġscream ing +Ġcount ed +Ġdam aging +Ġamb assador +cent ury +Ġpro x +Ġarrest s +u v +il ateral +ĠCh arg +Ġpresc ribed +Ġindepend ently +Ġf ierce +ĠB aby +Ġb rave +Ġsu its += > +Ġbas eline +ĠR ate +Ġis lands +Ġ( ( +g reen +ix els +Ġname ly +ĠVill age +th an +am y +V ersion +g mail +ential s +ĠS ud +ĠMel bourne +Ġarri ving +Ġquant um +e ff +rop olitan +T ri +Ġfun eral +ĠI R +ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ +ĠC ob +it ably +Ġt urb +Ġcomb o +Re view +Ġdeploy ment +u ity +ĠB ott +Ġinv isible +Ġrender ing +Ġunl ocked +Ġa qu +ĠVlad imir +Ġp ad +ĠBr ain +ĠLeg acy +dr agon +ĠKurd ish +Ġsound ed +Ġdet ained +ĠD M +g ary +Ġd aughters +Ġdistur bing +uk a +ĠPar ad +Ġt ast +Ġunf ortunate +Ġu l +em in +Ġattend ance +tr l +Ġpar ks +ĠMem orial +ĠAl ice +oth y +gu ard +ĠD ise +ĠSh an +ĠFor um +R ich +Ġshif ted +ue z +Ġl ighter +ĠMag n +Ġc od +S ch +ham mad +P ub +3 50 +ĠP okemon +Ġprot otype +Ġun re +B ase +ĠStud ents +ĠRep ly +ĠCommun ist +Ġg au +ĠTy ler +I Z +Ġparticip ated +Ġsup rem +ĠDet ails +Ġvessel s +ro d +Ġt ribe +ke ep +Ġassum ptions +Ġp ound +Ġcr ude +ĠAv ailable +Ġswim ming +Ġin clusion +Ġadv ances +c ulation +Ġconserv ation +Ġover d +ĠBuff alo +Art icle +ed ge +Ġaw a +ĠMad ison +Ġsid ew +Ġcat ast +ĠK rist +uc le +ĠHigh way +ĠTer ror +Ġactiv ation +Ġuncons cious +ĠSat an +ĠSus an +ill ery +Ġarr anged +i op +Ġrum ors +ur ring +th ink +ĠKe ith +ĠK ind +Ġavoid ing +by n +n ut +ĠSpe aker +r us +n ames +Ġgu ilt +ĠOlymp ics +Ġsa il +ĠM es +lev ant +ĠColumb us +a ft +C ity +S outh +ĠHar vey +ĠP un +S everal +Ġment ally +Ġimp ress +m ount +ĠUb untu +âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ +ĠSuper man +ĠMP s +Ġintent ions +ĠR acing +Ġlike lihood +Ġ2 40 +T otal +Ġto ys +ĠW atson +Ġur ge +L ear +ĠP aper +Ġoccur ring +ĠB eng +ĠC ert +Ġst ones +T im +ĠTw in +z b +ĠD ynam +Ġpolit ician +k ens +ĠEnter prise +UT ERS +Ġab ol +Ġref resh +Ġarbit rary +pe ction +Ġtrou bles +Ġ} ); +t v +Ġpil ots +Ġdist ribute +Ġaud it +Ġp ause +orig inal +Ġr ivals + £ +F ig +T L +ab il +ry ing +L in +ion ed +l on +Ġf ancy +Ġcr ashed +Ġt ract +Ġshe d +Ġcons ume +B ased +down load +in it +Ġvolt age +Int rodu +Ġcondem ned +ĠFin ance +res pect +Ġex cluded +Ġestablish ing +her ic +Ġher itage +Ġspect acular +Ġun st +ĠSnow den +ĠL ane +S an +Ġprotect ions +st ruction +inc inn +Ġmac ro +C ustom +ios ity +Ġes p +Ġfunction ing +Ġm ush +Ġp uzzle +Ġeth ical +M al +Ġgo verning +ĠF erguson +Ġrest ored +Ġst ressed +ĠCoun ter +ĠK as +cl ip +AN S +Ġse iz +U K +by ss +old own +ap i +Ġperman ently +oun ters +W est +Th rough +L ight +at oes +Ġne at +Ġc ord +ure r +Ġsevere ly +ĠA ven +Ġinter rog +Ġtri ple +G iven +N umber +Ġar ise +Ġs her +pl ant +Ġfl ower +ĠC ou +Ġat e +Ġnew er +b ul +Ġmean while +ĠL air +Ġadjust ment +ĠCop yright +Ġd ivers +i ological +Ġgam ers +o at +Ġhistor ically +Ġanal og +Ġlong time +Ġpres cription +ĠM ist +ĠHy per +ĠM aine +ĠDe ity +Ġmulti pl +ĠRe incarn +ĠH yd +ĠP ic +S il +r ants +ĠC ris +. 
; +( { +epend ence +Ġrec y +ate ur +Ġqu ad +Ġgl ob +Ġcon ced +te am +Ġcapital ist +ĠL ot +Ġroy al +ĠCy ber +Ġblack s +met ic +ri v +ĠD anny +Ġsp o +ĠR O +Ġanim ated +rypt ed +ĠDep uty +Ġrend ered +F E +Ġstre ak +Ġcloud s +ĠDou g +~~~~ ~~~~ +Ġdisc our +ĠVe h +Ġpsych ology +ĠJ ourney +Ġcry stal +ĠFro st +Ġsuspic ion +Ġrel ate +or us +ĠC rypt +ĠN VIDIA +com ed +ut ing +incinn ati +Ġvulner ability +ost ic +Ġisol ation +Ġcool ing +ĠCoal ition +Ġ1 19 +F our +ĠDe al +Ġâ ī +se mble +ram ent +ĠBar celona +Ġ10 2 +Ġcoc aine +ocaly pse +F eb +ogen ic +Ġmut ation +Ġcrypt oc +ĠK el +ĠG it +a is +Ġs isters +AN K +Ġactiv ate +T er +Ġd read +yl on +Ġprop ri +A ust +ĠDef ault +Ġout door +Ġshe er +ce ive +Ġg ently +Ð ¾ +Pro gram +Ġâ ĨĴ +Ġve gan +ĠCr us +Ġrespons ibilities +ĠH R +OL D +Ġprev ents +Ġst iff +ĠW ere +Ġathlet ic +ĠSc ore +Ġ) : +Ġcolumn s +ĠL oc +av ailable +ĠF ram +ĠS essions +Ġcompan ion +Ġpack s +14 0 +ĠKn ights +Ġf art +Ġstream s +Ġsh ore +Ġapp eals +ĠPer formance +h aul +ĠSt ra +ĠN ag +10 3 +ĠTrans portation +B B +E v +z an +P ublic +Ġtw in +uls ion +M ult +Ġelect ro +Ġstat ue +ation ally +ĠN ort +Ġins pection +/ * +ig ue +Ġcomp assion +ĠT ales +ĠSte in +ĠSc reen +ĠB ug +ĠL ion +g irl +Ġwithdraw al +Ġobject ives +Ġblood y +Ġprelim inary +Ġj acket +Ġdim ensions +ĠC ool +ĠOcc up +Ġw reck +Ġdoub led +ank ing +Ġ19 75 +Ġglass es +ĠW ang +pro v +P ath +connect ed +ĠMult i +ĠNor way +agon ist +Ġfe ared +Ġtouch ing +Ġarg uably +¯¯¯¯ ¯¯¯¯ +ĠNC AA +che m +Ġsp at +ĠW WE +ĠC el +ig ger +Ġattack er +ĠJo in +ob ject +ett a +Ġelim inated +d et +Ġdest ruct +ĠLuc as +ct uary +18 0 +ĠBr ady +ĠBl ues +B ay +au kee +Ġtim eline +Ġdeleg ates +w ritten +uff icient +Ġsh apes +Cop yright +ou ble +serv ice +Ġp ione +Ġcolleg es +Ġrow s +Ġsp ite +Ġassess ed +3 60 +Ġle ase +Ġconfident ial +ck er +ĠMan ning +ĠV oice +Ġse aled +Ġcalcul ate +N O +ĠAss istant +Ġteen ager +ul ent +ather ine +Ġm ock +Ġd iamond +Ġf est +Ġsw itched +Ġres ume +ĠPu erto +Ġl anes +ir ation +ĠSimilar ly +Ġro d +ĠS el +ĠPal ace +ĠLim ited +e ous +Ġvar iant +Ġw ard +Ġ) ) +Sh ow +OO K +A lex +ĠN ep +br is +ĠWik ipedia +Ġexcept ional +Ġman ages +ĠD raw +Ag ain +Ġco pper +ut t +Ġex ports +Ġport folio +Ġelev ated +R ated +ĠOther wise +ĠT act +ĠShe l +ĠT X +" âĢĶ +Ġres ur +ĠW a +ven ant +Ġmon etary +pe ople +E mail +Ġfif ty +ĠS weet +ĠMalays ia +Ġconf using +ĠR io +ud a +uten ant +" ); +Ġpra ised +Ġvol umes +t urn +Ġm ature +Ġnon profit +Ġpassion ate +ĠPriv ate +Ġ10 3 +Ġdesc end +ç ¥ŀ +uff y +head ed +Whe ther +ri en +ze ch +be it +Ġch rom +ĠMc M +Ġd ancing +Ġe leg +ĠNot iced +11 5 +Ġadvoc acy +ENT S +amb ling +ĠMin or +ĠF inn +Ġprior ities +Ġthere of +ĠSt age +ĠRog ers +Ġsubst itute +ĠJ ar +ĠJeff erson +Ġlight ly +10 2 +ĠL isa +u its +ys ical +Ġshif ts +Ġd rones +Ġwork place +Ġres id +ens ed +ah n +Ġpref erences +ser ver +Ġdeb ates +d oc +ĠGod s +Ġhelicop ter +Ġhon our +Ġconsider ably +ed ed +ĠF emale +ĠAn ne +Ġre un +ĠF ace +ĠHall ow +ĠBud get +Ġcondem n +Ġt ender +Pro f +ocr atic +ĠTurn er +ĠAg ric +Ġ19 76 +Ġa pt +d isc +ĠF ighter +ĠA ur +Ġgar bage +in put +ĠK arl +ĠOl iver +ĠL anguage +k n +N on +ĠCl ar +Ġtrad itions +Ġad vertisement +ĠS or +Ġarch ive +Ġvill ages +7 50 +Ġimplement ing +w aukee +Ġdiet ary +Ġswitch ing +Rep ublic +Ġvel ocity +Ġc it +ĠA wards +Ġfin ancing +Ġlast ed +) ] +Ġrem inder +P erson +Ġprec ision +Ġdesign ers +ĠF ried +ĠB order +Ġtr agic +Ġw ield +Ġiniti atives +ĠT ank +w er +Ġjo ins +R o +in ery +Ġar row +Ġgener ating +found er +Ġsear ches +Ġrandom ly +A ccess +Ġb atch +Ġp osed +l at +Ġpursu ing +as a +Ġtest ified +form ing +ĠSh 
ar +w iki +ĠE ither +S ometimes +Ġsen ators +ĠJohn ny +ĠTal iban +ĠG PS +":" / +ãģ® å +Ġanaly zed +ĠRub io +ĠMove ment +op ard +ii i +St and +f ight +Ġign oring +i ang +ĠG N +so ever +ĠST AT +Ġref using +Ġswe at +Ġb ay +P ORT +ir med +ak y +Ġdis pro +Ġlabel ed +Ġ10 8 +H ello +Ġple asant +ab a +Ġtri umph +Ġab oard +Ġinc om +ĠC row +le tt +Ġfol k +Ġch ase +` ` +ĠBr us +Ġte ens +c ue +Ġter rain +h yd +il ight +OR Y +Su pport +ew s +ll i +rain ts +ĠC and +Ġab used +ach ment +l arg +B as +ĠC ancer +Ġ19 78 +Ġsupp orter +ac cess +ĠTer min +ĠT ampa +ĠAN Y +Ġnew est +ĠCrim inal +ed u +Ġ19 30 +Ġadm its +Ġend e +Ġfail ures +ur ate +ful ness +cy cl +ĠSub ject +Ġinf inite +th ree +W A +p it +ĠInst all +R ad +ili ation +G M +Ġcontin ent +Ġaccommod ate +ĠCl ay +Ġp up +ĠF unction +Ġham mer +ĠAlbert a +Ġrev ised +Ġminor ities +Ġmeasure ment +Con nell +Ġdis able +ĠM ix +In cre +Ġfor k +ĠR osen +Ġimpl ies +umb lr +AN G +Ġprote ins +Ġagg ression +Ġfacilit ate +S N +Ġilleg ally +u er +Ġacad em +Ġp uzz +ĠSh ift +p ay +oll o +Ġaud iences +B uild +Ġno ble +Ġsynt ax +â ĺħ +Ġbe am +ĠB ed +ĠA ld +Ġorig ins +v ideo +Ġ19 77 +ĠAss ault +Ġgar age +Te am +Ġver dict +Ġd war +ĠVirt ual +e vent +Ke ep +Ġsent iment +Ġwild life +sh irt +Ġb urg +Ġrecommend ation +rep resent +Ġgall ery +own ers +Ġsch olar +Ġconven ience +ĠSw ift +Ġconv inc +C ap +Ġwar fare +ĠVis ual +Ġconst itute +Ġab ort +ĠWe ather +ĠLook ing +ĠH em +Ġmart ial +Ġinc oming +et ition +Ġtoler ance +ĠCre ated +Ġfl ows +ĠE lder +Ġsoul s +Ġf oul +ĠP ain +ĠC AN +Ġ2 20 +b c +he nd +Ġgen ius +R eal +ĠW r +omet er +p ad +Ġlim iting +ĠS i +ĠL ore +ĠAd ventures +Ġvar ied +D isc +f in +ĠPerson al +Ch ris +Ġinv ented +Ġd ive +ĠR ise +Ġo z +ĠCom ics +Ġexp ose +ĠRe b +let ters +s ite +im ated +Ġh acking +Ġeduc ated +ĠNob ody +Ġdep ri +Ġincent ive +ãĤ · +Ġovers ight +Ġtrib es +ĠBelg ium +Ġlicens ing +our t +Produ ct +ah l +ĠG em +Ġspecial ist +Ġc ra +ann ers +ĠCor byn +Ġ19 73 +RE AD +Ġsum mar +Ġover look +ĠApp lication +Ġin appropriate +Ġdownload ed +Q ue +ĠB ears +Ġth umb +ĠChar acter +ĠReincarn ated +ĠS id +Ġdemonstr ates +s ky +ĠBloom berg +ĠAr ray +ĠRes ults +ĠFour th +ĠED T +ĠO scar +c end +Ġ10 6 +ĠN ULL +ĠH ERE +m atch +ĠBr un +Ġgluc ose +ie g +eg u +Ġcert ified +Ġrel ie +Ġhuman itarian +Ġpr ayers +K ing +Ġn an +h ou +10 8 +ul u +Ġrenew able +Ġdistingu ish +Ġd ense +ĠV ent +ĠPack age +ĠB oss +Ġedit ors +Ġm igr +T ra +ĠPet ers +ĠAr ctic +200 4 +ĠC ape +Ġloc ally +Ġlast ing +Ġhand y +. ). 
+P an +ĠR ES +Ind ex +Ġt ensions +Ġformer ly +Ġide ological +Ġsens ors +Ġdeal ers +Ġdef ines +S k +Ġproceed s +Ġpro xy +az ines +ĠB ash +ĠP ad +ĠC raft +eal ous +Ġshe ets +omet ry +J une +cl ock +T T +ĠThe atre +ĠB uzz +Ġch apters +Ġmill enn +Ġd ough +ĠCongress ional +Ġimag ined +av ior +Ġclin ic +Ġ19 45 +Ġhold er +ro ot +oles ter +Ġrest art +B N +ĠHam as +ĠJ ob +Ġor b +Ġr am +Ġdiscl ose +Ġtransl ate +Ġimm igrant +Ġannoy ing +Ġtreat y +an ium +ĠTe a +ĠLeg ion +Ġcrowd s +ĠB ec +ĠA er +oh yd +B ro +Look ing +Ġl bs +Ġagg ress +Ġse am +Ġinter cept +ĠM I +mer cial +act iv +ĠC it +Ġdim ension +Ġconsist ency +Ġr ushing +ĠDou glas +Ġtr im +Inst all +ick er +Ġsh y +10 6 +Ġment ions +pe lled +ĠT ak +c ost +Ġclass room +Ġfort une +dri ven +Ġun le +ĠWhe el +Ġinvest or +ĠM asters +k it +Ġassoci ations +ĠEv olution +op ing +us cript +Ġprov incial +ĠWal ter +av i +S O +Ġun limited +Eng lish +ĠC ards +ĠEb ola +ne red +Ġreven ge +Ġout right +um per +Ġf itting +ĠSol id +Ġform ally +Ġproblem atic +Ġhaz ard +Ġenc ryption +Ġstraight forward +ĠA K +Ġp se +ĠOr b +ĠCh amber +ĠM ak +Cont ents +Ġloyal ty +Ġl yrics +ĠSy m +Ġwel comed +Ġcook ed +Ġmon op +Ġn urse +Ġmis leading +Ġe ternal +Ġshif ting +Ġ+ = +V is +Ġinst itutional +ill ary +Ġp ant +VER T +ĠA CC +ĠEn h +Ġinc on +ĠRE UTERS +Ġdon ated +âĢ¦âĢ¦ âĢ¦âĢ¦ +In tern +Ġexhib it +Ġt ire +ĠR ic +ĠCh ampion +ĠMu hammad +N ING +ĠSoc cer +Ġmob ility +Ġvary ing +ĠM ovie +Ġl ord +o ak +F ield +Ġve ctor +us ions +Ġsc rap +Ġen abling +m ake +T or +. * +| | +ĠWe bsite +ĠN PC +Ġsocial ist +ĠBill y +ĠAdd itional +Ġc argo +Ġfar ms +ĠSo on +ĠPri ze +Ġmid night +Ġ9 00 +se en +ĠSp ot +Ġshe ep +Ġspons ored +ĠH i +ĠJ ump +Ġ19 67 +Micro soft +ĠAg ent +Ġch arts +d ir +Ġadj acent +Ġtr icks +Ġman ga +Ġex agger +/ > +foot ball +ĠF CC +G C +ĠT ier +and ra +OU ND +% ), +Ġfru its +V C +ĠA A +R ober +Ġmid st +â Ĺ +ank a +Ġlegisl ature +ĠNe il +Ġtour ists +" " +ĠWar ning +ĠNever theless +ĠOffic ial +ĠWh atever +Ġm old +Ġdraft ed +Ġsubst ances +Ġbre ed +Ġt ags +ĠT ask +Ġver b +Ġmanufact ured +com ments +ĠPol ish +Pro v +Ġdetermin es +Ob ama +k ers +Ġutter ly +Ġse ct +sc he +ĠG ates +ĠCh ap +Ġal uminum +Ġz ombie +ĠT ouch +ĠU P +Ġsatisf y +Ġpred omin +asc ript +Ġelabor ate +Ġ19 68 +Ġmeas uring +ĠV ari +any ahu +Ġs ir +ul ates +id ges +ick ets +ĠSp encer +T M +oub ted +Ġpre y +Ġinstall ing +ĠC ab +re ed +re ated +Su pp +Ġwr ist +ĠK erry +10 7 +ĠK le +ĠR achel +Ġc otton +ĠA RE +ĠE le +Cont rol +Ġload s +ĠD od +an as +b one +Ġclass ical +ĠReg ional +ĠInt eg +V M +Ġdes ires +Ġaut ism +support ed +ĠM essage +Ġcomp act +writ er +Ġ10 9 +ĠHur ricane +c ision +Ġcy cles +Ġdr ill +Ġcolle ague +Ġm aker +G erman +Ġmist aken +S un +ĠG ay +Ġwhat soever +Ġsell s +ĠA irl +l iv +ĠO ption +Ġsol ved +Ġse ctors +Ġhorizont al +Ġequ ation +ĠSk ill +ĠB io +g ement +ĠSn ap +ĠLeg al +Ġtradem ark +Ġmake up +Ġassemb led +Ġsa ves +ĠHallow een +ĠVer mont +ĠFR OM +Ġfar ming +ĠP odcast +accept able +ĠHig her +Ġas leep +ull ivan +Ġrefere n +ĠLe v +Ġbul lets +ok o +H C +Ġst airs +Ġmain tains +ĠL ower +ĠV i +Ġmar ine +Ġac res +Ġcoordin ator +ĠJ oh +Ġcounterpart s +ĠBrother s +Ġind ict +b ra +Ġch unk +Ġc ents +H ome +ĠMon th +Ġaccording ly +if les +ĠGerm ans +ĠSy n +H ub +Ġey eb +âĶĢâĶĢ âĶĢâĶĢ +Ġr anges +ĠHoll and +ĠRob ot +f c +M ike +Ġpl asma +Ġsw ap +Ġath lete +ĠR ams +,' " +Ġinfect ions +Ġcor rid +Ġv ib +Ġpat ches +Ġtradition ally +Ġrevel ation +Ġswe ep +Ġgl ance +Ġin ex +200 3 +ĠR aw +work ing +os ures +ĠD at +ĠLyn ch +Ġle verage +ĠRe id +Ġcorrel ation +ian ces +av ascript +Ġrep ository +ret ty +Ġ19 72 +24 0 +Ġo un +p ol +ĠRe 
ed +Ġtact ical +is ite +App le +ĠQu inn +Ġrap ed +ill o +Euro pe +Ġalgorith ms +ĠRod rig +i u +Ġill um +Ġf ame +Ġintrodu cing +Ġdel ays +ĠRaid ers +Ġwh istle +Ġnovel s +ĠRe ally +Ġder iv +Ġpublic ations +ĠNe ither +ĠCom merce +Ġa ston +l anguage +Not es +ĠR oth +ĠF ear +Ġm ate +Ġpar ade +ĠQ B +Ġman eu +ĠC incinnati +m itting +Ġwa ist +ĠR ew +Ġdisc ont +Ð ° +Ġst aring +Ġal ias +Ġsec urities +Ġtoile t +ĠJ edi +Ġun law +v ised +//// //// +] ( +ĠWe iss +Ġpre st +ĠComp an +Ġmem o +ĠGr ace +J uly +ĠEl ite +cent er +ĠSt ay +Ġgal axy +Ġto oth +ĠS ettings +Ġsubject ed +ãĤ ¦ +Ġline back +Ġretail ers +ĠW ant +Ġd angers +A ir +Ġvolunt ary +ew ay +Ġinterpret ed +ot ine +à § +Ġp el +Serv ice +ĠEvent ually +Ġcare ers +Ġthreat en +Ġmem or +ĠBrad ley +anc ies +s n +ĠUn known +N ational +Ġsh adows +ail and +ĠD ash +Every one +izz ard +M arch += ( +Ġpull s +Ġstr anger +Ġback wards +ĠBern ard +imens ional +Ġch ron +Ġtheoret ical +k top +Ġw are +ĠInvest ig +ĠIn iti +ĠOper ations +o ven +oc ide +* / +Ġfl ames +ĠC ash +sh it +Ġc ab +ĠAn aly +ĠSe ah +Ġdefin ing +Ġorder ing +Ġimm un +Ġpers istent +AC H +Russ ian +m ans +Ġh ind +Ġphot ography + © +Ġh ug +Ġ10 7 +ĠH ence +i ots +ude au +Ġsubsid ies +Ġroutine ly +ĠDev ice +it ic +Ġdisg ust +land er +Ġ19 40 +Ġassign ment +ĠB esides +w ick +ĠD ust +us c +struct ed +11 1 +de velop +Ġf ond +Ġinter section +Ġdign ity +Ġcommission er +With out +re ach +Ġcart oon +Ġsc ales +ãĥ Ń +F IG +Ġsurve ys +ĠIndones ia +Ġart work +Ġun ch +Ġcy cling +un ct +au er +or ate +ĠOb viously +Ġcharacter ized +fe ld +Ġaff irm +Ġinn ings +Ġ é +Ġal iens +Ġcl oth +et ooth +ĠC ertain + § +Ġdig est +k now +ĠX L +Ġpredict ions +Ġd in +W AR +Ġafter math +Ex ample +ĠSu ccess +ĠTh r +IG N +Ġmin er +B us +Ġcl arity +heim er +ĠO UT +ĠS end +ĠCirc le +ĠD iet +Ġpron ounced +Ġcreat ors +Ġearthqu ake +atter y +ge ons +Ġo d +Ġlay ing +or p +U lt +pro ject +Ġunder min +Ġsequ el +S am +ĠDark ness +Ġre ception +b ull +Y S +ĠV ir +Ġsequ ences +ĠCo in +Ġout fit +ĠW ait +1 19 +Ġdel ivers +.... .. 
+Ġbl own +ĠE sc +ĠM ath +per m +ĠU l +Ġgl im +Ġfac ial +Ġgreen house +Ġto kens +/ - +ĠAnn ual +ĠON E +Ġteen age +ĠPhys ical +ĠL ang +ĠC elt +Ġsu ed +ivid ually +Ġpat ience +ch air +reg ular +Ġa ug +in v +ex cept +ĠL il +Ġn est +f d +s um +ĠCh ase +Russ ia +ĠJenn ifer +Ġoff season +Over all +F ore +Ġr iot +A ud +form er +Ġdefend ers +ĠC T +iot ic +rib ly +Ġautom ated +Ġpen is +Ġins ist +Ġdi agram +ĠS QL +ĠG arc +Ġw itch +cl ient +ier ra +am bers +Ġrec ount +f ar +V ery +oster one +Ġappreci ated +ĠPer fect +S ection +Ġd oses +oca ust +Ġcost ly +Ġg rams +ĠSh i +Ġwrest ling +Ġ19 71 +Ġtro phy +Ġn erve +ĠK az +ĠExper ience +Ġpled ged +Ġplay back +Ġcreat ivity +by e +Ġattack ers +Ġhold ers +ĠCo ach +ĠPh D +Ġtransf ers +Ġcol ored +ĠH indu +Ġd rown +Ġlist ened +ĠW A +ias m +P O +Ġappeal ing +Ġdiscl osed +ĠCh icken +ag ging +Ġple aded +Ġnav igation +ĠReturn s +Ġ[ [ +R OR +E A +Ġphotograp her +ĠR ider +ipp ers +Ġsl ice +Ġe rect +Ġhe d +iss ance +ĠVik ings +ur ious +Ġapp et +oubted ly +Ch ild +Ġauthent ic +o os +ĠM aking +Ġannoun cing +Ġb od +Ġmet er +ĠN ine +ĠR ogue +Ġwork force +Ġrenew ed +Ġorganis ations +ac s +P LE +Sh ort +Ġcomp ounds +ĠVis it +Ġen velop +ear th +Ġsupport ive +gg le +ĠBrus sels +ĠGu ild +Cre ate +RE L +Ġaver aged +Ġ19 69 +ri ages +Ġlength y +Ġforg ot +O kay +ĠE rd +Ġdeal er +Ġrec ession +D D +Ġdesper ately +Ġhun ger +Ġst icks +Ġm ph +ĠF aith +Ġintention ally +Ġdem ol +ue ller +ĠS ale +Ġde bris +s pring +Ġle ap +>> >> +Ġcontain ers +se lling +rane an +atter ing +Ġcomment ed +ĠC M +on ut +Ġwood s +es pecially +Ġorgan ize +iv ic +ĠWood s +ang a +s qu +Ġm aj +am on +Ġax is +Ġ19 74 +ĠDen mark +Ġwar rior +ĠP and +Ġout lined +ĠB O +ins ula +z illa +eb ook +Ġd are +Ġsear ched +Ġnav igate +S n +writ ing +Ġun ited +J apan +ĠHe brew +Ġfl ame +Ġrel ies +Ġcatch ing +ĠSh o +Ġimprison ment +Ġp ockets +Ġclos ure +ĠF am +t im +ade qu +Act ivity +Ġrecru iting +ĠW ATCH +ĠArgent ina +d est +Ġapolog ize +or o +Ġlack s +Ġtun ed +ĠGriff in +Ġinf amous +Ġcelebr ity +ss on +Ġ ---------------------------------------------------------------- +ĠIs is +ĠDis play +Ġcred ibility +Ġeconom ies +Ġhead line +ĠCow boys +Ġind ef +Ġl ately +Ġincent ives +but ton +ĠM ob +A ut +Ġres igned +ĠO m +c amp +Ġprof iles +Ġsche mes +olph ins +ay ed +Cl inton +en h +ĠY ahoo +Ġab st +Ġan k +su its +Ġw ished +ĠMar co +udd en +Ġsp here +ĠB ishop +Ġincorpor ated +ĠPl ant +11 4 +Ġh ated +p ic +Ġdon ate +Ġl ined +Ġbe ans +Ġsteal ing +Ġcost ume +Ġsher iff +Ġfor ty +Ġint act +Ġadapt ed +Ġtrave lling +b art +Ġnice ly +Ġdri ed +Ġsc al +os ity +NOT E +ĠB h +ĠBron cos +ĠI gn +Ġint imate +Ġchem istry +Ġopt imal +D eb +ĠGener ation +Ġ] , +ich i +ĠW ii +ĠYOU R +vent ions +W rite +Ġpop ul +un ning +ĠW or +V ol +Ġqu een +head s +K K +Ġanaly ze +op ic +ear chers +Ġd ot +leg raph +ast ically +Ġupgr ades +Ġca res +Ġext ending +Ġfree ze +Ġin ability +Ġorg ans +Ġpret end +Ġout let +11 3 +ol an +ĠM all +ul ing +t alk +Ġexpress ing +ĠAl ways +ĠBe gin +f iles +Ġlic enses +% % +ĠM itt +Ġfil ters +ĠMil waukee +G N +Ġunf old +M o +Ġnut rition +pp o +B o +Ġfound ing +Ġunder mine +Ġeas iest +ĠC zech +ĠM ack +Ġsexual ity +ĠN ixon +W in +ĠAr n +ĠK in +ãĤ £ +ic er +Ġfort un +Ġsurf aces +agh d +Ġcar riers +ĠP ART +ĠT ib +Ġinter val +Ġfrust rating +ĠSh ip +ĠAr med +ff e +Ġbo ats +ĠAb raham +in is +Ġsu ited +th read +i ov +ab ul +ĠVenezuel a +Ġto m +su per +Ġcast le +alth ough +iox ide +ec hes +Ġevolution ary +Ġnegoti ate +Ġconfront ed +Rem ember +Ġ17 0 +S uch +Ġ9 11 +m ult +ĠA byss +ur ry +ke es +spe c +ĠBarb ara +Ġbelong ing +Ġvill ain +ist ani +Ġaccount able 
+Ġport ions +ĠDe cl +U r +ĠK ate +g re +Ġmag azines +UC K +Ġregul ate +om on +ĠAl most +Ġover view +Ġsc ram +Ġl oot +ĠF itz +Ġcharacter istic +ĠSn ake +s ay +ĠR ico +Ġtra it +ĠJo ined +au cus +Ġadapt ation +ĠAirl ines +Ġarch ae +ĠI de +Ġb ikes +Ġliter ary +Ġinflu ences +ĠUs ed +C reat +Ġple a +ĠDef ence +ĠAss ass +Ġp ond +UL T +) " +Ġeval uated +Ġob taining +Ġdem ographic +Ġvig il +ale y +Ġsp ouse +ĠSeah awks +resp ons +ĠB elt +um atic +Ġr ises +run ner +ĠMichel le +Ġpot ent +r ace +ĠP AC +F ind +olester ol +IS S +ĠIntrodu ced +ress es +ign ment +O s +ĠT u +ĠDe x +ic ides +Ġspark ed +ĠLaur a +ĠBry ant +Ġsm iling +ĠNex us +Ġdefend ants +ĠCat al +Ġdis hes +sh aped +Ġpro long +m t +( $ +ãĢ Ĥ +Ġcalcul ations +ĠS ame +Ġp iv +H H +Ġcance lled +Ġgr in +Ġterrit ories +ist ically +C ome +ĠP arent +Pro ject +Ġneg lig +ĠPriv acy +Ġam mo +LE CT +olute ly +ĠEp ic +Ġmis under +w al +Apr il +m os +path y +ĠC arson +Ġalbum s +ĠE asy +Ġpist ol +< < +Ġ\ ( +t arget +hel p +Ġinter pre +cons cious +ĠH ousing +ĠJ oint +12 7 +Ġbe ers +s cience +ĠFire fox +effect ive +ĠC abin +ĠO kay +ĠApp lic +Ġspace craft +ĠS R +ve t +ĠStr ange +S B +Ġcor ps +iber al +e fficient +Ġpreval ence +Ġeconom ists +11 8 +Th read +ord able +OD E +ĠC ant +=- =- +if iable +ĠA round +Ġpo le +Ġwilling ness +CL A +ĠK id +Ġcomple ment +Ġsc attered +Ġin mates +Ġble eding +e very +Ġque ue +ĠTr ain +Ġh ij +Ġme lee +ple ted +Ġdig it +Ġg em +offic ial +Ġlif ting +Ð µ +Re qu +it utes +Ġpack aging +ĠWork ers +h ran +ĠLeban on +ol esc +Ġpun ished +ĠJ uan +Ġj am +ĠD ocument +Ġm apping +ic ates +Ġinev itably +Ġvan illa +ĠT on +Ġwat ches +Ġle agues +Ġiniti ated +deg ree +port ion +Ġrec alls +Ġru in +Ġm elt +I AN +Ġhe m +Ex p +Ġb aking +ĠCol omb +at ible +Ġrad ius +pl ug +ĠI F +et ically +Ġf ict +H ER +ĠT ap +atin um +Ġin k +Ġco h +ĠW izard +b oth +te x +Ġsp ends +ĠCurrent ly +ĠP it +Ġneur ons +ig nt +Ġr all +Ġbus es +b uilding +Ġadjust ments +Ġc ried +ibl ical +att ed +ĠZ ion +ĠM atter +Ġmed itation +ĠD ennis +Ġour s +ĠT ab +Ġrank ings +ort al +Ġad vers +Ġsur render +ĠG ob +ci um +om as +im eter +Ġmulti player +Ġhero in +Ġoptim istic +Ġindic ator +ĠBr ig +Ġgro cery +Ġapplic ant +ĠRock et +v id +Ex ception +p ent +Ġorgan izing +Ġenc ounters +ĠT OD +Ġjew el +S ave +ĠChrist ie +Ġhe ating +Ġl azy +ĠC P +Ġcous in +Con fig +Ġreg ener +Ġne arest +Ġachie ving +EN S +th row +ĠRich mond +ant le +200 2 +Ġan ten +b ird +13 3 +Ġn arc +r aint +un ny +ĠHispan ic +ourn aments +Ġprop he +ĠTh ailand +ĠT i +Ġinject ion +Ġinher it +rav is +Ġmed i +Ġwho ever +ĠDE BUG +G P +ĠH ud +C ard +p rom +Ġp or +Ġover head +L aw +Ġviol ate +Ġhe ated +Ġdescript ions +Ġachieve ments +ĠBe er +ĠQu ant +W as +Ġe ighth +ĠI v +Ġspecial ized +U PDATE +ĠD elta +P op +J ul +ĠAs k +oph y +Ġnews letters +ĠT ool +Ġg ard +ĠConf eder +ĠGM T +ĠAb bott +Ġimm unity +ĠV M +Is lam +Ġimpl icit +w d +Ġ19 44 +rav ity +omet ric +Ġsurv iving +ur ai +ĠPr ison +Ġr ust +ĠSk etch +Ġbe es +ĠThe ory +Ġmer it +T ex +ch at +Ġm im +Ġpast e +ĠK och +Ġignor ance +ĠSh oot +Ġbas ement +Un ited +ĠAd vis +he ight +Ġf oster +Ġdet ain +in formation +Ġne ural +' ; +Ġprov es +all ery +Ġinv itation +um bers +Ġc attle +Ġbicy cle +z i +Ġconsult ant +Ġap ology +ĠT iger +Ġ12 3 +99 9 +Ġind ividually +r t +ig ion +ĠBrazil ian +Ġdist urb +Ġentreprene urs +Ġfore sts +cer pt +pl ates +p her +clip se +Ġtw itter +Ġac ids +ograph ical +h um +ĠB ald +if ully +Ġcomp iler +ĠD A +Ġdon or +as i +Ġtrib al +l ash +ĠCon fig +Ġapplic ants +Ġsal aries +13 5 +Put in +ĠF ocus +ir s +Ġmisc onduct +ĠH az +Ġeat en +M obile +Mus lim +ĠMar cus +v iol +Ġfavor 
able +Ġst ub +ad in +ĠH ob +Ġfaith ful +Ġelectron ics +Ġvac uum +w ait +back ed +econom ic +d ist +Ġten ure +Ġsince re +ĠT ogether +ĠW ave +Ġprog ression +Ġden ying +Ġdist ress +br aska +th ird +Ġmix ing +Ġcolon ial +Ġpriv ately +Ġun rest +atern ity +Ġprem ises +ant i +greg ation +Ġlic ence +ĠH ind +ĠSam uel +Ġconvinc ing +ĠA ce +ĠR ust +ĠNet anyahu +Ġhand les +ĠP atch +orient ed +ah o +ĠG onz +Ġhack ers +claim er +Ġcustom s +ĠGr an +f ighters +Ġl uc +Ġman uscript +aren thood +Ġdev il +Ġwar riors +Ġoff enders +Will iam +Ġhol idays +Ġnight mare +Ġle ver +iff erent +St at +Ġexhib ition +put ed +ĠP ure +Ġal pha +Ġenthus iasm +ĠRepresent atives +E AR +ĠT yp +Ġwhe at +ĠAl f +Ġcor rection +Ġev angel +AT T +M iss +Ġs oup +Ġimpl ied +par am +Ġsex y +ĠL ux +Ġrep ublic +p atch +ab lish +Ġic ons +Ġfather s +ĠG ET +ĠCar ib +Ġregul ated +ĠCo hen +ĠBob by +Ġn er +Ġb ent +vent ory +ĠAl ong +ĠE ST +ĠWall ace +Ġmurd ers +r ise +ke ll +ĠCommon wealth +Ġn asty +et a +ĠM IT +Ġadminist ered +Ġgenuine ly +Ed itor +n ick +Ġhyd ro +**************** **************** +ĠB le +Ġfin es +Ġg orge +aus ible +r h +Ġapp le +ment ioned +Ġro pe +ot yp +H R +Ġdisappoint ing +Ġc age +n ik +Ġdoub ts +ĠF REE +print s +ĠM UST +Ġvend ors +ĠIn qu +Ġliber als +Ġcontract or +Ġup side +child ren +Ġtrick y +Ġregul ators +charg ed +l iter +Ġ *** +Ġreb ell +l ang +Ġloc als +Ġphys icians +Ġhe y +ar se +t m +ĠLe x +Ġbehavior al +success ful +F X +Ġbr ick +ov ic +Ġcon form +Ġreview ing +Ġins ights +Ġbi ology +ĠRem ove +ĠExt ra +Ġcomm itting +indu ced +ignt y +ig m +Ġat omic +Comm on +ĠE M +ĠP ere +ĠIt ems +e h +Ġpres erved +ĠH ood +Ġprison er +Ġbankrupt cy +Ġg ren +us hes +Ġexplo itation +Ġsign atures +Ġfin an +] ," +ĠM R +Ġme g +rem lin +Ġmusic ians +Ġselect ing +Ġexam ining +IN K +l ated +H i +Ġart ic +Ġp ets +Ġimp air +ĠM AN +Ġtable ts +in clude +R ange +Ġca ut +Ġlog s +Ġmount ing +Ġun aware +Ġdynam ics +ĠPalest ine +ĠQu arter +ĠPur ple +Ġm a +ĠIm port +Ġcollect ions +ci ation +Ġsuccess or +Ġcl one +Ġaim ing +Ġposs essed +Ġstick ing +Ġsh aking +Ġloc ate +ĠH ockey +T urn +17 0 +Ġfif teen +ĠHar rison +Ġcontinu ously +ĠT C +ĠVal ent +ĠRes cue +Ġby pass +am ount +Ġm ast +Ġprotect s +Ġart istic +Ġsomet ime +Ġsh oe +Ġshout ed +ific ant +et itive +ĠReg ister +ĠJ in +Ġconcent rated +ling ton +on ies +Ġgener ator +yr im +ĠAr men +Ġclear ing +id o +ĠT W +al ph +Ġlad ies +H ard +Ġdial og +Ġinput s +æ ľ +Ġpos es +Ġsl ots +ĠPrem ium +Ġle aks +Ġboss es +Ġ11 3 +c ourse +A cc +ĠNew ton +ĠAust ria +ĠM age +Ġte aches +ab ad +Ġwe ars +Ġc yl +Ġcur se +ĠS ales +ĠW ings +Ġp sy +Ġg aps +ĠIce land +ĠP interest +Ġland lord +Ġdefin itions +ĠK er +Ġsufficient ly +ĠP ence +ĠArch itect +Ġsur pass +Ġ11 4 +Ġsuper hero +ĠDise ase +Ġpri ests +ĠC ulture +Ġdefin itive +Ġsecret ly +ĠD ance +inst all +ch ief +ĠJess ica +W ould +Up dated +Ġlock er +ĠK ay +Ġmem orial +è ¦ +f at +Ġdis gu +Ġflav ors +ĠBase ball +ĠRes istance +Ġk icks +Ġen v +Ġteen agers +D ark +ĠC AR +Ġh alt +ĠL G +ĠGab riel +Ġfe ver +Ġs atur +Ġm all +Ġaffili ate +ĠS leep +ĠSpe cific +ĠV el +Ġj ar +ĠSac red +ĠEd wards +ĠA CL +Ġret ained +ĠG iant +Ġlim itation +in ces +Ġref usal +ĠT ale +ĠBut ler +Ġacc idents +ĠC SS +Ġimport ed +ĠCop y +Î ± +ER T +z el +Ġdiv isions +h ots +ĠAl b +ĠD S +Load er +W ashington +at isf +ĠCreat ive +\ . 
+ĠAut om +red ict +Ġrecept or +ĠCarl os +Met hod +ok a +Ġmal icious +Ġste pping +, [ +ĠD ad +Ġatt raction +ĠEffect s +ĠPir ate +ĠC er +ĠIndust ry +ĠR ud +Ġchar ter +Ġd ining +Ġins ists +Ġconfig ure +Ġ( # +ĠSim ple +ĠSc roll +UT C +17 5 +ĠK on +Ġmarket place +Ġ ãĤ +Ġref res +Ġg ates +er red +ĠP od +Ġbeh ave +Fr ank +n ode +Ġendors ed +he tt +as ive +ĠHom eland +Ġr ides +ĠLe ave +er ness +Ġflood ing +A FP +Ġris en +Ġcontin ually +Ġun anim +ĠCont ract +ĠP as +Ġgu ided +ĠCh ile +b d +Ġsu cc +pt ic +Ġcomm ittees +ĠL uther +ĠAny one +Ġs ab +12 4 +Ġp ixel +ĠB ak +ĠT ag +ĠBenn ett +En ter +sm all +ĠPresident ial +Ġp ul +Ġcontr ace +arch ive +Ġcoast al +ĠK ids +19 2 +âĢ ² +ick y +ING TON +Ġw olf +ĠSt alin +T ur +id get +am as +ĠUn less +Ġspons or +Ġmor ph +ĠCho ose +Ġrun ner +Ġun bel +Ġm ud +ĠMan a +Ġdub bed +Ġg odd +ure rs +wind ow +Ġrel ied +Ġcelebr ating +os c +Ġ13 5 +Ġlobb ying +Ġincom plete +Ġrestrict ion +Ġinc ap +it us +Ġexpect ation +ĠAp ollo +Ġint ens +Ġsyn c +G H +Ġmanip ulation +B Y +Ġspe ar +Ġbre asts +Ġvol can +il ia +M aterial +Ġform ats +ĠB ast +Ġparliament ary +Ġsn ake +Ġserv ants +ĠTr udeau +ĠGr im +ĠArab ic +ĠSC P +ĠBoy s +st ation +Ġprospect ive +ord e +in itialized +Ġb ored +AB LE +Ġaccess ed +Ġtax i +ĠShe ll +aid en +urs ed +in ates +ĠIns urance +ĠPet e +Sept ember +6 50 +Ġad ventures +ĠCo ver +Ġt ribute +Ġsk etch +Ġem power +Ġ Ø +ĠGl enn +ĠD aw += \" +ĠPolit ics +Ġgu ides +Ġd ioxide +ĠG ore +ĠBr ight +ĠS ierra +Ġval ued +c ond +Ġpo inter +Se lect +Ġrisk y +Ġabsor b +im ages +Ġref uses +Ġbon uses +__ _ +Ġh ilar +ĠF eatures +2 20 +ĠCollect or +F oot +Ġ19 64 +cul us +Ġd awn +Ġwork out +ĠL O +Ġphilosoph ical +ĠSand y +ĠYou th +Ġl iable +A f +bl ue +Ġovert urn +less ness +ĠTrib une +ĠIn g +Ġfact ories +Ġcat ches +Ġpr one +Ġmat rix +Ġlog in +Ġin acc +Ġex ert +s ys +Ġneed le +ĠQ ur +Ġnot ified +ould er +t x +Ġremind s +Ġpublisher s +Ġn ort +Ġg it +Ġfl ies +ĠEm ily +Ġflow ing +ĠAl ien +ĠStr ateg +Ġhard est +Ġmod ification +AP I +ĠM Y +Ġcr ashes +st airs +n umber +Ġur ging +ch annel +ĠFal con +Ġinhabit ants +Ġterr ifying +Ġutil ize +Ġban ner +Ġcig arettes +Ġsens es +ĠHol mes +Ġpract ition +ĠPhill ips +ott o +Ġcomp ile +Mod el +ĠK o +Ġ[ ] +Americ ans +ĠTer ms +Ġmed ications +ĠAn a +Ġfundament ally +ĠNot ice +Ġwe aker +Ġ 0000 +Ġgar lic +Ġout break +Ġeconom ist +ĠB irth +Ġobst acles +ar cer +ĠOr thodox +Ġplace bo +ĠC rew +asp berry +ĠAng els +Ġdis charge +Ġdestruct ive +11 7 +ĠR ising +Ġd airy +l ate +Ġcoll ision +ĠTig ers +ean or +ocument ed +ĠIn valid +Ġd ont +ĠL iter +ĠV a +Ġhyd rogen +Ġvari ants +ĠBrown s +Ġ19 65 +Ġind igenous +Ġtrad es +Ġremain der +Ġswe pt +ĠImp act +Ġred ist +Ġun int +grad uate +ãĥ ķ +ĠW ILL +ãģ® ç +ĠCrit ical +Ġf isher +Ġv icious +Ġrevers ed +Y ear +ĠS ox +Ġshoot ings +Ġfil ming +Ġtouchdown s +ai res +m el +Ġgrand father +Ġaffect ion +ing le +Ġover ly +Add itional +Ġsup reme +ĠGr ad +Ġsport ing +Ġmer cy +ĠBrook s +ount y +Ġperform s +Ġtight ly +Ġdem ons +Ġkill ings +Ġfact ion +ĠNov a +aut s +Ġund oubtedly +ar in +Ġunder way +ra k +Ġl iv +ĠReg ion +Ġbrief ing +s ers +cl oud +ĠM ik +us p +Ġpred iction +az or +Ġport able +ĠG and +Ġpresent ing +Ġ10 80 + » +ush i +ĠSp ark +there um +Ġjust ification +ĠN y +Ġcontract ors +ming ham +ĠSt yle +å ħ +ĠChron icles +ĠPict ure +Ġprov ing +Ġw ives +set t +Ġmole cules +ĠFair y +Ġconsist ing +Ġp ier +al one +in ition +Ġn ucle +j son +Ġg otta +Ġmob il +Ġver bal +ar ium +Ġmon ument +uck ed +Ġ25 6 +T ech +mine craft +ĠTr ack +Ġt ile +Ġcompat ibility +as is +Ġs add +Ġinstruct ed +ĠM ueller +Ġle thal +Ġhorm one +Ġor che +el se +Ġske let 
+Ġentert aining +Ġminim ize +ag ain +Ġunder go +Ġconst raints +Ġcig arette +ĠIslam ist +Ġtravel s +ĠPant hers +l ings +C are +Ġlaw suits +ur as +Ġcry st +Ġlow ered +Ġaer ial +Ġcomb inations +Ġha un +Ġch a +Ġv ine +Ġquant ities +Ġlink ing +b ank +Ġso y +B ill +ĠAngel a +Ġrecip ient +ĠProt est +Ġs ocket +Ġsolid arity +Ġâ Ĩ +m ill +Ġvar ies +ĠPak istani +Dr agon +Ġun e +Ġhor izon +³³³³ ³³³³ +Ġprov inces +Ġfrank ly +Ġenact ed +not es +[ ' +Ġ19 2 +ocr acy +Ġendorse ment +Ġover time +Tr ue +L ab +lic ted +ĠD NC +Ġbe ats +ĠJam ie +15 2 +ĠIN T +Cont act +Ġaccount ed +h ash +ĠPack ers +p ires +Ġles bian +Ġamend ments +Ġhop eful +ĠFin land +Ġspot light +Ġconfig ured +Ġtrou bled +Ġg aze +ĠCal gary +Ġrel iability +Ġins urg +sw er +b uy +ĠSk in +Ġp ixels +Ġhand gun +Ġpar as +Ġcateg or +ĠE L +ĠRe x +Ind eed +Ġkind a +Ġconj unction +ĠBry an +ĠMan ufact +y ang +Pl us +S QL +ish ment +Ġdom inate +Ġn ail +Ġo ath +Ġeru pt +ĠF ine +it bart +ĠCh ip +ĠAb d +ĠN am +Ġbuy er +Ġdiss ent +Le aks +Cont in +Ġr ider +ĠSome one +Ġill usion +c in +ĠBoe ing +Ġin adequ +ov ation +i ants +Ġreb uild +4 50 +ĠDest iny +S W +ĠT ill +H it +ia z +ĠBang l +acher s +ĠRe form +Ġse gments +Ġsystem atic +d c +ĠConserv atives +Ġport al +h or +ĠDragon bound +Ġdrag ged +om o +Ġthe e +ad vert +ĠRep orts +ĠE t +Ġbarrel s +Aug ust +Ġcompar isons +Ġhe x +Ġan throp +" [ +bor ough +ab i +Ġpict ured +play ing +ĠAdd ress +ĠMir ror +Sm ith +Ġt ires +ĠN PR +AA AA +Ġclass ification +ĠTh an +ĠH arm +ĠR A +Ġreject ion +min ation +Ġr anged +ĠF alls +D I +H ost +ãĤ ´ +ĠEx ample +list ed +th irds +Ġsaf egu +br and +Ġprob able +Can ada +IT ION +ĠQ aeda +Ġch ick +Ġimport s +h it +l oc +W W +Ġble w +Ġany time +Ġwh oles +ik ed +Ġcal culation +cre ate +ĠO ri +Ġupgr aded +Ġapp ar +ut ory +ĠM ol +B rit +ĠJ ong +IN AL +ĠStart ing +Ġd ice +urt le +Ġre lying +cl osure +Ġprof itable +Ġsl aughter +ĠMan ual +c aster +Ġ" $ +Ġfe ather +ĠSim ply +ie ves +Ġdeter ior +ĠPC I +Ġst amp +Ġfl aws +Ġsh ade +ham mer +Ġpass port +Ġcont ing +am el +Ġobser vers +Ġneg lect +ĠR B +ĠBrother hood +Ġskept ical +f amily +us k +Ġemotion ally +â Ļ +ĠBet a +ason able +id ity +ĠM ul +Ġkick ing +ĠC arm +oll ah +VERT IS +ĠAt hen +Ġlad der +ĠBul let +å £ +00 01 +ĠWild life +ĠM ask +ĠN an +R ev +Ġun acceptable +leg al +Ġcrowd ed +ag i +ĠC ox +j e +Ġmor ality +Ġfu els +Ġc ables +Ġman kind +ĠCarib bean +Ġanch or +Ġby te +ĠO ften +ĠO z +Ġcraft ed +Ġhistor ian +ĠW u +Ġtow ers +ĠCitiz ens +Ġhel m +Ġcred entials +Ġsing ular +ĠJes se +Ġtack les +Ġcont empt +Ġa fore +ĠSh adows +Ġn il +Ġur gent +app le +bl ood +Ġv on +Ġoff line +Ġbreat he +Ġj umps +Ġirre levant +ox ic +om al +import ant +J im +Ġgl oves +arm ing +dep th +Ġtal ents +ook ie +ĠS B +Ġpal m +uff s +est a +IG H +Ġcan on +ĠVer izon +ĠP le +Ġcou pled +vel t +Ġfundra ising +ĠGet ting +ĠD LC +Ġmathemat ical +ĠH S +ĠCard inals +te lling +Ġspons ors +Ġ Ï +ĠBull s +op tion +Ġprop ose +Ġmem orable +Ġembr aced +Ġdecl ining +He alth +ed a +Ġ} ; +Ġsp am +m ile +Ġpit cher +ĠE ight +Ġcar ing +ut ic +ro le +Ġair line +ernand ez +ĠAth let +Ġcert ification +ux e +rig er +Ġem pir +Ġsens ation +Ġdis m +Ġb olt +Ġev olve +H ouse +Ġconsult ation +ĠD uty +Ġtou ches +ĠN athan +Ġf aint +h ad +" ( +ĠCons umer +ĠExt reme +Ġ12 7 +ĠHer m +ĠSac rament +iz oph +Ġanx ious +ul ously +Ġsoc ially +ĠU TC +Ġsol ving +ĠLet ter +Hist ory +ed uc +Pr ice +) ); +Ġrel oad +am ic +Ġp ork +Ġdisc ourse +Ġt ournaments +ai ro +ĠK ur +ĠCost a +Ġviol ating +Ġinterf ere +Ġrecre ational +uff le +Ġspe eches +Ġneed ing +Ġremem bers +Ġcred ited +n ia +f ocused +amer a +Ġb ru +um bs +ĠCub an 
+Ġpreced ing +Ġnons ense +ac ial +Ġsmart phones +ĠSt ories +S ports +ĠEmer gency +oun cing +ef ined +Ġb er +Ġconsult ing +Ġm asters +he astern +." [ +ĠRun ning +Ġsus cept +ĠF eng +Americ a +pr ises +st itial +ĠWeek ly +ĠGreat er +mod ules +if ter +G raphics +ul er +Ġwho lly +Ġsupp ress +Ġconce aled +Ġhapp ily +Ġaccept s +ĠEn joy +Ġr ivers +ĠEx cept +2 25 +ĠN HS +ĠMc Connell +Ġp ussy +fer red +ut able +Ġatt ain +Ġ> = +Ġdepos its +roph ic +Ġnot orious +ĠSh aw +il itation +Ġepid emic +all ic +Ġsmall est +ov ich +Ġaccess ories +per ties +Ġsur plus +ĠMe ch +Ġamb ig +ĠImm igration +Ġch im +ev al +Ġpract icing +ĠMyster y +Ġdom ains +ĠSil icon +app s +Ġkilomet ers +e a +ĠSm ash +Ġwarrant y +Ġn ost +s il +re v +J on +ĠDub lin +Ġtast es +Ġb out +g reat +er ror +Ġsw itches +ĠB apt +D O +ok i +Ġsour ced +pro du +Ġattach ment +ĠIss ue +ĠQuest ion +Jo in +Ġf itted +Ġunlaw ful +^ ^ +ere k +Ġauthent ication +Ġst ole +Ġaccount ability +l abel +S earch +Ġal beit +atic an +fund ed +ĠAdd ing +ĠI Q +Ġsub mar +l it +a que +ĠLear ning +Ġint eger +M aster +ĠCh rom +Ġprem ier +O p +ĠLi u +Ġbl essed +ĠGl obe +ĠResp onse +Ġlegit im +ĠMer kel +Ġdispos al + ´ +Ġgau ge +pe at +Ġindu ced +Ġquestion able +arth y +ĠV it +ĠF eed +U ntil +U t +worth y +R Y +ĠH erald +ĠHam mer +Ġmed al +ĠR ivers +ĠH ack +Ġclar ify +Ġtrack ed +Ġautonom ous +Ġten ant +ĠQ atar +er ie +Ġgr im +ĠMon itor +Ġresist ant +ĠSpe c +ĠWell s +N AS +14 8 +Ġmin ers +iot ics +Ġmiss es +11 6 +g ian +g it +ĠE yes +p res +Ġgrad uated +Ġang el +Ġsyn chron +Ġefficient ly +Ġtrans mitted +H arry +Ġglob ally +EN CE +ĠMont ana +r aged +ĠPre vention +Ġp iss +ĠL l +Ġshe lf +ĠB JP +ĠTest ament +ĠL ate +ik er +ĠH app +ĠJul ian +h all +Ġsp ont +Ġshut down +Ġincons istent +Ġsubscrib ers +Ġske leton +ĠNe braska +Ġins pire +ĠV oid +F eed +Ġang les +ĠSpr ings +Ġbench mark +Ġvacc ines +izoph ren +se xual +uff ed +Ġsh ine +ĠK ath +Ġgest ure +ine a +Ġr ip +Ġopp ression +Ġcons cience +b t +ĠL um +Ġinc idence +ĠF a +w r +Ġmin eral +ĠSp urs +alk y +Ġth under +Ġop io +Be ing +ĠPal m +Ġwas ted +Ġl b +i aries +ĠIniti ative +Ġcur ric +Ġmark er +ĠMc L +Ġext ensions +ĠP v +ĠAr ms +Ġoffer ings +Ġdef enses +Ġvend or +Ġcontrad ict +ĠCol in +Ġredd it +Ġper ipher +12 2 +Ġs ins +E dit +IC T +So ft +ĠSh ah +Ġadministr ator +ĠT rip +Ġporn ography +Ġtu ition +in ence +ĠPro gress +Ġcat alog +Ġsu ite +Ġh ike +Ġreprodu ctive +eng ine +Ġd rought +ĠNo ah +Ġ2 30 +Ġd ude +Ġrelax ed +Ġpart ition +Ġparticip ant +Ġtel esc +Ġfe as +ĠF F +own er +Ġswe eping +Ġl enses +Ġmatch up +ĠRe pl +ourn als +Ġcred ible +Ġgrand mother +Ġther mal +Ġsubscrib ing +Ġident ities +col m +U CT +Ġreluct ant +us ers +ĠC ort +Ġassist ed +OS S +ATION S +IS H +Ġpharm aceutical +ic able +ad ian +ĠSon ic +ĠF ury +ĠM ong +A H +ĠPsych ology +Ġph osph +Ġtreat s +Ń Ķ +Ġstead ily +ĠHell o +Ġrel ates +Ġcl ue +Ex pl +a uth +Ġrev ision +Ġe ld +os ion +Ġbr on +14 4 +ri kes +Ġmin es +Ġblank et +ĠF ail +el ed +ĠIm agine +ĠPl anned +a ic +Re quest +M ad +ĠHor se +ĠEag le +Ġcap ac +15 7 +Ġl ing +ĠN ice +ĠP arenthood +min ster +og s +ens itive +Not hing +Ġcar n +F in +ĠP E +Ġr ifles +ĠL P +S and +Ġgui Active +Ġtour ist +C NN +Ġunve iled +Ġpredec essor +} { +u ber +Ġoff shore +Ġopt ical +ĠR ot +ĠPear l +et on +Ġst ared +Ġfart her +at ility +cont in +ĠG y +ĠF oster +ĠC oc +ri ents +Ġdesign ing +ĠEconom y +ON G +W omen +ĠN ancy +er ver +Ġmas cul +Ġcasual ties +Ġ2 25 +ĠS ullivan +ĠCh oice +Ġa ster +w s +Ġhot els +Ġconsider ations +Ġcou ch +ĠSt rip +ĠG n +Ġmanip ulate +l ied +Ġsynt hetic +Ġassault ed +Ġoff enses +ĠDra ke +Ġim pe +Oct ober +ĠHer itage 
+h l +ĠBl air +Un like +Ġg rief +Ġ4 50 +Ġopt ed +Ġresign ation +il o +Ġver se +ĠT omb +Ġu pt +Ġa ired +ĠH ook +ĠML B +Ġassum es +out ed +ĠV ers +Ġinfer ior +Ġbund le +ĠD NS +ograp her +Ġmult ip +ĠSoul s +Ġillust rated +Ġtact ic +Ġdress ing +Ġdu o +Con f +Ġrel ent +Ġc ant +Ġscar ce +Ġcand y +ĠC F +Ġaffili ated +Ġspr int +yl an +ĠGarc ia +Ġj unk +Pr int +ex ec +C rit +Ġport rait +ir ies +ĠOF F +Ġdisp utes +W R +L ove +ãģ Ħ +ĠRe yn +Ġh ipp +op ath +Ġflo ors +ĠFe el +Ġwor ries +Ġsett lements +ĠP os +Ġmos que +Ġfin als +Ġcr ushed +ĠPro bably +ĠB ot +ĠM ans +ĠPer iod +Ġsovere ignty +Ġsell er +Ġap ost +Ġam ateur +Ġd orm +Ġconsum ing +Ġarm our +ĠRo ose +Ġint ensive +Ġelim inating +ĠSun ni +ĠAle ppo +j in +Ġadv ise +p al +ĠH alo +Ġdes cent +Ġsimpl er +Ġbo oth +ST R +L ater +ĠC ave +== = +Ġm ol +Ġf ist +Ġshot gun +su pp +Ġrob bery +E ffect +Ġobsc ure +ĠProf essional +Ġemb assy +Ġmilit ant +Ġinc arcer +Ġgener ates +Ġlaun ches +Ġadministr ators +Ġsh aft +Ġcirc ular +Ġfresh man +ĠW es +ĠJo el +ĠD rew +ĠDun can +ĠApp arently +s ight +ĠIntern al +ĠInd ividual +ĠF E +Ġb ore +ĠM t +Ġbroad ly +ĠO ptions +ount ain +ip es +ĠV ideos +20 4 +Ġh ills +Ġsim ulation +Ġdisappoint ment +it an +ĠLabor atory +Ġup ward +Ġbound ary +Ġdark er +h art +Ġdomin ance +C ong +ĠOr acle +ĠL ords +Ġscholars hip +ĠVin cent +ed e +ĠR ah +Ġencour ages +ro v +Ġqu o +Ġprem ise +ĠCris is +ĠHol ocaust +Ġrhyth m +Ġmet ric +cl ub +Ġtransport ed +Ġn od +ĠP ist +Ġancest ors +ĠFred er +th umbnails +ĠC E +ON D +Ph il +ven ge +ĠProduct s +cast le +Ġqual ifying +ĠK aren +VERTIS EMENT +Ġmight y +Ġexplan ations +Ġfix ing +D i +Ġdecl aring +Ġanonym ity +Ġju ven +ĠN ord +ĠDo om +ĠAct ually +O k +ph is +ĠDes ert +Ġ11 6 +I K +ĠF M +Ġinc omes +V EL +ok ers +Ġpe cul +Ġlight weight +g ue +Ġacc ent +Ġincre ment +ĠCh an +Ġcompl aining +ĠB aghd +Ġmidfield er +Ġover haul +Pro cess +ĠH ollow +ĠTit ans +Sm all +man uel +ĠUn ity +ĠEv ents +S ty +Ġdispro portion +n esty +en es +ĠC od +Ġdemonstr ations +ĠCrim son +ĠO H +Ġen rolled +Ġc el +ĠBre tt +Ġa ide +Ġhe els +Ġbroad band +Ġmark ing +Ġw izard +ĠN J +ĠChief s +Ġingred ient +Ġd ug +ĠSh ut +urch ase +end or +Ġfar mer +ĠGold man +12 9 +15 5 +Or der +Ġl ion +i ably +Ġst ain +ar ray +ilit ary +ĠFA Q +Ġexpl oded +ĠMcC arthy +ĠT weet +ĠG reens +ek ing +l n +ens en +Ġmotor cycle +Ġpartic le +Ġch olesterol +B ron +Ġst air +Ġox id +Ġdes irable +ib les +Ġthe or +for cing +Ġpromot ional +ov o +b oot +ĠBon us +raw ling +Ġshort age +ĠP sy +Ġrecru ited +Ġinf ants +Ġtest osterone +Ġded uct +Ġdistinct ive +Ġfirm ware +bu ilt +14 5 +Ġexpl ored +Ġfact ions +Ġv ide +Ġtatt oo +Ġfinan cially +Ġfat igue +Ġproceed ing +const itutional +Ġmis er +Ġch airs +gg ing +ipp le +Ġd ent +Ġdis reg +ç Ķ +st ant +ll o +b ps +aken ing +Ġab normal +ĠE RA +å£ « +ĠH BO +ĠM AR +Ġcon cess +Ġserv ant +Ġas pir +l av +ĠPan el +am o +Ġprec ip +Ġrecord ings +Ġproceed ed +Ġcol ony +ĠT ang +ab lo +Ġstri pped +Le ft +to o +Ġpot atoes +Ġfin est +% ). 
+Ġc rap +ĠZ ach +ab ases +ĠG oth +Ġbillion aire +w olf +Ġsan ction +S K +Ġlog ged +P o +ey ed +un al +Ġcr icket +Ġarm ies +Ġunc overed +Cl oud +ó n +Ġreb ounds +Ġm es +O per +P ac +Ġnation ally +Ġinsert ed +p ict +Ġgovern ance +Ð ¸ +Ġprivile ges +G ET +Ġfavor ites +im ity +Ġlo ver +the m +em pl +Ġgorge ous +An n +Ġsl ipped +Ġve to +B ob +Ġsl im +u cc +ĠF ame +udden ly +Ġden ies +ĠM aur +Ġdist ances +Ġw anna +t ar +ĠS ER +Ġâ Ī +Ġle mon +at hetic +Ġlit eral +Ġdistingu ished +Ġansw ering +G I +Ġrelig ions +ĠPhil os +ĠL ay +Ġcomp os +ire ments +ĠK os +ine z +roll ing +Ġyoung est +and ise +ĠB orn +Ġalt ar +am ina +ĠB oot +v oc +Ġdig ging +Ġpress ures +Ġl en +26 4 +Ġassass ination +ĠBir mingham +ĠMy th +Ġsovere ign +ĠArt ist +ĠPhot ograph +Ġdep icted +Ġdisp ens +orth y +Ġamb ul +int eg +ĠC ele +ĠTib et +Ġhier archy +Ġc u +Ġpre season +ĠPet erson +Ġcol ours +Ġworry ing +Ġback ers +ĠPal mer +ĠÎ ¼ +Ġcontribut or +Ġhear ings +Ġur ine +Ġ Ù +ourge ois +Sim ilar +ĠZ immer +s omething +ĠUS C +Ġstrength s +ĠF I +Ġlog ging +As ked +ĠTh ai +in qu +ĠW alt +Ġcrew s +it ism +3 01 +Ġshar ply +um ed +Ġred irect +r ators +In f +ĠWe apons +Ġte asp +19 99 +L ive +ĠEs pecially +ĠS ter +ĠVeter ans +Ġint ro +other apy +Ġmal ware +Ġbre eding +Ġmole cular +ĠR oute +ĠCom ment +oc hem +Ġa in +Se ason +Ġlineback er +Ä « +ĠEconom ics +es ar +ĠL ives +ĠEm ma +Ġk in +ĠTer rit +Ġpl anted +ot on +ĠBut ter +ĠSp ons +P ER +Ġdun geon +Ġsymb olic +Ġfil med +Ġdi ets +Ġconclud es +Ġcertain ty +ĠForm at +Ġstr angers +form at +ĠPh ase +Ġcop ied +Ġmet res +ld a +ĠUs ers +Ġdeliber ate +Ġwas hed +ĠL ance +im ation +Ġimpro per +ĠGen esis +ick r +ĠK ush +Ġreal ise +Ġembarrass ing +alk ing +b ucks +Ġver ified +Ġout line +year s +ĠIn come +20 2 +Ġz ombies +F inal +ĠMill enn +Ġmod ifications +ĠV ision +ĠM oses +ver b +iter ranean +ĠJ et +Ġnav al +ĠA gg +Ġur l +Ġvict ories +Ġnon etheless +Ġinj ust +ĠF act +ç ļ +Ġins ufficient +re view +face book +Ġnegoti ating +Ġguarant ees +im en +uten berg +Ġg ambling +Ġcon gr +Load ing +Ġnever theless +Ġpres idents +ĠIndust rial +Ġ11 8 +Ġp oured +ĠT ory +Ġ17 5 +Ġ: = +Sc ott +ange red +T ok +Ġorgan izers +M at +ĠG rowth +Ġad ul +Ġens ures +Ġ11 7 +é¾į å +Ġmass acre +Ġgr ades +be fore +AD VERTISEMENT +ĠSl ow +ĠM MA +âĢĶ " +ĠV atican +Q aeda +Ġo we +66 66 +ĠS orry +ĠGr ass +Ġbackground s +Ġexha usted +Ġcl an +Ġcomprom ised +ĠE lf +ĠIsa ac +ens on +In vest +IF A +Ġinterrupt ed +ãĥī ãĥ© +Ġtw isted +ĠDrag ons +M ode +ĠK remlin +Ġfert il +he res +ph an +ĠN ode +f ed +ĠOr c +Ġunw illing +C ent +Ġprior it +Ġgrad uates +Ġsubject ive +Ġiss uing +ĠL t +Ġview er +Ġw oke +Th us +bro ok +Ġdep ressed +Ġbr acket +ĠG or +ĠFight ing +Ġstri ker +Rep ort +ĠPortug al +Ġne o +w ed +19 9 +Ġflee ing +sh adow +ident ified +US E +Ste am +Ġstret ched +Ġrevel ations +art ed +ĠD w +Ġalign ment +est on +ĠJ ared +S ep +Ġblog s +up date +g om +r isk +Ġcl ash +ĠH our +Ġrun time +Ġunw anted +Ġsc am +Ġr ack +Ġen light +on est +ĠF err +Ġconv ictions +Ġp iano +Ġcirc ulation +ĠW elcome +Ġback lash +ĠW ade +Ġrece ivers +ot ive +J eff +Ġnetwork ing +ĠPre p +ĠExpl orer +Ġlect ure +Ġupload ed +ĠMe at +B LE +ĠNaz is +ĠSy nd +st ud +ro ots +ri ans +Ġportray ed +Ġ ?? 
+ĠBudd ha +s un +Rober t +ĠCom plex +Ġover see +Ġste alth +T itle +ĠJ obs +ĠK um +Ġappreci ation +ĠM OD +Ġbas ics +Ġcl ips +Ġnurs ing +Ġpropos ition +Ġreal ised +ĠNY C +Ġall ocated +ri um +ar an +ĠPro duction +ĠV ote +Ġsm ugg +Ġhun ter +az er +ĠCh anges +Ġfl uct +y on +Ar ray +Ġk its +W ater +Ġuncom mon +Ġrest ing +ell s +w ould +Ġpurs ued +Ġassert ion +omet own +ĠMos ul +ĠPl atform +io let +Ġshare holders +Ġtra ils +P ay +ĠEn forcement +ty pes +ĠAn onymous +Ġsatisf ying +il ogy +Ġ( ' +w ave +c ity +Ste ve +Ġconfront ation +ĠE ld +C apt +ah an +ht m +ĠC trl +ON S +2 30 +if a +hold ing +Ġdelic ate +Ġj aw +ĠGo ing +or um +S al +Ġd ull +ĠB eth +Ġpr isons +Ġe go +ĠEl sa +avor ite +ĠG ang +ĠN uclear +Ġsp ider +ats u +Ġsam pling +Ġabsor bed +ĠPh arm +iet h +Ġbuck et +ĠRec omm +O F +ĠF actory +AN CE +Ġb acter +H as +ĠObs erv +12 1 +Ġprem iere +De velop +Ġcur rencies +C ast +Ġaccompany ing +ĠNash ville +Ġfat ty +ĠBre nd +Ġloc ks +Ġcent ered +ĠU T +augh s +or ie +ĠAff ordable +v ance +D L +em et +Ġthr one +ĠBlu etooth +Ġn aming +if ts +AD E +Ġcorrect ed +Ġprompt ly +ĠST R +Ġgen ome +Ġcop e +Ġval ley +Ġround ed +ĠK end +al ion +p ers +Ġtour ism +Ġst ark +v l +Ġblow ing +ĠSche dule +st d +Ġunh appy +Ġlit igation +ced es +Ġand roid +Ġinteg ral +ere rs +ud ed +t ax +Ġre iter +ĠMot ors +oci ated +Ġwond ers +ĠAp ost +uck ing +ĠRoose velt +f ram +Ġyield s +Ġconstit utes +aw k +Int erest +Ġinter im +Ġbreak through +ĠC her +Ġpro sec +ĠD j +ĠM T +Res p +ĠP T +Ġs perm +ed it +B T +Lin ux +count ry +le ague +Ġd ick +Ġo ct +Ġinsert ing +Ġsc ra +ĠBrew ing +Ġ19 66 +Ġrun ners +Ġpl un +id y +ĠD ian +Ġdys function +Ġex clusion +Ġdis gr +Ġincorpor ate +Ġrecon c +Ġnom inated +ĠAr cher +d raw +achel or +Ġwrit ings +Ġshall ow +Ġh ast +ĠB MW +ĠR S +Ġth igh +Ġ19 63 +Ġl amb +Ġfav ored +ag le +Ġcool er +ĠH ours +ĠG U +ĠOrig in +Ġglim pse +---------------- ---- +L im +Ġche ek +Ġj ealous +- ' +Ġhar ness +ĠPo ison +Ġdis abilities +ne apolis +Ġout look +Ġnot ify +ĠIndian apolis +Ġab rupt +ns ic +Ġenc rypted +Ġfor fe +reat h +Ġr abb +Ġfound ations +Ġcompl iment +ĠInter view +ĠS we +Ġad olesc +Ġmon itors +ĠSacrament o +Ġtime ly +Ġcontem pl +Ġposition ed +Ġpost ers +ph ies +iov ascular +v oid +ĠFif th +Ġinvestig ative +OU N +Ġinteg rate +ĠIN C +ish a +ibl ings +ĠRe quest +ĠRodrig uez +Ġsl ides +ĠD X +Ġfemin ism +Ġdat as +Ġb end +ir us +ĠNig eria +F ox +Ch ange +Ġair plane +ĠLad en +Ġpublic ity +ixt y +Ġcommit ments +Ġaggreg ate +Ġdisplay ing +ĠAr row +Ġ12 2 +Ġrespect s +and roid +s ix +ĠSh a +Ġrest oration +) \ +W S +oy s +Ġillust rate +with out +12 6 +ĠâĶ Ĥ +Ġpick up +n els +Ġ .... +f ood +ĠF en +) ? 
+[BPE merge table: one `+`-prefixed merge pair per added line, GPT-2-style tokenizer vocabulary with `Ġ` marking a leading space — e.g. `Ġphenomen a`, `Ġcompan ions`, `ĠW rite` — continuing for many thousands of entries]
+Ġm ates +ĠS z +ĠC OP +ol ate +O FF +Ġre charge +c aps +ĠYork er +ic one +Ġgal axies +ile aks +D ave +ĠP uzz +ĠCelt ic +ĠA FC +27 6 +ĠS ons +Ġaffirm ative +H or +Ġtutorial s +ĠC ITY +ĠR osa +ĠExt ension +Ser ies +Ġf ats +Ġr ab +l is +Ġun ic +Ġe ve +ĠSp in +Ġadul thood +ty p +Ġsect arian +Ġcheck out +ĠCy cl +S ingle +Ġmart yr +Ġch illing +88 8 +ou fl +Ġ] ; +Ġcongest ion +m k +ĠWhere as +Ġ19 38 +ur rencies +er ion +Ġbo ast +ĠPat ients +Ġch ap +ĠB D +real DonaldTrump +Ġexam ines +h ov +Ġstart ling +ĠBab ylon +w id +om ew +br ance +ĠOd yssey +w ig +Ġtor ch +ĠV ox +ĠMo z +ĠT roll +ĠAn s +Similar ly +ĠF ul +00 6 +Un less +ĠAl one +st ead +ĠPub lisher +r ights +t u +ĠDoes n +Ġprofession ally +Ġcl o +ic z +Ġste als +Ġ á +19 86 +Ġst urdy +ĠJoh ann +Ġmed als +Ġfil ings +ĠFr aser +d one +Ġmult inational +Ġf eder +Ġworth less +Ġp est +Yes terday +ank ind +Ġg ays +Ġb orne +ĠP OS +Pict ure +Ġpercent ages +25 1 +r ame +Ġpot ions +AM D +ĠLeban ese +Ġr ang +ĠL SU +ong s +Ġpen insula +ĠCl ause +AL K +oh a +ĠMac Book +Ġunanim ous +Ġl enders +Ġhang s +Ġfranch ises +ore rs +ĠUp dates +Ġisol ate +and ro +S oon +Ġdisrupt ive +ĠSur ve +Ġst itches +ĠSc orp +ĠDomin ion +Ġsupp lying +Ar g +Ġtur ret +ĠL uk +Ġbr ackets +* ) +ĠRevolution ary +ĠHon est +Ġnot icing +ĠSh annon +Ġafford ed +Ġth a +ĠJan et +! -- +ĠNare ndra +ĠPl ot +H ol +se ver +e enth +Ġobst ruction +Ġ10 24 +st aff +j as +or get +sc enes +l aughs +ĠF argo +cr ime +Ġorche str +Ġde let +ili ary +rie ved +Ġmilit ar +ĠGreen e +âĹ ı +ãģ ¦ +ĠGu ards +Ġunle ashed +ĠWe ber +Ġadjust able +Ġcal iber +Ġmotiv ations +Ġà ł +m Ah +ĠL anka +hand le +Ġp ent +ĠR av +ĠAng ular +ĠK au +umb ing +Ġphil anthrop +Ġde hyd +Ġtox icity +e er +ĠY ORK +w itz +å ¼ +ĠI E +commun ity +ĠA H +Ġret ali +Ġmass ively +ĠDani els +ĠD EL +Ġcar cin +Ur l +Ġrout ing +ĠNPC s +ĠR AF +ry ce +Ġwa ived +ĠGu atem +Every body +Ġco venant +Ġ17 3 +Ġrelax ing +Ġqu art +al most +Ġguard ed +ĠSold iers +ĠPL AY +Ġout going +L AND +Ġre write +ĠM OV +ĠIm per +ĠS olution +Ġphenomen al +Ġl ongevity +Ġimp at +ĠN issan +ir ie +Ġod or +ĠZ ar +ok s +Ġmilit ias +ĠSP EC +Ġtoler ated +ars er +ĠBrad ford ++ , +Ġsur real +s f +Can adian +Ġresemb lance +Ġcarbohyd rate +VI EW +Ġaccess ory +me al +larg est +ieg el +Some one +Ġtoug hest +os o +Ġfun nel +Ġcondemn ation +lu ent +Ġw ired +ĠSun set +Jes us +ĠP ST +ĠP ages +ĠTy coon +ĠP F +Ġselect ions +Ġ ठ+part isan +Ġhigh s +ĠR une +Ġcraft s +le ad +ĠParent s +Ġre claim +ek er +ĠAll ied +ae per +Ġlo oming +Ġbenefic iaries +ĠH ull +Stud ents +Jew ish +d j +Ġp act +tem plate +ĠOffic ials +ĠBay lor +Ġhe mp +Ġyouth s +ĠLevel s +ĠX iao +ĠC hes +Ġende avor +ĠRem oved +Ġhipp ocamp +H ell +ãĤ Ĭ +80 5 +Ġd inosaur +ĠWr ath +ĠIndones ian +Ġcalcul ator +ĠD ictionary +Ġ4 20 +ĠM AG +( _ +! 
, +t arians +Ġrestrict ing +rac use +Ġweek day +OU NT +Ġsh rugged +leg round +Ġb ald +ĠDo ctors +Ġt outed +ĠMax well +Ġ2 14 +Ġdiplom at +Ġrep ression +Ġconstitu ency +v ice +r anked +ĠNap oleon +g ang +ĠFore ver +t un +Ġbul b +ĠPD T +ĠC isco +V EN +Ġres umed +Ste ven +ĠManit oba +Ġfab ulous +ĠAg ents +19 84 +Ġam using +ĠMyster ies +Ġor thodox +fl oor +Ġquestion naire +Ġpenet rate +Ġfilm makers +ĠUn c +Ġst amped +Ġth irteen +Ġout field +Ġforward ed +Ġapp ra +Ġa ided +t ry +Ġunf ocused +ĠL iz +ĠWend y +ĠSc ene +Ch arg +Ġreject s +Ġleft ist +ĠProv idence +ĠBr id +reg n +Ġprophe cy +ĠL IVE +4 99 +Ġfor ge +ĠF ML +Ġintrins ic +ĠF rog +Ġw ont +ĠH olt +Ġfam ed +CL US +aeper nick +ĠH ate +ĠC ay +Ġregister ing +ort ality +rop y +ocaly ptic +a an +n av +Ġfasc ist +IF IED +Ġimpl icated +ĠRes ort +ĠChand ler +ĠBr ick +P in +ys c +Us age +ĠHel m +us ra +âĺħ âĺħ +ĠAb bas +Ġunanim ously +Ġke eper +Ġadd icted +?? ? +Ġhelm ets +Ġant ioxid +aps ed +80 8 +gi ene +Ġwa its +Ġmin ion +ra ved +ĠP orsche +Ġdream ing +Ġ17 1 +ĠC ain +Ġun for +ass o +ĠConfig uration +k un +hard t +Ġn ested +ĠL DS +L ES +Ġt ying +en os +Ġc ue +ĠMar qu +sk irts +Ġclick ed +Ġexp iration +ĠAccording ly +ĠW C +Ġbless ings +Ġaddict ive +ĠN arr +y x +ĠJagu ars +Ġrent s +ĠS iber +Ġt ipped +ous se +ĠFitz gerald +Ġhier arch +out ine +Ġwa velength +> . +ch id +ĠProcess ing +/ + +r anking +E asy +ĠConst ruct +Ġt et +ins ured +H UD +Ġqu oting +Ġcommun icated +in x +Ġin mate +Ġerect ed +ĠAbs olutely +ĠSure ly +Ġun im +ĠThr one +he id +Ġcl aws +Ġsuper star +ĠL enn +ĠWh is +U k +ab ol +Ġsk et +ĠN iet +Ġper ks +Ġaff inity +Ġopen ings +phas is +Ġdiscrim inate +T ip +v c +Ġgr inding +ĠJenn y +Ġast hma +hol es +ĠHom er +Ġreg isters +ĠGl ad +Ġcre ations +Ġlith ium +Ġappl ause +unt il +Just ice +ĠTur ks +Ġsc andals +Ġb ake +t ank +M ech +ĠMe ans +ĠM aid +Republic ans +is al +wind ows +ĠSant os +Ġveget ation +33 8 +t ri +Ġfl ux +ins ert +Ġclar ified +Ġmort g +ĠCh im +ĠT ort +Ġdiscl aim +met al +ĠAs ide +Ġindu ction +Ġinf l +Ġathe ists +amp h +Ġe ther +ĠV ital +ĠBu ilt +M ind +Ġweapon ry +S ET +Ġ18 6 +ad min +g am +cont ract +af a +Ġderiv atives +Ġsn acks +Ġch urn +E conom +Ġca pped +ĠUnder standing +ĠH ers +ĠI z +Ġd uct +I ENT +augh ty +Ġâľ Ķ +ĠN P +Ġsa iling +In itialized +Ġt ed +Ġreact ors +ĠL omb +Ġcho ke +ĠW orm +Ġadm iration +Ġsw ung +ens ibly +Ġr ash +ĠGo als +ĠImport ant +Sh ot +ĠR as +Ġtrain ers +ĠB un +Work ing +Ġhar med +ĠPand ora +ĠL TE +Ġmush room +ĠCH AR +ĠF ee +ĠM oy +B orn +ol iberal +ĠMart ial +Ġgentle men +Ġling ering +Offic ial +Ġgra ffiti +ĠN ames +D er +Ġqu int +ist rate +aze era +ĠNOT ICE +ĠFlore nce +Ġpay able +Ġdep icts +ĠSpe cies +He art +âĶĢâĶĢâĶĢâĶĢ âĶĢâĶĢâĶĢâĶĢ +Ġencl osed +Incre ases +D aily +ĠL is +Ġenact ment +ĠB acon +ĠSt eele +dem and +Ġ18 3 +Ġmouth s +Ġstr anded +Ġenhance ment +01 1 +ĠWh ats +Ġhe aled +en y +ĠR ab +Ġ3 40 +ĠLab yrinth +ro ach +ĠY osh +ĠCl ippers +Ġconcert s +Intern et +35 5 +Ġstick ers +Ġter med +ĠAx e +Ġgrand parents +Fr ance +ĠCl im +ĠU h +ul ic +Ġthr ill +cent ric +ĠOver view +ĠCond uct +Ġsubstant ive +Ġ18 2 +m ur +Ġstr ay +ĠCo ff +Ġrep etitive +ĠFor gotten +Ġqual ification +ew itness +ĠZ imbabwe +Ġsim ulated +ĠJ D +25 3 +ĠW are +Ġun sc +T imes +Ġsum mons +Ġdis connected +Ġ18 4 +ci us +ĠGu jar +od ka +Ġer ase +ĠTob acco +elect ed +Ġun cont +ĠShe pard +ĠL amp +Ġalert ed +Ġoper ative +arn a +u int +Ġneglig ence +ac ements +Ġsup ra +Ġprev ail +ĠSh ark +Ġbel ts +ãģ « +Ġt ighter +Engine ers +Ġin active +Ġexp onent +ĠWill ie +a ples +Ġhe ir +ĠH its +ian n +ĠS ays +Ġcurrent s +ĠBeng al +Ġar ist +B uffer +Ġbree 
ze +ĠWes ley +Col a +Ġpron oun +Ġde ed +ĠK ling +Ġof t +Ġinf lict +Ġpun ishing +Ġn m +ik u +OD UCT +01 4 +Ġsubsid y +ĠDE A +ĠHer bert +ĠJ al +B ank +Ġdef erred +Ġship ment +B ott +Ġal le +b earing +HT ML +Off line +Ġ2 13 +Ġscroll ing +Ġsc anned +ĠLib yan +ĠT OP +ch rom +d t +col umn +Psy NetMessage +Z ero +Ġtor so +0 50 +âķ IJ +Ġimp erson +ĠSchw artz +ud ic +Ġpiss ed +ĠS app +25 7 +ĠIS Ps +og l +Ġsuper vised +Ġad olescent +Ġatt ained +ĠDel ivery +ĠB unny +Ġ19 37 +Ġmini ature +Ġo s +Ġ3 70 +60 8 +ĠMour inho +Ġinn ate +Ġtem po +ĠN M +ĠFall en +00 9 +Ġprov ocative +Stream er +ĠBened ict +ĠBol she +Ġt urtle +ĠPC B +ĠEqu al +Direct or +ĠR end +Ġflu ids +Author ities +Ġcous ins +requ ency +ĠNeigh bor +s ets +sh ared +Char les +pass word +Ġg ears +Ġ2 11 +ĠHard ware +ri ka +Ġup stream +H om +Ġdisproportion ately +iv ities +Ġund efined +Ġelect rons +Ġcommem or +Event ually +Ġ> < +Ġir responsible +2 18 +ĠRe leased +ĠO VER +ĠI GN +ĠB read +st ellar +ĠS age +tt ed +dam age +ed ition +ĠPre c +Ġl ime +Ġconf inement +Ġcal orie +we apon +Ġdiff ering +ĠS ina +m ys +am d +Ġintric ate +k k +ĠP AT +ã o +st ones +lin ks +Ġr anch +Sem itic +Ġdifferent iate +ĠS inger +occup ied +Ġfort ress +c md +Ġinter ception +ĠAnk ara +Ġre pt +ĠSol itaire +Ġrem ake +p red +Ġd ared +aut ions +ĠB ACK +Run ning +Ġdebug ging +Ġgraph s +3 99 +ĠNig el +Ġb un +Ġpill ow +Ġprog ressed +fashion ed +Ġob edience +ER N +Ġrehe ars +C ell +t l +S her +Ġher ald +ĠPay ment +ĠC ory +ĠDe pt +Ġrep ent +ĠWe ak +uck land +Ġple asing +Ġshort ages +Ġjur ors +ĠK ab +q qa +Ant i +Ġw ow +ĠRC MP +Ġt sun +ĠS ic +Ġcomp rises +Ġsp ies +Ġprec inct +n u +Ġur ges +Ġtim ed +Ġstrip es +ĠB oots +Ġy en +Adv anced +Ġdisc rete +ĠArch angel +employ ment +D iff +Ġmon uments +Ġ20 9 +work er +Ġ19 6 +ĠI g +utter stock +T PS +J ac +Ġhomeless ness +Ġcomment ator +Ġrac ially +f ing +se ed +E le +ell ation +Ġeth anol +Ġpar ish +ĠD ong +ĠAw akening +Ġdev iation +ĠB earing +ĠTsu k +Ġrec ess +Ġl ymph +ĠCann abis +å ľ +ĠNEW S +Ġd ra +ĠStef an +ĠWr ong +ĠS AM +Ġloose ly +Ġinterpre ter +ĠPl ain +Go vernment +Ġbigot ry +Ġgren ades +ave z +pict ured +Ġmand ated +ĠMon k +ĠPed ro +Ġl ava +27 4 +Ġcyn ical +ĠScroll s +l ocks +M p +Ġcon gregation +orn ings +ph il +ĠI bid +Ġf erv +Ġdisapp earing +Ġarrog ant +sy n +ĠMa ver +ĠSu it +24 1 +Ġab bre +ack ers +P a +ĠY el +Whe never +Ġ23 5 +ĠV ine +ĠAn at +Ġext inct +LE T +Ġexecut able +V ERS +ox ide +D NA +ĠP rel +Ġresent ment +Ġcompr ise +ĠAv iv +Ġinter ceptions +Ġprol ific +IN A +ĠEr in +though t +2 19 +ĠPsychiat ry +un ky +chem ist +H o +ĠMcC oy +Ġbr icks +L os +ri ly +ĠUS SR +Ġr ud +Ġl aud +ĠW ise +ĠEmer ald +Ġrev ived +Ġdam ned +ĠRep air +id em +ct ica +Ġpatri arch +ĠN urs +me g +Ġcheap est +re ements +empt y +ĠCele br +Ġdepri vation +ch anted +ĠTh umbnails +E nergy +ĠEth an +ĠQ ing +Ġopp oses +W IND +v ik +ĠM au +ĠS UB +66 7 +G RE +ĠVol unte +nt on +C ook +å IJ +es que +Ġplum met +Ġsu ing +Ġpron ounce +Ġresist ing +ĠF ishing +ĠTri als +Ġy ell +Ġ3 10 +Ġin duct +Ġpersonal ized +oft en +R eb +EM BER +Ġview point +Ġexist ential +() ) +rem ove +MENT S +l asses +Ġev apor +Ġa isle +met a +Ġreflect ive +Ġentit lement +Ġdev ised +mus ic +asc ade +Ġwind ing +off set +Ġaccess ibility +ke red +Bet ter +ĠJohn ston +th inking +S now +ĠCroat ia +ĠAt omic +27 1 +34 8 +Ġtext book +ĠSix th +Ġ اÙĦ +Ġsl ider +ĠBur ger +b ol +S ync +Ġgrand children +Ġc erv ++ ) +Ġe ternity +Ġtweet ing +Ġspec ulative +Ġpiv otal +ĠW P +ĠT ER +ynam ic +Ġu pl +ĠC ats +per haps +Ġclass mates +Ġblat ant +' - +Ġl akh +ant ine +ĠB org +i om +/ ( +ĠAthlet ic +Ġs ar +OT A +ĠHoff man 
+Never theless +Ġad orable +Ġspawn ed +Ass ociated +ĠDom estic +Ġimpl ant +ĠLux em +ĠK ens +Ġp umps +ĠS AT +Att ributes +50 9 +av our +Ġcentral ized +ĠT N +Ġfresh ly +ĠA chieve +Ġouts iders +her ty +ĠRe e +ĠT owers +ĠD art +ak able +Ġm p +ĠHeaven ly +Ġr ipe +ĠCarol ine +ry an +Ġclass ics +Ġret iring +Ġ2 28 +Ġa h +Ġdeal ings +Ġpunch ing +ĠChap man +O ptions +max well +vol ume +Ġst al +Ġex ported +ĠQu ite +Ġnumer ical +B urn +F act +ĠKey stone +Ġtrend ing +Ġalter ing +ĠAfric ans +47 8 +ĠM N +ĠKn ock +Ġtempt ation +Ġprest ige +Over view +ĠTrad itional +ĠBah rain +Priv ate +ĠH OU +Ġbar r +ĠT at +C ube +US D +ĠGrand e +ĠG at +ĠFl o +Ġres ides +Ġind ec +vol ent +Ġperpet ual +ub es +Ġworld view +ĠQuant um +Ġfil tered +Ġen su +orget own +ERS ON +ĠM ild +37 9 +OT T +à ¥ +Ġvit amins +Ġrib bon +Ġsincere ly +ĠH in +Ġeight een +Ġcontradict ory +Ġgl aring +Ġexpect ancy +Ġcons pir +Ġmon strous +Ġ3 80 +re ci +Ġhand ic +Ġpump ed +Ġindic ative +Ġr app +Ġav ail +ĠLEG O +ĠMar ijuana +19 85 +ert on +Ġtwent ieth +################ ################ +ĠSw amp +Ġval uation +Ġaffili ates +adjust ed +ĠFac ility +26 2 +Ġenz ymes +itud inal +Ġimp rint +S ite +Ġinstall er +ĠT RA +m ology +lin ear +ĠCollect ive +ig ating +ĠT oken +Ġspec ulated +K N +ĠC ly +or ity +Ġdef er +Ġinspect ors +appro ved +R M +ĠSun s +Ġinform ing +ĠSy racuse +ib li +7 65 +Ġgl ove +Ġauthor ize +âĢ¦âĢ¦âĢ¦âĢ¦ âĢ¦âĢ¦âĢ¦âĢ¦ +ĠCru ise +Ġcontract ing +she ll +IF E +ĠJew el +p ract +ĠPhot oshop +ĠKnow ing +h arm +Ġattract ions +ad an +et us +01 8 +w agen +Al t +Ġmultip ly +Ġequ ilibrium +: { +ĠF ighters +ĠEd gar +Ġfour teen +Go vern +Ġmis use +Ġab using +Ġancest ry +ram er +64 4 +Ġwor ms +Ġthick er +ĠComb ine +Ġpeas ants +Ġv ind +Ġcon quest +Ġm ocked +Ġc innamon +ĠC ald +ĠGall up +Ġavoid ance +Ġincarn ation +ĠStr at +Ġt asted +ent a +ĠN eal +p ared +Ġtermin ology +ject ion +Scient ists +ĠIN S +ĠDe e +Ġdirect ories +R oad +ĠSh ap +br ight +ĠDirect ors +ĠCol umn +Ġb ob +Ġprefer ably +Ġgl itch +f urt +Ġe g +id is +C BC +Ġsur rendered +Ġtest ament +33 6 +ug gest +ĠN il +an other +Ġpat hetic +ĠDon na +Ġ2 18 +ĠA very +Ġwhis key +Ġf ixture +ĠCon quest +Ġbet s +O cc +ĠLe icester +] ." 
+Ġ) ); +Ġfl ashes +45 6 +Ġmask ed +ge bra +Ġcomput ed +che l +aud er +Ġdefe ats +ĠLiber ation +ĠOs ama +ĠV ive +Ch anges +Ch annel +Ġtar iffs +Ġm age +ĠS ax +Ġinadvert ently +ĠC RE +ĠRe aper +ink y +gr ading +Ġstere otyp +Ġcur l +ĠF ANT +Ġfram eworks +M om +ĠAn ch +Ġflav our +car bon +Ġperm itting +let cher +ĠMo zilla +ĠPark ing +ĠCh amp +Sc roll +Ġmurd erer +Ġrest ed +Ġow es +ĠP oss +AD D +IF F +res olution +ĠMin ing +Ġcompar ative +D im +Ġneighbour ing +ĠA ST +ĠT oxic +Ġbi ases +Ġgun fire +ur ous +ĠMom ent +19 83 +Ġper vasive +tt p +ĠNorm ally +r ir +S arah +ĠAlb any +Ġun sett +ĠS MS +ip ers +l ayer +ĠWh ites +up le +Ġtur bo +ĠLe eds +Ġthat s +ĠMin er +M ER +ĠRe ign +Ġper me +ĠBl itz +Ġ19 34 +Ġintimid ating +t ube +Ġecc entric +ab olic +box es +ĠAssoci ates +v otes +Ġsim ulate +um bo +aster y +Ġship ments +FF FF +an th +Ġseason ed +Ġexperiment ation +âĸ ł +law s +Me et +idd les +ant ics +R ating +IS IS +h ift +Ġfront s +b uf +01 7 +Ġun att +ĠD il +le ases +ĠGard ens +77 7 +t ouch +ve ll +45 8 +Ġ= ==== +s aving +Ġer osion +ĠQu in +Ġearn s +Ġaccomplish ment +ĠWe i +Ġ< [ +____ _ +Ġir rig +ĠT eddy +Ġconqu ered +ĠArm ored +Ġassert s +Ġmanip ulating +r é +Ġtranscript s +G allery +Ġplot ting +Ne il +Ġbetray al +load er +ĠS ul +Ġdispl acement +Ġroy alty +ĠW I +he it +ĠDev ices +alle l +Ġmunicipal ities +Ġcan al +St ars +ĠU AE +Ġ" âĢ¦ +ĠC U +ab ove +Ġreson ance +ĠguiActive Un +add ed +ĠBra ves +ĠI bn +Ġhere by +ĠB RE +Ġshare holder +ĠH ir +ĠJ i +Ġstrange ly +Ġadm ired +Ġpl ight +Ġb achelor +ĠP ole +cipl inary +T ony +ĠArmen ian +Ġun man +ĠZion ist +St age +isco ver +Ġautom otive +Ġs idelines +Ġsl ick +ĠRena issance +ĠF UN +Im ages +ĠH aj +Ġp ing +Ġshort cut +ĠBl vd +ĠLook s +Ġbur sts +Ġcl amp +Ġm ish +Ġsort ing +Ġpatri ot +Ġcorrect ness +ĠScand inav +ĠCaval iers +p ython +az ar +Ġ3 75 +ĠJa une +40 9 +Ġdetrim ental +Ġstab bing +Ġpoison ed +Ġf ountain +oc ent +or st +ĠMar i +Ġr ains +ĠO vers +ĠInst itution +ud get +AM Y +t ale +ĠK R +ĠPr ices +Ġhead aches +Ġlands l +ĠA ura +Bon us +ĠZ hao +ĠH ip +Ġhop s +ĠKurd istan +Ġexplo iting +ry n +Ġhypocr isy +op ening +Ġgun shot +Ġw ed +inter stitial +Inter stitial +Ġam en +Bre aking +Ġmarket ed +W ire +ĠC rowd +Contin ue +ĠK nown +ĠEffect ive +ore an +iz ons +Jose ph +Ġescal ation +us ername +Ġcur tain +AT ES +ĠP AR +ĠM iy +Ġcounter fe +l ene +Ġcont enders +d aily +ĠAs c +ĠPhill ip +most ly +Ġfil ename +he ne +Ġresemb ling +Ġst aging +ĠCh loe +Ġw iring +H on +ĠRen ew +ott age +ĠHy brid +m uch +Ġstro kes +Ġpolicy makers +AP TER +ĠArk ham +pl ot +Ġassist ants +Ġde port +ĠSe ga +Ġinflu enza +ĠC ursed +ĠK obe +Ġskin ny +Prov ider +ĠR ip +Ġincrement al +product s +B F +Ġd ome +ĠC redits +Ġlos ers +int s +ĠBet ty +ĠTal ent +ĠD AM +L v +E ss +Ġd ens +tem p +J udge +od ic +Ġ' ( +UR ES +ets k +V O +Ġretrie ved +Ġarchitect s +Ù ĩ +Ġeth ic +ĠSecond ary +st ocks +ad ia +Ġ3 25 +ĠOp inion +Ġsimultane ous +Ġd izz +ul p +Ġsmugg ling +ipp ery +R andom +f acing +ĠD as +Ġstock p +Ġdiscl osures +po inter +Ġcor al +ĠSe lection +ĠP ike +ival ent +Ġruth less +ĠR im +Ġensu ing +ĠExper iment +Ġcongress man +Ġbelie ver +Ġun specified +ĠM ord +Ġknowledge able +ĠV ERY +T X +Ġstra ps +Ġtur f +apesh ifter +Ġmar ital +Ġfl ock +ãģ Ĩ +26 3 +AM ES +ĠOpp osition +Ġtre asures +ĠG OD +Ġmodel ed +ĠWOR LD +Ġ( [ +ĠUs age +H F +Ġ$ ( +uss ed +Ġpione er +E ight +par se +b read +rit z +ĠMir anda +ĠK ant +++ ) +ore n +Ġprov oked +Ġbre eds +ĠIn cludes +ĠPast ebin +ĠFl ip +J ava +Ġbr ink +Ġrum ored +Ġun seen +Ġgar nered +ĠDef in +al ted +Ġtatt oos +Ġhes itation +is itions +ĠWe aver +ĠReport ing +Ġtherap 
ies +Ġconsult ants +Ġresid ual +ĠMal i +ĠRom a +i ago +ĠRes idents +ub i +Ġremed ies +Ġadapt ive +ĠAl ive +ĠBar cl +Ġwal lets +c rypt +etermin ation +ĠPel osi +Ġsl ipping +oton in +Ġall iances +pat rick +ir is +Ġor th +ĠPer kins +ĠDe V +ĠG ets +Ġdry ing +ge e +fore st +ĠFor get +ore m +33 9 +Ġvague ly +ĠD ion +ĠP orn +ĠH OW +Ġp neum +Ġrub ble +ĠT aste +enc ia +ĠG el +Ġd st +Ġ24 5 +ĠMoroc co +inf lamm +ĠTw ins +Ġb ots +d aughter +ĠB alk +Ġbre thren +Ġlog os +Ġgo bl +f ps +Ġsub division +Ġp awn +Ġsquee zed +Ġmor ale +ĠD W +' " +Ġkn ot +ook y +Ġdiv isive +Ġboost ed +ch y +ãĥ IJ +if act +Ġnewcom ers +ĠWrest ling +Ġsc outs +w olves +R at +Ġnin eteenth +ĠOs borne +St ats +Ġem powered +Ġpsych opath +ĠO EM +ugg age +ĠP K +ĠMoh ammad +P ak +Ġanarch ists +ĠExt ract +est hes +ĠStock holm +l oo +ĠG raph +Ġdeploy ing +ĠStr anger +ĠM old +Ġstaff er +Ġdiscount ed +uck le +ple ase +ĠLand ing +ÃŃ a +Ġ19 3 +Ġan te +Ġrep etition +Ġ+ /- +Ġpar ody +Ġlive ly +AA A +ĠHor us +Ġp its +ind ers +L OC +ĠVen ice +40 6 +ĠDis cover +â Ĩ +ellect ual +Ġp ens +Ġey el +ig uous +Im pl +Ġj oking +Ġinv al +ĠBel fast +Ġcredit ors +ĠSky walker +ov sky +Ġcease fire +Ġse als +is oft +) ). +ĠFel ix +IT S +Ġt resp +ĠBlock chain +ew are +ĠSch war +en ne +mount ed +ĠBe acon +les h +Ġimmense ly +Ġche ering +Em ploy +sc ene +ish ly +atche wan +ĠNic olas +Ġdr ained +ĠEx it +ĠAz erb +j un +Ġflo ated +u ania +De ep +Ġsuper v +Ġmyst ical +ĠD ollar +ĠApost le +ĠR EL +ĠProv ided +ĠB ucks +ãĥ ´ +cut ting +Ġenhance ments +ĠPengu ins +ĠIsa iah +Ġj erk +ĠW yn +Ġst alled +Ġcryptoc urrencies +ĠR oland +sing le +Ġl umin +ĠF ellow +ĠCap acity +ĠKaz akh +W N +Ġfin anced +38 9 +Ġt id +Ġcoll usion +ĠMy r +î Ģ +Sen ator +Ġped iatric +Ġneat ly +Ġsandwic hes +ĠArchitect ure +Ġt ucked +Ġbalcon y +Ġearthqu akes +qu ire +F uture +Ġhe fty +é Ĺ +Ġspecial izes +Ġstress es +Ġs ender +Ġmisunder standing +Ġep ile +Ġprov oke +ĠCol ors +Ġdis may +uk o +[ _ +58 6 +ne utral +Ġdon ating +ĠRand all +Mult i +Ġconvenient ly +ĠS ung +ĠC oca +Ġt ents +ĠAc celer +Ġpart nered +27 2 +ir ming +ĠB AS +s ometimes +Ġobject ed +ub ric +p osed +LC S +gr ass +Ġattribut able +V IS +Israel i +Ġrepe ats +ĠR M +v ag +ut a +in ous +Ġin ert +ĠMig uel +æ Ń +ĠHawai ian +B oard +Ġart ific +ĠAzerb ai +as io +ĠR ent +A IN +Ġappl iances +Ġnational ity +Ġass hole +ĠN eb +Ġnot ch +h ani +ĠBr ide +Av ailability +Ġintercept ed +Ġcontin ental +Ġsw elling +ĠPers pect +b ies +. < +ith metic +ĠL ara +Ġtempt ing +add r +Ġoversee ing +cl ad +ĠD V +ĠGing rich +Ġm un +ĠApp ropri +Ġalter ations +ĠPat reon +Ġha voc +Ġdiscipl ines +Ġnotor iously +aku ya +ier i +? ). 
+ĠW ent +Ġsil icon +Ġtre mb +Cont ainer +K nown +Ġmort ar +est e +ick a +Ar thur +ĠPre viously +ĠMart y +Ġsp arse +g ins +Ġin ward +ĠParticip ant +C opy +ĠM isc +Ġantib iotic +ĠRet ro +Ġel usive +Ġass ail +ĠBatt alion +ĠB ought +Ġdimin ish +ĠEuro pa +s ession +ĠDanger ous +ies el +Ġdisbel ief +Ġbl asts +ext reme +ĠBoy d +ĠProject s +ĠGu ys +Ġunder gone +Ġgr ill +ĠDw ight +Ġ19 7 +US ER +Ġfiles ystem +Ġcl ocks +T aylor +Ġwra pper +Ġfold ing +ous and +ĠPhilipp ine +ATION AL +ĠPer th +Ġas hes +Ġaccum ulate +ĠGate way +Sh op +orks hire +H an +ĠBar rel +ĠLe h +ĠX V +Ġwh im +Ġrep o +ĠC G +ĠM am +Ġincorpor ating +Ġbail out +Ġlingu istic +Ġdis integ +C LE +Ġcinem atic +ĠF iber +S yn +il ion +ĠCom pos +c hens +Ġne oc +Ġbo iled +F INE +on o +un cle +ik en +ĠB M +Î ¹ +Ġreceipt s +Ġdisp osed +ĠTh irty +ĠR ough +ĠA BS +Ġnot withstanding +oll en +# $ +Ġunrel iable +Ġbl oom +Ġmedi ocre +Ġtr am +ĠTas man +Ġsh akes +Ġmanifest o +ĠM W +Ġsatisf actory +Ġsh ores +Ġcomput ation +Ġassert ions +orm ons +ar ag +ab it +Dem ocrats +ĠL oot +ĠVol ks +ha ired +Ġgrav itational +S ing +ĠM iz +Ġthro ttle +Ġtyr anny +ĠView s +Ġrob ber +ĠMinor ity +Ġsh rine +sc ope +pur pose +Ġnucle us +our cing +ĠUS DA +ĠD HS +w ra +ĠBow ie +Sc ale +ĠB EL +x i +I ter +Ġ( ), +w right +Ġsail ors +ous ed +NAS A +ĠPro of +ĠMin eral +t oken +ĠF D +R ew +Ġe ll +6 30 +Ġchance llor +ĠG os +Ġamount ed +ĠRec re +ome z +ĠOpt im +ĠOl ive +Ġtrack er +ow ler +ĠUn ique +R oot +Ġmar itime +ĠQur an +ĠAd apt +Ġecosystem s +ĠRe peat +ĠS oy +ĠI MP +Ġgrad uating +and em +P ur +ĠRes et +ĠTr ick +ĠPh illy +ĠT ue +ĠMalays ian +Ġclim ax +Ġb ury +Ġcons pic +ĠSouth ampton +ĠFl owers +Ġesc orted +ĠEduc ational +ĠI RC +Ġbrut ally +e ating +Ġpill ar +ĠS ang +ĠJ ude +ar ling +ĠAm nesty +Ġrem inding +ĠAdminist rative +hes da +Ġfl ashed +ĠP BS +per ate +fe ature +Ġsw ipe +Ġgra ves +oult ry +26 1 +bre aks +ĠGu er +Ġsh rimp +ĠV oting +qu ist +Ġanaly tical +Ġtables poons +ĠS OU +Ġresear ched +Ġdisrupt ed +Ġj our +Ġrepl ica +Ġcart oons +b ians +} ) +c opy +G ot +ou ched +P UT +Ġsw arm +not ations +s aid +Ġreb uilt +Ġcollabor ate +Ġr aging +Ġn ar +Ġdem ographics +ĠD DR +Ġdist rust +oss ier +ĠK ro +Ġpump kin +Ġreg rets +Ġfatal ities +ĠL ens +ĠO le +p d +Ġpupp et +ĠOut look +ĠSt am +O l +F air +U U +Ġre written +Ä ± +Ġfasc inated +Ġve ctors +Ġtrib unal +u ay +ĠM ats +ĠCo ins +[ [ +Ġ18 1 +Ġrend ers +ĠK aepernick +Ġesp ionage +Ġsum m +Ġd itch +Acc ount +Ġspread sheet +Ġmut ant +p ast +40 7 +Ġd ye +Ġinit iation +Ġ4 000 +Ġpunish able +Ġth inner +ĠKh al +Ġinter medi +D un +ĠGoth am +Ġeager ly +Ġvag inal +p owers +V W +ĠWATCH ED +Ġpred ator +ams ung +Ġdispar ity +Ġ[ * +Ġam ph +Ġout skirts +ĠSpir its +Ġskelet al +Ð » +ĠR ear +Ġissu ance +ĠLog ic +re leased +Z Z +ĠB ound +Ent ry +Ġex its +is ol +ĠFound er +Ġw re +ĠGreen land +ĠM MO +t aker +IN C +ãģ ¾ +Ġhour ly +hen ko +Ġfantas ies +Ġdis ob +Ġdemol ition +ãĥ ĭ +Ġen listed +rat ulations +Ġmis guided +Ġens ured +Ġdiscour aged +m ort +Ġfl ank +Ġc ess +Ġreact s +ĠS ere +s ensitive +ĠSer pent +ass ad +Ġ24 7 +Ġcalm ly +b usters +Ġble ed +ĠSt ro +Ġamuse ment +ĠAntar ctica +Ġs cept +ĠG aw +a q +ason ic +Ġsp rawling +n ative +atur ated +ĠBattle field +IV ERS +E B +ĠG ems +ĠNorth western +ĠFil ms +ĠAut omatic +Ġappre hend +ãģ ¨ +Ġgui Name +Ġback end +Ġevid enced +ge ant +01 2 +ĠS iege +Ġexternal To +Ġunfocused Range +ĠguiActiveUn focused +Ġgui Icon +ĠexternalTo EVA +ĠexternalToEVA Only +F ri +ch ard +en aries +Ġchief s +Ġc f +ĠH UD +Ġcorro bor +Ġd B +ĠT aken +ĠPat ricia +ra il +ĠCh arm +ĠLiber tarian +rie ve +Person al +ĠO UR +ger ies +Ġdump ing 
+Ġneurolog ical +it imate +ĠClint ons +raft ed +ĠM olly +Ġtermin als +reg ister +Ġfl are +Ġenc oded +Ġautop sy +p el +m achine +Ġexempt ions +ĠRoy als +d istance +Ġdraft s +Ġl ame +ĠC unning +Ġsp ouses +ĠMark ets +ĠCar rier +Ġimp lying +ĠY ak +s id +Ġl oser +Ġvigil ant +Ġimpe achment +Ġaug mented +ĠEmploy ees +Ġunint ended +tern ally +ĠW att +Ġrecogn izable +ess im +æ Ŀ +Ġco ated +r ha +Ġlie utenant +ĠLegisl ation +pub lished +44 4 +01 3 +Ġide ally +ĠPass word +Ġsimpl ify +ĠMet a +ĠM RI +Ġple ading +organ ized +hand ler +Ġun ravel +cor rect +Ġ icy +Ġparan oid +Ġpass er +Ġinspect ions +of er +ĠHealth care +28 3 +ĠBr ut +iol a +for ge +ĠMed ieval +MS N +ie vers +ĠProgram ming +å ī +Ġ2 23 +m u +ĠC LE +ug a +Ġsho ppers +Ġinform ative +ĠPl ans +Ġsupplement ation +ĠT ests +ty ard +ocy tes +ĠVeg a +ĠGujar at +erman ent +Ex cept +ĠL OT +all a +ĠC umm +ĠO sw +Ġven om +ĠDeb t +ĠD OWN +Ġreun ion +Ġm uc +ĠRel ief +Ġge op +ĠðŁ ĺ +al ogue +An th +ech o +Ġcor ros +Ġrepl ication +ĠBl azing +ĠD aughter +Ġinf lic +ĠLind sey +Ù Ī +28 4 +Ex it +Ġgl oom +TA IN +Ġundermin ing +Ġadv ising +h idden +Ġover flow +Ġg or +urd ue +Ġe choes +enh agen +Ġimp uls +d rug +c ash +Ġas ync +Ġmir ac +at ts +p unk +Ġpiv ot +ĠLegisl ative +Ġblog gers +ĠCl aw +s burg +d yl +ĠRecomm end +Ġver te +Ġprohib iting +ĠPant her +Jon athan +Ġo min +Ġhate ful +28 1 +ĠOr che +ĠMurd och +down s +Ġas ymm +G ER +Al ways +Ġinform s +ĠW M +ĠP ony +ĠApp endix +ĠAr lington +J am +Ġmedic inal +ĠS lam +IT IES +Ġre aff +ĠR i +F G +S pring +b ool +Ġthigh s +Ġmark ings +ĠRa qqa +ĠL ak +p oll +ts ky +ĠMort y +ĠDef inition +Ġdeb unk +end ered +ĠLe one +a vers +Ġmortg ages +App arently +N ic +ha us +ĠTh ousands +au ld +Ġm ash +sh oot +Ġdi arr +Ġconscious ly +H ero +e as +ĠN aturally +ĠDestroy er +Ġdash board +serv ices +R og +Ġmillenn ials +Ġinv ade +- ( +Ġcomm issions +ĠA uckland +Ġbroadcast s +Ġfront al +Ġcr ank +ĠHist oric +Ġrum ours +CT V +Ġster il +Ġboost er +rock et +ãĤ ¼ +ut sche +ĠP I +Ġ2 33 +ĠProdu cer +ĠAnaly tics +Ġinval uable +Ġunint ention +ĠC Y +Ġscrut in +Ġg igg +Ġeng ulf +Ġprolet ariat +Ġh acks +ĠH ew +ar ak +ĠSl ime +ield ing +ag her +ĠEll iot +Ġtele com +Ġ2 19 +ult an +ĠAr bor +ĠSc outs +B an +Ġlifes pan +Ġbl asp +38 8 +Ġjud iciary +ĠContin ental +ask ing +Mc C +L ED +Ġbag gage +ĠSorce rer +Ġrem nants +ĠGriff ith +ets u +ĠSub aru +ĠPerson ality +des igned +ush ima +agn ar +Ġrec oil +Ġpass ions +\ ": +Ġte e +Ġabol ition +ĠCreat ing +j ac +Ġ19 4 +01 9 +Ġpill ars +ric hed +/ " +t k +Ġlive lihood +Ġro asted +ah on +ĠH utch +ass ert +Ġdivid end +Ġkn it +Ġd aunting +Ġdisturb ance +Ġsh ale +Ġcultiv ated +Ġrefriger ator +L B +ĠN ET +Ġcommercial s +Ġthink ers +45 5 +Ġch op +B road +Ġsuspic ions +Ġtag ged +l ifting +Ġsty lish +ĠShield s +Short ly +Ġt ails +A uth +ST E +ĠG AME +Ġse ism +ĠK is +olog ne +Ġcow ork +Ġforc ibly +Ġthy roid +ĠP B +AN E +mar ried +h orse +Ġpoly mer +ĠCh al +od or +DE BUG +ĠCon text +Ġbl iss +Ġpin point +ĠMat hemat +leg ram +ĠWeek end +Ġlab elled +Ġb art +it les +Ġest rogen +âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ +" ' +Ġvis ibly +Ġouts ider +aid a +Are a +Ġdisse min +Ġdish onest +ĠCl osed +ĠBullet in +ĠRam sey +sw ord +ĠX I +our ced +S ame +34 6 +ĠRe pe +ĠK ou +c ake +em is +C ache +ĠMe aning +ĠEn light +onom y +Ġmanifest ation +sw orth +J ay +Ġch ore +ö r +D ream +Ġsanction ed +Ġcult urally +ĠA ra +N av +Ġthe ological +Ġstr ut +ĠV O +ĠHand book +Ġconstruct ing +Ġ ¶ +ĠBenef its +ĠPsych ological +s ac +å ¸ +p olicy +ĠMat ters +ĠReport ed +ĠBy te +Ġvit ro +ĠM aiden +Ġl am +ĠJenn ings +Ġgar ment +ĠRut gers +ĠStaff 
ord +ĠWell ington +Ġinter mitt +Ġn pm +Ġord eal +Ġplug ged +o oming +in ished +fram ework +Ġtim ber +Ġc ass +Ġ8 50 +il ess +ĠRed ux +7 68 +St re +Ġsurpass ed +w hel +Ġparalle ls +Ġve il +ĠG I +ĠR EST +Ġread iness +s ort +Ġmod ifying +ĠSl ate +ru ff +Ġmar ble +Ġinf rared +Ġaud itor +ĠFANT ASY +ĠP overty +ĠS PD +Ġ" ( +K y +RA Y +Ġexecut ions +ĠBever ly +ĠMarx ism +ĠBur st +ĠK ali +est ones +Clear ly +E ll +ãģ § +ĠProceed ings +T oken +IF IC +ñ a +Cent ral +ĠH aley +ĠD rama +Ġform ations +OR N +Book s +Ġdom inating +ĠFly ers +ĠCompan ion +Ġdiscipl ined +ĠYug oslav +ĠSpell s +Ġv engeance +Ġland lords +L en +ĠO gre +ano ia +Ġpier cing +Ġcon greg +Ġscore r +ob ia +Ġnic kel +ĠLear ns +Ġre jo +Ġmaster piece +Fl ash +Ġinhab ited +ĠOpen GL +ĠD ud +ĠI CO +Ġar ter +Ġpl ur +Ġmaster y +Ġlong standing +st ed +Ġw ines +Ġtelev ised +ĠSh rine +ĠBay ern +Ġâ ĵĺ +Ġencl osure +j ohn +Ġprophe ts +ĠRes urrection +ĠOrd ers +Ġun even +r als +Ġd wind +ĠL ah +ĠSl oven +37 8 +Ġins istence +aff le +ĠCl one +Ġhard ship +ĠCongress man +Ġple ad +Ġreview ers +Ġc ured +Ġ19 35 +as ley +f ake +ĠTh inking +yd ia +P ART +ĠD ota +o it +Ġwh ipped +Ġb ouncing +ĠHispan ics +com ings +Ġcann abin +ĠCh ambers +ĠZ ack +Option al +Ġco ats +Ġprow ess +ĠNort on +Ġplain ly +Ġfre ight +Ġinhib ition +Ġcl am +Ġ30 3 +ke f +ale igh +L uke +Ġpsych o +ator ium +M ED +Ġtreat ies +Ġind isc +Ġd c +OP S +Ġresil ient +ĠInter state +Ġsl ack +Ġmund ane +Ġestab lishes +35 9 +Ġstr ained +Ġn ond +S us +Ġcast e +ar ate +ie ving +Ġunfair ly +Ġpars er +on ial +urs ive +V ia +ĠOtt o +ĠAuthor ities +stro ke +K R +ĠMer cy +Ġfurn ished +Ġout set +Ġmet ic +19 82 +olith ic +ĠT ent +og ical +ĠA ircraft +Ġh ides +ĠBec ame +Ġeduc ators +re aching +Ġvol atility +Ġtodd ler +ĠNAS CAR +ĠTw elve +ĠHigh lights +Ġgra pe +Ġspl its +Ġpe asant +Ġre neg +ĠMS I +Tem p +st ars +Ġtre k +ĠHy de +b inding +Ġreal ism +Ġox ide +ĠH os +Ġmount s +Ġbit ing +Ġcollaps ing +Ġpost al +Ġmuse ums +Ġdet ached +Ġrespect ing +Ġmonop ol +Ġwork flow +ĠC ake +Tem plate +ĠOrgan isation +Ġpers istence +36 9 +C oming +B rad +Ġredund ant +ĠG TA +Ġb ending +Ġrev oked +Ġoff ending +Ġfram ing +Ġprint f +Comm un +mem bers +Out side +Ġconst rued +Ġc oded +F ORE +Ġch ast +Ch at +Ind ian +ĠY ard +? !" +ĠP orts +ĠX avier +ĠR ET +' ." +ĠBo at +iv ated +ich t +umer able +D s +ĠDun n +Ġcoff in +Ġsecure ly +ĠRapt ors +ĠB es +Install ation +Ġin ception +ĠHealth y +end ants +Ġpsych ologists +ĠShe ikh +c ultural +ĠBlack Berry +sh ift +F red +oc he +Ġc akes +ĠS EO +ĠG ian +ĠAs ians +og ging +e lement +Ġpund its +ĠV augh +ĠG avin +Ġh itter +Ġdrown ed +Ġch alk +ĠZ ika +Ġmeas les +80 2 +âĢ¦ .. 
+ĠAW S +] " +Ġdist ort +ĠM ast +Ġantib odies +ĠM ash +Mem ory +ĠUg anda +ĠPro b +Ġvom iting +ĠTurn s +Ġoccup ying +Ġev asion +ĠTher apy +Ġprom o +Ġelect r +Ġblue print +ĠD re +pr iced +ĠDep ot +Ġallev iate +ĠSom ali +m arg +n ine +Ġnostalg ia +ĠShe pherd +Ġcaval ry +Ġtor ped +ĠBlood y +x b +Ġs ank +Ġgo alt +report print +embed reportprint +clone embedreportprint +ĠIn itially +ĠF ischer +Ġnot eworthy +c ern +Ġin efficient +raw download +rawdownload cloneembedreportprint +c ation +ĠD ynasty +l ag +D ES +Ġdistinct ly +ĠEston ia +Ġopen ness +Ġg ossip +ru ck +W idth +ĠIb rahim +Ġpet roleum +Ġav atar +ĠH ed +ath a +ĠHog warts +Ġc aves +67 8 +Ġsafegu ard +ĠM og +iss on +ĠDur ham +sl aught +ĠGrad uate +Ġsub conscious +ĠEx cellent +ĠD um +---- - +Ġp iles +ĠW ORK +ĠG arn +ĠF ol +ĠAT M +Ġavoid s +ĠT ul +Ġble ak +EL Y +iv ist +light ly +P ers +ĠD ob +ĠL S +Ġins anity +Î µ +atal ie +En large +Ġtw ists +Ġfault y +Ġpir acy +Ġimp over +Ġrug ged +ĠF ashion +Ġs ands +' ? +sw ick +Ġn atives +Ġhe n +ĠNo ise +ãĥ Ĺ +Ġg reens +Ġfree zer +Ġd ynasty +ĠFather s +ĠNew ark +Ġarchae ological +Ġo t +ob ar +Ġblock ade +Ġall erg +L V +Ġdeb it +ĠR FC +ĠMil ton +ĠPress ure +Ġwill ingly +Ġdisproportion ate +Ġopp ressive +Ġdiamond s +Ġbelong ings +19 70 +Ġbell s +Ġimperial ism +Ġ2 27 +Ġexpl oding +ĠE clipse +Ġ19 19 +Ġr ant +Ġnom inations +34 7 +Ġpeace fully +ric a +ĠF UCK +Ġvib ration +mal ink +Ġro pes +ĠIv anka +ĠBrew ery +ĠBook er +ĠOw ens +go ers +Serv ices +ĠSn ape +Ġ19 1 +39 5 +Ġ2 99 +just ice +Ġb ri +Ġdisc s +Ġprom inently +Ġvul gar +Ġsk ipping +l ves +Ġtsun ami +37 4 +ĠU rug +ĠE id +rec ated +p hen +Ġfault s +ĠStart ed +9 50 +Ġp i +Ġdetect or +Ġbast ard +Ġvalid ated +Space Engineers +OUR CE +Ġ( ~ +Ġuns ur +Ġaff irmed +Ġfasc ism +Ġres olving +ĠCh avez +ĠC yn +Ġdet ract +L ost +Ġrig ged +Ġhom age +ĠBrun o +55 5 +ec a +Ġpress es +Ġhum our +Ġsp acing +Ġ' / +olk ien +C oun +OP ER +T re +S on +ĠCambod ia +ier re +m ong +o zy +Ġliquid ity +ĠSov iets +ĠFernand o +Ġ2 29 +Ġsl ug +ĠCatal an +elect ric +Ġsc enery +ĠH earth +Ġconst rained +Ġgoal ie +ĠGu idelines +ĠAm mo +ĠPear son +Ġtax ed +Ġfet us +Resp onse +ĠAlex is +th ia +G uy +Ġrecon struct +Ġextrem es +Ġconclud ing +ĠP eg +ook s +Ġded uctions +R ose +Ġground breaking +ĠT arg +ãĥ ģ +ĠRe ve +res ource +Ġmo ons +Ġelectrom agnetic +Ġamid st +ĠVik tor +N ESS +B ACK +Ġcomm ute +ĠAna heim +Ġfluct uations +6 40 +Ġnood les +ĠCop enhagen +ĠT ide +ĠGri zz +ĠS EE +Ġpip elines +Ġsc ars +end o +ag us +ĠE TF +/ # +ĠBec ome +44 8 +Ġvis c +ĠRecomm ended +Ġj umper +Ġcogn ition +Ġassass in +Ġwitness ing +ĠSet up +Ġl ac +v im +IS M +p ages +SS L +35 8 +Ġad ject +indust rial +l ore +cher y +Ġgl itter +Ġc alf +Flor ida +Ġspoil ers +Ġsucceed s +Ġch anting +Ġslog ans +ĠTr acy +Vis it +rol ogy +Ġm ornings +Ġline age +Ġs ip +Ġintense ly +Ġflour ish +ĠSle eping +ĠF em +or por +ĠK lan +ĠDar th +h ack +ĠNi elsen +Ġtum ors +Ġprocure ment +ĠY orkshire +Ġra ided +K Y +An na +Ġ// [ +ĠDis order +ĠMust ang +ĠW en +ĠTry ing +s q +Ġdeliver ies +Ġshut ter +Ġcere bral +Ġbip olar +ĠC N +l ass +j et +Ġdeb ating +> : +Ġe agle +gr ades +ĠD ixon +UG C +M AS +ĠDr aco +ĠMach ines +aff er +Ġem an + ² +pr on +ĠG ym +Ġcompar atively +ĠTrib unal +PR O +Ġle x +Ġfert ile +Ġdep ressing +Ġsuperf icial +ess ential +ĠHun ters +g p +Ġprom inence +L iber +ĠAn cest +ote chnology +Ġm ocking +ĠTra ff +ĸ ļ +Med ium +I raq +Ġpsychiat rist +Quant ity +ĠL ect +Ġno isy +5 20 +G Y +Ġsl apped +ĠM TV +Ġpar a +p ull +Mult iple +as her +Ġn our +ĠSe g +Spe ll +v ous +ord ial +Sen ior +ĠGold berg +ĠPl asma +ne ed +Ġmess enger +ere t +Ġteam ed 
+Ġliter acy +ĠLe ah +ĠD oyle +Ġem itted +U X +Ġev ade +Ġm aze +Ġwrong ly +ĠL ars +Ġstere otype +Ġpled ges +Ġarom a +ĠM ET +Ġac re +ĠO D +Ġf f +Ġbrew eries +ĠH ilton +und le +ĠK ak +ĠThank fully +ĠCan ucks +in ctions +ĠApp ears +Ġco er +Ġundermin ed +ro vers +And re +Ġbl aze +um ers +Ġfam ine +amp hetamine +ulk an +Am ount +Ġdesper ation +wik ipedia +develop ment +ĠCor inth +uss ia +Jack son +L I +N ative +R s +Oh io +ĠKath leen +F ortunately +Ġattend ant +ĠPre ferred +ĠDid n +ĠV s +M is +Ġrespond ent +Ġb oun +st able +Ġp aved +Ġunex pl +ĠChe ney +L M +ĠC ull +bl own +Ġconfront ing +oc ese +serv ing +W i +ĠLith uania +ann i +Ġst alk +h d +Ġv ener +AP H +ynchron ous +UR R +um ably +hist oric +H alf +H ay +Ġresil ience +spe ction +Ġabandon ing +O bs +ĠDeb bie +Ġgrad ient +ĠPl aint +ĠCan al +AR CH +Ġexpans ive +Ġfun g +Ġb ounced +U nd +Ġprec autions +Ġclar ification +Ġd agger +Ġgri ps +Ġ µ +ĠRiver a +ĠUnd ead +is ites +ĠFIR ST +ñ o +aud i +Ġhost ages +Ġcompl iant +Ġal umni +Se ven +Ġcyber security +e ither +Col lect +Ġinvari ably +ĠS oci +Ġlaw maker +Ġa le +ĠPerson ally +N azi +Ġcustom ization +ĠPro c +ĠSask atchewan +eat uring +Ġsp ared +Ġdiscontin ued +Ġcomput ational +ĠMotor ola +Ġsuprem acist +government al +Ġparad ise +ĠDown ing +ĠNik on +Ġcat alyst +ber ra +Tor onto +8 75 +bet a +ĠMac ron +Ġunreal istic +ve ctor +ĠVeh icles +it iveness +ĠR V +ĠCol bert +s in +o ji +ent in +ĠKr ish +hell o +ff ield +ok y +ĠT ate +Ġmap le +Ġa ids +chem ical +33 4 +n uts +ĠWar p +Ġx x +ĠRob b +umer ous +_- _ +ft ime +ĠV W +Ġw inger +ĠD ome +t ools +ĠP V +ĠGe orgetown +Ġg eared +Ġjihad ists +Ġc p +Ġster oids +M other +cler osis +ĠDR M +nes ia +Ġl inger +Ġimm ersive +ĠC OUN +Ġoutwe igh +ens ual +B and +Ġtransform s +mat ched +ps ons +ĠJud icial +f actor +Ġrefer ral +Ġodd ly +ĠW enger +B ring +ĠB ows +60 2 +IC LE +Ġl ions +ĠAcad emic +ĠTh orn +ĠRa ider +kef eller +St orage +L ower +ĠOr t +ĠEqu ality +AL T +ĠS OC +T ypes +Ġl yn +ĠAss et +co at +TP P +C VE +ĠPione er +app lication +Mod ern +ĠH K +En vironment +Al right +R ain +IP P +ĠShi ite +Ġm ound +ĠAb ilities +cond ition +St aff +Ġcompet ence +ĠM oor +ĠDi ablo +Ġwith held +Ġost ensibly +ĠB rom +Ġms g +Ġden omin +ĠRef erences +ĠF P +Ġplun ged +Ġp amph +m oving +cent ral +Ġdown right +Ġf ading +T al +T yp +ĠTh y +uk es +it he +Ġo ve +Ġbatt led +Ġseaf ood +Ġfig ur +ĠR D +c rop +Ġsqu ads +{ \ +à ¹ +ĠE h +Ġinterview ing +ĠQ in +Ġas piring +PL IC +Ġcla uses +ĠG ast +ĠN ir +Ġl uggage +Ġh ose +Ġsystem d +Ġdesc ending +ĠRev ised +ĠR ails +al ign +70 9 +33 7 +Ġf ug +charg ing +t ags +Ġut er +k ish +WAR NING +49 0 +prof its +Ġvoy age +Ġa ce +ĠV anguard +ĠT anks +ĠM uk +Ġ2 26 +S afe +Ar mor +Ġvolcan ic +Ġwom b +ĠM IL +Ġbegin ner +ĠRec ogn +ĠA AP +PL AY +) ! 
+Ġdetect ing +c n +Ġbre aches +Bas ically +ĠP ag +ĠMunicip al +ĠInd ie +ĠL af +ĠDis able +ĠOl son +Ġrest rained +Ġrul ings +Ġhum ane +ev ents +ĠCinem a +display Text +ĠH atch +action Date +onna issance +Ġassault ing +ĠL ug +CH AT +Ġvig orous +ĠPer se +Ġintoler ance +ĠSnap chat +ĠSh arks +Ġd ummy +ĠDi agn +ĠGu itar +im eters +40 3 +RE G +A x +Ġsepar ates +ĠMah m +Ġt v +j ah +O OL +C irc +ĠWinds or +uss ian +Ġintu ition +Ġdis dain +ĠDon ovan +Ġ2 21 +E mb +Ġcondem ning +Ġgener osity +zz y +Ġpant ies +ĠPre vent +Action Code +AN A +34 2 +external ActionCode +Ġspec ifying +Ġcryst all +J ere +Ġru pt +ĠApp rentice +Ġprof iling +Ð º +St rike +Ġsid eline +Ġoblig ated +Ġocc ult +Ġbureaucr atic +ant ically +rupt ed +neg ative +ĠEthiop ia +ĠC ivic +Ġins iders +el igible +ĠTV s +ĠB AR +ĠT I +i ologist +ĠA IR +Ġsubstit uted +Ar ab +ĠS aul +ĠY og +p rem +Ġbuild ers +Ġstation ary +Ġdoubt ful +Ġvig orously +Ġthr illing +Ph ysical +ĠCare y +ĠHyd ra +geon ing +ĠS ly +y ton +Ġborrow ers +ĠPark inson +Ġ ë +ĠJama ica +Ġsat ir +Ġinsurg ents +ĠF irm +Ġis ot +ĠK arn +our ning +ak ens +doc s +l ittle +ĠMon aco +CL ASS +Tur key +L y +ĠCon an +ass ic +Ġstar red +ĠPac ers +et ies +Ġt ipping +M oon +ĠR w +s ame +Ġcav ity +Ġgo of +ĠZ o +Sh ock +um mer +Ġemphas izes +Ġreg rett +Ġnovel ty +Ġen vy +ĠPass ive +r w +50 5 +Ġind ifferent +ĠR ica +ĠHim self +ĠFred die +Ġad ip +ä¸ Ģ +Ġbreak out +Ġhur ried +ĠHu ang +ĠD isk +Ġro aming +?????- ?????- +U V +ĠRick y +ĠS igma +Ġmarginal ized +Ġed its +Ġ30 4 +mem ory +Ġspec imen +29 3 +ãģ ¯ +Ġvert ically +Ġaud ition +ĠHe ck +Ġc aster +ĠHold ings +ad al +ĠC ron +ĠL iam +Ġdef lect +P ick +ĠDeb ug +RE F +Ġvers atility +ot hes +class ified +ĠMah ar +ĠH ort +C ounter +st asy +not iced +33 1 +ĠSh im +f uck +ĠB ie +Ġair ing +ĠPro tein +ĠHold ing +Ġspect ators +ili ated +ĠThat cher +n osis +ãĥ¼ ãĥ³ +Te le +B oston +ĠTem pl +st ay +Ġdecl arations +47 9 +Vol ume +ĠDesign er +ĠOver watch +id ae +Ġon wards +Ġn ets +ĠMan ila +part icularly +Ġpolit ic +o other +Ġport raits +Ġpave ment +c ffff +Ġs aints +Ġbegin ners +ES PN +Ġshort comings +âķIJ âķIJ +Ġcom et +ĠOrgan ic +qu el +Ġhospital ized +Bre ak +Ġpe el +dyl ib +asp x +ur ances +ĠT IM +P g +Ġread able +ĠMal ik +Ġm uzzle +Ġbench marks +d al +ĠV acc +ĠH icks +60 9 +ĠB iblical +he ng +Ġover load +ĠCivil ization +Ġimm oral +Ġf ries +ãĤ Ĵ +Ġreprodu ced +Ġform ulation +j ug +ire z +g ear +Ġco ached +Mp Server +ĠS J +ĠK w +In it +d eal +ĠO ro +ĠL oki +ĠSong s +Ġ23 2 +ĠLou ise +asion ally +Ġunc ond +olly wood +Ġprogress ives +ĠEn ough +ĠDo e +Ġwreck age +Ġbr ushed +ĠBase Type +Ġz oning +ish able +het ically +ĠC aucus +ĠH ue +Ġk arma +ĠSport ing +Ġtrad er +Ġseem ing +ĠCapt ure +4 30 +b ish +Ġt unes +Ġindo ors +ĠSp here +ĠD ancing +TER N +Ġno b +ĠG ST +m aps +Ġpe ppers +F it +Ġoverse es +ĠRabb i +ĠR uler +vert ising +off ice +xx x +Ġra ft +Ch anged +Ġtext books +L inks +ĠO mn +ãĢ ij +Ġinconven ience +ĠDon etsk += ~ +Ġimplicit ly +Ġboost s +ĠB ones +ĠBo om +Cour tesy +Ġsens ational +AN Y +Ġgre edy +ed en +Ġinex per +ĠL er +ĠV ale +Ġtight en +ĠE AR +ĠN um +Ġancest or +S ent +ĠH orde +urg ical +all ah +Ġsa p +amb a +ĠSp read +tw itch +Ġgrand son +Ġfract ure +Ġmoder ator +ĠSe venth +ĠRe verse +Ġestim ation +Cho ose +Ġpar ach +Ġbar ric +ãĢ IJ +Ġcomp ass +Ġall ergic +âĢ ķ +OT HER +err illa +Ġw agon +Ġz inc +Ġrub bed +ĠFull er +ĠLuxem bourg +ĠHoo ver +Ġli ar +ĠEven ing +ĠCob b +est eem +Ġselect or +ĠB rawl +is ance +ĠE k +Ġtro op +Ġg uts +ĠApp eal +ĠTibet an +Ġrout ines +ĠM ent +Ġsummar ized +steam apps +Ġtr anqu +Ġ19 29 +or an +ĠAut hent +Ġg maxwell +Ġappre hens 
+Ġpo ems +Ġsa usage +ĠWeb ster +ur us +Ġthem ed +Ġl ounge +Ġcharg er +Sp oiler +Ġsp illed +h og +ĠSu nder +ĠA in +ĠAng ry +Ġdis qual +ĠFrequ ency +ĠEther net +Ġhel per +Per cent +Ġhorr ifying +Ġa il +ĠAll an +EE E +ĠCross ing +44 9 +Ġh olog +ĠPuzz les +ĠGo es +eren n +60 4 +ãģ ı +ĠRaf ael +Ġatt en +ĠE manuel +Ġup ro +ĠSus p +P sych +ĠTr ainer +ĠN ES +ĠHun ts +bec ue +Ġcounsel or +R ule +Ġtox ins +Ġb anners +r ifice +Ġgreet ing +Ġfren zy +Ġall ocate +Ġ* ) +ex pr +50 3 +ĠCh ick +ĠT orn +Ġconsolid ation +ĠF letcher +sw itch +fr ac +cl ips +ĠMcK in +ĠLun ar +Mon th +IT CH +Ġscholar ly +rap ed +39 8 +Ġ19 10 +Ġe greg +Ġin secure +Ġvict orious +cffff cc +Ġsing led +Ġel ves +ĠW ond +bur st +Ġcam oufl +ĠBL ACK +Ġcondition ed +ç ī +ans wered +Ġcompuls ory +asc ist +Ġpodcast s +ĠFrank furt +bn b +Ġne oliberal +ĠKey board +ĠBel le +w arm +Ġtrust s +Ġins ured +ĠBu cc +us able +60 7 +ĠPl ains +Ġ18 90 +Ġsabot age +Ġlod ged +f elt +Ġg a +ĠN arc +ĠSal em +Ġsevent y +ĠBl ank +p ocket +Ġwhis per +Ġm ating +om ics +ĠSal man +ĠK ad +Ġan gered +Ġcoll isions +Ġextraord inarily +Ġcoerc ion +G host +b irds +è Ģ +k ok +Ġper missible +avor able +Ġpo inters +Ġdiss ip +ac i +Ġtheat rical +ĠCos mic +Ġforget ting +Ġfinal ized +å¤ § +y out +l ibrary +Ġbo oming +ĠBel ieve +ĠTe acher +ĠL iv +ĠGOOD MAN +ĠDomin ican +OR ED +ĠPart ies +Ġprecip itation +ĠSl ot +R oy +ĠComb ined +Ġinteg rating +Ġch rome +Ġintest inal +ĠRe bell +Ġmatch ups +Ġblock buster +ĠLore n +ĠLe vy +Ġpre aching +ĠS ending +ĠPur pose +ra x +f if +Ġauthor itative +ĠP ET +ast ical +Ġdish on +Ġchat ting +Ġ"$ :/ +Connect ion +Ġrecre ate +Ġdel inqu +Ġbro th +ĠD irty +ĠAd min +z man +Ġscholars hips +Ġ25 3 +cont act +als a +7 67 +c reen +abb age +Ġ19 15 +Ġbl ended +Ġal armed +L anguage +35 6 +Ġbl ends +ĠCh anged +W olf +Ġhe pat +Creat ing +Ġper secut +Ġsweet ness +art e +Ġforfe iture +ĠRober to +im pro +N FL +ĠMag net +Det ailed +Ġinsign ificant +ĠPOL IT +ĠBB Q +ĠC PS +Ġse aw +amin er +m L +end if +f inals +Ġ26 5 +u ish +Ġ} ) +ĠPro blems +Ġem blem +Ġserious ness +Ġpars ing +Ġsubst itution +Ġpress ured +Ġrecy cled +ale b +Rub y +Ġprof iciency +Dri ver +ĠW ester +: ' +AF TA +Ġm antle +ĠClay ton +fl ag +Ġpractition er +c overed +ĠSt ruct +add afi +4 25 +ĠTown ship +ĠHyd ro +Lou is +34 3 +Ġcond o +ĠT ao +Ġutil ization +Ġnause a +ĠDem s +rid ges +p ause +Ġform ulas +Ġchall enger +37 6 +Ġdefect ive +ĠRail way +ĠPub Med +Ġyog urt +l bs +ĠNor folk +OP E +ĠMood y +Ġdistribut or +Ġscroll s +Ġextract s +St an +Ġv iability +Ġexp oses +Ġstar vation +ĠStep s +ĠD odd +f ew +ST D +33 2 +Ġclos ures +Ġcomplement ary +ĠS asha +ump y +Ġmon et +Ġartic ulate +ĠDo ct +k iller +Ġsc rim +Ġ2 64 +Ġprost itutes +Ġse vered +Ġattach ments +Ġcool ed +L ev +ĠF alk +f ail +Ġpolic eman +ĠD ag +Ġpray ed +ĠK ernel +Ġcl ut +Ġc ath +Ġan omaly +St orm +em aker +ĠBreak fast +ul i +o ire +J J +h z +Oper ation +ĠS ick +35 4 +ĠGuatem ala +R ate +Ġexp osures +f aces +ĠArch ae +ra f +ĠM ia +Ġ20 25 +Ġop aque +Ġdisgu ised +ĠHead quarters +S ah +Ġp ots +9 78 +ĠM alf +Ġfrown ed +Ġpoison ous +ĠCon vers +ee ks +Ġcr ab +." 
" +Ġtre ason +Ġr anc +Ġescal ating +Ġwar r +Ġmob s +Ġl amps +ĠSun shine +ĠBrun swick +Ph ones +Ġspe lled +ĠSk ip +Ġ20 50 +Ġ19 11 +ĠPl uto +ĠAm end +Ġme ats +38 7 +Ġst omp +ĠZh ou +ĠLevi athan +ĠHaz ard +ad v +ĠOr well +Ġal oud +Ġb umper +ĠAn arch +ub untu +ĠSer ious +f itting +ĠOption al +ĠCec il +RE AM +Ġser otonin +Ġcultiv ate +ag ogue +} \ +Ġmos ques +ĠSun ny +Ġre active +rev olution +ĠL up +ĠFed ora +Ġdefense man +ĠV ID +ist ine +Ġdrown ing +ĠBroad casting +Ġthr iller +ĠS cy +Ġacceler ating +Ġdirect s +od ied +b ike +d uration +Ġpain fully +R edd +Ġproduct ions +Ġg ag +Ġwh ist +Ġs ock +Ġinf initely +ĠConc ern +ĠCit adel +Ġlie u +Ġcand les +ogene ous +arg er +Ġheaven ly +inflamm atory +Per formance +C s +ruct ose +az aki +Ġp essim +Ġinf erence +Ġpow d +ĠZ oe +Ġpain ts +Ġd azz +pt a +-------- --- +Ġins pir +ĠExper imental +ĠKn ife +reg or +b ors +Ġshow ers +rom eda +Ġs aint +Ġben ign +ĠJ iang +Ġenvision ed +Ġsh roud +IF T +H O +Ġsh uff +ĠI CC +Ġse greg +Ġrevis it +ighth ouse +L i +Ġsub strate +ĠSe as +ĠRew ard +ĠH ep +ĠBr ass +s bm +Ġelim inates +Ġst amina +ĠV AT +ĠLo an +Ġconst raint +Ġappropri ated +Ġp es +ĠA LE +r anging +Ġ40 4 +39 2 +Ġintellectual s +ach u +Ġrestruct uring +ĠLe vin +Ġrun es +Ġdelight ful +Ġcarbohyd rates +ĠMod els +ĠExp o +Ġtransport ing +all oc +Ġring ing +S amsung +Ġscarce ly +ĠURL s +ĠM AS +Ġprot otypes +Ġnarr ator +ĠCPU s +cd n +ĠBart on +Ġdecided ly +ĠSh u +ix ir +oc ious +ĠMy st +N intendo +Ġre use +Ġforg iven +F ew +in ical +n at +Ġseam less +ĠEv a +ĠE VE +ĠJ O +land ers +Ġso fter +neg ie +Ġtrans ient +Ġorb ital +Ġfulf il +ĠK om +Hop efully +Ġdynam ically +ĠHun ger +å Ľ +ĠArmen ia +el man +ber to +Ġp ige +ĠID s +lim it +Ġve ins +Ġso aring +p acks +Gold en +ĠCr ab +ist or +ĠR PM +Ġ$ $ +g ression +Ġjihad ist +Ġgam ble +Ġcare g +Ġinf lated +F ace +ĠFire arms +ĠEm manuel +â Ŀ +Ġsh ocks +gr ab +Ġspl end +ĠHP V +ab ortion +Ab ove +Ent ity +play ers +Ġcomm enced +ul ence +Ġfulfill ment +Ġembod iments +ĠW elfare +Ġha il +Ġ< @ +tt en +Ġcat cher +ĠJ azeera +Ġvolcan o +Ġstabil ize +ĠHand ler +Ġintens ified +ĠAb rams +Ġhum iliation +p aced +60 5 +ĠCent OS +Spe cific +Ġhe ed +ĠC AM +ĠGal ile +D ie +Ġabol ished +ĠThom son +ĠTe achers +ĠW ass +j ong +ĠIS BN +ĠAll ies +sh ake +å · +v ict +How ard +Ġde em +Ġexceed ingly +ĠSmart stocks +ib e +Ġdoor way +Ġcompet ed +ig mat +Ġnational ists +Ġg room +ĠKe en +Ġdispos able +de cl +ĠT olkien +ĠSche me +Ġb iod +Ġav id +ĠEl on +ag ar +ĠT SA +R oman +Ġartific ially +Ġadvis ors +X L +ĠInf erno +36 6 +Ġted ious +ĠPhot ography +ĠCar rie +Ġtro pe +ĠSand ra +Ġdec imal +Que en +ĠGund am +ĠO M +ote ch +N BA +Ġ19 32 +Ġent renched +ĠMar ion +Ġfr aternity +Lab our +Hen ry +Ġlat itude +E ither +Ġenh ances +ĠPot ential +Ġsh ines +id ad +Ġbread th +Ġcapac ities +ĠðŁ ĻĤ +ĠBron x +Ġsex es +Ġdifferent iation +Ġheavy weight +ĠT aj +d ra +Ġmigr ate +Ġexhaust ion +ĠR UN +els ius +ĠCu omo +Ġgu itars +Ġcl ones +ĠSom ew +ĠP ry +------------ - +Ġwarr anted +cy cles +Ġsalv age +Ġdis ks +R ANT +ĠNGO s +ĠMart ian +":[ {" +Ġadd icts +oj ure +il let +Ġamazing ly +art ments +p ixel +ĠGPU s +Lay out +è £ +ĠTam il +ĠBas il +Ġimpart ial +ĠSt ructure +f ork +b ryce +Ġr idge +ĠHamb urg +ri ous +Ġbl itz +cig arettes +Ġcan ned +40 2 +Ġiron ically +Ġcompassion ate +ĠHaw kins +. 
# +ĠCat hedral +Ġrall ied +in ternal +Ġqu ota +st akes +T EXT +m om +Ġcomple tes +Ġ23 8 +Ġsh rug +ãĥ ij +ĠN inth +Ġrev ise +ĠProv ider +Ġtre acher +Ġqu asi +ĠPR ES +Ġdep osition +Ġconfidential ity +iss ors +Ġim balance +Ġspan ning +Ġang ular +ĠC ul +commun ication +ĠNor a +ĠGen ius +op ter +Ġs acked +Sp ot +Ġfine ly +ĠCH R +28 2 +w aves +Pal est +ĠRo hing +N L +è ¿ +Ġsh itty +ĠSc alia +4 75 +Pro gress +Ġreferen cing +Ġclass rooms +ab ee +Ġs od +hes ion +70 8 +ĠZucker berg +ĠFin ish +ĠScot ia +ĠSav ior +ĠInstall ation +an tha +( - +Ġ30 2 +ĠP unk +Ġcr ater +yout u +Ġro ast +Ġinflu encing +Ġd up +ĠJ R +ĠG rav +Ġstat ure +Ġbath rooms +A side +W iki +me an +ĠZ ak +ĠOn es +ĠN ath +Ġhyper t +Ġcommence ment +C ivil +Ġmoder ately +Ġdistribut ors +Ġbreast feeding +Ġ9 80 +ĠS ik +ĠC ig +ĠAM ER +R IP +ĠCare er +ust ing +Ġmess ed +Ġe h +ĠJ ensen +/ $ +Ġblack mail +Ġconvers ions +Ġscientific ally +Ġmant ra +p aying +Ġiv ory +ĠCour ts +OU GH +aunt let +Ser ial +B row +ĠH undreds +3 23 +Ġpe e +Ġlin ux +Ġsub mer +ĠPrinc ipal +48 5 +ĠD SL +ĠCous ins +Ġdoctr ines +ĠAthlet ics +Ġ3 15 +ĠK arma +Ġatt ent +ur ger +Ġpresc ribe +Ġenc aps +ĠC ame +Ġsecret ive +ĠCr imes +d n +C lean +ĠEgypt ians +ĠCar penter +Ġ ll +H um +ĠMil o +Ġcapital ists +Ġbrief ed +T we +ĠBas in +elve t +M os +Ġplun ge +ĠKa iser +ĠFu j +ill in +Ġsafegu ards +Ġo ste +ĠOpportun ity +ĠM afia +ĠCall ing +ap a +ur ban +br ush +ill ard +c é +int elligence +ĠL ob +ĠDru id +Ġsm oother +Ġfoot ing +Ġmotor ists +arc ity +Ġmascul inity +Ġm ism +Ġabdom inal +ĠTa vern +ĠR oh +Ġesc apes +s igned +Anth ony +Ġsacrific ing +Ġintim acy +Ġan terior +ĠK od +Ġmot if +Ġg raz +Ġvisual ization +Ġguitar ist +ĠTro tsky +m agic +D ar +ĠMor i +Ġw ards +Ġtoile ts +l est +Ġtele port +ĠSund ays +ĠPl at +ET S +Ġe Sports +Pat rick +ĠK atherine +en ko +Ġhas sle +ĠM ick +gg les +Ġh ob +aint ain +Ġair borne +Ġsp ans +Ġch ili +Ġa perture +Ġvolunte ered +ĠInc ident +ĠF res +ĠVeter an +augh tered +ing o +Ġun insured +CL OSE +Ġf use +Ġer otic +Ġadvert ise +ra ising +Text ure +Ġatt ends +ĠRE AL +udd led +Ġsm oot +Ġ30 5 +ĠWill is +Ġbl ond +An alysis +ĠV T +on ica +Ġstrongh old +R F +N M +. 
>> +Ġprosper ous +Ġbo asted +29 2 +ĠManufact uring +PR ESS +g ren +Ġpharm acy +ĠRoc kefeller +k ai +Ġth umbs +ĠH ut +Ġmother board +Ġguard ians +ĠAl ter +ll ular +Ġsh ack +Ġwise ly +Ġback bone +erv a +Ġsu icides +ĠMcG regor +ij ah +E mer +ĠB rav +Ġdesign ate +P OST +produ ced +Ġcleans ing +irl wind +ex istent +ĠHum ph +ĠPay ne +Ġv ested +Å ¡ +Ġstring ent +ion a +Ġuns ub +Ġsum med +ĠHer cules +sub ject +ĠR agnar +ĠN os +Ġcharacter ization +Ġsav vy +ĠDaw son +ĠCas ino +Ġf ri +ĠBar rier +Ġmis information +Ġins ulation +Ġcorrid ors +Ġair planes +ĠNo ct +ah i +Ġ19 16 +k b +arm ac +Ġsh un +Ġsche ma +Ġhorr ified +Ġ23 9 +aund ers +N B +i ates +er ity +ĠSh ard +Ġr arity +Ġgroup ed +ĠGh ana +again st +ĠBi ological +ĠA ware +ow ell +Ï Ħ +ĠBe au +sh aw +H ack +ĠJul ius +US S +ol son +aun a +c ru +ĠMaur ice +ĠI k +Ġsequ encing +Ġradical s +Ġ( ?, +v irtual +Ġany ways +Ġreper c +Ġhand lers +Ġhes itant +é ĥ +ĠM F +ple mentation +ass ociated +Ġcampaign ed +ĠY ue +ut ations +ĠY oga +Ġsim mer +Ġro ds +Ġmel ody +Ġconv oy +v ideos +Ġscreen ed +N eg +ochem ical +Ġ( )) +Ġultr as +Ġant ip +ĠIsland ers +70 4 +Ġfet ish +Ġridic ulously +ĠK art +Ġmitochond rial +Ġinterf ering +Build er +Ġover fl +Ġac ne +ĠM ud +ĠK err +f lex +ĠPost al +ĠBalt ic +47 7 +ĠPers ons +our age +H B +ĠM use +ĠImm ortal +ĠDri ving +Ġpet itions +Ġsubsc ript +Ġs orce +ĠProcess or +ut on +S ony +Ġph on +Ġr aced +ĠAnth rop +Ġday time +ĠEx ercise +Add ing +Ġeng ages +ĠQual comm +Ġmir acles +Ġmem es +ĠDr ink +ĠOri oles +Ġhair s +ĠPol ar +ath om +Ġsl ippery +ĠR emy +Ġcar amel +ĠY EAR +Ġal k +I gn +a ution +ĠMer lin +ĠC ran +Ġap ologies +Ġ4 10 +Ġout ing +ĠMem ories +app ointed +Ġcount ered +u ld +pos ing +Ġfire wall +ĠW ast +ĠW et +work ed +se ller +Ġrepe aled +ere o +ass uming +BL IC +m ite +ĠCEO s +ĠChap el +ellig ent +________________ ________ +D og +Ġw art +Ġsubsc riber +s ports +Ġbe gged +ĠM V +Ġsem if +eth ical +Ġpre ach +Ġrev ital +Ġpun itive +Ġshort cuts +Ġinstit uted +ĠWars aw +Ġabdom en +ĠK ING +Ġsuper intendent +Ġf ry +ĠGe o +T OR +Ġcontrad ictions +apt ic +Ġlandsc apes +b ugs +Ġcl ust +Ġvol ley +c ribed +Ġt andem +Ġrob es +WH AT +Ġpromot er +Ġel oqu +review ed +ĠD K +ĠPl ato +Ġf ps +T ank +ĠDer rick +Ġpriorit ize +as per +ĠHond uras +ĠCom pleted +ne c +Ġm og +n ir +ĠMay o +DE F +st all +in ness +ĠVolks wagen +Ġprec aution +ĠM ell +i ak +ist ries +Ġ24 8 +Ġoverl apping +Sen ate +ĠEnh ance +res y +rac ial +OR TS +ĠM ormons +Str ong +ĠCo ch +Mex ico +ĠMad uro +Ġj ars +Ġcan e +W ik +oll a +iff erence +Ġphysic ist +ĠMag gie +Ġ28 5 +Ġdep iction +ĠMcL aren +J u +Ġsl ows +Ġcommission ers +ĠWill ow +ĠExpl os +hov ah +Ġtechn ician +Ġhom icides +ĠFl av +ĠTr uman +Ġ100 00 +u ctor +Ġsh ader +News letter +45 7 +Ġre ver +Ġhard ened +Ġwhere abouts +Ġrede velop +Ġcar bs +Ġtra vers +Ġsqu irrel +Ġfoll ower +Ġs ings +50 8 +Ġrabb its +emon ium +Ġdocument ing +Ġmisunder stood +) ' +R ick +gg ies +Ġprem ie +Ġsk ating +Ġpass ports +Ġf ists +aged don +H aw +AC P +0 80 +ĠThough ts +ĠCarl son +Ġpriest hood +h ua +Ġdun geons +ĠLo ans +Ġant is +Ġfamiliar ity +ĠS abb +op al +ĠIn k +st rike +Ġc ram +Ġlegal ized +Ġcu isine +Ġfib re +Tra vel +ĠMon ument +OD Y +eth y +Ġinter state +ĠP UR +em porary +ĠArab ian +develop ed +Ġsadd le +Ġg ithub +ĠOff er +ĠIS P +ro let +ĠSUP ER +ĠDen is +Ġmultipl ier +Ġstir red +Interest ingly +Ġcustom ary +Ġbill ed +he x +Ġmultipl ied +Ġfl ipping +ĠCros by +Ġfundament als +ia e +ĠPlay ed +ĠAt om +am azon +ĠFl am +ee z +activ ated +Ġtables poon +Ġliberal ism +ĠPal in +ĠP atel +N um +ĠT AM +Ġs urn +ĠRel oaded +Ġco ined +" ], +ĠCl ash +ĠAg u 
+Ġprag matic +ĠActiv ate +Ġ8 02 +Ġtrail ers +Ġsil hou +Ġprob es +Ġcirc us +ĠB ain +ĠLind say +ĠAb bey +Del ivery +Ġconcess ion +Ġgast ro +ĠSpr ite +Ä Ł +and el +Ġg imm +Ġaut obi +ĠT urtle +Ġwonder fully +ĠHar am +ĠWorld wide +ĠHand le +Ġtheor ists +Ġsle ek +ĠZh u +ograph ically +EG A +ĠOwn ers +ath s +ĠAntar ctic +n atal +=" " +fl ags +`` `` +Ġs ul +K h +Ġpot assium +Ġlinem an +Ġcere al +ĠSe asons +Ġ20 22 +Ġmat hematic +Ġastron omers +prof essional +Ġf ares +cknow led +Ġch i +Ġyoung sters +Ġmistaken ly +Ġhem isphere +ĠDiv inity +r one +Ġ" , +r ings +Ġattract s +v ana +å ¹ +C AP +Ġplay list +Ġpor ch +ãģ £ +Ġincorpor ates +Ġso ak +Ġassert ing +ĠTerror ism +ĠP ablo +J a +ces ter +Ġfear ing +ĠPr ayer +Ġescal ated +G W +Ġro be +ĠBright on +ac ists +ĠSym phony +ĠDwar f +ĠPar ade +ĠLe go +Ġinex pl +Ġl ords +le af +RA G +l iber +Ġcig ars +ĠJe hovah +60 6 +WIND OWS +ĠLiber ia +eb us +He avy +Ġl ubric +ĠR W +angu ages +Ġnarrow ed +com puter +ĠE mber +Ġmurder ing +Ġdown stream +ĠT uls +ĠT ables +Top ic +ĠAcc uracy += / +l ost +ĠRe i +Ġprogress es +b ear +Ġestablish ments +Just in +ĠPe ach +ĠG omez +å ¿ +ĠTri angle +Id ent +ĠH ive +Res ources +Ġmix es +ĠAss uming +M u +Ġhyp oc +Ġs ane +ĠW an +id ious +Su ccess +Ġ io +Ang el +Ġdanger ously +ĠCreat ure +W ORK +: [ +ĠKat rina +List ener +M iller +ĠId lib +h ang +Ġcircum vent +h ref +Ġcel estial +ĠWe eks +ĠP ug +ĠDal ton +Ġsubpoen a +uk u +Ġpers isted +pe i +old ing +ĠDoc uments +ĠH ast +ĠC ENT +Ġprim er +Ġsyn onymous +Ġn ib +om bs +Ġnot ation +ĠD ish +ĠAt mosp +Ġforb id +ĠAN G +pat tern +l os +Ġproject iles +b rown +." , +ĠVen om +Ġfierce ly +ub lished +ĠU ran +ĠNic arag +4 10 +ĠC AL +OT OS +ĠMir acle +ĠEn chant +Ġguard ing +app end +Att ach +Ġlevel ed +Ġcond oms +ih ilation +64 9 +Ġnight mares +ĠTHE Y +ĠST ART +ĠK inn +Ġroomm ate +Ġhy giene +o pping +J ob +Ġl vl +ĠV ER +ĠKe eping +ab etic +Ġformat ting +eral a +Ġrev isions +Ġres urg +T el +ĠGood man +35 3 +p od +Ġind isp +ĠTrans lation +Ġg own +ĠM und +Ġc is +Ġby stand +col lect +ĠPun jab +act ively +ĠG amb +te ll +Ġimport ing +g encies +Ġloc om +ĠBr ill +H oly +ĠBer ger +Ġshow down +Ġrespond ers +IL Y +Ġt akedown +le ted +Ġmat tered +Ġpredict ive +Ġover lay +G PU +ĠV ick +Ġconvey ed +T ab +pe er +Sc an +Ġdefensive ly +v ae +Ġappro ving +Ġt iers +ĠV ia +quer ade +ĠSaud is +Ġdemol ished +ĠProp he +Ġmon o +Ġhospital ity +H AM +ĠAri el +M OD +ĠTor ah +Ġbl ah +ĠBel arus +erent ial +ĠT uc +Ġbank er +39 7 +Ġmosqu it +ĠScient ist +ĠMus ical +Ġh ust +Sh ift +Ġtor ment +Ġstand off +E duc +ĠF og +Ġampl ifier +Sh ape +Inst ance +ĠCrit ics +Ġda emon +H ouston +Ġmatt ress +ĠID F +Ġobsc ene +ĠA mer +hett i +Ġcomp iling +35 2 +vere tt +ĠRed uction +ist ration +ĠBl essed +ĠB achelor +3 16 +Ġpr ank +ĠVul can +dd ing +Ġm ourning +ĠQu int +ĠBl aster +test ing +Ġsed iment +>> > +ĠE ternity +ĠWH ERE +ĠM aze +Ġreact ing +ĠAl v +oms day +ĠC RA +Ġtransl ator +Ġbog us +at u +We bsite +oll s +Ġbapt ism +Ġs ibling +ĠAut umn +ve z +ãģ® é +gu ards +Ge org +assad ors +ĠFre ud +Ġcontin ents +ĠReg istry +Bern ie +ĸļ 士 +Ġtoler ant +ĠU W +Ġhor ribly +99 5 +ĠMID I +Ġimpat ient +oc ado +er i +ĠWor st +ĠNor ris +ĠTalk ing +Ġdef ends +ens able +Ġ20 21 +Ġanat omy +L ew +Ġdraw er +ĠCan berra +Ġpatri otic +é¾įå ĸļ士 +ĠAv g +AR M +Ġundis closed +Ġfare well +45 9 +b able +ĠAll ison +OL OG +Ġcon co +t ight +ĠAC PI +ĠM ines +l ich +ĠâĶ ľ +represent ed +200 000 +Ġenthusi ast +OT S +b il +ĠIng redients +Ġinvent or +ĠMy SQL +³³ Âł +ĠAB OUT +with in +Ġm k +B ul +ĠF ake +Ġdracon ian +W a +hel m +ĠTer ran +erv ille +Ġcommon place +SI ZE +Ġ" < +re place 
+ograph s +ĠSE LECT +inc ible +ĠMost ly +ĠShe ffield +ĠID E +ugg le +Ġcit ations +h urst +ĠUn ix +Ġunle ash +ĠP iper +ĠN ano +Ġsucc umb +Ġreluct ance +Ġ25 00 +ĠMer chant +Ġwire t +Ġcomb os +ĠBirth day +Ġchar coal +ĠU PS +ĠFair fax +Ġdrive way +ĠT ek +ĠP itch +ove re +Ġtechn icians +ĠAct ual +fl ation +ĠF iscal +ĠEm pty +an amo +Ġmag nesium +Ġsl ut +Ġgrow ers +Invest igators +( ): +ĠS atellite +ĠKe ynes +miss ive +l ane +Ġb orough +3 44 +ĠTE AM +ĠBet hesda +C V +h ower +ĠR AD +Ġch ant +ĠR iy +Ġcompos itions +Ġmild ly +Ġmedd ling +Ġag ility +ane ers +5 01 +Ġsyn th +ling er +29 1 +Ġex claimed +Part y +Ġcont amin +ĠMan or +ĠResp ond +Ġpra ising +Ġman ners +fle et +Sum mer +ĠLy nd +ĠDef initely +gr im +Ġbow ling +st ri +ç Ľ +y nt +Ġmand ates +D IV +Ġreconc ile +view s +ĠDam on +vet te +F lo +ĠGreat est +il on +ic ia +Ġportray al +Ġcush ion +50 4 +19 79 +oss al +App lic +sc ription +Ġmit igation +AT S +p ac +Ġer ased +Ġdefic iencies +ĠHolland e +ĠX u +Ġb red +Ġpregn ancies +f emin +Ġem ph +Ġpl anners +Ġout per +utter ing +Ġperpet rator +Ġm otto +ĠEll ison +ĠNE VER +Ġadmitted ly +AR I +ĠAzerbai jan +Ġmill isec +Ġcombust ion +ĠBott le +ĠL und +ĠP s +ĠD ress +Ġfabric ated +Ġbat tered +Ġs idel +ĠNot ting +Fore ign +ĠJer ome +0 20 +ĠAr bit +Ġkn ots +ĠR IGHT +M oving +ãģ Ļ +Ġsur geries +Ġcour thouse +Ġm astered +Ġhover ing +ĠBr an +ĠAl ison +Ġsaf est +m ilitary +Ġbull ied +Ġbar rage +Read er +ES E +ĠGe ographic +T ools +3 14 +ĠGe ek +ro th +gl ers +ĠF IN +Ï ģ +ĠA ston +al tern +48 8 +Ġveter in +G amer +Ġint el +ren ches +Sh ield +Ġam nesty +ĠB har +Ġp iled +Ġhonor able +ĠInst itutes +Ġso aked +Ġcom a +ĠE FF +34 1 +by tes +ĠG mail +le in +ĠCanad iens +m aterial +I l +Ġinstruct ors +ĠK Y +Ġconce ive +ub b +ĠP ossible +Ġeas ing +ĠChrist ina +Ġcar ic +ĠHD R +R OM +Ġsho vel +de lete +Ġp uff +ĠCh anging +Ġseam lessly +Att ribute +Ġacqu isitions +ak ery +ĠE F +Ġaut istic +ĠT akes +ĠPow der +ĠSt ir +5 10 +ĠBub ble +sett ings +ĠF owler +Ġmust ard +Ġmore over +Ġcopyright ed +ĠLED s +15 00 +æ ī +ĠH IS +en f +Ġcust od +ĠH uck +G i +Ġim g +An swer +C t +j ay +ĠInf rastructure +Ġfeder ally +L oc +Ġmicro bes +Ġover run +dd s +ot ent +adi ator +>>>> >>>> +Ġtorn ado +Ġadj ud +Ġintrig ued +Ġs i +ĠRevel ation +pro gress +Ġburgl ary +ĠSai yan +ĠK athy +Ġser pent +ĠAndre as +Ġcomp el +ess ler +ĠPl astic +ĠAd vent +ĠPos itive +ĠQ t +ĠHind us +reg istered +ular ity +Ġrighteous ness +Ġdemon ic +u itive +ĠB DS +ĠGre gg +c ia +ĠCrus ade +ĠSina i +W ARE ++ ( +Ġme ll +Ġder ail +y ards +A st +Ġnotice ably +ĠO ber +R am +Ġun noticed +Ġse q +av age +T s +Ġ6 40 +Ġconced e +Ġ] ) +F ill +Ġcapt ivity +ĠImprove ment +ĠCrus ader +ara oh +M AP +æ Ĺ +Ġstr ide +al ways +F ly +N it +Ġal gae +ĠCook ing +ĠDo ors +Mal ley +Ġpolic emen +ãģ į +Ġastron aut +access ible +49 5 +ĠR AW +cl iffe +udic rous +Ġdep ended +al ach +Ġvent ures +ra ke +Ġt its +ĠH ou +Ġcond om +ormon al +Ġind ent +Ġupload ing +Foot note +Import ant +Ġ27 1 +Ġmind ful +Ġcont ends +C ra +Ġcal ibr +ĠO ECD +plug in +F at +ĠIS S +ĠDynam ics +ans en +68 6 +' ), +Ġsp rite +Ġhand held +ĠH ipp +=~ =~ +Tr ust +Ġsem antics +ĠBund es +ĠRen o +ĠLiter ature +s ense +G ary +ĠA eg +ĠTr in +EE K +Ġcler ic +ĠSS H +Ġch rist +Ġinv ading +ib u +Ġen um +aur a +Ġal lege +ĠInc redible +B BC +Ġth ru +Ġsa iled +Ġem ulate +Ġin security +Ġc rou +Ġaccommod ations +Ġincompet ent +Ġsl ips +ĠEarth qu +s ama +IL LE +Ġi Phones +as aki +Ġby e +Ġar d +Ġext ras +Ġsl aughtered +Ġcrowd funding +res so +Ġfil ib +ĠER ROR +ĠT LS +e gg +ĠIt al +Ġen list +ĠCatal onia +ĠSc ots +Ġser geant +Ġdiss olve +N H +Ġstand ings +ri 
que +I Q +Ġbenef iciary +Ġaqu arium +You Tube +ĠPower Shell +Ġbright est +ĠWar rant +S old +Writ ing +Ġbegin nings +ĠRes erved +ĠLatin os +head ing +Ġ4 40 +Ġrooft op +AT ING +Ġ3 90 +VP N +G s +k ernel +turn ed +Ġprefer able +Ġturn overs +ĠH els +S a +ĠShin ji +ve h +ĠMOD ULE +V iol +Ġex iting +Ġj ab +ĠVan illa +Ġac ron +ĠG ap +ber n +A k +ĠMc Gu +Ġend lessly +ĠFar age +ĠNo el +V a +M K +Ġbr ute +ĠK ru +ĠES V +ĠOl ivia +âĢ ł +ĠK af +Ġtrust ing +Ġh ots +3 24 +Ġmal aria +Ġj son +Ġp ounding +ort ment +Count ry +Ġpostp oned +Ġunequ iv +? ), +ĠRo oney +udd ing +ĠLe ap +ur rence +sh apeshifter +ĠH AS +os ate +Ġca vern +Ġconserv atism +ĠB AD +Ġmile age +Ġarrest ing +V aults +Ġmix er +Dem ocratic +ĠB enson +Ġauth ored +8 000 +Ġpro active +ĠSpirit ual +t re +Ġincarcer ated +ĠS ort +Ġpe aked +Ġwield ing +re ciation +×Ļ × +P atch +ĠEm my +Ġex qu +tt o +ĠRat io +ĠP icks +ĠG ry +ph ant +Ġf ret +Ġeth n +Ġarch ived +% - +c ases +ĠBl aze +Ġim b +c v +y ss +im ony +Ġcount down +Ġaw akening +ĠTunis ia +ĠRe fer +ĠM J +Ġun natural +ĠCar negie +iz en +ĠN uggets +he ss +Ġev ils +64 7 +Ġintrodu ctory +l oving +ĠMcM ahon +Ġambig uity +L abel +ĠAlm ighty +Ġcolor ing +ĠCl aus +set ting +N ULL +ĠF avorite +ĠS IG +> ( +ĠSh iva +ĠMay er +Ġstorm ed +ĠCo verage +we apons +igh am +Ġun answered +Ġle ve +Ġc oy +c as +b ags +as ured +Se attle +ĠSant orum +ser ious +Ġcourage ous +ĠS oup +Ġconfisc ated +Ġ// / +Ġuncon ventional +Ġmom s +ĠRohing ya +ĠOrche stra +ĠPot ion +Ġdisc redit +ĠF IL +f ixed +ĠDe er +do i +ĠDim ension +Ġbureaucr ats +et een +Ġaction Group +oh m +Ġb umps +ĠUt ility +Ġsubmar ines +ren heit +re search +ĠShap iro +Ġsket ches +Ġde ceptive +ĠV il +es ame +ĠEss entially +Ġramp age +isk y +Ġmut tered +th ritis +Ġ23 6 +f et +b ars +Ġpup il +ĠTh ou +o S +s ong +Ġfract ured +Ġre vert +pict ure +Ġcrit erion +us her +Ġreperc ussions +ĠV intage +ĠSuper intendent +Offic ers +Ġflag ged +Ġbl ames +Ġin verse +ograp hers +Ġmakes hift +Ġdev oid +Ġfoss ils +ĠArist otle +ĠFund s +Ġde pleted +ĠFl u +ĠY uan +Ġw oes +Ġlip id +Ġsit u +requ isites +Ġfurn ish +ĠSam ar +Ġshame ful +Ġadverse ly +Ġad ept +Ġrem orse +Ġmurder ous +uck les +ĠE SL +Ġ3 14 +s ent +Ġred ef +ĠC ache +ĠP urs +ig ans +Ġ4 60 +Ġpres criptions +Ġf res +F uck +ocr ates +Tw enty +ĠWe ird +ĠT oggle +ĠC alled +itiz ens +Ġp oultry +Ġharvest ing +ãĤ¦ ãĤ¹ +Bott om +Ġcaution ed +t n +39 6 +ĠNik ki +Ġeval uations +Ġharass ing +Ġbind ings +ĠMon etary +Ġhit ters +Ġadvers ary +un ts +Ġset back +Ġenc rypt +ĠC ait +Ġl ows +eng es +ĠN orn +Ġbul bs +Ġbott led +ĠVoy ager +3 17 +Ġsp heres +p olitics +Ġsubt ract +Ġsens ations +Ġapp alling +Ġ3 16 +Ġenvironment ally +ĠST EM +Ġpub lishes +5 60 +Ġdilig ence +48 4 +Ġadv ises +Ġpet rol +Ġimag ining +Ġpatrol s +ĠInt eger +ĠAs hes +act us +ĠRad iant +ĠL T +it ability +ht aking +Set ting +Ġnu anced +ĠRe ef +ĠDevelop ers +N i +pie ces +99 0 +Lic ense +Ġlow ers +ĠOtt oman +3 27 +oo o +Ġqu itting +mark ets +Beh ind +Ġbas in +Ġdoc s +an ie +fl ash +ct l +Ġcivil ized +ĠFuk ushima +"] ," +ĠK S +ĠHonest ly +ar at +Ġconstruct s +ĠL ans +ĠD ire +ĠLI KE +ĠTrou ble +Ġwith holding +ĠOb livion +Ġsan ity +any a +Con st +Ġgro cer +ĠC elsius +Ġrecount ed +ĠW ife +B order +ate red +h appy +Ġspo iler +Ġlog ically +H all +Ġsucceed ing +Ġpoly morph +Ġax es +ĠShot gun +ĠS lim +ĠPrin ciples +ĠL eth +art a +Ġsc or +Sc reenshot +Ġrelax ation +#$ #$ +Ġdeter rent +idd y +Ġpower less +Ġles bians +Ġch ords +ĠEd ited +se lected +Ġseparat ists +000 2 +Ġair space +Ġturn around +Ġc unning +P ATH +P oly +Ġbomb ed +Ġt ion +x s +Ġwith hold +Ġw aged +ĠLiber ties +Fl ag 
+Ġcomfort ing +45 4 +ĠI ris +are rs +Ġr ag +Ġrel ocated +ĠGu arant +Ġstrateg ically +Ġgam ma +uber ty +ĠLock heed +g res +Ġgr illed +ĠLow e +st ats +ĠR ocks +Ġsens ing +Ġrent ing +ĠGe ological +ا Ø +ot rop +Ġse w +Ġimproper ly +48 6 +Ġâĸ ł +Ġstar ving +ĠB j +Disc ussion +3 28 +ĠCom bo +ĠFix es +N AT +Ġstri ving +th ora +Ġharvest ed +ĠP ing +Ġplay ful +Ġaven ues +Ġoccup ational +Ġw akes +ĠCou rier +Ġdrum mer +ĠBrow ser +ĠH outh +it u +Ġapp arel +p aste +Ġhun ted +ĠSecond ly +l ain +X Y +ĠP IN +ic ons +Ġcock tails +Ġs izable +Ġhurd les +est inal +ĠRecre ation +Ġe co +64 8 +ĠD ied +m int +Ġfinger prints +Ġdis pose +ĠBos nia +ts y +22 00 +Ġins pected +ĠF ou +Ġf uss +Ġamb ush +ĠR ak +Ġmanif ested +Pro secut +Ġsuff ice +ren ces +Ġcompens ated +ĠC yrus +Ġgen us +ĠWolver ine +ĠTrend s +Ġh ikes +ĠSe en +Ġen rol +C old +Ġpol itely +ĠSl av +ĠRu pert +Ġey ewitness +ĠAl to +Ġun comp +Ġposter ior +M ust +ĠHer z +Ġprogress ively +Ġ23 4 +Ġind ifference +ĠCunning ham +Ġacadem ia +Ġse wer +Ġast ounding +ĠA ES +r ather +Ġeld est +Ġclim bs +ĠAdd s +Ġout cry +Ġcont ag +ĠH ouses +Ġpe pt +ĠMel ania +interest ed +ĠU CH +ĠR oots +ĠHub bard +ĠT BD +ĠRoman ian +fil ename +St one +ĠIm pl +Ġchromos ome +C le +d x +Ġscram bled +ĠP t +Ġ24 2 +OP LE +Ġtremend ously +St reet +Ġcra ving +Ġbund led +ĠR G +p ipe +Ġinj uring +Ġarc ane +Part icip +ĠHero ic +st y +Ġto pping +ĠTemp est +rent ices +b h +Ġpar anoia +ĠUnic ode +Ġegreg ious +Ġ\ ' +ĠOsw ald +Ġgra vel +ĠSim psons +Ġbl and +ĠGuant anamo +Writ er +lin ers +ĠD ice +J C +Ġpar ity +Ġs ided +Ġ23 7 +ĠPyr rha +at ters +d k +F ine +comp an +Ġform ulated +ĠId ol +il ers +hem oth +ĠF av +Ġintr usion +Ġcar rots +ĠL ayer +ĠH acker +Ġ ---------------- +Ġmoder ation +é ģ +oc oc +Ġcharacter ize +ĠTe resa +Ġsocio economic +Ġper k +ĠParticip ation +tr aining +ĠPaul o +ph ys +Ġtrust worthy +Ġembod ied +ĠMer ch +c urrency +ĠPrior ity +Ġte asing +Ġabsor bing +Ġunf inished +ĠCompar ison +Ġdis ple +writ ers +Ġprofess ions +ĠPengu in +Ġang rily +ĠL INK +68 8 +ĠCor respond +Ġprev ailed +Ġcart el +l p +as ms +ĠRed emption +ĠIslam ists +effect s +d ose +ĠL atter +ĠHal ifax +Ġv as +ĠTop ics +ĠN amed +advert ising +zz a +IC ES +Ġret arded +ach able +ĠPupp et +ĠItem Level +Ġret ract +Ġident ifiable +A aron +ĠB uster +s ol +hel le +as semb +H ope +r anged +B a +ĠP urch +é Ģ +ĠSir i +Ġarri vals +Ġ19 12 +Ġshort ened +Ġ3 12 +Ġdiscrep ancy +ĠTem perature +ĠWal ton +Ġkind erg +p olit +Ġrem ix +Ġconnect ors +ãĥĺ ãĥ© +ĠKazakh stan +dom inated +Ġsu gars +im ble +ĠPan ic +ĠDem and +ĠCol ony +on en +ĠM ER +7 75 +ur ia +aza ar +ĠDeg ree +P ri +Ġsun shine +Ġ25 1 +Ġpsychedel ic +Ġdigit ally +ĠBra un +Ġsh immer +Ġsh ave +ĠTel esc +ĠAst ral +ĠVenezuel an +ĠO G +Ġc rawling +Int eg +ĠFe ather +Ġunfold ing +Ġappropri ation +Ġè£ı è +ĠMob ility +ĠN ey +- . 
+b ilt +L IN +ĠT ube +ĠCon versely +Ġkey boards +ĠC ao +Ġover th +Ġla ure +>> \ +ĠV iper +ach a +Off set +ĠR aleigh +ĠJ ae +J ordan +j p +Ġtotal itarian +Connect or +Ġobserv es +ĠSpart an +ĠIm mediately +ĠSc al +C ool +Ġt aps +Ġro ar +P ast +Ġch ars +ĠB ender +ĠShe ldon +Ġpain ter +Ġbe acon +ĠCreat ures +Ġdownt urn +Ġh inder +ĠAnd romeda +à Ľ +cc oli +ĠF itness +et rical +Ġutil izes +Ġsen ate +Ġen semble +Ġche ers +T W +Ġaff luent +k il +ry lic +ord ering +Com puter +Ġgru esome +ost ics +ĠUb isoft +ĠKel ley +Ġw rench +Ġbourgeois ie +IB LE +ĠPrest on +w orn +ar ist +reat ing +Ġst ained +ar ine +Ġsl ime +EN N +Ġche sts +Ġground water +ann ot +ĠTr ay +ĠLoc ke +ĠC TR +Ġd udes +ĠEx ternal +ĠDec oder +Ġpar amed +ĠMed line +80 9 +ĠD inner +rup al +g z +ĠG um +ĠDem o +j ee +Ġd h +ber man +arch s +Ġen qu +ĠEp stein +Ġdevast ation +Ġfriends hips +ĠAr d +Ġ23 1 +ĠRub in +ĠDist ance +Ġsp urred +Ġd ossier +Ġover looking +\\\\\\\\ \\\\\\\\ +Fore st +ĠCom es +\ ", +ĠIran ians +Ġf ixtures +L aughs +Ġcur ry +ĠKing ston +Ġsqu ash +Ġcat alogue +Ġabnormal ities +Ġdigest ive +.... ..... +Ġsubord inate +og ly +Ġ24 9 +M iddle +Ġmass ac +Ġburg ers +Ġdown stairs +Ġ19 31 +39 4 +ĠV G +Ġl asers +ĠS ikh +ĠAlex a +der ived +Ġcycl ist +ãģ® éŃĶ +onel iness +!!!! !!!! +Ġbuff s +leg ate +Ġrap ing +Ġrecomm ending +ro red +Ġmult icultural +un ique +Ġbusiness men +Ġune asy +ĠM AP +Ġdisp ersed +cipl ine +J ess +ĠK erala +å § +Ġabst raction +Sur v +U h +Ġprin ters +ij a +ow der +Ġanalog ous +ĠA SP +af er +Ġunfold ed +Ġlevel ing +Ġbre ached +ĠH earing +Ġn at +Ġtransl ating +crit ical +Ġant agonist +ĠYes terday +Ġfuzz y +w ash +m ere +Ġbe wild +ĠM ae +V irgin +ph rase +Ġsign aled +ĠH IGH +Ġprot ester +Ġgar ner +unk nown +Ġk ay +Ġabduct ed +Ġst alking +am n +Ġdes erving +ĠR iv +ĠJ orge +Ġscratch ing +ĠS aving +ip ing +Ġte ase +Ġmission ary +ĠMor row +T IME +P resent +Ġchem otherapy +tern ess +ĠH omes +ĠP urdue +Ġst aunch +ĠWhit ney +ĠTH ERE +Î ¼ +iat us +ĠErn est +ĠDe ploy +Ġcove ted +F ML +ĠDial ogue +Ġex ited +f ruit +Ġner d +":" "," +Ġv ivo +ru ly +4 60 +ĠAm en +rehens ible +Ġâ ĺ +D IR +Ġad herence +Ġche w +ĠCo ke +ĠSerge i +dig ital +ĠNe ck +g ently +enth al +/ ) +Ġwe ary +Ġgu ise +ĠConc ord +ĠOn ion +at cher +Ġb inge +ĠDirect ive +Ġman ned +ans k +Ġill usions +Ġbillion aires +38 3 +oly n +odynam ic +ĠWhe at +ĠA lic +Ġcol oured +ĠN AFTA +ab o +Ġmac ros +ind ependent +s weet +Ġsp ac +ĠK abul +Ġ Ä +em e +Ġdict ated +Ġsh outs += { +Ġr ipping +ĠSh ay +ĠCr icket +direct ed +Ġanalys ed +ĠWAR RANT +ag ons +ĠBlaz ers +Ġche ered +Ġar ithmetic +ĠTan z +37 3 +ĠFl ags +Ġ29 5 +Ġw itches +ĠIn cluded +ĠG ained +ĠBl ades +G am +ĠSam antha +ĠAtl antis +ĠPr att +Ġspo iled +ĠI B +ĠRam irez +Pro bably +re ro +ĠN g +ĠWar lock +t p +Ġover he +Ġadministr ations +Ġt int +Ġreg iment +Ġpist ols +Ġblank ets +Ġep ist +Ġbowl s +Ġhydra ulic +Ġde an +Ġj ung +Ġasc end +70 5 +ĠSant iago +à ® +Ġun avoid +ĠSh aman +re b +Ġstem ming +99 8 +ĠM G +st icks +esthes ia +ER O +Ġmor bid +ĠGr ill +ĠP oe +any l +Ġdele ting +ĠSurve illance +Ġdirect ives +Ġiter ations +ĠR ox +ĠMil ky +F ather +Ġpat ented +44 7 +Ġprec ursor +Ġm aiden +ĠP hen +ĠVe gan +ĠPat ent +K elly +Redd itor +Ġn ods +Ġvent ilation +ĠSchwar z +Ġw izards +Ġomin ous +ĠHe ads +ĠB G +Ġl umber +ĠSp iel +Ġis Enabled +Ġancest ral +ĠSh ips +Ġwrest ler +ph i +Ġy uan +ĠRebell ion +Ġice berg +Ġmag ically +Ġdivers ion +ar ro +yth m +ĠR iders +ĠRob bie +ĠK ara +ĠMain tenance +ĠHer b +Ġhar ms +p acked +ĠFe instein +Ġmarry ing +Ġbl ending +ĠR ates +Ġ18 80 +Ġwr ink +ĠUn ch +ĠTor ch +desc ribed +Ġhuman oid +ilit ating 
+ĠCon v +ĠFe ld +IGH TS +Ġwhistlebl ower +ort mund +ets y +arre tt +ĠMon o +ĠI ke +ĠC NBC +ĠW AY +ĠMD MA +ĠIndividual s +Ġsupplement al +Ġpower house +ĠSt ru +F ocus +aph ael +ĠCol leg +att i +Z A +Ġp erenn +ĠSign ature +ĠRod ney +Ġcub es +idd led +ĠD ante +ĠIN V +iling ual +ĠC th +Ġso fa +Ġintimid ate +ĠR oe +ĠDi plom +ĠCount ries +ays on +Ġextrad ition +Ġdis abling +ĠCard iff +Ġmemor andum +ĠTr ace +Ġ?? ? +se ctor +ĠRou hani +ĠY ates +ĠFree ze +Ġbl adder +M otor +ĠProm ise +ant asy +Ġforesee able +ĠC ologne +cont ainer +ĠTre es +ĠG ors +ĠSin clair +Ġbar ring +key e +Ġsl ashed +ĠStat istical +é ĩ +Ġâĸ º +All ows +Ġhum ility +Ġdr illed +ĠF urn +44 3 +Ġse wage +Ġhome page +Ġcour tyard +Ġv ile +Ġsubsid iaries +aj o +direct ory +Ġam mon +V ers +charg es +Ġ} } +ĠCh ains +Ġ24 6 +n ob +Ġper cept +Ġg rit +Ġfisher men +ĠIraq is +ĠDIS TR +ĠF ULL +ĠEval uation +g raph +at ial +Ġcooper ating +Ġmel an +Ġenlight ened +Ġal i +t ailed +Ġsal ute +Ġweak est +ĠBull dogs +U A +ĠAll oy +Ġsem en +oc ene +ĠWilliam son +s pr +, âĢĶ +ĠG F +itt ens +Be at +ĠJ unk +iph ate +ĠFarm ers +ĠBit coins +ig ers +d h +ĠL oyal +p ayer +Ġentert ained +Ġpenn ed +Ġcoup on +Que ue +Ġweaken ing +c arry +Ġunderest imate +Ġshoot out +Ġcharism atic +ĠProced ure +Ġprud ent +in ances +Ġric hes +Ġcort ical +Ġstr ides +Ġd rib +ĠOil ers +5 40 +ĠPer form +ĠBang kok +Ġe uth +S ER +Ġsimpl istic +t ops +camp aign +Q uality +Ġimpover ished +ĠEisen hower +Ġaug ment +ĠH arden +Ġinterven ed +Ġlist ens +ĠK ok +Ġs age +Ġrub bish +ĠD ed +Ġm ull +pe lling +Ġvide ot +Produ ction +D J +m iah +Ġadapt ations +Ġmed ically +Ġboard ed +Ġarrog ance +Ġscra pped +Ġopp ress +FORM ATION +Ġj unction +4 15 +EE EE +S kill +Ġsub du +ĠSug gest +ĠP ett +Ġle tt +ĠMan ip +ĠC af +ĠCooper ation +T her +Ġreg ained +¶ æ +ref lect +Ġth ugs +ĠShel by +Ġdict ates +ĠWe iner +ĠH ale +Ġbatt leground +s child +Ġcond ol +h unt +osit ories +Ġacc uses +Fil ename +Ġsh ri +Ġmotiv ate +Ġreflect ions +N ull +ĠL obby +¥ µ +ĠS ATA +ĠBack up +Ñ ĥ +n in +ĠCor rection +Ġju icy +ut ra +ĠP ric +Ġrest raining +ĠAir bnb +ĠAr rest +Ġappropri ations +Ġsl opes +Ġmans laughter +Ġwork ings +ĠH uss +ĠF rey +Le ave +ĠHarm ony +ĠF eder +Ġ4 30 +Ġt rench +Ġglad ly +Ġbull pen +ĠG au +b ones +Ġgro ove +Ġpre text +ã ħĭ +Ġtransm itter +ĠComp onent +Ġunder age +ĠEm pires +T ile +Ġo y +ĠMar vin +ĠC AS +Ġbl oss +Ġrepl icated +ĠMar iners +Marc us +ĠBl ocks +Ġliber ated +Ġbutter fly +Fe el +Ġfer mentation +Ġyou tube +Ġoff end +ĠTer m +res ist +Ġcess ation +Ġinsurg ency +Ġb ir +ĠRa ise +59 5 +Ġhypothes es +50 2 +Ġpl aque +ocr at +Ġjack ets +ĠHuff Post +am ong +Ġconf er +48 7 +ĠL illy +Ġadapt ing +ĠF ay +Ġsh oved +ve c +Ġref ine +Ġg on +Ġgun men +z ai +ĠShut tle +ĠI zan +Ġ19 13 +Ġple thora +· · +Ġ5 10 +Ġp uberty +Ġ24 1 +ĠWe alth +ĠAl ma +ĠM EM +ĠAd ults +C as +pr ison +R ace +Ġwater proof +Ġathlet icism +Ġcapital ize +ĠJu ice +Ġillum inated +ĠP ascal +Ġirrit ation +ĠWitness es +ad le +ĠAst ro +Ġf ax +ĠEl vis +Prim ary +ĠL ich +ĠEl ves +Ġres iding +Ġst umble +3 19 +ĠP KK +Ġadvers aries +D OS +ĠR itual +Ġsm ear +Ġar son +ident al +Ġsc ant +Ġmon archy +Ġhal ftime +Ġresid ue +Ġind ign +ĠSh aun +ĠEl m +aur i +A ff +W ATCH +ĠLy on +hel ps +36 1 +Ġlobby ist +Ġdimin ishing +Ġout breaks +Ġgo ats +f avorite +ĠN ah +son ian +ĠBo oster +Ġsand box +ĠF are +ĠMalt a +Ġatt Rot +ĠM OR +ld e +Ġnavig ating +T ouch +Ġunt rue +ĠDis aster +Ġl udicrous +Pass word +ĠJ FK +blog spot +4 16 +ĠUN DER +ern al +Ġdelay ing +T OP +Ġimpl ants +ĠAV G +ĠH uge +att r +Ġjournal istic +ĠPe yton +ĠI A +R ap +go al +ĠProgram me +Ġsm ashing +w ives +print 
ln +ĠPl ague +in us +EE P +Ġcru iser +ĠPar ish +umin ium +Ġoccup ants +ĠJ ihad +m op +Ġp int +Ġhe ct +ĠMe cca +direct or +ĠFund ing +ĠM ixed +Ġst ag +T ier +Ġg ust +Ġbright ly +ors i +Ġup hill +R D +Ġles ions +ĠBund y +liv ious +Ġbi ologist +ĠFac ulty +ĠAuthor ization +Ġ24 4 +All ow +ï ¸ +ĠGi ul +Ġpert inent +ot aur +es se +ĠRo of +Ġunman ned +35 1 +ĠSh ak +ĠO rient +Ġend anger +D ir +Ġrepl en +ed ient +Ġtail or +Ġgad gets +Ġaud ible +âĺ Ĩ +N ice +Ġbomb ard +ĠR ape +Ġdef iance +ĠTW O +ĠFilip ino +Ġunaff ected +erv atives +Ġso ared +ĠBol ton +Ġcomprom ising +ĠBrew ers +R AL +ĠA HL +icy cle +Ġv ampires +Ġdi pped +oy er +ĠX III +Ġsidew ays +ĠW aste +ĠD iss +ĠâĶľ âĶĢâĶĢ +$ . +Ġhabit ats +ĠBe ef +tr uth +tr ained +spl it +R us +And y +ĠB ram +RE P +p id +è£ ħ +ĠMut ant +An im +ĠMar ina +Ġfut ile +hig hest +f requency +Ġepile psy +Ġcop ing +Ġconc ise +Ġtr acing +ĠS UN +pan el +ĠSoph ie +ĠCrow ley +ĠAd olf +ĠShoot er +Ġsh aky +ĠI G +ĠL ies +ĠBar ber +p kg +Ġupt ake +Ġpred atory +UL TS +/ ** +Ġintox icated +ĠWest brook +od der +he ment +Ġbas eman +AP D +st orage +ĠFif ty +ed itor +G EN +UT ION +ir ting +Ġse wing +r ift +Ġag ony +ĠS ands +Ġ25 4 +C ash +Ġl odge +Ġp unt +N atural +ĠIde as +Ġerrone ous +ĠSens or +ĠHann ity +Ġ19 21 +Ġm ould +ĠG on +kay a +Ġanonym ously +ĠK EY +Ġsim ulator +W inter +Ġstream ed +50 7 +? ", +Ġte ased +Ġco efficient +Ġwart ime +ĠTH R +' '. +ĠBank ing +mp ire +Ġf andom +Ġl ia +G a +Ġdown hill +Ġinterpre ting +Ind ividual +N orm +Ġjealous y +bit coin +Ġple asures +ĠToy s +ĠChev rolet +ĠAd visor +IZ E +Ġrecept ions +70 6 +C ro +Ġ26 2 +Ġcit rus +ir u +Review er +ject ed +U ES +an z +19 81 +ĠWork er +Ġcompl ied +ores cent +contin ental +T on +ĠPr ism +ĠShe ep +Ġ28 8 +n ox +ĠV og +O rd +Ġreal ms +te k +Ġirrig ation +Ġbicy cles +Ġelectron ically +p oly +t all +() ); +Ġaest hetics +ĠInteg rated +Expl ore +Ġd unk +47 6 +p ain +ĠJac ques +ĠD mit +Fram es +Ġreun ited +Ġhum id +D ro +P olitical +Ġyouth ful +Ġent ails +Ġmosqu ito +36 3 +spe cies +Ġcoord inating +ĠMay hem +ĠMagn us +M ount +Impro ved +ĠST ATE +ATT LE +Ġflow ed +Ġtack led +Ġfashion ed +Ġre organ +iv ari +f inger +Ġreluct antly +et ting +ĠV and +you ng +ĠGar land +Ġpresum ption +Ġamen ities +ĠPle asant +on ential +ĠO xy +Ġmor als +ĠY ah +Read y +Sim on +En h +D emon +Ġcl ich +Mon itor +ĠD U +Ġwel comes +Ġstand out +Ġdread ful +Ġban anas +Ġball oons +h ooting +bas ic +Ġsuff ix +Ġd uly +can o +Ch ain +at os +Ġgeop olitical +Ġ( & +ĠGem ini +ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ +Ġacqu itted +L uck +prot ect +10 24 +Ġsc arcity +Ġmind fulness +ec ided +D N +pr ime +ĠPres idents +ĠVID EO +Ġ( âĪĴ +add ock +N OR +ĠP ru +p un +ĠL OL +)) )) +ĠL iqu +ĠS AS +Ġsty ling +Ġpunish ments +Ġnum b +Ġasc ertain +ĠRock ies +f lu +Th umbnail +Ġperpet rated +ĠSem i +Ġdis arm +ĠOld er +ĠEx ception +Ġexponent ially +ĠCommun ities +Ġabol ish +ĠPart ner +pt oms +Ġ7 77 +ĠFo ley +ĠC ases +Ġgre ase +ĠReb irth +G round +Ġ; ) +ĠDoct rine +ik ini +Y e +ĠBl ossom +Ġpers ists +b ill +Ġinf usion +Ġbud dies +9 11 +ĠPat ient +Ġdem os +Ġacquaint ance +ĠP aw +at ari +Ġx ml +Ġfasc ination +ĠSer ve +Ï Ĥ +br anded +Ġa z +Return s +Ġover shadow +Ġro am +Ġspeed y +n umbered +hel ial +Ġdisc iple +Ġass urances +g iven +pect ing +ĠN atalie +çĶ ° +Ġmosquit oes +rote in +Ġnumer ic +Ġindepend ents +Ġtrans itional +Ġreaction ary +ĠMech dragon +do ctor +Ġshort est +Ġsequ ential +ĠB ac +ĠAccount s +ãģ Į +ach y +ract ive +ĠReg iment +Ġbreat htaking +ffic iency +ĠB ates +Ġ3 11 +Ġward 
robe +ft s +ĠBer k +Sim ply +ĠRivers ide +iver ing +ident ial +lu cent +Ġen riched +ĠCon ver +ĠG iving +ãĥ Ļ +Ġlegal ize +ĠF TC +Ġfre aking +M ix +Ġter restrial +es ian +ci ents +W ing +LO AD +Ġled ge +ĠViol ent +ĠMet all +Ġ30 8 +Ġs outheastern +hett o +M eat +Ġslow down +Ġret reated +Jere my +end as +**** * +er ic +Ġre ins +opp able +ĠHuman ity +ear ances +rig an +C amera +Ġwa ivers +s oc +Ġalter ation +trans form +ĠC emetery +50 6 +Ġindef inite +Ġstim ulating +y g +60 3 +ĠS op +Ġdescript ive +Ph ase +ĠEd mund +Ġpneum onia +vent us +A mb +Ġlabor atories +ĠEx clusive +ug ar +W ere +Ġmalf unction +Ġhomosexual s +Ġ---- --- +un i +Ġturb ines +ĠEqu ity +D u +Ġmind ed +ĠR H +ĠBlack hawks +Ġfe ats +Ġ17 00 +re pl +36 2 +lad en +Ġindisp ensable +ly ss +tt i +Ġre el +Ġdiver ted +Ġlik eness +Ġsubscript ions +Ġfing ert +Ġfil thy +dest ruct +d raft +ĠBernard ino +l aunch +Ġper plex +ĠS UM +car b +Ġswe ater +ĠVent ure +ĠJ ag +ĠCele b +ĠV oters +Ġstead fast +Ġathlet ics +ĠHans on +ĠDr ac +Tr acker +Ġcomm end +ĠPres idency +ĠD ID +in formed +Ġweb page +P retty +Ġforce fully +ãĥĥ ãĤ¯ +Ġrel ocation +Ġsat ire +â ī +ĠSunder land +æ Ħ +V oice +???? ???? +Ġinform ant +Ġbow el +ĠUn iform +Ġ ..." +Ġpur ge +Ġpic nic +ĠU mb +ĠU PDATE +ĠSapp hire +ĠSt all +le arn +Ġobject ively +Ġob liter +Ġlooph ole +Ġjour neys +Ġo mission +Pro s +ĠSid ney +pl oma +Ġspray ed +Ġg uru +Ġtra itor +Ġtim et +Ġsn apping +ĠSe vent +urn al +ĠUk ip +Ġb owed +por al +l iberal +R os +Quest ions +i OS +Ġsummar ize +ST AT +Ġ18 50 +ap est +Ġl ender +ĠVari able +br inging +ĠL ORD +, ) +Ġcollaps es +x iety +ĠN ed +Y D +ĠSch a +Ġantib ody +Ġdis band +y re +ill usion +Ġro ver +s hed +ĠHiro sh +cc i +Ġcal am +ĠMort on +P interest +Ġ19 28 +ĠE uras +ord es +Ġf ences +ĠIn ventory +ĠVal encia +ĠU d +ĠT iff +Ġsqu e +Ġqu otation +Ġtroubles ome +er ker +QU EST +ĠKing doms +s outh +Ġle vy +Pr ince +ĠSt ing +Ġnick named +Ġapp e +Ġphot ographic +Ġcorp us +re ference +ĠT rog +U nt +) =( +ĠLat via +Ġactiv ating +Ġlicense e +Ġdispar ities +ĠNews letter +ãĥĥ ãĥĪ +Ġfree ing +ĠJe ep +ĠPer ception +ins k +Ġsil icone +ĠHay den +Le an +ĠSuz uki +ibr arian +66 8 +Ġsp or +Ġcorrel ations +ag hetti +Ġtu ber +ĠIP CC +il us +ĠV u +Ġwealth iest +ĠCarb uncle +an za +Ġfool ed +ĠZ ur +Ġd addy +ran o +il ian +Ġknock out +f man +requ ired +ĠWik ileaks +ĠD uffy +ON T +Ġins ol +ĠObject s +Ġb ou +ĠNord ic +ĠIns ert +sc an +Ġd ancers +Ġid iots +major ity +ĠNev ille +ĠFree BSD +Ġt art +pan ic +69 0 +Ġcoc oa +Ġsam pled +Ġlook up +Ind ust +Ġinject ions +gen re +Ġa u +Ġroad way +Ġgen itals +K ind +ĠEx aminer +ĠY az +F resh +Ġpar alysis +ĠAl uminum +Ġre ap +ok é +Ġsl oppy +ĠTun nel +pos ium +ner y +en ic +Ġher bal +ĠOut er +ĠBuild er +Ġinc ur +Ġide ologies +Ġback ups +cons uming +ĠDet ect +de ck +ĠKN OW +ĠG ret +ĠM IC +Ġtough ness +ĠEx hibit +Ġh ive +L es +ĠSCH OOL +ĠAt ari +ald e +ĠN ull +and estine +m ouse +Ġbrig ade +48 9 +Ġrev ol +ĠLaw son +ĠW ah +op oly +eb ted +ĠS aunders +Ġ3 13 +ĠW inc +Ġtab oo +ĠHel met +Ġw edge +ch ip +ĠT ina +b g +Ġinf uri +r n +Ġanomal ies +ĠSy nc +ĠEx am +ĠComm it +ĠDi ary +ĠALS O +ĠDe bor +omed ical +Ġcomprehens ion +6 55 +Ġempower ing +Ġ ire +Ġju ices +ĠE TH +ĠBox ing +=" / +Ġfacilit ated +p oke +ĠPars ons +ĠMod er +tra vel +Ġcivil izations +Ġliber tarians +Ġrun e +ĠCl arks +at hed +Ġcampaign ers +ĠDis patch +ĠFah renheit +ĠCap com +-------- -- +Ġl ace +Ġdr aining +Ġl iner +ĠArt ificial +é n +t ask +] ). 
+ĠGM O +ĠOper ator +ord inary +ĠInf luence +ĠU ps +Ġpot ency +uss en +osp ons +ĠSw im +ĠDead line +Un ity +Ġcul inary +Ġenlight enment +Ġwe arer +Ġmin ed +Ġp ly +Ġinc est +ĠDVD s +W alk +B TC +Tr ade +Ġdev al +ib and +ĠOvers ight +Palest inian +Ġd art +Ġm ul +L R +Ġrem ovable +ĠReal ms +ì Ŀ +Ġmisc ar +ĠV ulkan +68 5 +è re +ĠS ap +Ġmer ging +ĠCar ly +che ster +Ġbr isk +Ġlux urious +ĠGener ator +Ġbit terness +Ġed ible +Ġ24 3 +T G +Ġrect angle +With No +bel ow +J enn +Ġdark est +Ġh itch +Ġdos age +Ġsc aven +ĠK eller +ĠIllust rated +Certain ly +ĠMaver icks +Marg inal +Ġdiarr hea +Ġenorm ously +Ġ9 99 +sh r +qu art +Ġadam ant +ĠM ew +Ġren ovation +Ġcerv ical +ĠPercent age +en ers +ĠKim ber +Ġflo ats +Ġde x +ĠW itcher +ĠSwan sea +d m +Ġsal ty +y ellow +Ġca pe +ĠDr ain +ĠPaul a +ĠTol edo +les i +Mag azine +ĠW ick +ĠM n +ĠA ck +ĠR iding +AS ON +Ġhom ophobic +AR P +Ġwand ered +C PU +ood oo +ĠP ipe +Ġtight ening +ĠBut t +3 18 +Ġdesert ed +S ession +Ġfacilit ating +J ump +Ġemer gencies +OW ER +Ġexhaust ive +ĠAF TER +Ġheart beat +ĠLab el +ack y +ĠCert ified +ilt ration +Z e +ĠU tt +Ġ13 00 +Ġpres ume +ĠDis p +Ġsur ged +Ġdoll s +Col umb +Ġchim pan +ĠR azor +Ġt icks +Ġcouncill or +Ġpilgr image +ĠReb els +ĠQ C +ĠA uction +x ia +ik k +b red +Ġinsert ion +Ġco arse +d B +SE E +ĠZ ap +ĠF oo +Ġcontem por +ĠQuarter ly +ot ions +ĠAl chemist +ĠT rey +ĠDu o +S weet +80 4 +ĠGi ov +Ġfun n +N in +h off +Ġram ifications +Ġ19 22 +ĠExper ts +az es +Ġgar ments +ar ial +ĠN ab +Ġ25 7 +ĠV ed +Ġhum orous +ĠPom pe +Ġn ylon +Ġlur king +ĠSerge y +ĠMatt is +Ġmisogyn y +ĠComp onents +ĠWatch ing +ĠF olk +ract ical +B ush +Ġt aped +Ġgroup ing +Ġbe ads +Ġ20 48 +Ġcon du +quer que +Read ing +Ġgriev ances +Ult ra +Ġend point +H ig +ĠSt atic +ĠScar borough +L ua +ĠMess i +a qu +ĠPsy Net +ĠR udd +Ġa venue +v p +J er +Ġsh ady +ĠRes ist +ĠArt emis +Ġcare less +Ġbro kers +Ġtemper ament +Ġ5 20 +T ags +ĠTurn ing +Ġut tered +Ġp edd +Ġimpro vised +Ġ: ( +Ġtab l +Ġpl ains +16 00 +press ure +ĠEss ence +marg in +friend s +ĠRest oration +Ġpoll ut +ĠPok er +ĠAugust ine +ĠC IS +ĠSE AL +or ama +Ġth wart +se ek +Ġp agan + º +cp u +Ġg arn +Ġass ortment +ĠI LCS +t ower +Recomm ended +Ġun born +ĠRandom Redditor +ĠRandomRedditor WithNo +Ġparaly zed +Ġeru ption +Ġinter sect +ĠSt oke +ĠS co +B ind +å ¾ +ĠP NG +ĠNeg ative +ĠNO AA +Le on +Ġall oy +ĠL ama +ĠD iversity +5 75 +Ġunderest imated +ĠSc or +Ġm ural +Ġb usted +so on +l if +Ġnone x +Ġall ergy +ĠUnder world +ĠR ays +ĠBl asio +Ġh rs +ĠD ir +Ġ3 27 +by ter +Ġrepl acements +Ġactiv ates +ri ved +M H +Ġp ans +ĠH I +Ġlong itudinal +Ġnu isance +al er +Ġsw ell +ĠS igned +s ci +ĠIs les +ĠA GA +Ġdef iant +Ġson ic +oc on +K C +ĠA im +t ie +ah ah +Ġm L +D X +Ġb isc +ĠBill board +ĠSY STEM +NE Y +ga ard +Ġdist ressed +former ly +Al an +Ġche fs +Ġopt ics +ĠC omet +ĠAM C +Ġredes igned +irm ation +Ġsight ings +38 2 +3 11 +ĠW B +Ġcont raction +ĠT OTAL +D ual +Ġstart led +Ġunderstand ably +Ġsung lasses +ETH OD +Ġd ocker +Ġsurf ing +ĠH EL +ĠSl ack +ton es +Ġsh alt +Vis ual +49 8 +Dep artment +c ussion +Ġunrest ricted +Ġt ad +Ġre name +employ ed +Ġeduc ating +Ġgrin ned +bed room +ĠActiv ities +ĠV elvet +ĠSW AT +Ġsh uffle +ig or +Ġsatur ation +F inding +c ream +ic ter +Ġv odka +tr acking +te c +Ġfore ground +iest a +Ġve hement +ĠEC B +ĠT ie +E y +Ġt urtles +ĠRail road +ĠKat z +ĠFram es +Ġmen ace +ĠFell owship +ĠEss ential +ugg ish +Ġdri p +ch witz +ĠKy oto +s b +ĠN ina +Param eter +Ġal arms +ĠCl aud +Ġpione ering +Ġchief ly +ĠSc ream +Col lection +Ġthank fully +ĠRonald o +åŃ IJ +st rip +ĠDisney land +com mercial +See ing +S oul 
+Ġevac uate +Ġc iv +ĠAs he +Ġdiv ides +ĠD agger +rehens ive +Ġber ries +ĠD F +Ġs ushi +Ġplur ality +W I +Ġdisadvant aged +Ġbatt alion +ob iles +45 1 +Ġcl ing +Ġunden iable +ĠL ounge +Ġha unt +p he +Ġquant ify +Ġdiff ered +Ġ[* ] +ĠV iz +c um +sl ave +Ġvide og +Ġqu ar +Ġbund les +ĠAl onso +t ackle +Ġneur onal +Ġlandsl ide +conf irmed +ĠDep th +Ġrenew ables +B ear +ĠMaced onia +Ġjer seys +Ġb unk +ĠSp awn +ĠControl s +ĠBuch anan +Ġrobot ics +Ġemphas izing +ĠTut orial +h yp +ist on +Ġmonument al +æ ° +ĠCar ry +Ġt bsp +en ance +H ill +art hed +Ġro tten +De an +Ġtw isting +Ġgood will +Ġimm ersion +L iving +Ġbr ushes +ĠC GI +ĠAt k +tr aditional +Ġph antom +ĠSt amina +Ġexpans ions +ĠMar in +Ġembark ed +ĠE g +int estinal +ĠPE OPLE +ĠBo oth +ĠApp alach +Ġreleg ated +V T +M IT +Ġmust er +Ġwithdraw ing +Ġmicrosc ope +ĠG athering +ĠC rescent +ĠArgent ine +ĠDec re +ĠDomin ic +Ġbud s +ant age +ĠI on +Ġwid ened +ONS ORED +ĠGl oves +iann opoulos +raz en +fe el +Ġrepay ment +Ġhind sight +ĠRE ALLY +ĠPist ol +ĠBra h +Ġwat ts +Ġsurv ives +Ġfl urry +iss y +Al ert +ĠUrug uay +Ph oenix +S low +ĠG rave +ĠF ir +Ġmanage able +Ġtar iff +ĠU DP +ĠPist ons +ĠNiger ian +Ġstrike outs +Ġcos metics +whel ming +f ab +c ape +pro xy +Ġre think +Ġover coming +sim ple +Ġw oo +Ġdistract ing +ĠSt anton +ĠTuls a +ĠD ock +65 9 +Ġdisc ord +ĠEm acs +ĠV es +ĠR OB +Ġreass uring +Ġcons ortium +Muslim s +3 21 +Ġprompt s +se i +ĠH itch +imp osed +ĠF ool +Ġindisc rim +wr ong +bu querque +D avis +! ] +Ġtim eless +ĠNE ED +Ġpestic ide +Ġrally ing +ĠCal der +Ġå ¤ +Ġx p +ĠUn le +ĠEx port +lu aj +B uff +) [ +Ġsq or +S audi +Ġis tg +Ġindul ge +pro c +Ġdisg usted +Ġcomp ounded +Ġn em +Ġschool ing +ĠC ure +process ing +S ol +Ġpro verb +it ized +ĠAlv arez +Ġscar f +Ġrect angular +re ve +Ġh ormonal +ĠSt ress +itiz en +Ġ4 25 +girl s +ĠNo ir +ĠR app +Ġmar ches +ch urch +ĠUs es +Ġ40 5 +ĠBer m +Ġord inances +ĠJud gment +Charg es +ĠZ in +Ġdust y +Ġstraw berries +Ġper ce +ĠTh ur +ĠDebor ah +net flix +ĠLam bert +Ġam used +ĠGu ang +Y OU +R GB +ĠC CTV +Ġf iat +r ang +Ġf ederation +ĠM ant +ĠB ust +ĠM are +respect ive +ĠM igration +ĠB IT +59 0 +Ġpatriot ism +Ġout lining +reg ion +ĠJos é +Ġbl asting +ĠEz ra +B s +Ġundermin es +ĠSm ooth +Ġcl ashed +rad io +Ġtransition ing +ĠBucc aneers +ĠOw l +Ġplug s +Ġh iatus +ĠPin ball +Ġm ig +ĠNut r +ĠWolf e +Ġinteg ers +Ġor bits +ĠEd win +ĠDirect X +b ite +Ġbl azing +v r +Ed ge +ĠP ID +ex it +ĠCom ed +ĠPath finder +ĠGu id +ĠSign s +ĠZ er +ĠAg enda +Ġreimburse ment +M esh +i Phone +ĠMar cos +ĠS ites +h ate +en burg +Ġs ockets +p end +Bat man +v ir +ĠSH OW +Ġprovision al +con n +ĠDeath s +AT IVE +Pro file +sy m +J A +Ġnin ja +inst alled +id ates +eb ra +ĠOm aha +Ġse izing +ĠBe asts +Ġsal ts +M ission +Gener ally +ĠTr ilogy +he on +leg ates +Ġd ime +Ġf aire +par able +G raph +Ġtotal ing +Ġdiagram s +ĠYan uk +ple t +ĠMe h +Ġmyth ical +ĠStep hens +aut ical +ochem istry +Ġkil ograms +Ġel bows +anc ock +ĠB CE +ĠPr ague +Ġimpro v +ĠDev in +Ġ" \ +par alle +Ġsuprem acists +ĠB illion +Ġreg imen +inn acle +Ġrequ isite +ang an +ĠBur lington +ain ment +ĠObject ive +oms ky +G V +Ġun ilateral +Ġt c +Ġh ires +ment al +Ġinvol untary +Ġtrans pl +ĠASC II + ¨ +Ev ents +Ġdoub ted +ĠKa plan +ĠCour age +ig on +ĠMan aging +ĠT art +Ġfalse hood +ĠV iolet +Ġair s +Ġfertil izer +Brit ain +Ġaqu atic +ou f +W ords +ĠHart ford +Ġeven ings +ĠV engeance +qu ite +G all +ĠP ret +Ġp df +ĠL M +ĠSo chi +ĠInter cept +9 20 +Ġprofit ability +ĠId le +ĠMac Donald +ĠEst ablishment +um sy +Ġgather ings +ĠN aj +Charl ie +Ġas cent +ĠProt ector +Ġal gebra +Ġbi os +for ums 
+EL S +Introdu ced +Ġ3 35 +Ġastron omy +Cont ribut +ĠPol ic +Pl atform +Ġcontain ment +w rap +Ġcoron ary +ĠJ elly +man ager +Ġheart breaking +c air +ĠChe ro +c gi +Med ical +ĠAccount ability +! !" +oph ile +Ġpsych otic +ĠRest rict +Ġequ itable +iss ues +Ġ19 05 +ĠN ek +c ised +ĠTr acking +Ġo zone +Ġcook er +ros is +Ġre open +Ġinf inity +ĠPharm aceutical +ens ional +Att empt +ĠR ory +Mar co +Ġawa its +H OW +t reated +Ġbol st +Ġreve red +Ġp ods +opp ers +00 10 +Ġampl itude +ric an +SP ONSORED +Ġtrou sers +Ġhal ves +ĠK aine +ĠCut ler +ĠA UTH +Ġsplend id +Ġprevent ive +ĠDud ley +if acts +umin ati +ĠY in +Ġad mon +ĠV ag +Ġin verted +Ġhast ily +ĠH ague +L yn +Ġled ger +Ġastron omical +get ting +Ġcirc a +ĠC ic +ĠTenn is +Lim ited +Ġd ru +ĠBY U +Ġtrave llers +Ġp ane +ĠInt ro +Ġpatient ly +Ġa iding +Ġlo os +ĠT ough +Ġ29 3 +Ġconsum es +Source File +Ġ"" " +Ġbond ing +Ġtil ted +Ġmenstru al +ĠCel estial +UL AR +Plug in +Ġrisk ing +N az +ĠRiy adh +Ġacc redited +Ġsk irm +é Ľ +Ġexam iner +Ġmess ing +Ġnear ing +ĠC hern +ĠBeck ham +Ġsw apped +Ġgo ose +K ay +Ġlo fty +ĠWal let +Ġ[ ' +Ġap ocalypse +Ġb amboo +ĠSP ACE +ĠEl ena +Ġ30 6 +ac ons +Ġtight ened +Ġadolesc ence +Ġrain y +Ġvandal ism +ĠNew town +Ġcon ject +c akes +Ġche ated +Ġmoder ators +par ams +E FF +Ġdece it +ĠST L +ĠTanz ania +ĠR I +Ġ19 23 +ĠEx ile +the l +Ġthe olog +Ġquir ky +ĠIr vine +Ġneed y +or is +U m +K a +Ġmail box +3 22 +Ġb os +ĠPet ra +K ING +Ġenlarg ed +O ften +Ġbad ass +Ġ3 43 +ĠPl aces +ĠC AD +Ġpr istine +Ġinterven ing +d irection +Ġl az +ĠD SM +Ġproject ing +ĠF unk +ag og +pay ment +n ov +Ġch atter +AR B +Ġexam inations +ĠHouse hold +ĠG us +F ord +4 14 +B oss +Ġmy stic +Ġle aps +ĠB av +ul z +b udget +Foot ball +Ġsubsid ized +Ġfirst hand +Ġcoinc ide +oc ular +Con n +ĠColl abor +Ġfool s +am ura +ah ar +r ists +Ġsw ollen +Ġexp ended +ĠP au +s up +Ġsp ar +Ġkey note +s uff +Ġunequ al +Ġprogress ing +str ings +ĠGamer gate +Dis ney +ĠEle ven +om nia +Ġscript ed +Ġear ners +bro ther +ĠEn abled +æ ³ +Ġlar vae +ĠL OC +m ess +Wil son +ĠTem plate +success fully +Ġparam ount +Ġcamoufl age +Ġbind s +ĠQu iet +ĠSh utterstock +r ush +Ġmasc ot +fort une +ĠCol t +ĠBe yon +hab i +Ġha irc +Ġ26 7 +ĠDe us +Ġtw itch +Ġconcent rating +Ġn ipples +c ible +Ġg ir +N Z +M ath +n ih +Requ ired +Ġp onder +ĠS AN +Ġwedd ings +Ġl oneliness +N ES +ĠMah jong +69 5 +add le +ĠGar ner +ĠC OUR +Br idge +Ġsp ree +ĠCald well +Ġbri bery +Ġ���� ���� +plug ins +Ġr acket +Ġchamp agne +vers ible +V ote +Ġmod ifiers +May or +6 80 +Ġassemb lies +ĠS ultan +ĠN ing +ĠLad ies +Ġsulf ur +Ġor bs +Ġ---- - +____ ___ +ĠJournal ism +Ġes ports +Ġl ush +Ġh ue +Ġspect ral +H onest +ãĥ ı +Ġbus hes +Ġrein forcement +Ġre opened +ĠWhe els +ĠM org +rie ving +Ġaux iliary +Ġj Query +ĠB AT +tes que +Ġver tex +p ure +f rey +ãĤ º +d os +Ġty ph +Ġc ull +Ġe q +Ġdec on +Ġtoss ing +Ġdispar ate +ĠBr igham +print f +led ged +Ġsu nd +Ġco zy +Ġhepat itis +per forming +Ġav al +ĠG G +f uture +Ġpet ertodd +ĠKos ovo +Ġmagn ets +Al ready +ĠEd ison +ĠCe res +ĠRA ID +Ġbrill iance +57 6 +Ġder ives +Ġhypert ension +ĠÎ Ķ +Ġlamb da +Ġfl air +Ġmission aries +Ġrap es +ĠSt arter +ĠMon ths +Ġdef y +Ġseism ic +ĠR aphael +Ġeuro zone +65 6 +z sche +Ġscr atched +Ġb ows +ĠLenn on +ĠGa ia +Ġdri pping +f acts +A le +Ġfrog s +ĠBre ast +ogene ity +ĠProsecut or +Ġampl ified +ĠHod g +ĠF n +Th ousands +ĠNI H +ĠMonitor ing +FT WARE +ĠPri ebus +ĠG rowing +hun ter +Ġdiagn ose +ĠM ald +ĠL R +Ġcrown ed +Ġburst ing +Ġdiss olution +j avascript +Ġuseful ness +ĠExec ution +: ( +ĠIv ory +a ah +Ġpersecut ed +viol ence +ist as +ĠCr ate +Ġimpuls es +ĠSp ani 
+ed es +Hand le +ĠZ erg +think able +Last ly +Ġspont aneously +Ġinconven ient +Ġdismiss ing +Ġpl otted +Ġeight y +Ġ7 37 +r ish +ĠThor nton +ath am +Ġsit com +V en +Rec ipe +t el +l und +Ġcle ars +ĠSas uke +Ġ25 8 +Ġopt ing +Ġen raged +est hetic +ĠA e +uch s +Pre p +Fl ow +Ġrun off +ĠE ating +ĠG iles +ĠAct ing +res ources +ib aba +Ġr pm +Ġske wed +ĠBl anc +ĠS akuya +Ġhot ter +Ġ19 24 +op ian +ck o +Ġcr umbling +Ġcapt ains +ĠAppropri ations +le aders +dro pping +an uts +Ġrevers ing +ĠP ose +ĠS ek +Sc ot +ĠIde a +c ise +ĠSloven ia +Ġ3 17 +Do ctor +Ġcro cod +ald i +Se a +ĠFar rell +Ġmerc enaries +ĠR NC +ĠGu ess +Ġp acing +M achine +Streamer Bot +ĠChar ity +Ġ29 8 +Ġcann ons +ĠTob y +TPP StreamerBot +ĠPass ion +cf g +Th om +Ġbad ges +ĠBern stein +. âĢĵ +ĠP OP +ĠCon j +Ġinitial ization +Ġbiod iversity +D ub +Ġfeud al +Ġdisclaim er +Ġc row +Ġign ition +ar f +S HA +Ġk Hz +h azard +ĠArt ists +oe uv +67 9 +ĠRud y +N ine +ĠRam adan +å ½ +itt o +Ġadren aline +C ert +Ġsmell ed +Ġimp unity +Ġag endas +ĠRe born +ĠCon cent +ĠSe ems +Ġo mega +ĠDust in +Ġback er +ĠSau ce +ĠBoy le +W IN +Ġsp ins +Ġpa uses +u pt +Ġshred ded +Ġstra pped +ĠCor ruption +Ġscr atches +Ġn i +Ġatt ire +ĠS AF +Factory Reloaded +ĠI PS +Ġ( % +Ġsem inar +f ocus +c ivil +Ġ18 60 +int osh +Ġcontin ual +Ġabbre vi +ĠS ok +oc obo +X M +Ġfr antic +Ġunavoid able +Ġar tery +Ġannot ations +b ath +Cl imate +Ġd ors +ĠSl ide +co ord +ĠRel oad +ĠL DL +ĠLove craft +Ġunim agin +Ġresemb led +Ġbarr acks +n p +Ġsurrog ate +Ġcategor ized +ãĤ © +Ġvacc inated +Ġdrain age +Ġind ist +ĠWhats App +Ġ18 70 +oler ance +inv oke +am orph +Ġrecon nect +Ġem anc +Ġblind ness +Ġ12 80 +intern et +c ollar +Ġalt ru +Ġab yss +ĠT RI +65 7 +Ġinf used +HE AD +Ġforest ry +ĠWood y +ĠC i +w i +s am +78 4 +hol iday +Ġmog ul +ĠF ees +ĠD EN +In ternal +ur bed +f usc +at om +ĠIll usion +Ġpoll ed +Ġfl ap +Ġco ax +L GBT +An aly +ĠSect ions +ĠCalif orn +em n +Ġh ither +ĠN IGHT +Ġn ailed +ĠPip eline +39 1 +o of +ĠPr imal +vere nd +Ġsl ashing +Ġret ri +avi our +Ġdepart ing +g il +IS C +Ġmid way +Ġultras ound +Ġbeh aving +ĠT ara +class es +V irtual +ĠColon ial +Ġstri pping +Ġorchestr ated +ĠGra ves +45 2 +ĠIron ically +ĠWrit ers +Ġl ends +ĠMan z +Ġra ven +Ġoxid ative +Ġ26 6 +EL F +act ually +asc ar +D raft +Ġfavour able +Ġhumili ating +Ġf idelity +ĠH of +ĠX uan +49 6 +Ġlay ered +at is +79 0 +Ġpay check +it on +K ar +ĠVM ware +ĠFar mer +Ġserv ic +gl omer +Ġsl ump +ĠFab ric +ĠD OC +est ing +Ġreass ure +Ġph yl +v olt +it ory +R ules +Ġoxid ation +Ġpri zed +Ġmist ress +ĠDj ango +WAR N +å ij +Ġenc ode +ĠFeed back +Ġstupid ity +I an +ĠYugoslav ia +× ¨ +ac l +UT E +19 77 +Ġqual ifies +Ġpuls es +pret ty +Ġfro ze +Ġs s +Iter ator +Ġur gently +Ġm ailed +ĠCh am +Ġsust aining +Ġbas il +Ġpupp ies +il ant +ĠP LEASE +l ap +ace ous +F ear +ĠMaster y +aut omatic +ĠT AG +Ġant im +ag les +47 3 +fram es +Ġwh ispers +ĠWho ever +Ġbra very +ĠUK IP +ract ions +"" " +Ġt ame +Ġpart ed +every thing +CON T +Ġind ebted +Ġadd r +re k +IR ED +Ġem inent +cl inton +Ġo usted +Ġreview er +Ġmelt down +Ġre arr +ĠY ao +the real +aby te +Ġst umbling +Ġbat ches +Ġ25 9 +Ġcontrace ptive +Ġprost itute +ens is +De cl +ĠSt rikes +M ilitary +ĠO ath +v acc +pp ings +05 2 +Ġpart Name +amp ing +Rep orts +K I +CH R +Ġsubt ly +sw ers +Bl ake +us ual +Ġcontest ants +Ġcart ridges +ĠGRE AT +Ġbl ush +ĠâĢ º +47 2 +Ġreason ed +ãĥ ¤ +paralle led +Ġd yn +ag ate +Ġnight ly +å Ĩ +55 6 +Ġsem antic +ĠAdv oc +Ġ !! 
+Ġdisag rees +ĠB W +V eh +Ġharm ing +Ġembr aces +Ġstri ves +Ġin land +ĠK ard +Ġhe ats +ĠGin ny +ut an +ern aut +yl ene +ĠE lev +J D +Ġh ars +ĠStar r +Ġsk ysc +Ġcollabor ators +Us ually +Ġrev olutions +ĠSTAT S +Ġdism antle +Ġconfident ly +Ġkin etic +Al i +Ġpercent ile +Ġextract ing +ill ian +est ead +Ġphysic ists +ĠMarsh al +Ġfell owship +Ġd ashed +ĠU R +ĠSi oux +ĠComp act +am ide +P ython +ĠLe igh +ĠPharm ac +ist rates +her ical +Ġf ue +ĠE min +Ġ( { +ĠNeighbor hood +Ġdisrupt ing +ĠD up +Ġg land +ĠSe v +ĠMar ian +arg on +ĠD und +Ġ< !-- +Ġstr and +Ġstadium s +z os +Ġpsych osis +ĠR ack +Ġbrilliant ly +ï¸ ı +Ġsubmer ged +ĠInst it +ĠCh ow +Ġc ages +ĠH ats +ĠU rs +Ġdil uted +us at +ien ne +ĠMembers hip +ĠBur k +Ġ ie +Ġarche type +D rug +ult on +ĠSp ock +ĠMcK ay +ĠDep end +F eatured +S oc +19 78 +ĠB ere +Ġrelent lessly +Ġcripp ling +Ġar thritis +çĶ Ł +ĠTrop ical +ĠBul g +ĠCher yl +Ġadm irable +Ġsub title +Over ride +Ġorig inating +ĠC CP +Ġsw ore +ĠSo le +ĠDis orders +3 29 +Ġprocess ion +Ġref urb +Ġimm ersed +requ ently +Ġskept ics +Ġcer amic +m itter +en stein +b elt +ĠT IT +b idden +Ġf ir +m ist +> ] +Ġwe ave +ĠParad ox +Ġentr usted +ĠBarcl ays +Ġnovel ist +og ie +80 6 +Ġnin ety +Ġdisag reements +@@@@ @@@@ +ĠAus chwitz +c ars +ĠL ET +t ub +arant ine +P OS +Ġback story +Ġcheer ful +ĠR ag +ek a +bi ased +Ġinexper ienced +ak ra +ĠW itt +t an +Ġrap ist +Ġplate au +ch al +ĠInqu is +exp ression +Ġc ipher +Ġsh aving +add en +re ly +( \ +ism a +ĠReg ulatory +CH AR +ily n +N VIDIA +G U +Ġmur m +la us +Christ opher +Ġcontract ual +ĠPro xy +ĠJa ime +ĠMethod ist +Ġstew ards +st a +per ia +Ġphys iology +Ġbump ed +Ġf ructose +Austral ian +ĠMet allic +ĠMas querade +ar b +Ġprom ul +Ġdown fall +Ġbut cher +Ġb our +ĠIN FORMATION +ĠB is +pect s +ad ena +Ġcontempl ating +ar oo +cent ered +ĠPe aks +Us ed +Ġmod em +Ġg enders +Ġ8 000 +37 1 +Ġm aternity +ĠR az +Ġrock ing +Ġhandgun s +ĠD ACA +Aut om +ĠN ile +Ġtum ult +ĠBenef it +ĠAppro ach +works hop +ĠLe aving +G er +inst ead +Ġvibr ations +Ġrep ositories +49 7 +ĠA unt +ĠJ ub +ĠExp edition +Al pha +Ġs ans +Ġoverd ue +Ġoverc rowd +Ġlegisl atures +Ġp aternal +ĠLeon ardo +Ġexp ressive +Ġdistract ions +Ġsil enced +tr ust +Ġb iking +Ġ5 60 +Ġpropri et +Ġimp osition +Ġcon glomer +Ġ= ================================================================ +ĠTe aching +ĠY ose +int ensive +T own +Ġtroll ing +ĠGr ac +ĠAS US +Y o +Ġspecial s +ĠNep h +ĠGod zilla +Dat abase +ĠHe gel +Ġ27 2 +19 76 +ĠGl oria +Ġdis emb +ĠInvestig ations +ĠB ane +ag ements +St range +Ġtre asury +ĠPl ays +Ġundes irable +Ġwid ening +Ġverb ally +Ġinf ancy +Ġcut ter +f ml +Ġ21 00 +prot otype +f ine +Ġdec riminal +Ġdysfunction al +Ġbes ie +ĠErn st +z eb +Ġnort heastern +Ġa ust +por ate +ĠMar lins +Ġsegreg ated +ew orld +ĠMa her +Ġtra verse +Ġmon astery +ur gy +G ear +s and +Com pl +ĠE MP +Ġpl ent +ĠMer cer +Ġ27 6 +TA BLE +Config uration +H undreds +Ġpr ic +Ġcollabor ating +ĠPar amount +ĠCumm ings +Ġ( < +Ġrecord er +Ġfl ats +Ġ4 16 +wh ose +Font Size +ĠOr bit +Y R +Ġwr ists +Ġb akery +) } +ĠB ounty +ĠLanc aster +Ġend ings +acc ording +ĠSal am +e asy +75 5 +ĠBur r +ĠBarn ett +onom ous +Un ion +Ġpreced ence +ĠScholars hip +ĠU X +Ġroll out +Ġbo on +al m +ĠCan ter +æ µ +Ġround ing +Ġcl ad +Ġv ap +ĠF eatured +is ations +Ġ5 40 +pol ice +Ġunsett ling +Ġdr ifting +ĠLum ia +ĠObama Care +ĠF avor +Hy per +ĠRoth schild +ĠMil iband +an aly +ĠJul iet +H u +Ġrec alling +a head +69 6 +Ġunf avorable +Ġd ances +O x +Ġleg ality +Ġ40 3 +rom ancer +Ġinqu ire +ĠM oves +\ "> +ĠVari ant +ĠMess iah +ĠL CS +ĠBah á +75 6 +Ġeyeb row +Ġ ¥ +ĠMc F 
+ĠFort y +M as +Ġpan icked +Ġtransform ations +q q +Ġrev olves +ring e +ĠA i +ax e +Ġon ward +ĠC FR +ĠB are +log in +Ġliqu ids +Ġde comp +second ary +il an +ĠCon vert +ami ya +Ġprosecut ing +Ġâī ¡ +ĠYork ers +ĠByr ne +sl ow +aw ei +J ean +Ġ26 9 +ĠSky dragon +Ġ é +ĠNicarag ua +ĠHuck abee +ĠHigh ly +Ġamph ib +ĠPast or +ĠL ets +Ġbl urred +Ġvisc eral +ĠC BO +Ġcollabor ated +z ig +Leg al +Ġapart heid +Ġbr id +Ġpres et +ĠD ET +ĠAM A +× Ķ +arch ing +auc uses +build er +Ġpo etic +Ġem ulator +ĠMole cular +Ġhon oring +ise um +Ġtract or +ĠCl uster +ĠCal m +ared evil +Ġsidew alks +Ġviol in +Ġgeneral ized +ĠAle c +Ġemb argo +Ġfast ball +ĠHT TPS +ĠL ack +ĠCh ill +ri ver +C hel +ĠSw arm +ĠLev ine +ro ying +L aunch +Ġkick er +Ġadd itive +ĠDe als +W idget +cont aining +Ġescal ate +ĠOP EN +Ġtwe aked +Ġst ash +Ġsp arks +ĠEs sex +ĠE cc +Ġconv ict +Ġblog ging +I ER +ĠH L +Ġmurd erers +75 9 +ĠH ib +Ġde pl +ĠJ ord +S ac +Ġdis sect +ĠHow e +os her +Ġcustom izable +ĠFran z +Ġat ro +Ä ĩ +Ġ000 4 +Ġout post +R oss +Ġglyph osate +ĠHast ings +ĠBE FORE +Ġsh ove +o pped +ĠSc ala +Ġam ulet +an ian +Ġexacerb ated +Ġe ater +47 1 +UM E +Ġpul p +izont al +ĠZ am +ĠAT I +imm une +aby tes +Ġunnecess arily +ĠC AT +ĠAx is +Ġvisual ize +à ī +ĠRad ical +f m +Doc uments +ĠFor rest +Ġcontext ual +ĠSy mbol +Ġtent ative +ĠDO ES +ĠGood s +Ġintermitt ent +} : +medi ated +Ġridic ule +Ġathe ism +Ġpath ogens +ĠM um +Ġre introdu +Ġ30 7 +i HUD +Ġflash light +Ġsw earing +Ġp engu +B u +Ġrot ated +ĠCr ane +Ġ() ); +Ġfashion able +Ġendors ing +46 3 +) [ +Ġingest ion +Ġcook s +Ġ9 50 +ot omy +ĠIm am +Ġk a +Ġte aser +ĠGhost s +ĠãĤ µ +19 69 +Ï ĥ +ub by +Ġconver ter +zan ne +end e +ĠPre par +ĠNic kel +ĠChim era +h im +ĠTyr ann +ĠSabb ath +ĠNich ols +Ġra pt +ih ar +Ġshe lling +Ġillum inate +Ġdent ist +ut or +ĠInteg ration +Ġwh ims +ĠLiter ary +Be aut +Ġp archment +ag ara +Br and +Ġder og +âĢ¦ ) +ĠNor se +Ġunw itting +Ġc uc +Ġborder line +Ġupset ting +Ġrec ourse +Ġd raped +ĠRad ar +Ġcold er +ĠPep si +im inary +], [ +65 8 +V i +ĠF rem +ĠP es +Ġveter inary +ĠT ED +ĠEp idem +n ova +k id +Ġdev out +o ct +j ad +M oh +ĠP AY +Ġge ometric +Ġ3 23 +Ġcircum ference +ich ick +19 75 +ĠY uri +ĠSh all +ĠH over +un in +S pr +Ġg raft +ĠHapp iness +Ġdisadvant ages +att acks +Ġhub s +ĠStar Craft +é ĸ +Ġgall eries +ĠKor ra +Ġgrocer ies +ĠGors uch +Ġrap ists +Ġfun gi +ĠTyph oon +V ector +ĠEm press +b attle +4 68 +Ġparas ite +ĠBom ber +S G +ex ist +ĠP f +Ġun se +Ġsurge ons +B irth +ĠUn sure +ĠPrint ed +ĠBehavior al +ĠA ster +Pak istan +Ġun ethical +Ġs v +ĠIo T +Ġlay outs +P ain +Ġconst ants +ĠL W +ĠB ake +Ġtow els +Ġdeterior ation +ĠBol ivia +Ġblind ed +ĠW arden +ĠMist ress +Ġon stage +Ġcl ans +ĠB EST +19 60 +Ġant ique +Ġrhet orical +ĠPer cy +ĠRw anda +, . 
+B ruce +Ġtra umat +ĠParliament ary +Ġfoot note +id ia +ĠLear ned +se eking +gen ic +Ġdim ensional +H ide +èĢ ħ +Ġintrig ue +in se +Ġle ases +Ġapp rentices +w ashing +Ġ19 26 +V ILLE +Ġsw oop +s cl +Ġbed rooms +on ics +ĠCr unch +comp atible +Ġincap ac +ĠYemen i +ash tra +z hou +d anger +Ġmanifest ations +ĠDem ons +AA F +Secret ary +ACT ED +L OD +Ġam y +ra per +eth nic +4 17 +Ġpos itives +Ġ27 3 +ĠRefuge es +Ġus b +ĠV ald +odd y +ĠMahm oud +As ia +Ġskull s +ĠEx odus +ĠComp et +ĠL IC +ĠM ansion +ĠA me +Ġconsolid ate +storm s +ont ent +99 6 +Ġcl en +Ġm ummy +fl at +75 8 +ĠV OL +oter ic +n en +ĠMin ute +S ov +Ġfin er +R h +ly cer +Ġreinforce ments +ĠJohann es +ĠGall agher +Ġgym n +S uddenly +Ġext ortion +k r +i ator +T a +Ġhippocamp us +N PR +ĠComput ing +Ġsquare ly +Ġmod elling +ĠFor ums +ĠL isp +ĠKrish na +Ġ3 24 +Ġr ushes +Ġens ued +Ġcre eping +on te +n ai +il ater +ĠHorn ets +Ġob livious +IN ST +55 9 +Ġjeopard y +Ġdistingu ishing +j ured +Ġbeg s +sim ilar +ph ot +5 30 +ĠPark way +Ġs inks +ĠHearth stone +ib ur +ĠBat on +Av oid +Ġd ancer +Ġmag istrate +ary n +Ġdisturb ances +ĠRom ero +Ġpar aph +Ġmis chief +âĸ ĵ +ĠSh aria +Ġur inary +r oute +iv as +f itted +Ġeject ed +ĠAl buquerque +Ġ4 70 +Ġirrit ated +ĠZ ip +ĠB iol +à į +Ġden ounce +Ġbin aries +ĠVer se +Ġopp os +ĠKend rick +ĠG PL +Ġsp ew +ĠEl ijah +ĠE as +Ġdr ifted +so far +Ġannoy ance +ĠB ET +47 4 +ĠSt rongh +it ates +ĠCogn itive +oph one +ĠIdent ification +ocr ine +connect ion +Ġbox er +ĠAS D +ĠAre as +Y ang +t ch +ull ah +Ġdece ive +Comb at +ep isode +cre te +W itness +Ġcondol ences +ht ar +Ġhe als +Ġbuck ets +ĠLA W +B lu +Ġsl ab +ĠOR DER +oc l +att on +ĠSteven son +ĠG inger +ĠFriend ly +ĠVander bilt +sp irit +ig l +ĠReg arding +ĠPR OG +Ġse aling +start ing +Ġcard inal +ĠV ec +ĠBe ir +Ġmillisec onds +we ak +per se +Ġster ile +ĠCont emporary +ĠPh ant +ĠCl o +Ġout p +Ġex iled +Ġ27 7 +Ġself ie +Ġman ic +Ġn ano +ter ms +Alex ander +Ġres olves +Ġmillenn ia +Ġexpl odes +Ġconst ellation +Ġadul tery +m otion +D OC +Ġbroad casters +Ġkinderg arten +ĠMay weather +ĠE co +ich o +Ġ28 7 +l aun +Ġm ute +Ġdisc reet +Ġpres chool +Ġpre empt +De lete +ĠFre ed +P i +H K +Ġblock er +ĠC umber +Ġw rought +d ating +Ġins urer +Ġquot as +Ġpre ached +Ġev iction +ĠReg ina +ĠP ens +Ġsevent een +ĠN ass +D ick +Ġfold s +Ġd otted +ĠA ad +Un iversal +Ġp izz +ĠG uru +Ġso ils +Ġno vice +ĠNe ander +Ġst ool +Ġdeton ated +ĠPik achu +ĠMass ive +IV ER +ĠAb del +Ġsubdu ed +Ġtall est +Ġprec arious +Ġa y +r ification +ĠOb j +c ale +Ġun question +cul osis +ad as +igr ated +D ays +Ġque ens +ĠGaz ette +ĠCol our +ĠBow man +ĠJ J +ï ve +Ġdomin ates +Stud ent +Ġm u +Ġback log +ĠElect ro +Tr uth +48 3 +Ġcond ensed +r ules +ĠCons piracy +Ġacron ym +hand led +ĠMat te +j ri +ĠImp ossible +l ude +cre ation +Ġwar med +ĠSl ave +Ġmis led +Ġfer ment +ĠK ah +ink i +ke leton +cy l +ĠKar in +Hun ter +Reg ister +ĠSur rey +Ġst ares +ĠW idth +ĠN ay +ĠSk i +Ġblack list +uck et +Ġexp ulsion +im et +Ġret weet +vant age +Fe ature +Ġtro opers +Ġhom ers +9 69 +Ġconting ency +ĠW TC +ĠBrew er +fore ign +W are +S olar +Ġund ue +RE C +ulner able +path ic +ĠBo ise +Ġ3 22 +Ġarous ed +ĠY ing +ä¸ į +uel ess +Ġp as +Ġmor p +Ġfl oral +Ex press +ud ging +k B +ĠGr anted +Ø ¯ +ĠMich a +ĠGoth ic +ĠSPEC IAL +ĠRic ardo +F ran +Ġadminister ing +6 20 +por a +Ġ ® +Ġcomprom ises +Ġb itten +Ac cept +Th irty +Ð ² +Ġmater ially +ĠTer r +ig matic +ch ains +Ġdo ve +stad t +Mar vel +FA ULT +Ġwind shield +Ġ3 36 +ad ier +Ġsw apping +Ġflaw less +ĠPred ator +ĠMiche le +Ġprop ulsion +ĠPsych ic +Ġassign ing +Ġfabric ation +Ġbar ley +l ust 
+Ġtow ering +Ġalter cation +ĠBent ley +Sp here +Ġtun a +ĠClass es +Fre edom +un er +L ady +v oice +Ġcool est +or r +Ġpal p +$ { +Ġhyster ia +ĠMet atron +p ants +Ġspawn ing +Exper ts +ĠInvest ors +ĠAn archy +Ġshr unk +ĠVict im +Ġ28 9 +Ġec stasy +ĠB inding +58 5 +ĠMel ody +57 8 +ot ally +ĠE tsy +lig a +Ġapplaud ed +Ġswe ating +Ġredist ributed +Ġpop corn +Ġsem inal +f ur +ĠNeuro science +R and +ĠO st +ĠMadd en +ĠIncre asing +ĠDaw kins +ĠSub way +Ġar sen +cons erv +B UR +Ġsp iked +ĠLy ft +ĠImper ium +ĠDrop box +Ġfav oured +Ġencomp asses +gh ost +Ġins pires +Ġbur geoning +ĠY oshi +ĠVert ical +ĠAud itor +Ġint ending +Ġfilib uster +Bl oom +f ac +ĠCav s +ign ing +Ġcowork ers +ĠBarb arian +rem ember +FL AG +Ġaudit ory +ason ry +Col lege +Ġmut ed +gem ony +ob in +ĠPsych o +9 68 +Ġlav ish +Ġhierarch ical +ĠDr one +ou k +Ġcripp led +ĠMax im +Sl ot +Ġqu iz +ĠV id +if ling +Ġarchae ologists +Ġabandon ment +d ial +le on +ĠF as +T ed +Ġr aspberry +Ġmaneu vers +Ġbehavi ours +Ġins ure +Ġrem od +Sw itch +h oe +Ġsp aced +Ġafford ability +ĠF ern +not ation +ĠBal anced +Ġoccup ies +en vironment +Ġneck lace +Ġsed an +F U +ĠBrav o +Ġab users +ĠAn ita +met adata +ĠG ithub +ait o +ĠF aster +ĠWass erman +ĠF lesh +Ġth orn +r arily +ĠMer ry +w ine +Ġpopul ace +ĠL ann +Ġrepair ing +Ġpsy che +Ġmod ulation +aw aru +âĢĭ âĢĭ +ari j +Ġdecor ations +Ġapolog ise +ĠG arg +app ly +Ġgive away +ĠFl an +ĠWy att +U ber +Ġauthor ised +ĠMor al +HAHA HAHA +activ ate +Ġtorped o +ĠF AR +Ġam assed +ĠA ram +ark in +ĠVict ims +st ab +Ġo m +ĠE CO +Ġopio ids +Ġpurpose ly +ĠV est +Ġer g +at an +ĠSur gery +Ġcorrect ing +ĠOrt iz +ĠBe et +Ġrev oke +Ġfre eway +ĠH iggins +F ail +ĠFar ms +ĠAT P +h ound +Ġp oking +ĠCommun ists +mon ster +iment ary +Ġunlock ing +Ġunf it +we ed +en ario +at ical +ĠEnlight enment +ĠN G +ĠComp ensation +de en +ĠWid ow +ĠCind y +ĠAfter wards +Ġ6 000 +ikh ail +ag ically +Ġrat ified +Ġcasual ty +H OME +p sey +f ee +Ġspark ling +Ġd é +Ġconcert ed +C atal +Ġcomp lying +ĠA res +ĠD ent +Sh ut +Ġsk im +ad minist +Ġhost ilities +ĠG ins +Ġ6 08 +Ġm uddy +ĠMc Int +ĠDec ay +5 25 +Ġconspic uous +ĠEx posure +Ġresc ind +Ġwear able +Ġ3 28 +our met +ah s +ĠRob ots +Ġe clips +inst ance +ĠRE PORT +ĠApp l +0 30 +ĠSk ies +01 00 +Ġfall acy +S ocket +ĠRece iver +Ġsol ves +ĠButter fly +ĠSho pping +ĠFI RE +65 4 +Med ic +Ġsing ers +ĠNeed less +'' '' +isher s +ĠD ive +58 8 +Ġselect ively +Ġcl umsy +88 9 +Ġpurch aser +ear ned +ard y +Ġbenef iting +eng lish +Ġyield ing +ĠP our +Ġspin ach +Ġdel ve +ĠC rom +6 10 +Ġexport ing +ĠMA KE +Ġ26 3 +Ġg rop +Ġenv oy +ĠInqu iry +ĠLu igi +d ry +ĠT uring +Thumbnail Image +ĠVar iety +Ġfac et +Ġfl uffy +Ġexcerpt s +Ġsh orth +ĠOl sen +CL UD +Ġrel iant +ĠUN C +T our +Ġbat hing +Comp any +Ġglobal ization +P red +ĠMalf oy +Ġh oc +j am +craft ed +ĠBond s +ĠKiss inger +Eng land +Ġorder ly +cat entry +Ġ26 1 +Ġexch anging +ĠInt ent +ĠAmend ments +D OM +Ġst out +³³³³³³³³ ³³³³³³³³ +ĠAir bus +Ġ27 8 +hy de +P oll +Item ThumbnailImage +Ġlooph oles +ĠPill ar +Ġexpl or +St retch +A part +Ġun married +Lim it +ĠTransform ers +Ġintellect ually +unct ure +18 00 +Ġd arn +B razil +Ġleft over +ber us +f red +Mine craft +3 26 +ĠForm s +Ġproof s +ĠDes igned +Ġindex es +ĠSupp ose +EM S +ĠL oving +ĠBon nie +im ating +OT US +Ġconduct or +Ġbehav ed +ĠF ren +Ġsy nerg +Ġmillenn ium +Ġcater ing +ĠL auder +W r +ĠY iannopoulos +ĠAT F +Ġensl aved +Ġawaken ed +D VD +ĠED ITION +ĠConc ert +ĠChall enger +ĠH aku +umer ic +Ġdep recated +ĠSH AR +4 12 +Ġdy stop +Ġtremb ling +Ġdread ed +ĠSp ac +p adding +Re pl +ĠG arrison +M ini +Ġun paralleled +am ar +URR 
ENT +w reck +c ertain +t al +ĠC LS +app ings +Ġsens ed +Ġf encing +ĠPas o +ĠDes k +Ġsc off +Ġcontem plate +ĠL iga +l iquid +75 7 +Ġapp rentice +ĠUCH IJ +5 70 +ĠTh ousand +ĠIll um +Ġchampion ed +ãĤ Į +Ġelect ors +Ġ3 98 +ĠH ancock +round ed +ĠJ OHN +Ġuns atisf +Ġqual ifier +ĠGad get +EN E +Ġdead liest +ĠPl ants +Ġ ions +Ġacc ents +Ġtwe aking +Ġsh aved +F REE +ĠCh aser +Again st +9 60 +Ġmeth amphetamine +Ġnormal ized +Ġ$ \ +ĠPre cision +ĠGu am +Ġch oked +ĠX II +ĠCast ing +Tor rent +Ġscal p +ĠJagu ar +w it +Ġsem ic +ix ie +ĠG ould +Ġconf ines +N usra +ĠL on +ĠJ ugg +y cle +ĠCod ec +E gypt +Ġrest rain +ĠAl iens +Ġch oking +ĠD unk +ĠBell a +ab c +Ġsl ang +Ġneuro trans +s av +Ġempower ment +â ĨĴ +Ġclim bers +ĠM im +ĠF ra +ros se +Cap ital +ĠCth ulhu +Inter face +Ġprof icient +ĠIN TO +Ġ3 18 +ront al +5 80 +ĠDes pair +K enn +Ġscrim mage +ĠCo at +as ions +Ġwall paper +ĠJ ol +Ġresurg ence +Ġant iv +ĠB alls +² ¾ +Ġbuff ers +Ġsub system +ĠSt ellar +ĠL ung +A IDS +Ġerad icate +Ġblat antly +Ġbehav es +ĠN un +Ġant ics +ex port +DE V +w b +Ġph p +ĠInteg rity +Ġexplore r +Ġrev olving +auth ored +g ans +Ġbas k +Ġas ynchronous +å į +TH ING +69 8 +G ene +ĠR acer +ĠN ico +iss ued +Ġser mon +p ossibly +Ġsize of +Ġentrepreneur ial +ox in +ĠMin erva +Ġpl atoon +n os +ri ks +A UT +ĠAval anche +ĠDes c +ij 士 +ĠP oc +Ġconf erred +Î » +Ġpat ched +F BI +66 2 +Ġfract ures +Ġdetect s +Ġded icate +Ġconstitu ent +Ġcos mos +W T +Ġswe ats +Ġspr ung +b ara +s olid +Ġuns us +Ġbul ky +ĠPhilipp e +ĠFen rir +Ġtherap ists +ore al +^^ ^^ +Ġtotal ed +Ġboo ze +ĠR PC +Prosecut ors +Ġdis eng +ĠSh ared +Ġmotor cycles +Ġinvent ions +Ġlett uce +ĠMer ge +ĠJ C +Ġspiritual ity +ĠWAR NING +Ġunl ucky +ĠT ess +Ġtong ues +ĠD UI +T umblr +Ġle ans +Ġinv aders +Ġcan opy +ĠHur ricanes +ĠB ret +ĠAP PLIC +id ine +ick le +Reg arding +Ġve ggies +Ġe jac +ju ven +F ish +D EM +ĠD ino +Th row +ĠCheck ing +be ard +( & +Ġj ails +Ġh r +trans fer +iv ating +Ġfle ets +ĠIm ag +ĠMc Donnell +Ġsnipp et +Is a +ĠCh att +ĠSt ain +ĠSet FontSize +ĠO y +ĠMathemat ics +49 4 +Ġelectro ly +ĠG ott +ĠBr as +B OOK +ĠF inger +d ump +Ġmut ants +Ġrent als +Ġinter tw +Ġc reek +ail a +Bro ther +ĠDisc ord +pe e +raw ler +Ġcar p +Ġ27 9 +ãĤ· ãĥ£ +rel ations +Ġcontr asts +Col umn +Ġrec onnaissance +Ġun know +Ġl ooting +Ġregul ates +Ġopt imum +ĠChero kee +ĠA ry +Lat est +Ġroad side +Ġd anced +ĠUnic orn +A cknowled +Ġuncont roll +ĠM US +at io +ch ance +ha ven +VAL UE +Ġfavour ites +Ġceremon ial +b inary +pe ed +wood s +EM P +Ġv ascular +Ġcontempl ated +Ġbar ren +ĠL IST +Y ellow +ospons ors +Ġwhisk y +ĠM amm +ĠDeV os +min imum +H ung +44 2 +P ic +ĠSnap dragon +77 6 +Ġcar ving +Ġund ecided +Ġadvantage ous +Ġpal ms +ĠA Q +Ġst arch +L oop +Ġpadd le +Ġfl aming +ĠHor izons +An imation +bo ost +Ġprob abilities +ĠM ish +Ġex odus +ĠEditor ial +Ġfung us +Ġdissent ing +ĠDel icious +rog ram +ĠD yn +d isk +t om +Ġfab rics +ĠC ove +ĠB ans +Ġsoft en +ĠCON S +Ġin eligible +Ġestim ating +ĠLex ington +pract ice +of i +Ġshe dding +ĠN ope +Ġbreat hed +ĠCorinth ians +y ne +ek i +B ull +Ġatt aching +reens hots +Ġanaly se +ĠK appa +Ġuns ustainable +Ġinter pol +ank y +he mer +Ġprot agonists +Ġform atted +ĠBry ce +ĠAch illes +ĠAb edin +sh ock +Ġb um +b os +qu a +ĠW arn +q t +ĠDi abetes +8 64 +ĠIn visible +Ġvan ish +Ġtrans mitting +Ġmur ky +ĠFe i +Ġawa ited +ĠJur assic +umm ies +Ġmen acing +g all +C ath +B uilt +ild o +ĠV otes +Ġon t +Ġmun itions +ĠFre em +ÃŃ n +Ġdec ency +lo pp +ie ved +ĠG ord +Ġun thinkable +ĠNews week +Ġ3 21 +He at +Ġpresent er +ji ang +Ġpl ank +ĠAval on +Ġben z +ĠR out +Ġslam ming +ĠD ai +ou 
ter +ĠCook ie +ĠAlic ia +ge y +Ġvan ity +Ġow l +á µ +t ested +ĠAw akens +Ġcan v +Ġblind ly +ĠRid ley +ĠEm ails +Requ ires +ĠSer bian +ograp hed +if rame +eter ia +Ġaltern ating +qu iet +Ġsoc iology +ĠUn lock +ĠCommun ism +Ġo ps +Ġatt ribution +Ġab duction +ĠAb ram +Ġsidel ined +ĠB OOK +Ġref ining +ĠFe eling +ĠOs lo +ĠPru itt +r ack +ang ible +Ġcaut iously +ĠM ARK +eed s +M ouse +ĠStep h +ĠP air +S ab +99 7 +ĠBa al +B ec +Ġcomm a +ĠP all +ĠG ael +Ġmisunder stand +ĠP esh +Order able +Ġdis mal +ĠSh iny +% " +Ġreal istically +Ġpat io +ĠG w +ĠVirt ue +Ġexhaust ing +wh atever +oph ys +y ip +4 18 +Ad just +ĠWa iting +ess on +ĠMaz da +ĠDo zens +Ġstream lined +Ġincompet ence +ĠM eth +Ġeth os +ON ES +Ġincent iv +Ġgr itty +ĠBut cher +Head er +Ġexp onential +à Ł +Ġcorrel ate +Ġcons ensual +s ounding +R ing +Orig in +Ġcon clusive +fe et +ac ly +ĠF ernandez +Buy able +Ġd ucks +aunt lets +Ġel ong +Ġ28 6 +Ġsim ul +G as +ĠK irst +Ġprot r +ĠRob o +ĠAo E +op ol +Ġpsych ologically +sp in +ilater ally +ĠCon rad +W ave +44 1 +ĠAd vertisement +ĠHarm on +ĠOri ental +is Special +Ġpresum ptive +Ġw il +ĠK ier +ne a +Ġp pm +Ġhar bour +ĠW ired +comp any +Ġcor oner +atur days +ĠP roud +ĠN EXT +ĠFl ake +val ued +ce iver +Ġfra ught +Ġc asing +Ġrun away +Ġg in +ĠLaure nt +ĠHar lem +ĠCur iosity +qu ished +Ġneuro science +ĠH ulu +Ġborrow er +Ġpetition er +ĠCo oldown +W ARD +Ġinv oking +conf idence +For ward +Ġst s +pop ulation +Delivery Date +Fil m +ĠC ov +quick Ship +quickShip Available +prim ary +isSpecial Orderable +inventory Quantity +channel Availability +BO X +ĠMulti player +ĠJen ner +77 8 +ĠM d +Ġ~ /. +M N +Ġchild ish +Ġantioxid ant +ĠChrom ebook +Ġ27 4 +Ġscreen play +Ġadvent urous +ĠRelations hip +respons ive +ming ton +Ġcorner stone +ĠF ey +F IR +Ġrook ies +ĠF eaturing +Ġorig inate +Ġelectro des +ant es +Ġscript ures +Ġgl ued +Ġdiscont ent +Ġaff licted +lay out +B rave +Ġm osa +ĠQuant ity +ĠH ik +w inner +H ours +Ġent ail +ĠCell s +olog ue +Ġv il +Ġpre acher +Ġdecor ative +d ifferent +Ġprejud ices +ĠSm oking +ĠNotting ham +so Type +Ġrhyth ms +ĠAl ph +bl ast +Ste el +ĠDaniel le +Ġstr ife +Ġrem atch +so DeliveryDate +ĠF ork +t rip +ol ulu +hes es +C G +ĠPOLIT ICO +ost a +ĠDr ift +é¾įå ¥ +é¾įå¥ ij士 +Ġvet ting +ĠJin ping +ĠRec ession +Min or +ĠF raud +enf ranch +Ġconven ed +ĠNA ACP +ĠMill ions +ĠFarm ing +ĠW oo +ĠFl are +rit o +imm igrant +Ġvac ancy +ĠHE AD +ĠV aj +eg al +ĠV igil +Stud y +Ġru ining +Ġr acks +Ġhe ater +ĠRand olph +ĠBr ush +ĠT ir +Ø ¨ +Ġc ov +% ] +Ġrecount s +ĠO PT +ĠM elt +Ġtr uce +Ġcas inos +Ġcrus ade +Ġcarn age +Ġstri pe +ĠK yl +Text ures +Ġ6 98 +Ġpro clamation +Ġgood ies +Ġ........ .. 
+pro claimed +P olit +Ġtop ical +Ġspecial ize +ĠA min +g m +Ġanch ored +Ġbear ings +s ample +ĠHigh land +ĠAut ism +Ġmerc enary +Ġinterview er +L ER +ĠSom ers +Ġembry o +ĠAss y +Ġ28 1 +ĠEd iting +ĠCh osen +6 60 +Ġp ci +ĠThunder bolt +BI LL +Ġchuck led +jri wal +h of +Ġearth ly +() { +ind ependence +Ġdisp ers +ĠV endor +ĠG areth +Ġp als +P enn +ĠSub mit +ic um +Th u +Ġcl andestine +Ġcann ibal +ĠCl erk +E Stream +gal itarian +âĻ ¥ +g ew +Ġhor rend +ĠL ov +ĠRe action +ocr in +Class ic +Ġecho ing +Ġdiscl osing +ĠIns ight +og un +ĠInc arn +upload s +pp erc +guy en +Ġ19 01 +ĠB ars +68 7 +Ġb ribes +ĠFres no +ur at +ĠRe ese +Ġintr usive +Ġgri pping +ĠBlue print +ĠR asm +un ia +man aged +ĠHeb do +Ġ3 45 +Ġdec oding +Ġpo ets +Ġj aws +ĠF IGHT +am eless +ĠMead ows +ĠHar baugh +Inter view +ĠH osp +ĠB RA +Ġdelet ion +m ob +W alker +ĠMoon light +ĠJ ed +ĠSoph ia +Ġus ur +Ġfortun ately +ĠPut ting +ĠF old +Ġsan itation +Ġpart isans +IS ON +B ow +ĠCON C +ĠRed uced +ĠS utton +Ġtouch screen +Ġembry os +âĢ¢âĢ¢ âĢ¢âĢ¢ +ĠK rug +com bat +ĠPet roleum +Ġam d +ĠCos mos +Ġpresc ribing +Ġconform ity +ours es +Ġplent iful +Ġdis illusion +ĠEc ology +itt al +Ġf anc +Ġassass inated +regn ancy +Ġperenn ial +ĠBul lets +Ġst ale +Ġc ached +ĠJud ith +ĠDise ases +All en +Ġl as +Ġsh ards +ĠSu arez +ĠFriend ship +inter face +ĠSupp orters +add ons +46 2 +ĠIm ran +ĠW im +Ġnew found +ĠM b +An imal +Ġd arling +and e +Ġrh y +ĠTw isted +pos al +yn ski +Var ious +× ľ +ĠK iw +uy omi +Ġwell being +ĠL au +an os +Ġunm ist +Ġmac OS +Ġrest room +ĠOl iv +ĠAir ways +Ġtimet able +9 80 +Ġrad ios +v oy +ias co +Ġcloud y +ĠDraw ing +Any thing +Sy ria +ĠH ert +st aking +Ġun checked +Ġb razen +ĠN RS +69 7 +onom ic +est ablish +Ġl eng +Ġdi agonal +ĠF ior +L air +ĠSt ard +Ġdef icient +jo ining +be am +Ġomn ip +Ġbl ender +Ġsun rise +Mo ore +ĠF ault +ĠCost ume +ĠM ub +Fl ags +an se +Ġpay out +ĠGovern ors +ĠD illon +ĠBan ana +N ar +Ġtra iled +Ġimperial ist +um ann +ats uki +4 35 +ĠRoad s +Ġsl ur +ĠIde ally +Ġt renches +C trl +Ġmir rored +ĠZ el +ĠC rest +Comp at +ĠRoll s +sc rib +ĠTra ils +omet ers +w inter +Ġimm ortality +il ated +Ġcontrad icts +un iversal +ill ions +ĠM ama +opt im +AT URE +Ġge o +et ter +ĠCar lo +4 24 +Ġcanon ical +ĠStrongh old +n ear +Ġperf ume +Ġorche stra +od iac +Ġup he +Ġreign ing +vers ive +Ġc aucuses +ĠD EM +Ġinsult ed +Ġ---- -- +ĠCr ush +Ġroot ing +ĠWra ith +Ġwh ore +Ġto fu +C md +ĠB ree +Ġ$ _ +Ġr ive +ĠAd vertising +Ġw att +ĠH O +Ġpersu asive +ĠParam eters +Ġobserv ational +ĠN CT +ĠMo j +ĠSal on +Ġtr unc +Ġexqu isite +ĠMar a +Ġpo op +ĠAN N +Ex c +ĠWonder ful +ĠT aco +Ġhome owner +ĠSmith sonian +orpor ated +mm mm +Ġlo af +ĠYam ato +ĠInd o +Ġcl inging +á s +Ġimm utable +h ub +Or ange +Ġfingert ips +ĠWood en +ĠK idd +ĠJ PM +ĠDam n +C ow +c odes +48 2 +Ġiniti ating +ĠEl k +ĠCut ting +Ġabsent ee +ĠV ance +ĠLil ith +G UI +Ġobsc ured +Ġdwar ves +ĠCh op +ĠB oko +Val ues +Ġmult imedia +Ġbrew ed +Reg ular +CRIP TION +ĠMort al +Ġa pex +Ġtravel er +Ġbo ils +Ġspray ing +Rep resent +ĠStars hip +4 28 +Ġdisappro val +Ġshadow y +Ġlament ed +ĠRe place +ĠFran ç +67 7 +d or +Ġunst oppable +Ġcoh orts +gy n +ĠClass ics +ĠAm ph +Ġsl uggish +ĠAdd iction +ĠPad res +Ġins cription +Ġin human +min us +ĠJere miah +at ars +Ter ror +ĠT os +ĠSh arma +ast a +c atch +Ġpl umbing +ĠTim bers +Sh ar +H al +ĠO sc +Ġcou pling +hum ans +Ġsp onge +Ġid ols +ĠSp a +ĠAdv ocate +ĠBe ats +lu a +Ġtick ing +Ġload er +ĠG ron +8 10 +Ġstim ulated +Ġside bar +ĠManufact urer +ore And +19 73 +Ġpra ises +ĠFl ores +dis able +ĠElect rical +ra ise +E th +Ġmigr ated +Ġlect urer +K ids +ĠCa 
vern +Ġk ettle +Ġgly c +ĠMand ela +ĠF ully +å§ « +FIN EST +Ġsquee zing +ĠRy der +amp oo +oreAnd Online +Inst oreAndOnline +Buyable InstoreAndOnline +Ġcommem orate +ĠRamp age +Aust in +ĠSh roud +ĠRu ins +9 15 +ĠK H +Ġwater front +ĠE SC +b aby +ĠC out +ĠEm blem +Ġequival ents +49 2 +Un ique +ĠNiet zsche +brow ser +Ġim itation +ĠWere wolf +ĠKir in +ac as +' ," +Ġà ¾ +Review ed +Ġc unt +Ġvo ic +ĠLen ovo +Ġbond ed +48 1 +Ġinhib itors +Ġendeav ors +ĠHav ana +ĠSt out +ĠJ olly +A ctor +*/ ( +Ġoccur rences +ĠT ens +Incre ased +ĠACT ION +Ġ ãĢĮ +ĠRank ings +ĠB reat +Ġ30 9 +D ou +Ġimpact ing +ĠDuc hess +pre fix +Q B +Ġsummon ing +Ġbest owed +ĠKe pler +ĠPOW ER +c ube +ĠK its +ĠG rip +Ġop ium +Ġrep utable +t oc +ich ael +ĠR ipple +Ġcaf é +ĠZ oom +ĠBur ma +Ġwa ive +Ġst alls +Ġdem eanor +inc erity +Ġfluor ide +ĠSH OULD +Par is +Ġlong ing +Ġpl at +Ġgross ly +Ġbull s +Ġshowc asing +ex pected +ĠG addafi +engine ering +Re peat +ĠK ut +Ġconce ivable +Ġtrim med +osc ope +ĠCand idate +ĠT ears +rol og +Lew is +S UP +Ġroad map +Ġsal iva +Ġtrump et +Jim my +Ġmirac ulous +Ġcolon ization +Ġam put +ĠGN OME +ate ch +D ifferent +ĠE LE +ĠGovern ments +ĠA head +ãħĭ ãħĭ +word press +L IB +ĠIn clude +ĠDor othy +0 45 +ĠColomb ian +Ġle ased +88 4 +Ġde grading +ĠDa isy +i ations +Ġbapt ized +Ġsurn ame +co x +Ġblink ed +ãĥ ¢ +Ġpoll en +Ġder mat +Ġre gex +ĠNich olson +ĠE ater +ç ľ +rad or +Ġnarrow er +Ġhur ricanes +Ġhalluc inations +r idden +ISS ION +ĠFire fly +Ġattain ment +Ġnom inate +Ġav ocado +ĠM eredith +Ġt s +Ġreve rence +Ġe uph +Ġcr ates +ĠT EXT +Ġ4 43 +Ġ3 19 +J SON +iqu ette +Ġshort stop +ic key +Ġpro pelled +Ġap i +ĠTh ieves +77 9 +Ġovers aw +Ġcol i +ĠNic ola +Ġover cl +ik awa +ĠC yr +Ġ38 4 +78 9 +ĠAll ows +10 27 +Det roit +TR Y +set up +ĠSocial ism +Sov iet +s usp +ĠAP R +ĠShut down +Ġal uminium +zb ek +ĠL over +GGGG GGGG +Ġdemocr acies +Ġ19 08 +ĠMer rill +ĠFranco is +gd ala +Ġtraff ickers +ĠT il +ĠGo at +Ġsp ed +ĠRes erv +Ġpro d +55 2 +Ġc ac +ĠUn iv +ĠSch we +Ġsw irling +ĠWild erness +ĠEgg s +Ġsadd ened +Ġarch aic +H yd +Ġexcess ively +B RE +Ġaer ospace +ĠVo ices +Cra ig +Ġign ited +In itially +ĠMc A +Ġhand set +Ġreform ing +Ġfrust rations +ĠDead pool +ĠBel ichick +ract or +ĠRagnar ok +ĠD rupal +ĠApp roximately +19 20 +ĠHub ble +arm or +ĠSar as +ĠJon as +Ġnostalg ic +Ġfeas ibility +Sah aran +Ġorb iting +Ġ9 70 +R u +Ġsh in +ĠInvestig ators +Ġinconsist encies +ĠP AN +B G +Ġgraz ing +Ġdetect ors +ĠStart up +ĠFun ny +ĠNa omi +Consider ing +Ġh og +ut f +ce mic +Ġfort ified +ĠFun ctions +Ġcod ec +nut rition +H at +" ! 
+micro soft +55 8 +ĠTh in +ĠA CE +Al ias +ĠO PS +p apers +P K +ãĢ İ +Ġimpro bable +N orthern +equ al +Ġlook out +Ġty res +ĠMod ified +ĠK op +Abs olutely +Ġbuild up +sil ver +Ġaud i +Ġgro tesque +ĠSab er +ĠPres byter +ON Y +Ġglac iers +ĠSho als +ĠK ass +ĠH RC +ĠNic ol +ĠL unch +ĠF oss +âĸ Ĵ +AD RA +ĠOne Plus +o ing +ground s +Ġincident al +Ġdatas ets +68 9 +ĠClarks on +Ġassemb ling +ĠCorrect ions +Ġdrink ers +Ġqual ifiers +Ġle ash +Ġunf ounded +ĠH undred +Ġkick off +T i +Ġrecon cil +ĠGr ants +ĠCompl iance +ĠDexter ity +Ġ19 06 +w arn +D allas +Max imum +n ard +av ia +be aut +ens itivity +tr ace +Ġpione ers +ĠF ract +ãĢ ı +Ġpre cept +Ġgloss y +ĠI EEE +Ac ross +Ġ6 80 +S leep +che on +Ġsatir ical +ĠMin otaur +ĠCla ude +Ġr é +ape go +Ġcar rot +ĠSem in +ino a +Ġz o +Ind ependent +Ġdiagn oses +ĠC ue +M AR +Ġrend ition +ĠK ik +Ġpath ology +Ġselect s +Link edIn +Ġass ay +ĠD res +Ġtext ual +post ed +IT AL +ĠM aul +N eal +Ġinter connected +Ġerr atic +ĠVir us +Ġ5 30 +Ġenvironmental ists +ĠP helps +Ġeng agements +ĠIN ST +Ġeconom ical +nox ious +Ġg earing +izz y +Ġfavor ably +ĠMcG ill +T erm +Ġh anged +Ġball park +ĠRe yes +Ġbe ware +ĠP sal +ĠMass acre +q i +Ġin accessible +acly sm +Ġfr ay +ill ac +Ġbitter ly +ĠCert ification +Mich igan +Ġir respective +al ore +Em pty +Ġendorse ments +Ġund et +f g +equ ipped +Ġmerc iless +ĠC ust +Ġimm ature +Ġvou cher +ĠBlack well +Ñ ı +h awk +dis ciplinary +ile e +ĠMak oto +ĠD ude +ãĥĩ ãĤ£ +Y ears +Ġin ver +Ġsh aman +ĠY ong +ip el +ell en +ĠCath y +br ids +Ġs arc +65 1 +N ear +Ġground work +Ġam az +Ġ4 15 +ĠHunting ton +hew s +ĠB ung +Ġarbit rarily +ĠW it +ĠAl berto +Ġdis qualified +best os +46 1 +Ġp c +Ġ28 4 +ro bat +Rob in +Ġh ugs +ĠTrans ition +ĠOcc asionally +Ġ3 26 +ĠWh ilst +ĠLe y +Ġspaces hip +cs v +Ġun successfully +ĠA u +le ck +ĠWing ed +ĠGrizz lies +. � +Ġne arer +ĠSorce ress +ĠInd igo +El se +8 40 +let es +Co ach +Ġup bringing +ĠK es +Ġseparat ist +Ġrac ists +Ġch ained +Ġabst inence +lear ning +Ġrein stated +Ġsymm etry +Ġremind ers +ĠChe vy +Ġm ont +Ġexempl ary +ĠT OR +Z X +Ġqual itative +ĠSt amp +ĠSav annah +ĠRoss i +Ġp aed +Ġdispens aries +ĠWall s +ĠCh ronic +Ġcompliment ary +ĠBeir ut +Ġ+ --- +igs list +Ġcrypt ographic +mas ters +ĠCap itals +Ġmax imal +Ġent ropy +Point s +Ġcombat ants +l ip +ĠGl ob +ĠB MC +ph ase +th ank +HT TP +Ġcomm uter +Ġ\( \ +.. / +ĠReg ener +ĠDO I +ĠActiv ision +Ġsl it +os al +RE M +Ġch ants +Y u +Ke ys +Bre xit +ĠFor ced +Ari zona +Ġsquad ron +IS O +ĠMal one +Ġ3 38 +Ġcontrast ing +Ġt idal +Ġlib el +Ġimpl anted +Ġupro ar +ĠC ater +Ġpropos itions +M anchester +ĠEuro s +it amin +G il +ĠEl ven +ĠSe ek +ĠB ai +Ġredevelop ment +ĠTown s +ĠL ub +! ", +al on +K rist +Ġmeas urable +Ġimagin able +Ġapost les +Y N +7 60 +Ġster oid +Ġspecific ity +ĠL ocated +ĠBeck er +ĠE du +ĠDiet ary +uts ch +ĠMar ilyn +Ġbl ister +ĠM EP +ĠK oz +ĠC MS +y ahoo +ĠCar ney +Ġbo asting +ĠC aleb +By te +read s +ad en +Pro blem +ĠWood ward +S we +S up +ĠK GB +Set up +Ġtac it +Ġret ribution +Ġd ues +ĠM ü +. ? 
+ä¸ Ń +p ots +Ġcame o +ĠP AL +educ ation +A my +like ly +g ling +Ġconstitution ally +ĠHam m +ĠSpe ak +Ġwid gets +br ate +Ġcra ppy +ĠI ter +Ġanticip ating +ĠB out +P ixel +ĠY ep +ĠLaur ie +Ġh ut +Ġbullet in +ĠSal vation +Ġch ats +ear able +Honest ly +AL TH +onse qu +c ult +isco very +ovy ch +Ġse lves +ĠSat oshi +S ounds +Ġconver gence +ĠRosen berg +19 74 +Ġnas al +Ġfull est +Ġfer ocious +x us +ist e +AM S +Ġlobb ied +Ġso othing +ĠGun n +t oday +0 24 +Ġinspir ational +ĠN BN +p b +g ewater +or ah +all owed +ĠCol iseum +Ġspecial izing +Ġinsane ly +ĠT ape +del ay +Ġt arn +ĠP ound +Ġmel anch +Ġdeploy ments +il and +Ġless en +Ġfur ry +ĠUE FA +Ġblood shed +ĠMe ier +ither ing +Ġhe irs +ĠJ aw +ax ter +ĠPublic ations +Ġal ters +int ention +ĠWinc hester +d etermination +ĠLif etime +th in +Mon ster +7 80 +Ġapprox imation +Ġsuper markets +ĠSecond s +or os +h uge +Ġb ribe +ĠLIM ITED +un ed +Ġmis interpret +ĠIn jury +Ġ3 67 +Ġthreshold s +ĠCarn ival +Ġgastro intestinal +Ġguid eline +Ġde ceived +f eatures +Ġpurported ly +ĠRon nie +ĠNew t +Ġsp acious +as us +Ġsuperhero es +ĠCyn thia +le gged +k amp +ch io +Ġth umbnail +ĠShir ley +ill ation +Ġshe ds +ĠZ y +E PA +Ġdam s +Ġy awn +n ah +ĠPe ggy +ĠE rie +ĠJu ventus +ĠF ountain +r x +don ald +al bum +ĠComp rehensive +Ġc aching +ĠU z +ulner ability +ĠPrinc iple +ĠJ ian +ing ers +cast s +ĠOs iris +ch art +t ile +ĠTiff any +ĠPatt on +ĠWh ip +Ġovers ized +J e +ĠCind erella +ĠB orders +ĠDa esh +M ah +Ġdog ma +Ġcommun ists +v u +Coun cil +Ġfresh water +Ġw ounding +Ġdeb acle +Ġyoung ster +Ġthread ed +ĠB ots +ĠSav ings +ãģ Ĥ +ol ing +oh o +Ġillum ination +M RI +Ġlo osen +tr ump +ag ency +ur ion +Ġmoment arily +ĠCh un +ĠBud apest +ĠAl ley +D isk +Ġaston ished +ĠCon quer +ĠAccount ing +h aving +ĠWe in +ĠAl right +Ġrev olver +Ġdel usion +Ġrelic s +Ġad herent +qu ant +Ġhand made +or io +Ġcomb ating +c oded +Ġquad ru +re th +N ik +ĠTrib al +ĠMyster ious +Ġin hal +ĠWin ning +ĠClass ification +ch anged +Ġun ab +Ġsc orn +icip ated +w l +ond uctor +Ġrein forcing +ĠChild hood +an ova +Ġadventure r +Ġdoctor al +ĠStrateg ies +Ġengulf ed +ĠEnc ounter +Ġl ashes +Crit ical +ric ular +ĠU TF +oci ation +check ing +ĠConsult ing +Run time +per iod +ĠAs gard +Ġdist illed +ĠPas adena +ĠD ying +ĠCOUN TY +Ġgran ite +Ġsm ack +Ġparach ute +ĠS UR +Virgin ia +ĠF urious +78 7 +ĠO kin +Ġcam el +ĠM bps +19 72 +ĠCh ao +ĠC yan +j oice +ef er +ĠW rap +ĠDeb ate +S eg +Ġfore arm +ĠIgn ore +Ġtim estamp +Ġprob ing +ĠNo on +ĠGra il +f en +Ġdorm ant +ĠFirst ly +ĠE ighth +ĠH UN +ĠDes ire +or as +Girl s +ĠDes mond +z ar +am ines +O AD +exec ute +Ġbo obs +ĠAT L +_ ( +Chel sea +Ġmasturb ation +ĠCo C +Ġdestroy er +ĠCh omsky +Ġsc atter +ĠAss ets +79 6 +ĠC argo +Ġrecept ive +ĠSc ope +Ġmarket ers +Ġlaun chers +Ġax le +ĠSE A +se q +ĠM off +f inding +ĠGib bs +Georg ia +extreme ly +N J +Ġlab orers +st als +Ġmed iation +ĠH edge +at own +Ġi od +des pite +v ill +J ane +ex istence +Ġcoinc ided +ĠUt ilities +ĠChe ap +Ġlog istical +Ġcul mination +ĠNic otine +p ak +F older +Ġrod ents +st uff +Ġlaw fully +Ġreper to +io ch +j j +Dial ogue +HH HH +lic tion +Look s +Ġ29 7 +Ġtur rets +ĠAb andon +Ġinc ess +ĠTraff ord +Ġcur led +Ġprefer ring +Ġprivat ization +Ġir resist +ĠP anda +ĠSh ake +ĠMc Gr +ãĥ Ħ +und ers +Ġdiscrim inated +Ġbart ender +I LE +Atl antic +Ġprop ensity +ĠW iz +ĠG im +con ference +Ġrein forces +G h +w agon +Ġe erie +F al +Ġhug ged +rac ist +R IC +F u +Ġf iller +ĠSt ub +Ġeng raved +ĠWrest le +Ġimagin ative +ĠPe er +ĠFact ors +an us +ĠDrac ula +mon itor +Ġrou ters +ib ia +ĠBoo lean +end ale +ĠSl aughter +ĠSh ack +R FC 
+ĠSpiel berg +S ax +ĠPH OTO +ĠCl over +ĠR ae +Dep ending +ĠMem or +ar am +Ġpier ced +Ġcur tains +v ale +ĠInqu isition +ĠP oke +Ġforecast ing +Ġcompl ains +S ense +ĠHer mes +isc overed +Ġb ible +ĠMor ph +Ġg erm +78 5 +D ON +Ġcon gen +Ġcr ane +ĠD PR +Ġrespect fully +R oom +ĠN aw +ĠDal ai +re ason +ĠAng us +Educ ation +ĠTitan ic +Ë ľ +Ġo val +un ited +Ġthird s +Ġmoist ur +ĠC PC +M iami +Ġtent acles +ĠPol aris +ex c +ex clusive +ĠPra irie +Ġcol ossal +ĠBl end +sur prisingly +ÃŃ s +Ġindo ctr +Ġbas al +ĠMP EG +und o +Spl it +Develop ment +Ġlan tern +19 71 +Ġprov ocation +Ġang uish +ĠB ind +ĠLe ia +duc ers +ipp y +conserv ancy +Ġinitial ize +ĠTw ice +ĠSu k +Ġpred ic +Ġdi ploma +Ġsoc iop +Ing redients +Ġhamm ered +ĠIr ma +Q aida +Ġglim ps +ĠB ian +Ġst acking +Ġf end +gov track +Ġun n +dem ocratic +ig ree +Ġ5 80 +Ġ29 4 +Ġstraw berry +ID ER +Ġcher ished +ĠH ots +Ġinfer red +Ġ8 08 +ĠS ocrates +O regon +ĠR oses +ĠFO IA +Ġins ensitive +Ġ40 8 +Recomm end +ĠSh ine +Ġpain staking +UG E +ĠHell er +ĠEnter prises +I OR +ad j +N RS +L G +Ġalien ated +Ġacknowled gement +ĠA UD +ĠRen eg +Ġvou chers +Ġ9 60 +Ġm oot +ĠDim ensions +Ġc abbage +B right +g at +ĠK lu +Ġlat ent +Ġz e +ĠM eng +Ġdis perse +Ġpand emonium +H Q +Ġvirt uous +ĠLoc ations +ee per +prov ided +Ġse ams +ĠW T +iz o +PR OV +Ġtit anium +Ġrecol lection +Ġcr an +Ġ7 80 +ĠN F +49 1 +64 2 +p acking +59 8 +text ure +Sp ider +fre edom +cipl ed +ĠTAM ADRA +âĻ ¦ +aut hent +ĠW ANT +r ified +Ġr ites +Ġuter us +k iss +Ġâī ¤ +Ġsk illet +Ġdis enfranch +ĠGa al +Comp an +Ġage ing +gu ide +B alt +Ġiter ator +Ġdiscretion ary +t ips +Ġprim ates +ĠTechn ique +ĠPay ments +az el +ĠR OCK +stant ial +0 60 +Ġd mg +ĠJack ets +ĠPlay off +Ġnurs ery +ĠSy mb +art on +Ġannex ation +Color ado +Ġco ils +ĠSh oes +âĦ¢ : +ĠRo z +COM PLE +ĠEve rest +ĠTri umph +J oy +G rid +à ¼ +process or +ĠPros per +ĠSever us +ĠSelect ed +r g +ĠTay yip +St ra +Ġski ing +Ġ? ) +Ġpe g +Tes la +Ġtime frame +Ġmaster mind +ĠN B +scient ific +ĠSh it +gener ic +IN TER +N UM +Ġst roll +ĠEn ix +ĠM MR +ĠE MS +m ovie +Ĥ ª +Ġminim izing +idd ling +Ġilleg itimate +Ġprot otyp +Ġpremature ly +Ġmanual s +obb ies +ĠCass idy +D EC +des ktop +Ġaer os +Ġscreen ings +Ġdeb ilitating +ĠGr ind +nature conservancy +Ġf ades +ter mination +assets adobe +F actor +Ġdefinitive ly +P oké +ap ult +ĠLaf ayette +C orn +ĠCor al +Ġstagn ant +T ue +Ġdissatisf action +G ender +Ġkid neys +ĠG ow +ĠDef eat +ĠAsh ton +Ġcart els +Ġfore closure +ĠExpl ore +stre ngth +ot in +Ġveterin arian +Ġf umble +Ġpar ap +ĠSt rait +r ils +Ġpr ick +ĠBerm uda +ĠAm munition +skin ned +Ġab ound +ĠB raz +Ġshar per +ĠAsc ension +Ġ9 78 +Ġpreview s +Ġcommun ion +ĠX Y +Ġph ony +Ġnewcom er +Ġ3 32 +." 
," +Ġredist ribution +Prot ect +ĠSo f +K al +Ġlip stick +w orst +Ġtang led +Ġretrospect ive +int eger +Ġvolunte ering +Ġ19 07 +Ġ -------------------- +ic hen +Ġunve iling +Ġsen seless +Ġfisher ies +\ - +Ġh inges +Ġcalcul us +My th +Ġund efeated +Ġoptim izations +Ġdep ress +Ġbill board +ĠY ad +ĠPy ramid +Is n +I de +Ġleg ion +ĠK ramer +ent anyl +Ġpenet rating +ĠHaw th +ĠPR ODUCT +ĠGer ard +ĠP act +ĠIn cluding +ĠEl ias +ĠEl aine +vis ual +Ġhum ming +Ġcond esc +ĠF asc +ä¸ Ĭ +Ġe galitarian +Ġdev s +ĠD ahl +O ps +D H +ĠB ounce +id ated +ald o +Ġrepublic an +Ġh amb +ĠS ett +ograph ies +CH APTER +Ġtrans sexual +Ġsky rocket +ans wer +Ġmark up +Ø ª +Ġhero ine +Comp are +ĠT av +Be ast +Ġsuccess ors +Ġna ïve +ĠBuck ley +st ress +me at +Ġdownload able +Ġindex ed +Ġsc aff +ĠL ump +ĠHom o +Stud io +In sp +Ġr acked +far ious +ĠPet ty +Ex ternal +Ġ19 09 +W ars +com mit +put ers +Ġun ob +ĠEr r +ĠE G +ĠAl am +ĠSiber ia +ĠAtmosp heric +IS TER +ĠSatan ic +trans lation +ĠL oud +tra umatic +l ique +Ġreson ate +ĠWel ch +Ġspark ing +ĠT OM +t one +Ġout l +Ġhandc uffed +ĠSer ie +8 01 +Ġland marks +ĠRee ves +Ġsoft ened +Ġdazz ling +ĠW anted +month s +Mag ikarp +Ġunt reated +ĠBed ford +M i +ĠDynam o +O re +79 5 +Ġwrong ful +Ġl ured +Ġcort isol +Ġve x +d rawn +ile t +Download ha +ĠF action +Ġlab yrinth +Ġhij acked +w aters +er ick +Ġsuper iors +ĠRow ling +ĠGu inness +Ġt d +99 2 +Ġune arthed +Ġcentr if +Ġsham eless +P od +ĠF ib +Ġ icing +Ġpredict or +Ġ29 2 +fore station +con struct +C and +@ # +Ġag itated +Ġre pr +OV A +Ġkn itting +ĠLim a +Ġf odder +68 4 +ĠPerson a +k l +7 01 +Ġbreak up +á ¸ +Ġapp alled +Ġantidepress ants +ĠSus sex +Har ris +ĠTher mal +ee ee +U pload +Ġg ulf +Ġdoor step +ĠSh ank +L U +ĠM EN +ĠP ond +s orry +Ġmis fortune +n ance +Ġb ona +M ut +Ġde graded +ĠL OG +ĠN ess +an imal +Ġa version +und own +Ġsupplement ed +ĠC ups +Ġ50 4 +Ġdep rive +ĠSpark le +Å Ĥ +ĠMed itation +auth ors +ĠSab an +ĠN aked +air d +ĠMand arin +ĠScript ures +ĠPerson nel +ĠMahar ashtra +Ġ19 03 +ĠP ai +ĠMir age +omb at +Access ory +Ġfrag mented +T ogether +Ġbelie vable +ĠGl adiator +al igned +ĠSl ug +M AT +Ġconvert ible +ĠBour bon +amer on +ĠRe hab +nt ax +Ġpowd ered +pill ar +Ġsm oker +ĠMans on +ĠB F +5 11 +ĠGood ell +ĠD AR +m ud +g art +Ġob edient +ĠTrans mission +ĠDon ation +8 80 +Ġbother ing +Material s +ãĤ ± +dest roy +Ġfore going +Ġanarch ism +ĠK ry +ice ps +Ġl ittered +ĠSch iff +Ġanecd otal +un its +Ġf ian +ĠSt im +ĠS OME +ĠInv aders +Ġbehaviour al +ĠVent ures +Ġsub lime +Ġfru ition +ĠPen alty +Ġcorros ion +¶ ħ +Ġlik ened +Ġbesie ged +ween ey +ĠCre ep +Ġlinem en +mult i +ic ably +ud der +Ġvital ity +Ġshort fall +ĠP ants +ap ist +H idden +ĠDro ps +med ical +Ġpron unciation +ĠN RL +Ġinsight ful +J V +ĠBe ard +ĠCh ou +Ġchar ms +Ġb ins +Ġamb assadors +ĠS aturdays +Ġinhib itor +ĠFr anch +6 01 +', ' +ĠCon or +art ney +ĠX peria +g rave +be es +ĠProtest ants +Ġso aking +ĠM andal +Ġph ased +Ġ6 60 +Ġsc ams +Ġbuzz ing +ĠItal ians +ĠLoren zo +ĠJ A +Ġhes itated +Ġcl iffs +ĠG OT +ingu ishable +Ġk o +Ġinter ruption +Z ip +Lear ning +Ġundersc ores +ĠBl ink +K u +57 9 +ĠAut ob +I RE +Ġwater ing +Ġpast ry +8 20 +Ġvision ary +ĠTempl ar +awa ited +Ġpist on +Ġant id +current ly +Ġp ard +Ġw aging +Ġnob ility +ĠY us +Ġinject ing +f aith +ĠP ASS +å º +Ġret ake +ĠPR OC +Ġcat hedral +b ash +Ġwrest lers +Ġpartner ing +Ġn oses +Ġ3 58 +Trans form +am en +Ġb outs +ĠId eal +ĠConstant in +Ġse p +ĠMon arch +att en +ĠPe oples +mod ified +Ġmor atorium +Ġpen chant +Ġoffensive ly +Ġprox ies +ok ane +ĠTaiwan ese +ĠP oo +ĠH OME +us ional +Ġver bs +ĠO man +vis ory 
+Ġpersu asion +Ġmult it +Ġsc issors +G ay +ow ay +oph ysical +l us +gn u +Ġap ocalyptic +Ġabsurd ity +Ġplay book +Ġautobi ography +I UM +Ġsne aking +ĠSim ulation +pp s +ell ery +Plan et +Ġright fully +Ġn iece +ĠN EC +ĠIP O +ĠDis closure +lean or +ous y +ST ER +Ġ28 2 +Cru z +Ch all +64 3 +ĠSurv ive +ĠF atal +ĠAm id +ap o +We apons +D EN +7 70 +ĠGreen wald +Ġlin en +al os +Ġpollut ants +ĠPCI e +k at +Ġp aw +ĠK raft +C hem +ĠTermin ator +Ġre incarn +Ġ] [ +ĠSe eds +Ġsilhou ette +ĠSt ores +Ġgro oming +ĠD irection +ĠIs abel +ĠBr idges +ðŁ ij +E ED +ĠM orsi +Ġval ves +ĠRank ed +ĠPh arma +ĠOrgan izations +Ġpenet rated +ĠRod ham +ĠProt oss +Ġove rest +Ġex asper +ĠT J +Ġ 000000 +Ġtrick le +Ġbour bon +WH O +Ġw retched +Ġmicrosc opic +Ġcheck list +Ġad orned +R oyal +Ad minist +ĠRet irement +ĠHig hest +We ather +ile ge +Ġincre ments +ĠC osponsors +Ġmas se +ĠS inn +r f +Ġh ordes +as sembly +75 4 +ĠNat asha +ĠTY PE +ĠGEN ERAL +Ġarr anging +Ġ40 7 +l ator +Ġg lean +Ġdisc redited +Ġclin icians +UN E +Ġachie ves +ĠEm erson +com plex += [ +Ġprincip ally +Ġfra il +p icked +Ġthan king +Ġre cl +ĠL AST +Ġsupp ressing +il ic +Ġantidepress ant +ĠLis bon +Ġth or +Ġsp a +Ġking doms +ĠPear ce +em o +Ġpl ung +Ġdiv est +Ġ ******************************** +b is +osp els +ad r +Sp irit +hall a +P ink +end ez +Ġresurrect ed +esc ape +ĠRosen stein +Ġge ological +Ġnecess ities +Ġcarn iv +ĠE lys +ĠBar ney +Ġ29 6 +dig y +ST ON +D OWN +Ġmil estones +Ġk er +Ġdismant ling +Ġre prim +Ġcross ings +19 45 +Ġpatri archy +Ġblasp hemy +Ġ3 59 +met ry +ĠOb esity +ĠDiff erences +bl ocking +ãĥķ ãĤ¡ +ich ita +ĠSab ha +ph alt +ĠCol o +ual a +effic ients +ĠMed ina +con sole +55 7 +ĠHann ibal +ĠHab it +ĠF ever +Ġthen ce +Ġsyn agogue +Ġessential s +Ġw ink +ĠTr ader +ID A +ĠSp oiler +ĠIceland ic +ĠHay ward +Ġpe ac +Ġmal ice +Ġflash back +Ġth w +Ġlay offs +L iquid +Ġtro oper +Ġh inge +ĠRead ers +Ph ill +ĠB auer +Cre ated +Ġaud its +ac compan +Ġunsus pecting +ier a +6666 6666 +Ġbro ch +Ġapprehend ed +ĠM alk +cer ning +ĠCod ex +O VER +M arsh +ĠD eng +ĠExp ression +Ġdisrespect ful +Ġasc ending +t ests +ĠPlaint iff +ster y +ĠAl ibaba +din and +ĠDem psey +Applic ations +mor al +Ġthrough put +Ġquar rel +Ġm ills +Ġhe mor +ĠC ASE +terror ist +st im +ifest yle +ro zen +CE PT +Ar k +u ci +lect ic +Ġirrit ating +she ets +A y +Ġrede emed +Ġhorn y +ĠTe ach +ĠS ear +dem ocracy +4 65 +ĠRest ore +Ġstand by +ĠP is +iff in +Ġsleep y +Ġextr ater +Ġcompl iments +Fram eworks +Ġinstall s +Ġb anging +sur face +found land +Ġmetaph ysical +Ġ28 3 +oul s +dev ices +Ar gs +ĠSac rifice +ĠMcC orm +es on +Cons ervative +ĠM ikhail +see ing +is ively +ĠRo oms +ĠGener ic +Ġenthusi astically +Ġgri pped +Ġcomed ic +ĠElectric ity +Ġgu errilla +Ġdec oration +ĠPerspect ive +Ġconsult ations +Ġun amb +Ġplag iar +Ġmagic ian +Ġe rection +ĠTour ism +or ied +ro xy +11 00 +T am +Ī è +Î ³ +× ª +ĠPred ators +Nit rome +Ġtelesc opes +project s +Ġun protected +Ġst ocked +ĠEnt reprene +nex pected +Ġwast ewater +V ill +Ġint imately +Ġi Cloud +ĠConst able +Ġspo of +Ġne farious +Ġfin s +Ġcens or +ĠMod es +ĠEs per +ar bon +Ġinter sections +Ġlaud ed +Ġphys i +Ġgener ously +ĠThe Nitrome +ĠTheNitrome Fan +Ġar isen +ĠÙ Ī +Ġg lands +ĠPav ilion +ĠGu pta +Ġuniform ly +Ġr amps +ri et +ĠWH EN +ĠVan essa +Ġrout ed +Ġlim p +ĠC PI +p ter +int uitive +Ġv aping +Ġexperiment ed +ĠOlymp us +ĠAm on +Ġsight ing +Ġinfiltr ate +ĠGentle man +Ġsign ings +ĠMe ow +ĠNav igation +che cks +4 33 +Ġel apsed +ĠBulg arian +esp ie +ĠS OM +d uring +Ġsp ills +anc a +ĠPly mouth +M AL +Ġdomest ically +ĠWater gate +ĠF AM +k illed +ed ited 
+ĠYour self +Ġsynchron ization +ĠPract ices +ST EP +Ġgen omes +ĠQ R +not ice +Ġloc ating +z in +Ġ3 29 +al cohol +Ġk itten +V o +Ġr inse +Ġgrapp le +ĠSc rew +ĠD ul +A IR +Ġle asing +ĠCaf é +Ġro ses +ĠRes pect +Ġmis lead +Ġperfect ed +Ġnud ity +Ġnon partisan +ĠCons umption +Report ing +Ġnu ances +Ġdeduct ible +ĠSh ots +Ġ3 77 +Ġæ ľ +ano oga +Ben ef +ĠB am +ĠS amp +if ix +Ġgal van +ĠMed als +rad ius +Ġno bles +Ġe aves +igr ate +K T +ĠHar bour +u ers +Ġrisk ed +re q +Ġneuro t +get table +ain a +Rom ney +Ġunder pin +Ġlo ft +ĠSub committee +ĠMong ol +b iz +Ġmanif ests +ass isted +ĠG aga +Ġsy nergy +Ġreligious ly +ĠPre f +ĠG erry +T AG +ĠCho i +4 66 +beh ind +ĠO u +Gold Magikarp +Ġhemor rh +R iver +Ġtend on +Ġinj ure +ĠF iona +Ġp ag +Ġag itation +|| || +ur an +ĠE SA +Ġest eem +Ġdod ging +Ġ4 12 +r ss +Ġce ases +ex cluding +Ġint akes +Ġinsert s +Ġemb old +ĠO ral +up uncture +4 11 +ĠUn ified +ĠDe le +Ġfurn ace +ĠCoy otes +ĠBr ach +L abor +Ġhand shake +Ġbru ises +Gr ade +éĹ ĺ +ĠGram my +ile en +St ates +ĠScandinav ian +ĠKard ash +8 66 +Ġeffort lessly +ĠDI RECT +ĠTH EN +ĠMe i +ert ation +19 68 +Ġgro in +w itch +Requ irements +98 5 +Ġroof s +Ġest ates +ĠH F +Ġha ha +Ġdense ly +ĠO CT +Ġpl astics +Ġincident ally +ĠTr acks +ĠTax es +Ġch anted +Ġforce ful +ĠBie ber +ĠK ahn +K ent +ĠC ot +lic ts +F ed +Ġhide ous +ĠVer d +ĠSynd icate +ĠIl legal +J et +ĠD AV +re asonable +c rew +Ġfundamental ist +Ġtruth ful +ĠJ ing +Ġl il +Ġdown ed +Ġen chanted +ĠPolic ies +ĠMcM aster +ĠH are +ides how +Ġpar ams +en cers +gorith m +Ġallow ances +Ġturb ulent +Ġcomplex ities +ĠK T +Ġ3 37 +ĠGen etic +F UN +D oug +t ick +Ġg igs +ument hal +Ġpatriarch al +Ġcal c +, ... +Ġc out +ĠGu an +Ġpath ological +ĠR ivals +Ġunder rated +Ġflu orescent +ĠJ iu +arna ev +ĠQu an +Ġ4 29 +Ġ ਠ+M ario +Con struct +ĠC itation +ĠR acial +ĠR SA +ĠF idel +Ġ3 95 +Person ally +C ause +à » +rad ical +in en +Ġvehement ly +ĠPap a +Ġintern ship +Ġfl akes +ĠRe ck +Luck ily +B ra +20 20 +rav ings +R N +W onder +Ser iously +Ġre usable +Ġpoll uted +ĠP eng +le igh +ind le +Ġcircuit ry +ĠMad onna +ĠB ART +Res idents +att ribute +Phil adelphia +Cl ub +Ġplan ner +Ġfr antically +Ġfaith fully +ĠTerrit ories +ĠL AT +ĠAnders en +an u +ĠP ARK +ĠS ora +i age +ĠPlay offs +ĠG CC +4 27 +Ġab norm +ĠL ever +Ġdisob edience +As ync +ĠShe a +V ert +Ġsk irts +ĠSaw yer +x p +Ġwors ening +Ġsc apego +ĠAng le +oth al +Ġtro ve +ĠSt y +ĠN guyen +mar ine +ide on +Dep ths +Bl og +ĠIll uminati +Ġtract s +Ġorgan ise +Ġo str +F s +Ġlever aging +ĠD aredevil +as ar +Ġl ang +Ġex termin +urs ions +ĠRom o +ãĤ¤ ãĥĪ +Ġcont ended +Ġencounter ing +ĠTable t +ĠAltern ate +sk ill +Ġswe ets +Ġco hesive +cap acity +Ġrep ud +Ġl izard +ro o +Ġpilgr ims +ĠR uff +ĠInstr ument +ĠLog o +uit ous +E H +Ġsales man +Ġank les +L ed +ĠPat ty +ud os +Own er +Ġdiscrep ancies +k j +M U +Ġuncond itional +Dragon Magazine +i ard +O ak +ĠConvers ation +be er +ĠOs aka +D elta +us ky +Ġsecret ion +Ġpl aza +Ġm ing +Ġde pletion +ĠM ous +ĠI TS +ĠH imal +ĠFle ming +Ġcyt ok +ĠH ick +Ġbat ters +ĠInt ellectual +6 75 +é r +IS ION +ĠQu entin +ĠCh apters +ih adi +Ġco aster +WAY S +ĠL izard +ĠY or +and ering +S kin +ha ust +ab by +Ġportray ing +Ġwield ed +d ash +Ġprop onent +Ġr ipple +Ġgrap hene +Ġfly er +Ġrec urrent +Ġdev ils +Ġwater fall +æĺ ¯ +go o +Text Color +Ġtam pering +IV ES +TR UMP +ĠAb el +ĠS AL +ĠHend ricks +ĠLu cius +b ots +Ġ40 96 +IST ORY +Gu est +ĠN X +in ant +Ben z +ĠLoad ed +ĠCle ver +t reatment +Ġta vern +Ġ3 39 +ĠT NT +ific antly +Tem perature +F el +Ġunder world +ĠJud ges +Ġ< + +Ġst ump +Ġoccup ancy +Ġab er +ĠF inder +) ", 
+ĠN unes +res et +in et +ect omy +Ġwell ness +ĠP eb +quart ered +and an +Ġneg atives +ĠTh iel +ĠCl ip +ĠL TD +Ġbl ight +Ġreperto ire +K yle +Ġqu er +ĠC es +Ġha pl +98 9 +ĠTh ames +isc opal +Des k +ivari ate +ĠEx cellence +found ation +Ġâ ĩ +X i +Ġmyster iously +esty les +Ġper ish +ĠEng els +ĠDE AD +09 0 +}} } +ĠUn real +Ġrest less +ID ES +orth odox +ĠInter mediate +Ġdin ners +ĠTr out +ĠSe ym +ĠHall s +og ged +Ġtraged ies +Ġdid nt +67 6 +Ġail ments +Ġobserv able +ĠV ide +ad apt +ĠD usk +Ġprofessional ism +ĠPres cott +ĠInd ies +p ox +ĠMe hran +W ide +Ġend emic +ĠPar an +B ird +Ġped als +ĠI U +ĠAdam ant +ĠH urt +Ġcorrel ates +urd en +Ġspons oring +cl imate +ĠUnivers ities +ĠK not +enn es +ĠDam ian +ĠAx el +S port +Ġbar b +ĠS no +sh own +ste en +ud ence +Ġnon violent +Ġhom ophobia +Ġbiom ass +ĠDet ail +Ġsrf N +ĠT une +accompan ied +I ENCE +Al bert +ĠMong o +z x +ĠCer berus +or bit +c ens +Ġsl ay +SH ARE +H Y +Ġb rawl +ĠPro be +Ġnonex istent +ĠClare nce +ĠBlack burn +Ġport als +ĠR ita +ĠRem ain +ĠLe vant +Ġtrick ed +ĠF erry +aver ing +ĠStraw berry +ĠAn swers +Ġhorrend ous +ĠA man +Supp lement +ĠT oad +Ġpe eled +Ġman oeuv +ĠU zbek +mond s +ĠH ector +Ġ40 2 +pe es +fix es +Ġd j +Ġres umes +Ġaccount ant +Ġadvers ity +Ġham pered +ĠL arson +Ġd oping +part s +H ur +Ġbe arded +Ġy r +ĠPlug in +å¥ ³ +Ġ/ ** +rol ley +Ġwaters hed +ĠSub mission +if lower +AS C +Ġcho ir +Ġsculpt ures +m A +incre asing +ai i +Ġsne akers +Ġconfront s +ĠEle phant +ĠEl ixir +Ġrec al +ĠT TL +w idget +ĠW ax +ĠGr ayson +Ġha irst +Ġhumili ated +ĠWAR N +app iness +ĠT TC +F uel +Ġpol io +Ġcomplex es +Ġbab e +ĠX IV +P F +). [ +P arts +Ġ4 35 +M eg +ĠY ards +ĠAL P +Ġy ells +Ġprin ces +Ġbull ies +ĠCapital ism +ex empt +FA Q +ĠSp onge +ĠAl a +Ġpleas antly +Ġbu f +Ġden ote +Ġunp ublished +Ġkne eling +asc a +Ġl apse +al ien +99 4 +Ġrefere es +ĠLaw yers +S anta +Ġpuzz ling +ĠProm etheus +ĠPh araoh +ĠDel ay +Ġfacilit ates +ĠC ES +Ġjew els +Ġbook let +ond ing +Ġpolar ization +ĠMor an +ĠSal ad +ĠS OS +ĠAdv ice +PH OTOS +IC AN +iat ures +ex press +ĠWonder land +ĠC ODE +ĠCL ASS +9 75 +Ġg rep +ĠD iesel +ĠGl ac +! ?" 
+Ġr m +o ine +disc rimination +ĠN urse +m allow +Ġv ortex +ĠCons ortium +Ġlarge Download +stra ight +augh lin +G rad +Ġpublic ized +ĠW aves +ĠRed d +Ġfest ivities +ĠM ane +ar ov +Ġfleet ing +ĠDr unk +ug en +C ele +Ġchromos omes +ĠD OT +-+-+ -+-+ +Ġbus iest +ĠBe aver +Sy rian +ĠK yr +k as +ĠCross Ref +19 50 +76 01 +Ġrepe aling +ĠWin ners +ĠMac ro +ĠD OD +bl ance +S ort +64 1 +Ġmet re +ĠD irk +Ġgo ggles +Ġdraw backs +Ġcomplain ant +Ġauthor izing +Ġantit rust +oper ated +Ġm ah +Ġexagger ation +Am azing +ĠSer aph +Ġha ze +w ow +Ġextingu ished +Ġcan yon +ĠB osh +Ġv ents +Ġsc rape +Cor rect +4 26 +Ġav g +Dem and +ĠâĪ ¼ +Ġmicrobi ota +"} ]," +ĠSt ev +B io +ĠPlan es +Ġsuggest ive +Ġdec ipher +ĠRefuge e +ĠKe jriwal +ĠGreen peace +Ġdecl ass +ĠSound ers +Ġth o +Ġdec rypt +Ġbr ushing +ĠJane iro +ip op +S i +8 77 +ĠGeoff rey +Ġc pu +ĠHaz el +Ġview points +Ġcris py +ĠNot ification +Ġsold er +ĠMod est +ĠHem isphere +Ġcass ette +in cludes +Ġident ifiers +ĠC ALL +in cent +T odd +ĠSwe ep +Ġ3 34 +b oss +Ġsm ir +gin x +Ġtown ship +Ġg rieving +ĠMos que +Net flix +AS ED +ĠMillenn ials +oc om +19 67 +Ġbold ly +s leep +Ġes che +arij uana +Ġsw irl +ĠPen al +Ġneglig ent +ĠStephen son +K ER +ĠZ oro +ris is +Ġlocal ization +ĠSeym our +ĠAng lic +red itation +prot ection +ĠPa ige +Ġo mit +ĠR ousse +ĠT ub +Ġinv itations +t ty +Ġm oss +ph ysical +C redits +Ġan archy +Ġchild care +Ġl ull +ĠM ek +ĠL anguages +lat est +ĠSan ford +Ġus ability +Ġdiff use +ĠD ATA +Ġsp rites +ĠVeget a +ĠProm otion +ãĥ¼ ãĤ¯ +rict ing +z ee +Tur kish +ĠTD s +pro ven +57 1 +Ġsmug glers +707 10 +Ġreform ed +ĠLo is +Ġun fl +ĠWITH OUT +ĠReturn ing +ann ie +ĠTom as +Fr anc +ĠProf it +ĠSER V +ĠR umble +ik uman +es an +Ġt esters +Ġgad get +Ġbrace let +ĠF SA +comp onent +Ġparamed ics +Ġj an +ĠRem em +ĠSk inner +Ġl ov +ĠQu ake +rom a +Ġfl ask +Pr inc +Ġover power +Ġlod ging +ĠK KK +ret te +Ġabsor bs +w rote +Ġ ," +K ings +ĠH ail +ĠFall ing +xt ap +ĠHel ena +ire ns +L arry +Ġpamph let +ĠC PR +G ro +ĠHirosh ima +Ġhol istic +". [ +Ġdet achment +Ġas pire +Ġcompl icit +ĠGreen wood +Ġresp awn +ĠSt upid +ĠFin ished +f al +b ass +Ġab hor +Ġmock ery +ĠFe ast +VID EO +Ġcon sec +ĠHung ry +P ull +ĠH ust +it ance +? 
ãĢį +) -- +ĠPar allel +con v +4 69 +ha ar +w ant +P aper +m ins +ĠTor o +ĠTR UMP +ĠR ai +D W +ĠW icked +ĠL ep +Ġfun ky +Ġdetrim ent +ios is +ache v +Ġde grade +im ilation +Ġret ard +Ġfrag mentation +Ġcow boy +ĠY PG +ĠH AL +Parent s +ĠS ieg +ĠStra uss +ĠRub ber +× IJ +Fr ag +Ġp t +Ġoption ally +ĠZ IP +ĠTrans cript +ĠD well +88 2 +M erc +ĠM OT +ãĥ¯ ãĥ³ +Ġhun ts +Ġexec utes +In cludes +Ġacid ic +ĠRespons ibility +ĠD umb +we i +And erson +ĠJas per +ight on +abs olutely +Ad ult +Ġpl under +Mor ning +ĠT ours +ĠD ane +Î º +ĠT EST +ĠG ina +Ġcan ine +aw an +Ġsocial ists +ĠS oda +Ġimp etus +ĠSupplement ary +oli ath +ĠKinn ikuman +mitted ly +second s +Ġorganis ers +Ġdocument aries +Vari able +GRE EN +Ġres orts +Ġbr agging +Ġ3 68 +Art ist +w k +bl ers +Un common +ĠRet rieved +Ġhect ares +Ġtox in +r ank +Ġfaith s +ĠG raphic +Ġve c +ĠL IA +Af rican +Ġard ent +end iary +L ake +ĠD OS +cient ious +ĠOk awaru +ĠAll y +ĠTim eline +D ash +ĠI c +contin ue +Ġt idy +Ġinstinct ively +ĠP ossibly +ĠOut door +ĠWould n +Ġl ich +ĠBr ay +ĠA X +Ġà ī +Ġ+ # +\ ' +Direct ory +ab iding +Ġf eral +ic ative +but t +Ġper verse +S alt +Ġwar ped +Ġnin eteen +Ġcabin ets +Ġsrf Attach +ĠSl oan +Ġpower ing +reg ation +F light +se vere +Ġst ren +Ġc og +ap ache +Ġâ Ŀ +Ġcaf eteria +p aces +ĠGrim oire +uton ium +Ġr aining +Ġcir cling +Ġlineback ers +c redit +Ġrep atri +ĠCam den +lic ense +Ġly ric +Ġdescript or +Ġval leys +Ġre q +Ġback stage +ĠPro hibition +ĠK et +Op ening +S ym +æĸ ¹ +Ġserv ings +Ġoverse en +Ġaster oids +ĠMod s +ĠSpr inger +ĠCont ainer +è » +ĠM ens +Ġmult im +Ġfire fighter +pe c +Ġchlor ine +Ð ¼ +end i +Ġsp aring +Ġpolyg amy +ĠR N +ĠP ell +Ġt igers +Ġflash y +ĠMad ame +S word +Ġpref rontal +Ġpre requisite +uc a +Ġw ifi +Ġmiscon ception +Ġharsh ly +ĠStream ing +ot om +ĠGiul iani +foot ed +Ġtub ing +ind ividual +z ek +n uclear +m ol +Ġright ful +49 3 +Ġspecial ization +Ġpassion ately +ĠVel ocity +ĠAv ailability +T enn +Ġl atch +ĠSome body +Ġhel ium +cl aw +Ġdi pping +XX X +Ġinter personal +7 10 +Ġsub ter +Ġbi ologists +ĠLight ing +Ġopt ic +Ġden im +end on +ĠC orm +Ġ3 41 +ĠC oup +Ġfear less +Ġal ot +ĠCliff ord +ĠRun time +ĠProv ision +up dated +lene ck +Ġneur on +Ġgrad ing +ĠC t +sequ ence +in ia +con cept +Ġro aring +ri val +ĠCaucas ian +Ġmon og +key es +Ġappell ate +Ġlia ison +EStream Frame +ĠPl um +! . 
+Ġsp herical +Ġper ished +Ġbl ot +Ġben ches +Ġ4 11 +Ġpione ered +Ġhur led +Jenn ifer +ĠYose mite +Ch air +Ġreef s +Ġelect or +ĠAnt hem +65 2 +Ġun install +Ġimp ede +Ġbl inking +Ġgot o +Dec re +A ren +Ġstabil ization +ĠDis abled +ĠYanuk ovych +Ġoutlaw ed +ĠVent ura +ten ess +Ġplant ation +Ġy acht +ĠHu awei +Ġsol vent +Ġgr acious +Ġcur iously +Ġcapac itor +Ġc x +ĠRef lex +Ph ys +ĠC f +pt in +cons ervative +Ġinv ocation +c our +F N +ĠNew ly +H our +As ian +ĠLe ading +ĠAer ospace +An ne +Ġpre natal +Ġdeterior ating +H CR +ĠNorm andy +ol ini +ĠAm bro +9 10 +Ġset backs +ĠT RE +Ġs ig +ĠSc ourge +59 7 +79 8 +Game play +Ġm sec +M X +Ġprice y +ĠL LP +aker u +Ġover arching +ĠB ale +Ġworld ly +Cl ark +Ġscen ic +Ġdisl iked +ĠCont rolled +T ickets +ĠE W +ab ies +ĠPl enty +Non etheless +Ġart isan +Trans fer +ĠF amous +Ġinf ield +ble y +Ġunres olved +ĠML A +ãĤ Ĥ +Cor rection +Ġdemocr at +ĠMore no +ro cal +il ings +Ġsail or +Ġr ife +h ung +Ġtrop es +Ġsn atched +ĠL IN +ĠB ib +ES A +ĠPre v +ĠCam el +run time +Ġob noxious +4 37 +Ġsum mers +Ġunexpl ained +ĠWal ters +cal iber +Ġg ull +ĠEnd urance +ä½ ľ +Ġ3 47 +Ir ish +Ġaer obic +Ġcr amped +ĠHon olulu +à © +us erc +ec ast +AC Y +ĠQu ery +ãĤ¹ ãĥĪ +Bet a +Ġsuscept ibility +ĠSh iv +ĠLim baugh +Ġà ĸ +ĠN XT +ĠM uss +ĠBrit ons +ES CO +EG IN +Ġ% % +Ġsec ession +ĠPat ron +ĠLu a +n aires +ĠJPM organ +us b +ocy te +Ġcouncill ors +ĠLi ang +f arm +Ġnerv ously +Ġattract iveness +ĠK ov +j ump +Pl ot +Ġst ains +ĠStat ue +ĠApost les +he ter +ĠSUP PORT +Ġoverwhel m +Y ES +Ġ29 1 +d ensity +Ġtra pping +M it +Ġf ide +ĠPam ela +atl antic +Dam n +Ġp ts +OP A +Ġserv icing +Ġoverfl owing +ul o +ĠE rit +t icket +light ing +ĠH mm +ãĥ¼ ãĥ« +im oto +Ġchuck le +4 23 +ãģ ķ +sh ape +Ġque ues +Ġanch ors +ãĤ¼ ãĤ¦ãĤ¹ +F er +Ġaw oke +Ġ6 66 +h ands +Ġdiver gence +Ġ50 5 +T ips +Ġdep ot +Ġske w +ĠDel iver +op ot +Ġdiv ul +ĠE B +uns igned +ĠUn i +X box +Ġfor ks +Ġ7 02 +å ¯ +Ġpromot ers +ĠV apor +Ġlev ied +sl ot +Ġpig ment +Ġcyl inders +C RE +Ġsn atch +Ġperpet ually +Ġl icking +ĠFe et +ĠKra ken +ĠHold en +ĠCLS ID +m r +Ġproject or +Ġden otes +Ġchap el +ĠTor rent +b ler +R oute +ĠDef endant +ĠPublisher s +ĠM ales +ĠInn ov +ĠAg ility +rit er +ty mology +st ores +L ind +Ġf olly +ĠZur ich +B le +Ġnurt ure +Ġcoast line +uch in +D omin +Ġfri vol +ĠCons olid +res ults +M J +Ġphyl ogen +Ġha uled +ĠW iley +ĠJess ie +ĠPrep are +ĠE ps +Ġtreasure r +I AS +Ġcolon ists +Ġin und +ĠWW F +ĠCon verted +6 000 +out side +ĠApp earance +ĠRel ic +ĠM ister +s aw +Ġresult ant +Ġadject ive +ĠLaure l +ĠHind i +b da +Pe ace +Ġreb irth +Ġmembr anes +Ġforward ing +Ġcoll ided +ĠCar olyn +K ansas +5 99 +ĠSolid GoldMagikarp +Be ck +Ġstress ing +ĠGo o +ĠCooper ative +Ġf s +ĠAr chie +L iter +ĠK lopp +J erry +Ġfoot wear +War ren +Ġsc ree +h are +Under standing +P ed +Ġanth ology +ĠAnn ounce +M ega +Ġflu ent +Ġbond age +ĠDisc ount +il ial +C art +ĠNight mares +Sh am +ĠB oll +uss ie +H ttp +Atl anta +Ġun recogn +ĠB id +Ġunder grad +Ġforg iving +ĠGl over +AAAA AAAA +4 45 +V G +pa io +kill ers +Ġrespons ibly +Ġmobil ize +Ġeffect ed +ĠL umin +Ġk ale +Ġinfring ing +ann ounced +Ġf itt +b atch +ĠT ackle +ĠL ime +ĠAP P +uke mia +Ġrub y +Ġex oner +ĠCas ual +0 70 +Ġpel vic +Ġautom ate +ĠK ear +ĠCoast al +Ġcre ed +Ġbored om +ĠSt un +ri ott +Ĥ İ +Ġregener ate +Ġcomed ians +ĠOP ER +Sp ons +id ium +on is +L ocated +05 7 +Ġsusp ense +ĠD ating +C ass +Ġneoc ons +ĠShin zo +Ġaw oken +ch rist +ĠMess ages +att led +ĠSpr ay +ĠSp ice +C W +Ġshield ing +ĠG aul +Am id +Ġparam ilitary +Ġmult if +ĠTan ner +il k +Ġgodd amn +g ements +Ġbe friend +m obi +Ġ3 88 +fold 
er +acc a +Ġins in +g ap +N ev +fif th +Ġpsychiat ry +b anks +TH IS +Ġhar b +ac qu +Ġfac ade +ĠPower Point +80 3 +Ġbl uff +Sh ares +Ġfavor ing +El izabeth +Ãį Ãį +Ġr anger +77 2 +ĠAr che +h ak +ĠGen etics +ĠF EMA +Ġev olves +Ġest e +ĠP ets +ĠM é +ĠInterest ing +ĠCanter bury +ch apter +ĠStar fleet +Sp anish +Ġdraw back +ĠNor wich +9 70 +n orth +ag anda +Ġtransform ative +ram ids +bi ology +ad ay +Ġpropag ation +ĠGam ma +ĠDen ise +ĠCalcul ator +ent imes +ĠB ett +Ġapp endix +ĠHD D +AK ING +Ġst igmat +Ġhol ster +Ġord inarily +Ch ance +ĠCont rary +Ġad hesive +Ġgather s +6 12 +re au +ony ms +ew ays +Ġindu ces +Ġinterchange able +se m +Wh it +Ġtr ance +Ġincorpor ation +ĠExt ras +Fin ancial +Ġawkward ly +ĠStur geon +ĠH Y +Norm ally +ĠEnd ing +ĠAss ist +enc rypted +Ġsub jug +Ġn os +Ġfan atic +C ub +C U +?" . +Ġirre versible +å Ĥ +03 1 +ĠH AR +sp read +ul ia += $ +Sc ope +L ots +Ġlif estyles +ol on +Ġf eds +Ġcongrat ulate +web kit +Ġindist inguishable +ĠSw ing +Ġcommand ments +qu ila +ab ella +m ethyl +ann abin +Ġo vere +Ġlob ster +ĠQU EST +ĠCONT IN +bern atorial +:::: :::: +ĠTra ve +ĠSam oa +AN I +75 2 +Ð ´ +userc ontent +ĠMod erate +y eah +ĠK itt +Ġwe e +Ġstuff ing +ĠInter vention +ĠD ign +Ġware houses +ĠF iji +Ġpel lets +Ġtake away +ĠT ABLE +ĠClass ical +col lection +Ġland fall +ĠMus cle +Ġsett les +ĠAD V +Ġ3 44 +L aura +Ġf ared +ĠPart ial +4 36 +oss ibility +ĠD aly +ĠT arant +ĠFu ji +am l +c ence +55 1 +ĠProced ures +ĠO CD +ĠU D +t in +Q UI +ach o +4 38 +Ġgl itches +Ġenchant ment +Ġcalcul ates +IR O +ĠH ua +alys es +ĠL ift +um o +Ġle apt +Ġhypothes ized +ĠGust av +it ans +VERS ION +æ ł +Rog er +Ġr and +ĠAd apter +Ġ3 31 +ĠPet ition +k ies +M ars +Ġunder cut +ze es +ĠLy ons +ĠDH CP +Miss ing +Ġretire es +Ġins idious +el i +> ) +. ãĢį +Ġfinal ists +ĠA ure +Ġacc user +Ġwas tes +ĠY s +ĠL ori +Ġconstitu encies +Ġsupp er +Ġmay hem +or ange +Ġmis placed +Ġmanager ial +Ġex ce +ĠCL I +Ġprim al +ĠL ent +Cry stal +h over +ĠN TS +end um +Ġd w +ĠAl c +n ostic +Ġpres erves +ĠTs arnaev +Ġtri pled +rel ative +Arc ade +k illing +ĠW EEK +ĠH anna +D ust +Com pleted +ģ « +Ġappro ves +ĠSur f +ĠLuther an +ven ants +Ġrobber ies +we ights +soft ware +at ana +ug al +Ġgrav y +ĠC ance +OLOG Y +ly ak +Ton ight +Ġunve il +Ġ19 04 +ĠMin ion +ent ious +st ice +pack ages +ĠG EAR +Ġg ol +ĠHutch inson +ĠProf ession +ĠG UN +ĠDiff erence +ĠTsuk uyomi +ĠLes bian +6 70 +Ġfug itive +ĠPlan etary +-------------------------------- ------------------------ +Ġacc rued +Ġch icks +Ġsto pp +Ġblock ers +C od +Ġcomment ers +ĠSomew here +ĠPhot ographer +the me +Ġmay oral +w u +Ġanten nas +Ġrev amped +ĠSubject s +it é +im ura +Ġentr ances +liter ally +Ġten ets +ĠO MG +ĠMP H +ĠDon key +ĠOff ense +Ġ" + +Sn ap +ĠAF B +Ġan imate +ĠS od +His panic +Ġinconsist ency +D b +F Y +Ex port +Ġa pe +Ġpear l +ib el +ĠPAC s +Ġ{ \ +Ġact u +ĠHS BC +camp us +Ġpay off +Ġde ities +ĠN ato +ou ple +Ġcens ored +ĠCl ojure +Ġconf ounding +en i +Ġreck on +op he +Ġspot ting +Ġsign ifies +Ġprop el +Ġfest ive +S uggest +Ġpled ging +ĠB erman +Ġrebell ious +Ġovershadow ed +Ġinfiltr ated +j obs +67 2 +Ġscal able +Ġdomin ion +ĠNew foundland +ĠMead ow +Ġpart itions +AM I +Ġsupplement ary +str ument +Ġhair y +Ġperpet uate +Ġnuts hell +ĠPot ato +ĠHob bit +Ġcur ses +Flo at +Ġquiet er +Ġfuel ing +Ġcaps ules +ĠL ust +ĠH aunted +Exec utive +Ġchild birth +G re +Ġrad iant +å İ +Ġm alls +Ġin ept +ĠWarrant y +Ġspect ator +E h +t hens +Ġculmin ating +æ © +ary a +ãĤ ® +ilit arian +ĠOR IG +ĠSp ending +pt ives +ĠS iren +ĠRec ording +ay ne +Ġv im +Ġspr ang +T ang +ĠM FT +mor ning +ĠWe ed +m peg 
+cess ion +ĠCh ung +7 30 +w arning +56 2 +handed ly +P oor +P olitics +: # +Ġp ian +Ġfec es +ĠDocument ation +Ġban ished +Ġ3 99 +ĠAR C +Ġhe inous +J ake +ĠAm ir +way ne +v re +os henko +Ġnotebook s +Ġfound ational +Ġmarvel ous +ixt ape +Ġwithdraw als +Ġh orde +ĠD habi +is able +ĠK D +Ġcontag ious +ĠD ip +ĠAr rows +Ġpronoun s +Ġmorph ine +ĠB US +68 2 +Ġk osher +fin ished +ĠInstr uments +Ġf used +yd en +ĠSal mon +F ab +aff ected +K EN +C ENT +Dom ain +Ġpoke mon +ĠDr inking +G rowing +ĠInvestig ative +ĠA ether +em i +Ġtabl oid +Ġrep ro +ĠNot withstanding +ĠBers erker +Ġdram as +Ġclich é +Ġb ung +ĠU RI +ĠD os +0 44 +Ġpast ors +Ġl s +Ġac rylic +aun ts +Ed ward +Ġmajor ities +B ang +Ġfield ing +ĠRepl acement +ĠAl chemy +pp ard +ĠRome o +ĠSan ct +ĠLav rov +ib ble +Inst ruct +Ġimp ractical +ĠPlay boy +ce phal +Ġsw aps +Ġk an +ĠThe o +Ġillust rating +Ġdismant led +ĠTrans gender +ĠG uth +UG H +Ġtriumph ant +Ġencomp ass +Ġbook mark +udd in +j er +Ġpred icate +ES H +Ġwhen ce +ĠAB E +Ġnon profits +Se qu +Ġdi abetic +Ġp end +Ġheart felt +sh i +Ġinter acts +ĠTele com +Ġbombard ment +dep ending +ĠLow ry +ĠAd mission +ĠBl ooming +ust ration +ene gger +B rew +Ġmol ten +ĠNer d +P IN +âĸ Ģ +ave ment +Ġtou red +Ġco efficients +ĠTray von +ans son +Ġsand y +t old +fl ows +Ġpop ulous +ĠT inder +ĠBl iss +R achel +Min imum +Ġcontest ant +ĠRed uce +ĠMor se +ĠGrass ley +ĠClick er +Ġexp r +Ġs incerity +Ġmar qu +Ġelic it +ĠPro position +ĠDemon ic +Ġtac os +G reek +Ġpost war +Ġin sofar +ĠP ork +Ġ35 2 +doctor al +walk ing +Ġmid term +ĠSam my +sight ed +ĠTR ANS +ic i +AL D +ĠUS L +ĠF ISA +ĠAm pl +ĠAlex andra +ine lli +Tr ain +Ġsign ify +ĠVers us +Ġob fusc +Ġk h +Ġagg ro +ĠRen ault +Ġ3 48 +5 18 +ox icity +0 22 +ĠTw ist +Ġgoof y +D ynamic +Ġbrief ings +m ight +8 99 +Ġderog atory +T ro +Ġfor ging +ĠKor an +ĠMar ried +ĠBuc s +Ġpal ate +ĠCon version +m able +4 13 +Ġ( _ +Ġs iph +ĠN EO +col lege +Ġmarg inally +Ġfl irt +ĠTra ps +ĠP ace +é »Ĵ +Ġgoalt ender +Ġforb ids +Ġcler ks +ĠT ant +ĠRobb ins +ĠPrint ing +Ġpremie red +Ġmagn ification +ĠT G +ĠR ouse +ĠM ock +odynam ics +Ġpre clude +ism o +ĠPul itzer +Ġaval anche +ĠK odi +rib une +ĠL ena +Elect ric +Ġref inery +Ġend owed +Ġcounsel ors +Ġd olphin +ĠM ith +Ġarm oured +hib ited +Beg in +ĠP W +O il +ĠV or +ĠShar if +ĠFraz ier +est ate +Ġj ams +Pro xy +Ġband its +ĠPresbyter ian +ĠPrem iere +t iny +ĠCru el +Test ing +Ġhom er +ĠV ERS +ĠPro l +ĠDep osit +ĠCoff in +Ġsemin ars +Ġs ql +ĠDef endants +Altern atively +ĠR ats +ç « +ethy st +' > +Ġiss uer +58 9 +Ġch aired +ĠAccess ories +man ent +Ġmar row +ĠPrim ordial +C N +Ġlimit less +ĠCarn age +Ġund rafted +q v +IN ESS +on ew +Ġco hesion +98 7 +Ġne cks +Ġfootball er +ĠG ER +Ġdetect able +ĠSupport ing +ĠCS V +oc ally +k Hz +Ġund e +Ġsh one +Ġbud ding +tra k +Stand ing +ĠStar craft +ĠKem p +Ben ch +Ġthw arted +ĠGround s +ath i +L isa +Dial og +ĠS X +V ision +Ġingen ious +Ù IJ +Ġfost ering +ĠZ a +ĠIn gram +Ġ" @ +N aturally +6 16 +0 35 +ĠF AC +H mm +55 4 +Ġacceler ator +ĠV end +Ġsun screen +Ġtuber culosis +rav iolet +ĠFunction al +ĠEr rors +ed ar +19 66 +ĠSpect re +ĠRec ipes +88 5 +ĠM ankind +L iverpool +Ġ| -- +Ġsubst itutes +ĠX T +w ired +Ġinc o +ĠAf gh +E va +ic c +S ong +K night +Ġdilig ently +ĠBroad cast +A id +Ġaf ar +ĠH MS +aton in +ĠGr ateful +Ġfire place +ĠOm ni +e uro +ĠF RE +ĠSh ib +ĠDig est +t oggle +Ġheads ets +Ġdiff usion +ĠSqu irrel +ĠF N +Ġdark ened +out her +Ġsleep s +ĠX er +gun s +Ġset ups +Ġpars ed +Ġmamm oth +ĠCur ious +g ob +ĠFitz patrick +ĠEm il +im ov +........ ..... 
+ĠB enny +Second ly +Ġheart y +Ġcons on +st ained +Ġgal actic +cl ave +Ġplummet ed +Ġp ests +Ġsw at +Ġrefer rals +ĠLion el +h oly +Ġunder dog +ĠSl ater +ĠProv ide +ĠAm ar +ress or +å Į +ong a +Ġtim id +Ġp iety +ĠD ek +Ġsur ging +az o +Ġ6 10 +Ġdes ks +ĠSp okane +ĠAn field +Ġwars hips +ĠCob ra +Ġar ming +clus ively +ĠBad ge +ag ascar +ĠPR ESS +ĠMcK enzie +ĠFer dinand +burn ing +Af ee +Ġtyr ann +ĠI w +ĠBo one +100 7 +ĠRe pt +Ċ Âł +Ġcar avan +ĠD ill +ĠBundes liga +Ch uck +Ġheal er +ãĥ¼ãĥ Ĩ +ĠH obby +Ġneg ate +Ġcrit iques +section al +mop olitan +Ġd x +Ġouts ourcing +ĠC ipher +t ap +Sh arp +Ġup beat +Ġhang ar +Ġcru ising +ĠNi agara +Ġ3 42 +ill us +ĠS v +Ġsubt itles +Ġsqu ared +Ġbook store +Ġrevolution aries +ĠCarl ton +ab al +Ut ah +Ġdesp ise +ĠU M +cons ider +aid o +Ġc arts +ĠT urtles +Tr aining +Ġhonor ary + ¢ +Ġtri angles +4 22 +Ġreprint ed +Ġgrace ful +ĠMong olia +Ġdisrupt ions +ĠB oh +Ġ3 49 +Ġdr ains +Ġcons ulate +Ġb ends +Ġm afia +ur on +ĠF ulton +m isc +Ġren al +Ġin action +ck ing +Ġphot ons +Ġbru ised +ĠC odes +og i +Ġn ests +ĠLove ly +ĠLib re +ĠD aryl +Ġ# ## +S ys +. ," +Ġfree zes +est ablishment +and owski +Ġcum bers +ĠSt arg +ĠBom bs +Ġleg ions +Ġhand writing +Ġgr un +ĠC ah +sequ ent +Ġm oth +ĠMS M +Ins ert +F if +Ġmot el +Ġdex ter +ĠB ild +hearted ly +Ġpro pe +ĠText ure +ĠJ unction +ynt hesis +oc ard +ĠVer a +ĠBar th +Ġμ g +Ġl ashed +Ġ35 1 +ĠZ amb +ĠSt aples +ĠCort ex +ĠCork er +Ġcontinu um +ĠWR ITE +unt a +rid or +Ġde ems +0 33 +ĠG OLD +p as +Ġrep ressive +ãĥĨ ãĤ£ +Ġbaff led +Sc ar +Ġc rave +Ġ ______ +Ġentrepreneurs hip +ĠDirector ate +Ġ' [ +Ġv ines +Ġasc ended +ĠGR OUP +ĠGood bye +Ġdo gged +ãĥ´ ãĤ¡ +Man ufact +Ġunimagin able +ri ots +ier rez +Ġrel ativity +ĠCraft ing +ra ught +ud en +c ookie +Ġassass ins +Ġdissatisf ied +ac ci +Ġcondu it +Sp read +ĠR ican +n ice +izz le +Ġsc ares +ĠWH Y +ph ans +5 35 +Ġprot racted +ĠKrist en +5 36 +ĠSc rib +ĠNe h +Ġtwent ies +Ġpredic ament +Ġhandc uffs +Ġfruit ful +ĠU L +ĠLud wig +Ġatt est +ĠBre aker +Ġbi ologically +ĠDeal er +Ġrenov ations +f w +ess en +Al ice +ĠHen ri +Ġun ilaterally +ĠS idd +h ai +ĠSt retch +S ales +Ġcumbers ome +ĠJ avier +Ġtrend y +Ġrot ting +ĠChall enges +Ġscra ps +Ġfac ets +ĠVer onica +ĠVer ge +ĠS ana +Al ien +ĠR ih +Ġrad ial +ect ar +Ġ6 30 +cl i +Mar ie +Ġwild fire +ĠCat o +h ander +Ġwait ress +Ġch ops +ĠS ECTION +Ġblunt ly +ĠCat alog +n ian +stud y +Ġpat rolling +ĠT enth +nex us +ĠN ON +op sy +Ġsc athing +s ie +Ġdeterior ated +V B +Naz is +Ġdep ictions +Ġauthent icated +ĠCon ce +k rit +Ġpromul g +ĠL ONG +U FC +ĠVis itors +ĠRec all +Ġrehab ilit +ĠSL I +Ġglac ier +ĠB ite +Ġ50 3 +Ġvom it +Ġfer mented +ĠKh alid +Ġgrad ed +ĠMag icka +ĠIch igo +power ful +ic ators +75 3 +Ġsh rew +Ġ35 6 +Ġlegal izing +Ġall otted +ĠArch demon +ith ing +igg urat +V OL +Le od +Ġo ily +Ġindu cing +Ġamy gdala +Ġadm ins +ĠAcqu isition +C AN +Ġsche matic +Ġmo an +ĠCamer oon +Ġt ink +Ġmer ry +Ġbutter flies +ĠGo ff +Ġworks pace +ĠCor ona +Ġj avascript +ĠD olphin +ĠCant or +4 64 +to e +AP S +ĠAg ing +Ġpadd ed +ĠZ heng +ĠHe ld +Ġest ranged +Ġ7 70 +. 
} +ĠDun ham +Ġsm okes +Ġcap itals +und ai +Sh in +ĠFound ing +Ġent itle +Ġcenter piece +D iscover +Ġthere to +al ert +ĠN ou +ĠAnaly st +l c +F H +FI ELD +ĠP OV +gr ay +Ġar cs +ĠH OT +Ġr s +Ġoblig atory +ĠArchitect s +ĠS ven +ĠF EC +0 200 +Christ mas +ĠAlban ia +rat om +58 7 +Ġhard ships +Ġaut os +ĠCharg es +Ġap es +Ġ3 76 +wal let +Ġintox ication +Ġgobl in +Ġ5 70 +++++++++ ++++++++ +ĠYel p +ĠMag netic +ĠBr iggs +R ail +Ġspawn s +ĠW iggins +Ġshowc ased +Ġres orted +ub en +Ġwh ipping +Ġim itate +Ġdigest ion +ĠUS PS +ĠG est +Ġye a +ĠT ight +ind al +ic as +` . +C AST +'' ; +ĠF et +opath ic +In valid +Ġregrett ed +Ġbro ccoli +ĠSc ores +e ve +Ġpost ings +Ġaccum ulating +Ġneed less +elf th +Ġmay ors +Ġsc rib +Ġanecd otes +Ġbot ched +ĠRib bon +ĠConstant ine +i uses +ess es +Ġdev ise +Comp ared +Ġp udding +Ġg arg +Ġev oke +79 7 +Ġdet ox +9 09 +ĠPie ces +ĠMcC artney +Ġmet ast +ĠK rypt +P OR +Ġt ending +ĠMerch ants +Pro of +ĠV arg +ĠPort able +ãĥ¼ãĥĨ ãĤ£ +B rain +25 00 +Ġfol iage +Ø ¹ +Ġment ors +ĠA ires +Ġminimal ist +Ġing ested +ĠTro jan +ĠQ ian +inv olved +0 27 +Ġer oded +RA FT +Ġbl urry +M ob +Ġbuff et +ĠFn atic +ae a +KN OWN +ĠIn it +s afety +en um +ACT ION +ĠCrus her +ĠD ates +Ġ ................ +c alling +ak ov +Ġvent ured +Ġ5 55 +au ga +H art +ĠA ero +M AC +Ġthin ly +Ġar ra +ST ATE +ild e +ĠJac qu +ĠFem ales +Ġthe orem +Ġ3 46 +Ġsmart est +ĠPU BLIC +ĠK ron +ĠB its +ĠV essel +ĠTele phone +Ġdec ap +Ġadj unct +ĠS EN +mer ga +Ġred acted +Ġpre historic +Ġexplan atory +ĠRun s +ĠUtt ar +ĠM anny +ĠAUTH OR +ĠUnle ashed +ĠBow ling +be ans +79 3 +Ġunivers es +Ġsens it +ĠK ung +re peat +ctr l +Ġp aced +Ġfull er +Cl ock +Ġrec omb +ĠF aul +ĠB unker +Ġpool ed +Ġan a +ĠM outh +LL OW +hum ane +Ġbull do +ĠMicha els +f am +Ġwreck ed +Ġport rays +ĠWh ale +ĠH es +Ġguess es +ĠBrow se +ĠL APD +Ġconsequ ential +ĠInn ocent +ĠD RAG +Ġtrans gress +ĠO aks +Ġtri via +ĠRes on +ĠA DS +-- + +ĠT oll +Ġgrasp ing +ĠTHE M +ĠT ags +ĠCon clusion +Ġpract icable +Ġho op +Ġunintention ally +Ġign ite +ĠM ov +ur ized +le hem +Ter min +Ġcolour ful +ĠLin ear +ĠEll ie +G y +Ġman power +Ġj s +Ġem oji +ĠSHAR ES +_ . +0000 7 +Ġsophistic ation +Ġunders core +Ġpract ise +Ġbl ob +op ens +Uk raine +Ke eping +Y C +J R +ult imate +Cl aim +Ġautom obiles +99 3 +ste el +Ġpart ing +ĠL ank +... ? +Ġ38 5 +Ġremem brance +Ġe ased +Ġcov ari +ĠS ind +Effect ive +Ġdisse mination +ĠMo ose +ĠCl apper +br ates +App ly +Ġinv is +Ġwors ened +âĢĶ - +Ġlegisl ator +ĠL ol +ĠRow e +Ġdealers hip +um ar +id ences +Ġinvestig ates +Ġc ascade +Ġbid der +ĠB EN +Iron ically +Ġpres iding +Ġd ing +Ġcontrad icted +Ġshut s +ĠF IX +Ġ3 66 +Dist rict +Ġsin ful +ĠChar isma +o ops +Ġtot ality +Ġrest itution +ĠOpt imus +ĠD ah +Ġcl ueless +urn ed +Ġnut rit +Ġland owners +Ġfl ushed +Ġbroad en +m ie +Ġprint ln +Ġn ig +ĠCorp us +J en +Ġprot o +ĠWik imedia +ĠPal o +C OR +Ġstory lines +Ġevangel icals +ĠDar rell +Ġrot or +ĠH W +sk illed +ery l +Ġbe gg +ĠBl umenthal +Ġwe aving +Ġdown wards +ĠJack et +ĠANG EL +Te chnology +Ġes oteric +alde hyde +Ġfur iously +Ġforeign er +We ak +CH O +ĠH ound +Exper ience +ĠPlay station +ĠM IA +ĠU ng +cl oth +ag all +Ġcal ming +iz ens +St ruct +ĠW itches +ĠCeleb ration +Ġ........ ...... 
+pt roller +ĠTC U +Ġb unny +ãĥ į +ut orial +Ġup scale +ĠSt a +ĠCol ossus +Ġchlor ide +ĠZ ac +ĠRe asons +ĠBrook ings +ĠWH ITE +][ / +ĠL ose +9 05 +Ġunders ide +ern els +Ġv ape +do zen +upp et +ĠST OP +mat ical +ĠStat ements +hed dar +P AC +Custom er +Ġmem os +ĠP J +end ars +ĠLim its +l augh +Ġstabil ized +ĠALE C +Y A +Up grade +al am +Ġtechn o +Ġan ew +fore seen +Ġcolleg iate +ĠPy ro +ĠD ism +Ġfront line +Ġammon ia +I U +Qu ite +John ny +ass in +G OP +ĠSt yles +ĠSovere ign +acter ial +5 49 +ĠR IP +ĠL ists +Ġ3 64 +ĠRece p +s ocket +ĠByr d +ĠCand le +An cient +Ġappell ant +en forcement +ace a +ans ki +Ġold s +88 6 +Ġsl urs +Ġem pires +Ġbuck le +Ġalien ation +ĠAber deen +Ġunic orn +Ġoverr iding +ĠL X +pp a +Ġdesp ised +ĠB ugs +ĠB ST +S outhern +5 33 +Ġhall mark +ĠPost er +Ġstem med +Ġprincip als +ĠT ECH +ĠSand wich +It aly +Ġche esy +ĠSet TextColor +ĠProt ective +ĠC ohn +J O +apt op +Re ason +Lead er +ĠUnder stand +ĠFr idays +ĠContin uous +Ġcl ipping +ĠR ye +Ġber th +tim er +ann is +re act +Ġbuff alo +ĠPar as +Ġ6 55 +Ġpres ided +ĠSun rise +Ġve ts +Ġcl oves +ĠMcC ull +Stre ngth +G AN +Ġill iter +ĠPric ing +l é +Ġresist or +Ġbr un +ĠSuff olk +Ñ ĭ +ĠL iver +Re leased +Ġwhat s +8 60 +ĠMe asures +Ġden ouncing +ĠRy zen +Ġsou ven +Ġcareg ivers +ch ini +ĠScar lett +Ġt rough +Cong ratulations +Ġtax is +ĠTrad ition +j it +Ġtable top +Ġhither to +Ġdis information +off ensive +h ra +ĠDISTR ICT +Ġcompl icate +chen ko +ĠRecon struction +Ġpalp able +Ġa usp +Ġ4 28 +Ġshowc ases +ĠPublic ation +know ledge +inn on +4 19 +Ġretri eval +and ers +Ġref ute +Ġinqu ired +g ur +Ġneg ativity +Ġcons erve +Ġafter life +Ġpres upp +ĠGill espie +Ġm t +ĠD N +T ap +Ġper pend +ĠS my +does n +Ġsp illing +Ġhyp ers +K ate +® , +ke pt +ĠP owered +Ġj a +ĠK lux +ard e +ab an +Ġ4 44 +Ġflatt ened +ĠImprove ments +urg a +ĠK und +Ġins cribed +Ġfac ult +Ġunpre pared +ĠCons umers +Ġsatisf ies +Ġpul monary +Ġinf iltration +Ġex ternally +Ġcongrat ulations +ag han +Ġair liner +Ġfl ung +Ġfly ers +G D +Ġsnipp ets +Ġrec ursive +Ġmaster ing +L ex +Ġovert ly +v g +Ġluck ily +Ġenc ro +ĠLanc et +ĠAbyss al +function al +Ġs ow +Ġsqu id +Ġnar ration +Ġn aughty +ĠHon our +ĠSpart ans +Ġsh atter +ĠTac oma +ĠCal ories +ĠR aces +Sub mit +Ġpurpose fully +w av +ĠY ok +F est +ĠG err +Met ro +Ġit iner +f amous +Ġ" { +in line +was her +Iss ue +ĠCL IENT +oz o +Vers ions +7 25 +ĠGl ock +Ġshield ed +ĠPC R +ENC Y +ĠWe ld +ĠSim pl +Ġredirect ed +ĠK ham +Ġ( > +Ġlab ou +Ġdi apers +ss l +Ġcell ar +organ isms +ore sc +ĠBer ks +did n +Sh ipping +C hest +Ġund one +Ġmillion aire +Ġc ords +ĠYoung er +appropri ately +Ġsequ els +u ve +ant icipated +Ġle wd +ĠSh irt +ĠDmit ry +V eter +Ġsl aying +ĠY ar +Ġcompl ication +I owa +ĠEric a +ĠBL M +g irlfriend +b odied +6 26 +19 63 +Ġintermedi ary +Ġcons olation +M ask +ĠSi em +ow an +Beg inning +Ġfix me +Ġculmin ated +Ġcon duc +ĠVolunte er +Ġpos itional +Ġgre ets +ĠDefin itions +Ġthink er +Ġingen uity +Ġfresh men +ĠMom ents +Ġ35 7 +ate urs +ĠFed Ex +s g +69 4 +Ġdwind ling +ĠBO X +sel age +Ġt mp +Ġst en +ĠS ut +Ġneighbourhood s +Ġclass mate +f ledged +Ġleft ists +Ġclim ates +ATH ER +ĠScy the +ul iffe +Ġs ag +Ġho pped +ĠF t +ĠE ck +ĠC K +ĠDo omsday +k ids +Ġgas ped +Ġmon iker +ĠL od +ĠC FL +t ions +r ums +fol ios +Ġm d +Ġunc anny +Ġtrans ports +ĠLab rador +Ġrail ways +Ġappl iance +ĠCTR L +æ Ģ +Pop ulation +ĠConfeder acy +Ġunb earable +Ġdors al +ĠIn form +op ted +ĠK ILL +Mar x +Ġhypoc ritical +q us +ĠN umerous +ĠGeorg ian +ĠAmbro se +ĠL och +Ġgu bernatorial +ĠX eon +ĠSupp orts +ens er +ee ly +ĠAven ger +19 65 +Ar my +Ġju xtap +Ġcho pping 
+ĠSpl ash +ĠS ustainable +ĠFin ch +Ġ18 61 +ict ive +at meal +ĠG ohan +Ġlights aber +ĠG PA +ug u +ĠRE PL +vari able +Ġher pes +Ġdesert s +ac iously +Ġsitu ational +week ly +ob l +Ġtext ile +ĠCorn wall +Ġcontrace ptives +ĠA ke +] - +ä¹ ĭ +: , +ĠW em +ĠB ihar +Ġ' . +Ġbe re +Ġanal ogue +ĠCook ies +Ġtake off +Whe el +Ġmaj estic +Ġcomm uting +0 23 +ĠCor pse +ass ment +min i +Ġgor illa +ĠAl as +ere e +Ġacquaint ances +ĠAd vantage +Ġspirit ually +Ġey ed +pm wiki +ĠE nder +Ġtrans lucent +Ġnight time +ĠIM AGES +5 45 +ĠK amp +ĠFre ak +Ġ ig +Port land +4 32 +ĠM ata +Ġmar ines +Ġh ors +ater asu +ĠAtt ribution +Ġ-------- - +Ġk ins +ĠBEL OW +++ + +Ġre eling +ol ed +Ġcl utter +ĠRel ative +Ġ4 27 +B US +Ġa vert +ĠChe ong +ĠA ble +ĠPry or +Develop er +Ġen cyclopedia +ĠUSA F +ĠG arry +Sp ain +Bl ocks +Ġexp osition +ĠGamer Gate +W OR +Ġstockp ile +Ġclot hed +ĠT one +ĠR ue +t umblr +Ġtreacher ous +Ġf rying +Ñ Į +ĠS ph +Ġrest raints +Ġemb odies +ĠG es +S afety +Ġnegoti ators +min ing +ĠAppalach ian +L OS +ĠJenn a +Ġpass ers +ç ĭ +sn ap +Ġshort en +creat or +Ġinn umerable +uther land +67 4 +ĠW OM +ĠAs cend +ĠArm ory +ĠTrans action +K ick +Ġsuit case +day Name +Ġwaste ful +mar riage +ĠMcC abe +ite ch +ĠO ss +Cl osure +ĠTreasure r +Ġindec ent +ĠD ull +Ġresid ences +19 59 +ĠS ettlement +Ham ilton +Ġself ies +ĠRank ing +ĠBark ley +ĠB ore +ĠW CS +ĠMar itime +ĠH uh +ĠForest ry +Ġcultiv ating +ĠBall ard +Ġg arrison +ĠSD L +9 30 +Ġnas cent +Ġirresist ible +Ġaw fully +\/ \/ +Ġequ ate +Ġanthrop ology +ĠSylv ia +Ġintest ine +Ġinnoc uous +cess ive +ag ra +ĠMet roid +G rant +8 55 +ģ ĸ +Ġ" _ +ãĥĥ ãĥī +Ġappra isal +ĠFred dy +04 6 +Ġ40 6 +Ġ18 30 +Ġd ocking +St atic +Ġp ont +ĠVolt age +ĠSt ead +ĠMort gage +ĠJon ah +Y L +CLASS IFIED +Ġas bestos +nik ov +Ġcoll agen +ĠOrb ital +P ocket +7 99 +Ġhy brids +inc hes +Ġinv oice +und y +Ġinequ alities +T rend +w ashed +B ALL +Ġluc id +ĠComment ary +Ġw itty +Br andon +Ġbru ising +Ġ6 20 +es cent +box ing +P OL +Ġ3 78 +R ect +Ġlic ences +ĠMcG ee +p ressed +D anny +Ġj ammed +ord inate +Ġle th +Ġdistingu ishes +ĠYam aha +IL S +ĠH ume +ĠC ategories +Rober ts +Ch art +Ġbeet le +ĠGra veyard +Ġ($ ) +o ÄŁ +Ġtw ilight +are lla +á ½ +Ġbooth s +ĠH HS +ĠFeld man +Ġexcav ation +Ġphilosoph ies +at ography +ĠGar age +te chnology +Ġunfor gettable +Ġver ifying +Ġsubord inates +E ls +Ġne b +G aming +EN A +ĠAchieve ment +it ters +ĠG abe +Ġd umps +for cer +Ġpo ignant +ĠM BA +ĠHe idi +ime i +Ġm ages +Ġliber ate +Ġcircum cised +ĠMer maid +ĠMat th +t ogether +ĠW ichita +Ġstore front +ĠAd in +V II +Four th +Ġexplore rs +W ER +Not able +Bro ok +m ens +F aith +-------- - +ĠJ ou +¬ ¼ +Ġpine apple +Ġam alg +el n +ark able +ĠãĤµ ãĥ¼ãĥĨãĤ£ +ĠãĤµãĥ¼ãĥĨãĤ£ ãĥ¯ãĥ³ +Ġov arian +ĠE choes +Ġhairc ut +Ġp av +Ġch illed +anas ia +Ġsty led +Ġd ab +ni per +Ġminister ial +ĠD UP +T an +Ġsul ph +ĠD eter +ĠBo hem +od an +Ġeduc ator +â ĵĺ +sp ir +Ch icken +ĠE leanor +Ġqu i +Ġheav iest +Ġgrasp ed +U RA +Ġcro oked +Jess ica +pro blem +Ġpred etermined +Ġman iac +Ġbreath s +ĠLauder dale +Ġh obbies +y z +Cr ime +Ġcharism a +d L +Ġle aping +Ġk ittens +Ang elo +ĠJ ACK +ĠSu zanne +Ġhal ting +ENT ION +Ġswall owing +ĠEarthqu ake +Ġeight eenth +ĠN IC +ĠIN F +ĠCons cious +Ġparticular s +circ le +7 40 +Ġbene volent +Ġ7 47 +Ġ4 90 +Ġr undown +ĠVal erie +ĠB UR +Ġcivil isation +ĠS chn +W B +ot ide +intern ational +Ġj ohn +Ġ19 02 +Ġpe anuts +Ġflav ored +k us +Ġro ared +Ġcut off +é £ +Ġorn ament +Ġarchitect ures +Ġ3 69 +ol or +ĠWild e +ĠC RC +ĠAdjust ed +Ġprov oking +land ish +Ġrational ity +Ġjust ifies +Ġdisp el +Ġa meric +ĠPol es +Ø © +Ġen vis +ĠD oodle 
+ä½ ¿ +igs aw +auld ron +Techn ical +T een +up hem +ĠX iang +Ġdetract ors +ĠZ i +ĠJournal ists +Ġconduc ive +ĠVolunte ers +Ġs d +Know ing +Ġtrans missions +ĠPL AN +ĠL IB +Ġall uded +Ġob e +Ġd ope +ĠGold stein +Ġwavelength s +ĠDest ination +nd a +ug i +Ġattent ive +ĠLe an +ral tar +Ġman g +mb uds +ak ings +b ender +Ġacc ol +Ġcraw led +N OW +Min nesota +Ġflour ished +ĠZ up +ĠSuper visor +ĠOliv ier +Ex cellent +Ġwid en +D one +Ġw ig +Ġmiscon ceptions +Cor p +W an +Ġvener able +ĠNot ably +ĠKling on +an imate +Bo ost +ĠS AY +miss ing +ibli ography +mel on +Ġpay day +Ø ³ +bo le +Ġve iled +ĠAl phabet +It alian +Ġever lasting +ĠR IS +ĠC ree +rom pt +Ġh ating +Ġgrin ning +Ġge ographically +OS H +Ġwe eping +ĠÂłĠÂłĠÂłĠÂł ĠÂłĠÂłĠÂłĠÂł +Ġimpe cc +Let ter +Ġblo ated +PL A +ĠFe in +Ġper sever +Th under +Ġa ur +ĠR L +Ġpit falls +âĸ º +Ġpredomin ant +Ġ5 25 +7 18 +AP E +7 14 +Ġfarm land +ĠQ iao +Ġv iolet +ĠBah amas +Ġinflic ting +ĠE fficiency +Ġhome brew +Ġundert ook +Ġcur ly +ĠHard ing +man ia +59 6 +Ġtem pered +Ġhar rowing +ĠP ledge +ĠFranken stein +è ª +M otion +Ġpredict ably +ĠExpl osion +oc using +er d +col o +FF ER +Ġback field +ĠV IDE +ue bl +N arr +ĠArg ument +Ġgen omic +Ġbout ique +Ġbatt ed +ĠB inary +Ġg amb +ĠRh ythm +67 3 +Ġa float +ĠOlymp ia +Y ING +Ġend if +is in +Ġwin ters +Ġsc attering +I v +D istance +Ġtr u +ĠCom fort +Ġne xus +Ġair flow +ĠByz antine +p ayers +con i +ĠB etsy +D eal +ĠN ug +ĠContin ent +red ibly +Ġoptim izing +al beit +Ġec static +ĠPro to +ç · +iv ot +âĸ Ħ +em p +rou nder +Ġcl out +ĠI ST +66 3 +ĠDoll ars +ĠD AC +Ġsubsc ribed +Ġrehears al +Ġam ps +ĠSh ang +es m +Ġspr inkle +Ġassail ant +ĠO o +ĠCoin base +T act +Ġret ina +Ġn uns +R ON +att o +Ġj ug +ĠSV G +Ġb ikini +ĠFI LE +ĠFound ers +ep ort +ĠK P +Ġrest ores +ĠTh ick +Ġash ore +Ġappro vals +R ender +M AG +G raham +ĠCort ana +ãĥ³ ãĤ¸ +ss h +or ians +ars ity +ĠInsp ired +u pper +Ġsign alling +Ġreb uke +Ġfl ares +Ġdownt ime +Stud ies +Ġstagn ation +ĠSequ ence +Ġgr unt +Ġass ures +ĠPL A +59 2 +Ġintra ven +d epend +Sus an +ĠManz iel +Man ia +Cont ract +Ġsl ams +Ġcult ured +Ġcred itor +L IST +ĠH UM +ĠChatt anooga +serv ed +Ġclo aked +ĠF TP +p owder +ĠSt ella +uct ive +Ġcheap ly +ĠMU CH +ĠGalile o +Ġsu ites +spe ech +Ġdeliber ations +ĠCh ips +« ĺ +Bal ance +ĠWyn ne +ĠAk ron +Ass et +Ġhon oured +Ġed ged +Like wise +anim ous +ĠW age +ĠEz ek +ad vertisement +ĠRT X +ĠM AD +Ġmigr ating +ĠS QU +Ġ4 75 +Ed ited +Ġshorth and +ĠBas ics +Ġcro tch +ĠEV EN +Ġv m +effic iency +Ġcal ves +ĠF rie +ĠBrill iant +Ġstri kers +Ġrepent ance +Ġarter ies +r l +B ed +h ap +Ġcrypt ography +ĠSab res +Ġ4 14 +vi ks +ih ara +aps es +T alking +Ġintertw ined +Ġdoc ks +Ġalle le +ĠArt ifact +ĠH IM +t orn +ç ķ +Ġop acity +ĠE ly +os uke +Ġn ipple +Ġhand written +ĠV K +ĠChamber lain +ĠLa os +ig raph +g row +Ġtr illions +Ġdescend ant +ĠSail or +as uring +Ġce ilings +ĠWare house +f lying +ĠGl ow +Ġn ont +Ġmiscar riage +Ġrig s +Ġmin istries +Ġelabor ated +Ġdel usional +ĠHum ane +Ġ3 79 +n ets +Ġblack out +add ers +Ġn p +ĠT ire +ro sc +Ġsub div +Ġlink age +Ġchron ological +ĠHER O +Ġres ettlement +ĠVin yl +Ġpast oral +ĠMob il +ĠBar bar +Co oldown +ĠF ritz +c riminal +re pe +Ġbell ig +ĠBre ed +Ġ4 18 +Ġsem blance +ij k +Ġcur tail +Ġclin ch +cont ained +ĠProm pt +ast on +Ġw i +Ġpursu its +5 15 +ĠGl oss +Ġfl ips +Ġcoup ons +Ġcl oning +ĠLike ly +Rem oved +ĠQu artz +r ices +ĠSpe ars +Ġp ious +Ġdep reciation +ĠD are +oun ces +am az +O nt +Ġp innacle +d ocker +0 26 +ĠW yr +ĠPro per +Ë Ī +n il +By tes +Ġseek er +t rial +Ġunf olds +ĠMar se +Ġextravag ant +ĠSurviv ors +RED ACTED +ĠSpeed 
way +ĠCra igslist +sub mit +ĠGener ations +Ġup holding +Ġblood stream +ĠMiss ions +ĠL awn +Ġlim bo +ene i +H uh +ĠWild cats +pre p +ĠMark us +ĠFor bidden +rit ic +IN O +Ġexhib iting +requ ent +ch uk +Ġhabit ual +ĠComp atibility +Dr ag +RIP T +uj ah +GR OUND +Ġdelinqu ent +Ġburn er +Ġcontempor aries +Ġgimm ick +load s +Ġno zzle +p odcast +ĠW ak +ĠStat en +ĠK uh +ãģ ĵ +inter rupted +Ġinv incible +ĠBurn ett +cig arette +ĠPeb ble +ĠTem porary +ĠMar ino +58 2 +Ġwast eland +ident ly +T x +Ġr ite +ĠPan asonic +ĠM iddles +ĠHort on +ae us +Ġc uring +Ġm ats +Ġadj ourn +Ġfears ome +pe z +bo ats +Ġpro pell +Ġconflic ted +ĠAng er +Ġinsurg ent +K arl +Ġco ales +Ġsouth western +Ġdis su +ĠO vert +******** **** +Ġbox ed +ĠBr une +aa a +Ġgard ening +ĠEng el +tr acks +Ġpur ified +Ġplace holder +ĠL ikes +Ġd an +G ab +Ġe ct +ĠF aw +ĠEl iot +Ġ' , +otrop ic +ĠRu in +hed on +Ġca ul +Ġa ft +ĠCad illac +gh a +ass ian +ud eb +ĠT ick +Ġadjust s +AR GET +5 37 +isc he +ant y +ĠFried rich +ĠBl izz +ĠA OL +Camp aign +Ġmamm al +ĠVe il +ĠK ev +ĠMaur it +ĠDam ien +N ation +E astern +Ġ{ : +Ġ= ================================ +Ġstereotyp ical +Ġatt ic +ĠCy borg +requ ire +Ġaward ing +ĠPap ua +bt n +b ent +B oo +Ġ( = +ĠX ander +ĠSomers et +Ġcatch y +Ġcert ify +STR UCT +Ġit al +Ġt ides +ĠBr ands +G ray +comp etitive +Ġcur ator +ĠD G +omin ium +ĠGM Os +ci ating +ĠCarm en +ow ard +Balt imore +Ġr gb +C u +Ġwip es +spe ll +IT NESS +Ġsummar izes +ĠRe vis +Ġwhistlebl owers +ĠBre ach +Ġcro chet +k os +ews ki +Ġrep et +Ġcrim son +ĠKar achi +read able +dim ension +ĠI gor +ild ed +ĠZ ed +ĠKe ane +ĠCos metic +DE P +Ġretreat ing +ĠU A +ens ical +Ġd usk +ĠDick ens +Ġaren as +ĠPass age +level s +Ġcur v +P ope +Ġch ores +ĠEl ise +ĠComp ass +b ub +Ġmamm alian +ĠSans krit +ĠAN C +ĠCr ack +Q ual +L aun +amp unk +Ġlearn ers +Ġglam orous +Ġfur the +erm ott +c and +Gener ic +Ġnarr ated +Ġdisorder ly +ĠTrans actions +ĠDet ention +ĠR oku +Ä į +Ġunder statement +ĠS aur +ĠRodrig o +ĠAS AP +S in +Ġre joice +Method s +Ġelectro de +Ġworsh ipped +Ġid i +ĠPhys icians +Ġpop up +Ġde ft +ĠRem oval +ĠBu enos +ver bs +Ġfun k +ush a +rict ion +ore a +ĠBang alore +ĠKen obi +zz i +Ġnorm ative +Ġgobl ins +Ġcaf es +ĠUN CLASSIFIED +ĠF ired +S IGN +Ġs clerosis +ĠV oter +ĠSon ny +ĠExt end +ĠEV s +Ar senal +Ġp si +Ġwid est +ĠT us +Ġlo oms +Ġjust ifying +ĠGr anger +è ¯ +Ref er +58 3 +Ġflour ishing +ab re +Ġr ave +ĠCont ra +Ġ18 98 +Add s +Ġf ul +ĠCo oke +some one += # +67 1 +Ġy ak +Ġar te +ĠMis cellaneous +ĠDet ection +ĠCl ancy +â ģ +ass ies +Ġval iant +ĠFemin ist +cor ruption +V el +P ear +Ġsucc inct +Ġquick est +k w +Ġsp itting +ĠL ibraries +åħ ī +ant z +D ad +ĠSpec ifications +rup ulous +and r +RES ULTS +Ġsnow ball +Ġpred is +ĠB axter +ĠNurs ing +ĠCh aff +s we +Ġout age +Ġnest ing +Ġnotor iety +tr igger +on ite +j on +Ġf ou +ook ed +ĠCelebr ity +re ality +Ġfat ig +Ġhug ging +Ġbother s +ĠPan zer +ĠCh andra +fig ured +Ġvol ts +ĠCloud s +Ġfee ble +ĠCur ve +ĠAs us +78 6 +abs or +ĠV ICE +ĠH ess +Ġmanufact ures +Ġgri zz +ĠPower ful +ac id +Ġsub sections +ĠKrug man +ĠAl ps +is u +Ġsequ est +ĠUlt ron +ĠT inker +ĠGo ose +Ġmism atch +Att orney +Ġmorph ology +ĠSix ers +ut tered +ĠE LECT +gr an +Rus sell +ĠG SL +Ġfort night +Ġ. 
) +Ġapost le +pr one +el ist +Unt itled +ĠIm plementation +ist ors +Ġtank er +Ġpl ush +Ġattend ants +ĠT ik +ĠGreen wich +ĠY on +ĠSP L +cell s +unt led +S olution +ĠQu é +Ġvac ated +Ġupt ick +ĠMer idian +æ ĥ +ĠDr ill +9 25 +58 4 +Ġrenov ated +ĠKub rick +zy k +Ġl ousy +pp el +ohyd rate +ĠI zzy +lesi astical +CC C +ĠAj ax +Ġad apters +ĠPetra eus +Ġaffirm ation +ĠST OR +le ms +ad oes +ĠConstantin ople +Ġp onies +Ġl ighthouse +Ġadherent s +ĠBre es +omorph ic +Fight ing +Ġpl aster +ĠP VC +ĠOb st +Ġdear ly +ĠTo oth +icks on +Ġsh aming +P lex +A gg +ĠâĢ¦ " +Ġsub reddits +Ġpige on +ĠResident ial +ĠPass ing +Ġl um +ĠP ension +Ġpessim istic +Ġ4 32 +z inski +c ade +0 75 +Ġapolog ised +iy ah +Put ting +Ġgloom y +ĠLy me +=-=-=-=- =-=-=-=- +ĠT ome +ĠPsych iatric +ĠH IT +c ms +ap olog +Ġbreak er +Ġdeep en +Ġtheor ist +ĠHigh lands +Ġb aker +Ġst aples +Ġinterf ered +ĠAb ortion +jo ined +ch u +Ġform ulate +Ġvacc inations +Ġban ter +phe us +Ġoutfield er +ĠM eter +Ġ# #### +Ġ18 95 +Ġnarrow ing +ĠST ORY +f p +ĠC ST +ign ore +Ġproclaim ing +ĠR U +ĠB ALL +yn a +65 3 +Ġpos it +P RE +59 4 +ĠRegist rar +ĠPil grim +ic io +Ġpre tt +Ġlif eless +Ġ__ _ +Ne igh +ĠCh urches +orn o +Ġor cs +Ġkind red +ĠAud it +Ġmillenn ial +ĠPers ia +g ravity +ĠDis ability +ĠD ARK +W s +od on +Ġgrand daughter +ĠBro oke +ĠA DA +ER A +Ġpick ups +ĠWil kinson +ĠSh ards +ĠN K +Ġexp el +ĠKis lyak +Ġj argon +Ġpolar ized +ian e +Pub lisher +Ġreb utt +Ġapprehens ion +ĠK essler +Ġpr ism +F UL +19 64 +ĠL oll +ä ¿ +le thal +Å Ł +Ġg hetto +Ġb oulder +ĠSlow ly +ĠOsc ars +ĠInst ruction +ĠUl tr +ĠM oe +N ich +ĠP ATH +( * +ĠRE LEASE +un ing +rou se +en eg +Ġre imb +ĠDet ected +Do S +Ġster ling +Ġaggreg ation +ĠLone ly +ĠAtt end +hig her +Ġairst rike +ks on +SE LECT +Ġdef lation +ĠHer rera +C ole +rit ch +Ġadvis able +F ax +Ġwork around +Ġp id +mort em +ers en +Ġtyp o +Ġal um +78 2 +ĠJam al +script s +Ġcapt ives +ĠPres ence +ĠLie berman +angel o +Ġalcohol ism +ass i +Ġrec ite +Ġgap ing +Ġbask ets +ĠG ou +Brow ser +ne au +Ġcorrect ive +und a +sc oring +ĠX D +Ġfil ament +Ġdeep ening +ĠStain less +Int eger +Ġbu ggy +Ġten ancy +ĠMub arak +Ġt uple +ĠD roid +ĠS itting +Ġforfe it +ĠRasm ussen +ixt ies +es i +ĠKim mel +Ġmetic ulously +Ġap opt +ĠS eller +08 8 +ec ake +hem atically +T N +Ġmind less +Ġdig s +ĠAcc ord +ons ense +em ing +br ace +Ġe Book +ĠDist ribut +ĠInvest ments +w t +] ), +beh avior +56 3 +Ġbl inding +ĠPro testers +top ia +Ġreb orn +ĠKel vin +ĠDo ver +ĠD airy +ĠOut s +Ġ[ / +Ï Ģ +b p +ĠVan ity +ĠRec ap +ĠHOU SE +ĠF ACE +Ġ4 22 +69 2 +ĠAnt ioch +cook ed +Ġcoll ide +Ġa pr +Ġsle eper +ĠJar vis +Ġalternative ly +ĠLe aves +ĠM aw +Ġantiqu ity +ĠAdin ida +Ġab user +Poké mon +Ġass orted +ĠRev ision +ĠP iano +ĠG ideon +O cean +Ġsal on +Ġbust ling +ogn itive +ĠRah man +Ġwa iter +Ġpres ets +ĠO sh +ĠG HC +oper ator +Ġrept iles +Ġ4 13 +ĠG arr +ĠCh ak +Ġhas hes +Ġfail ings +Ġfolk lore +Ġab l +ĠC ena +ĠMac Arthur +ĠCOUR T +Ġperipher y +app ers +Ġreck oned +ĠInf lu +ĠC ET +Ġ3 72 +ĠDefin itive +ass ault +4 21 +Ġreservoir s +Ġd ives +ĠCo il +DA Q +Ġvivid ly +ĠR J +ĠBel lev +Ġec lectic +ĠShow down +ĠK M +ip ed +reet ings +ĠAs uka +L iberal +ĠÏ Ħ +Ġbystand ers +ĠGood win +uk ong +S it +ĠT rem +Ġcrim inally +ĠCirc us +ch rome +88 7 +Ġnan op +ĠOb i +ĠL OW +o gh +ĠAuth ors +ob yl +Ur ban +Ġt i +ĠWe ir +t rap +ag y +Ġparent heses +Ġout numbered +Ġcounter productive +ĠTob ias +ub is +P arser +ST AR +Ġsyn aptic +ĠG ears +Ġh iber +Ġdebunk ed +Ġex alted +aw atts +H OU +Ch urch +ĠPix ie +ĠU ri +ĠForm ation +ĠPred iction +C EO +Ġthro tt +ĠBrit ann +ĠMad agascar +ë ĭ +Ġbill boards 
+ĠRPG s +ĠBe es +complete ly +F IL +Ġdoes nt +ĠGreen berg +re ys +Ġsl ing +Ġempt ied +ĠPix ar +ĠDh arma +l uck +ingu ished +Ġend ot +Ġbab ys +05 9 +che st +r ats +Ġr idden +Ġbeet les +Ġillum inating +Ġfict itious +ĠProv incial +Ġ7 68 +Ġshe pherd +ĠR ender +Ġ18 96 +C rew +Ġmold ed +ĠXia omi +ĠSp iral +Ġdel im +Ġorgan ising +Ġho ops +ĠBe i +z hen +Ġfuck in +Ġdec ad +Ġun biased +am my +sw ing +Ġsmugg led +Ġk ios +ĠP ERSON +ĠInquis itor +Ġsnow y +Ġscrap ing +ĠBurg ess +P tr +ag ame +R W +Ġdro id +ĠL ys +ĠCass andra +Jac ob +Ġ35 4 +Ġpast ure +Ġfr anc +ĠScot ch +ĠEnd s +ĠI GF +def inition +Ġhyster ical +ĠBrown e +77 1 +Ġmobil ization +æ ķ +iqu eness +Th or +Ġspear headed +Ġembro iled +Ġconject ure +jud icial +Ch oice +Ġpaper back +P ir +Ġrec overs +ĠSur ge +ĠSh ogun +ĠPed iatrics +ãģ ł +Ġsweep s +ĠLabor atories +ĠP acks +al us +add in +Ġhead lights +g ra +Ev idence +COL OR +Ad min +Ĭ ± +Ġconco ct +s ufficient +Ġun marked +Ġrich ness +Ġdiss ertation +Ġseason ing +Ġg ib +ĠM ages +un ctions +ĠN id +che at +ĠTM Z +c itizens +ĠCatholic ism +n b +Ġdisemb ark +ĠPROG RAM +a ques +Ty ler +Or g +ĠSl ay +ĠN ero +ĠTown send +IN TON +te le +Ġmes mer +9 01 +Ġfire ball +ev idence +aff iliated +ĠFrench man +ĠAugust a +0 21 +Ġs led +Ġre used +ĠImmun ity +Ġwrest le +assemb led +Mar ia +Ġgun shots +ĠBarb ie +Ġcannabin oids +ĠTo ast +ĠK inder +IR D +Ġre juven +Ġg ore +Ġrupt ure +Ġbre aching +ĠCart oon +Ġ4 55 +ĠPale o +6 14 +Ġspe ars +ĠAm es +ab us +Mad ison +GR OUP +Ġab orted +y ah +Ġfel on +Ġcaus ation +Ġprep aid +Ġp itted +op lan +ĠShel ley +ĠRus so +ĠP agan +Ġwill fully +ĠCan aver +und rum +ĠSal ary +ĠAr paio +read er +ĠR ational +ĠOver se +ĠCa uses +Ġ* . +Ġw ob +Ke ith +ĠCons ent +man ac +77 3 +6 23 +Ġfate ful +et imes +Ġspir ited +ĠD ys +Ġhe gemony +Ġboy cot +ĠEn rique +em outh +Ġtim elines +ĠSah ara +ĠRel ax +ĠQuin cy +ĠLess ons +ĠE QU +SE A +N K +ĠCost co +Incre ase +Ġmotiv ating +ĠCh ong +am aru +ĠDiv ide +Ġped igree +ĠTasman ia +ĠPrel ude +L as +9 40 +57 4 +Ġch au +ĠSp iegel +un ic +-- > +ĠPhil ips +ĠKaf ka +Ġuphe aval +Ġsent imental +Ġsa x +ĠAk ira +ser ial +Mat rix +Ġelect ing +Ġcomment er +ĠNeb ula +ple ts +ĠNad u +ĠAd ren +Ġen shr +ĠR AND +fin ancial +ĠCly de +uther ford +Ġsign age +Ġde line +Ġphosph ate +rovers ial +f ascist +ĠV all +ĠBeth lehem +Ġfor s +Ġeng lish +S olid +N ature +Ġv a +ĠGu ests +Ġtant al +Ġauto immune +;;;;;;;; ;;;; +ĠTot ally +ĠO v +Ġdef ences +ĠCoc onut +Ġtranqu il +Ġpl oy +Ġflav ours +ĠFl ask +ãĤ¨ ãĥ« +ĠWest on +ĠVol vo +8 70 +Ġmicro phones +ver bal +R PG +Ġi ii +; } +0 28 +Ġhead lined +Ġprim ed +Ġho ard +ĠSh ad +ĠEN TER +Ġtri angular +Ġcap it +l ik +ĠAn cients +Ġl ash +Ġconv ol +Ġcolon el +en emy +G ra +Ġpub s +ut ters +Ġassign s +ĠPen et +ĠMon strous +ĠBow en +il ver +H aunted +ĠD ing +start ed +pl in +Ġcontamin ants +ĠDO E +ff en +ĠTechn ician +R y +Ġrob bers +Ġhot line +ĠGuard iola +ĠKau fman +row er +ĠDres den +ĠAl pine +E lf +Ġf mt +ĠS ard +urs es +g pu +Un ix +Ġunequiv ocally +ĠCitizens hip +qu ad +m ire +ĠS weeney +B attery +6 15 +Ġpanc akes +Ġo ats +M aps +ĠCont rast +mbuds man +ĠE PS +Ġsub committee +Ġsour cing +Ġs izing +ĠBuff er +ĠMand atory +Ġmoder ates +ĠPattern s +ĠCh ocobo +ĠZ an +ĠSTAT ES +ĠJud ging +ĠIn her +* : +Ġb il +ĠY en +Ġexh ilar +oll ower +z ers +Ġsn ug +max imum +Ġdesp icable +ĠP ACK +ĠAn nex +Ġsarcast ic +Ġlate x +Ġt amp +ĠS ao +b ah +ĠRe verend +ĠChin atown +ĠA UT +d ocumented +ĠGA BA +ĠCan aan +ĠÙ ħ +Ġgovern s +pre v +E sc +ĠEst imates +OS P +Ġendeav our +ĠCl osing +omet ime +every one +Ġwor sen +Ġsc anners +Ġdev iations +ĠRobot ics +ĠCom pton 
+Ġsorce rer +Ġend ogenous +Ġem ulation +ĠPier cing +ĠA ph +ĠS ocket +Ġb ould +ĠO U +ĠBorder lands +Ġ18 63 +G ordon +ĠW TO +Ġrestrict s +Ġmosa ic +Ġmel odies +ç Ħ +T ar +Ġdis son +ĠProv ides +Ġ ...... +b ek +F IX +Ġbro om +ans hip +Do ctors +Ġner ds +ĠReg ions +na issance +Ġmet e +Ġcre pt +pl ings +Ġgirlfriend s +kn it +ig ent +ow e +Ġus hered +ĠB az +M obil +4 34 +ĠPres ents +orig in +Ġins omnia +ĠA ux +4 39 +ĠCh ili +irs ch +G AME +Ġgest ation +alg ia +rom ising +$ , +c row +ĠIn spection +at omic +Rel ations +J OHN +rom an +ĠClock work +ĠBak r +m one +M ET +Ġthirst y +Ġb c +Ġfacult ies +R um +Ġnu ance +ĠD arius +ple ting +fter s +etch up +Reg istration +ĠK E +R ah +Ġpref erential +ĠL ash +ĠH H +Val id +ĠN AV +Ġstar ve +ĠG ong +z ynski +ĠAct ress +Ġw ik +Ġun accompanied +lv l +Br ide +AD S +ĠCommand o +ĠVaugh n +Wal let +Ġho pping +ĠV ie +Ġcave ats +Ġal as +if led +ab use +66 1 +Ġib n +Ġg ul +Ġrob bing +t il +IL A +Ġmit igating +Ġapt ly +Ġty rant +Ġmid day +ĠGil more +ĠDe cker +Ġ§ § +part ial +Ex actly +Ġphen otype +Ġ[+ ] +ĠP lex +ĠI ps +vers ions +Ġe book +Ġch ic +g ross +":" "},{" +ĠSur prisingly +M organ +Ġresid ues +ĠConf ederation +in feld +Ġl yr +mod erate +Ġperpend icular +V K +Ġsynchron ized +Ġrefres hed +Ġad ore +ĠTor ment +ol ina +Ġ26 00 +Item Tracker +Ġp ies +ĠF AT +ĠR HP +0 48 +ĠRES P +ĠB J +all ows +P and +Ġunw elcome +ĠV oc +ĠBast ard +ĠO W +ĠL AR +ĠHeal er +Environment al +ĠKen yan +ĠTr ance +ĠP ats +Ġali ases +ĠGar field +Ġcampaign er +Ġadvance ments +ĠOkin awa +ĠC oh +ows ky +Ġstar ved +Ġsize able +Ġ: -) +Ġm RNA +Ġsusp ensions +ist ar +Scot land +Pr in +-------------------------------- ---------------- +Ġ50 2 +Ġteasp oons +Ġ10 50 +Ġcoerc ive +ĠMason ic +edd ed +ĠPass enger +Ġl att +Ġbr aces +ĠSt eal +ĠNY T +ĠK ats +ĠCel est +ae z +T u +ĠCoul ter +ðŁ ĺ +Fl ickr +ĠWil mington +ith s +++ ; +Ġv ending +Ġneg ro +ĠPh i +ĠYellow stone +Call back +Ġsh ampoo +ĠSh ades +w at +Ġsuper human +Ġridic uled +Ġhol iest +om bo +Ġintern s +Ġh one +ĠPar agu +UR I +Ġd angling +ãĤ » +so v +ict ional +av ailability +Ġrev ocation +Ġd ow +in ic +ĠTHE IR +Ġis o +Ġout ings +ĠLeth al +Ġ) )) +Ġinacc ur +Ġout landish +Ġan us +let ico +id on +l ol +Ġun regulated +Ġsuccumb ed +Ġc uff +ĠWast eland +let al +Ġsub str +Ġcoff ers +Ġautom akers +ov i +ĠX ue +ĠDayton a +Ġjar ring +Ġf umes +Ġdisband ed +z ik +itt on +Ġstriking ly +Ġsp ores +Ad apter +.) 
: +ĠLynd on +ival ry +Ġor ally +Ġtumult uous +Ġdisple asure +Ġcon es +or rect +Ġappe ase +Ġder by +ĠTrip oli +ĠAl ess +Ġp oked +ĠGu ilty +v P +En ough +Ġorig inals +6 99 +Ġrabb i +Ġproverb ial +Ġpostp one +el ope +ĠMist y +Ġstaff ed +ĠUn employment +redit ary +Ġdilig ent +re comm +me asures +as in +8 25 +Ġpond s +Ġmm ol +ĠS AR +ĠC ARE +Ġ3 71 +Ġclen ched +ĠCors air +Ġcaric ature +z n +att ach +ĠSch ro +spe ak +p ainted +ĠS uc +ĠE NT +Ġcell ul +ĠP aid +di agn +WH ERE +Ġtext ed +B arn +Ġret racted +ĠRe ferred +S av +Ġup keep +Ġwork places +ĠTok ens +Ġampl ify +cl inical +Ġmult ic +mber g +Ġconvol uted +Reg ion +5 65 +ĠTop ic +Ġsn ail +Ġsal ine +Ġins urrection +ĠPet r +f orts +B AT +ĠNav ajo +Ġrud imentary +ĠLak sh +OND ON +Me asure +Ġtransform er +ĠGodd ard +Ġcoinc ides +ir in +R ex +ĠB ok +qu it +Ġshotgun s +Ġprolet arian +Ġsc orp +ĠAd a +5 14 +Ġsl ander +record ed +Ġemb ell +ris ome +Ġapolog izing +ĠMul cair +ĠGib raltar +Cl a +Ġall ot +ĠAtt ention +Ġ4 33 +le ave +Ġwh ine +ĠIss a +ĠFa ust +ĠBar ron +hen y +Ġvictim ized +J ews +Ġnurt uring +ett el +W inged +ĠSub tle +Ġflavor ful +ĠRep s +eng ed +call back +Ġdirection al +Ġcl asp +ĠDirect ions +plan et +icult ure +Hel per +ic ion +ac ia +Ġç ¥ŀ +Ġsur ges +Ġcan oe +ĠPrem iership +be en +Ġdef ied +ĠTro oper +Ġtrip od +Ġgas p +ĠE uph +ĠAd s +vern ight +high ly +R ole +Ġent angled +ĠZe it +6 18 +ĠRust y +Ġhaven s +ĠVaugh an +HA EL +ĠSER VICE +/ , +Ġstr icken +Ġdel usions +Ġb is +ĠH af +Ġgrat ification +Ġent icing +UN CH +Ad ams +ĠOL ED +ĠBeet le +Ġ18 99 +ĠSO FTWARE +ateg or +V L +ĠTot em +ĠG ators +AT URES +Ġimped ance +Reg istered +ĠC ary +ĠAer ial +on ne +en ium +Ġd red +ĠBe g +Ġconcurrent ly +Ġsuper power +ĠX an +j ew +imes ter +ĠDick inson +âĶ ģ +F la +Ġp ree +ĠRoll ins +© ¶æ +Ġden omination +ĠL ana +5 16 +Ġinc iting +sc ribed +j uries +ĠWond ers +app roximately +Ġsusp ending +Ġmountain ous +ĠL augh +oid al +N s +Det ect +) = +ĠL uthor +ĠSchwarz enegger +ĠMull er +ĠDev i +ec ycle +J ar +6 13 +ĠL ongh +B ah +ĠSP ORTS +n w +Ġref inement +Ġwater ways +Ġd iner +Bl ade +68 3 +F ac +Ġinitial s +Ġro g +Ġparan ormal +B UT +Ġ[ ( +ĠSw anson +ĠM esh +âĸ ¬ +Impro ve +ĠRad iation +ĠEst her +ĠE sk +ĠA ly +ik y +Ġir rad +ĠBuck ingham +Ġref ill +Ġ. 
_ +Re pe +CON CLUS +Ġdifferent iated +Ġchi rop +ĠAt kins +Pat tern +Ġexc ise +Ġcab al +N SA +ĠST A +ĠS IL +ĠPar aly +Ġr ye +ĠHow ell +ĠCount down +ness es +alys ed +Ġres ize +ãĤ ½ +Ġbudget ary +ĠStr as +w ang +Ġap iece +Ġprecinct s +Ġpe ach +Ġsky line +Ġ35 3 +pop ular +App earances +ĠMechan ics +ĠDev Online +S ullivan +Z en +Ġp u +op olis +5 44 +Ġde form +Ġcounter act +ĠL ange +Ġ4 17 +Con sole +77 4 +Ġnodd ing +Ġpopul ism +Ġhe p +Ġcoun selling +compl iance +U FF +Ġunden iably +Ġrail ing +ĠHor owitz +ĠSim one +ĠBung ie +Ġa k +ĠTal ks +x ff +fl ake +Cr ash +Ġsweat y +Ġban quet +ĠOFF IC +Ġinvent ive +Ġastron omer +ĠStam ford +ĠSc are +ĠGRE EN +olic ited +Ġr usher +Ġcent rist +ight ing +Ġsub class +Ġdis av +Ġdef und +ĠN anto +oci ate +m ast +Ġpac if +Ġm end +e ers +imm igration +ESS ION +Ġnumber ing +Ġlaugh able +ĠEnd ed +v iation +em ark +P itt +Ġmetic ulous +ĠL F +Ġcongrat ulated +ĠBir ch +Ġsway ed +Ġsemif inals +Ġhum ankind +m atter +ĠEqu ip +opa usal +S aid +ĠLay out +Ġvo icing +Ġth ug +Ġporn ographic +I PS +Ġmo aning +Ġgriev ance +Ġconf essions +esc al +TEXT URE +Aut hent +os aurus +P urchase +Ġreleg ation +al ter +ĠÂł Âł +Ġr iddled +Ġo gre +ĠLow ell +Occ up +E at +ĠHy der +ĠAdvis er +Com merce +H unt +ĠOr th +ĠComp etitive +ĠCL A +CD C +Ġsal ads +F le +Ġindustrial ized +` , +ĠO WN +Ġbec k +ĠPart icularly +oub t +Ġm M +ĠHuss ain +ĠChen nai +Ġ9 20 +Ġappoint ing +ĠCull en +,,,, ,,,, +Ġp ores +ver ified +Ġbi ochemical +em ate +Ġcoward ly +ĠHels inki +ĠEthiop ian +S OURCE +ER C +est ro +Ġbi otech +ĠS our +Ġbrew er +Bloom berg +Ġintens ify +Gl ass +an co +ĠF DR +gre SQL +ĠF ires +©¶æ ¥µ +ec o +100 1 +ĠHom eless +Ġinstant aneous +ĠH aste +ig el +D iamond +Ġp aving +Ġland fill +Ġd ads +h oun +: ] +Ġinc endiary +ĠLiving ston +ĠHil bert +ĠChe cks +st yles +in ators +ĠCl ive +ph rine +Ġchimpan zees +Ġp all +ĠJ M +ĠAad haar +ð Ŀ +Ġachie vable +dis abled +P ET +OOOO OOOO +M ot +Ġint angible +Ġbal let +ĠWe bs +ĠEst imated +Effect s +Ġb ailed +Josh ua +Ġturb ulence +Ġoccup ant +ĠDay light +Ġ36 1 +me et +Ġstat ically +Ġon look +Ġk i +il legal +Ġvel vet +Ġdehyd ration +Ġacqu ies +ĠRe z +ak ura +ĠU pton +at ro +Ġincomp rehensible +Ġback door +ĠRh ino +7 27 +Ġmath s +) + +Ġhe resy +Ġd f +ĠRoc he +ĠL ydia +Ġpanc reat +re ply +arre ll +Ġsolicit ation +Ġcirc adian +BI P +Ġfor ay +Ġcrypt ic +iz u +ime o +ĠTom ato +ĠH oms +ex amination +Ġqu arry +ĠVal iant +ĠJer icho +ĠIN CLUD +Ġ18 40 +5 19 +Ġres ists +Ġsnap shots +ĠSp ur +ĠAnt iqu +Log in +Ġbest selling +Ġant ic +ĠS utherland +ãĤ¢ ãĥ« +Ġ~ / +ĠP arm +è ĥ +P ages +int ensity +Ġimm obil +Ġ18 65 +zz o +Ġn ifty +Ġf entanyl +ĠPres ervation +op hen +Ġd arts +ĠD inosaur +po inters +ĠR ite +s uggest +aware ness +ĠSher idan +Ġst ances +Ġsor cery +Ġper jury +ĠNik ola +ie ver +Ġf iance +ĠJordan ian +ĠBall oon +Ġn ab +Ġk b +Ġhuman ities +ĠTan aka +hill ary +Ġconsult ancy +ĠZ ub +Ġrem ission +Ġconf id +CH Q +ĠF ug +Ġimpro vis +Y ep +/ _ +Ġunwilling ness +Ġport folios +05 5 +ĠInstruct or +aim an +Ġclaim ants +M bps +ĠBy e +re ceived +T weet +Ġind emn +ri z +am ara +N at +Ġeval uates +ĠL ur +ep ad +FO X +ĠTh ro +Ġrust y +Ġbed rock +ĠOp rah +J B +Ġmanip ulative +Ġwill ful +Ġrel apse +Ġext ant +The me +S ensor +ĠSt ability +go vern +Ġpo ppy +Ġkn ack +Ġins ulated +ĠT ile +ĠExt rem +Ġunt old +Ġconver ge +Ġref uel +ig roup +Ġdistort ions +Ġrav aged +Ġmechan ically +ĠRe illy +ĠN ose +ĠIncarn ation +ĠBeck y +abb ling +Ġt aco +Ġr ake +Ġmelanch oly +Ġillust rious +ĠDart mouth +Gu ide +ĠR azer +ĠBen z +Ult imate +ĠSur prise +Ġpage ant +off er +Who ever +Ġw iser +Ġchem ist +ĠHE LL +ĠBul 
k +Ġpl utonium +ĠCO VER +Ö ¼ +f ailed +Ġtire lessly +Ġinf ertility +ĠTr ident +ĠShow time +ĠC iv +V ice +requ ires +itt ance +Ġun controlled +interest ing +56 1 +Ġinnov ate +ateg ic +L ie +ĠS elling +U l +Ġsav ior +ĠT osh +Ġsw ast +P ASS +Ġr ink +Ġcard io +ĠI ro +ud i +Ġv antage +Ġv ans +ĠNi ño ++ = +Ġpropag ate +< ? +Ġmethod ological +204 39 +Ġtrig lycer +Ġing rained +ĠAn notations +arr anted +6 17 +ĠS odium +ĠA AC +techn ical +mult ipl +Ġ3 73 +å ĭ +Ġdec isively +Ġboost ers +Ġdessert s +ĠGren ade +Ġtest ifying +ĠSc ully +ID s +Ġlock down +ĠSc her +ĠR é +ĠWhit man +ĠRams ay +rem ote +Ġh ikers +ĠHy undai +Ġcons cientious +Ġcler ics +ĠSiber ian +ut i +is bury +Ġrel ayed +Ġqu artz +ĠC BI +seek ers +ull a +Ġweld ing +ĠSh al +ble acher +T ai +ĠSam son +Ġt umble +ĠInvest or +Ġsub contract +ĠShin ra +ow icz +j andro +d ad +Ġtermin ating +ĠNe ural +ä» £ +Ġleak age +ĠMid lands +ĠCaucas us +í ķ +c it +ll an +iv ably +ĠAlb ion +Ġ4 57 +Ġregist rations +Ġcomr ade +Ġclip board +0 47 +Ġdiscour aging +ĠO ops +Ad apt +Ġem path +n v +ĠPR OT +ĠDon n +ĠP ax +ĠB ayer +t is +Squ are +Ġfoot prints +part icip +ĠChile an +B rend +ind ucing +M agn +Ġclub house +ĠMagn um +Ġenc amp +ĠEth nic +uch a +ere y +Ġw atered +ĠCal ais +Ġcomplex ion +Ġsect s +Ġren ters +Ġbr as +oÄŁ an +Time out +Man agement +Ġinf ographic +P okemon +Cl ar +Ġloc ality +Ġfl ora +as el +P ont +Ġpop ulate +ĠO ng +Ġsubs istence +Ġa uctions +ĠMcA uliffe +ĠL OOK +br inger +Ġtit an +Ġmanif old +ĠâĹ ı +Ġcalibr ated +Ġcal iphate +ĠSH E +ĠCommission ers +ce ivable +j c +W inner +5 24 +Ġcond one +Other wise +Ġp iling +Ġem body +ĠCrime an +ut ics +ĠEx hibition +Ġ4 26 +e ering +Ġv ying +ĠH UGE +* =- +Ġprin cipled +à ¦ +Ġquir ks +ĠEdit ors +put ing +G ES +ĠF TA +ठ¾ +add on +ĠH AM +ĠFrie za +W oman +. $ +Ġc rib +ĠHer od +Ġtim ers +ĠSp aces +ĠMac intosh +at aka +Ġgl ide +Ġsmell ing +ĠB AL +Ġun su +Ġcond os +Ġbicy cl +ĠRev ival +55 3 +Ġjugg ling +H ug +ĠKardash ian +ĠBalk ans +mult iple +Ġnutrit ious +oc ry +19 00 +Ġinteg rates +Ġad joining +ĠF older +roll ment +ven ient +Ġu ber +y i +Ġwh iff +ĠJu ven +ĠB orough +net te +Ġb ilingual +ĠSp arks +ph thal +man ufact +Ġt outing +ĠPH I +Ke efe +Rew ard +Ġinf all +ĠTem per +typ ically +ĠNik ol +Ġregular s +Ġpseud onym +Ġexhib itions +Ġbl aster +Ġ40 9 +w arming +Ġrever ber +Ġrecip rocal +Ġ6 70 +ip ient +b ett +ĠBe gins +Ġit ching +ĠPh ar +Ass uming +Ġem itting +ĠML G +Ġbirth place +Ġt aunt +ĠL uffy +ĠAm it +Ġcir cled +ĠN ost +enn ett +Ġde forestation +ĠHist orically +ĠEvery day +Ġovert ake +79 2 +Ġn un +ĠLuc ia +Ġaccompan ies +ĠSe eking +ĠTr ash +an ism +R ogue +Ġnorth western +ĠSupplement al +ĠNY U +ĠF RI +ĠSat isf +x es +5 17 +Ġreass ured +Ġspor adic +Ġ7 01 +Ġmed ial +Ġcannabin oid +Ġbarbar ic +Ġep is +ĠExplos ive +ĠD ough +Ġuns olved +Support ed +Ġacknowled gment +sp awn +Ġkit chens +Ġ- = +talk ing +ic ist +ĠPeg asus +ĠPS U +Ġphot on +ĠAuthent ication +R G +@# & +76 2 +ĠCl air +Ġdi aper +Ġbr ist +ĠProsecut ors +ĠJ em +6 28 +ĠEvery where +ĠJean ne +equ ality +ãĥ© ãĥ³ +object s +ĠPel icans +Ġ39 2 +Ġbl u +b ys +ĠA go +Ġinstruction al +Ġdiscrim inating +ĠTR AN +ĠCorn el +ag os +Ġty re +Ġas piration +ĠBrid gewater +": - +! ". 
+ĠEn s +ĠCoc o +P ie +Ġdet ach +ĠC ouch +Ġphys ique +ĠOccup ations +osc opic +en ough +B uzz +App earance +Y P +Ġrac er +Ġcompl icity +r pm +T oy +Ġinterrupt s +ĠCat alyst +Ġut ilitarian +imp act +Ġsp aghetti +Ġp orous +Ġeste emed +Ġinc iner +ĠI OC +7 48 +Ġesp resso +ĠSm ile +abil ia +6 35 +Ġmathematic ian +Ġ4 24 +ĠK L +ĠH IP +Ġover heard +ĠT ud +ĠT ec +Ġqu izz +Ġfl attering +Ġcon n +âĢ İ +Ġatt aches +ĠR OS +ĠAC S +Ġt cp +ĠSh ame +sk ip +res pected +ĠTrin idad +gr ain +Ġfooth old +ĠUnch arted +ĠJul io +z l +av ored +ĠAn xiety +er rors +ĠCent auri +its ch +D addy +Ġclutch ing +ĠIm plement +ĠGut ierrez +Ġ7 60 +Ġtele portation +end ra +Ġrevers ible +st ros +Ad venture +08 3 +Ġliber ating +Ġas phalt +ĠSp end +AR DS +im sy +PR ES +ĠEmer ging +Ġwild fires +Ġtechn ologically +Ġem its +ĠART ICLE +Ġirregular ities +Ġcher ish +çī Ī +Ġst ink +ĠR ost +Econom ic +Ġcough ing +ĠMcC ann +pro perties +ilant ro +Ġreneg oti +Trans lation +Ġin quest +ĠGra pe +oot ers +gu i +ĠSwords man +ace ae +h itting +Ġr c +Ġexert ed +ĠS AP +it ent +Ġperil ous +Ġobsc urity +Ġassass inate +Ġab original +Ġresc uing +ĠSh attered +lock ing +all ion +Ch anging +ĠHar rington +ĠB ord +ĠAfgh ans +Jam ie +aret z +ĠAugust us +Ġ38 6 +8 30 +Ġj og +ok ingly +Tr igger +ĠH OR +Stat istics +Ġviewers hip +Ġadd itives +h ur +Ġmaxim izing +ĠR ove +ĠLou ie +ĠBuck et +ĠCHR IST +ou sel +Ġstre aks +ir ted +Ġt ert +Ġcolonial ism +Ġbur ying +y k +Cond ition +ĠDPR K +By Id +75 1 +âĹ ¼ +Ġwor risome +Ġvoc ational +sl ice +Ġsa ils +ĠCorrection al +95 4 +Ġt ul +K id +l uster +Ġfam ilial +ĠSp it +ĠEp iscopal +Specific ally +ĠVol cano +run s +q s +Ġve tted +Ġcram med +t rop +here r +Thank fully +Ġper cussion +Ġor anges +Ġround up +Ġ4 99 +x ious +Char acters +ĠZion ism +ĠR ao +ÃĽ ÃĽ +W F +Ġunintention al +ONE Y +Gr ab +Com mercial +Ġglut amate +ĠMcK enna +ru ciating +ning ton +ih u +Ch an +ĠSw ap +Ġleaf lets +Ġfunction ally +er ous +F arm +Ġcal oric +ĠLiter ally +con cert +Ġshe nan +Ġrep aid +ey es +Ġbas hing +ĠG orge +Ġcollabor ations +Ġun account +itch ie +Ġteam work +pp elin +Ġpip ing +Ġmin ced +Ġd iam +ri eg +Ġmasc ara +Ġsuck er +ĠMo ons +App s +ĠPe ck +Ġper v +ĠFl oat +o ley +ĠN ish +im ize +Ġarom atic +u in +end ish +! 
/ +ĠB icycle +ĠAS IC +ile ged +ĠQuad ro +ios yn +Ġlock out +ĠW ink +SP EC +Attempt s +Ġseed ed +red o +ias is +Ġsn ag +ãĥķ ãĤ© +ãĤ ¶ +Ġground ing +Ġrelie ver +Ġfrivol ous +ĠG ifts +ĠF aces +Es pecially +Ġmicrobi ome +im ag +ĠSch l +ĠP les +ĠBle ach +ĠIr win +ĠE aton +ĠDisc iple +Ġmultipl ication +Ġcoer ced +Ġ4 19 +st h +E vil +B omb +Ġex orc +Ġstag gered +L ESS +Ġinert ia +ĠED IT +Ġgo b +Tr aditional +Ġclass y +Lear y +ĠP AGE +yr s +Ġtrans porter +Ġmat ured +Ġhij ab +Ġbi ome +Where as +Ġex termination +ĠT ues +ĠT akeru +ĠAud rey +er ial +ĠAd en +aff les +Ġnarciss istic +ĠB aird +UT F +I re +ĠCon nie +Ch amp +Ġwhis pering +ĠH att +D K +Ġdis infect +Ġdeduct ed +Ġpart ake +Ġdown grade +ĠEs ports +ĠContin uing +Ġdemocr atically +icro bial +itt a +Ġlim estone +Ġexempt ed +ĠFren zy +H erm +7 28 +Ġfled gling +Met a +765 61 +69 3 +% : +w ake +5 26 +ĠDis cipline +Ġvirgin ity +ĠLeg ions +ĠFrank ie +int ent +Ġrest rooms +ĠRou ter +da q +Ġobjection able +âĨ ij +w ark +ĠRah ul +g ain +activ ation +abs olute +ĠAccess ed +Ġ24 00 +ogg les +Ġsecond ly +ĠDEF ENSE +Ġpost age +wra pper +sh arp +7 29 +Ġcommun icates +Ġadd on +ĠMil itia +H ong +Ġsl umped +ĠJP EG +ĠI car +ad ish +68 1 +Ġmaj esty +ĠWolf gang +ĠEl astic +u per +Ġv iz +Ġunconscious ly +ĠST D +ĠS ass +Ġflower ing +ĠHel ic +ĠDra per +ĠAm ateur +Ġman ure +Ġdis ingen +ĠLe i +br ing +9 49 +Ġinhib ited +Ġhead quartered +Ġen igmatic +�� � +Ġred ress +R H +Ġratt led +Ġd iction +l io +ĠT BA +ĠSN AP +C alling +Ġfasc ists +ĠD ove +iew icz +0 36 +Ġco asts +ĠR ect +Ġ) ] +L ot +6 29 +ĠS EM +ĠPeters en +ĠExpl ain +ĠBo ards +ĠBe zos +ĠJ ournals +Ġ20 24 +p arser +Ġmist rust +Ġgr ate +ĠL ocked +bo a +S aint +g aming +Ġvow el +in ately +bl ow +All ah +Ġun matched +Ġb ordering +ĠExp end +n r +Or acle +rou ch +Ġcont iguous +ac us +Ġdist raught +58 1 +Ġanat omical +O X +ap ixel +8 33 +ĠPL US +Ġres usc +Ġab iding +57 3 +Ġvac ancies +Em ily +Ġhyp othal +ĠWer ner +ĠWe e +ĠDJ s +5 13 +Ġwitch craft +Ġac upuncture +ent ary +benef it +Product s +ĠP SP +ĠMP G +ĠJ inn +ĠJ arrett +Ġ4 45 +ĠIm aging +ĠP yth +Fin ish +Ġte x +Ġjuven iles +Ġhero ism +Ġdoubt less +ĠA ki +ĠT end +ĠPatri arch +Ġbit ters +ĠTele communications +it atively +ag na +Ġr g +ĠS OLD +Ġcomp ulsion +ĠN asa +ĠKath ryn +Ġmillion aires +Ġintrins ically +Ġbolst ered +time out +fl o +Ġtut or +p our +Stat ement +Ġ{ * +ĠRud olph +ĠKimber ly +rog ens +adi q +] + +Ġindign ation +Ġfract uring +ĠRe leases +ĠGr ain +pro tein +L ago +Ġvac ations +Ġboot ed +ĠTH REE +ĠH G +oresc ence +Ġt f +Ġso ar +iosyn cr +Ġgl ances +ĠSp oon +ĠJ ury +ĠCow boy +Ġcreat ively +Hig her +Ġsolic itor +Ġhaw k +ac io +89 6 +Ġsuperf lu +Ġbombs hell +ct ure +Ġbroker age +Ġraid ing +Ġf rench +Ġang led +Trans action +ĠGen ocide +u pe +ĠHait ian +57 2 +! 
: +Ġunwitting ly +iter ator +sc roll +Ġtall ied +Ġbi omedical +ĠC ARD +Ġe uphem +Ġbrain storm +a quin +K o +Mic helle +ĠR unes +ĠBall istic +ud ers +Ġmod esty +ĠiP ads +ĠEzek iel +Y E +Ġstars hip +Ġpower fully +Ġper l +ĠSh ade +ĠQu art +ĠE EG +Ġfisher man +OS ED +ĠTyp ical +df x +Ġmes hes +Ġet ched +worth iness +Ġtopp led +Ġ3 96 +or ius +We iss +Ġmy sql +ĠVal halla +Ù Ĵ +le asing +Ġrec omp +rap nel +S el +04 3 +Ġder ailed +ĠGu ides +IR T +Ġde human +ĠBritt any +" )) +Ġex claim +Ġb alk +Ġ8 40 +CLA IM +int el +L AB +Ġpe gged +Ġast roph +sm oking +Ġrig ging +Ġfix ation +Ġcat apult +ins ide +ĠC ascade +ĠBolshe vik +G aza +Dep th +Ġloud spe +Ġalmond s +me yer +l eness +j en +f resh +Ġunbeat en +ĠSqu id +ĠPres umably +Tim er +B W +Ġro sters +Ġell ipt +ĠHar riet +dat abase +ĠMut ual +ĠComm odore +uk ed +kn ife +ĠCOMM UN +h ya +Ġmel ts +arch ives +Ġrat ification +Ġmultip lying +Ġinter oper +Ġasc ert +w ings +ver ting +ĠScorp ion +ay e +ĠPorts mouth +ĠM TA +n it +iaz ep +Ġqu arantine +Ġslides how +Ġcent imeters +Ġsyn opsis +Ġsp ate +th irst +Ġnom inating +ĠMel vin +Pre view +Ġthro b +Ġgener ational +ĠRad ius +rest ling +put able +aw ar +N ECT +Ġunlaw fully +ĠRevel ations +Wik ipedia +sur v +Ġeye ing +ij n +ĠF W +Ġbr unt +Ġinter stellar +Ġcl itor +ĠCroat ian +ĠCh ic +ev a +ĠDis app +ĠA kin +iner ies +d ust +Interest ed +Ġgen esis +ĠE ucl +ö n +p icking +Ġmut ated +Ġdisappro ve +ĠHD L +Ġ6 25 +Ì ¶ +c ancer +Ġsqu ats +Ġle vers +Disc uss += ] +D ex +ĠVIDE OS +A UD +Ġtrans act +ĠKin ect +ĠK uala +ĠC yp +7 47 +Ġsh attering +Ġarsen ic +ĠInt ake +ĠAngel o +ĠQu it +ĠK he +Ġ18 93 +M aker +0 29 +ĠPain ting +Dis able +9 16 +Ġanal ges +Ġtact ile +Ġprop hes +Ġd iced +ĠTravel s +ĠHe ader +ĠClub s +Ass istant +Ġinc rim +Ġd ips +Ġcruc ifix +ĠShan ahan +ĠInter pret +Ġ40 90 +al ogy +abb a +Ġsimul ac +hus band +S IM +Ġrecy cle +uc er +ed ged +Ġre naissance +ĠBomb ay +Cath olic +ĠL INE +ĠCl othing +re ports +Ġpl aus +Ġd ag +ĠM ace +Z I +Ġintr uder +ĠVeter inary +g ru +Ġsne aky +ĠS ie +ĠC innamon +P OSE +Ġcou rier +ĠC NS +Ġemanc ipation +s it +Ġplay through +ĠFac ilities +v irt +ĠG auntlet +Thom pson +Ġunbeliev ably +Param eters +Ġst itching +ign e +ĠTH ESE +Priv acy +Ġshenan igans +Ġvit ri +ĠVal id +59 1 +Ń · +ĠProt otype +ink a +SC P +ĠT id +è Ī +old ed +Ġindividual ity +Ġbark ing +Ġm ars +ĠW D +Ġ8 20 +Ġt ir +Ġsl apping +Ġdisgr untled +ĠAng ola +ri us +ĠTorn ado +ĠTh urs +Ġcapt cha +Ġang st +ĠP og +ĠAssass ins +ĠAd idas +Ġjoy ful +Ġwh ining +Emer gency +Ġphosph orus +Ġatt rition +oph on +ĠTimber wolves +ĠJ ah +ĠBr inging +ĠW ad +ĠEn sure +oh l +ĠX ie +omm el +c mp +Ġz ipper +Ġrel at +ĠCor ridor +m ilo +T ING +Av g +Ġcro pped +] } +Ġr aged +ĠLump ur +ĠGuer rero +our ke +N ut +Ġoff sets +og lu +dr m +Ġmort als +lat able +Ġdismiss ive +ä¸ ī +Ġthro ats +Ġchips et +ĠSpot light +Catal og +art ist +G b +Ġch illy +Ġst oked +Ġ3 74 +W ard +L atin +Ġf iasco +Ġble ach +Ġb rav +Enh anced +Ġin oc +ĠFior ina +_ > +Ġle ukemia +Ġel uc +Ġannoun cer +ĠLith uan +ĠArm ageddon +å ĩ +Len in +ĠR uk +Ġpe pp +ĠRom antic +ĠP IT +ĠInter stellar +ĠAt kinson +R aid +J s +Go al +C ourse +Ġvan ishing +es ley +ĠR ounds +Els a +59 3 +Ġredund ancy +ĠST AND +Ġprop hetic +Ġhabit able +ry u +Ġfaint ly +M ODE +Ġfl anked +IR C +Aw esome +Ġsp urious +ĠZ ah +ĠMS G +Ġsh ading +Ġmotiv ational +ĠSant ana +ĠS PR +Ġexc ruciating +om ial +ĠM iko +ĠLe opard +A byss +Ġ[ | +d irty +Ġbath s +Ġdem oral +and re +P B +Ġun ification +Ġsac rament +Ġ[ & +Ġpric eless +Ġgel atin +Ġeman ating +ĠAll aah +98 6 +Ġout burst +Ġer as +ĠX VI +ĠSP I +O tt +ĠLaz arus +PL IED +F lying 
+blog s +W isconsin +R aven +Ġreb ate +Ġcreep s +ĠSp an +ĠPain ter +ĠKir a +ĠAm os +ĠCor vette +Cons umer +ĠRec over +ck i +Ġpes ky +ĠIn vention +Compan ies +Ġchalleng ers +ad emic +ĠUkrain ians +ĠNeuro log +ĠFors aken +Ġent rants +Ġemb attled +Ġdef unct +ĠGlac ier +Ġpo isons +ĠH orses +m akes +ĠD irt +Ġ4 23 +hh h +ĠTrans formation +QUI RE +................ .. +Ġtrave ller +ĠSe xy +ĠK ern +ip olar +Ġransom ware +oooooooo oooooooo +E c +rub y +Prof essional +ĠOut break +arg ument +G rey +ĠFif a +ĠCH O +ĠFOR M +ĠAm trak +- [ +Ġcr adle +Ġantioxid ants +ãģ®å ® +7 36 +ĠNAS L +ĠContribut ions +Ind iana +ĠST EP +C SS +Ġsal ient +Ġall ocations +yr ights +Ġm ashed +ĠCut ter +Sex ual +Ġp ounded +Ġfan base +Ġc asc +ĠTrans parency +Ġanaly tic +ĠSummon er +× ŀ +ĠAD C +det ail +Ġvan quished +Ġcr abs +ar ie +Dest roy +ĠS ack +Ġtrans istor +Al abama +ĠK oen +ĠFisher ies +c one +Ġannex ed +ĠM GM +es a +Ġf aked +ĠCong ratulations +Ġhind ered +Ġcorrection al +ĠI TV +lee ve +Ġin appropriately +lic ks +Ġtresp ass +Ġp aws +Ġnegoti ator +ĠChrist ensen +lim its +ĠDian ne +Ġeleg ance +ĠContract s +an ke +Ob j +Ġvigil ance +Ġcast les +ĠN AD +ĠHol o +Ġemph atically +ĠTit us +ĠServ ing +ĠRich ie +ĠP igs +5 68 +Ġanim osity +ĠAtt ributes +ĠU riel +M Q +my ra +ĠApplic ant +Ġpsychiat rists +ĠV ij +ĠAb by +ag ree +P ush +Ġk Wh +hib a +Ġinc ite +ĠWe asley +ĠTax i +minist ic +hy per +ĠF arn +Ġ6 01 +ĠNation wide +F ake +95 2 +Ġma ize +Ġinteract ed +Ġtransition ed +Ġparas itic +Ġharm onic +Ġdec aying +Ġbas eless +ns ics +Ġtrans pired +Ġabund antly +ĠFore nsic +Ġtread mill +ĠJ av +ab and +Ġssh d +Ġfront man +ĠJak arta +oll er +dro ps +ĠSERV ICES +rompt u +oph ical +h ospital +bled on +6 45 +Ġmid range +ĠEV ENT +cul ated +raw led +Ġper ched +Ġover board +ĠPe el +ĠP wr +ĠCar th +ĠCOM PLE +co e +sh all +Ġdeter rence +M ETHOD +ĠAbs ent +M EN +Ġs ill +ĠLE VEL +Y ork +Ġsin ners +ĠOP EC +ĠN ur +ĠDesign s +se lection +Ġunw orthy +CH A +Ġstreng thens +88 3 +ed ly +Ġslic ing +Ġmal nutrition +Ġfilm making +ĠPol k +ur ated +Ġ4 21 +bre akers +!' 
" +Ġwet lands +ĠDisc rimination +Ġallow able +Ġste ered +ĠSic ily +S AM +Ġmust ache +Ġm ids +Ġcl ipped +Ġcirc ulate +Ġbr ittle +ĠBuild ings +ra ised +ĠRound up +Ġwealth ier +Ġoverw rite +Ġover powered +ĠGerr ard +s ites +PD ATED +Ġacute ly +ĠGam ble +Ġp im +ĠK us +Typ ically +De ploy +ĠMoroc can +p otion +com be +Ġvigil ante +Ġ36 3 +St ew +ĠB agg +Ġres ided +ĠSp o +Ġrem nant +Ġempt iness +br ainer +Ġout patient +pri ority +Ġle ptin +ĠPay ton +ĠGle aming +ĠS hed +ĠPol o +ĠMormon ism +rest ricted +arl ane +w x +Ġcreat ine +ĠAn on +ĠST UD +ĠJ UL +ĠT ee +5 28 +08 9 +Ġhat ched +Dis patch +ĠCompos ite +Ġ45 1 +p uff +ĠX COM +ĠOr n +ĠTH ANK +END ED +ĠAshe ville +Ġà ľ +Ġman go +ĠS lightly +world ly +ĠW ander +ĠExp and +ĠCh r +M ist +Ġorthodox y +ĠUN ESCO +reg ate +Else where +k ie +ir led +Ġtopp le +Ġadopt ive +ĠLeg s +d ress +ĠS agan +b are +ĠGl ou +Cr unch +Ġhelp ers +Ġchron ically +ĠH uma +1 0000 +Ġaccommod ating +äº Ķ +Ġwrink les +Ġdod ged +four th +Ġpre con +Ġcompress or +ĠK are +Ġev ict +ĠWar wick +im ar +Ġmodern ization +Ġband wagon +Ġref uted +Ġnet ted +ĠNa ples +ĠGen ie +per ors +Ġfield ed +Ġde re +ĠPar ables +le es +Ġtr out +asp ers +Ġn ihil +Ġhapp iest +Ġflo ppy +ĠLo ft +ĠHe ard +Ġun ison +Ġl ug +ĠRed mond +class ic +Supp orters +SH IP +G MT +Ġfue lled +ç IJ +Ġd d +ĠEmin em +Ġ18 97 +NY SE +Ġsecret aries +ĠF IA +ĠCanaver al +F avorite +Ġp omp +Ġdetain ee +ers hip +aim on +i our +ĠA pex +Ġplant ations +am ia +ac ion +R ust +Ġtow ed +ĠTru ly +5 77 +Ġshel tered +r ider +W o +Ġl air +ĠInt elligent +impro ve +m atically +Ġet iquette +ad ra +all o +ĠJun o +any thing +ĠStru ggle +ĠPred ict +ĠGr imes +ĠAMER ICA +ct x +ĠSit uation +W OOD +Ġsol uble +me ier +Ġintoler able +ang ering +Ġun interrupted +Ġtool tip +Ġinterrog ated +Ġgun ned +ĠSne ak +æŃ ¦ +Ġt ether +Ġcr umble +L ens +Ġclust ered +ĠSy l +ĠHas an +Ġdystop ian +w ana +Ġjoy stick +ĠTh ib +amm u +Tom orrow +5 46 +Ġoverc ame +Ġminim ized +cept or +Run ner +ENG TH +ĠBrend a +ĠAchieve ments +Ġtor ches +Ġrapp ort +ĠInvestig ator +ĠHand ling +rel ation +g rey +8 15 +Ġk cal +ĠComm ands +d q +Ġcur ls +Ġbe arer +Ġcyn icism +it ri +ĠUse ful +B ee +D CS +Ġab ras +P ract +BIL ITIES +7 12 +Ġdebug ger +Ġdebt or +ĠL ia +ĠK ers +Ġexacerb ate +ĠSt acy +ĠB land +ĠSc enes +Ġbranch ing +âĸĪâĸĪâĸĪâĸĪ âĸĪâĸĪâĸĪâĸĪ +ape ake +Ġs alsa +Ġmish and +ĠKon ami +ĠN ib +Ġanecd ote +Ġagree able +Ï ī +ĠNath aniel +ĠHe isman +ĠB eware +Ġ18 86 +spect ive +69 1 +5 22 +Ġinhib its +Ġhas hing +Ġ18 89 +å° Ĩ +v ich +P ure +Ġsolid ly +Ġaspir in +im aru +Ġstreet car +ĠU CS +ĠJ udd +Ġflash backs +p ins +Ġ14 40 +ĠUN HCR +ĠSym ptoms +T IT +5 38 +F ra +% ); +Ġo oz +Ġcur few +Ġcal med +Ġparticip ates +Te X +Ġnons ensical +Ġfull back +ĠDe L +mon key +h ari +Ġmetabol ites +Ġloot ed +ĠAL WAYS +ĠB CC +L t +oc het +B one +Ġveto ed +Ġg cc +ĠCL ICK +Ġ18 88 +s af +Ġstiff ness +Ġlow ly +ĠGe h +vers on +ors et +Ġun foreseen +Ġan esthesia +ĠOpt ical +Ġrecon structed +ĠT up +sh ows +NEW S +ĠNewsp aper +ĠA SA +ter a +N umbers +Ġinexpl icable +× ij +Ġhard ness +unt arily +ĠA cer +grad ient +ARD IS +Ġwood land +Ġmetaph ors +ĠWem bley +ĠPa vel +phil is +Ġre writing +Ġpercept ual +Ġ10 70 +worm s +ĠDown s +Ġunsur prisingly +Ġtag ging +fl ame +Ġlit res +Ġboun ces +ĠB abe +sh ut +Ġoverd oses +ĠShe ila +ĠCh au +ĠBl ess +Capt ure +ĠSign ificant +ĠSc ion +Ġ38 9 +ĠMc H +ĠTitan ium +ĠMe al +amed a +ag ents +agg ressive +B illy +76 3 +ĠS aying +DER R +it one +Coll ins +B ound +Ġbol ted +ĠDM CA +95 3 +Ġun iqueness +Ġep igen +un ci +ant am +Ġreck oning +ch airs +OG R +ĠSen egal +Ġ18 62 +re levant +Ġ ¯ +Ġpharm acies 
+ĠG eral +v ier +Y an +OR PG +Ġrab id +b ending +ĠUN ITED +Ġ4 65 +As sembly +Ġwe ep +Ġbe hest +ĠMother s +ĠJ ace +h id +Ġwh irlwind +ĠUN IVERS +Ġut opian +Ġkidn ap +Ph ilipp +K in +89 3 +Ġlivest ream +ĠM ISS +Ġsub versive +ĠTechn iques +ĠJUST ICE +ĠB ASE +Ġ38 7 +Ġassail ants +ĠHard core +Ġsprink led +ĠP se +é ļ +print ed +ĠH au +OR GE +ĠT OUR +Ġl aced +Ġit ch +G iving +Ġport ed +78 1 +//////////////// //////////////// +bre eding +Ġlog ger +ĠH OL +inn ie +First ly +Ġembry onic +Ġdeleg ated +p ai +O IL +Ġcentr ally +ĠR x +ĠSc outing +D utch +Ġhe reditary +ĠCru iser +s at +5 29 +ĠMar riott +other mal +Ġprohib itions +E arn +ĠSt ab +ĠColleg es +ĠBel ief +st retched +ĠL H +ĠEntity Item +C IA +Ġun rem +Ġlaure ate +Ġdenomin ations +sum mary +h ler +S pect +ĠK laus +ĠBe ans +Ġins ur +ĠPA X +Ġfield er +ĠV et +ĠSp arrow +z ie +ĠS Q +ĠMond ays +ĠOff line +ĠLer ner +ĠExt ensions +Ire land +Ġpatron age +Ġcontrast ed +ĠMan ia +h irt +Mos cow +Ġcondem ns +ĠAn ge +Ġcomp osing +ĠPe pe +ĠP addock +Ġheter ogeneity +Ġide ologically +Ġf ishes +Ġcur sing +ĠR utherford +ĠFlo ating +ĠAm elia +Te a +Syn opsis +Ġstun ts +Ġbe ad +Ġstock ing +ĠM ILL +ob ook +mass ive +\ < +Ġh ump +ĠPref erences +Engine Debug +ge ist +ĠNiet o +ome ver +ish y +eval uate +col onial +Altern ative +ĠGo Pro +ĠV ortex +ĠNET WORK +ans ky +Sec ure +ĠTh rust +Sn ake +Ġparcel s +Ġsam urai +Ġactress es +N ap +M F +ifer ation +Be er +5 23 +ĠI ly +oint ment +P ing +Ġstri ped +ĠMell on +oss ession +Ġneut ron +end ium +Ġa ph +ĠFlav oring +Ġ38 3 +Ġrespons iveness +ĠJ indal +ĠHitch cock +Den ver +ĠDRAG ON +sm anship +ĠDu pl +Ġs ly +Ġweb cam +ĠTw ain +ĠDar ling +ili ate +cons umer +D IT +Ġnames ake +Ġun orthodox +Ġfun er +ĠPL oS +ĠCONTR OL +ozy g +ogl obin +F ACE +ER G +ĠD ia +ĠF iesta +ce le +0 34 +Ġencl ave +âĸ¬ âĸ¬ +on ement +al ist +M and +Ġhome grown +ĠF ancy +Ġconcept ions +ĠCont ains +ure en +Ġreiter ate +Ġme ager +Ġinstall ments +Sp awn +6 27 +Ġphot oc +ĠCab rera +ĠRos enthal +ĠLans ing +is ner +Ġinvest s +ĠUFO s +EX P +Hard ware +Ġtr agically +Ġconced es +ie ft +ch am +bor gh +ĠSch r +ĠMel anie +ĠH oy +Ġvisit ation +Ġid iosyncr +Ġfract ions +Ġfore skin +ob os +Ġpo aching +ĠVI EW +Ġstimul ates +ĠG ork +can on +M IC +ĠNem esis +ĠInd ra +ĠDM V +Ġ5 29 +Ġinspect ing +Ġgrand ma +ĠW hedon +ĠSh ant +ĠP urg +ik an +ĠT eg +ĠCL R +z ac +Vict oria +ĠVer ify +ion ics +Ġpart ying +ĠM ou +col our +Ġtestim onies +l ations +Ġpress uring +hi ro +ac ers +Ġf id +ang ler +ĠCS I +Ġhere after +Ġdiss idents +report ing +iph any +che v +Ġsol itude +Ġl obe +Ġind is +Ġcred ential +re cent +ad ult +ĠNir vana +ĠFranch ise +L ayer +H yp +ĠBerks hire +Ġwill s +t if +Ġtot em +ĠJud ah +rep air +Inst ant +5 48 +Ġemb assies +Ġbott leneck +Ġb ount +Ġtyp ew +ĠAl vin +j ing +im ilar +R ush +Ġbr im +ĠHEL P +A im +] ' +Ġpass ively +Ġbound ed +ĠR ated +Ġcriminal ity +Ġbiom ark +Ġdisp atcher +ĠTow ards +Ġ+ ++ +right eous +f rog +ĠP anc +C arter +0 32 +æ© Ł +Ġult raviolet +ĠLic ensed +ĠT ata +ĠBl essing +ĠG AM +Ġchem ically +ĠSe af +ĠRE LE +ĠMerc enary +capital ist +Ġform ulations +Ġann ihilation +ĠVer b +ĠAr gon +Ġun loaded +Ġmorp hed +Ġconqu ering +back er +I ELD +Ġtheft s +Ġfront runner +ĠRoy ale +ĠFund amental +el ight +C hip +necess ary +ay n +ĠSl ip +Ġ4 48 +cern ed +P ause +Ġshock ingly +ĠAB V +Ġcomp osure +7 33 +ĠMotors port +ah ime +Mur ray +M ach +Ġgr ids +Ġdeb ian +Ġfurther more +Ġdexter ity +ĠCollect ions +os lov +il age +b j +ĠMont eneg +Ġstrut Connector +Ġmassac res +Ġbrief s +fet ched +uv ian +ol ition +Fail ure +emon ic +Ġfl ared +Ġclaim ant +Ġc ures +Ġgive aways +ĠSubst 
ance +al ions +Ġcr inge +ĠK ul +Ġarist ocracy +ĠUl ster +ol ated +h ousing +ĠM IS +Ġgl ared +ĠWil helm +ne eds +lam bda +build ers +ĠV IS +Ġradi ator +ĠGhost busters +Ġ4 36 +act ual +Ġher ds +ç a +watch ing +Ġcounter ing +Ch arge +Ġchar red +Ġwar heads +Ġiod ine +ĠM acy +04 1 +Ġdepart ures +ĠS ins +Ġdy ed +ĠConcept s +g ado +7 13 +Ġquot ations +Ġg ist +ĠChrist y +Ġant igen +ĠHem p +ĠD rawn +ĠB arg +ez vous +Ġp aternity +Ġar du +ĠAnch orage +ĠR ik +Ġover loaded +ĠUs ername +ĠTam my +ĠN au +ĠCell ular +Ġw aning +Ġrod ent +ĠWor cester +il ts +ĠT ad +Ġdwell ings +Ġbull ish +4 31 +Ġretali ate +Ġmig raine +ĠChev ron +CH ECK +Ġdon key +c rim +SP A +ĠAn alog +Ġmarqu ee +ĠHa as +B ir +ĠGD DR +ĠDownload s +Ġwill power +ĠFor th +ĠRecord ed +Ġimp ossibility +ĠLog ged +ĠFr anks +ĠR att +in itions +Ġclean ers +Ġsore ly +Ġflick ering +ĠEx amination +c atching +allow een +Ms g +Ġdun no +F a +Ġdys ph +c razy +.' '. +Ġmain line +Ġc s +Ġp tr +ĠW ally +ig un +95 1 +ĠBig foot +f ights +Ġretrie ving +J r +Ġdupl ication +ĠExpl an +Ġrel ational +Ġqu aint +Ġbisc uits +Ġad o +Ġsh udder +Ġantid ote +blood ed +ks h +Ġsa uces +Ġrein vest +Ġdispens ary +ĠD iver +Ġ9 000 +stud ent +Ġin separ +esc ap +Ġtodd lers +ĠGP IO +ĠAss ignment +head ers +Ġlack luster +Ġab ack +95 6 +Ġtool bar +7 45 +Ġo ust +Ġcontempl ation +ĠPRES IDENT +Ġ4 58 +==== == +Ġguarantee ing +ĠHe ist +ĠCann es +Ļ ½ +Ġcollabor ator +ĠAm p +Ġg ou +ĠSH ALL +st ories +78 3 +Ġmobil ized +Ġbro od +ĠL U +ĠðŁ ij +Ġref in +ĠAnthrop ology +v ind +ill i +Ġwarrant ies +ĠB abel +Ġsw ath +Ġc aches +Ġantagon ists +art ifacts +Ġhot ly +ĠSt arts +ĠG ö +z ag +!! !!! +Ġsc ourge +Ġcons piring +ru its +re verse +ĠShe en +ĠJes uit +ĠGiov anni +ad ies +Ġbutt ocks +ear cher +ac an +Ġvolley ball +Ġshroud ed +Ġscore board +b ats +ĠI PM +Ġass es +Ġde regulation +ĠTe legram +ĠReb oot +Ġ7 000 +ĠCan ary +Ġk ernels +ĠFranç ois +ĠD uff +ĠP on +ĠLe ica +ĠGar min +Ġor phans +ĠClaud ia +Ġcal endars +ĠLe ilan +ent o +R ocket +Ġbr unch +ĠHaw king +ain ers +Ġsens ibilities +Ġk W +ĠK and +Ġre claimed +Ġinteresting ly +× © +rom y +J M +ĠEnhance ment +b ush +Sk ip +Ġrapp ers +Ġg azing +p edia +ath lon +Rev olution +Ġsn ipers +Ġre verted +Ġconglomer ate +T erry +79 4 +Ġhars her +Ġdes olate +ĠHit man +Comm ission +Ġ( / +âĢ¦ ." 
+Com par +Ġampl ification +om inated +Ġreg ress +ĠColl ider +Ġinform ants +Ġg azed \ No newline at end of file diff --git a/qa_mdt/checkpoints/robertabase/pytorch_model.bin b/qa_mdt/checkpoints/robertabase/pytorch_model.bin new file mode 100644 index 0000000000000000000000000000000000000000..7f444f8a051dc794fa685b9275064a0e29b4c966 --- /dev/null +++ b/qa_mdt/checkpoints/robertabase/pytorch_model.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:278b7a95739c4392fae9b818bb5343dde20be1b89318f37a6d939e1e1b9e461b +size 501200538 diff --git a/qa_mdt/checkpoints/robertabase/tokenizer.json b/qa_mdt/checkpoints/robertabase/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..ad0bcbeb288f0d1373d88e0762e66357f55b8311 --- /dev/null +++ b/qa_mdt/checkpoints/robertabase/tokenizer.json @@ -0,0 +1 @@ +{"version":"1.0","truncation":null,"padding":null,"added_tokens":[{"id":0,"special":true,"content":"","single_word":false,"lstrip":false,"rstrip":false,"normalized":true},{"id":1,"special":true,"content":"","single_word":false,"lstrip":false,"rstrip":false,"normalized":true},{"id":2,"special":true,"content":"","single_word":false,"lstrip":false,"rstrip":false,"normalized":true},{"id":3,"special":true,"content":"","single_word":false,"lstrip":false,"rstrip":false,"normalized":true},{"id":50264,"special":true,"content":"","single_word":false,"lstrip":true,"rstrip":false,"normalized":true}],"normalizer":null,"pre_tokenizer":{"type":"ByteLevel","add_prefix_space":false,"trim_offsets":true},"post_processor":{"type":"RobertaProcessing","sep":["",2],"cls":["",0],"trim_offsets":true,"add_prefix_space":false},"decoder":{"type":"ByteLevel","add_prefix_space":true,"trim_offsets":true},"model":{"dropout":null,"unk_token":null,"continuing_subword_prefix":"","end_of_word_suffix":"","fuse_unk":false,"vocab":{"":0,"":1,"":2,"":3,".":4,"Ġthe":5,",":6,"Ġto":7,"Ġand":8,"Ġof":9,"Ġa":10,"Ġin":11,"-":12,"Ġfor":13,"Ġthat":14,"Ġon":15,"Ġis":16,"âĢ":17,"'s":18,"Ġwith":19,"ĠThe":20,"Ġwas":21,"Ġ\"":22,"Ġat":23,"Ġit":24,"Ġas":25,"Ġsaid":26,"Ļ":27,"Ġbe":28,"s":29,"Ġby":30,"Ġfrom":31,"Ġare":32,"Ġhave":33,"Ġhas":34,":":35,"Ġ(":36,"Ġhe":37,"ĠI":38,"Ġhis":39,"Ġwill":40,"Ġan":41,"Ġthis":42,")":43,"ĠâĢ":44,"Ġnot":45,"Ŀ":46,"Ġyou":47,"ľ":48,"Ġtheir":49,"Ġor":50,"Ġthey":51,"Ġwe":52,"Ġbut":53,"Ġwho":54,"Ġmore":55,"Ġhad":56,"Ġbeen":57,"Ġwere":58,"Ġabout":59,",\"":60,"Ġwhich":61,"Ġup":62,"Ġits":63,"Ġcan":64,"Ġone":65,"Ġout":66,"Ġalso":67,"Ġ$":68,"Ġher":69,"Ġall":70,"Ġafter":71,".\"":72,"/":73,"Ġwould":74,"'t":75,"Ġyear":76,"Ġwhen":77,"Ġfirst":78,"Ġshe":79,"Ġtwo":80,"Ġover":81,"Ġpeople":82,"ĠA":83,"Ġour":84,"ĠIt":85,"Ġtime":86,"Ġthan":87,"Ġinto":88,"Ġthere":89,"t":90,"ĠHe":91,"Ġnew":92,"ĠâĢĶ":93,"Ġlast":94,"Ġjust":95,"ĠIn":96,"Ġother":97,"Ġso":98,"Ġwhat":99,"I":100,"Ġlike":101,"a":102,"Ġsome":103,"S":104,"ë":105,"Ġthem":106,"Ġyears":107,"'":108,"Ġdo":109,"Ġyour":110,"Ġ-":111,"Ġ1":112,"\"":113,"Ġif":114,"Ġcould":115,"?":116,"Ġno":117,"i":118,"m":119,"Ġget":120,"ĠU":121,"Ġnow":122,"Ġhim":123,"Ġback":124,"ĠBut":125,"ĠâĢĵ":126,"Ġmy":127,"Ġ'":128,"Ġonly":129,"Ġthree":130,";":131,"Ġ2":132,"The":133,"1":134,"Ġpercent":135,"Ġagainst":136,"Ġbefore":137,"Ġcompany":138,"o":139,"ĠTrump":140,"Ġhow":141,"Ġbecause":142,"Ġany":143,"Ġmost":144,"Ġbeing":145,"Ġmake":146,"Ġwhere":147,"Ġduring":148,"Ġthrough":149,"Ġwhile":150,"000":151,"ĠThis":152,"Ġmillion":153,"ing":154,"Ġ3":155,"Ġmade":156,"Ġwell":157,"Ġ10":158,"Ġdown":159,"Ġoff":160,"Ġsays":161,"Ġme":162,"ĠB":163,"Ġgoing":164,"Ġteam":165,"ĠWe":166,"Ġthose":167
,"Ġgovernment":168,"Ġway":169,"We":170,"Ġmany":171,"Ġthen":172,"Ġwork":173,"Ġtold":174,"com":175,"2":176,"Ġgame":177,"ĠAnd":178,"in":179,"year":180,"Ġp":181,"Ġvery":182,"Ġday":183,"Ġhome":184,"Ġtake":185,"Ġweek":186,"Ġsince":187,"ĠNew":188,"Ġmay":189,"Ġeven":190,"Ġseason":191,"Ġsee":192,"Ġ2017":193,"Ġstate":194,"Ġ5":195,"ed":196,"Ġshould":197,"Ġaround":198,"Ġ2018":199,"Ġsecond":200,"Ġus":201,"Ġstill":202,"Ġmuch":203,"Ġ4":204,"Ġgood":205,"Ġthink":206,"%":207,"ĠS":208,"Ġthese":209,"Ġmarket":210,"ĠD":211,"th":212,"Ġgo":213,"'re":214,"Ġsuch":215,"Ġknow":216,"Ġincluding":217,"Ġdon":218,"y":219,"Ġnext":220,"ĠP":221,"Ġdid":222,"Ġunder":223,"Ġsay":224,"en":225,"ĠL":226,"Ġbetween":227,"Ġper":228,"ĠK":229,"ĠC":230,"Ġ6":231,"Ġworld":232,"Ġpart":233,"ĠN":234,"Ġright":235,"Ġwant":236,"Ġfour":237,"),":238,"Ġhigh":239,"Ġneed":240,"re":241,"e":242,"It":243,"Ġhelp":244,"5":245,"3":246,"Ġcountry":247,"ĠR":248,"Ġpolice":249,"A":250,"Ġlong":251,"ĠThey":252,"Ġend":253,"er":254,"ĠT":255,"ĠM":256,"u":257,"Ġboth":258,"Ġhere":259,"an":260,"on":261,"Ġ7":262,"Ġde":263,"ĠShe":264,"Ġbusiness":265,"Ġreport":266,"j":267,"ers":268,"Ġreally":269,"ĠPresident":270,"ar":271,"ĠG":272,"ĠFriday":273,"ĠF":274,"Ġbest":275,"Ġsame":276,"Ġanother":277,"Ġset":278,"old":279,"ĠThat":280,"as":281,"n":282,"Ġcome":283,"Ġfamily":284,"Ġpublic":285,"ĠFor":286,"ĠAs":287,"0":288,"ĠH":289,"Ġ8":290,"Ġ20":291,"Ġfive":292,"es":293,"ĠTuesday":294,"Ġn":295,"ĠThursday":296,"Ġquarter":297,"h":298,"Ġtop":299,"Ġgot":300,"Ġlife":301,"ĠMonday":302,"Ġfound":303,"Ġuse":304,"ĠW":305,"4":306,"ĠWednesday":307,"Ġown":308,"Ġaccording":309,"Ġplay":310,"Ġshow":311,"ĠSt":312,"Ġman":313,"Ġleft":314,"ĠUnited":315,"Ġ12":316,"Ġplace":317,"ĠIf":318,"Ġlot":319,"Ġformer":320,"Ġ0":321,").":322,"Ġsupport":323,"ie":324,"Ġbillion":325,"Ġt":326,"Ġshares":327,"!":328,"z":329,"k":330,"ĠState":331,"Ġpoints":332,"Ġgroup":333,"Ġschool":334,"Ġinformation":335,"Ġ2016":336,"al":337,"r":338,"Ġwin":339,"Ġnews":340,"Ġused":341,"Ġput":342,"Ġcity":343,"ĠJ":344,"ĠThere":345,"Ġnumber":346,"C":347,"'ve":348,"Ġeach":349,"Ġtoo":350,"Ġwon":351,"ly":352,"Ġmonth":353,"is":354,"Ġadded":355,"Ġlook":356,"Ġbetter":357,"Ġevery":358,"Ġ&":359,"Ġdays":360,"Ġ9":361,"Ġtook":362,"Ġnight":363,"Ġe":364,"Ġ11":365,"os":366,"Ġfew":367,"or":368,"ĠNorth":369,"ĠYou":370,"Ġthird":371,"Ġgreat":372,"Ġcalled":373,"ĠOn":374,"Ġpast":375,"Ġcame":376,"Ġmonths":377,"ĠSaturday":378,"Ġ15":379,"Ġbig":380,"ĠE":381,"ĠUS":382,"Ġthings":383,"ĠO":384,"Ġd":385,"Ġstart":386,"B":387,"Ġstock":388,"Ġ30":389,"Ġwomen":390,"ĠSouth":391,"ĠMay":392,"Ġnever":393,"Ġpresident":394,"ĠSunday":395,"Ġwithout":396,"man":397,"8":398,"Ġdidn":399,"Ġlocal":400,"6":401,"Ġsomething":402,"Ġcase":403,"ĠAll":404,"it":405,"7":406,"ĠSo":407,"Ġchildren":408,"Ġaway":409,"Ġlittle":410,"Ġsix":411,"ĠCity":412,"ĠCounty":413,"Ġdata":414,"at":415,"Ġalready":416,"d":417,"Ġmoney":418,"Ġearly":419,"Ġacross":420,"Ġexpected":421,"Ġrun":422,"Ġlater":423,"am":424,"Ġprice":425,"Ġgames":426,"ĠMr":427,"b":428,"Ġmight":429,"Ġdifferent":430,"Ġreported":431,"Ġdeal":432,"Ġmedia":433,"Ġgrowth":434,"Ġcommunity":435,"ĠChina":436,"'m":437,"c":438,"Ġwent":439,"ĠNo":440,"Ġable":441,"Ġmaking":442,"Ġarea":443,"Ġfar":444,"Ġstatement":445,"ĠHouse":446,"Ġworking":447,"M":448,"Ġk":449,"Ġseen":450,"Ġcompanies":451,"Ġtoday":452,"Ġmembers":453,"Ġuntil":454,"Ġfull":455,"Ġagain":456,"Ġhalf":457,"Ġshare":458,"le":459,"Ġalways":460,"Ġcourt":461,"l":462,"and":463,"Ġchange":464,"Ġfind":465,"9":466,"Ġsystem":467,"ĠV":468,"ĠYork":469,"ĠAmerican":470,"Ġhead":471,"Ġplayers":472,"Ġdoes":473,"Ġhealth":474,"Ġm"
:475,"Ġpower":476,"Ġpoint":477,"Ġhit":478,"Ġ.":479,"Ġ--":480,"Ġfree":481,".,":482,"Ġlead":483,"Ġseveral":484,"Ġrecent":485,"Ġcall":486,"N":487,"Ġlaw":488,"Ġkeep":489,"Ġopen":490,"ĠNews":491,"Ġgive":492,"ia":493,"ĠMarch":494,"D":495,"ĠNational":496,"ĠAt":497,"Ġtimes":498,"Ġfuture":499,"R":500,"Ġ14":501,"ĠJune":502,"Ġofficials":503,"Ġ18":504,"Ġimportant":505,"f":506,"Ġfinal":507,"Ġ13":508,"ĠOne":509,"P":510,"Ġfollowing":511,"Ġcar":512,"Ġleast":513,"Ġwater":514,"Ġevent":515,"Ġline":516,"Ġmove":517,"Ġservices":518,"Ġhaving":519,"ĠWhen":520,"Ġstudents":521,"ĠPolice":522,"el":523,"Ġam":524,"ĠZ":525,"Ġside":526,"Ġstory":527,"Ġdue":528,"Ġmeeting":529,"K":530,"Ġmust":531,"ĠStates":532,"Ġlikely":533,"G":534,"Ġcontinue":535,"Ġago":536,"Ġparty":537,"Ġmajor":538,"Ġindustry":539,"Ġless":540,"30":541,"Ġun":542,"Ġhard":543,"Ġservice":544,"Ġ16":545,"Ġlooking":546,"Ġheld":547,"ve":548,"Ġwhether":549,"ĠJuly":550,"Ġtaken":551,"Ġalong":552,"Ġasked":553,"Ġstarted":554,"Ġbecome":555,"Ġforward":556,"Ġresearch":557,"Ġoffice":558,"Ġpolitical":559,"to":560,"Ġtogether":561,"Ġgetting":562,"Ġplan":563,"Ġ25":564,"T":565,"Ġamong":566,"Ġcoming":567,"Ġdecision":568,"Ġvideo":569,"Ġ2015":570,"g":571,"ĠAfter":572,"Ġsecurity":573,"L":574,"Ġcare":575,"Ġgiven":576,"Ġavailable":577,"âĢĶ":578,"Ġs":579,"ĠWest":580,"'ll":581,"Ġpay":582,"Ġnear":583,"Ġsaying":584,"Ġannounced":585,"Ġprogram":586,"ĠApril":587,"Ġreal":588,"ĠUniversity":589,"ĠWith":590,"AP":591,"Ġsocial":592,"Ġclose":593,"et":594,"Ġcurrent":595,"Ġwhy":596,"F":597,"ĠTo":598,"ĠTwitter":599,"Ġthough":600,"Ġ17":601,"Ġtaking":602,"ĠInc":603,"Ġmen":604,"w":605,"Ġcomes":606,"ley":607,"Ġdoing":608,"Ġprocess":609,"ĠJohn":610,"ch":611,"00":612,"Ġfinancial":613,"Ġlow":614,"Ġenough":615,"ĠWhile":616,"Ġfurther":617,"Ġpost":618,"Ġfeel":619,"st":620,"Ġperson":621,"ĠFacebook":622,"ĠWorld":623,"Ġwithin":624,"ad":625,"Ġdone":626,"the":627,"Ġlate":628,"Ġtax":629,"Ġdoesn":630,"Ġthing":631,"Ġnational":632,"Ġjob":633,"Ġusing":634,"ĠHowever":635,"ic":636,"Ġcampaign":637,"Ġrecord":638,"Ġbehind":639,"://":640,"ĠDepartment":641,"p":642,"Ġothers":643,"ĠJanuary":644,"Ġorder":645,"Ġ[":646,"Ġsales":647,"Ġyet":648,"Ä":649,"Ġsmall":650,"Ġseries":651,"Ġface":652,"ĠWhat":653,"Ġ50":654,"Ġever":655,"Ġearlier":656,"Ġlove":657,"up":658,"Ġrights":659,"ĠAn":660,"ist":661,"Ġmorning":662,"ĠWashington":663,"Ġyoung":664,"Ġlatest":665,"ĠIndia":666,"Ġtrying":667,"Ġfire":668,"Ġled":669,"Ġstrong":670,"Ġreturn":671,"Ġlevel":672,"O":673,"Ġaverage":674,"Ġperiod":675,"Ġexperience":676,"ak":677,"Ġpossible":678,"Ġbelieve":679,"Ġinclude":680,"Ġoil":681,"Ġrecently":682,"Ġonce":683,"Ġknown":684,"Ġlost":685,"Ġsure":686,"us":687,"Ġweeks":688,"Ġfood":689,"Ġreports":690,"Ġrating":691,"ĠMinister":692,"Ġwoman":693,"Ġprovide":694,"Ġproject":695,"Ġissue":696,"Ġlive":697,"10":698,"Ġclear":699,"he":700,"Ġcost":701,"Ġplayed":702,"Ġreleased":703,"Ġcoach":704,"v":705,"Ġ24":706,"Ġseven":707,"Ġplans":708,"Ġdevelopment":709,"ur":710,"ĺ":711,"Ġincrease":712,"This":713,"Ġpolicy":714,"Ġcent":715,"Ġbased":716,"E":717,"il":718,"ĠDecember":719,"Ġglobal":720,"Ġtrade":721,"Ġhours":722,"Ġhigher":723,"Ġgoal":724,"H":725,"ĠAl":726,"Ġ100":727,"Ġminutes":728,"Ġelection":729,"ĠAmerica":730,"Ġrate":731,"ĠCh":732,"Ġ21":733,"...":734,"ĠWhite":735,"Ġdirector":736,"Ġposition":737,"Ġshot":738,"Ġlarge":739,"Ġc":740,"Ġb":741,"]":742,"Ġissues":743,"Ġdeath":744,"Ġbuilding":745,"Ġtotal":746,"Ġoften":747,"Ġv":748,"Ġcountries":749,"Ġhistory":750,"Ġoutside":751,"Ġfederal":752,"Ġ19":753,"Ġfact":754,"ĠHigh":755,"Ġcareer":756,"im":757,"Ġinternational":758,"ĠNovember":759,"Ġf
ront":760,"Ġkind":761,"Ġkey":762,"ra":763,"ĠSan":764,"Ġshort":765,"Ġname":766,"ĠAccording":767,"Ġcourse":768,"Ġre":769,"Ġwanted":770,"W":771,"ĠSeptember":772,"Ġinterest":773,"Ġrole":774,"Ġresults":775,"Ġeconomic":776,"Ġ2014":777,"Ġchance":778,"ĠOctober":779,"Ġspecial":780,"Ġofficial":781,"Ġneeds":782,"um":783,"Ġl":784,"Ġproducts":785,"Ġnon":786,"Ġ@":787,"ĠBank":788,"Ġahead":789,"Ġhouse":790,"U":791,"Ġboard":792,"Ġold":793,"Ġsaw":794,"Ġlower":795,"ĠEuropean":796,"Ġcontrol":797,"ĠRussia":798,"Ġeight":799,"Ġrelease":800,"Ġpotential":801,"Ġthought":802,"Ġinvestigation":803,"Ġonline":804,"based":805,"Ġtechnology":806,"ĠDonald":807,"id":808,"Ġbody":809,"Ġrisk":810,"ian":811,"Ġcapital":812,"Ġstaff":813,"Ġaction":814,"ĠLeague":815,"Ġplaying":816,"Ġmakes":817,"Ġalmost":818,"Ġperformance":819,"Ġ22":820,"Ġg":821,"Ġfilm":822,"Ġnearly":823,"ĠCenter":824,"Ġvisit":825,"ĠGroup":826,"Ġbank":827,"Ġbit":828,"Ġreceived":829,"ĠAugust":830,"Ġmilitary":831,"ĠHis":832,"ine":833,"Ġchief":834,"ĠSchool":835,"Ġbring":836,"ĠCourt":837,"Ġ(@":838,"Ġmeans":839,"ĠSh":840,"Ġfans":841,"Ġse":842,"Ġ40":843,"20":844,"\".":845,"V":846,"Ġcut":847,"Ġkilled":848,"Ġ#":849,"Ġprices":850,"Ġgave":851,"ĠStreet":852,"ir":853,"ĠY":854,"Ġcurrently":855,"Ġf":856,"ay":857,"ne":858,"te":859,"Ġtry":860,"ĠPark":861,"ĥ":862,"J":863,"Ġquestion":864,"Ġhand":865,"Ġeconomy":866,"Ġinvestors":867,"able":868,"Ġplayer":869,"ĠBy":870,"ĠDavid":871,"Ġloss":872,"ab":873,"Ġbelow":874,"Ġwrote":875,"co":876,"ate":877,"Ġrunning":878,"un":879,"Ġbegan":880,"Ġsingle":881,"Ġfield":882,"Ġ23":883,"Ġleader":884,"Ġw":885,"ĠCalifornia":886,"Ġfourth":887,"Ġactually":888,"Ġlist":889,"ll":890,"Ġcouple":891,"Ġstudy":892,"Ġteams":893,"He":894,"ah":895,"ĠCanada":896,"Ġla":897,"Ġresult":898,"Ġaccess":899,"Ġvote":900,"ĠMore":901,"ĠFebruary":902,"Ġrevenue":903,"Ġoffer":904,"Ġlet":905,"ier":906,"Ġbuy":907,"Ġattack":908,"Ġblack":909,"Ġr":910,"Ġareas":911,"Ġstop":912,"Ġimpact":913,"Ġmatch":914,"Ġinvestment":915,"Ġcustomers":916,"Ġleaders":917,"ies":918,"Ġmember":919,"Ġchild":920,"Ġroad":921,"ul":922,"Ġvalue":923,"Ġshows":924,"ĠDr":925,"ĠDe":926,"ant":927,"ĠLondon":928,"Ġroom":929,"Ġmusic":930,"Ġproduction":931,"Ġanything":932,"Ġfirm":933,"Ġbiggest":934,"Ġair":935,"Ġproblem":936,"Ġgeneral":937,"Ġwasn":938,"Ġi":939,"Ġprivate":940,"Ġespecially":941,"Ġadministration":942,"Ġadditional":943,"ĠCo":944,"Ġopportunity":945,"Ġhold":946,"&":947,"Ġmatter":948,"Ġsenior":949,"Ġclub":950,"Ġsomeone":951,"ĠÃ":952,"ĠEast":953,"Ġ2019":954,".'":955,"Ġneeded":956,"ĠJames":957,"time":958,"Ġhowever":959,"Ġeverything":960,"Ġeveryone":961,"Ġdied":962,"Ġinvolved":963,"Ġfriends":964,"Ġisn":965,"Ġworth":966,"ik":967,"ĠCup":968,"Ġshowed":969,"There":970,"Ġ28":971,"Ġmeet":972,"Ġ26":973,"Ġ27":974,"Y":975,"Ġregion":976,"ĠPress":977,"ĠNow":978,"Ġson":979,"Ġspace":980,"Ġleading":981,"Ġstates":982,"Ġweekend":983,"Ġ£":984,"Ġmother":985,"Ġprevious":986,"ĠUK":987,"ĠMichael":988,"Ġleave":989,"est":990,"em":991,"Ġz":992,"ĠSome":993,"ors":994,"out":995,"15":996,"Ġwar":997,"Ġwebsite":998,"Ġstar":999,"X":1000,"ro":1001,"Ġtarget":1002,"Ġhimself":1003,"Ġturn":1004,"ĠEurope":1005,"Ġworked":1006,"Ġenergy":1007,"Ġscored":1008,"Ġ*":1009,"Ġsoon":1010,"Ġball":1011,"ĠTV":1012,"Ġannual":1013,"Ġ2013":1014,"Ġrace":1015,"ĠInternational":1016,"'d":1017,"ĠMarket":1018,"Ġconference":1019,"io":1020,"Ġo":1021,"Ġchanges":1022,"ig":1023,"Ġofficers":1024,"Ġinside":1025,"Ġform":1026,"Ġpublished":1027,"Ġphone":1028,"Ġco":1029,"Ġlegal":1030,"Ġexecutive":1031,"Ġfight":1032,"ings":1033,"Ġhope":1034,"Ġsummer":1035,"Ġofficer":1036,"Ġfootball":1037,"Ġpr
operty":1038,"@":1039,"Ġbook":1040,"Ġparents":1041,"Ġcosts":1042,"ac":1043,"Ġmanager":1044,"Ġcreate":1045,"Ġage":1046,"Ġemail":1047,"Ġmarkets":1048,"Ġmain":1049,"Ġhuman":1050,"Ġsent":1051,"Ġmanagement":1052,"ĠDay":1053,"ton":1054,"Ġcash":1055,"Ġfocus":1056,"Ġexpect":1057,"Ġtraining":1058,"Ġbecame":1059,"Ġwhose":1060,"Ġevents":1061,"Ġround":1062,"ĠLe":1063,"Ġfell":1064,"Ġabove":1065,"Ġanalysts":1066,"Ġtalk":1067,"Ġsituation":1068,"ri":1069,"ated":1070,"ke":1071,"Ġwants":1072,"ag":1073,"Ġlives":1074,"om":1075,"Ġal":1076,"Ġdemand":1077,"Ġsafety":1078,"Ġrest":1079,"ĠCouncil":1080,"Ġpersonal":1081,"Ġsite":1082,"ĠRussian":1083,"Ġmid":1084,"Ġnothing":1085,"Ġwhole":1086,"Ġbill":1087,"Ġsold":1088,"ĠBritish":1089,"se":1090,"Ġremain":1091,"12":1092,"Ġforeign":1093,"Ġshooting":1094,"Ġstay":1095,"50":1096,"ang":1097,"Ġhospital":1098,"Ġbad":1099,"Ġaddress":1100,"ĠKorea":1101,"Ġhappened":1102,"Ġcharges":1103,"Ġwhite":1104,"Ġ31":1105,"If":1106,"Ġearnings":1107,"Ġbreak":1108,"Ġlight":1109,"Ġterms":1110,"ĠChinese":1111,"ĠSenate":1112,"ana":1113,"Ġidea":1114,"ap":1115,"of":1116,"Ġnine":1117,"Ġcompared":1118,"Ġbuild":1119,"ard":1120,"In":1121,"Ġsimilar":1122,"Ġgas":1123,"Ġvictory":1124,"Ġ2012":1125,"Ġdebt":1126,"ĠMar":1127,"Ġarrested":1128,"Ġcomment":1129,"Ġincreased":1130,"Ġmedical":1131,"Ġ29":1132,"ĠJan":1133,"Ġgroups":1134,"Ġdespite":1135,"Ġfall":1136,"Ġtell":1137,"Ġworkers":1138,"Ġtown":1139,"é":1140,"Ġwife":1141,"Ġquestions":1142,"Ġcontinued":1143,"Ġheart":1144,"Ġmet":1145,"Ġbrought":1146,"Ġhelped":1147,"ĠCongress":1148,"Ġstep":1149,"Ġfather":1150,"Ġmoment":1151,"Ġproduct":1152,"Ġprobably":1153,"Ġlargest":1154,"Ġvehicle":1155,"ĠEngland":1156,"Ġallow":1157,"Ġstarting":1158,"Ġkids":1159,"Ġincident":1160,"Ġnet":1161,"Ġrates":1162,"ĠRead":1163,"Ġpressure":1164,"Ġincluded":1165,"Ġread":1166,"Ġissued":1167,"ol":1168,"Ġeither":1169,"Ġefforts":1170,"Ġincludes":1171,"ĠRepublican":1172,"ish":1173,"âĢ¦":1174,"Ġgoals":1175,"aj":1176,"Ġen":1177,"x":1178,"Ġraised":1179,"au":1180,"Ġlonger":1181,"ut":1182,"Ġwatch":1183,"ĠTexas":1184,"You":1185,"Ġrange":1186,"nd":1187,"Ġfunds":1188,"Ġremains":1189,"ĠMark":1190,"Ġ60":1191,"Ġque":1192,"sh":1193,"Ġinterview":1194,"Ġrather":1195,"Ġresidents":1196,"Ġgrowing":1197,"Ġpre":1198,"Ġpaid":1199,"Ġcases":1200,"ĠReuters":1201,"Ġdifficult":1202,"Ġsign":1203,"ĠGoogle":1204,"Ġhttps":1205,"ĠPaul":1206,"Ġliving":1207,"day":1208,"ĠQ":1209,"iz":1210,"ĠRed":1211,"Ġland":1212,"They":1213,"ĠRoad":1214,"_":1215,"ĠThese":1216,"Ġview":1217,"Ġagency":1218,"Ġreason":1219,"Ġallowed":1220,"ĠAustralia":1221,"az":1222,"ĠRe":1223,"Ġturned":1224,"11":1225,"Ġnation":1226,"Ġready":1227,"Ġpress":1228,"Ġbudget":1229,"Ġdaily":1230,"ĠChief":1231,"Ġfamilies":1232,"Ġsignificant":1233,"ĠFirst":1234,"Ġthemselves":1235,"Ġj":1236,"Ġruns":1237,"Ġaccused":1238,"Ġtakes":1239,"Ġspent":1240,"Ġvia":1241,"ot":1242,"ina":1243,"25":1244,"land":1245,"Ġexample":1246,"Ġauthorities":1247,"Ġdate":1248,"Ġended":1249,"all":1250,"Reuters":1251,"Ġbusinesses":1252,"ans":1253,"Ġdetails":1254,"Ġground":1255,"Ġpretty":1256,"ĠApple":1257,"ation":1258,"ĠSmith":1259,"ĠCompany":1260,"ĠFlorida":1261,"Ġdrug":1262,"Ġresponse":1263,"one":1264,"Ġeducation":1265,"Ġmean":1266,"Ġleague":1267,"Ġanyone":1268,"Ġminister":1269,"Ġtitle":1270,"Ġadding":1271,"Ġproblems":1272,"Ġopening":1273,"Ġconditions":1274,"Ġred":1275,"Ġdecided":1276,"Å":1277,"Ġposted":1278,"term":1279,"Ġamount":1280,"ĠEU":1281,"Ġsuccess":1282,"Ġevidence":1283,"ĠObama":1284,"Ġaddition":1285,"Ġprovided":1286,"ĠLos":1287,"Ġagreement":1288,"Ġstage":1289,"ens":1290,"Ġrelationship":1291,"ĠGene
ral":1292,"Ġsector":1293,"Ġstudent":1294,"ating":1295,"Ġtest":1296,"\",":1297,"Ġwinning":1298,"Ġfelt":1299,"Ġsource":1300,"Z":1301,"Ġseems":1302,"Ġcause":1303,"Ġschools":1304,"Ġdrive":1305,"Ġensure":1306,"Ġhuge":1307,"ĠMy":1308,"ĠHealth":1309,"Ġscene":1310,"Ġgiving":1311,"Ġcenter":1312,"Ġpositive":1313,"Ġyards":1314,"Ġjobs":1315,"Ġaccount":1316,"Ġheard":1317,"Ġquality":1318,"Ġways":1319,"Ġimmediately":1320,"Ġemployees":1321,"are":1322,"Ġpass":1323,"ĠCEO":1324,"Ġreceive":1325,"Ġlooks":1326,"ĠAfrica":1327,"Ġthroughout":1328,"led":1329,"Ġrelated":1330,"Ġsell":1331,"ĠUnion":1332,"ĠPhoto":1333,"ter":1334,"Ġquickly":1335,"ĠHow":1336,"Ġvarious":1337,"Ġreach":1338,"Ġpick":1339,"Ġcharged":1340,"Ġquite":1341,"ent":1342,"q":1343,"ins":1344,"Ġphoto":1345,"Ġunderstand":1346,"ĠâĢ¢":1347,"Ġreached":1348,"Ġtrack":1349,"uk":1350,"Ġeffort":1351,"ville":1352,"Ġcentral":1353,"Ġdaughter":1354,"Ġcontract":1355,"Ġinjury":1356,"Ġopened":1357,"Ġ($":1358,"Ġstraight":1359,"17":1360,"Ġcredit":1361,"ĠIndian":1362,"Ġsexual":1363,"Ġworks":1364,"Ġeasy":1365,"18":1366,"Ġclosed":1367,"Ġh":1368,"Ġhappen":1369,"Ġforce":1370,"ler":1371,"Ġhappy":1372,"Ġshared":1373,"Ġoverall":1374,"Ġmoving":1375,"á":1376,"Ġprojects":1377,"ĠBlack":1378,"Ġconcerns":1379,"Ġclass":1380,"Ġtried":1381,"Ġappeared":1382,"Ġcontent":1383,"ĠDistrict":1384,"Ġterm":1385,"Ġinstead":1386,"ĠOffice":1387,"Ġcontinues":1388,"Ġlevels":1389,"Ġafternoon":1390,"Ġfund":1391,"Ġsale":1392,"Ġdriver":1393,"Ġask":1394,"Ġcannot":1395,"ner":1396,"end":1397,"ĠHere":1398,"field":1399,"Ġstore":1400,"www":1401,"Ġcertain":1402,"Ġself":1403,"Ġdollar":1404,"ĠHer":1405,"Ġpopular":1406,"Ġfollow":1407,"Ġspending":1408,"by":1409,"Ġmoved":1410,"Ġgoes":1411,"Ġcreated":1412,"Ġstand":1413,"Ġoperations":1414,"Ġlooked":1415,"Ġtreatment":1416,"ov":1417,"Ġdistrict":1418,"Ġsigned":1419,"Ġhands":1420,"Ġmodel":1421,"ĠAngeles":1422,"Ġy":1423,"Ġborder":1424,"Ġincome":1425,"ĠLast":1426,"Ġcharge":1427,"Ġdriving":1428,"ĠJapan":1429,"Ġrise":1430,"Ġtalks":1431,"Ġfollowed":1432,"Ġpreviously":1433,"Ġusers":1434,"Ġfunding":1435,"ĠJohnson":1436,"Ġ":1437,"ou":1438,"ai":1439,"Ġnamed":1440,"Ġfriend":1441,"ĠNov":1442,"Ġdefense":1443,"ĠBritain":1444,"Ġentire":1445,"Ġtrading":1446,"Ġfailed":1447,"ĠEl":1448,"Ġclaims":1449,"Ġcomments":1450,"Ġbeat":1451,"ib":1452,"Ġbasis":1453,"ĠJones":1454,"Ġpresent":1455,"ĠBe":1456,"Ġdouble":1457,"Ġrose":1458,"ite":1459,"Ġability":1460,"Ġoriginal":1461,"Ġdead":1462,"ĠCommission":1463,"ĠMe":1464,"Ġcompetition":1465,"Ġ2011":1466,"Ġknew":1467,"Ġmaterial":1468,"av":1469,"ĠFrance":1470,"Ġscore":1471,"Ġsense":1472,"Ġserious":1473,"Ġconfirmed":1474,"Ġanti":1475,"Ġviolence":1476,"Ġimprove":1477,"son":1478,"ó":1479,"ĠAP":1480,"Ġsh":1481,"Ġhost":1482,"ĠMike":1483,"Ġpatients":1484,"ĠNFL":1485,"Ġcrisis":1486,"Ġrevealed":1487,"ach":1488,"ĠPrime":1489,"Ġbuilt":1490,"ĠNot":1491,"Ġrules":1492,"Ġelse":1493,"Ġdepartment":1494,"Ġitself":1495,"ise":1496,"500":1497,"Ġcomplete":1498,"ion":1499,"Ġtrial":1500,"ĠBay":1501,"ĠDec":1502,"Ġattention":1503,"Ġtravel":1504,"ĠCentral":1505,"ry":1506,"Ġagreed":1507,"Ġmind":1508,"ĠMc":1509,"Ġ70":1510,"Ġcontact":1511,"ari":1512,"ĠTimes":1513,"Ġspot":1514,"ĠFrench":1515,"Ġgets":1516,"op":1517,"Ġbrand":1518,"Ġcalls":1519,"Ġbanks":1520,"Ġdesign":1521,"Ġsafe":1522,"Ġoffers":1523,"Ġpractice":1524,"ĠOf":1525,"á":1526,"ling":1527,"Ġtrue":1528,"off":1529,"Ġnumbers":1530,"Ġfun":1531,"Ġlearn":1532,"Ġmultiple":1533,"ĠIs":1534,"res":1535,"als":1536,"Ġcommon":1537,"ized":1538,"Ġchallenge":1539,"Ġcommittee":1540,"ĠOur":1541,"Ġbase":1542,"ani":1543,"ĠAssociation":1544,"ung":1545,"
Ġnetwork":1546,"ĠBrown":1547,"Ġapproach":1548,"16":1549,"Ġfinished":1550,"Ġreview":1551,"Ġrequired":1552,"Ġapp":1553,"ĠMan":1554,"ĠâĢ¦":1555,"twitter":1556,"ĠDemocratic":1557,"13":1558,"Ġevening":1559,"ĠTom":1560,"ä":1561,"ĠAssociated":1562,"ĠCanadian":1563,"Ġcollege":1564,"Ġspokesman":1565,"Ġarticle":1566,"Ġtowards":1567,"ĠChicago":1568,"Ġmovie":1569,"14":1570,"ity":1571,"Ġforces":1572,"ĠChris":1573,"ĠDemocrats":1574,"Ġfeatures":1575,"Ġhearing":1576,"ĠX":1577,"ĠAlso":1578,"Ġmessage":1579,"age":1580,"Ġnoted":1581,"ĠSuper":1582,"Ġthousands":1583,"aw":1584,"ĠBill":1585,"ĠAr":1586,"ĠLa":1587,"ip":1588,"Ġ/":1589,"ĠDuring":1590,"Ġnote":1591,".)":1592,"Ġwrong":1593,"if":1594,"Ġpassed":1595,"ĠTwo":1596,"Ġdie":1597,",'":1598,"ĠDon":1599,"ĠGermany":1600,"Ġletter":1601,"Ġdescribed":1602,"ĠIran":1603,"ĠWilliams":1604,"Ġparticularly":1605,"Ġadd":1606,"Ġconversation":1607,"ĠSe":1608,"Ġhighest":1609,"be":1610,"Ġhomes":1611,"Ġsports":1612,"Ġgone":1613,"ĠAd":1614,"Ġel":1615,"Ġopportunities":1616,"Ġwords":1617,"Ġleaving":1618,"ĠChristmas":1619,"As":1620,"ĠGovernment":1621,"Ġsimply":1622,"Ġhusband":1623,"ĠResearch":1624,"ĠMexico":1625,"ates":1626,"ale":1627,"ĠGreen":1628,"$":1629,"od":1630,"ĠHall":1631,"Ġnatural":1632,"Ġoperating":1633,"les":1634,"ations":1635,"ĠKim":1636,"Ġgold":1637,"ok":1638,"Ġprovides":1639,"(":1640,"ell":1641,"Ġbegin":1642,"ĠParty":1643,"back":1644,"ĠAmazon":1645,"19":1646,"Ġmajority":1647,"ĠEven":1648,"Ġcheck":1649,"Ġweather":1650,"Ġorganization":1651,"Ġstories":1652,"ĠCar":1653,"Ġforced":1654,"ĠGeorge":1655,"Ġwalk":1656,"ong":1657,"Ġfiled":1658,"ĠJustice":1659,"Ġlaunched":1660,"Ġoffered":1661,"Ġwww":1662,"Ġconstruction":1663,"ĠBen":1664,"Ġserved":1665,"Ġ...":1666,"Ġparts":1667,"Ġcancer":1668,"Ġguys":1669,"Reporting":1670,"ash":1671,"less":1672,"Ġleadership":1673,"ĠCommittee":1674,"Ġregular":1675,"Ġcouncil":1676,"Ġcars":1677,"ĠDirector":1678,"Ġjudge":1679,"Ġvictims":1680,"ĠDaily":1681,"Ġkept":1682,"Ġeffect":1683,"Ġbeyond":1684,"pm":1685,"Ġtalking":1686,"Ġconsidered":1687,"ore":1688,"ĠAdvertisement":1689,"Ġst":1690,"ED":1691,"Ġmiddle":1692,"Ġraise":1693,"we":1694,"Ġclaimed":1695,"ino":1696,"Ġalleged":1697,"ĠPro":1698,"ĠScott":1699,"ĠOct":1700,"Ġconsider":1701,"ĠShare":1702,"Ġtraffic":1703,"ĠAfrican":1704,"Ġcouldn":1705,"Ġtoward":1706,"Ġsearch":1707,"But":1708,"Ġlaunch":1709,"Ġinjured":1710,"That":1711,"Ġalthough":1712,"Ġactivities":1713,"Ġchanged":1714,"Ġsources":1715,"Ġmissing":1716,"Ġu":1717,"Ġ35":1718,"Ġcover":1719,"ised":1720,"Ġ|":1721,"ow":1722,"ES":1723,"Ġdecades":1724,"ich":1725,"Ġcaused":1726,"Ġelections":1727,"ane":1728,"IS":1729,"Ġfeet":1730,"ĠBar":1731,"Ġversion":1732,"Ġgrow":1733,"Ġvehicles":1734,"Ġoptions":1735,"Ġindividual":1736,"Ġenvironment":1737,"ĠRobert":1738,"ĠValley":1739,"ĠFrom":1740,"per":1741,"ara":1742,"Ġsystems":1743,"Ġprotect":1744,"ĠKing":1745,"Ġinjuries":1746,"Ġfinally":1747,"Ġnuclear":1748,"40":1749,"Ġratio":1750,"Ġgun":1751,"ĠPakistan":1752,"ĠManagement":1753,"ĠAir":1754,"ce":1755,"Ġopposition":1756,"ment":1757,"ick":1758,"Ġpro":1759,"Ġact":1760,"Ġplatform":1761,"Ġlack":1762,"Ġpair":1763,"Ġ500":1764,"Ġcalling":1765,"ary":1766,"Ġprograms":1767,"Ġscheduled":1768,"Ġfast":1769,"Ġjoined":1770,"ĠWar":1771,"ĠEditing":1772,"ĠSince":1773,"ĠRyan":1774,"ĠMac":1775,"ĠBig":1776,"ĠLake":1777,"Ġdigital":1778,"When":1779,"ue":1780,"Ġassets":1781,"Ġseeing":1782,"ĠAct":1783,"Ġpartner":1784,"ĠBoard":1785,"Ġbeginning":1786,"Ġsupply":1787,"Ġmiles":1788,"Ġprison":1789,"ons":1790,"ĠAmericans":1791,"ub":1792,"ĠOr":1793,"me":1794,"Ġbenefits":1795,"Ġbenefit":1796,"Ġmeasures":1797,"Ġ
hear":1798,"Ġparties":1799,"Ġsuccessful":1800,"ĠJust":1801,"Ġvictim":1802,"Ġblock":1803,"Ġlimited":1804,"Ġtrip":1805,"ĠPeople":1806,"Ġserve":1807,"Ġart":1808,"ism":1809,"Ġwide":1810,"ĠSch":1811,"Ġ80":1812,"ĠThomas":1813,"Ġ90":1814,"Ġstocks":1815,"Ġgirl":1816,"ĠAsia":1817,"Ġseeking":1818,"Ġcertainly":1819,"ĠServices":1820,"ĠCollege":1821,"Ġcommunities":1822,"Ġextra":1823,"Ġ2010":1824,"ness":1825,"Ġholding":1826,"ous":1827,"Ġtough":1828,"ade":1829,"Ġmobile":1830,"Ġowns":1831,"ĠDo":1832,"ĠFire":1833,"Ġspoke":1834,"Ġreturned":1835,"Ġsize":1836,"Ġcriminal":1837,"ĠInstagram":1838,"Ġoffering":1839,"ĠGod":1840,"ĠService":1841,"Ġpage":1842,"her":1843,"Ġdeep":1844,"wood":1845,"Ġcrime":1846,"ĠSports":1847,"ile":1848,"ĠGlobal":1849,"Ġproposed":1850,"ain":1851,"Ġsession":1852,"ĠFederal":1853,"ĠSyria":1854,"Ġch":1855,"Ġthreat":1856,"Ġallegations":1857,"ĠRepublicans":1858,"ĠGerman":1859,"Ġstrategy":1860,"Ġcommercial":1861,"ING":1862,"ĠSecretary":1863,"Q":1864,"Ġreporters":1865,"100":1866,"ĠCapital":1867,"ĠBoth":1868,"ĠPost":1869,"ĠIsrael":1870,"Ġsave":1871,"ts":1872,"ill":1873,"Ġdrop":1874,"Ġreserved":1875,"ĠMany":1876,"Ġavoid":1877,"Ġ200":1878,"iv":1879,"Ġdamage":1880,"Ġcondition":1881,"Ġdropped":1882,"Ġdoor":1883,"Ġplanning":1884,"ire":1885,"Ġcard":1886,"Ġdesigned":1887,"Ġreduce":1888,"AN":1889,"ĠUn":1890,"ford":1891,"ĠThen":1892,"Ġpic":1893,"ĠCopyright":1894,"Ġrain":1895,"ĠMartin":1896,"Ġdomestic":1897,"45":1898,"ge":1899,"Ġmurder":1900,"Ġspeech":1901,"line":1902,"Ġhelping":1903,"Ġplanned":1904,"Ġfeature":1905,"ud":1906,"Ġtype":1907,"ham":1908,"ĠPublic":1909,"ja":1910,"Ġinsurance":1911,"Ġattacks":1912,"ĠCorp":1913,"Ġforecast":1914,"Ġresources":1915,"ma":1916,"?\"":1917,"ĠAm":1918,"ĠSept":1919,"Ġpush":1920,"Ġattorney":1921,"23":1922,"Ġemergency":1923,"Ġwinner":1924,"Ġblood":1925,"Ġnorth":1926,"ĠFeb":1927,"Ġbaby":1928,"Ġfloor":1929,"Ġspend":1930,"Ġex":1931,"Ġdollars":1932,"Ġunit":1933,"ĠHill":1934,"Ġder":1935,"ĠAbout":1936,"Ġalone":1937,"ization":1938,"Ġpresidential":1939,"Ġactivity":1940,"ĠTHE":1941,"ee":1942,"ber":1943,"ĠOther":1944,"Ġowner":1945,"Ġhour":1946,"Ġcities":1947,"Ġanswer":1948,"ide":1949,"Ġfully":1950,"ek":1951,"ists":1952,"Ġcoverage":1953,"Ġvs":1954,"Ġfigure":1955,"Ġpopulation":1956,"org":1957,"Ġsnow":1958,"Ġbecoming":1959,"ĠSam":1960,"ĠCarolina":1961,"Ġjoin":1962,"Ġprofit":1963,"Ġitems":1964,"Ġindex":1965,"Ġanalysis":1966,"Ġtournament":1967,"Ġstake":1968,"Ġperfect":1969,"way":1970,"Ġband":1971,"Ġgirls":1972,"Ġoption":1973,"Ġplays":1974,"oc":1975,"Ġproviding":1976,"ÃŃ":1977,"24":1978,"Ġwouldn":1979,"Ġones":1980,"Ġdeclined":1981,"Ġwritten":1982,"Ġvoters":1983,"Ġcandidate":1984,"Ġsuspect":1985,"Ġpolicies":1986,"Ġpeace":1987,"ast":1988,"Ġparticular":1989,"for":1990,"Ġhopes":1991,"Ġstation":1992,"ĠMost":1993,"Ġspeak":1994,"ĠRiver":1995,"Ġasking":1996,"Ġstatements":1997,"Ġfifth":1998,"ha":1999,"ĠNigeria":2000,"af":2001,"Ġexplained":2002,"Ġbar":2003,"Ġhousing":2004,"ĠSanta":2005,"Ġidentified":2006,"Ġsimple":2007,"Ġcritical":2008,"ĠClub":2009,"ĠSecurity":2010,"ĠLike":2011,"Ġstarts":2012,"art":2013,"Ġstreet":2014,"Ġreality":2015,"Ġheavy":2016,"Ġprogress":2017,"Ġshowing":2018,"Ġchallenges":2019,"Ġban":2020,"Ġcommitted":2021,"35":2022,"»":2023,"Ġdirectly":2024,"Ġaren":2025,"Ġclaim":2026,"ĠWestern":2027,"ind":2028,"Ġgives":2029,"ĠSaudi":2030,"Ġchoice":2031,"ĠTh":2032,"Ġapproved":2033,"Ġlocated":2034,"Ġarrived":2035,"22":2036,"Ġcaught":2037,"Ġprofessional":2038,"Ġmissed":2039,"Ġculture":2040,"ĠYear":2041,"ĠOhio":2042,"ĠLtd":2043,"ĠAnother":2044,"Ġseem":2045,"Ġbelieves":2046,"Ġbelieved":2047,"Ġcharacte
r":2048,"ĠAug":2049,"red":2050,"Ġfine":2051,"Ġprior":2052,"Ġthinking":2053,"Ġhttp":2054,"Ġ+":2055,"Ġzone":2056,"Ġputting":2057,"Ġcrash":2058,"ĠAustralian":2059,"ĠAb":2060,"Ġfocused":2061,"ĠREUTERS":2062,"ĠFox":2063,"ĠSp":2064,"Ġtraditional":2065,"Ġanalyst":2066,"Ġwait":2067,"IT":2068,"Ġrequest":2069,"ru":2070,"ians":2071,"ize":2072,"Ġfinish":2073,"Ġlaws":2074,"Ġran":2075,"ER":2076,"Ġsouth":2077,"Ġspeed":2078,"Ġmovement":2079,"Ġassault":2080,"Ġexchange":2081,"Ġappear":2082,"ĠSun":2083,"Ġle":2084,"Ġmaybe":2085,"Ġlosing":2086,"Ġsubject":2087,"ive":2088,"mer":2089,"ĠBusiness":2090,"ĠBl":2091,"Ġappears":2092,"Ġadvantage":2093,"ĠLee":2094,"ada":2095,"ĠUnder":2096,"Ġprevent":2097,"Ġrespect":2098,"Ġsex":2099,"Ġcentre":2100,"ĠJoe":2101,"ado":2102,"Ġtable":2103,"Ġequipment":2104,"Ġfair":2105,"Ġtour":2106,"Ġ32":2107,"ĠFinancial":2108,"Ġcounty":2109,"Ġdevices":2110,"Ġcustomer":2111,"Ġinfrastructure":2112,"Ġexpectations":2113,"Ġfacing":2114,"Ġupon":2115,"Ġcross":2116,"ĠOpen":2117,"AL":2118,"Ġquick":2119,"Ġattempt":2120,"Ġcompleted":2121,"Ġfacility":2122,"Ġconfidence":2123,"ĠSupreme":2124,"Ġpiece":2125,"our":2126,"Ġplaces":2127,"Ġsometimes":2128,"Ġpoor":2129,"Ġstorm":2130,"Ġhot":2131,"Ġaffected":2132,"na":2133,"Ġabuse":2134,"ĠMs":2135,"Ġword":2136,"over":2137,"Ġbrother":2138,"Ġnecessary":2139,"Ġeventually":2140,"ĠStar":2141,"Ġsend":2142,"Ġboy":2143,"ĠRs":2144,"Ġremember":2145,"21":2146,"Ġclimate":2147,"Ġcapacity":2148,"Ġresponsible":2149,"ĠMatt":2150,"month":2151,"Ġsuffered":2152,"%.":2153,"og":2154,"ĠPeter":2155,"Ġ,":2156,"Ġfeeling":2157,"ze":2158,"Ġbuying":2159,"oy":2160,"ij":2161,"Ġbought":2162,"Ġactions":2163,"Ġowned":2164,"Ġ___":2165,"Ġphysical":2166,"Ġspecific":2167,"Ġbattle":2168,"ĠEnergy":2169,"Ġpicture":2170,"Ġactive":2171,"Ġindividuals":2172,"Ġguy":2173,"Ġregional":2174,"Ġbond":2175,"ows":2176,"ĠToronto":2177,"Ġrule":2178,"Ġdevelop":2179,"Ġcrowd":2180,"Ġguilty":2181,"Ġfemale":2182,"Ġselling":2183,"ĠFollow":2184,"Ġmyself":2185,"ata":2186,"Ġdevice":2187,"Ġreasons":2188,"Ġrecords":2189,"Ġfighting":2190,"ON":2191,"ities":2192,"ĠHome":2193,"Ġstatus":2194,"Ġplant":2195,"Ġdrugs":2196,"ĠChurch":2197,"Ġcompletely":2198,"Ġdisease":2199,"Ġhighly":2200,"ĠParis":2201,"Ġdecade":2202,"Ġowners":2203,"Ġwall":2204,"Ġcamp":2205,"ĠSteve":2206,"Ġreporting":2207,"Ġearned":2208,"ĠImages":2209,"Ġexisting":2210,"ĠSen":2211,"Ġconcern":2212,"Ġhundreds":2213,"Ġsong":2214,"Ġknows":2215,"Ġunique":2216,"Ġlose":2217,"ĠKh":2218,"Ġapproximately":2219,"Ġhaven":2220,"Ġpark":2221,"Ġindependent":2222,"ĠAlthough":2223,"ĠAndrew":2224,"Ġpaper":2225,"Ġdeveloped":2226,"Ġrising":2227,"Ġdirect":2228,"Ġpurchase":2229,"Ġexactly":2230,"Ġq":2231,"Ġmassive":2232,"Ġbox":2233,"Ġchampion":2234,"ĠClinton":2235,"Ġvoice":2236,"Ġarrest":2237,"ĠKorean":2238,"Ġlearning":2239,"ĠVirginia":2240,"Ġsa":2241,"Ġpar":2242,"Ġchairman":2243,"Ġagencies":2244,"Ġhealthy":2245,"ĠThose":2246,"Ġpowerful":2247,"Ġ45":2248,"Ġdifference":2249,"ĠJackson":2250,"Ġenforcement":2251,"Ġdividend":2252,"qu":2253,"Ġenjoy":2254,"Ġruling":2255,"Ġongoing":2256,"Ġsoftware":2257,"ks":2258,"Ġlocation":2259,"Ġmostly":2260,"Ġcandidates":2261,"men":2262,"Ġbroke":2263,"What":2264,"ĠBr":2265,"Ġ2008":2266,"Ġconsumer":2267,"Ġdiscuss":2268,"Ġdi":2269,"Ġprimary":2270,"ĠEn":2271,"Ġgreen":2272,"Ġconcerned":2273,"Ġimage":2274,"ĠPremier":2275,"ĠMeanwhile":2276,"Ġfired":2277,"ĠBoston":2278,"ann":2279,"Ġcamera":2280,"Ġtraded":2281,"Ġhasn":2282,"Ġexcited":2283,"Ġincreasing":2284,"ĠDespite":2285,"Ġcitizens":2286,"Ġeuro":2287,"Ġreportedly":2288,"Ġminute":2289,"ĠWill":2290,"ĠLLC":2291,"Ġsp":2292,"ĠMichiga
n":2293,"Ġstopped":2294,"Ġeye":2295,"Ġdenied":2296,"Ġmodern":2297,"ĠWall":2298,"Ġdefinitely":2299,"point":2300,"Ġlines":2301,"Ġpolitics":2302,"Ġhotel":2303,"Ġretail":2304,"Ġstated":2305,"ĠOver":2306,"Ġgrew":2307,"Ġbroadcast":2308,"Ġlegislation":2309,"Ġfresh":2310,"Ġbid":2311,"Ġmanaged":2312,"Ġsociety":2313,"Ġscoring":2314,"ĠGet":2315,"Ġintelligence":2316,"Ġholiday":2317,"Ġgovernor":2318,"Ġestimated":2319,"Ġexperts":2320,"ĠJeff":2321,"Ġstruck":2322,"Ġhits":2323,"Ġcarry":2324,"Ġplaced":2325,"Ġstores":2326,"Ġexpressed":2327,"Ġvalued":2328,"Ġad":2329,"Ġtwice":2330,"ala":2331,"Ġdisplay":2332,"Ġusually":2333,"Ġresponded":2334,"Ġdog":2335,"AS":2336,"ĠFed":2337,"Ġ2009":2338,"Ġdocuments":2339,"Ġnormal":2340,"Ġtrain":2341,"Ġfl":2342,"Ġshown":2343,"ĠEd":2344,"Ġsort":2345,"Ġallegedly":2346,"Ġshots":2347,"ka":2348,"Ġaccounts":2349,"Ġyesterday":2350,"Ġcreating":2351,"Ġchurch":2352,"Ġbus":2353,"Ġaward":2354,"Ġequity":2355,"Ġphotos":2356,"Ġ33":2357,"Ġfiscal":2358,"je":2359,"Ġconsumers":2360,"ĠManchester":2361,"no":2362,"ĠKevin":2363,"Ġgain":2364,"Ġcorporate":2365,"Ġcivil":2366,"ĠMiddle":2367,"ally":2368,"Ġsound":2369,"ĠEnglish":2370,"IC":2371,"Ġwinds":2372,"Ġworst":2373,"ĠGrand":2374,"Ġeffective":2375,"ĠIsland":2376,"Ġdrivers":2377,"Ġfan":2378,"pe":2379,"Ġsides":2380,"ĠGo":2381,"Ġclean":2382,"âĢĵ":2383,"Ġtelevision":2384,"ĠJr":2385,"Ġallows":2386,"My":2387,"Ġgreater":2388,"ance":2389,"Ġdecisions":2390,"Ġrestaurant":2391,"ĠHospital":2392,"ĠTr":2393,"Ġbalance":2394,"Ġmph":2395,"Ġkeeping":2396,"Ġseconds":2397,"Ġweapons":2398,"ert":2399,"Ġpain":2400,"ass":2401,"Ġsteps":2402,"ger":2403,"ĠBrexit":2404,"Ġremaining":2405,"Ġbringing":2406,"ure":2407,"Ġweight":2408,"And":2409,"Ġwriting":2410,"Photo":2411,"ĠChristian":2412,"ob":2413,"Ġsport":2414,"Ġfigures":2415,"Ġtrust":2416,"Ġskills":2417,"Ġseat":2418,"Ġfaces":2419,"ck":2420,"Ġborn":2421,"Ġsuper":2422,"Ġfuel":2423,"Ġdel":2424,"Ġmeant":2425,"ica":2426,"Ġjustice":2427,"Ġspring":2428,"Ġkilling":2429,"Ġnegative":2430,"ĠRichard":2431,"Ġund":2432,"Ġfactors":2433,"Ġsigns":2434,"Ġlearned":2435,"ĠGame":2436,"Ġaudience":2437,"Ġdeliver":2438,"Ġillegal":2439,"Ġblue":2440,"Ġscreen":2441,"Ġremained":2442,"Ġannouncement":2443,"IN":2444,"Ġwaiting":2445,"Ġthanks":2446,"Ġimmigration":2447,"ĠFBI":2448,"Ġwarned":2449,"Ġmeasure":2450,"Ġdraw":2451,"Ġpositions":2452,"Ġdebut":2453,"ĠMedia":2454,"Ġallowing":2455,"air":2456,"hen":2457,"Ġmark":2458,"ys":2459,"Ġprepared":2460,"ĠVegas":2461,"ep":2462,"ice":2463,"2018":2464,"Ġdefensive":2465,"60":2466,"ĠBeach":2467,"Ġpulled":2468,"£":2469,"Ġlawyer":2470,"Ġcast":2471,"Ġsolution":2472,"Ġeyes":2473,"Ġmarketing":2474,"ĠFoundation":2475,"Ġrisks":2476,"ĠToday":2477,"za":2478,"Ġdraft":2479,"Ġice":2480,"26":2481,"ĠHar":2482,"ĠExecutive":2483,"Ġtruck":2484,"ions":2485,"ĠYour":2486,"ĠIreland":2487,"ĠJim":2488,"Ġha":2489,"Ġfear":2490,"Ġ36":2491,"UR":2492,"ĠFord":2493,"Ġwatching":2494,"ien":2495,"Ġstyle":2496,"ĠGood":2497,"Ġwearing":2498,"ĠHouston":2499,"Ġonto":2500,"Ġboost":2501,"Ġapplication":2502,"ĠDan":2503,"Ġspread":2504,"ĠDavis":2505,"Ġstrike":2506,"els":2507,"Ġwind":2508,"Ġinterested":2509,"Ġguard":2510,"Ġmission":2511,"Ġyourself":2512,"Ġoperation":2513,"Ġlarger":2514,"She":2515,"Ġseasons":2516,"28":2517,"27":2518,"Ġrespond":2519,"ci":2520,"ĠCentre":2521,"Our":2522,"Ġnames":2523,"Ġflight":2524,"Ġquarterback":2525,"Ġstandard":2526,"so":2527,"Ġsuggested":2528,"ĠMal":2529,"Ġolder":2530,"ini":2531,"Ġperhaps":2532,"ont":2533,"ĠInstitute":2534,"Ġmillions":2535,"Ġmental":2536,"ÃĤ":2537,"ga":2538,"Ġclients":2539,"Ġplease":2540,"Ġloan":2541,"Ġaware":2542,"ft
":2543,"int":2544,"75":2545,"05":2546,"AY":2547,"ĠOut":2548,"Ġhair":2549,"ied":2550,"Ġseemed":2551,"ene":2552,"ty":2553,"NYSE":2554,"Ġoffensive":2555,"Ġtaxes":2556,"Ġinitial":2557,"ren":2558,"Ġseparate":2559,"la":2560,"ĠMiami":2561,"AC":2562,"Ġclearly":2563,"Ġfit":2564,"ĠCoast":2565,"Ġfirms":2566,"Ġpartners":2567,"Ġupcoming":2568,"Ġcold":2569,"Ġproposal":2570,"AT":2571,"Ġshut":2572,"ĠCommunity":2573,"Ġnature":2574,"ĠSal":2575,"Ġbottom":2576,"ting":2577,"ĠClick":2578,"Ġnice":2579,"ets":2580,"Ġhurt":2581,"itt":2582,"ama":2583,"Ġcarried":2584,"ĠCon":2585,"rd":2586,"Ġestate":2587,"ĠLas":2588,"ĠLaw":2589,"ng":2590,"Ġprotection":2591,"Ġproduce":2592,"Ġcurrency":2593,"Ġhappens":2594,"ĠPer":2595,"ney":2596,"ĠLong":2597,"Ġfellow":2598,"Ġcuts":2599,"Ġreading":2600,"ano":2601,"Ġproud":2602,"ost":2603,"ĠUN":2604,"ĠArizona":2605,"AD":2606,"Ġhelps":2607,"Ġwinter":2608,"Ġfinding":2609,"ĠGold":2610,"att":2611,"ĠWhy":2612,"Ġbasketball":2613,"lin":2614,"ĠCan":2615,"ĠBowl":2616,"ial":2617,"ĠAlex":2618,"200":2619,"AM":2620,"Ġpresence":2621,"Ġproduced":2622,"Ġdeveloping":2623,"Ġregarding":2624,"Ġdebate":2625,"Ġvice":2626,"ĠItaly":2627,"Ġsu":2628,"its":2629,"ator":2630,"Ġ34":2631,"Ġcomplex":2632,"Ġpresented":2633,"Ġresearchers":2634,"Ġslow":2635,"ya":2636,"Ġsanctions":2637,"Ġloved":2638,"Ġseek":2639,"Ġresponsibility":2640,"Ġadmitted":2641,"Ġalbum":2642,"Ġsolutions":2643,"Ġfacilities":2644,"ett":2645,"ĠGu":2646,"ĠWell":2647,"Ġlawmakers":2648,"Ġmiss":2649,"ful":2650,"ĠNick":2651,"'.":2652,"Ġfeels":2653,"Ġprime":2654,"Ġknowledge":2655,"Ġdeals":2656,"ĠTaylor":2657,"Ġsurvey":2658,"ĠFrancisco":2659,"Ġjoint":2660,"Ġwhom":2661,"Ġsit":2662,"01":2663,"Ġtr":2664,"Ġorganizations":2665,"ĠAvenue":2666,"ĠTheir":2667,"ĠTim":2668,"Ġrally":2669,"game":2670,"Ġbigger":2671,"Ġlawsuit":2672,"Ġrecorded":2673,"Ġfavorite":2674,"yard":2675,"Ġtransaction":2676,"Ġqu":2677,"oh":2678,"Ġinteresting":2679,"Ġinflation":2680,"ath":2681,"Ġstuff":2682,"Ġindustrial":2683,"ico":2684,"TS":2685,"Ġspeaking":2686,"Ġlosses":2687,"ID":2688,"ĠStadium":2689,"Ġstars":2690,"ĠWomen":2691,"ĠBlue":2692,"Ġwins":2693,"Ġdes":2694,"Ġcompetitive":2695,"ters":2696,"Ġpounds":2697,"Ġdirection":2698,"Ġinnings":2699,"ĠBest":2700,"Ġactor":2701,"Ġdangerous":2702,"Ġrequire":2703,"Ġplus":2704,"Ġsolid":2705,"Ġgeneration":2706,"Ġstrength":2707,"ĠMary":2708,"For":2709,"Ġplenty":2710,"ĠTeam":2711,"Ġinfluence":2712,"Ġfaced":2713,"Ġes":2714,"ĠIslamic":2715,"let":2716,"ĠDevelopment":2717,"Ġpath":2718,"Ġyouth":2719,"Ġcommitment":2720,"Ġbeautiful":2721,"ĠJack":2722,"ort":2723,"Ġten":2724,"Ġattend":2725,"ars":2726,"ón":2727,"Ġviews":2728,"Ġeuros":2729,"Ġauthor":2730,"Ġcore":2731,"Ġsupporters":2732,"ĠiPhone":2733,"Ġfashion":2734,"Ġsmaller":2735,"Ġelected":2736,"Ġuniversity":2737,"Ġpicked":2738,"wa":2739,"Ġordered":2740,"ĠSc":2741,"ĠÅ":2742,"Ġlargely":2743,"+":2744,"ĠAttorney":2745,"Ġpaying":2746,"AR":2747,"Ġconnection":2748,"Ġsetting":2749,"Ġna":2750,"ĠRock":2751,"Ġrecovery":2752,"ew":2753,"Ġserving":2754,"Ġsurprise":2755,"Ġoccurred":2756,"Ġdivision":2757,"Ġtelling":2758,"Ġmargin":2759,"Ġ2020":2760,"Ġsister":2761,"ĠNBA":2762,"Ġvoted":2763,"Ġcon":2764,"By":2765,"Ġ49":2766,"Ġfoot":2767,"ü":2768,"ĠTurkey":2769,"Ġamazing":2770,"Ġcombined":2771,"Ġappearance":2772,"Ġeasily":2773,"DAY":2774,"Ġnotes":2775,"ĠStart":2776,"Ġlanguage":2777,"Ġextremely":2778,"Ġcloudy":2779,"ĠLet":2780,"Ġdelivered":2781,"Ġimproved":2782,"Ġcollection":2783,"ĠPM":2784,"Ġestimates":2785,"Ġboys":2786,"izing":2787,"Ġtext":2788,"Ġcloser":2789,"Ġprotest":2790,"Ġprovince":2791,"Ġshop":2792,"Ġsmart":2793,"de":2794,"ĠSheriff":
2795,"EN":2796,"Ġcorner":2797,"Ġpanel":2798,"Ġbooks":2799,"Ġsupported":2800,"Ġmentioned":2801,"ver":2802,"ĠMinistry":2803,"ĠPrince":2804,"ĠUSA":2805,"Ġreceiving":2806,"Ġchoose":2807,"ĠIN":2808,"ĠSpain":2809,"Ġsection":2810,"Ġconsidering":2811,"ĠCor":2812,"Ġwish":2813,"Ġwelcome":2814,"ĠConference":2815,"ere":2816,"ĠOfficer":2817,"Ġhoping":2818,"Ġportfolio":2819,"Ġstandards":2820,"Ġgrand":2821,"ĠReal":2822,"Ġsecure":2823,"ĠCorporation":2824,"ĠRep":2825,"ĠKelly":2826,"Ġstreets":2827,"Ġsitting":2828,"Ġslightly":2829,"ĠInvestment":2830,"99":2831,"ond":2832,"Ġunits":2833,"Ġvotes":2834,"Ġsegment":2835,"Ġchampionship":2836,"Ġsquad":2837,"iting":2838,"ron":2839,"®":2840,"Ġem":2841,"Ġtouch":2842,"Ġ38":2843,"Ġceremony":2844,"Ġdecide":2845,"Ġapproval":2846,"So":2847,"ĠPort":2848,"Ġsub":2849,"Ġsc":2850,"Ġrep":2851,"ĠWeek":2852,"Ġupper":2853,"Ġagree":2854,"ny":2855,"Ġmatches":2856,"ics":2857,"Ġtweeted":2858,"Ġheat":2859,"ĠGreat":2860,"Ġpenalty":2861,"Ġmass":2862,"Ġalongside":2863,"Ġherself":2864,"berg":2865,"Ġscience":2866,"Ġentered":2867,"Ġappeal":2868,"ĠPr":2869,"Ġfile":2870,"che":2871,"ĠReport":2872,"ĠThree":2873,"ĠNorthern":2874,"ĠJordan":2875,"Ġamid":2876,"Ġpace":2877,"Ġjail":2878,"Ġfinance":2879,"ĠYoung":2880,"32":2881,"Ġwilling":2882,"Ġconduct":2883,"ĠPar":2884,"Ġestablished":2885,"Ġreturns":2886,"Ġaid":2887,"Ġinternet":2888,"IA":2889,"29":2890,"Ġmeetings":2891,"Ġwarning":2892,"ĠCl":2893,"Ġcampus":2894,"Most":2895,"ĠFund":2896,"ĠWilliam":2897,"ĠJapanese":2898,"Ġconsensus":2899,"Ġbrain":2900,"!\"":2901,"Ġpoll":2902,"Ġtech":2903,"Ġtrend":2904,"Ġpotentially":2905,"Ġreduced":2906,"ĠShow":2907,"Ġ37":2908,"Ġhappening":2909,"ĠBrazil":2910,"pl":2911,"ĠCal":2912,"Ġcovered":2913,"Ġenter":2914,"TV":2915,"Ġcatch":2916,"foot":2917,"Ġunion":2918,"Ġexpansion":2919,"ĠSingapore":2920,"ĠDetroit":2921,"Ġattended":2922,"ats":2923,"Ġnewspaper":2924,"ĠDivision":2925,"news":2926,"Ġcap":2927,"Ġremoved":2928,"Ġ48":2929,"ĠRoyal":2930,"Ġwindow":2931,"Ġparking":2932,"Ġdark":2933,"Ġstanding":2934,"Ġupdate":2935,"Ġagent":2936,"Ġtransfer":2937,"ĠArmy":2938,"Ġuses":2939,"80":2940,"ĠTe":2941,"Ġintroduced":2942,"Ġmale":2943,"ĠSouthern":2944,"Ġratings":2945,"Ġisland":2946,"ĠMiller":2947,"Ġteachers":2948,"Ġadvice":2949,"Ġfamiliar":2950,"uf":2951,"Ġsought":2952,"Ġpor":2953,"ĠEric":2954,"Ġda":2955,"Ġideas":2956,"uh":2957,"Ġsixth":2958,"Ġtalent":2959,"ĠImage":2960,"ering":2961,"run":2962,"ments":2963,"Ġconducted":2964,"300":2965,"Ġurged":2966,"Ġdiscovered":2967,"Ġpl":2968,"Ġunderstanding":2969,"Ġoffense":2970,"Ġsecretary":2971,"Ġsk":2972,"Ġloans":2973,"ĠGr":2974,"Ġapplications":2975,"Ġcrude":2976,"go":2977,"ĠInstead":2978,"Ġopinion":2979,"Ġdoubt":2980,"ey":2981,"Ġdis":2982,"31":2983,"Ġexperienced":2984,"Ġleg":2985,"ĠCleveland":2986,"ven":2987,"Ġfailure":2988,"market":2989,"ack":2990,"Ġdecline":2991,"Ġchanging":2992,"Ġ300":2993,"Ġdefence":2994,"ĠBrian":2995,"Ġdelivery":2996,"Ġmarried":2997,"Ġdeclared":2998,"Ġpull":2999,"Ġlimit":3000,"ĠMORE":3001,"Ġdefeat":3002,"Ġexpand":3003,"ĠColorado":3004,"ĠRob":3005,"iss":3006,"Ġworse":3007,"Ġperform":3008,"ising":3009,"Ġ2007":3010,"ĠDel":3011,"Ġsurgery":3012,"Ġeasier":3013,"Ġmaintain":3014,"ĠEx":3015,"Ġtied":3016,"Ġeast":3017,"Ġuser":3018,"ola":3019,"Ġprogramme":3020,"Ġmanufacturing":3021,"Ġhitting":3022,"Ġx":3023,"Ġskin":3024,"Ġartist":3025,"Ġtells":3026,"Ġnearby":3027,"ĠDaniel":3028,"ĠPower":3029,"Ġdetermined":3030,"Ġactual":3031,"Ġtreated":3032,"Ġlived":3033,"Ġcomputer":3034,"Ġcool":3035,"oo":3036,"ĠPl":3037,"Ġeffects":3038,"Ġenvironmental":3039,"ĠMorgan":3040,"Ġflow":3041,"Ġachieve":3042,"ĠBell
":3043,"Ġtesting":3044,"ĠBob":3045,"Ġwhatever":3046,"ĠBecause":3047,"US":3048,"ĠHollywood":3049,"Ġconflict":3050,"Ġwalking":3051,"ĠJudge":3052,"ĠAlabama":3053,"Ġaircraft":3054,"Ġte":3055,"well":3056,"Ġgoods":3057,"Ġidentify":3058,"Ġassociated":3059,"ĠVer":3060,"ĠEducation":3061,"Ġairport":3062,"IL":3063,"Ġfalling":3064,"Ġgiant":3065,"ĠMa":3066,"ĠMedical":3067,"Ġride":3068,"Ġden":3069,"º":3070,"ĠJose":3071,"Ġwest":3072,"ĠPacific":3073,"Ġvisitors":3074,"ĠWatch":3075,"ĠNations":3076,"Ġgains":3077,"Ġschedule":3078,"34":3079,"ĠExchange":3080,"Ġpayments":3081,"ĠII":3082,"70":3083,"No":3084,"ĠSyrian":3085,"ĠAdam":3086,"Ġne":3087,"Ġpartnership":3088,"Ġbl":3089,"ĠGeorgia":3090,"Ġsites":3091,"Ġmodels":3092,"Ġdegree":3093,"Ġdetermine":3094,"ĠWilson":3095,"Ġcontest":3096,"Ġprofessor":3097,"ĠChelsea":3098,"Ġmeaning":3099,"ĠGames":3100,"ĠTrust":3101,"ĠAsian":3102,"33":3103,"Ġlink":3104,"ĠUp":3105,"Ġholds":3106,"ĠTop":3107,"ĠItalian":3108,"ord":3109,"ĠKansas":3110,"Ġfarmers":3111,"Ġextended":3112,"Ġbirth":3113,"Ġreform":3114,"Ġrelations":3115,"Ġwrite":3116,"Ġsupporting":3117,"55":3118,"ita":3119,"Ġnotice":3120,"ster":3121,"Ġanimals":3122,"ĠJersey":3123,"Ġarm":3124,"ĠForeign":3125,"ĠLife":3126,"Ġtruly":3127,"ĠOnce":3128,"ĠMayor":3129,"ĠFree":3130,"ĠAgency":3131,"ĠWood":3132,"Ġpassing":3133,"DA":3134,"Ġ52":3135,"Ġmoves":3136,"Ġcom":3137,"house":3138,"ĠIts":3139,"Ġmarijuana":3140,"ines":3141,"Ġveteran":3142,"Ġvariety":3143,"ki":3144,"ff":3145,"amb":3146,"Ġlisted":3147,"Ġpushed":3148,"Ġvolume":3149,"Ġincreasingly":3150,"Ġkick":3151,"Ġrock":3152,"ank":3153,"Ġfees":3154,"Ġenable":3155,"Ġimages":3156,"Ġtruth":3157,"Ġministry":3158,"Ġrare":3159,"ĠDallas":3160,"ĠMinnesota":3161,"Ġcontributed":3162,"ĠCharles":3163,"Ġpercentage":3164,"Ġtechnical":3165,"ĠApp":3166,"Ġassistant":3167,"Ġinterests":3168,"Ġimmediate":3169,"38":3170,"ĠTown":3171,"Ġclosing":3172,"ĠAnthony":3173,"Ġsouthern":3174,"ase":3175,"ĠPutin":3176,"ĠForce":3177,"ba":3178,"Ġrefused":3179,"ĠStill":3180,"ix":3181,"ĠCol":3182,"Ġmaterials":3183,"Ġstructure":3184,"Ġdriven":3185,"Ġpatient":3186,"Ġbroken":3187,"Ġradio":3188,"Ġscale":3189,"Ġreplace":3190,"Ġ39":3191,"ĠLand":3192,"Ġdeputy":3193,"und":3194,"Ġcolor":3195,"OS":3196,"Ġroads":3197,"Ġcorruption":3198,"ĠRose":3199,"Ġemployee":3200,"ĠWater":3201,"Ġseats":3202,"Ġwalked":3203,"ec":3204,"Ġcents":3205,"Ġchain":3206,"Ġpayment":3207,"ĠAndroid":3208,"eb":3209,"Ġcommission":3210,"Ġthrow":3211,"Ġcount":3212,"Ġaccident":3213,"Ġexpensive":3214,"ered":3215,"ĠYes":3216,"ĠLouis":3217,"Ġstudies":3218,"Ġinvestigating":3219,"Ġcentury":3220,"Ġdiscussion":3221,"Ġinter":3222,"DAQ":3223,"ĠBefore":3224,"Ġinitially":3225,"*":3226,"Ġinvestments":3227,"Ġmulti":3228,"Ġtight":3229,"Ġconfident":3230,"Ġcounter":3231,"ĠQu":3232,"Ġgovernments":3233,"Ġarmed":3234,"Ġsuit":3235,"Ġrow":3236,"Ġlocations":3237,"Ġepisode":3238,"itch":3239,"Ġyounger":3240,"Ġfestival":3241,"Ġpitch":3242,"ĠOF":3243,"Ġtalked":3244,"ca":3245,"Ġprotests":3246,"Ġtargets":3247,"90":3248,"Ġoriginally":3249,"Ġsinger":3250,"Ġjourney":3251,"ug":3252,"Ġapply":3253,"Ġteacher":3254,"Ġchances":3255,"):":3256,"Ġdeaths":3257,"isation":3258,"ĠStephen":3259,"Ġcode":3260,"ĠChampionship":3261,"ĠJason":3262,"ĠAT":3263,"Ġaccept":3264,"ĠSeries":3265,"Ġvalues":3266,"Ġbed":3267,"ĠHarry":3268,"Ġflat":3269,"Ġtools":3270,"Ġpublicly":3271,"37":3272,"Ġpointed":3273,"ĠGolden":3274,"ps":3275,"Ġunable":3276,"ants":3277,"Ġestimate":3278,"Ġwarm":3279,"Ġbasic":3280,"ern":3281,"Ġraising":3282,"ĠRelated":3283,"Ġultimately":3284,"Ġnorthern":3285,"Ġplane":3286,"ĠVice":3287,"ĠRaj":3288,"ĠJustin":3289
,"anc":3290,"Ġbrings":3291,"ĠArt":3292,"OT":3293,"Ġshift":3294,"ĠBBC":3295,"ĠSu":3296,"BS":3297,"Ġbag":3298,"Ġdoctor":3299,"Ġfill":3300,"Ġdowntown":3301,"Ġpossibility":3302,"ĠAg":3303,"Ġest":3304,"44":3305,"Ġstruggling":3306,"Ġlinked":3307,"Ġtickets":3308,"ĠJay":3309,"ĠCall":3310,"Ġstands":3311,"Ġwedding":3312,"Ġresident":3313,"eng":3314,"Ġleads":3315,"Ġadvance":3316,"ĠAtlanta":3317,"Ġtie":3318,"Ġadvanced":3319,"pt":3320,"burg":3321,"ĠEarlier":3322,"ĠSw":3323,"ĠZealand":3324,"Ġexercise":3325,"ĠAM":3326,"Ġaffect":3327,"Ġpossession":3328,"Ġinvolving":3329,"Ġ42":3330,"Ġwriter":3331,"ĠBeijing":3332,"Ġdoctors":3333,"Ġobviously":3334,"Ġer":3335,"ĠOlympic":3336,"Ġ75":3337,"ĠKhan":3338,"ĠFort":3339,"app":3340,"like":3341,"Ġsea":3342,"ock":3343,"Ġmix":3344,"ĠIraq":3345,"ĠMuslim":3346,"ĠFinally":3347,"Ġcontinuing":3348,"Ġpr":3349,"ĠKe":3350,"ĠJoseph":3351,"Ġexpects":3352,"Ġinstitutions":3353,"Ġconservative":3354,"own":3355,"ĠChairman":3356,"Ġreturning":3357,".-":3358,"Ġstood":3359,"Ġvision":3360,"ess":3361,"Ġadults":3362,"Ġyield":3363,"Ġprove":3364,"Ġorders":3365,"Ġdream":3366,"36":3367,"related":3368,"Ġsl":3369,"Ġeverybody":3370,"ui":3371,"Ġrepresents":3372,"Ġdiscussed":3373,"Ġbecomes":3374,"Ġvillage":3375,"CC":3376,"Ġnegotiations":3377,"ĠPhiladelphia":3378,"Ġcelebrate":3379,"Ġfarm":3380,"ç":3381,"Ġregistered":3382,"ĠGovernor":3383,"OL":3384,"ĠMon":3385,"Ġfiling":3386,"04":3387,"SE":3388,"ĠAssembly":3389,"Ġactress":3390,"Ġsi":3391,"Ġthank":3392,"Ġheading":3393,"ĠWho":3394,"Ġfamous":3395,"Ġconsecutive":3396,"Ġmarriage":3397,"ette":3398,"NAS":3399,"acks":3400,"ĠPlease":3401,"ĠDiego":3402,"Ġbaseball":3403,"ĠMoore":3404,"Ġties":3405,"Ġcarrying":3406,"que":3407,"Ġturning":3408,"ĠMcC":3409,"ĠKen":3410,"OR":3411,"ĠStock":3412,"Ġbuildings":3413,"49":3414,"ĠVan":3415,"39":3416,"ĠSeattle":3417,"Ġwild":3418,"Ġcrew":3419,"Ġroute":3420,"ĠTime":3421,"Ġtonight":3422,"Ġmoments":3423,"Ġvideos":3424,"Ġinternal":3425,"ĠLiverpool":3426,"port":3427,"Ġchair":3428,"Ġrival":3429,"ĠScotland":3430,"round":3431,"ith":3432,"Ġbreaking":3433,"Ġvoting":3434,"ically":3435,"Ġproducer":3436,"ĠLove":3437,"Ġremove":3438,"PA":3439,"Ġasset":3440,"Ġrequires":3441,"Ġsigning":3442,"ages":3443,"Ġimpressive":3444,"ĠIrish":3445,"Ġauthority":3446,"Ġruled":3447,"Ġaimed":3448,"Ġcaptain":3449,"AG":3450,"Ġplants":3451,"ĠAnderson":3452,"ĠSpanish":3453,"Ġbanking":3454,"Ġthreats":3455,"Ġsuspended":3456,"Ġtests":3457,"Ġreligious":3458,"Ġelectric":3459,"ĠREAD":3460,"Ġstrategic":3461,"Ġsplit":3462,"ex":3463,"Ġpractices":3464,"ĠIsraeli":3465,"ĠArabia":3466,"ĠMoscow":3467,"Ġfranchise":3468,"Ġcustody":3469,"ĠOld":3470,"Ġrequirements":3471,"Ġquarterly":3472,"Ġcomfortable":3473,"Ġcrimes":3474,"Ġheaded":3475,"Ġnewsletter":3476,"Ġanimal":3477,"Ġregulations":3478,"long":3479,"ĠCNN":3480,"Ġassists":3481,"Ġshopping":3482,"ĠGov":3483,"ĠSecurities":3484,"Ġassistance":3485,"Ġnor":3486,"Ġrelatively":3487,"Ġincreases":3488,"Ġgenerally":3489,"Ġ55":3490,"Ġgained":3491,"Ġ41":3492,"Ġpictures":3493,"gan":3494,"Ġpop":3495,"Ġupdates":3496,"ĠRepublic":3497,"Ġrebounds":3498,"ĠPatrick":3499,"Ġrelief":3500,"Ġacting":3501,"ĠFestival":3502,"Ġ2006":3503,"Ġboss":3504,"Ġtypes":3505,"65":3506,"ĠYet":3507,"Ġpurpose":3508,"ning":3509,"Ġmatters":3510,"Ġcompete":3511,"ball":3512,"ĠRam":3513,"Ġsw":3514,"ĠFollowing":3515,"ĠBush":3516,"Ġtroops":3517,"Ġsupposed":3518,"Ġfreedom":3519,"Ġfeatured":3520,"Ġstorage":3521,"ĠInformation":3522,"ĠHong":3523,"Ġgolf":3524,"Ġagents":3525,"Ġfraud":3526,"Ġminimum":3527,"Ġartists":3528,"Ġeat":3529,"high":3530,"ĠFormer":3531,"ĠKong":3532,"ĠJosh":3533,"ĠDelhi"
:3534,"Ġshowers":3535,"ĠAcademy":3536,"Ġapartment":3537,"Ġvan":3538,"Ġfish":3539,"oe":3540,"Ġfilms":3541,"ĠBo":3542,"Ġedge":3543,"Ġpossibly":3544,"Ġtweet":3545,"09":3546,"Ġresolution":3547,"jo":3548,"Ġkill":3549,"Ġ44":3550,"Ġcell":3551,"Ġscheme":3552,"Ġth":3553,"Ġbonds":3554,"Ġentry":3555,"Ġsecret":3556,"Ġ43":3557,"Ġending":3558,"Ġweren":3559,"ĠCredit":3560,"ĠLive":3561,"Ġretired":3562,"Ġmachine":3563,"Ġsummit":3564,"Ġsharing":3565,"Ġacquired":3566,"Ġera":3567,"Ġwear":3568,"ical":3569,"07":3570,"Ġexciting":3571,"li":3572,"BC":3573,"ĠSocial":3574,"Ġhistoric":3575,"ĠChe":3576,"ĠLewis":3577,"ira":3578,"Ġstolen":3579,"ĠSpeaking":3580,"Ġsleep":3581,"Ġspokeswoman":3582,"week":3583,"Ġpurchased":3584,"Ġimportance":3585,"EC":3586,"Ġends":3587,"Ġdress":3588,"Ġparliament":3589,"ĠCruz":3590,"Ġcards":3591,"hi":3592,"ĠEmail":3593,"Ġrepresent":3594,"Ġbrands":3595,"ĠSenior":3596,"Ġparticipants":3597,"Ġfly":3598,"Ġidentity":3599,"ĠHam":3600,"ĠSky":3601,"ij":3602,"SA":3603,"Ġpromised":3604,"Ġtrouble":3605,"Ġsuffering":3606,"Ġleaves":3607,"Ġsuggest":3608,"Sh":3609,"Ġbusy":3610,"Ġproperties":3611,"Ġworldwide":3612,"Ġcloud":3613,"ĠSEC":3614,"Ġclosely":3615,"Ġmanage":3616,"Ġnumerous":3617,"Ġbackground":3618,"ĠExpress":3619,"Ġ65":3620,"ĠTony":3621,"ĠMadrid":3622,"ev":3623,"der":3624,"Ġsignificantly":3625,"Ġalternative":3626,"Ġship":3627,"head":3628,"ators":3629,"Ġdinner":3630,"ax":3631,"SC":3632,"Ġcriticism":3633,"ĠMah":3634,"ĠMin":3635,"rie":3636,"ĠTour":3637,"Ġbench":3638,"Ġadds":3639,"Ġseriously":3640,"star":3641,"ĠJournal":3642,"ĠDi":3643,"ali":3644,"Ġsentence":3645,"ĠSeveral":3646,"Ġmayor":3647,"ati":3648,"Ġsuggests":3649,"Ġbehavior":3650,"Ġstronger":3651,"ĠFood":3652,"Ġclient":3653,"not":3654,"ĠPrice":3655,"Ġtargeted":3656,"ĠSingh":3657,"ĠNetwork":3658,"Ġprosecutors":3659,"Ġdirected":3660,"ĠDemocrat":3661,"bl":3662,"ues":3663,"ĠFamily":3664,"Ġconnected":3665,"ĠChampions":3666,"Ġroughly":3667,"Ġabsolutely":3668,"08":3669,"Ġpassengers":3670,"ö":3671,"ĠSpecial":3672,"Ġcoast":3673,"Ġcomplaint":3674,"Ġ400":3675,"ĠEm":3676,"ves":3677,"Ġdogs":3678,"Ġhandle":3679,"Ġotherwise":3680,"Ġsees":3681,"Ġticket":3682,"ĠAward":3683,"All":3684,"Ġtask":3685,"Ġsongs":3686,"ĠAmong":3687,"Ġdedicated":3688,"Ġsteel":3689,"looking":3690,"Ġshortly":3691,"Ġtackle":3692,"ative":3693,"Ġminor":3694,"â":3695,"Ġprovider":3696,"vers":3697,"use":3698,"ives":3699,"Ġtypically":3700,"Ġarms":3701,"ĠAnt":3702,"ĠIS":3703,"Ġjump":3704,"Ġ©":3705,"47":3706,"aff":3707,"Ġmonthly":3708,"ĠMicrosoft":3709,"ĠCBS":3710,"Ġthreatened":3711,"Ġhonor":3712,"ĠMo":3713,"42":3714,"Ġinning":3715,"Ġpool":3716,"Ġhealthcare":3717,"ĠStory":3718,"ĠTennessee":3719,"Ġpromote":3720,"EL":3721,"Ġemotional":3722,"Ġpe":3723,"Ġfactor":3724,"Ġinvestigators":3725,"Ľ":3726,"ĠBack":3727,"ĠProject":3728,"Ġcu":3729,"side":3730,"Ġmessages":3731,"TH":3732,"eg":3733,"Ġexperiences":3734,"Ġcausing":3735,"Ġjoining":3736,"Ġpackage":3737,"Ġbodies":3738,"Ġlots":3739,"ĠHarris":3740,"Ġcl":3741,"ĠInternet":3742,"free":3743,"Ġperformed":3744,"Ġpieces":3745,"buy":3746,"Ġcaption":3747,"Ġweb":3748,"Ġcontracts":3749,"At":3750,"Ġattempted":3751,"Ġunlikely":3752,"Ġclick":3753,"Ġinvest":3754,"IM":3755,"ĠView":3756,"Ġneighborhood":3757,"Ġring":3758,"ĠFour":3759,"ail":3760,"46":3761,"One":3762,"Ġnative":3763,"CH":3764,"OM":3765,"Ġalcohol":3766,"ĠVal":3767,"Ġcharacters":3768,"ĠPat":3769,"Ġpoliticians":3770,"ĠMag":3771,"Ġbegins":3772,"ĠAk":3773,"Ġlos":3774,"Ġpersonnel":3775,"Ġenjoyed":3776,"ĠTechnology":3777,"Ġsun":3778,"ĠIT":3779,"Ġdocument":3780,"Ġdeficit":3781,"Ġcoalition":3782,"Ġmemory":3783,"Ġpushing":3
784,"any":3785,"ified":3786,"Ġfounder":3787,"Ġ2000":3788,"2017":3789,"Ġvisited":3790,"ĠThough":3791,"ph":3792,"Ġsoft":3793,"Ġflag":3794,"Ġmom":3795,"inch":3796,"ĠSamsung":3797,"Ġapps":3798,"Ġtouchdown":3799,"ĠCare":3800,"ĠMrs":3801,"Ġredistributed":3802,"Ġencourage":3803,"ched":3804,"Ġtend":3805,"Ġregions":3806,"pp":3807,"IP":3808,"br":3809,"ush":3810,"Ġargued":3811,"Ġjunior":3812,"BA":3813,"Ġsevere":3814,"ĠNIGHT":3815,"Ġdef":3816,"Ġsurrounding":3817,"48":3818,"Ġengine":3819,"Ġfilled":3820,"Ġseventh":3821,"Ġbattery":3822,"ĠAllen":3823,"Ġguidance":3824,"Ġroll":3825,"Ġrural":3826,"Ġexpert":3827,"Ġconvicted":3828,"Ġlikes":3829,"ĠRo":3830,"Ġgrown":3831,"Ġretirement":3832,"Ġintended":3833,"Ġmis":3834,"Ġarmy":3835,"Ġdance":3836,"ĠThank":3837,"Ġent":3838,"Ġoutlook":3839,"Ġpara":3840,"Ġdry":3841,"ĠTO":3842,"era":3843,"Ġwaste":3844,"Ġfaster":3845,"ĠEagles":3846,"TA":3847,"ĠFrank":3848,"Ã":3849,"LE":3850,"ura":3851,"ko":3852,"ao":3853,"Ġdistribution":3854,"Ġimprovement":3855,"Ġplayoff":3856,"Ġacquisition":3857,"ĠCH":3858,"Ġtomorrow":3859,"Ġstruggle":3860,"ĠHuman":3861,"Ġnewly":3862,"oon":3863,"ĠNe":3864,"con":3865,"sc":3866,"Ġunless":3867,"Ġtransition":3868,"ten":3869,"ĠInter":3870,"Ġequal":3871,"Ġrec":3872,"Ġappointed":3873,"Ġwake":3874,"ĠEarth":3875,"ose":3876,"ĠEastern":3877,"Ġsoldiers":3878,"ĠParliament":3879,"Ġsets":3880,"Ġattempts":3881,"ĠIllinois":3882,"Ġrevenues":3883,"ĠWil":3884,"Ġheads":3885,"Ġprepare":3886,"Ġpriority":3887,"PS":3888,"ĠJo":3889,"ĠNBC":3890,"Ġtherefore":3891,"yn":3892,"Ġinitiative":3893,"ct":3894,"Ġcoffee":3895,"ĠFair":3896,"43":3897,"den":3898,"form":3899,"ova":3900,"Ġappropriate":3901,"ĠPlay":3902,"Ġaccepted":3903,"Ġcreative":3904,"Ġfollows":3905,"Ġrescue":3906,"Ġtree":3907,"With":3908,"ĠNetflix":3909,"ĠFootball":3910,"Ġsurprised":3911,"Ġlowest":3912,"800":3913,"amp":3914,"Ġworried":3915,"mar":3916,"ran":3917,"Ġvisiting":3918,"Ġselected":3919,"ĠMusic":3920,"ĠAnn":3921,"Ġexplain":3922,"ging":3923,"Ġwidely":3924,"Ġsquare":3925,"Ġtrends":3926,"Ġimproving":3927,"ĠHead":3928,"ĠQueen":3929,"ĠSociety":3930,"Ġcutting":3931,"ĠGOP":3932,"03":3933,"',":3934,"ET":3935,"ĠDrive":3936,"oll":3937,"ato":3938,"ĠSea":3939,"Ġjury":3940,"ĠRights":3941,"Ġinvestor":3942,"ĠABC":3943,"Ġtool":3944,"ĠAre":3945,"Ġrejected":3946,"Ġemerging":3947,"Ġcounts":3948,"Ġnations":3949,"Ġfalse":3950,"Ġtreat":3951,"va":3952,"Ġweak":3953,"ĠHighway":3954,"down":3955,"Ġstruggled":3956,"ĠMP":3957,"Ġguests":3958,"Ġgender":3959,"Ġhouses":3960,"rit":3961,"ĠWild":3962,"Ġstreak":3963,"uc":3964,"ĠReserve":3965,"ĠRatings":3966,"alt":3967,"Ġgreatest":3968,"Ġlawyers":3969,"Ġreaching":3970,"Ġtemperatures":3971,"To":3972,"Ġoutstanding":3973,"Ġpasses":3974,"Ġfaith":3975,"inc":3976,"Ġcr":3977,"Ġinformed":3978,"oz":3979,"Ġtrees":3980,"Ġsending":3981,"Ġ150":3982,"bo":3983,"Ġwine":3984,"ros":3985,"Ġsuspected":3986,"Ġrepeatedly":3987,"Ġhat":3988,"Ġshape":3989,"ĠWh":3990,"Ġassist":3991,"Ġstress":3992,"Ġfeed":3993,"ark":3994,"ored":3995,"Ġwatched":3996,"Ġincredible":3997,"cl":3998,"nt":3999,"Ġentertainment":4000,"ih":4001,"Ġbeauty":4002,"Ġbi":4003,"ĠLocal":4004,"Ġsat":4005,"41":4006,"Ġbroad":4007,"Ġheavily":4008,"Ġengaged":4009,"Ġspecifically":4010,"ĠMen":4011,"ĠRoss":4012,"Ġ2005":4013,"ST":4014,"95":4015,"Ġdownload":4016,"400":4017,"Ġsentenced":4018,"ĠCatholic":4019,"ĠOklahoma":4020,"Ġthrew":4021,"Ġworry":4022,"Ġimp":4023,"Ġdrove":4024,"Ġcolleagues":4025,"Ġagenda":4026,"64":4027,"ĠEach":4028,"Ġfee":4029,"New":4030,"ium":4031,"Ġspokesperson":4032,"Ġbills":4033,"Ġ47":4034,"ĠAfghanistan":4035,"Ġinvited":4036,"ĠYouTube":4037,"Ġanniversar
y":4038,"Ġdozen":4039,"ram":4040,"ĠOnly":4041,"Ġemployment":4042,"Getty":4043,"Ġgap":4044,"Ġsweet":4045,"ĠLittle":4046,"Ġinf":4047,"ying":4048,"Ġglass":4049,"Ġclasses":4050,"Ġcoal":4051,"ĠSub":4052,"Ġduty":4053,"CA":4054,"Ġcoaches":4055,"Â":4056,"anna":4057,"ĠSk":4058,"Ġ46":4059,"ison":4060,"ille":4061,"ĠST":4062,"ric":4063,"Ġparticipate":4064,"Ġequ":4065,"Ġrich":4066,"Ġrespectively":4067,"Ġexpenses":4068,"Ġcombination":4069,"right":4070,"Ġshareholders":4071,"Ġturns":4072,"Ġearn":4073,"Ġ51":4074,"ured":4075,"Ġdrink":4076,"ĠKar":4077,"ĠShares":4078,"ĠMid":4079,"ĠGetty":4080,"Ġbridge":4081,"lo":4082,"Ġinspired":4083,"Ġsurface":4084,"Ġgift":4085,"ence":4086,"Ġchallenging":4087,"Ġoffices":4088,"Ġsuspects":4089,"ĠFinance":4090,"Ġab":4091,"bound":4092,"Ġmomentum":4093,"Ġbacked":4094,"Ġparent":4095,"Ġcrucial":4096,"ave":4097,"Ġdealing":4098,"Ġregulatory":4099,"Ġapparently":4100,"ĠMat":4101,"Ġapart":4102,"Ġport":4103,"ole":4104,"Ġbeach":4105,"Ġcultural":4106,"Ġinstitutional":4107,"Ġbeating":4108,"ĠIowa":4109,"ĠAli":4110,"67":4111,"Ġje":4112,"ays":4113,"Ġweekly":4114,"Ġbirthday":4115,"Ġpipeline":4116,"Ġknee":4117,"Ġsolar":4118,"ĠPe":4119,"Ġcategory":4120,"ĠArea":4121,"ky":4122,"ures":4123,"06":4124,"ĠBall":4125,"Ġsemi":4126,"ĠHamilton":4127,"hip":4128,"ĠPh":4129,"ĠNext":4130,"Ġathletes":4131,"ii":4132,"Ġmovies":4133,"han":4134,"net":4135,"Ġplastic":4136,"Ġbehalf":4137,"gen":4138,"Ġfindings":4139,"Ġstretch":4140,"ĠSa":4141,"Ġofficially":4142,"ĠSarah":4143,"Ġprivacy":4144,"ĠMad":4145,"Ġnone":4146,"gh":4147,"On":4148,"Ġdrama":4149,"ĠFl":4150,"ika":4151,"ĠArsenal":4152,"Ġviolent":4153,"UN":4154,"called":4155,"59":4156,"Ġhate":4157,"Ġrelationships":4158,"Ġgranted":4159,"ĠJon":4160,"Ġlisten":4161,"season":4162,"Ġfewer":4163,"GA":4164,"ĠLabour":4165,"Ġremarks":4166,"ĠJonathan":4167,"ĠRos":4168,"sey":4169,"ĠOntario":4170,"ĠThompson":4171,"ĠNight":4172,"Ġranked":4173,"ĠUkraine":4174,"Ġimmigrants":4175,"Ġdegrees":4176,"ĠGe":4177,"Ġlabor":4178,"umb":4179,"ĠYORK":4180,"Ġallies":4181,"sp":4182,"hed":4183,"sw":4184,"Ġtariffs":4185,"SP":4186,"Ġclassic":4187,"Ġawards":4188,"ents":4189,"Ġfix":4190,"Ġsoccer":4191,"Ġconcert":4192,"ust":4193,"Ġadult":4194,"Ġoutput":4195,"Ġmanaging":4196,"02":4197,"Ġpromise":4198,"Ġawareness":4199,"Ġgross":4200,"Ġentering":4201,"Ġpo":4202,"oj":4203,"Ġmetal":4204,"Ġexit":4205,"Ġexcellent":4206,"Ġclubs":4207,"hold":4208,"Ġreplaced":4209,"ĠClass":4210,"Ġscientists":4211,"Ġprimarily":4212,"ĠMer":4213,"ão":4214,"Ġcircumstances":4215,"ades":4216,"Ġsupplies":4217,"aker":4218,"ĠSand":4219,"Ġscandal":4220,"Ġsettlement":4221,"ĠWisconsin":4222,"ĠWarriors":4223,"ĠAustin":4224,"Ġjournalists":4225,"ening":4226,"Ġreflect":4227,"ĠBuy":4228,"ĠAwards":4229,"Ġselection":4230,"ĠBel":4231,"bury":4232,"Ġtechnologies":4233,"%,":4234,"ime":4235,"ĠÄ":4236,"ĠAdministration":4237,"Ġchannel":4238,"Star":4239,"Ġtransport":4240,"Ġawarded":4241,"ena":4242,"Ġmotor":4243,"orn":4244,"kin":4245,"Ġfeaturing":4246,"Ġphones":4247,"ĠAND":4248,"Ġrelevant":4249,"ĠSee":4250,"Ġwinners":4251,"Ġdad":4252,"ĠSource":4253,"ĠCheck":4254,"aut":4255,"ĠFar":4256,"Ġopponents":4257,"Ġoutcome":4258,"Ġdoors":4259,"Ġsuicide":4260,"ima":4261,"Ġjumped":4262,"Ġperspective":4263,"Ġtransportation":4264,"Ġthinks":4265,"ĠMor":4266,"Ġdeadline":4267,"Ġ53":4268,"ĠDeputy":4269,"ery":4270,"Ġdetailed":4271,"uch":4272,"ĠBur":4273,"Ġtrades":4274,"ĠGreg":4275,"Ġzero":4276,"erson":4277,"ĠChildren":4278,"Ġdu":4279,"66":4280,"Ġmixed":4281,"ĠBarack":4282,"54":4283,"Ġterritory":4284,"Ġac":4285,"Ġconcept":4286,"ĠAdd":4287,"Ġourselves":4288,"Ġreaction":4289,"ĠSydne
y":4290,"ink":4291,"Ġconsistent":4292,"Ġboat":4293,"room":4294,"Ġdozens":4295,"Ġeffectively":4296,"but":4297,"Ġmotion":4298,"Ġalive":4299,"ĠKey":4300,"weight":4301,"Ġexports":4302,"Ġoperate":4303,"Ġregime":4304,"ĠAuthority":4305,"och":4306,"ĠCR":4307,"leg":4308,"Ġforget":4309,"American":4310,"bs":4311,"Ġthoughts":4312,"ĠSign":4313,"ĠPatriots":4314,"Ġbrief":4315,"ĠOregon":4316,"ĠBal":4317,"Ġmine":4318,"Ġciting":4319,"Ġmagazine":4320,"more":4321,"ERS":4322,"ĠBer":4323,"ua":4324,"ox":4325,"ĠMain":4326,"Ġinstance":4327,"tr":4328,"Ġrestaurants":4329,"ora":4330,"Ġharassment":4331,"\",\"":4332,"Ł":4333,"Ġsilver":4334,"ĠMueller":4335,"ĠSenator":4336,"ĠEvery":4337,"Ġfootage":4338,"ms":4339,"Ġopposed":4340,"ĠLink":4341,"Ġver":4342,"Ġpleased":4343,"ame":4344,"ending":4345,"Ġrivals":4346,"ida":4347,"ike":4348,"ta":4349,"ĠCook":4350,"Ġheadquarters":4351,"ear":4352,"Ġaggressive":4353,"Ġcourts":4354,"ĠMuseum":4355,"Ġim":4356,"ĠHoldings":4357,"Ġcommunication":4358,"Ġphase":4359,"yl":4360,"Ġpowers":4361,"Ġproved":4362,"Ġcarbon":4363,"Ġaside":4364,"ĠOlympics":4365,"Ġgathered":4366,"ĠPennsylvania":4367,"Ġsmartphone":4368,"ĠMet":4369,"ĠHurricane":4370,"Ġprotected":4371,"Ġcommunications":4372,"Ġemerged":4373,"Ġaim":4374,"Ġstable":4375,"ides":4376,"GB":4377,"Ġentirely":4378,"Ġmissile":4379,"ĠGen":4380,"Ġunclear":4381,"Ġelectricity":4382,"ology":4383,"away":4384,"Ġlicense":4385,"ĠPittsburgh":4386,"Ġcameras":4387,"Ġmusical":4388,"Ġmanagers":4389,"57":4390,"Ġscores":4391,"Ġprofile":4392,"hel":4393,"¼":4394,"Ġshouldn":4395,"RA":4396,");":4397,"Ġpermanent":4398,"ome":4399,"Ġet":4400,"Ġmar":4401,"Ġfavor":4402,"Ġmaker":4403,"Ġdiscussions":4404,"ory":4405,"Ġsharp":4406,"Ġpleaded":4407,"Ġpassenger":4408,"quarter":4409,"Ġdem":4410,"Ġversus":4411,"Ġmainly":4412,"Ġeighth":4413,"ĠAirport":4414,"ĠCross":4415,"million":4416,"ĠNas":4417,"Ġcited":4418,"56":4419,"Ġyes":4420,"ĠBelow":4421,"arn":4422,"ĠTurkish":4423,"ĠSl":4424,"Ġstepped":4425,"Ġproducers":4426,"Ġovernight":4427,"Ġsounds":4428,"52":4429,"Ġ64":4430,"Ġ54":4431,"58":4432,"ĠClark":4433,"ĠRick":4434,"Ġgr":4435,"ĠMont":4436,"Ġbeer":4437,"une":4438,"Ġreporter":4439,"Ġcharity":4440,"Ġeating":4441,"Ġextend":4442,"Ġguess":4443,"NA":4444,"Ġhedge":4445,"Ġencouraged":4446,"owned":4447,"ĠMel":4448,"ĠKentucky":4449,"ace":4450,"Ġlineup":4451,"Ġhosts":4452,"Ġcapable":4453,"PR":4454,"ĠArts":4455,"Ġcontroversial":4456,"Ġhosted":4457,"ries":4458,"Ġroster":4459,"Ġfixed":4460,"ĠWalker":4461,"ged":4462,"Ġdisaster":4463,"Ġdispute":4464,"ĠDenver":4465,"ĠTrade":4466,"ute":4467,"ese":4468,"cy":4469,"Ġgrant":4470,"ĠMax":4471,"Ġdistance":4472,"isc":4473,"Ġeditor":4474,"ĠDave":4475,"Ġperformances":4476,"Ġlay":4477,"Ġvulnerable":4478,"ĠMurray":4479,"ĠâĤ¬":4480,"Ġmining":4481,"Ġ2004":4482,"level":4483,"ability":4484,"Ġauto":4485,"Ġfake":4486,"Ġattacked":4487,"ona":4488,"ups":4489,"ened":4490,"Ġfallen":4491,"Ġstations":4492,"ĠContact":4493,"itz":4494,"Ġincidents":4495,"Ġcomplaints":4496,"Ġoperates":4497,"Ġrefugees":4498,"Ġessential":4499,"ĠTest":4500,"Ġdemands":4501,"Ġroles":4502,"yr":4503,"Ġacts":4504,"Ġusual":4505,"ring":4506,"Ġhanded":4507,"ĠMatthew":4508,"hour":4509,"Ġindustries":4510,"Ġshoot":4511,"ĠAuthorities":4512,"Ġprobe":4513,"ĠUtah":4514,"ĠRBI":4515,"ĠAD":4516,"Ġprospect":4517,"outs":4518,"ĠUber":4519,"Ġbright":4520,"Ġmention":4521,"Ġsavings":4522,"ĠMiss":4523,"ONDON":4524,"Ġ1990":4525,"arm":4526,"ĠTen":4527,"These":4528,"Ġexplains":4529,"minute":4530,"85":4531,"Ġmaximum":4532,"Ġro":4533,"Ġrookie":4534,"Ġstudio":4535,"ĠCam":4536,"ĠGal":4537,"Ġdefend":4538,"hand":4539,"53":4540,"ĠOil":454
1,"Ġserves":4542,"Ġsn":4543,"ios":4544,"ĠDefense":4545,"AB":4546,"Ġhired":4547,"Ġsupports":4548,"Ġpremium":4549,"ef":4550,"Ġfailing":4551,"ĠIndiana":4552,"Ġexp":4553,"Ġobjective":4554,"Ġaffordable":4555,"ĠCom":4556,"ĠThanks":4557,"Ġanywhere":4558,"Ġconfirm":4559,"ited":4560,"Ġrepresenting":4561,"Ġwitness":4562,"69":4563,"Ġclaiming":4564,"Ġviolation":4565,"Ġhistorical":4566,"med":4567,"Ġpreparing":4568,"ĠTech":4569,"Ġposts":4570,"OC":4571,"ĠGraham":4572,"ĠGl":4573,"ĠLions":4574,"ales":4575,"ĠID":4576,"Ġcorrect":4577,"ĠAntonio":4578,"Ġadvertising":4579,"Ġeastern":4580,"OW":4581,"Ġholdings":4582,"Ġpolls":4583,"ĠSH":4584,"Ġexecutives":4585,"ĠJewish":4586,"ĠGary":4587,"Ġprize":4588,"ĠCommissioner":4589,"Ġcells":4590,"ify":4591,"Ġlunch":4592,"Ġdemocracy":4593,"ĠEr":4594,"Ġregularly":4595,"Ġresulted":4596,"ĠAve":4597,"ĠPartners":4598,"Ġrewritten":4599,"Ġlo":4600,"Ġcooperation":4601,"ĠGulf":4602,"Ġsmoke":4603,"ĠMemorial":4604,"Ġwave":4605,"Ġfears":4606,"Ġkid":4607,"ĠGiants":4608,"Ġrecovered":4609,"row":4610,"ĠRadio":4611,"ĠBarcelona":4612,"Ġwonderful":4613,"ĠDow":4614,"Ġstream":4615,"ĠSimon":4616,"Ġdetail":4617,"Ġvolunteers":4618,"ĠInd":4619,"Ġforms":4620,"mann":4621,"ĠRay":4622,"oor":4623,"ĠTake":4624,"Ġrepresented":4625,"het":4626,"Ġblow":4627,"aged":4628,"RE":4629,"ĠMissouri":4630,"Ġcovering":4631,"Ġprofits":4632,"Ġconcluded":4633,"Ġthus":4634,"ĠColumbia":4635,"ode":4636,"ĠZimbabwe":4637,"Ġdisclosed":4638,"Ġlifted":4639,"ĠSean":4640,"ĠHarvey":4641,"ĠPlus":4642,"ces":4643,"ĠGreece":4644,"ĠLady":4645,"Ġdelay":4646,"Ġkitchen":4647,"ĠIndex":4648,"Ġbear":4649,"Ġputs":4650,"new":4651,"88":4652,"ĠAsh":4653,"Å¡":4654,"Ġperforming":4655,"law":4656,"ĠPart":4657,"Ġindicated":4658,"Ġannounce":4659,"Ġcompensation":4660,"Ġka":4661,"ĠScience":4662,"ris":4663,"Ġrecommendations":4664,"ĠSecond":4665,"Ġlights":4666,"Ġtemporary":4667,"urs":4668,"Ġwestern":4669,"stone":4670,"68":4671,"ĠDisney":4672,"Ġplayoffs":4673,"Ġjudges":4674,"Ġengineering":4675,"ĠPen":4676,"ĠPal":4677,"Ġobvious":4678,"ĠBridge":4679,"ĠEnd":4680,"ĠArab":4681,"Ġexcept":4682,"Ġhole":4683,"class":4684,"Ġcauses":4685,"Ġconnect":4686,"ĠAI":4687,"An":4688,"Ġchose":4689,"ĠElizabeth":4690,"min":4691,"Ġproper":4692,"ĠNHL":4693,"Ġraces":4694,"Ġinnovation":4695,"Ġsugar":4696,"600":4697,"ĠModi":4698,"illa":4699,"Ġtrillion":4700,"ĠSar":4701,"ĠAffairs":4702,"Ġimpossible":4703,"Ġguide":4704,"Ġcaptured":4705,"ĠSales":4706,"Ġspecies":4707,"51":4708,"Ġar":4709,"Ġmaster":4710,"Ġstayed":4711,"iro":4712,"ĠEconomic":4713,"Ġvast":4714,"ili":4715,"Ġpet":4716,"ye":4717,"77":4718,"Ġkeeps":4719,"ĠPhil":4720,"ĠEPS":4721,"ĠRegional":4722,"Ġsectors":4723,"Ġdesire":4724,"ĠStanley":4725,"¾":4726,"Ġunknown":4727,"Ġpot":4728,"ĠPR":4729,"Ġknowing":4730,"Ġflying":4731,"ĠTreasury":4732,"iers":4733,"enn":4734,"ably":4735,"Ġsick":4736,"Ġmanner":4737,"Ġmanufacturers":4738,"Ġchampions":4739,"gy":4740,"Part":4741,"ister":4742,"ĠMountain":4743,"Ġimagine":4744,"Ġportion":4745,"ĠCamp":4746,"Ġchemical":4747,"ible":4748,"ĠAnaly":4749,"ĠBureau":4750,"Ġpm":4751,"Ġupdated":4752,"Ġetc":4753,"ĠField":4754,"iles":4755,"Ġobtained":4756,"Ġstick":4757,"Ġcat":4758,"har":4759,"Ġmarked":4760,"Ġmedium":4761,"ĠDes":4762,"People":4763,"Ġwealth":4764,"ores":4765,"ĠBaltimore":4766,"Ġtip":4767,"Ġdismissed":4768,"ĠVictoria":4769,"ĠBrad":4770,"Ch":4771,"Ġ56":4772,"Ġstadium":4773,"eth":4774,"Ġthunder":4775,"Ġtested":4776,"Ġdrawn":4777,"Ġcounsel":4778,"ld":4779,"Ġspirit":4780,"uss":4781,"Ġtheme":4782,"my":4783,"Ġnecessarily":4784,"Ġelements":4785,"Ġcollected":4786,"ĠRes":4787,"ĠMaryland":4788,"ĠEnter":4789,"Ġfound
ed":4790,"ae":4791,"Ġpilot":4792,"Ġshoulder":4793,"PC":4794,"Ġargument":4795,"Ġyen":4796,"Ġreceiver":4797,"Ġharm":4798,"ĠET":4799,"Ġprotesters":4800,"Ġ72":4801,"ĠAaron":4802,"Ġed":4803,"Ġexpecting":4804,"\":\"":4805,"Ġbike":4806,"Äĩ":4807,"Ġluxury":4808,"half":4809,"ĠBarbara":4810,"Ġfoundation":4811,"Ġill":4812,"Ġsubmitted":4813,"Ġdeeply":4814,"Ġhospitals":4815,"ĠBJP":4816,"Ġshock":4817,"Ġplatforms":4818,"Ġsummary":4819,"ĠWhere":4820,"Ġcelebration":4821,"iff":4822,"Ġveterans":4823,"Ġachieved":4824,"fl":4825,"Ġactivists":4826,"ĠManager":4827,"Ġformal":4828,"Ġformed":4829,"Ġinvestigate":4830,"ĠKyle":4831,"Ġ:":4832,"ĠRa":4833,"ovic":4834,"Ġdrinking":4835,"Ġnetworks":4836,"ĠAlexander":4837,"ĠOs":4838,"Ġ)":4839,"Ġbomb":4840,"Ġrecalled":4841,"ito":4842,"ient":4843,"Ġrepresentatives":4844,"ĠChrist":4845,"ĠWay":4846,"Ġdeadly":4847,"Ġinvesting":4848,"ĠRussell":4849,"Ġconsumption":4850,"Ġharder":4851,"Ġbail":4852,"Ġcritics":4853,"Ġdanger":4854,"Ġdrew":4855,"ĠSol":4856,"Ġcopyright":4857,"ĠHenry":4858,"Ġbuyers":4859,"Ġresidential":4860,"Ġmaintenance":4861,"pr":4862,"Ġmarks":4863,"Ġages":4864,"Ġcovers":4865,"Ġton":4866,"Ġtitles":4867,"ĠPS":4868,"ĠEvans":4869,"Ġmigrants":4870,"Ġflights":4871,"Ġmonitoring":4872,"Ġaddressed":4873,"Ġvital":4874,"Ġcontrolled":4875,"Ġweapon":4876,"Ġinches":4877,"Ġreduction":4878,"Ġurban":4879,"Ġcoaching":4880,"Ġreducing":4881,"ila":4882,"Ġrealize":4883,"Ġmeat":4884,"Ġref":4885,"Ġoverseas":4886,"Ġblame":4887,"Ġterrorist":4888,"Ġstuck":4889,"ĠUs":4890,"esh":4891,"pro":4892,"Ġ58":4893,"ough":4894,"Ġexposure":4895,"ĠAbu":4896,"state":4897,"Ġproviders":4898,"Ġfore":4899,"Ġjet":4900,"bar":4901,"Ġownership":4902,"ret":4903,"Ġupset":4904,"Ġfacts":4905,"Ġpurchasing":4906,"Ġreforms":4907,"Ġriver":4908,"Ġsomebody":4909,"Ġguest":4910,"iy":4911,"Ġauction":4912,"ĠReading":4913,"Ġconsequences":4914,"Ġrepresentative":4915,"Ġappointment":4916,"add":4917,"Ġcollaboration":4918,"ĠTesla":4919,"ĠCohen":4920,"Ġengagement":4921,"Ġspeaks":4922,"EST":4923,"Ġexposed":4924,"Ġmaintained":4925,"rs":4926,"Ġdating":4927,"ĠProgram":4928,"board":4929,"Ġracing":4930,"Ġpension":4931,"ign":4932,"iti":4933,"ĠFive":4934,"Ġextensive":4935,"ĠHa":4936,"ĠPoint":4937,"ĠMexican":4938,"Ġexpanded":4939,"Ġtotally":4940,"Ġinvestigations":4941,"ĠOrleans":4942,"Ġcycle":4943,"ĠESPN":4944,"ifying":4945,"Ġcup":4946,"ĠAz":4947,"ĠInvestors":4948,"Ġengage":4949,"reg":4950,"Ġfought":4951,"Ġterrorism":4952,"Ġblocked":4953,"ĠOK":4954,"Äį":4955,"72":4956,"Ġdestroyed":4957,"«":4958,"Ġstaying":4959,"Ġafford":4960,"Ġappearances":4961,"ĠHills":4962,"Ġcrore":4963,"Ġstrategies":4964,"Ġtips":4965,"ĠSm":4966,"ĠFr":4967,"Ġbanned":4968,"ĠSon":4969,"ask":4970,"Ġlimits":4971,"Ġrecognition":4972,"Ġeligible":4973,"ĠGar":4974,"Ġvolatility":4975,"Ġlaid":4976,"nes":4977,"Ġgrade":4978,"ĠRE":4979,"ĠHart":4980,"Ġ57":4981,"oma":4982,"Ġuncertainty":4983,"Ġrecognized":4984,"ĠPC":4985,"Ġchosen":4986,"uz":4987,"Ġadviser":4988,"una":4989,"Ġassessment":4990,"Ġreveal":4991,"mo":4992,"After":4993,"ĠBro":4994,"ĠOff":4995,"Ġpeak":4996,"Ġreferred":4997,"ĠSC":4998,"Ġ2003":4999,"ification":5000,"Ġshutdown":5001,"ĠOfficials":5002,"ias":5003,"Ġextreme":5004,"Ġflood":5005,"Ġhockey":5006,"Ġwage":5007,"ĠNet":5008,"Ġdamaged":5009,"Ġreplacement":5010,"ĠMaria":5011,"Ġcreation":5012,"Ġguns":5013,"aci":5014,"Ġworker":5015,"do":5016,"Ġviewers":5017,"Ġseed":5018,"sts":5019,"Ġtouchdowns":5020,"Ġmistake":5021,"ray":5022,"ull":5023,"Ġpricing":5024,"Ġstrongly":5025,"Ġaims":5026,"ĠNavy":5027,"ĠEgypt":5028,"ker":5029,"Ġve":5030,"ĠSteven":5031,"Ġres":5032,"ational":5033,"Ġrequests":5034,
"Ġemissions":5035,"ĠArena":5036,"uma":5037,"ĠAtlantic":5038,"hr":5039,"ĠAFP":5040,"ĠSquare":5041,"Ġcontribute":5042,"Ġfunction":5043,"Ġdec":5044,"ĠNelson":5045,"89":5046,"Ġreferendum":5047,"ĠPre":5048,"Ġapplied":5049,"ĠGMT":5050,"ĠIranian":5051,"ĠNigerian":5052,"ĠAny":5053,"NG":5054,"Ġacknowledged":5055,"Ġreferring":5056,"Ġventure":5057,"Ġimports":5058,"Ġblog":5059,"Ġfutures":5060,"OU":5061,"ĠUFC":5062,"Ġneither":5063,"Ġextension":5064,"hes":5065,"ĠMed":5066,"76":5067,"Ġsustainable":5068,"ains":5069,"Ġreputation":5070,"ĠVancouver":5071,"Ġbasically":5072,"acy":5073,"Ġsad":5074,"ĠFrancis":5075,"ĠKennedy":5076,"ĠNevada":5077,"ĠLu":5078,"ras":5079,"ĠAv":5080,"Ġrear":5081,"ĠHo":5082,"Ġproperly":5083,"abe":5084,"ĠHotel":5085,"Ġopinions":5086,"under":5087,"ĠStation":5088,"ĠFOR":5089,"ops":5090,"Ġadopted":5091,"ĠSwiss":5092,"ĠCountry":5093,"ĠTer":5094,"ĠAndy":5095,"Me":5096,"ĠCooper":5097,"ĠTigers":5098,"ĠCreek":5099,"Ġgay":5100,"iner":5101,"ĠAN":5102,"Ġbird":5103,"lla":5104,"ĠKate":5105,"ĠPet":5106,"ni":5107,"Ġprospects":5108,"ater":5109,"ites":5110,"Ġescape":5111,"lam":5112,"ake":5113,"Ġ1980":5114,"ĠLag":5115,"Ġsuccessfully":5116,"Ġdistricts":5117,"Ġministers":5118,"aries":5119,"Ġframe":5120,"ĠON":5121,"ĠEuro":5122,"ĠMarkets":5123,"Ġregister":5124,"Ġdefeated":5125,"Ġdevelopments":5126,"Ġninth":5127,"Ġquiet":5128,"Ġgenerated":5129,"Ġvaluable":5130,"Ġrecommended":5131,"ĠTheatre":5132,"ĠCap":5133,"bed":5134,"Ġreference":5135,"Ġease":5136,"oring":5137,"Ġ66":5138,"Ġimprovements":5139,"Ġelsewhere":5140,"ĠHillary":5141,"Ġdefender":5142,"ĠRight":5143,"zy":5144,"Ġcomprehensive":5145,"Ġspotted":5146,"ĠOakland":5147,"ĠOk":5148,"ĠSystem":5149,"ique":5150,"Ġpersons":5151,"Ġexist":5152,"Ġbroader":5153,"Ġclinical":5154,"Ġ2001":5155,"oul":5156,"Ġsecurities":5157,"ghan":5158,"Ġshelter":5159,"ero":5160,"ATED":5161,"Ġhosting":5162,"Ġselect":5163,"ĠKavanaugh":5164,"Ġrestrictions":5165,"osa":5166,"Ġyields":5167,"ĠLA":5168,"Ġ59":5169,"Ġwonder":5170,"Ġabsence":5171,"ür":5172,"ÅĤ":5173,"DP":5174,"Ġelectronic":5175,"Ġillegally":5176,"Ġmicro":5177,"ĠNEW":5178,"Ġhall":5179,"Ġaged":5180,"Ġtemperature":5181,"cast":5182,"atic":5183,"Ġlegacy":5184,"Ġaffairs":5185,"ji":5186,"ĠResources":5187,"Ġgang":5188,"winning":5189,"Ġattending":5190,"aro":5191,"Ġfriendly":5192,"aine":5193,"Ġcannabis":5194,"Ġairline":5195,"Ġnoting":5196,"Ġprofessionals":5197,"ĠFREE":5198,"RC":5199,"Ġfinancing":5200,"Ġindependence":5201,"ved":5202,"Ġresulting":5203,"Ġsteady":5204,"ĠWinter":5205,"uring":5206,"Ġhoped":5207,"98":5208,"Ġpresentation":5209,"aya":5210,"Ġrated":5211,"osh":5212,"ĠAnalysis":5213,"=":5214,"Ġdonations":5215,"IR":5216,"Ġcombat":5217,"ĠHoward":5218,"anda":5219,"79":5220,"Ġinvested":5221,"Ġexpanding":5222,"omb":5223,"ress":5224,"ble":5225,"Ġjournalist":5226,"ĠWoods":5227,"Ġcenters":5228,"ott":5229,"Ġstreaming":5230,"Ġterror":5231,"Ġsustained":5232,"ĠWWE":5233,"pre":5234,"ÅŁ":5235,"ait":5236,"Ġarrival":5237,"Ġresidence":5238,"Ġextent":5239,"Ġarrive":5240,"Ġ2002":5241,"Ġestablish":5242,"74":5243,"ĠArgentina":5244,"ĠDem":5245,"inn":5246,"aud":5247,"ĠNCAA":5248,"Ġquestioned":5249,"Ġballot":5250,"Ġmin":5251,"Ġlandscape":5252,"Ġhorse":5253,"Ġopponent":5254,"iel":5255,"Ġprompted":5256,"atory":5257,"Ġlift":5258,"Ġassociation":5259,"cher":5260,"Ġdefending":5261,"Ġtiny":5262,"Ġpoverty":5263,"ĠSafety":5264,"Ġpetition":5265,"ĠLimited":5266,"ĠCA":5267,"FC":5268,"Ãł":5269,"oni":5270,"Ġmonitor":5271,"ÃŃa":5272,"MA":5273,"Ġanswers":5274,"ĠMitchell":5275,"Ġbo":5276,"ĠShah":5277,"Ġsm":5278,"Ġmedal":5279,"ĠCivil":5280,"Ġrecognize":5281,"key":5282,"Ġpregnant"
:5283,"Ġspots":5284,"ante":5285,"Ġacademic":5286,"Ġinitiatives":5287,"Ġsecured":5288,"ĠCL":5289,"ils":5290,"Ġanticipated":5291,"Ġinvolvement":5292,"ĠMake":5293,"Ġinsisted":5294,"ĠWales":5295,"Ġclothing":5296,"Ġtracks":5297,"Ġsymptoms":5298,"Ġplate":5299,"ĠNY":5300,"Ġretailers":5301,"ĠPan":5302,"Ġfled":5303,"Ġquoted":5304,"Ġsaved":5305,"ĠCarter":5306,"Ġteaching":5307,"ĠTokyo":5308,"ĠCr":5309,"ĠSix":5310,"ĠPicture":5311,"Ġrecover":5312,"Ġcomedy":5313,"ree":5314,"Ġstrikes":5315,"ĠSanders":5316,"sel":5317,"Ġgraduate":5318,"Ġpending":5319,"St":5320,"Ġwarrant":5321,"Ġhonest":5322,"ĠGM":5323,"Ġnoticed":5324,"ĠGalaxy":5325,"ider":5326,"Ġproposals":5327,"Ġwore":5328,"Ġindeed":5329,"EM":5330,"ĠChannel":5331,"ances":5332,"ĠBrady":5333,"86":5334,"Ġgotten":5335,"Ġthrowing":5336,"ĠLeader":5337,"ĠVideo":5338,"71":5339,"Ġwelcomed":5340,"NEW":5341,"Ġfairly":5342,"Ġpromises":5343,"ĠSilver":5344,"Ġrape":5345,"Ġopener":5346,"ares":5347,"ĠSir":5348,"making":5349,"Ġcur":5350,"Ġrooms":5351,"73":5352,"Ġamounts":5353,"ĠIndustry":5354,"ĠDar":5355,"Ġ62":5356,"ted":5357,"Ġabroad":5358,"ĠMaybe":5359,"Ġreaders":5360,"oke":5361,"Ġpublication":5362,"ĠJean":5363,"Ġoperator":5364,"ĠHaving":5365,"ĠMil":5366,"life":5367,"Ġgenerate":5368,"ĠCraig":5369,"ĠMass":5370,"ĠBh":5371,"Ġrequested":5372,"Ġcrazy":5373,"ĠSpace":5374,"Ġcopy":5375,"Ġexport":5376,"Ġcontext":5377,"Ġbr":5378,"62":5379,"ĠRobinson":5380,"Ġcyber":5381,"ENT":5382,"BI":5383,"arg":5384,"Ġspeaker":5385,"Ġdramatic":5386,"ĠOl":5387,"ĠMill":5388,"Ġtrained":5389,"Ġediting":5390,"Ġsalary":5391,"Ġdirectors":5392,"Ġexplore":5393,"Ġlucky":5394,"Ġprominent":5395,"Ġbrothers":5396,"Ġneck":5397,"icht":5398,"ĠWatson":5399,"born":5400,"Ġproven":5401,"Ġprincipal":5402,"Ġedition":5403,"Ed":5404,"Ġswitch":5405,"maker":5406,"Ġrelative":5407,"mi":5408,"ĠBruce":5409,"ho":5410,"ĠScottish":5411,"water":5412,"ĠSport":5413,"ĠKings":5414,"ĠCollins":5415,"adi":5416,"Ġcelebrated":5417,"Ġclothes":5418,"Ġsunny":5419,"ĠCharlotte":5420,"ees":5421,"Ġscenes":5422,"ĠData":5423,"Ġwounded":5424,"Ġunusual":5425,"Ġrealized":5426,"ĠPlan":5427,"ĠTrans":5428,"ĠFC":5429,"Ġletters":5430,"Ġalerts":5431,"ĠWarren":5432,"DS":5433,"oss":5434,"pping":5435,"Ġsuspension":5436,"Ġbenchmark":5437,"ĠAcc":5438,"Ġalert":5439,"Ġpassion":5440,"ĠEst":5441,"Ġlatter":5442,"Ġstability":5443,"Ġarts":5444,"Ġpursue":5445,"ĠSeason":5446,"Ġfields":5447,"Ġmethod":5448,"63":5449,"Ġfolks":5450,"Ġexclusive":5451,"Ġcrews":5452,"Ġsessions":5453,"ĠMajor":5454,"ĠMount":5455,"Ġmap":5456,"Ġ=":5457,"Ġsituations":5458,"ĠBerlin":5459,"rey":5460,"Ġdates":5461,"Ġsheet":5462,"ĠLo":5463,"Ġfighters":5464,"ĠMart":5465,"Ġatmosphere":5466,"Ġillness":5467,"Ġcompeting":5468,"ĠChristopher":5469,"ĠRoy":5470,"mm":5471,"iano":5472,"Ġge":5473,"ĠRams":5474,"Ġconversations":5475,"ĠPa":5476,"ĠTel":5477,"Ġappreciate":5478,"78":5479,"ĠTotal":5480,"low":5481,"ĠStone":5482,"Ġopposite":5483,"Ġbarrel":5484,"Ġdevelopers":5485,"Ġexpress":5486,"Ġhighs":5487,"which":5488,"par":5489,"ĠVietnam":5490,"Ġblocks":5491,"Ġrecording":5492,"Ġadjusted":5493,"Ġret":5494,"ĠAR":5495,"Ġmilitants":5496,"Ġinnovative":5497,"ĠGhana":5498,"FR":5499,"Ġfantastic":5500,"Ġmortgage":5501,"ando":5502,"ĠLane":5503,"ises":5504,"ĠÂ":5505,"Ġhomeless":5506,"ĠKal":5507,"Ġapproached":5508,"Ġrounds":5509,"Ġmargins":5510,"ament":5511,"ĠMotor":5512,"Ġencouraging":5513,"ÂŃ":5514,"uru":5515,"Ġhandling":5516,"ĠMassachusetts":5517,"Ġplanet":5518,"ĠSpring":5519,"ĠBon":5520,"gu":5521,"Beat":5522,"Ġdrawing":5523,"ĠPhoenix":5524,"very":5525,"aid":5526,"ĠSte":5527,"ĠEntertainment":5528,"ĠRon":5529,"Ġassigned":5530,"ĠSA
":5531,"News":5532,"Ġinterviews":5533,"ĠOh":5534,"media":5535,"vel":5536,"Ġpermission":5537,"Ġtransactions":5538,"Ġtraders":5539,"Ġsolo":5540,"Ġprovincial":5541,"Ġsuggesting":5542,"¡":5543,"Ġdiverse":5544,"Ġ67":5545,"Ġranks":5546,"ĠFre":5547,"Ġfavourite":5548,"Ġ63":5549,"Ġdifferences":5550,"Ġtargeting":5551,"Ġactors":5552,"Ġ76":5553,"icated":5554,"Ġcollect":5555,"akes":5556,"war":5557,"Ġcontained":5558,"ches":5559,"Ġlibrary":5560,"Ġsegments":5561,"ĠLine":5562,"ê":5563,"ual":5564,"Ġbags":5565,"Ġfactory":5566,"Ġear":5567,"Ġsomewhat":5568,"Ġrail":5569,"ĠUP":5570,"ula":5571,"ĠNiger":5572,"Ġlas":5573,"Ġimplementation":5574,"Ġemails":5575,"kel":5576,"wing":5577,"Ġadvised":5578,"--":5579,"istic":5580,"Ġdepth":5581,"Ġshoes":5582,"ĠJennifer":5583,"Ġvenue":5584,"Ġcontain":5585,"Ġhighlights":5586,"Ġcapabilities":5587,"Ġprocesses":5588,"Ġtradition":5589,"Ġcontacted":5590,"Ġproducing":5591,"Ġtrail":5592,"rem":5593,"Ġ600":5594,"Ġ68":5595,"AA":5596,"ĠBa":5597,"ĠSuch":5598,"ĠTyler":5599,"ipp":5600,"Ġsurvived":5601,"ami":5602,"ĠContinue":5603,"Ġcapture":5604,"bi":5605,"61":5606,"96":5607,"Ġthreatening":5608,"Ġkeen":5609,"dale":5610,"Ġtrailer":5611,"Ġstages":5612,"ĠGordon":5613,"Ġfinishing":5614,"Ġlegislative":5615,"Ġuseful":5616,"ĠGreek":5617,"ald":5618,"Ġgrounds":5619,"ĠDu":5620,"storms":5621,"ills":5622,"Ġexpense":5623,"Ġdetained":5624,"Today":5625,"Ġdiet":5626,"Ġwood":5627,"ĠCameron":5628,"Ġthrown":5629,"Ġcricket":5630,"Ġideal":5631,"with":5632,"Ġteammates":5633,"ours":5634,"Ġprojected":5635,"Ġpersonally":5636,"ĠBoy":5637,"rom":5638,"ĠPhilippines":5639,"win":5640,"ges":5641,"Ġcounties":5642,"ĠBaker":5643,"Ġprosecutor":5644,"Ġroof":5645,"met":5646,"Ġpartly":5647,"ĠMoon":5648,"eman":5649,"Ġfocusing":5650,"Ġfishing":5651,"than":5652,"ĠJeremy":5653,"ĠBad":5654,"ais":5655,"Ġcontrols":5656,"Ġtonnes":5657,"Ġshall":5658,"Ġ61":5659,"Ġgathering":5660,"ĠERA":5661,"Ġpresidency":5662,"Ġ85":5663,"ĠGas":5664,"Ġscenario":5665,"Ġquarters":5666,"Ġang":5667,"Ġsettled":5668,"ĠCommerce":5669,"Ġanybody":5670,"Ġgarden":5671,"ĠLibrary":5672,"Ġbet":5673,"Ġtopic":5674,"olo":5675,"Ġintense":5676,"87":5677,"Ġlinks":5678,"Ġmed":5679,"ĠAG":5680,"Ġflooding":5681,"ĠMurphy":5682,"PM":5683,"Ġfinds":5684,"Ġsensitive":5685,"pped":5686,"Ġcompletion":5687,"Ġminority":5688,"Ġvon":5689,"Ġstriking":5690,"rich":5691,"Ġbars":5692,"Ġefficient":5693,"Ġcontributions":5694,"Ġvisits":5695,"Ġattract":5696,"ĠMalaysia":5697,"ĠREL":5698,"Ġopens":5699,"Ġessentially":5700,"Ġreasonable":5701,"Ġsentiment":5702,"ĠMelbourne":5703,"Ġfitness":5704,"Ġfrequently":5705,"ĠRangers":5706,"Ġmuseum":5707,"ĠDNA":5708,"Ġcontrast":5709,"ĠAdams":5710,"ĠWin":5711,"Ġfalls":5712,"Ġimposed":5713,"250":5714,"ood":5715,"ĠRio":5716,"Ġchoices":5717,"Ġyellow":5718,"rin":5719,"ben":5720,"ĠStaff":5721,"ĠIndonesia":5722,"Ġcarries":5723,"Ġtourism":5724,"UM":5725,"ĠOrange":5726,"sell":5727,"Ġresolve":5728,"ĠMumbai":5729,"Ġpan":5730,"Ġimplement":5731,"Ġmidfielder":5732,"OP":5733,"Ġtensions":5734,"Ġ800":5735,"ĠLord":5736,"ĠLight":5737,"Ġlies":5738,"és":5739,"Ġparticipation":5740,"Ġtries":5741,"Ġsheriff":5742,"degree":5743,"Ġcongressional":5744,"Ġmode":5745,"Ġregulation":5746,"ĠJacob":5747,"ĠCrown":5748,"Ġbowl":5749,"ĠMississippi":5750,"Ġtheft":5751,"ĠKingdom":5752,"Ġresort":5753,"Ġroyal":5754,"Ġunemployment":5755,"PP":5756,"Ġnomination":5757,"ĠTR":5758,"Ġbehaviour":5759,"bank":5760,"ĠForest":5761,"WASHINGTON":5762,"ĠOthers":5763,"Ġslowly":5764,"Ġmenu":5765,"vo":5766,"ĠSy":5767,"ĠMetro":5768,"ĠLisa":5769,"Ġregistration":5770,"While":5771,"ĠJesus":5772,"Ġ250":5773,"Ġprocessing":5774,"Ġmonetar
y":5775,"ape":5776,"ener":5777,"ĠSystems":5778,"Ġdisappointed":5779,"Ġprint":5780,"uy":5781,"ħ":5782,"Ġdemanding":5783,"Ġincredibly":5784,"play":5785,"Ġsurveillance":5786,"ĠStandard":5787,"Ġperiods":5788,"Ġwrites":5789,"ĠLuke":5790,"ĠPalestinian":5791,"Ġwalks":5792,"Ġriding":5793,"Ġwaters":5794,"ĠSox":5795,"Ġtraveling":5796,"Ġtap":5797,"Ġorganized":5798,"Ġresource":5799,"Ġangry":5800,"Ġtiming":5801,"Ġempty":5802,"Ġmilk":5803,"Ġtherapy":5804,"ĠBrandon":5805,"mon":5806,"Ġnationwide":5807,"Ġnovel":5808,"ĠStorm":5809,"iet":5810,"ĠBre":5811,"Ġbegun":5812,"Ġdiplomatic":5813,"Ġads":5814,"ĠDC":5815,"ĠOb":5816,"ĠMontreal":5817,"ĠDown":5818,"ĠMilwaukee":5819,"Ġmeal":5820,"ĠPuerto":5821,"ĠMas":5822,"Ġjoy":5823,"Ġdeparture":5824,"ĠWright":5825,"Ġspoken":5826,"style":5827,"ĠAction":5828,"ĠComey":5829,"Ġdelivering":5830,"Ġtoll":5831,"Ġmidnight":5832,"ĠRevenue":5833,"Ġfiring":5834,"Ġstunning":5835,"Ġkicked":5836,"ĠOttawa":5837,"Ġefficiency":5838,"ĠLincoln":5839,"Ġtaste":5840,"ez":5841,"ĠWeather":5842,"ĠMorning":5843,"Ġhadn":5844,"Ġdiversity":5845,"ily":5846,"ĠAy":5847,"Ġargue":5848,"Ġerror":5849,"Ġtaught":5850,"Ġche":5851,"Ġoccasion":5852,"Ġinc":5853,"ĠOrlando":5854,"ĠOnline":5855,"Ġlegs":5856,"ĠNation":5857,"uck":5858,"Ġwidespread":5859,"ĠOcean":5860,"Ġconstantly":5861,"ĠLatin":5862,"Ġcomfort":5863,"Ġrely":5864,"uff":5865,"ĠCard":5866,"aring":5867,"Ġhumans":5868,"ĠThomson":5869,"aka":5870,"BIT":5871,"ĠReview":5872,"po":5873,"ú":5874,"Ġtrucks":5875,"Ġforecasts":5876,"view":5877,"Ġlongtime":5878,"ĠConstitution":5879,"Ġreserves":5880,"bit":5881,"Ġstressed":5882,"Ġcontribution":5883,"Ġchicken":5884,"ĠDE":5885,"Ġfat":5886,"ĠOscar":5887,"Ġcriticized":5888,"Ġtestimony":5889,"Ġapparent":5890,"Ġconstant":5891,"Ġcabinet":5892,"ĠDuke":5893,"Ġaspects":5894,"lic":5895,"ĠVol":5896,"Ġwing":5897,"Ġreb":5898,"ĠSessions":5899,"ĠSmart":5900,"car":5901,"ĠIm":5902,"Ġoperational":5903,"Ġregulators":5904,"ĠJimmy":5905,"eter":5906,"Ġnobody":5907,"ĠMarc":5908,"Ġliterally":5909,"Ġresistance":5910,"ĠKam":5911,"Ġsexually":5912,"Ġ69":5913,"uth":5914,"Ġviewed":5915,"Ġpicks":5916,"Ġdin":5917,"Ġtalented":5918,"Ġtennis":5919,"Ġstrengthen":5920,"Ġgl":5921,"ĠProtection":5922,"Ġinstalled":5923,"ways":5924,"ĠCampbell":5925,"ĠPortland":5926,"Ġintent":5927,"ĠPalace":5928,"Ġsecondary":5929,"Ġlocked":5930,"ĠPA":5931,"Ġlanded":5932,"Ġlength":5933,"Ġboosted":5934,"Ġpurchases":5935,"Ġcommand":5936,"ĠAsked":5937,"Ġspaces":5938,"Ġiconic":5939,"Ġrecommend":5940,"Ġduties":5941,"Ġseized":5942,"Ġdelayed":5943,"FA":5944,"AND":5945,"daq":5946,"Ġhiring":5947,"Ġoccur":5948,"DC":5949,"ĠMus":5950,"Ġag":5951,"Ġhopefully":5952,"ĠPenn":5953,"ards":5954,"Ġstriker":5955,"Ġrent":5956,"ĠTy":5957,"ĠBuffalo":5958,"ĠKy":5959,"Ġhike":5960,"pper":5961,"Ġ120":5962,"Ġop":5963,"Ġwheel":5964,"ĠIan":5965,"Ġchart":5966,"tt":5967,"Ġvolunteer":5968,"IG":5969,"person":5970,"ight":5971,"ĠBook":5972,"unt":5973,"ĠTechnologies":5974,"Now":5975,"Ġfavour":5976,"ĠGh":5977,"ĠQatar":5978,"ĠDutch":5979,"ĠGrant":5980,"ĠBan":5981,"rel":5982,"Ġagreements":5983,"Ġeducational":5984,"worth":5985,"ĠWard":5986,"700":5987,"Ġanymore":5988,"Ġrepair":5989,"Ġoperators":5990,"ĠLi":5991,"ots":5992,"ĠLouisiana":5993,"ĠWhether":5994,"Ġodds":5995,"Ġnoon":5996,"ĠStr":5997,"Ġfail":5998,"iser":5999,"Ġforever":6000,"Ġrecall":6001,"ĠPo":6002,"ĠHot":6003,"Ġdesigner":6004,"ido":6005,"LL":6006,"ĠControl":6007,"Ġsurvive":6008,"iam":6009,"Ġorganisation":6010,"ĠWork":6011,"Ġwider":6012,"Ġtank":6013,"work":6014,"ĠAS":6015,"Ġposting":6016,"Ġsuddenly":6017,"MC":6018,"ĠAL":6019,"ĠProfessor":6020,"ĠCoach":6021,"Ġrushed":60
22,"Ġafraid":6023,"Ġactivist":6024,"that":6025,"ĠFilm":6026,"Ġbacking":6027,"Ġhousehold":6028,"Ġsignal":6029,"Ġaccurate":6030,"str":6031,"ĠThread":6032,"ĠBears":6033,"ATION":6034,"ĠAlliance":6035,"ĠMcDonald":6036,"ĠVenezuela":6037,"ogg":6038,"ĠWindows":6039,"makers":6040,"Ġutility":6041,"Ġrapidly":6042,"Ġattractive":6043,"Ġpa":6044,"ĠLarry":6045,"Ġmisconduct":6046,"Ġfreshman":6047,"Ġqualified":6048,"Ġcleared":6049,"Ġcrashed":6050,"Ġparticipating":6051,"Ġpages":6052,"Ġhighlight":6053,"Ġdialogue":6054,"ĠAlberta":6055,"Ġca":6056,"Ġwitnesses":6057,"ables":6058,"Ġfollowers":6059,"Ġensuring":6060,"Ġpromoting":6061,"Ġsearching":6062,"Ġremote":6063,"Ġclash":6064,"Ġfirefighters":6065,"Ġteen":6066,"ĠPlace":6067,"ĠNote":6068,"Ġregardless":6069,"ult":6070,"oney":6071,"ander":6072,"ional":6073,"ining":6074,"Ġdemanded":6075,"ĠCommunications":6076,"Ġconsideration":6077,"TC":6078,"ĠSoutheast":6079,"aga":6080,"ĠGarden":6081,"inger":6082,"ht":6083,"Ġbranch":6084,"Ġmouth":6085,"Ġaudio":6086,"Ġraw":6087,"Ġcoordinator":6088,"Ġexact":6089,"ĠHan":6090,"Ġdelays":6091,"ĠWal":6092,"ĠWells":6093,"Ġng":6094,"Ġhandful":6095,"Ġgirlfriend":6096,"Ġtypical":6097,"ĠWayne":6098,"ĠFranklin":6099,"Ġconstitutional":6100,"ĠChance":6101,"Ġblamed":6102,"rim":6103,"Ġpreliminary":6104,"Ġlie":6105,"da":6106,"ĠCapitol":6107,"Ġroutine":6108,"ĠNASA":6109,"Ġtre":6110,"ĠGolf":6111,"Ġsight":6112,"ĠDer":6113,"Ġreserve":6114,"150":6115,"Ġspeculation":6116,"Ġcompetitors":6117,"ĠMacron":6118,"ony":6119,"Ġovertime":6120,"Ġ71":6121,"Ġdepending":6122,"ĠWarner":6123,"Ġaccusations":6124,"ius":6125,"Ġpredicted":6126,"ĠCharlie":6127,"Ġeverywhere":6128,"Ġcable":6129,"ĠSaint":6130,"ĠRegion":6131,"Ġhero":6132,"ĠEmb":6133,"Ġkinds":6134,"Ġstarter":6135,"Ġsolve":6136,"ĠGuard":6137,"Ġloves":6138,"ĠDouglas":6139,"Ġfunded":6140,"ĠBrent":6141,"ĠAnyone":6142,"Ġsubstantial":6143,"ĠMarine":6144,"ĠMichelle":6145,"Ġcelebrating":6146,"Ġoffset":6147,"Ġbutton":6148,"gg":6149,"Ġmedicine":6150,"uri":6151,"Ġsomewhere":6152,"PD":6153,"Ġmon":6154,"Ġfires":6155,"final":6156,"oth":6157,"ined":6158,"Ġunderway":6159,"Ġmistakes":6160,"Ġgrateful":6161,"Ġcheap":6162,"È":6163,"Ġ95":6164,"Ġviolations":6165,"arr":6166,"Ġsurprising":6167,"Ġob":6168,"ĠNATO":6169,"Ġcontroversy":6170,"ĠSweden":6171,"Ġfuneral":6172,"Ġreviews":6173,"Ġpromotion":6174,"TY":6175,"Ġliberal":6176,"Ġpromising":6177,"ĠSP":6178,"How":6179,"Ġmemories":6180,"Ġbreast":6181,"zi":6182,"ights":6183,"Ġpattern":6184,"Ġoutdoor":6185,"ĠMu":6186,"Ġrush":6187,"ĠTheresa":6188,"ĠPol":6189,"Ġdescribe":6190,"ĠBand":6191,"ĠStewart":6192,"Ġ1999":6193,"ĠRaiders":6194,"mp":6195,"Ġprocedures":6196,"Ġplot":6197,"Ġhire":6198,"used":6199,"Ġ1970":6200,"Ġpicking":6201,"ĠSim":6202,"Ġregard":6203,"inal":6204,"backs":6205,"ĠHard":6206,"ĠLow":6207,"ĠAc":6208,"Is":6209,"Ġguarantee":6210,"ĠGiven":6211,"Ġbeta":6212,"ĠTre":6213,"Ġtrans":6214,"Ġretailer":6215,"Ġpurposes":6216,"ĠHol":6217,"Ġenjoying":6218,"Ġbrown":6219,"ĠPerry":6220,"Ġplea":6221,"MS":6222,"ĠDakota":6223,"ĠParker":6224,"Ġcommit":6225,"ĠLawrence":6226,"ĠMorris":6227,"ended":6228,"Ġvirtual":6229,"ÃĹ":6230,"Ġfruit":6231,"84":6232,"ĠHas":6233,"ishing":6234,"Ġdominated":6235,"ĠFA":6236,"Ġchannels":6237,"Ġunderstood":6238,"Ġcitizen":6239,"Ġchecks":6240,"ĠKenya":6241,"Ġdisabled":6242,"SD":6243,"Ġprotecting":6244,"Ġtweets":6245,"Ġsparked":6246,"ĠCO":6247,"§":6248,"ori":6249,"ĠGDP":6250,"ĠSer":6251,"ĠVisit":6252,"ĠMS":6253,"Ġbarely":6254,"Ġsand":6255,"Ġap":6256,"aging":6257,"Ġrel":6258,"ĠPerhaps":6259,"ĠMourinho":6260,"ĠJets":6261,"Ġdisclosure":6262,"Ġhighlighted":6263,"Ġimplemented":6264,"Ġ
compliance":6265,"ĠAB":6266,"ĠAssistant":6267,"ĠCape":6268,"Ġfunny":6269,"Ġleverage":6270,"Ġmachines":6271,"Ġranging":6272,"Ġfastest":6273,"ĠRoberts":6274,"ĠPolicy":6275,"gar":6276,"Ġcollapse":6277,"ĠThrough":6278,"Ġrobbery":6279,"ĠHay":6280,"Ġelite":6281,"ĠDigital":6282,"ĠFun":6283,"ĠAlan":6284,"ement":6285,"Ġmit":6286,"Ġspin":6287,"Ġlistening":6288,"ĠDoug":6289,"ĠSaints":6290,"Ġinterior":6291,"Ġenhance":6292,"ĠCardinals":6293,"ever":6294,"Ġrobust":6295,"Ġinform":6296,"Ġsuffer":6297,"book":6298,"ĠMuslims":6299,"Ġagriculture":6300,"Ġkm":6301,"Ġdivers":6302,"ñ":6303,"ĠReg":6304,"Ġequivalent":6305,"Ġcraft":6306,"Ġsettle":6307,"Ġcontains":6308,"ĠMack":6309,"ĠDis":6310,"ĠFore":6311,"ĠSudan":6312,"ĠMail":6313,"ĠBrooklyn":6314,"izer":6315,"bn":6316,"Ġhundred":6317,"Ġexhibition":6318,"ĠHave":6319,"vin":6320,"Ġcivilians":6321,"ĠCincinnati":6322,"Some":6323,"ĠSE":6324,"Ġbat":6325,"ĠIns":6326,"Ġcalm":6327,"Ġtone":6328,"Ġnormally":6329,"Ġseeks":6330,"ĠAss":6331,"Ġmembership":6332,"Ġannually":6333,"Ġemployers":6334,"CO":6335,"Ġcomplicated":6336,"Ġheadlines":6337,"ĠLabor":6338,"Ġlifestyle":6339,"ĠRen":6340,"ĠRich":6341,"cent":6342,"ude":6343,"Ġawesome":6344,"Ġpaint":6345,"Ġrolling":6346,"Ġwalls":6347,"Ġlab":6348,"Ġtourists":6349,"care":6350,"Ġgear":6351,"izz":6352,"Ġcream":6353,"ĠTro":6354,"ices":6355,"Ġpack":6356,"Ġdiseases":6357,"ĠSpeaker":6358,"ĠOfficers":6359,"Ġsky":6360,"83":6361,"ĠBE":6362,"Ġcategories":6363,"Ġindicate":6364,"Ġru":6365,"ĠSony":6366,"ĠDun":6367,"ocks":6368,"Ġconcrete":6369,"ĠMadison":6370,"ĠSab":6371,"IV":6372,"Ġobserved":6373,"ria":6374,"Ġinterim":6375,"Ġencounter":6376,"ista":6377,"Ġanger":6378,"Ġrapid":6379,"mail":6380,"Ġdestination":6381,"ĩ":6382,"Ġbreaks":6383,"rell":6384,"ĠChase":6385,"Ġattorneys":6386,"Ġrolled":6387,"ĠSprings":6388,"ĠVillage":6389,"TO":6390,"HS":6391,"Ġcampaigns":6392,"ologist":6393,"ĠTax":6394,"ĠIII":6395,"Ġteach":6396,"Ġprovision":6397,"Ġrem":6398,"Ġshirt":6399,"Ġdeployed":6400,"Ġguidelines":6401,"Ġav":6402,"zer":6403,"Ġrushing":6404,"94":6405,"place":6406,"Man":6407,"Ġdivided":6408,"ĠGun":6409,"Ġwindows":6410,"Ġcomponents":6411,"aba":6412,"ĠSwitzerland":6413,"election":6414,"ĠTampa":6415,"ĠAri":6416,"ás":6417,"Ġhighway":6418,"Ġacres":6419,"Ġcrown":6420,"known":6421,"Ġinquiry":6422,"url":6423,"Ġexpertise":6424,"Ġpraised":6425,"yer":6426,"Ġconclusion":6427,"Ġabortion":6428,"Ġlady":6429,"Ġtribute":6430,"Ġunveiled":6431,"Ġbeaten":6432,"TE":6433,"ĠMot":6434,"unk":6435,"Ġtriple":6436,"Ġforcing":6437,"ĠTickets":6438,"uit":6439,"Ġiron":6440,"Ġscientific":6441,"ĠIP":6442,"Ġdiagnosed":6443,"Ġocean":6444,"wide":6445,"ĠCowboys":6446,"LC":6447,"Ġmethods":6448,"ĠFind":6449,"ĠDean":6450,"Ġfundamental":6451,"ĠGill":6452,"Ġfeelings":6453,"IO":6454,"hu":6455,"Ġfeedback":6456,"ote":6457,"Ġduo":6458,"fully":6459,"get":6460,"Ġproof":6461,"story":6462,"Ġlongest":6463,"Ġshops":6464,"ĠJong":6465,"ĠCro":6466,"ĠHawaii":6467,"91":6468,"ĠJake":6469,"ĠSusan":6470,"Ġsubmit":6471,"rav":6472,"Ġmodest":6473,"Ġlit":6474,"Ġattempting":6475,"Ġsits":6476,"Ġaddressing":6477,"93":6478,"ĠBi":6479,"Ġlying":6480,"ĠOrganization":6481,"ĠOak":6482,"oli":6483,"Ġfatal":6484,"Ġmountain":6485,"val":6486,"lu":6487,"ĠMaine":6488,"Ġcharging":6489,"Ġresigned":6490,"illo":6491,"Ġrecommendation":6492,"party":6493,"ĠWeb":6494,"ĠPanthers":6495,"Ġnoise":6496,"ĠBrussels":6497,"awa":6498,"Ġambassador":6499,"Ġaccessible":6500,"ĠCalgary":6501,"idd":6502,"ĠAirlines":6503,"gr":6504,"Ġnu":6505,"roy":6506,"ĠMars":6507,"ĠPoland":6508,"ĠJerry":6509,"ados":6510,"ĠRico":6511,"ĠMir":6512,"ĠFin":6513,"ious":6514,"Ġpacked":651
5,"Ġinsider":6516,"President":6517,"ĠBull":6518,"ĠYemen":6519,"ĠConnecticut":6520,"Ġ73":6521,"Ġdepartments":6522,"Ġorganic":6523,"ĠSummer":6524,"ĠBet":6525,"ste":6526,"zo":6527,"rat":6528,"Ġalliance":6529,"Ġintervention":6530,"wan":6531,"ĠOR":6532,"Ġdefined":6533,"ĠÃł":6534,"ĠChiefs":6535,"Ġknocked":6536,"ared":6537,"Ġholes":6538,"Ġpulling":6539,"ĠTodd":6540,"ĠJamie":6541,"ĠSher":6542,"Ġsignature":6543,"ĠSur":6544,"Ġgym":6545,"ĠVladimir":6546,"ĠThailand":6547,"Ġgaming":6548,"Ġsaving":6549,"ceive":6550,"82":6551,"ĠBern":6552,"ĠDid":6553,"Ġhardware":6554,"ished":6555,"Ġconspiracy":6556,"ANS":6557,"ĠIntelligence":6558,"Ġassembly":6559,"Ġ101":6560,"Ġconcise":6561,"ĠManhattan":6562,"Ġbelief":6563,"Ġsurge":6564,"Ġdeserve":6565,"Ġconsistently":6566,"ĠNor":6567,"okes":6568,"ðŁ":6569,"ME":6570,"ĠAsset":6571,"Ġsubstance":6572,"Ġprefer":6573,"Ġburning":6574,"ĠNik":6575,"ook":6576,"ĠPinterest":6577,"Ġboyfriend":6578,"ĠHal":6579,"ĠMerkel":6580,"Ġintroduce":6581,"ĠLinkedIn":6582,"ĠFull":6583,"ĠFarm":6584,"Ġchildhood":6585,"ĠTransportation":6586,"Ġterrible":6587,"du":6588,"Ġintention":6589,"Ġseemingly":6590,"elle":6591,"Ġfoods":6592,"Ġtitled":6593,"Ġdual":6594,"Ġimport":6595,"Ġdeveloper":6596,"UL":6597,"ington":6598,"ĠDelta":6599,"?'":6600,"iness":6601,"Ġquit":6602,"ĠGarcia":6603,"ĠSri":6604,"Ġhip":6605,"ĠBrazilian":6606,"elt":6607,"ively":6608,"Ġstructures":6609,"Ġlabour":6610,"Ġneighbors":6611,"Ġtill":6612,"Ġsoil":6613,"Ġdropping":6614,"Ġnominee":6615,"Ġmeets":6616,"92":6617,"rant":6618,"isa":6619,"Ġluck":6620,"aa":6621,"jet":6622,"ĠTor":6623,"ĠCrime":6624,"Ġlane":6625,"Ġflu":6626,"Ġlaunching":6627,"ĠAutom":6628,"aks":6629,"Ġuniversities":6630,"Ġpollution":6631,"ĠAdvis":6632,"ĠMall":6633,"ls":6634,"Ġdeeper":6635,"Ġrepeated":6636,"Ġmeanwhile":6637,"Ġchip":6638,"Ġoutlets":6639,"Ġliked":6640,"Ġsal":6641,"Ġwelfare":6642,"ago":6643,"Ġmakers":6644,"ving":6645,"fer":6646,"Ġovercome":6647,"mb":6648,"Ġshocked":6649,"akers":6650,"Ġnonprofit":6651,"Ġdonated":6652,"eral":6653,"Ġresume":6654,"Ġlogo":6655,"Ġsubscription":6656,"Ġ74":6657,"ela":6658,"Ġaspect":6659,"html":6660,"Ġsorry":6661,"Ġupgrade":6662,"Ġstance":6663,"Ġfr":6664,"Ġpapers":6665,"Ġattacking":6666,"Ġmeaningful":6667,"81":6668,"ĠWeinstein":6669,"Ġcreates":6670,"Ġhonour":6671,"ĠReply":6672,"oph":6673,"Ġmarch":6674,"Ġsmile":6675,"Ġcomparison":6676,"will":6677,"ĠSanchez":6678,"Ġvoter":6679,"Ġtheory":6680,"Ġequally":6681,"ĠRoger":6682,"Ġperfectly":6683,"Ġlanding":6684,"Ġbillions":6685,"ĠBloomberg":6686,"Ġpermit":6687,"Ġfinals":6688,"Ġracial":6689,"Ġpregnancy":6690,"iled":6691,"ĠFederation":6692,"Ġforest":6693,"Ġtag":6694,"aul":6695,"Ġdrinks":6696,"Ġ(\"":6697,"ĠMobile":6698,"Ġtouched":6699,"Ġclock":6700,"Ġreg":6701,"Ġasylum":6702,"igan":6703,"Ġsenator":6704,"Ġ99":6705,"ĠKumar":6706,"Ġskill":6707,"Ġ1998":6708,"pa":6709,"ĠAf":6710,"Ġmood":6711,"ston":6712,"Ġhang":6713,"ĠMPs":6714,"Please":6715,"ĠEve":6716,"Ġdocumentary":6717,"Ġpersonality":6718,"ĠCast":6719,"Ġdiscount":6720,"bing":6721,"ĠBoeing":6722,"Ġdepend":6723,"Ġcrossing":6724,"EX":6725,"Ġsucceed":6726,"Ġhumanitarian":6727,"ĠMuhammad":6728,"Ġwages":6729,"Ġcolumn":6730,"Ġexternal":6731,"Ġstatistics":6732,"ĠTODAY":6733,"Ġtrips":6734,"Ġta":6735,"Ġpenalties":6736,"Ġwriters":6737,"Ġshipping":6738,"ĠIndians":6739,"Ġsalt":6740,"ĠIndustrial":6741,"ĠYankees":6742,"ĠDen":6743,"Ġrough":6744,"Ġbarrels":6745,"ĠHor":6746,"bert":6747,"ĠDep":6748,"Ġresign":6749,"97":6750,"Ġballs":6751,"ĠJun":6752,"ĠBab":6753,"Ġassociate":6754,"Ġstring":6755,"Ġhub":6756,"Ġorgan":6757,"ĠMarshall":6758,"ĠFIFA":6759,"ĠMun":6760,"ency":6761,"r
esearch":6762,"Ġpeers":6763,"Ġtall":6764,"ĠGoldman":6765,"Don":6766,"Ġparade":6767,"Ġparks":6768,"Ġdet":6769,"Ġdisappointing":6770,"Ġreflects":6771,"ĠLakers":6772,"Ġfiles":6773,"Ġrelatives":6774,"ĠUSD":6775,"ĠArticle":6776,"Ġcustom":6777,"ĠCarlos":6778,"Ġtracking":6779,"Ġmaintaining":6780,"ĠCur":6781,"ardo":6782,"ĠSkip":6783,"Ġattitude":6784,"Just":6785,"Ġinstitution":6786,"Ġnarrow":6787,"Ġsnap":6788,"Ġenterprise":6789,"Ġdrives":6790,"Ġ77":6791,"Ġcrop":6792,"Ġvirus":6793,"Ġcelebrity":6794,"Ġeconomies":6795,"ued":6796,"Ġsum":6797,"ĠDubai":6798,"ĠInsurance":6799,"Ĺ":6800,"ury":6801,"ĠUnfortunately":6802,"Ġclosure":6803,"ota":6804,"ĠPhilip":6805,"oms":6806,"Ġinvestigated":6807,"Ġgenerations":6808,"ĠETF":6809,"ĠKeith":6810,"ĠLater":6811,"isk":6812,"Ġpreferred":6813,"Ġdefault":6814,"Ġtowns":6815,"ĠRod":6816,"ĠDie":6817,"Ġintegrated":6818,"Ġacquiring":6819,"Ġvoices":6820,"Ġser":6821,"Ġpresents":6822,"ĠBR":6823,"ĠEmergency":6824,"Ġreligion":6825,"HA":6826,"Ġresponding":6827,"ĠThings":6828,"Ġbeef":6829,"ĠWithout":6830,"urd":6831,"ĠCarl":6832,"Ġadministrative":6833,"ĠWhich":6834,"Ġchallenged":6835,"Ġcooking":6836,"ivid":6837,"ĠFer":6838,"Ġtremendous":6839,"ĠTerry":6840,"iri":6841,"CS":6842,"ĠJunior":6843,"ĠReddit":6844,"Ġtea":6845,"Ġaccounting":6846,"lan":6847,"Ġdetention":6848,"Ġreplied":6849,"SI":6850,"ĠHel":6851,"ns":6852,"ĠProf":6853,"Ġramp":6854,"ĠConservative":6855,"Ġattendance":6856,"Ġspecialist":6857,"ĠFinal":6858,"Ġadvertisement":6859,"Ġacquire":6860,"ĠWhatsApp":6861,"Ġworkforce":6862,"ĠCalif":6863,"Ġspeakers":6864,"ĠEPA":6865,"Ġconviction":6866,"hire":6867,"ĠFisher":6868,"ĠIntel":6869,"Ġbin":6870,"ĠWas":6871,"Ġearth":6872,"vi":6873,"Ġhurricane":6874,"Ġholidays":6875,"Ġassume":6876,"Ġinvolve":6877,"Ġdynamic":6878,"ĠGre":6879,"Ġitem":6880,"Ġpound":6881,"Ġanxiety":6882,"ĠPrint":6883,"rop":6884,"Ġautomatically":6885,"Ġdiscrimination":6886,"ĠLam":6887,"ĠColl":6888,"Ġimpressed":6889,"Ġinvolves":6890,"ĠLes":6891,"ĠTri":6892,"ĠLook":6893,"ĠiOS":6894,"Ġgrab":6895,"ĠAngel":6896,"Ġstops":6897,"ĠPay":6898,"ĠECB":6899,"Ġbunch":6900,"Ġletting":6901,"ele":6902,"ĠAdditionally":6903,"Ġboards":6904,"NC":6905,"Ġtragedy":6906,"Ġpink":6907,"Ġgonna":6908,"ones":6909,"Ġrev":6910,"ĠIndependent":6911,"ĠCambridge":6912,"ĠPence":6913,"Ġprosecution":6914,"Ġdeputies":6915,"ĠAhmed":6916,"Ġlows":6917,"ĠAmy":6918,"ĠBuilding":6919,"mark":6920,"Ġsmooth":6921,"Ġsole":6922,"Ġwanting":6923,"ĠHeart":6924,"Ġobtain":6925,"ĠBus":6926,"Ġexchanges":6927,"friendly":6928,"Ġlabel":6929,"elect":6930,"ĠCompanies":6931,"owing":6932,"ĠCB":6933,"RI":6934,"ĠMaster":6935,"Ġliquid":6936,"ĠDanny":6937,"Ġproceeds":6938,"ĠLaura":6939,"card":6940,"Ġtears":6941,"Ġexploration":6942,"Ġdepression":6943,"ken":6944,"ĠFe":6945,"Ġlending":6946,"ĠYouth":6947,"ality":6948,"NS":6949,"Ġmoon":6950,"ĠTaiwan":6951,"Ġstruggles":6952,"Ġdiscovery":6953,"Ġqualify":6954,"Ġwireless":6955,"alia":6956,"Ġwitnessed":6957,"Ġheight":6958,"ĠGuy":6959,"left":6960,"KE":6961,"Ġfoul":6962,"ĠMohammed":6963,"Ġgrass":6964,"ĠNon":6965,"Ġswim":6966,"Ġbrilliant":6967,"you":6968,"ĠFlynn":6969,"Ġsinging":6970,"eria":6971,"UT":6972,"ĠMcCain":6973,"ĠSep":6974,"ĠWars":6975,"Ġburden":6976,"Ġpas":6977,"Ġabandoned":6978,"Ġint":6979,"ĠTurner":6980,"Ġcollective":6981,"ĠEnvironmental":6982,"ĠStudents":6983,"Ġofferings":6984,"Ġresignation":6985,"Ġexplosion":6986,"ĠKoh":6987,"ager":6988,"Ġthrows":6989,"Ġasks":6990,"light":6991,"Ġanyway":6992,"Ġyard":6993,"Ġcarrier":6994,"Ġwaves":6995,"backed":6996,"TR":6997,"oud":6998,"Ġbreach":6999,"Ġdated":7000,"Ġdressed":7001,"ĠDodgers":7002,"oles":7003,"Ġ78":
7004,"Ġreads":7005,"Ġpredict":7006,"ĠJerusalem":7007,"ĠPT":7008,"Ġcrack":7009,"yan":7010,"Ġnights":7011,"eline":7012,"Ġconvinced":7013,"Ġlock":7014,"Ġcarefully":7015,"ĠMercedes":7016,"Ġultimate":7017,"Ġdist":7018,"Ġslight":7019,"ĠEdwards":7020,"Ġswing":7021,"iling":7022,"Ġknife":7023,"ĠNashville":7024,"IF":7025,"inder":7026,"udd":7027,"Ġsenators":7028,"ĠFurther":7029,"ĠXi":7030,"Ġstr":7031,"ĠOd":7032,"days":7033,"Ġcomm":7034,"Ġverdict":7035,"Ġconfirmation":7036,"king":7037,"ĠCS":7038,"Ġadvocates":7039,"Ġpride":7040,"Ġmemorial":7041,"ams":7042,"erman":7043,"Ġteenager":7044,"ĠNeil":7045,"uts":7046,"Ġsoul":7047,"see":7048,"post":7049,"Ġchest":7050,"fire":7051,"ĠLynch":7052,"Ġpeaceful":7053,"OND":7054,"ĠIndustries":7055,"ĠJuan":7056,"Ġrestore":7057,"Ġreliable":7058,"ming":7059,"agan":7060,"Source":7061,"ĠCabinet":7062,"Ġremarkable":7063,"ĠTrudeau":7064,"ĠEs":7065,"Ġintegrity":7066,"ove":7067,"fe":7068,"Ġproceedings":7069,"Ġconnections":7070,"Ġunprecedented":7071,"ĠGlen":7072,"ux":7073,"Ġearning":7074,"Ġingredients":7075,"Ġnominated":7076,"ĠBangladesh":7077,"made":7078,"Ġlessons":7079,"Ġbreakfast":7080,"ĠRelations":7081,"Ġloose":7082,"Al":7083,"Ġupgraded":7084,"ral":7085,"ĠPage":7086,"oto":7087,"ĠQueensland":7088,"Ġprocedure":7089,"ĠSmall":7090,"Ġrespective":7091,"Ġpictured":7092,"ĠBas":7093,"Ġpreparation":7094,"ĠMyanmar":7095,"Ġdonation":7096,"Ġvisible":7097,"iest":7098,"ĠBroadway":7099,"rick":7100,"ĠSchools":7101,"Ġarrests":7102,"ĠJessica":7103,"ĠBengal":7104,"Ġhell":7105,"Ġannouncing":7106,"Ġmail":7107,"ĠMcG":7108,"two":7109,"rest":7110,"OD":7111,"ĠBradley":7112,"Ġdoubled":7113,"Ġpledged":7114,"Ġcomeback":7115,"Ġextraordinary":7116,"Ġslide":7117,"Ġassess":7118,"Ġagricultural":7119,"ĠKay":7120,"Ġvendors":7121,"Ġnarrative":7122,"Ġreviewed":7123,"ĠPass":7124,"Ġinspiration":7125,"ĠHunter":7126,"Ġcalendar":7127,"ĠDiamond":7128,"Ġremoval":7129,"ners":7130,"ĠKap":7131,"Ġconsent":7132,"Ġvisual":7133,"Ġcheese":7134,"ĠTher":7135,"ĠFR":7136,"ĠShanghai":7137,"iah":7138,"ĠCole":7139,"AK":7140,"Ġranking":7141,"Ġcook":7142,"Ġhalftime":7143,"ĠStars":7144,"Ġroutes":7145,"aim":7146,"Ġestablishment":7147,"ĠMug":7148,"Ġsurvivors":7149,"urg":7150,"ĠBrett":7151,"Ġunexpected":7152,"ained":7153,"Ġrarely":7154,"ĠGall":7155,"Ġadvocate":7156,"ĠNad":7157,"Ġ911":7158,"Ġracist":7159,"erer":7160,"ĠRev":7161,"ĠSection":7162,"Ġhelpful":7163,"CT":7164,"agg":7165,"Ġgovernance":7166,"Ġfelony":7167,"Ġoptimistic":7168,"Ġelectoral":7169,"EG":7170,"town":7171,"Ġdaughters":7172,"Ġanswered":7173,"Ġthin":7174,"ĠClassic":7175,"Ġshareholder":7176,"ĠBlake":7177,"ĠFla":7178,"Ġparliamentary":7179,"dy":7180,"Ġcommented":7181,"Ġtri":7182,"Ġglobe":7183,"Ġmandate":7184,"Ġslipped":7185,"ĠTower":7186,"Ġoperated":7187,"gers":7188,"Ġassured":7189,"ĠMartinez":7190,"Ġdesigns":7191,"ĠModel":7192,"Ġstakeholders":7193,"Ġdefended":7194,"Ġseniors":7195,"Ġvacation":7196,"Ġglobally":7197,"ump":7198,"Not":7199,"Ġclip":7200,"Ġarticles":7201,"BR":7202,"km":7203,"ĠFront":7204,"PL":7205,"Ġadoption":7206,"Ġsudden":7207,"Ġframework":7208,"Ġhanging":7209,"gl":7210,"ĠSel":7211,"Ġmoderate":7212,"Ġreverse":7213,"income":7214,"cor":7215,"ĠGB":7216,"Ġphysically":7217,"Ġtransparency":7218,"ĠElectric":7219,"Ġrefugee":7220,"profile":7221,"iva":7222,"ately":7223,"ĠAC":7224,"Ġtransferred":7225,"Ġaffair":7226,"ĠAlaska":7227,"oria":7228,"ĠChange":7229,"Ġrepeat":7230,"Ġscreening":7231,"ender":7232,"ĠCas":7233,"ĠDav":7234,"Ġfocuses":7235,"Ġcommissioner":7236,"Ġupside":7237,"ĠKeep":7238,"ĠBlues":7239,"ently":7240,"Ġaut":7241,"Ġexperiencing":7242,"aman":7243,"Ġapprove":7244,"Ġmil
e":7245,"Ġcheaper":7246,"ĠWind":7247,"ĠStore":7248,"Ġgrabbed":7249,"Ġsons":7250,"Ġfighter":7251,"Ġum":7252,"ĠBased":7253,"don":7254,"Ġconstitution":7255,"finals":7256,"act":7257,"¢":7258,"Ġmill":7259,"Ġorganisations":7260,"ĠToyota":7261,"Ġyuan":7262,"Ġterrorists":7263,"Ġforth":7264,"Ġavailability":7265,"Ġentrance":7266,"Ġvolumes":7267,"Ġmult":7268,"plus":7269,"ĠColumbus":7270,"ĠSummit":7271,"Ġbabies":7272,"ĠMur":7273,"ĠGray":7274,"ĠChar":7275,"ĠButler":7276,"Ġpose":7277,"ĠNatural":7278,"ĠAtt":7279,"Ġdecrease":7280,"Ġtens":7281,"kt":7282,"Ġminds":7283,"Ġimpacted":7284,"Ġchapter":7285,"ĠOp":7286,"ĠHarrison":7287,"ĠRodriguez":7288,"Ġethnic":7289,"Ġtravelling":7290,"ĠBond":7291,"ader":7292,"core":7293,"Ġgallery":7294,"founder":7295,"ĠVill":7296,"Ġdecent":7297,"ĠHistory":7298,"ĠInt":7299,"ĠNa":7300,"ĠHad":7301,"Ġmainstream":7302,"ĠTs":7303,"Ġbottle":7304,"sen":7305,"Ġrecession":7306,"Ġsophomore":7307,"Ġsilence":7308,"cc":7309,"Ġqualifying":7310,"Ġcomplained":7311,"ĠRad":7312,"Ġactively":7313,"Ġbacks":7314,"ĠMusk":7315,"Ġcareful":7316,"Ġmeals":7317,"ĠDor":7318,"Ġmess":7319,"ĠBelgium":7320,"Ġke":7321,"ĠLopez":7322,"Ġbow":7323,"Ġhelicopter":7324,"was":7325,"Ġstone":7326,"kins":7327,"Ġunlike":7328,"Ġcollision":7329,"ĠAlt":7330,"HP":7331,"ĠMason":7332,"has":7333,"Ġclimbed":7334,"Ġindication":7335,"Ġhotels":7336,"Ġloud":7337,"ĠMilan":7338,"kes":7339,"Ġbadly":7340,"Ġtrials":7341,"Ġimpacts":7342,"ĠJane":7343,"Ġcrossed":7344,"Ġdiscussing":7345,"ĠSM":7346,"Ġpopularity":7347,"ĠWant":7348,"fall":7349,"Ġartificial":7350,"ĠBu":7351,"akh":7352,"Ġdominant":7353,"gov":7354,"Ġpremier":7355,"Ġexecution":7356,"gate":7357,"Ġswimming":7358,"Ġchat":7359,"Ġdevastating":7360,"acking":7361,"Ġreception":7362,"urt":7363,"Ġtheater":7364,"Ġgather":7365,"Ġtear":7366,"uro":7367,"Ġdemocratic":7368,"Ġrebels":7369,"Ġlifetime":7370,"Ġradical":7371,"uan":7372,"Ġtechniques":7373,"ache":7374,"ior":7375,"Ġcamps":7376,"Ġtelephone":7377,"ĠDublin":7378,"ĠBrand":7379,"ĠMarcus":7380,"aun":7381,"ĠRec":7382,"Ġ82":7383,"ban":7384,"Ġsafely":7385,"aku":7386,"aki":7387,"Ġbankruptcy":7388,"FF":7389,"Ġformat":7390,"Ġattached":7391,"ĠFame":7392,"ĠEdward":7393,"Ġmerger":7394,"ĠRepresentatives":7395,"izes":7396,"Ġhidden":7397,"Ġval":7398,"zz":7399,"Ġexcess":7400,"Ġscope":7401,"Ġdivorce":7402,"Ġburn":7403,"Ġrequirement":7404,"BB":7405,"ĠHand":7406,"Ġcons":7407,"Ġrisen":7408,"Ġtwitter":7409,"Ġoffseason":7410,"ĠSometimes":7411,"ĠInf":7412,"ĠAng":7413,"uer":7414,"report":7415,"Ġdreams":7416,"Ġ700":7417,"ips":7418,"ĠDream":7419,"Ġgifts":7420,"Ġsomehow":7421,"ĠTur":7422,"ĠRachel":7423,"can":7424,"Ġlog":7425,"ĠMedicaid":7426,"Ġles":7427,"Ġtired":7428,"ĠArkansas":7429,"Ġliquidity":7430,"ĠPhillips":7431,"ĠBTC":7432,"Ġhide":7433,"Ġpun":7434,"ĠRun":7435,"lyn":7436,"ĠUC":7437,"ĠDesign":7438,"ĠDev":7439,"Ġvaluation":7440,"Ġreveals":7441,"ĠChild":7442,"other":7443,"Ġposed":7444,"lee":7445,"Ġships":7446,"ĠTrue":7447,"Ġdescribes":7448,"Ġrunner":7449,"bro":7450,"Ġankle":7451,"Ġod":7452,"ĠAnnual":7453,"CL":7454,"Ġoverhaul":7455,"ned":7456,"Ġbold":7457,"Ġmo":7458,"ĠFalls":7459,"Ġemployed":7460,"ĠGro":7461,"Ġflash":7462,"ĠTD":7463,"Ġnervous":7464,"Ġintegration":7465,"Ġsmartphones":7466,"Ġmovements":7467,"nie":7468,"ition":7469,"ĠThird":7470,"Ģ":7471,"Ġmetres":7472,"Ġeconomist":7473,"omp":7474,"Ġteens":7475,"Ġeveryday":7476,"Ġinterviewed":7477,"Ġbriefly":7478,"],":7479,"uke":7480,"ĠFOX":7481,"Ġunderlying":7482,"ĠLuc":7483,"Ġcourses":7484,"ss":7485,"amed":7486,"°":7487,"ju":7488,"ĠBanks":7489,"Ġoutfit":7490,"illing":7491,"Ġtrafficking":7492,"Ġurging":7493,"Ġbelt":7494,"Ġr
id":7495,"CP":7496,"Ġelderly":7497,"ĠGrowth":7498,"án":7499,"ĠSn":7500,"Ġsurrounded":7501,"Ġsisters":7502,"ĠIslam":7503,"Ġsynd":7504,"ĠCosta":7505,"di":7506,"ĠKl":7507,"Ġmanufacturer":7508,"holders":7509,"Ġelement":7510,"Ġload":7511,"Ġbooked":7512,"Ġaccompanied":7513,"ĠChamber":7514,"Ġbriefing":7515,"Oh":7516,"imi":7517,"ĠDefence":7518,"ĠCurrently":7519,"aking":7520,"Ġhandled":7521,"ĠCD":7522,"ĠBenjamin":7523,"Ġpocket":7524,"ĠKashmir":7525,"Ġlighting":7526,"aps":7527,"Ġ1997":7528,"ech":7529,"Ġaddiction":7530,"Ġbases":7531,"Ġpriorities":7532,"Ġhardly":7533,"ĠQuebec":7534,"ĠEarn":7535,"IES":7536,"ĠZach":7537,"ĠAlong":7538,"MI":7539,"Ġins":7540,"ĠRogers":7541,"ĠKan":7542,"ĠFuture":7543,"Ġtriggered":7544,"ĠUnit":7545,"Ġweighed":7546,"Ġpointing":7547,"Ġchocolate":7548,"ĠBrowns":7549,"ĠISIS":7550,"Ġgoalkeeper":7551,"Ġsaves":7552,"ĠAndre":7553,"burn":7554,"ĠCont":7555,"ĠNetherlands":7556,"Ġpolitically":7557,"ĠAshley":7558,"ĠWhit":7559,"aded":7560,"PH":7561,"Ġborders":7562,"ORE":7563,"Ġally":7564,"Trump":7565,"istan":7566,"ĠHunt":7567,"ĠCancer":7568,"ĠGrace":7569,"ĠTottenham":7570,"Ġ1960":7571,"ĠMarg":7572,"ĠBryan":7573,"ĠAgain":7574,"acing":7575,"Ġarguments":7576,"ĠSouthwest":7577,"Ġvocal":7578,"Ġjudgment":7579,"Ġengaging":7580,"Ġadopt":7581,"Ġrental":7582,"Ġlinebacker":7583,"ĠKardashian":7584,"Ġepisodes":7585,"..":7586,"Ġunt":7587,"Ġvowed":7588,"Ġ79":7589,"ule":7590,"Ġtransit":7591,"Ġoffshore":7592,"Ġsuppliers":7593,"Ġarguing":7594,"Ġsatellite":7595,"ĠLind":7596,"ĠTaliban":7597,"Buy":7598,"ĠCaribbean":7599,"ĠBarry":7600,"Ġauthors":7601,"ĠWolf":7602,"Ġviewing":7603,"ĠCubs":7604,"From":7605,"Ġ%":7606,"Ġcurrencies":7607,"Why":7608,"ĠBroncos":7609,"Ġtrick":7610,"Ġdiesel":7611,"ĠLiberal":7612,"FL":7613,"Ġtopics":7614,"Ġretain":7615,"ĠLiberty":7616,"Ġacquisitions":7617,"ced":7618,"Ġfre":7619,"Ġfleet":7620,"Ġcopper":7621,"ĠPot":7622,"jen":7623,"ĠElliott":7624,"ĠPyongyang":7625,"Ġobject":7626,"ĠUse":7627,"Ġmutual":7628,"MP":7629,"Ġev":7630,"Ġdeny":7631,"ĠEveryone":7632,"lling":7633,"Ġpays":7634,"Ġdrought":7635,"Ġcorn":7636,"Ġworkplace":7637,"rig":7638,"ĠMn":7639,"Ġadvisory":7640,"ĠCat":7641,"Ġchronic":7642,"ĠSteelers":7643,"Ġboxes":7644,"ĠNap":7645,"Ġdemonstrated":7646,"ĠTournament":7647,"Ġsymbol":7648,"ĠAfghan":7649,"ĠTan":7650,"ired":7651,"ĠEv":7652,"ĠConsumer":7653,"Ġmoral":7654,"ĠAdditional":7655,"Ġwebsites":7656,"Ġoccasions":7657,"Ġfate":7658,"Ġpitcher":7659,"Ġtaxpayers":7660,"Ġdeemed":7661,"ĠLibya":7662,"Ġpriced":7663,"Ġdistributed":7664,"ĠForum":7665,"Ġrice":7666,"Ġbloc":7667,"Ġprovisions":7668,"agh":7669,"Ġpen":7670,"Ġattracted":7671,"ĠEdmonton":7672,"Ġthousand":7673,"Ġpainting":7674,"Ġil":7675,"Ġcourtesy":7676,"Ġeliminate":7677,"Ġacc":7678,"Ġmeters":7679,"Ġreflected":7680,"Ġcomponent":7681,"Every":7682,"Ġsells":7683,"Ġfault":7684,"Ġburned":7685,"ĠKirk":7686,"ĠAnna":7687,"Ġappeals":7688,"Ġeggs":7689,"Ġfrequent":7690,"Ġtrigger":7691,"Ġrevised":7692,"ĠAngela":7693,"Ġ81":7694,"Ġsingles":7695,"Ġviral":7696,"Ġworries":7697,"ĠShould":7698,"profit":7699,"Ġraises":7700,"ĠBryant":7701,"ĠProduct":7702,"Ġtenure":7703,"Ġdiabetes":7704,"Ġcolour":7705,"azz":7706,"ĠGirls":7707,"Ġpractical":7708,"Ġblind":7709,"ancing":7710,"pictured":7711,"Ġfinale":7712,"ĠElection":7713,"Ġathletic":7714,"Ġpromoted":7715,"Ġflowers":7716,"Ġtrains":7717,"ario":7718,"Ġsufficient":7719,"IE":7720,"Ġexamples":7721,"Ġshed":7722,"Ġbirds":7723,"Ġchaos":7724,"Ġwound":7725,"Ġrocket":7726,"Ġwet":7727,"Ġsample":7728,"ĠNag":7729,"ĠOliver":7730,"Ġscrutiny":7731,"ĠSeven":7732,"ĠRoman":7733,"ĠFred":7734,"Ġweird":7735,"ĠTam":7736,"ĠSupport":7737,"
ĠNathan":7738,"Ġstudying":7739,"Ġintroduction":7740,"Ġtons":7741,"cer":7742,"aus":7743,"ION":7744,"Ġcritic":7745,"ĠAh":7746,"alo":7747,"pur":7748,"Ġstorms":7749,"ĠMission":7750,"Ġcredits":7751,"Ġgrants":7752,"Ġcomp":7753,"Ġhearts":7754,"part":7755,"Ġpin":7756,"Ġsubsequent":7757,"Ġmad":7758,"ĠSacramento":7759,"woman":7760,"from":7761,"Ġoutcomes":7762,"Ġoldest":7763,"Ġdesperate":7764,"ĠTal":7765,"ĠDJ":7766,"ward":7767,"Ġaudiences":7768,"Ġimportantly":7769,"ĠEmily":7770,"sk":7771,"ĠHeat":7772,"ĠType":7773,"ĠPeace":7774,"Ġsuspicious":7775,"aly":7776,"ĠGET":7777,"ĠCAP":7778,"dis":7779,"ĠIraqi":7780,"ĠReed":7781,"Ġstrange":7782,"ĠParent":7783,"900":7784,"Ġglad":7785,"ĠTroy":7786,"ĠShort":7787,"Ġheritage":7788,"Ġarriving":7789,"ingly":7790,"Ġtransformation":7791,"Ġlease":7792,"Ġcollapsed":7793,"cha":7794,"ĠPatrol":7795,"Ġcomputers":7796,"Ġprinciples":7797,"Ġsporting":7798,"ĠHughes":7799,"mile":7800,"ĠCit":7801,"Ġdrilling":7802,"ĠBox":7803,"ÃŁ":7804,"bre":7805,"ĠOverall":7806,"Ġopioid":7807,"Ġdelighted":7808,"Ġhonored":7809,"ĠCold":7810,"Ġunions":7811,"ĠCou":7812,"ĠCircuit":7813,"Ġblast":7814,"sson":7815,"ĠHernandez":7816,"ĠLooking":7817,"Ġlegally":7818,"ĠWalmart":7819,"bridge":7820,"Ġmat":7821,"rad":7822,"ids":7823,"Ġdining":7824,"Ġrebound":7825,"abad":7826,"ĠRom":7827,"Ġimpose":7828,"ĠAlpha":7829,"ĠWeekly":7830,"TER":7831,"ĠJam":7832,"Ġabsolute":7833,"Ġinventory":7834,"ĠBilly":7835,"ĠKaren":7836,"ĠFriends":7837,"ĠCent":7838,"ĠVikings":7839,"ĠMuch":7840,"cell":7841,"ads":7842,"Ġph":7843,"Ġkiller":7844,"ĠMembers":7845,"Ġshooter":7846,"ĠInvestigators":7847,"ĠJoshua":7848,"Ġparticipated":7849,"Ġinnocent":7850,"ĠRichmond":7851,"itor":7852,"ĠDal":7853,"ĠOperator":7854,"Ġmakeup":7855,"Ġconf":7856,"ĠNEWS":7857,"ĠDef":7858,"Ġchase":7859,"ĠCost":7860,"mont":7861,"\":":7862,"Ġarrangements":7863,"stein":7864,"Ġretire":7865,"ĠLuis":7866,"Ġrenewed":7867,"ĠTownship":7868,"Ġchecked":7869,"arts":7870,"ĠCash":7871,"Ġcentres":7872,"chers":7873,"ĠSolutions":7874,"Ġlegend":7875,"ige":7876,"most":7877,"osed":7878,"ĠPor":7879,"Ġpremiere":7880,"FS":7881,"Ġmissiles":7882,"ĠLang":7883,"Ġsing":7884,"best":7885,"Ġtail":7886,"Ġriders":7887,"Picture":7888,"zen":7889,"ĠKent":7890,"Ġtransform":7891,"Ġwildlife":7892,"Ġsmoking":7893,"Ġpreseason":7894,"ĠLucas":7895,"ĠAnne":7896,"owski":7897,"Ġtape":7898,"Ġdisplayed":7899,"Ġforum":7900,"Ġanonymity":7901,"ĠIndianapolis":7902,"hips":7903,"acc":7904,"ĠMoreover":7905,"lers":7906,"area":7907,"ĠIndeed":7908,"Ġconducting":7909,"Ġinfection":7910,"Ġdealt":7911,"OB":7912,"asing":7913,"ĠGaza":7914,"itter":7915,"ĠKa":7916,"Ġhopeful":7917,"ĠSnow":7918,"Ġentitled":7919,"Ġaffecting":7920,"Ġeager":7921,"Ġcircle":7922,"Ġlaugh":7923,"ĠProsecutors":7924,"ĠDur":7925,"Ġbarriers":7926,"ĠPoll":7927,"oun":7928,"ĠPalm":7929,"chi":7930,"Ġsamples":7931,"Ġcompromise":7932,"atter":7933,"Ġenormous":7934,"Ġé":7935,"coming":7936,"ĠPharmaceutical":7937,"Ġrank":7938,"Let":7939,"Ġtransgender":7940,"ĠCloud":7941,"FO":7942,"ĠBor":7943,"Ġbonus":7944,"Ġordinary":7945,"ĠPres":7946,"ĠHIV":7947,"ires":7948,"OSE":7949,"Ġdancing":7950,"ĠHD":7951,"Ġversions":7952,"Ġ88":7953,"rate":7954,"Ġtackles":7955,"Ġknock":7956,"ĠEmma":7957,"Ġmotivated":7958,"ĠBennett":7959,"ĠBurn":7960,"Ġgrid":7961,"Ġembrace":7962,"ĠSpurs":7963,"Ġflows":7964,"ĠGer":7965,"Ġsponsored":7966,"Ġsurvival":7967,"ching":7968,"Ġ1995":7969,"Ġreward":7970,"Ġdepends":7971,"Ġpostseason":7972,"Ġloaded":7973,"Ġneutral":7974,"ĠPop":7975,"BL":7976,"Ġrevolution":7977,"ĠFreedom":7978,"Ġrecovering":7979,"Ġrequiring":7980,"ALL":7981,"ARE":7982,"Ġmini":7983,"lt":7984,"ĠFDA":7
985,"Ġcarpet":7986,"ĠPrior":7987,"Ġadmission":7988,"ĠEver":7989,"ĠTribune":7990,"ĠRonaldo":7991,"Ġthick":7992,"Ġlanes":7993,"Ġ84":7994,"ĠMemphis":7995,"Ġopt":7996,"BO":7997,"Ġfaculty":7998,"ĠChad":7999,"ĠSUV":8000,"ĠHen":8001,"Ġeste":8002,"ĠHu":8003,"ĠAgriculture":8004,"store":8005,"ĠDrug":8006,"inter":8007,"Ġ1996":8008,"ident":8009,"Ġbackup":8010,"ĠHonda":8011,"ĠHope":8012,"oes":8013,"ums":8014,"amer":8015,"Ġbreath":8016,"Ġ110":8017,"Ġjoke":8018,"ĠAld":8019,"Ġwondering":8020,"ĠAssad":8021,"ĠRem":8022,"Ġfundraising":8023,"pot":8024,"è":8025,"Ġquestioning":8026,"Ġpent":8027,"ĠMoney":8028,"ĠMedicine":8029,"wick":8030,"ĠKnights":8031,"Ġbatting":8032,"ĠMos":8033,"Ġdesignated":8034,"isse":8035,"Ġspotlight":8036,"Ġlake":8037,"Ġcaution":8038,"Ġinmates":8039,"Ġlap":8040,"CE":8041,"ĠJavascript":8042,"ĠDeutsche":8043,"ĠFargo":8044,"Ġguaranteed":8045,"borough":8046,"Ġfunctions":8047,"ĠElementary":8048,"ĠChuck":8049,"Ġpitched":8050,"ĠKrist":8051,"Ġsteal":8052,"Ġchips":8053,"Ġalarm":8054,"Ġbeloved":8055,"scale":8056,"Ġassaulted":8057,"ĠPentagon":8058,"Ġtemporarily":8059,"Ġ93":8060,"Ġ>":8061,"ĠPortugal":8062,"ti":8063,"HL":8064,"Ġdecreased":8065,"Ġexistence":8066,"Ġisolated":8067,"Ġdeposit":8068,"Ġstudied":8069,"\")":8070,"Ġtrophy":8071,"ĠBrooks":8072,"Ġbattling":8073,"Ġweaker":8074,"ĠPrivate":8075,"ĠAccess":8076,"Ġvirtually":8077,"Ġshortage":8078,"Ġgaining":8079,"Ġbathroom":8080,"TON":8081,"Ġconcerning":8082,"Ġengineer":8083,"Ġbread":8084,"Ġdemonstrate":8085,"ĠDh":8086,"Ġhorses":8087,"Ġintersection":8088,"Ġcolors":8089,"Ġdelegation":8090,"Ġnotable":8091,"Ġwithdrawal":8092,"ĠDennis":8093,"Ġlocally":8094,"Ġcoastal":8095,"Ġcomply":8096,"ĠMoh":8097,"ĠAlbert":8098,"Ġclosest":8099,"ĠCITY":8100,"Ġ83":8101,"Ġcancelled":8102,"ĠðŁ":8103,"Ġsharply":8104,"RS":8105,"Ġproductivity":8106,"Ġbasket":8107,"SS":8108,"Ġadmit":8109,"ool":8110,"ination":8111,"ĠBB":8112,"Ġsur":8113,"ĠSteel":8114,"ĠTed":8115,"ĠPac":8116,"Ġpatterns":8117,"Ġlisting":8118,"Ġreplacing":8119,"ĠPradesh":8120,"Ġroots":8121,"Ġbroker":8122,"ĠWriting":8123,"Ġsued":8124,"Ġorganised":8125,"ĠThanksgiving":8126,"ĠNOT":8127,"Ġjournalism":8128,"uel":8129,"Ġkilometers":8130,"Ġhunt":8131,"berry":8132,"ĠMother":8133,"Ġlegitimate":8134,"Ġinput":8135,"ĠRel":8136,"ĠGuardian":8137,"Ar":8138,"Ġtransported":8139,"Ġbedroom":8140,"ashing":8141,"Ġbats":8142,"Ġcleaning":8143,"Ġwrapped":8144,"Pacific":8145,"Ġfence":8146,"Ġtestified":8147,"Ġ1994":8148,"Ġinterference":8149,"Ġmatching":8150,"Ġexpression":8151,"eta":8152,"ĠSpencer":8153,"Ġstrategist":8154,"who":8155,"Ġvictories":8156,"Ġ2022":8157,"Ġstakes":8158,"Ġbuses":8159,"ĠHousing":8160,"Ġeditorial":8161,"Ġ86":8162,"ĠBishop":8163,"Ġfrustrated":8164,"Ġappearing":8165,"http":8166,"IGHT":8167,"Ġmemo":8168,"Ġinsiders":8169,"Even":8170,"Ġclassroom":8171,"Ġchef":8172,"aining":8173,"].":8174,"ĠMcD":8175,"Ġ87":8176,"ĠPunjab":8177,"Ġancient":8178,"Ġresolved":8179,"Ġdying":8180,"Ġdestruction":8181,"Ġgoverning":8182,"Ġrestructuring":8183,"ĠPick":8184,"Ġmunicipal":8185,"Ġengines":8186,"ĠHudson":8187,"Æ":8188,"Ġrepeal":8189,"standing":8190,"Ġbound":8191,"ĠOS":8192,"ĠCommonwealth":8193,"Ġdescription":8194,"Ġhouseholds":8195,"Ġmal":8196,"Ġstopping":8197,"equ":8198,"Ġregulator":8199,"Ġcontaining":8200,"Ġremoving":8201,"Ġwithdraw":8202,"Ġburied":8203,"Ġlists":8204,"ĠGil":8205,"Ġlowered":8206,"Ġformally":8207,"ĠRound":8208,"asi":8209,"¥":8210,"lett":8211,"Ġprogressive":8212,"ĠFalcons":8213,"ĠRaw":8214,"gun":8215,"Ġcontributing":8216,"Ġhunting":8217,"Ġvalid":8218,"Ġexception":8219,"ĠPlayers":8220,"ĠTra":8221,"Ġracism":8222,"hing":8223,"chen"
:8224,"Ġdifferently":8225,"Ġchampionships":8226,"ĠEng":8227,"ĠNO":8228,"ĠAuto":8229,"ĠErdogan":8230,"iding":8231,"Ġwarming":8232,"Ġcivilian":8233,"ĠDam":8234,"Ġfantasy":8235,"ĠNav":8236,"itions":8237,"ĠDrew":8238,"ĠNancy":8239,"Ġtrapped":8240,"ĠRussians":8241,"ĠIC":8242,"Ġflexibility":8243,"ular":8244,"Ġviolated":8245,"ipped":8246,"Ġgarage":8247,"ĠDeep":8248,"Ġpraise":8249,"ĠLab":8250,"ĠPlayer":8251,"Ġjudicial":8252,"Ġdonate":8253,"Ġseparated":8254,"Ġreleases":8255,"nik":8256,"Ġexplanation":8257,"aph":8258,"Ġloyal":8259,"Ġstrongest":8260,"ĠShar":8261,"Ġrescued":8262,"Ġambitious":8263,"Ġclimb":8264,"Ġscared":8265,"Ġignored":8266,"cut":8267,"Ġstole":8268,"Ġweakness":8269,"ĠRidge":8270,"oa":8271,"LA":8272,"Ġdep":8273,"ĠPowell":8274,"Do":8275,"Ġprotein":8276,"Ġreiterated":8277,"ĠCox":8278,"aling":8279,"ĠUnlike":8280,"ĠKane":8281,"ĠMcConnell":8282,"Ġshowcase":8283,"Ġuniform":8284,"ower":8285,"Ġdiscover":8286,"stop":8287,"ipper":8288,"Ġtreatments":8289,"Ġgrocery":8290,"Ġsubscribers":8291,"lock":8292,"ple":8293,"Ġflew":8294,"ania":8295,"Ġstepping":8296,"ĠSoviet":8297,"Ġconsultant":8298,"ags":8299,"ĠLim":8300,"Ġ91":8301,"ĠCode":8302,"ports":8303,"box":8304,"Ġlakh":8305,"Ġreminder":8306,"ym":8307,"ĠTravis":8308,"Ġpure":8309,"now":8310,"ĠVR":8311,"Ġachievement":8312,"ĠEmirates":8313,"ĠThunder":8314,"Ġmerely":8315,"ĠCa":8316,"ĠAverage":8317,"ĠDa":8318,"Ġtopped":8319,"ĠCurry":8320,"Ġchemicals":8321,"Ġamendment":8322,"ĠBorder":8323,"ĠBat":8324,"Ġ130":8325,"Ġprogramming":8326,"Ġtele":8327,"ĠKarl":8328,"Ġaveraged":8329,"ĠSpe":8330,"world":8331,"PG":8332,"Ġfights":8333,"ĠPrincess":8334,"ĠCIA":8335,"ĠAbe":8336,"Ġacted":8337,"only":8338,"Ġinsight":8339,"Ġathlete":8340,"ĠTar":8341,"commerce":8342,"Ġaveraging":8343,"cr":8344,"ĠPalestinians":8345,"Well":8346,"Ġbull":8347,"Ġchoosing":8348,"Ġsurely":8349,"ĠSecret":8350,"Ġteammate":8351,"ĠAmendment":8352,"ĠBirmingham":8353,"Ġexcitement":8354,"strong":8355,"ĠSin":8356,"Ġdamages":8357,"rated":8358,"Ġrankings":8359,"Ġconservation":8360,"home":8361,"erm":8362,"ield":8363,"Ġdisorder":8364,"acher":8365,"Ġnaturally":8366,"atur":8367,"Ġpackages":8368,"Ġapproaches":8369,"icks":8370,"ourn":8371,"Ġodd":8372,"Ġshore":8373,"ĠBeing":8374,"Ġmagic":8375,"Ġtourist":8376,"largest":8377,"Ġwhenever":8378,"Ġlenders":8379,"Ġegg":8380,"ĠChair":8381,"Ġlets":8382,"Ġwarnings":8383,"į":8384,"Ġpol":8385,"Ġdrag":8386,"ĠAmb":8387,"ĠCle":8388,"ĠLouisville":8389,"ĠShaw":8390,"lands":8391,"Ġanthem":8392,"ĠTrail":8393,"Ġaccepting":8394,"anger":8395,"good":8396,"ĠBroad":8397,"ĠLebanon":8398,"ĠMillion":8399,"ĠHenderson":8400,"Ġwh":8401,"Ġdust":8402,"Ġ92":8403,"ĠMend":8404,"Ġchecking":8405,"ĠCow":8406,"sized":8407,"Ġautomatic":8408,"Ġcelebrates":8409,"Ġarena":8410,"Ġfinger":8411,"ĠHarvard":8412,"Ġfrustration":8413,"Ġstrict":8414,"Ġpreserve":8415,"Ġsleeping":8416,"Ġconverted":8417,"Ġinsights":8418,"Ġtra":8419,"Ġjailed":8420,"Ġchamber":8421,"Ġtoxic":8422,"ading":8423,"ĠTriple":8424,"grade":8425,"ĠRest":8426,"ĠHoly":8427,"oper":8428,"Ġdesk":8429,"Ġmatchup":8430,"Ġsteep":8431,"ĠGot":8432,"lay":8433,"ĠCab":8434,"aked":8435,"ĠFoster":8436,"Ġrunners":8437,"ĠNA":8438,"Ġdestroy":8439,"Ġsupportive":8440,"ĠRacing":8441,"Ġtrademark":8442,"Ġjacket":8443,"Ġhorror":8444,"ĠAle":8445,"Ġass":8446,"Ġsch":8447,"abb":8448,"Ġplanes":8449,"Ġimpression":8450,"ĠEarly":8451,"ĠPompe":8452,"Ġking":8453,"Ġsilent":8454,"ĠCuba":8455,"Ġmedication":8456,"ences":8457,"list":8458,"ailing":8459,"WA":8460,"ella":8461,"Ġprop":8462,"Ġhalt":8463,"Ġslowing":8464,"ĠFoods":8465,"Ġanonymous":8466,"kh":8467,"Ġtraveled":8468,"Ġcommunicate":8469,
"Ġter":8470,"ĠHockey":8471,"ĠRobin":8472,"Ġswept":8473,"Ġclinic":8474,"ration":8475,"len":8476,"Ġau":8477,"Ġcareers":8478,"ĠSound":8479,"Ġaddresses":8480,"China":8481,"ĠSr":8482,"Ġexhibit":8483,"ĠMotors":8484,"ĠIl":8485,"Ġinstall":8486,"ĠOkay":8487,"Ġ>>":8488,"hood":8489,"stand":8490,"Ġaudit":8491,"Ġcake":8492,"Ġflames":8493,"bel":8494,"ĠMust":8495,"ĠManafort":8496,"Ġcommodity":8497,"night":8498,"ĠRoom":8499,"ĠLanka":8500,"Ġcommander":8501,"ln":8502,"Ġdatabase":8503,"ĠSet":8504,"Ġgraduated":8505,"ĠTarget":8506,"Ġoutbreak":8507,"rou":8508,"ĠPope":8509,"ĠEqu":8510,"Ġpolling":8511,"Ġdig":8512,"Ġbrutal":8513,"ĠBarn":8514,"Ġdefinition":8515,"Ġpit":8516,"Ġpickup":8517,"ĠBitcoin":8518,"ĠReid":8519,"Ġloving":8520,"ĠHerald":8521,"ĠCanadians":8522,"Ġneighbor":8523,"Ġdies":8524,"ione":8525,"ĠRef":8526,"big":8527,"Ġguards":8528,"including":8529,"ente":8530,"Ġpartially":8531,"Image":8532,"Ġbulk":8533,"Ġslot":8534,"ĠNorthwest":8535,"ĠBarclays":8536,"Ġairlines":8537,"iver":8538,"isi":8539,"Ġsubsidiary":8540,"Ġcont":8541,"ĠDaniels":8542,"Ġscript":8543,"Ġunfair":8544,"Ġscreens":8545,"Ġprof":8546,"ĠIrma":8547,"Ġ1992":8548,"Ġmandatory":8549,"ĠSant":8550,"Ġsuspicion":8551,"NES":8552,"ĠLauren":8553,"igen":8554,"Ġprevention":8555,"Ġtension":8556,"ema":8557,"Ġtasks":8558,"Ġshake":8559,"Ġexplosive":8560,"Ġaffects":8561,"Ġmum":8562,"ĠDog":8563,"rer":8564,"Ġopted":8565,"Ġtrio":8566,"Ġlesson":8567,"Ġautomotive":8568,"where":8569,"ĠMontgomery":8570,"Ġcouples":8571,"Ġ89":8572,"AF":8573,"Ġinfo":8574,"ĠForm":8575,"Ġspectrum":8576,"Ġbands":8577,"Ġokay":8578,"Ġstroke":8579,"ĠNetanyahu":8580,"Ġwealthy":8581,"ĠAround":8582,"ĠGlenn":8583,"sec":8584,"there":8585,"ickets":8586,"ĠBudget":8587,"ĠBMW":8588,"Ġflagship":8589,"rier":8590,"Ġpodcast":8591,"Ġpursuing":8592,"Ġpos":8593,"ĠIslands":8594,"ĠUrban":8595,"page":8596,"Ġemotions":8597,"ided":8598,"Ġdividends":8599,"Ġboom":8600,"Ġaccusing":8601,"ird":8602,"ĠNam":8603,"ava":8604,"Ġwishes":8605,"ĠNy":8606,"ĠStanford":8607,"Ġcriteria":8608,"ĠJews":8609,"Ġengineers":8610,"Ġaccuracy":8611,"Ġdisplays":8612,"Ġdeserves":8613,"ridge":8614,"omm":8615,"aur":8616,"Ġdramatically":8617,"Ġunity":8618,"speed":8619,"Ġdeclining":8620,"Ġpermits":8621,"ĠKn":8622,"Ġconsulting":8623,"aux":8624,"ATE":8625,"ĠWat":8626,"ĠEditor":8627,"sy":8628,"urn":8629,"ĠUsing":8630,"asc":8631,"ital":8632,"Ġcre":8633,"quality":8634,"Ġce":8635,"Ġenemy":8636,"Ġoffence":8637,"icket":8638,"ĠDick":8639,"ĠTH":8640,"ĠChampionships":8641,"Ġoverwhelming":8642,"rib":8643,"ku":8644,"rap":8645,"Ġhomer":8646,"acion":8647,"member":8648,"erv":8649,"aney":8650,"MB":8651,"eded":8652,"Ġpunishment":8653,"Ġnegotiate":8654,"ĠFile":8655,"stream":8656,"ĠHur":8657,"Ġnose":8658,"ĠFab":8659,"iter":8660,"Ġpainful":8661,"ITY":8662,"eren":8663,"Ġcollecting":8664,"Additional":8665,"Ġentrepreneurs":8666,"bal":8667,"Ġexploring":8668,"Ġguitar":8669,"Ġpartnerships":8670,"Ġfurniture":8671,"Ġauthorized":8672,"Ġeasing":8673,"shirt":8674,"ĠGross":8675,"Ġpolitician":8676,"ĠSimpson":8677,"Ġdrone":8678,"ĠKatie":8679,"Ġprofitability":8680,"ĠNHS":8681,"ĠSierra":8682,"ĠNorway":8683,"ASHINGTON":8684,"ific":8685,"Ġcondemned":8686,"team":8687,"ĠNebraska":8688,"Ġthrilled":8689,"iller":8690,"Ġpatrol":8691,"ĠWR":8692,"orm":8693,"Ġspectacular":8694,"ĠKnight":8695,"ĠTravel":8696,"nam":8697,"Ġmuscle":8698,"ĠRain":8699,"ĠColombia":8700,"Ġnursing":8701,"Ġmigration":8702,"ĠMitch":8703,"Ġreleasing":8704,"ĠBesides":8705,"ĠMul":8706,"Ġheadline":8707,"Ġcontemporary":8708,"Ġdev":8709,"ĠChan":8710,"Ġindicates":8711,"ĠAp":8712,"ĠLt":8713,"ĠMarvel":8714,"Ġremembered":8715,"®":8716,
"ĠForces":8717,"ĠColin":8718,"ĠGabriel":8719,"Ġobjects":8720,"ĠRHP":8721,"kar":8722,"ĠKo":8723,"Ġsignals":8724,"Ġinner":8725,"real":8726,"RO":8727,"Ġromantic":8728,"cat":8729,"ĠKel":8730,"Ġgut":8731,"ĠBoys":8732,"Ġyoungest":8733,"ĠCeltics":8734,"Ġslated":8735,"Ġremind":8736,"Ġproductive":8737,"set":8738,"Co":8739,"ĠBailey":8740,"Ġrenewable":8741,"ĠCarson":8742,"ĠDj":8743,"ĠKos":8744,"Ġurge":8745,"Ġfin":8746,"Ġpursuit":8747,"ĠCON":8748,"ĠChapter":8749,"Ġpal":8750,"Ġgate":8751,"ĠPackers":8752,"ĠReports":8753,"ĠRugby":8754,"ĠMasters":8755,"MO":8756,"Ġ98":8757,"Ġcatches":8758,"ĠAgreement":8759,"ĠTillerson":8760,"ĠIce":8761,"Ġrumors":8762,"ĠLeonard":8763,"ĠDolphins":8764,"ĠLP":8765,"top":8766,"ĠCrist":8767,"ĠHon":8768,"Ġblaze":8769,"Ġrhetoric":8770,"ands":8771,"ady":8772,"David":8773,"igh":8774,"Ġbuzz":8775,"ĠStrong":8776,"Ġshocking":8777,"ĠRh":8778,"Ġnegotiating":8779,"Ġtender":8780,"ĠJohnny":8781,"ĠMario":8782,"Ġ97":8783,"ĠHeritage":8784,"Ġexists":8785,"Ġprayers":8786,"Ġlengthy":8787,"Ġsafer":8788,"ĠHalloween":8789,"ĠJared":8790,"ĠConnect":8791,"Ġbump":8792,"Ġstrain":8793,"Ġfilling":8794,"Ġtrauma":8795,"Ġcompleting":8796,"cht":8797,"Ġkillings":8798,"anne":8799,"GE":8800,"ĠRescue":8801,"Ġdealers":8802,"Ġlocals":8803,"ĠVictor":8804,"Ġtragic":8805,"Ġdelivers":8806,"orts":8807,"Ġrugby":8808,"Ġinstallation":8809,"asa":8810,"ĠBart":8811,"Ġjournal":8812,"school":8813,"ĠCome":8814,"ĠVeterans":8815,"Sun":8816,"Ġcrowds":8817,"Ġtransparent":8818,"Ġimplications":8819,"ĠHuawei":8820,"sex":8821,"Ġrallied":8822,"Ġresponses":8823,"Ġdebris":8824,"Ġconvention":8825,"Ġmothers":8826,"BE":8827,"ĠRoute":8828,"Ġrebel":8829,"ĠEmmanuel":8830,"aster":8831,"Ġunderstands":8832,"pound":8833,"ĠCastle":8834,"Ġ2021":8835,"rik":8836,"ĠGR":8837,"Ġconvince":8838,"ault":8839,"Ġpassionate":8840,"ĠSciences":8841,"Ġarrives":8842,"idad":8843,"Ġcelebrities":8844,"ends":8845,"ĠFans":8846,"Ġdish":8847,"ĠCorps":8848,"hat":8849,"Ġemployer":8850,"ĠHy":8851,"Ġpowered":8852,"Ġgrandmother":8853,"ĠFL":8854,"oured":8855,"VE":8856,"ĠInst":8857,"ĠPerez":8858,"Ġtune":8859,"Ġcitizenship":8860,"Ġignore":8861,"Ġdoubles":8862,"IB":8863,"Ġprogrammes":8864,"inda":8865,"Ġentities":8866,"ĠInterior":8867,"Ġprompting":8868,"Ġwire":8869,"Ġtheatre":8870,"%)":8871,"Ġheels":8872,"ĠJu":8873,"Ġdeposits":8874,"Ġtrash":8875,"mond":8876,"she":8877,"iana":8878,"Ġislands":8879,"ĠTommy":8880,"Ġpub":8881,"Ġdiscipline":8882,"ĠSW":8883,"Ġmusicians":8884,"Ġembassy":8885,"ĠQB":8886,"hander":8887,"UES":8888,"ĠFerguson":8889,"Ġblocking":8890,"ahn":8891,"Ġfines":8892,"Ġtactics":8893,"Ġbullet":8894,"Ġequipped":8895,"Ġescaped":8896,"ĠSil":8897,"ĠPack":8898,"ĠAthletic":8899,"ĠMic":8900,"ĠDoes":8901,"ĠCarr":8902,"ĠChargers":8903,"ĠKyl":8904,"Ġzones":8905,"µ":8906,"iki":8907,"Ġgreatly":8908,"ĠMD":8909,"Ġimmigrant":8910,"ĠConstruction":8911,"ĠBorn":8912,"iment":8913,"ĠWade":8914,"Ġvisa":8915,"Ġgenuine":8916,"Ġelectronics":8917,"ĠSat":8918,"Ġsponsors":8919,"ĠMontana":8920,"Ġspell":8921,"ĠSachs":8922,"ĠEt":8923,"Ġfoster":8924,"Ġlocker":8925,"Ġexplaining":8926,"ĠAge":8927,"Ġgunman":8928,"Ġsauce":8929,"Ġcry":8930,"Ġstimulus":8931,"Ġarray":8932,"Ġcompare":8933,"Ġboats":8934,"Ġext":8935,"iders":8936,"ĠAst":8937,"ĠParks":8938,"ester":8939,"Ġ94":8940,"Ġrelating":8941,"Ġvegetables":8942,"Ġaccountable":8943,"Ġhyper":8944,"ĠWim":8945,"Ġnewest":8946,"ĠRome":8947,"ĠChancellor":8948,"CBS":8949,"Ġbusinessman":8950,"ĠDelaware":8951,"Ġlands":8952,"court":8953,"aria":8954,"Ġapproaching":8955,"cker":8956,"ĠSalt":8957,"ĠMak":8958,"Ġtreating":8959,"Ġsubsequently":8960,"ĠEll":8961,"xton":8962,"Ġ180":896
3,"Ġdetermination":8964,"ĠSalman":8965,"ĠJoel":8966,"Ġclassified":8967,"Ġspan":8968,"Ġearthquake":8969,"ranked":8970,"Ġ96":8971,"ĠTiger":8972,"Ġadvocacy":8973,"mit":8974,"Ġcolleges":8975,"ĠYeah":8976,"ĠCaptain":8977,"Ġorange":8978,"Ġprojections":8979,"Ġelectrical":8980,"ĠMA":8981,"olog":8982,"ĠNewcastle":8983,"oppers":8984,"Ġrepresentation":8985,"Ġlawsuits":8986,"just":8987,"aced":8988,"ĠRace":8989,"ĠAqu":8990,"ĠBills":8991,"Ġexclusively":8992,"ĠProfile":8993,"Ġhometown":8994,"ĠStan":8995,"Ġstarring":8996,"Ġdeciding":8997,"ĠRating":8998,"ĠMedicare":8999,"ĠTransport":9000,"Ġmystery":9001,"ĠTa":9002,"ĠPad":9003,"ĠSwedish":9004,"ĠCarroll":9005,"about":9006,"Ġtorn":9007,"Ġnurse":9008,"NE":9009,"Ġwaited":9010,"ĠJeffrey":9011,"ĠUntil":9012,"Ġbone":9013,"ĠBobby":9014,"Ġpronounced":9015,"Ġpharmaceutical":9016,"ĠGallery":9017,"ĠMatch":9018,"Ġeconomists":9019,"ĠMarketing":9020,"face":9021,"ĠPetroleum":9022,"ories":9023,"ĠMets":9024,"ĠCore":9025,"billion":9026,"Ġexamination":9027,"ĠPorter":9028,"2016":9029,"Ġgolden":9030,"Ġsem":9031,"ĠDuterte":9032,"ĠJefferson":9033,"ĠTehran":9034,"ĠLeicester":9035,"ĠDA":9036,"Ġadapt":9037,"ĠDame":9038,"ĠRic":9039,"Ġunchanged":9040,"ect":9041,"Ġsections":9042,"kg":9043,"igned":9044,"Ġfilings":9045,"Ġreact":9046,"Ġurgent":9047,"Ġvessels":9048,"Ġspark":9049,"Ġbutter":9050,"ĠCons":9051,"Ġstating":9052,"Ġcorporations":9053,"ĠHus":9054,"Ġdamaging":9055,"raw":9056,"Ġequality":9057,"Two":9058,"ĠMills":9059,"iu":9060,"Ġobligation":9061,"ĠBrook":9062,"arian":9063,"Re":9064,"Ġphotographs":9065,"Ġepic":9066,"ĠStudent":9067,"ĠTherefore":9068,"Ġgod":9069,"ĠFILE":9070,"iqu":9071,"Ġdescribing":9072,"Ġproceed":9073,"Ġcas":9074,"ĠKat":9075,"ĠBra":9076,"Ġadequate":9077,"Ġpassage":9078,"Ġthanked":9079,"USA":9080,"ĠNeither":9081,"ĠLegislature":9082,"Ġfinances":9083,"Ġinst":9084,"ĵ":9085,"ĠAngels":9086,"Ġvet":9087,"ĠDead":9088,"Ex":9089,"Ġkicks":9090,"force":9091,"Ġsoy":9092,"ĠWindsor":9093,"Ġenhanced":9094,"Ġ1993":9095,"ĠCzech":9096,"Ġgradually":9097,"ĠMagic":9098,"Ġshadow":9099,"Ġneighborhoods":9100,"ĠRivers":9101,"Ġrapper":9102,"ĠGirl":9103,"ĠRot":9104,"Ġcrackdown":9105,"fish":9106,"Ġpreventing":9107,"Ġproduces":9108,"ĠMi":9109,"Ġnotified":9110,"Ġunderground":9111,"WE":9112,"Ġadmits":9113,"Ġboxing":9114,"Ġrefer":9115,"Ġcommitments":9116,"ĠWoman":9117,"Ġdenies":9118,"col":9119,"ĠSide":9120,"Ġambulance":9121,"ĠRodgers":9122,"Ġaftermath":9123,"Ġdeck":9124,"irmed":9125,"Ġerrors":9126,"ĠConvention":9127,"Ġcurb":9128,"ĠShop":9129,"ĠThai":9130,"Ġma":9131,"Ġrespected":9132,"ĠMVP":9133,"Ġborrowing":9134,"Ġcruise":9135,"ĠSure":9136,"Ġsentencing":9137,"ĠObamacare":9138,"ĠIr":9139,"ĠSale":9140,"ĠPete":9141,"Ġopenly":9142,"Ġstartup":9143,"rock":9144,"Ġcargo":9145,"Ġtelecom":9146,"ĠDownload":9147,"Ġextending":9148,"ĠCurrent":9149,"Ġcompetitions":9150,"ĠKids":9151,"Ġshy":9152,"ĠKerry":9153,"ĠNever":9154,"ĠDevils":9155,"Ġprim":9156,"Con":9157,"Ġcurve":9158,"Ġassumed":9159,"Ġadjust":9160,"Ġimmune":9161,"UE":9162,"ĠUr":9163,"Ġconventional":9164,"Ġgrandchildren":9165,"ĠBol":9166,"Ad":9167,"ĠMaduro":9168,"fi":9169,"ĠUAE":9170,"ĠOrgan":9171,"Ġindicating":9172,"iem":9173,"ĠAgainst":9174,"ĠAmbassador":9175,"ĠSeoul":9176,"Ġcriminals":9177,"how":9178,"put":9179,"Ġreminded":9180,"Ġparked":9181,"lich":9182,"Ġcontinent":9183,"Ġmatched":9184,"ĠNicole":9185,"Ġgenetic":9186,"Ġhumanity":9187,"ĠTem":9188,"Ġindicator":9189,"Ġvessel":9190,"Ġdefendant":9191,"ĠGriffin":9192,"jan":9193,"Ġvend":9194,"boro":9195,"Ġbrokerage":9196,"ĠFall":9197,"Ġmere":9198,"VILLE":9199,"Ġlasted":9200,"ĠMind":9201,"Ġpatch":9202,"ĠInsider":9203
,"ĠComm":9204,"Ġtechnique":9205,"ĠIM":9206,"ĠCavaliers":9207,"Ġshame":9208,"Ġmil":9209,"oot":9210,"irt":9211,"Ġcop":9212,"ĠLeon":9213,"Ġfrozen":9214,"Ġslip":9215,"pton":9216,"Ġpanels":9217,"Ġpitching":9218,"Ġleather":9219,"ĠLogan":9220,"ĠNearly":9221,"urch":9222,"Ġinstructions":9223,"ĠRow":9224,"ĠKurdish":9225,"this":9226,"Ġlegendary":9227,"su":9228,"Ġstabbed":9229,"sters":9230,"Ġteenage":9231,"def":9232,"Ġoversight":9233,"Ġvolatile":9234,"Ġtransmission":9235,"ĠSgt":9236,"ĠIndigenous":9237,"ĠOxford":9238,"ĠCasey":9239,"Ġcor":9240,"Ġsalaries":9241,"Ġsponsor":9242,"Ġprescription":9243,"mat":9244,"ĠLeeds":9245,"ĠPakistani":9246,"Ġevil":9247,"Ġtables":9248,"ĠAbdul":9249,"Ġexpectation":9250,"Ġlegislature":9251,"ĠLin":9252,"¹":9253,"Ġcontractor":9254,"Ġshifting":9255,"Ġgenerous":9256,"ĠEddie":9257,"Ġpuck":9258,"utt":9259,"Ġdubbed":9260,"Ġnowhere":9261,"Ġbetting":9262,"Ġdisclose":9263,"Ĥ":9264,"ĠFashion":9265,"ĠHarper":9266,"handed":9267,"isha":9268,"ĠReds":9269,"Ġachievements":9270,"ume":9271,"Ġshootings":9272,"Ġadvisers":9273,"ĠEaster":9274,"Ġinternationally":9275,"ĠWi":9276,"ĠGandhi":9277,"ĠChristians":9278,"Ġrecruiting":9279,"Ġexperiment":9280,"Ġsol":9281,"Ġdifficulties":9282,"Ġinfluential":9283,"Ġhybrid":9284,"Ġformation":9285,"ĠBoulevard":9286,"Ġflags":9287,"Ġformula":9288,"front":9289,"Ġinclusion":9290,"ĠNone":9291,"ICE":9292,"Ġfilming":9293,"ĠLou":9294,"ĠReynolds":9295,"Ġpump":9296,"Ġexceptional":9297,"ANG":9298,"ĠCorporate":9299,"SAN":9300,"ĠHealthcare":9301,"ĠUkrainian":9302,"aron":9303,"Ġpants":9304,"Ġdrops":9305,"ete":9306,"ĠStudies":9307,"Ġwounds":9308,"END":9309,"Ġshower":9310,"Ġreviewing":9311,"ĠGreater":9312,"Ġ»":9313,"itors":9314,"alled":9315,"Ġsqu":9316,"ĠRonald":9317,"ĠInv":9318,"Ġtougher":9319,"Ġbalanced":9320,"Ġlined":9321,"Ġprinciple":9322,"Ġ1950":9323,"Ġleak":9324,"Be":9325,"Ġcircuit":9326,"Ġunfortunate":9327,"ĠGran":9328,"ĠFish":9329,"Ġfriendship":9330,"asp":9331,"OO":9332,"Ġobligations":9333,"Ġcoup":9334,"OK":9335,"Ġbreakdown":9336,"Ġhook":9337,"Ġresearcher":9338,"inated":9339,"ĠMarie":9340,"ĠGab":9341,"ĠWA":9342,"quez":9343,"General":9344,"ĠSwift":9345,"Ġgust":9346,"ĠCarol":9347,"ĠCentury":9348,"ĠOPEC":9349,"ĠRd":9350,"ĠCop":9351,"Ġsubjects":9352,"ĠComments":9353,"ases":9354,"Ġrelation":9355,"ĠEnvironment":9356,"ı":9357,"Ġgasoline":9358,"ĠLog":9359,"Ġicon":9360,"Ġprofitable":9361,"ĠRetail":9362,"ANC":9363,"Ġappealing":9364,"Ġvillages":9365,"Ġpizza":9366,"Ġmall":9367,"Ġtower":9368,"ĠLinda":9369,"Ġaccomplished":9370,"Ġpod":9371,"Ġleaked":9372,"ĠWed":9373,"Ġmer":9374,"Ġopposing":9375,"!'":9376,"Ġstomach":9377,"Ġrevealing":9378,"Ġho":9379,"DF":9380,"ĠSterling":9381,"Ġsolely":9382,"Ġpres":9383,"ĠCy":9384,"ĠLatest":9385,"ĠPitt":9386,"ĠThink":9387,"Ġcapability":9388,"aled":9389,"Ġexecuted":9390,"alling":9391,"ĠSilva":9392,"Ġrestricted":9393,"Ġdeclaration":9394,"Ġkilometres":9395,"rol":9396,"Ġidentifying":9397,"Ġdonors":9398,"vent":9399,"Ġcostly":9400,"ense":9401,"ĠSeeking":9402,"OURCE":9403,"iving":9404,"Ġplacing":9405,"tech":9406,"Ġbottles":9407,"writer":9408,"ĠSeahawks":9409,"oming":9410,"ĠArthur":9411,"ously":9412,"bin":9413,"ĠVa":9414,"Ġbias":9415,"Ġliability":9416,"ift":9417,"rak":9418,"aves":9419,"Ġcautious":9420,"ĠPrize":9421,"iley":9422,"ĠSharma":9423,"global":9424,"Ġwars":9425,"sm":9426,"ĠRemember":9427,"wind":9428,"ĠRichardson":9429,"ĠSum":9430,"ĠVincent":9431,"ĠRice":9432,"inf":9433,"Ġconsultation":9434,"range":9435,"Ġbacteria":9436,"Ġarchitecture":9437,"Ġpole":9438,"ĠMach":9439,"Ġcattle":9440,"Ġabused":9441,"being":9442,"ĠHERE":9443,"Ġfame":9444,"Ġhearings":9445,"ĠBr
it":9446,"Ġjoins":9447,"ĠMcGregor":9448,"Ġoppose":9449,"Ġcheer":9450,"itting":9451,"imes":9452,"Ġusage":9453,"Ġstint":9454,"Ġoutlet":9455,"Ġshoppers":9456,"ĠBaptist":9457,"Ġinappropriate":9458,"ĠALSO":9459,"Ġstealing":9460,"Ġpledge":9461,"ĠRan":9462,"Ġphotographer":9463,"Ġprevented":9464,"Ġ01":9465,"ĠEngineering":9466,"ĠProducts":9467,"Ġuniverse":9468,"ĠMcCarthy":9469,"¿":9470,"graded":9471,"Ġinspection":9472,"Ġind":9473,"Fi":9474,"aren":9475,"Ġprotections":9476,"Ġsorts":9477,"ĠWorks":9478,"Ġbillionaire":9479,"ĠGay":9480,"ĠiPad":9481,"IX":9482,"Ġdefendants":9483,"band":9484,"Ġfarms":9485,"Ġhom":9486,"gal":9487,"iant":9488,"Ġnortheast":9489,"ĠJoint":9490,"Ġcanceled":9491,"Ġtoys":9492,"Ġrein":9493,"ĠTumblr":9494,"pees":9495,"ĠAut":9496,"Police":9497,"Ġaide":9498,"Ġachieving":9499,"Ġmund":9500,"ĠCommercial":9501,"first":9502,"Ġanticipate":9503,"iac":9504,"Ġprobation":9505,"hem":9506,"Ġports":9507,"ĠKer":9508,"Ġsupplier":9509,"ĠFather":9510,"ĠAnti":9511,"ashed":9512,"ĠTable":9513,"bledon":9514,"Ġunf":9515,"ĠRash":9516,"ĠLeBron":9517,"Car":9518,"bu":9519,"ĠDerek":9520,"Ġaccounted":9521,"ĠPri":9522,"nings":9523,"Ġreceives":9524,"lev":9525,"Ġbilateral":9526,"ĠList":9527,"ĠLG":9528,"ĠJazz":9529,"Ġrestored":9530,"Ġbattles":9531,"ials":9532,"Ġoccupied":9533,"Ġrepairs":9534,"Ġradar":9535,"ĠMLB":9536,"ĠNC":9537,"Ġflexible":9538,"ĠCommand":9539,"Ġcoat":9540,"ĠVir":9541,"ĠColts":9542,"ĠBC":9543,"Ġtwin":9544,"Ġprisoners":9545,"Ġslowed":9546,"hop":9547,"ĠInn":9548,"Ġconflicts":9549,"Ġmeasured":9550,"Ġautonomous":9551,"ĠBow":9552,"Ġdisc":9553,"inson":9554,"ĠSche":9555,"aire":9556,"ĠSU":9557,"ĠPeterson":9558,"Ġdrafted":9559,"ĠPelosi":9560,"ĠSoon":9561,"Ġmechanism":9562,"Ġaccountability":9563,"ĠNortheast":9564,"Ġfo":9565,"Ġanalytics":9566,"ĠEverything":9567,"Ġperceived":9568,"bers":9569,"Ġcelebrations":9570,"Ġinstruments":9571,"Ġstrip":9572,"ĠJuventus":9573,"Ġunfortunately":9574,"ĠGA":9575,"Ġwrestling":9576,"Ġstatue":9577,"vis":9578,"five":9579,"Ġmarine":9580,"ĠSamuel":9581,"Ġresponsibilities":9582,"hill":9583,"Ġrecruit":9584,"Ġreferee":9585,"ĠRail":9586,"ĠEagle":9587,"ĠCongressional":9588,"Ġbreathing":9589,"Ġbass":9590,"hit":9591,"Ġspreading":9592,"Ġevacuated":9593,"Ġintellectual":9594,"Ġsovereign":9595,"ocked":9596,"Ġslammed":9597,"Ġformerly":9598,"Ġarch":9599,"Ġdifficulty":9600,"ĠAFC":9601,"ĠFresh":9602,"Ġinvite":9603,"oner":9604,"ĠMich":9605,"Ġpitches":9606,"stock":9607,"Ġinitiated":9608,"ĠKu":9609,"ĠFlorence":9610,"yd":9611,"ĠFast":9612,"Ġmusician":9613,"ĠChile":9614,"anga":9615,"Ġdairy":9616,"Ġcontractors":9617,"ador":9618,"ĠPlanning":9619,"Ġultra":9620,"Ġprayer":9621,"Ġsuggestions":9622,"ĠEk":9623,"Ġrandom":9624,"ĠSullivan":9625,"Ġsensor":9626,"Ġhomicide":9627,"ĠIncome":9628,"Ġsettings":9629,"Ġacknowledge":9630,"ĠStay":9631,"Ġterminal":9632,"Ġ1991":9633,"West":9634,"hard":9635,"arc":9636,"Ġcombine":9637,"Ġprivately":9638,"Ġbarrier":9639,"Ġmedian":9640,"Ġwhereas":9641,"ĠTitans":9642,"Ġincentives":9643,"Ġhistorically":9644,"Ġindictment":9645,"Ġhiding":9646,"ĠPDT":9647,"Ġrebuild":9648,"hol":9649,"Ġpour":9650,"Ġairports":9651,"ĠEdinburgh":9652,"Ġappoint":9653,"ĠJul":9654,"Ġconfusion":9655,"Ġdam":9656,"ork":9657,"Ġcalculated":9658,"Ġhood":9659,"ĠTemple":9660,"ĠYorkshire":9661,"EP":9662,"ented":9663,"Ġapology":9664,"awi":9665,"Ġfacilitate":9666,"ĠSheffield":9667,"Ġrides":9668,"Ġcompelling":9669,"ĠGonzalez":9670,"roll":9671,"ONG":9672,"UP":9673,"ĠAj":9674,"pen":9675,"ĠVar":9676,"ĠIPO":9677,"ĠAnimal":9678,"Ġshifted":9679,"Ġ140":9680,"Ġtobacco":9681,"El":9682,"ild":9683,"Ġuncertain":9684,"Un":9685,"Ġcaps":9686,
"Ġrecreational":9687,"ĠTu":9688,"Ġenc":9689,"More":9690,"iko":9691,"ĠEverton":9692,"ĠWalk":9693,"Ġmurdered":9694,"Ġpur":9695,"Ġdivisions":9696,"ivo":9697,"Ġfarming":9698,"Ġcourage":9699,"ped":9700,"Ġcrying":9701,"Ġattributed":9702,"ée":9703,"Ġimplementing":9704,"ĠWang":9705,"Ġspeeds":9706,"alk":9707,"aming":9708,"eries":9709,"Ġavoided":9710,"ĠMessi":9711,"Ġconsiderable":9712,"rt":9713,"Ġinauguration":9714,"ĠPH":9715,"Ġsoldier":9716,"Ġore":9717,"ollywood":9718,"otive":9719,"ĠAuburn":9720,"ĠSav":9721,"ĠPut":9722,"Ġemphasis":9723,"Ġaf":9724,"owed":9725,"Ġdiagnosis":9726,"Ġcart":9727,"Ġassisted":9728,"ĠOrder":9729,"ĠEstate":9730,"Ġintends":9731,"ĠCommon":9732,"Ġadventure":9733,"Ġbeliefs":9734,"Ġlasting":9735,"cel":9736,"Ġdeployment":9737,"tra":9738,"ĠStories":9739,"Ġquote":9740,"Ġfeared":9741,"Ġconvenience":9742,"Ġoptimism":9743,"Ġscientist":9744,"ĠEnterprise":9745,"ĠRex":9746,"ĠFel":9747,"Ġposes":9748,"Ġroot":9749,"Ġevacuation":9750,"Ġpresidents":9751,"ĠRather":9752,"Ġgrave":9753,"ĠHeights":9754,"Ġjumping":9755,"driven":9756,"Ġaluminum":9757,"Ġholders":9758,"Ġboot":9759,"iber":9760,"Ġprecious":9761,"uation":9762,"FP":9763,"uses":9764,"Ġcommentary":9765,"Ġadvances":9766,"ĠNissan":9767,"Ġbronze":9768,"Ġinspire":9769,"Ġstarters":9770,"ĠEvan":9771,"rah":9772,"body":9773,"Ġcrops":9774,"Ġseeds":9775,"Ġharsh":9776,"ĠHomeland":9777,"Ġenabled":9778,"ological":9779,"Ġworkshop":9780,"Ġchains":9781,"amps":9782,"Ġamongst":9783,"ĠBear":9784,"Ġcertified":9785,"ĠJulie":9786,"Ġmountains":9787,"VA":9788,"Ġfed":9789,"Ġbuyer":9790,"ahl":9791,"ĠBos":9792,"ĠCrystal":9793,"Ġquest":9794,"ĠStein":9795,"Ġacceptable":9796,"Ġunbeaten":9797,"iring":9798,"ural":9799,"Ġuncomfortable":9800,"Ġpartial":9801,"Ġsacrifice":9802,"ĠGrande":9803,"Ġarrangement":9804,"Ġpackaging":9805,"screen":9806,"Ġmirror":9807,"Ġsweep":9808,"Ġconnecting":9809,"Ġpanic":9810,"ĠJacksonville":9811,"ĠKremlin":9812,"Ġorigin":9813,"Brien":9814,"Ġnorthwest":9815,"Ġcarriers":9816,"ĠRiley":9817,"Ġaud":9818,"Ġappreciation":9819,"Ġeliminated":9820,"ĠAnalyst":9821,"CR":9822,"Ġfirearm":9823,"Ġaccommodate":9824,"Ġstructural":9825,"Ġappealed":9826,"Ġcharter":9827,"ressing":9828,"Ġalike":9829,"white":9830,"Ġslowdown":9831,"Ġweigh":9832,"ĠPalmer":9833,"ound":9834,"ĠConn":9835,"Ġbranches":9836,"Ġace":9837,"Ġinsists":9838,"yo":9839,"ĠLynn":9840,"ĠCC":9841,"ĠWithin":9842,"Ġcoll":9843,"Ġsustain":9844,"Ġemerge":9845,"ĠBattle":9846,"VER":9847,"Ġaviation":9848,"Ġenables":9849,"ĠProduction":9850,"ĠGrove":9851,"Ġnationally":9852,"ĠBaldwin":9853,"rent":9854,"Ġfirearms":9855,"irm":9856,"Ġconsiders":9857,"ĠCosby":9858,"ĠMcK":9859,"ĠEnt":9860,"Ġincumbent":9861,"iance":9862,"Ġgiants":9863,"Ġkan":9864,"Ġminimal":9865,"ivity":9866,"ĠSay":9867,"ĠNass":9868,"Ġlovely":9869,"ĠFurthermore":9870,"Ġdisplaced":9871,"Ġcontacts":9872,"NY":9873,"Ġtechnological":9874,"ancy":9875,"Ġant":9876,"ope":9877,"ĠFY":9878,"Ġfavorable":9879,"ĠVirgin":9880,"Ġcasual":9881,"ĠLat":9882,"Ġpopulations":9883,"Ġromance":9884,"Ġforgotten":9885,"Ġfleeing":9886,"Ġspecialty":9887,"Ġdrill":9888,"Ġapplying":9889,"Ġcocaine":9890,"rea":9891,"Ġheroin":9892,"Ġsweeping":9893,"ĠMaj":9894,"Ġtroubled":9895,"Ġcolleague":9896,"Ġedged":9897,"omes":9898,"ĠHappy":9899,"´":9900,"Ġmilitant":9901,"boy":9902,"aver":9903,"Yes":9904,"llo":9905,"Ġsupporter":9906,"ĠSubscribe":9907,"ĠBird":9908,"ĠGibson":9909,"Ġhill":9910,"Ġnewspapers":9911,"ĠPHOTO":9912,"Ġouting":9913,"Ġdefine":9914,"Ġann":9915,"Ġrobot":9916,"Ġregret":9917,"ĠCould":9918,"raz":9919,"Ġceiling":9920,"Ġorganizers":9921,"ĠTw":9922,"Ġcriticised":9923,"ĠJoh":9924,"ĠJe":9925,"ĠBu
lls":9926,"Ġteeth":9927,"ĠRanch":9928,"ĠAndrea":9929,"Ġconservatives":9930,"Ġmag":9931,"vey":9932,"Ġpredecessor":9933,"ĠJPMorgan":9934,"Ġdraws":9935,"umber":9936,"Ġvaccine":9937,"ĠDas":9938,"Ġdisappeared":9939,"ĠIron":9940,"Ġlitigation":9941,"vert":9942,"Ġbelong":9943,"ĠRet":9944,"owers":9945,"rain":9946,"controlled":9947,"ĠKil":9948,"Ġrehab":9949,"ĠAustria":9950,"Ġprivilege":9951,"Ġbounce":9952,"Ġbout":9953,"ĠIslamist":9954,"Ġtaxi":9955,"ody":9956,".'\"":9957,"Ġdos":9958,"shire":9959,"Ġaccidents":9960,"Ġdemonstration":9961,"His":9962,"ĠBO":9963,"ĠICE":9964,"van":9965,"File":9966,"ĠManning":9967,"ounded":9968,"Ġdirections":9969,"lled":9970,"Ġoffences":9971,"Ġlaptop":9972,"ĠUniversal":9973,"Ġmilestone":9974,"ĠNarendra":9975,"Ġnotion":9976,"Ġuns":9977,"ĠLower":9978,"Ġmidfield":9979,"Ġoutper":9980,"trans":9981,"ĠJa":9982,"three":9983,"Adds":9984,"Ġpressures":9985,"Ġprohibited":9986,"Ġutilities":9987,"Ġbes":9988,"ĠReporter":9989,"Ġcommodities":9990,"leton":9991,"Ġslower":9992,"EE":9993,"auer":9994,"Ġtablet":9995,"sl":9996,"iously":9997,"Ġaiming":9998,"eland":9999,"ĠNEXT":10000,"tered":10001,"IVE":10002,"onic":10003,"May":10004,"ĠMilitary":10005,"Mark":10006,"Ġlender":10007,"mate":10008,"Ġaboard":10009,"they":10010,"Ġrespondents":10011,"Ġconversion":10012,"Ġsecuring":10013,"Ġentity":10014,"ĠHarbor":10015,"ĠCu":10016,"Ġcats":10017,"ĠACC":10018,"ĠIbrahim":10019,"GL":10020,"Ġinvitation":10021,"Ġcond":10022,"ĠRecords":10023,"ĠAdrian":10024,"Ġbrave":10025,"Ġmineral":10026,"Ġsooner":10027,"Ġsatisfied":10028,"Ġpets":10029,"Ġnotably":10030,"ı":10031,"Ġmarking":10032,"ĠRO":10033,"ĠHaw":10034,"ĠVis":10035,"Ġmarketplace":10036,"ĠNat":10037,"ĠForward":10038,"ĠLeft":10039,"Ġaggravated":10040,"ĠClose":10041,"acey":10042,"Ġlandmark":10043,"Ġdisruption":10044,"ĠChallenge":10045,"ĠDays":10046,"ĠCoun":10047,"ahan":10048,"Ġaides":10049,"South":10050,"ĠDylan":10051,"ĠRavens":10052,"ĠNature":10053,"lli":10054,"Ġdiplomats":10055,"350":10056,"ĠDrake":10057,"tag":10058,"Ġlicensed":10059,"ĠDenmark":10060,"Ġcancel":10061,"Ġinstant":10062,"DI":10063,"Ġpunch":10064,"ĠJenkins":10065,"Ġstrengthening":10066,"des":10067,"-$":10068,"Ġallegation":10069,"Ġsizes":10070,"iza":10071,"Ġmentally":10072,"ĠResidents":10073,"acked":10074,"Ġsensors":10075,",'\"":10076,"illion":10077,"ĠChampion":10078,"Ġexcessive":10079,"Ġhum":10080,"ĠComp":10081,"rend":10082,"ĠLakes":10083,"Ġburst":10084,"Ġtrainer":10085,"Ġclearing":10086,"ĠSilicon":10087,"Ġ350":10088,"DE":10089,"ĠGates":10090,"ĠHorn":10091,"ests":10092,"ĠCourtesy":10093,"Ġbipartisan":10094,"Ġhabits":10095,"ĠAlexa":10096,"walk":10097,"Ġsnapped":10098,"ĠEight":10099,"itis":10100,"zel":10101,"Ġcustoms":10102,"Ġsouthwest":10103,"Ġvary":10104,"Because":10105,"Ġpayout":10106,"Ġaccelerate":10107,"ĠBarr":10108,"tu":10109,"Ġfined":10110,"cost":10111,"ĠTheater":10112,"ĠCorbyn":10113,"Ġstem":10114,"Ġundermine":10115,".;":10116,"Ġstays":10117,"Ġbreakthrough":10118,"Ġturnover":10119,"hot":10120,"Ġtriumph":10121,"Ġpainted":10122,"ĠWinnipeg":10123,"ĠKas":10124,"ĠStuart":10125,"irk":10126,"Am":10127,"Ġtrusted":10128,"aze":10129,"ĠLate":10130,"Ġaccessories":10131,"Ġmemorable":10132,"ĠFool":10133,"Ġrotation":10134,"ĠBulldogs":10135,"ĠChen":10136,"Ġpoised":10137,"ĠMonte":10138,"ĠClarke":10139,"leading":10140,"Ġvenues":10141,"Ġbeneficial":10142,"ĠLiam":10143,"ĠBrothers":10144,"ĠNeed":10145,"Ġconc":10146,"olly":10147,"ĠJulian":10148,"ogue":10149,"Ġfounding":10150,"Ġsidelines":10151,"Ġdeclare":10152,"ĠMember":10153,"Ġexamine":10154,"abs":10155,"Ġboundaries":10156,"ĠBrisbane":10157,"Ġlaunches":10158,"lor":1015
9,"ĠGa":10160,"Ġthr":10161,"expected":10162,"wal":10163,"ĠBarnes":10164,"Ġclashes":10165,"content":10166,"ĠClemson":10167,"iger":10168,"Mar":10169,"Ġaccord":10170,"Ġsoutheast":10171,"ģ":10172,"ĠStarbucks":10173,"osing":10174,"Ġseasonal":10175,"icking":10176,"Ġloyalty":10177,"Ġtent":10178,"ĠDy":10179,"Ġevident":10180,"Ġlobby":10181,"Ġtours":10182,"Ġbombing":10183,"uations":10184,"Ġrises":10185,"Ġdemonstrations":10186,"ĠWATCH":10187,"pin":10188,"Ġdeb":10189,"ĠDraft":10190,"rog":10191,"Ġseal":10192,"ĠPerformance":10193,"ĠLGBT":10194,"Ġsed":10195,"Ġgig":10196,"nan":10197,"Ġrainfall":10198,"Ġfabric":10199,"Ġmanages":10200,"Ġlifting":10201,"ĠMagazine":10202,"ĠCriminal":10203,"Ġhikes":10204,"Ġcatching":10205,"Ġ1989":10206,"OG":10207,"Ġdisappointment":10208,"Ġir":10209,"ĠEV":10210,"stown":10211,"pass":10212,"120":10213,"Ġmedals":10214,"ĠSimmons":10215,"Ġinaugural":10216,"ĠCorn":10217,"Ġmotorcycle":10218,"lets":10219,"ĠSkype":10220,"ét":10221,"Ġscary":10222,"opp":10223,"thirds":10224,"ĠMohamed":10225,"Ġteenagers":10226,"ANK":10227,"Ġserver":10228,"Ġouts":10229,"Ġdishes":10230,"four":10231,"dr":10232,"ĠOt":10233,"ĠSandy":10234,"ĠShane":10235,"orters":10236,"SH":10237,"Ġtouching":10238,"ĠNike":10239,"ĠHBO":10240,"driving":10241,"Ġplug":10242,"ĠBaseball":10243,"eling":10244,"hn":10245,"ulate":10246,"eed":10247,"ĠChristine":10248,"ĠGlobe":10249,"Ġethics":10250,"ĠTrevor":10251,"iya":10252,"Ġ360":10253,"Ġawaiting":10254,"Ġcounterpart":10255,"Ġsubsidies":10256,"pointers":10257,"Ġspy":10258,"ILL":10259,"Ġtakeover":10260,"ĠBeyond":10261,"Ġsurprisingly":10262,"TION":10263,"ĠSong":10264,"Ġni":10265,"Ġcommonly":10266,"Ġjack":10267,"Ġsubstitute":10268,"ews":10269,"Ġrecalls":10270,"ĠCommons":10271,"Ġsin":10272,"del":10273,"ĠMod":10274,"Ġpressing":10275,"ĠTelevision":10276,"ĠInside":10277,"ª":10278,"Ġbacklash":10279,"Ġcredible":10280,"ĠJenner":10281,"ĠPu":10282,"ĠStevens":10283,"ĠWE":10284,"Last":10285,"Ġinsurers":10286,"ĠJoin":10287,"bled":10288,"digit":10289,"Ġflooded":10290,"ĠShore":10291,"ĠTrophy":10292,"zing":10293,"ĠImmigration":10294,"Ġsuperior":10295,"IAN":10296,"Ġcasino":10297,"Ġenabling":10298,"Ġmeantime":10299,"Ġperformers":10300,"Ġproportion":10301,"Ġlawmaker":10302,"ĠConf":10303,"Ġconvert":10304,"Ġfarmer":10305,"Ġbu":10306,"ĠGE":10307,"ĠRepresentative":10308,"ĠBannon":10309,"ĠHelp":10310,"PT":10311,"formed":10312,"ĠSuperintendent":10313,"Ġfrustrating":10314,"ĠRegister":10315,"ĠPolitical":10316,"Ġboots":10317,"ĠRu":10318,"ĠSha":10319,"Ġinstrument":10320,"tor":10321,"ĠBelt":10322,"ĠWalsh":10323,"Ġrecipe":10324,"ilt":10325,"ĠClean":10326,"iors":10327,"Ġtwenty":10328,"iler":10329,"nder":10330,"Ġwinger":10331,"Ġwheat":10332,"ĠAviation":10333,"Ġcorrupt":10334,"Ġconnectivity":10335,"ĠVen":10336,"order":10337,"esc":10338,"break":10339,"Ġmetals":10340,"Ġtraditionally":10341,"Ġbell":10342,"Ġviolating":10343,"rough":10344,"Ġintroducing":10345,"Ġguided":10346,"ĠMol":10347,"Ġdesert":10348,"ĠBree":10349,"Le":10350,"ĠZone":10351,"ĠGlass":10352,"ĠEUR":10353,"ĠYahoo":10354,"Ġlaps":10355,"Ġdiffer":10356,"ĠHold":10357,"Ġtimely":10358,"Ġsuccessor":10359,"Ġcomic":10360,"Ġbears":10361,"Ġlicence":10362,"Ġreject":10363,"Ġsophisticated":10364,"Too":10365,"Ġobjectives":10366,"ĠId":10367,"urers":10368,"Ġraid":10369,"COM":10370,"Ġelect":10371,"ĠHampshire":10372,"Ġlens":10373,"Ġdesigners":10374,"Ġpresently":10375,"ĠRCMP":10376,"ĠEgyptian":10377,"ĠWalter":10378,"ĠWallace":10379,"Ġ2025":10380,"utics":10381,"ried":10382,"Ġrefuse":10383,"Ġsiblings":10384,"ĠNothing":10385,"Ġdressing":10386,"Ġnerve":10387,"AST":10388,"Ġuncertainties":1
0389,"Ġtale":10390,"ĠTalk":10391,"Ġissuing":10392,"shot":10393,"ĠTak":10394,"Ġacid":10395,"ĠNintendo":10396,"Ġwash":10397,"pd":10398,"ĠClaire":10399,"ĠScot":10400,"Ġsuits":10401,"ĠBayern":10402,"gest":10403,"Ġapplicable":10404,"Ġinteraction":10405,"ĠEnforcement":10406,"ĠRohingya":10407,"Ġjan":10408,"Ġunited":10409,"ĠCoalition":10410,"Ġlegislators":10411,"Ġdetectives":10412,"ĠSing":10413,"ĠBetween":10414,"ĠPoly":10415,"pool":10416,"mal":10417,"Ġreply":10418,"Ġschemes":10419,"ĠHolmes":10420,"ĠSenators":10421,"ĠVerizon":10422,"Ġwelcoming":10423,"ĠCricket":10424,"ĠMarco":10425,"ĠYears":10426,"ĠLiving":10427,"Ġcounterparts":10428,"ĠParadise":10429,"ĠTrad":10430,"#":10431,"iw":10432,"ĠSoccer":10433,"umbled":10434,"Ġdeceased":10435,"heim":10436,"Ġevaluation":10437,"Ġwrap":10438,"Ġmild":10439,"aji":10440,"ĠUCLA":10441,"ĠNative":10442,"president":10443,"ĠXbox":10444,"Ġenterprises":10445,"ĠSlam":10446,"oga":10447,"Rock":10448,"piece":10449,"ĠColeman":10450,"Ġcomparable":10451,"uba":10452,"Ġprovinces":10453,"ĠFormula":10454,"ipt":10455,"ô":10456,"Ġtick":10457,"ĠIMF":10458,"anch":10459,"atta":10460,"rew":10461,"However":10462,"LS":10463,"etta":10464,"ĠCustoms":10465,"SU":10466,"Ġpublishing":10467,"Ġinch":10468,"Ġkills":10469,"¤":10470,"ĠSus":10471,"ĠBeth":10472,"Ġsteam":10473,"jpg":10474,"pointer":10475,"Ġturnovers":10476,"Ġpowder":10477,"ĠUSB":10478,"ĠWildlife":10479,"ĠDirect":10480,"atively":10481,"ĠFerrari":10482,"Ġpleasure":10483,"ĠMatthews":10484,"Ġski":10485,"ography":10486,"ĠVermont":10487,"ĠMargaret":10488,"ĠMunich":10489,"Ġlayer":10490,"ĠProperty":10491,"Ġeconomics":10492,"ĠCrew":10493,"UK":10494,"Ġunnecessary":10495,"ĠGlasgow":10496,"Ġsealed":10497,"Ġclarity":10498,"Ġsurplus":10499,"ĠCanyon":10500,"ĠApart":10501,"Ġacceptance":10502,"ĠEllis":10503,"uster":10504,"rid":10505,"ĠHawks":10506,"Ġstatewide":10507,"Ġthreaten":10508,"ĠJail":10509,"Ġinclusive":10510,"Ġmud":10511,"Ġpat":10512,"Ġbitter":10513,"Ġalternatives":10514,"Ġaffiliate":10515,"Ġevaluate":10516,"ĠBaby":10517,"Ġperception":10518,"tim":10519,"Ġrefusing":10520,"Ġgrey":10521,"Ġarguably":10522,"Ġfirmly":10523,"ĠDark":10524,"Ġexcuse":10525,"ĠRaymond":10526,"Ġballots":10527,"inton":10528,"Ġ125":10529,"ĠCatherine":10530,"Ġsacks":10531,"ĠDeb":10532,"Ġworkout":10533,"web":10534,"Ġbatteries":10535,"breaking":10536,"ML":10537,"Ġunacceptable":10538,"ĠValentine":10539,"ĠYOU":10540,"ĠRT":10541,"Ġjurisdiction":10542,"Ġexamined":10543,"strom":10544,"ĠPocket":10545,"Ġcement":10546,"Ġuniversal":10547,"ĠOz":10548,"Ġkit":10549,"Ġchurches":10550,"Ġsuburban":10551,"ĠKushner":10552,"ĠDavidson":10553,"Sports":10554,"email":10555,"Ġrealistic":10556,"Ġintend":10557,"ĠGrey":10558,",''":10559,"Ġscholarship":10560,"Ġphilosophy":10561,"Ġwheels":10562,"Ġmotivation":10563,"eway":10564,"match":10565,"ĠDate":10566,"John":10567,"Ġcontrolling":10568,"750":10569,"aven":10570,"Ġfilmed":10571,"Ġ160":10572,"ĠBrock":10573,"ĠDetails":10574,"Ġlogistics":10575,"Ġassumptions":10576,"ĠStep":10577,"Ġfails":10578,"ĠNotre":10579,"Ġjuice":10580,"Ġcounting":10581,"Ġphotograph":10582,"Ġfortunate":10583,"Ġestablishing":10584,"ĠNJ":10585,"ĠWorkers":10586,"ĠQuinn":10587,"ĠHeather":10588,"Ġtimeline":10589,"Ġimported":10590,"ĠNASCAR":10591,"Ġexercises":10592,"Ġsearched":10593,"ĠRalph":10594,"alf":10595,"Ġgene":10596,"Ġdependent":10597,"én":10598,"iate":10599,"ĠBristol":10600,"Ġhung":10601,"Ġtropical":10602,"Ġintensity":10603,"ĠIdaho":10604,"ĠMull":10605,"Ġsuite":10606,"Ġblockchain":10607,"cz":10608,"ovich":10609,"Ġworn":10610,"ĠLE":10611,"AV":10612,"emi":10613,"Ġidentification":10614,"Ġtun
nel":10615,"ĠARE":10616,"ĠArm":10617,"Ġoutrage":10618,"Ġtwist":10619,"uka":10620,"ĠGra":10621,"Ġjets":10622,"ĠThus":10623,"Ġcompound":10624,"Ġfinancially":10625,"2019":10626,"asse":10627,"Ġspare":10628,"ĠNoah":10629,"ĠMade":10630,"ĠMom":10631,"Ġphenomenon":10632,"Ġnurses":10633,"Ġoutlined":10634,"Ġpolit":10635,"ĠCarm":10636,"Ġleagues":10637,"Ġmath":10638,"Ġmodified":10639,"Ġwillingness":10640,"ĠAmanda":10641,"Ġgrandfather":10642,"Of":10643,"DR":10644,"Ġdip":10645,"ĠRAM":10646,"ĠChristie":10647,"Ġargues":10648,"ĠEX":10649,"ĠNine":10650,"ĠScroll":10651,"ĠTHIS":10652,"Pro":10653,"Ġkeys":10654,"Ġprocessor":10655,"Ġscam":10656,"ĠTraining":10657,"Ġhoney":10658,"Ĵ":10659,"Ġfacebook":10660,"ĠLegal":10661,"Ġaging":10662,"Ġspiritual":10663,"ĠHost":10664,"Ġlung":10665,"ĠUSC":10666,"Ġdirt":10667,"Ġfe":10668,"after":10669,"ĠDiana":10670,"Ġounce":10671,"date":10672,"ĠFinals":10673,"Ķ":10674,"Ġthorough":10675,"Ġviable":10676,"Ġanytime":10677,"Ġfost":10678,"orter":10679,"ware":10680,"ĠHolland":10681,"ĠMand":10682,"ĠSend":10683,"2013":10684,"ĠVolkswagen":10685,"Ġsuitable":10686,"ifies":10687,"Ġcomedian":10688,"Ġneighbours":10689,"ĠKnow":10690,"Ġcurious":10691,"ĠTwenty":10692,"ĠPrevention":10693,"ĠStephanie":10694,"Ġpilots":10695,"Ġstored":10696,"Ġdire":10697,"Ġfits":10698,"ision":10699,"ĠShell":10700,"Ġshifts":10701,"Ġpepper":10702,"Ġattendees":10703,"ĠName":10704,"hers":10705,"rip":10706,"Ġwatchdog":10707,"andy":10708,"Ġbio":10709,"Ġpublisher":10710,"powered":10711,"ĠCM":10712,"rian":10713,"ĠRand":10714,"wise":10715,"ĠJesse":10716,"Ġladies":10717,"ĠMetropolitan":10718,"ĠMicro":10719,"Ġkicking":10720,"Ġmeg":10721,"Ġclouds":10722,"Ġtrim":10723,"wear":10724,"ĠML":10725,"Ġconsists":10726,"Ġrig":10727,"Ġhonestly":10728,"GS":10729,"ĠNicholas":10730,"Ġcope":10731,"Ġpublish":10732,"working":10733,"bur":10734,"ĠNar":10735,"olds":10736,"aja":10737,"ĠSad":10738,"Ġclicking":10739,"Ġbids":10740,"ĠZuckerberg":10741,"Ġ900":10742,"Ġexam":10743,"ivers":10744,"Ġpray":10745,"Ġreader":10746,"ĠSeth":10747,"inem":10748,"Ġconfront":10749,"stra":10750,"AW":10751,"ĠGian":10752,"Ġaccordance":10753,"Ġinteract":10754,"ĠSharks":10755,"Ġfireworks":10756,"gment":10757,"illy":10758,"Ġconst":10759,"ARY":10760,"Ġprizes":10761,"Ġshoulders":10762,"Ġaccessed":10763,"Ġecosystem":10764,"Ġlicensing":10765,"La":10766,"Ġdedication":10767,"Ġdé":10768,"Ġyouths":10769,"lem":10770,"Ġtoy":10771,"ĠProm":10772,"ounding":10773,"rod":10774,"Ġ1000":10775,"ishes":10776,"Over":10777,"Ġgaps":10778,"Ġmissions":10779,"Ġrailway":10780,"Day":10781,"orp":10782,"ĠSchumer":10783,"Ġeclipse":10784,"Ġshell":10785,"ĠBY":10786,"Many":10787,"ĠRecord":10788,"Ġdrunk":10789,"ayan":10790,"Ġsuggestion":10791,"Ġdefenders":10792,"ĠNewton":10793,"Ġdisputes":10794,"Ġevolution":10795,"Ġcredibility":10796,"ĠTenn":10797,"Ġplain":10798,"size":10799,"cont":10800,"Ġlone":10801,"Ġfingers":10802,"BUR":10803,"ĠInvestigation":10804,"ĠQualcomm":10805,"var":10806,"Ġcountless":10807,"ĠRebecca":10808,"½":10809,"abi":10810,"Ġreflecting":10811,"ĠTurn":10812,"Ġinteractive":10813,"Ġincentive":10814,"second":10815,"offs":10816,"ĠBerkeley":10817,"ĠTexans":10818,"Ġheated":10819,"Ġscorer":10820,"ĠSharif":10821,"Ġmigrant":10822,"west":10823,"ĠHoliday":10824,"Ġwrist":10825,"Ġchairs":10826,"Ġrecommends":10827,"ĠWildcats":10828,"ĠPed":10829,"ĠQuarter":10830,"ĠIV":10831,"ĠArch":10832,"Ġstandings":10833,"Ġbombs":10834,"Ġcapped":10835,"Can":10836,"Ġcaring":10837,"ĠLah":10838,"lim":10839,"Ġdragged":10840,"ĠBeat":10841,"DB":10842,"Ġaired":10843,"Ġjeans":10844,"action":10845,"Ġgenerating":10846,"ĠGir":10847,"risk
":10848,"lon":10849,"stage":10850,"âĤ¬":10851,"earing":10852,"ĠTogether":10853,"Ġreun":10854,"ĠCorey":10855,"ĠBak":10856,"Ġprestigious":10857,"Ġapplicants":10858,"here":10859,"ĠMattis":10860,"Ġridiculous":10861,"ĠLess":10862,"Ġrains":10863,"Ġpresenting":10864,"anti":10865,"Ġdisabilities":10866,"Ġapartments":10867,"storm":10868,"ĠHem":10869,"Ġhabit":10870,"ĠRuth":10871,"ĠNPR":10872,"nut":10873,"Ġappreciated":10874,"Ġseparation":10875,"uda":10876,"Ġminus":10877,"ĠPhotos":10878,"Ġblew":10879,"ĠVoice":10880,"Ġrallies":10881,"Ġfond":10882,"ĠTaking":10883,"yt":10884,"FE":10885,"ĠTory":10886,"ressed":10887,"ĠLy":10888,"Ġrocks":10889,"ĠRah":10890,"Ġelementary":10891,"nis":10892,"ĠPresidential":10893,"Ġnutrition":10894,"Ġbaseman":10895,"Ġsuperstar":10896,"ĠWa":10897,"lar":10898,"Ġstaged":10899,"ĠLearn":10900,"Ġbroadcaster":10901,"Ġboasts":10902,"Ġdoubts":10903,"rum":10904,"Ġbare":10905,"cap":10906,"Ġclimbing":10907,"ĠSelect":10908,"ĠCant":10909,"ĠNord":10910,"ĠBeck":10911,"ĠKad":10912,"ello":10913,"Ġenforce":10914,"ĠZe":10915,"ked":10916,"elly":10917,"ĠLED":10918,"ĠOperations":10919,"ĠLuk":10920,"Ġcertificate":10921,"Ġdeter":10922,"Ġspill":10923,"Ġgrain":10924,"league":10925,"Up":10926,"ĠKid":10927,"using":10928,"ĠJays":10929,"Ġoccasionally":10930,"ĠMI":10931,"yes":10932,"Ġdetect":10933,"Ġpropaganda":10934,"Ġneighboring":10935,"sub":10936,"avan":10937,"ĠAstros":10938,"oti":10939,"threatening":10940,"Ġshorter":10941,"INGS":10942,"Ġfeeding":10943,"Ġelevated":10944,"ĠWenger":10945,"Ġundergo":10946,"Ġpsychological":10947,"Ġautom":10948,"NP":10949,"anks":10950,"ĠNokia":10951,"Ġdrones":10952,"Ġrecognised":10953,"Ġheroes":10954,"agen":10955,"Ġparole":10956,"ĠBah":10957,"Ġhomeowners":10958,"ĠSweet":10959,"Ġinstances":10960,"ĠParish":10961,"ĠSL":10962,"Ġunw":10963,"Ġdelicious":10964,"¯":10965,"ĠInvestments":10966,"ĠPhilippine":10967,"inos":10968,"Ġmes":10969,"Ġbite":10970,"Ġcornerback":10971,"ĠHat":10972,"Ġdeserved":10973,"ologists":10974,"[":10975,"Ġwrongdoing":10976,"ĠTrent":10977,"ĠVe":10978,"ĠDeal":10979,"Mr":10980,"Ġovers":10981,"Ġhonors":10982,"ĠITV":10983,"Ġpayroll":10984,"Ġconfused":10985,"Ġelaborate":10986,"ange":10987,"World":10988,"ĠResort":10989,"ilia":10990,"ĠKr":10991,"Ġconclude":10992,"First":10993,"ĠDR":10994,"Ġpeer":10995,"Ġrunway":10996,"ĠPotter":10997,"cons":10998,"bad":10999,"si":11000,"ĠClimate":11001,"ĠHoll":11002,"Ġweighing":11003,"Ġepidemic":11004,"ĠBible":11005,"Ġhon":11006,"Ġrenew":11007,"Ġgambling":11008,"ĠNationals":11009,"itable":11010,"ĠOutlook":11011,"Ġreactions":11012,"ĠCos":11013,"ĠDana":11014,"India":11015,"ĠAirbus":11016,"power":11017,"watch":11018,"Ġstyles":11019,"Ġordinance":11020,"Ġcam":11021,"Ġinvent":11022,"ĠDurant":11023,"Ġexchanged":11024,"Ġyoga":11025,"ĠMichel":11026,"ĠWyoming":11027,"ĠPhase":11028,"ĠHannah":11029,"Ġtem":11030,"Ġfare":11031,"omer":11032,"Ġtrails":11033,"Ġquietly":11034,"ĠFourth":11035,"Ġwise":11036,"Ġappetite":11037,"Ġpedestrian":11038,"Ġfierce":11039,"hin":11040,"ako":11041,"Ġvacant":11042,"Ġdynamics":11043,"Ġbust":11044,"ĠGT":11045,"century":11046,"Ġpermitted":11047,"Ġfog":11048,"Ġrecruitment":11049,"ĠDue":11050,"Ġbro":11051,"Ġsil":11052,"ĠOpp":11053,"Ġphrase":11054,"ĠChip":11055,"ĠBase":11056,"Ġjazz":11057,"Ġenemies":11058,"Ġremainder":11059,"bles":11060,"Ġ105":11061,"ĠGur":11062,"Ġretiring":11063,"ĠCour":11064,"ĠSi":11065,"Ġinevitable":11066,"ĠAdvisory":11067,"ĠCampaign":11068,"ĠPeninsula":11069,"base":11070,"Ġjustify":11071,"inen":11072,"North":11073,"Ġfreezing":11074,"Ġphotography":11075,"Ġappointments":11076,"ĠTree":11077,"Os":11078,"Ġdivide"
:11079,"ĠMMA":11080,"Ġdeclines":11081,"ĠAbbott":11082,"ACH":11083,"ĠJah":11084,"Ġspr":11085,"Ġskilled":11086,"ĠTry":11087,"ANT":11088,"ael":11089,"ĠMcN":11090,"Ġtariff":11091,"generation":11092,"ĠMans":11093,"Or":11094,"Ġraped":11095,"Ġdisability":11096,"Ġnominations":11097,"Ġhappiness":11098,"ĠLSU":11099,"ĠInterstate":11100,"ĠDance":11101,"ĠMaking":11102,"Ġbailout":11103,"oro":11104,"ĠObviously":11105,"Ġinbox":11106,"football":11107,"hy":11108,"ĠCase":11109,"Ġentertaining":11110,"Ġhardest":11111,"ĠOpposition":11112,"Ġflip":11113,"ĠPirates":11114,"anu":11115,"ĠKlopp":11116,"Ġballistic":11117,"Ġprinted":11118,"ĠNFC":11119,"UST":11120,"Ġglasses":11121,"Ġrum":11122,"ĠDuncan":11123,"hal":11124,"Ġpreview":11125,"BER":11126,"dec":11127,"Ġsustainability":11128,"Ġaff":11129,"Ġhungry":11130,"service":11131,"avi":11132,"Ġsometime":11133,"Ġmod":11134,"ĠLib":11135,"oko":11136,"Ġfundraiser":11137,"Ġcrowded":11138,"mates":11139,"Ġcreativity":11140,"ĠHell":11141,"Ġtreaty":11142,"ĠSoftware":11143,"ĠRandy":11144,"ĠPolish":11145,"sa":11146,"ardi":11147,"Ġcab":11148,"ĠCamera":11149,"Ġlicenses":11150,"Ġ1988":11151,"Ġcontinuous":11152,"Ġpaired":11153,"Ġtally":11154,"Ġgrip":11155,"cho":11156,"Ġsurged":11157,"Ġpodium":11158,"Ġcontrary":11159,"SL":11160,"ĠResearchers":11161,"cing":11162,"Ġmi":11163,"Ġdisputed":11164,"Ġgrades":11165,"Ġseverely":11166,"ĠMcL":11167,"ondo":11168,"Ġshelters":11169,"Ġdomain":11170,"ĠSwitch":11171,"Ġtestify":11172,"case":11173,"omet":11174,"atch":11175,"ĠAff":11176,"Ġcasting":11177,"berger":11178,"Ġintimate":11179,"erc":11180,"plan":11181,"ĠPast":11182,"ĠUt":11183,"Ġapologized":11184,"ĠDet":11185,"alle":11186,"Ġwhilst":11187,"Ġpel":11188,"Ġexecute":11189,"Ġharmful":11190,"ĠRB":11191,"onda":11192,"ĠFul":11193,"II":11194,"Those":11195,"Ġcryptocurrency":11196,"Ġrealise":11197,"ĠAthens":11198,"ĠApplication":11199,"ORD":11200,"Ġmidst":11201,"ĠSem":11202,"Ġmessaging":11203,"Ġcousin":11204,"ĠMarsh":11205,"ĠAlmost":11206,"uto":11207,"wire":11208,"ĠManaging":11209,"Ġsends":11210,"ĠDerby":11211,"Ġpad":11212,"Ġdevoted":11213,"ĠWorking":11214,"ĠWestminster":11215,"Ġdirty":11216,"ements":11217,"ĠLew":11218,"door":11219,"Ġadvisor":11220,"ival":11221,"Ġsubscribe":11222,"Ġcredited":11223,"Ġpressed":11224,"Ġbrick":11225,"Ġrehabilitation":11226,"Ġ\"[":11227,"erry":11228,"Ġtransformed":11229,"arp":11230,"Ġreceivers":11231,"ĠFan":11232,"ĠKris":11233,"ĠCharlottesville":11234,"Ġste":11235,"Ġconstructed":11236,"Ġbroadly":11237,"ĠBetter":11238,"ĠJanet":11239,"Ġenthusiasm":11240,"ĠIrving":11241,"ĠConst":11242,"Everyone":11243,"agn":11244,"ĠCrawford":11245,"Ġregards":11246,"ĠBurns":11247,"Ġjokes":11248,"erg":11249,"ARD":11250,"apped":11251,"Ġtravelled":11252,"ĠPoor":11253,"ĠHolly":11254,"Ġcontainer":11255,"Ġinfected":11256,"Ġlean":11257,"ĠWould":11258,"Ġmagnitude":11259,"ĠDou":11260,"minded":11261,"Ġpastor":11262,"Ġwherever":11263,"ulation":11264,"Ġ1986":11265,"ĠMegan":11266,"Ġgraphic":11267,"Ġtalents":11268,"Ġkn":11269,"ĠEC":11270,"ĠMcM":11271,"ĠKon":11272,"eni":11273,"ĠEsc":11274,"inas":11275,"ĠNom":11276,"Ġchasing":11277,"arl":11278,"ĠHungary":11279,"Ġmainland":11280,"ĠDist":11281,"utes":11282,"Ġrubber":11283,"iat":11284,"ĠMorrison":11285,"ushing":11286,"iny":11287,"Ġcopies":11288,"ĠFat":11289,"agged":11290,"Ġfloating":11291,"ĠCurtis":11292,"Ġfatally":11293,"ĠManuel":11294,"Ġgraduates":11295,"nar":11296,"ĠKenny":11297,"Ġretreat":11298,"Ġretro":11299,"ĠPierre":11300,"listed":11301,"ĠDale":11302,"ding":11303,"Ġintentions":11304,"Ġsentences":11305,"ĠSere":11306,"Ġinvasion":11307,"Ġpremiums":11308,"ĠGardner":11309
,"Ġshipments":11310,"Ġcol":11311,"bell":11312,"ilo":11313,"Ġworthy":11314,"Ġinterceptions":11315,"Ġcomplain":11316,"icle":11317,"ĠTah":11318,"ĠMt":11319,"ĠSyracuse":11320,"Since":11321,"aches":11322,"ĠCand":11323,"Ġinteractions":11324,"ĠShawn":11325,"nc":11326,"Ġtheaters":11327,"ART":11328,"Th":11329,"Ġalter":11330,"aley":11331,"imo":11332,"Ġresponders":11333,"kan":11334,"ĠDarren":11335,"Ġdeliveries":11336,"PI":11337,"125":11338,"Ġlaughing":11339,"ĠPatterson":11340,"Ġinfections":11341,"Ġtur":11342,"130":11343,"Ġhackers":11344,"Ġwarn":11345,"Ġfreeze":11346,"Ġscreaming":11347,"ĠEcho":11348,"ĠDom":11349,"MAN":11350,"ĠJoy":11351,"Ġbeneath":11352,"ĠHalf":11353,"Ġpatent":11354,"Ġugly":11355,"Ġlip":11356,"Ġnominees":11357,"ĠGrade":11358,"Ġinfluenced":11359,"Ġabilities":11360,"Ġlimiting":11361,"Ġsmell":11362,"Ġesc":11363,"ĠBernard":11364,"cs":11365,"ĠMyers":11366,"oted":11367,"Black":11368,"Ġlim":11369,"Ġsworn":11370,"ĠBlair":11371,"anes":11372,"ĠEvent":11373,"Ġmature":11374,"Ġpositioned":11375,"Ġerupted":11376,"grand":11377,"ĠTell":11378,"Ġbackdrop":11379,"Ġyeah":11380,"ĠClear":11381,"Ġsignificance":11382,"Ġpatience":11383,"ĠWing":11384,"Ġhorrible":11385,"Ġdeploy":11386,"ipe":11387,"Ġbitcoin":11388,"Ġcommitting":11389,"Ġdismiss":11390,"ĠBlood":11391,"ĠMeyer":11392,"selling":11393,"Ġregarded":11394,"Ġlottery":11395,"ĠLuther":11396,"Ġpipe":11397,"Ġcro":11398,"ĠANC":11399,"ĠSolar":11400,"Ġsimilarly":11401,"Ġham":11402,"ĠHonor":11403,"tar":11404,"gin":11405,"ĠArmstrong":11406,"Ġbrowser":11407,"agon":11408,"via":11409,"Ġentries":11410,"Ġinfl":11411,"Ġgraduation":11412,"Ġalleges":11413,"ĠLoading":11414,"Ġsuperb":11415,"ially":11416,"Ġadministrator":11417,"uls":11418,"Ġartistic":11419,"ĠANGEL":11420,"ĠBang":11421,"Ġfossil":11422,"¨":11423,"Ġpoly":11424,"ĠGuardiola":11425,"ĠPerth":11426,"Ġeducate":11427,"Cl":11428,"Ġcommittees":11429,"Ġforthcoming":11430,"Ġadjustments":11431,"count":11432,"Ġincoming":11433,"brook":11434,"ĠMinneapolis":11435,"Ġgown":11436,"ĠCroatia":11437,"host":11438,"Ġcompetitor":11439,"Ġlyrics":11440,"Ġbelonging":11441,"ĠFrances":11442,"ĠHaley":11443,"ĠBruins":11444,"Ġmask":11445,"ĠPv":11446,"dollar":11447,"Ġbowling":11448,"Ġjewelry":11449,"ĠJulia":11450,"Ġbroadband":11451,"ĠBhar":11452,"ĠArmed":11453,"vy":11454,"government":11455,"kov":11456,"Ġpremises":11457,"Ġjersey":11458,"Ġapplies":11459,"ĠFreeman":11460,"Ġgrows":11461,"ĠEquity":11462,"Ġmaterially":11463,"Ġfigured":11464,"ience":11465,"Ġmajors":11466,"ĠYe":11467,"ĠHey":11468,"oned":11469,"aping":11470,"Ġtoilet":11471,"ĠConnor":11472,"Ġavoiding":11473,"pos":11474,"Once":11475,"ĠRockets":11476,"ĠSnapchat":11477,"Go":11478,"Ġsolidarity":11479,"ĠAffordable":11480,"Ġdial":11481,"ĠOmar":11482,"xt":11483,"ĠVatican":11484,"anta":11485,"ĠSuperior":11486,"Ġbeaches":11487,"ĠKi":11488,"Ã¥":11489,"KY":11490,"Ġgro":11491,"ĠEmpire":11492,"Ġoccurs":11493,"Ġjoked":11494,"Ġquotes":11495,"ĠSaskatchewan":11496,"pert":11497,"Ġmaintains":11498,"olt":11499,"Ġupgrades":11500,"ĠCho":11501,"ĠAlexis":11502,"ĠHundreds":11503,"ĠBud":11504,"Ġcenturies":11505,"ĠInvestor":11506,"ĠGomez":11507,"Ġconceded":11508,"Ġexpressing":11509,"ĠIBM":11510,"Ġadvancing":11511,"ĠDollar":11512,"jer":11513,"Ġexceed":11514,"author":11515,"rist":11516,"seat":11517,"ĠPrimary":11518,"ĠForbes":11519,"ĠAlzheimer":11520,"Ġdevastated":11521,"Ġawful":11522,"ĠStudio":11523,"Ġbullpen":11524,"Ġmobility":11525,"Ġanalyze":11526,"lie":11527,"AFP":11528,"iche":11529,"ĠRoyals":11530,"Ġcoupled":11531,"Ġdug":11532,"ĠRing":11533,"Ġenvironments":11534,"national":11535,"ĠCongo":11536,"Ġalleging":11537,
"wn":11538,"ulating":11539,"Ġur":11540,"Ġreaches":11541,"ĠPine":11542,"Ġthreshold":11543,"Ġtournaments":11544,"Ġheating":11545,"ĠGard":11546,"ĠHamas":11547,"Ġ«":11548,"ĠHolding":11549,"Ġpossibilities":11550,"ĠHassan":11551,"ĠMohammad":11552,"Ġoffenders":11553,"Ġautomated":11554,"Ġrealised":11555,"ouse":11556,"building":11557,"ĠDub":11558,"ĠGeneva":11559,"Ġfacial":11560,"ĠRestaurant":11561,"ĠNg":11562,"Ġtot":11563,"Ġgrace":11564,"ĠCP":11565,"Ġposter":11566,"hart":11567,"ĠNi":11568,"Ġreaff":11569,"Ġprov":11570,"Ġ111":11571,"ĠAid":11572,"Ġscrap":11573,"izers":11574,"ogen":11575,"Ġtissue":11576,"Ġvibrant":11577,"Ġrider":11578,"CD":11579,"ĠKitchen":11580,"Ġgenre":11581,"¬":11582,"depth":11583,"kind":11584,"Ġendorsed":11585,"Ġsimultaneously":11586,"Ġintern":11587,"ĠDrag":11588,"Ġembraced":11589,"Ġcounted":11590,"uj":11591,"ĠOg":11592,"Ġphysician":11593,"ĠIR":11594,"IST":11595,"ĠKir":11596,"Ġhacking":11597,"ĠSources":11598,"astic":11599,"growing":11600,"ĠWake":11601,"Ġhint":11602,"Ġcompiled":11603,"Ġreign":11604,"Ġcinema":11605,"Ġboosting":11606,"Ġaccommodation":11607,"ĠEuropa":11608,"Ġsubsidiaries":11609,"Ġclosures":11610,"ĠBil":11611,"ĠBou":11612,"wh":11613,"ĠAw":11614,"FT":11615,"hole":11616,"ĠNova":11617,"ĠNSW":11618,"Ġrap":11619,"Ġencourages":11620,"GR":11621,"ds":11622,"ĠMuk":11623,"ĠSurvey":11624,"ĠReagan":11625,"oning":11626,"Ġneighbouring":11627,"ĠMcCl":11628,"acht":11629,"Ġfinishes":11630,"ĠEsp":11631,"pat":11632,"Ġdestinations":11633,"ĠWagner":11634,"Ġconfronted":11635,"square":11636,"Ġpie":11637,"brand":11638,"hl":11639,"Ġabsent":11640,"Ġsurf":11641,"Ġrifle":11642,"ĠSS":11643,"ĠDeath":11644,"wich":11645,"Ġbeds":11646,"ĠLock":11647,"ĠAgu":11648,"atives":11649,"jee":11650,"Ġoral":11651,"Ġbudgets":11652,"Ġinspiring":11653,"IONS":11654,"works":11655,"Ġspirits":11656,"Ġcabin":11657,"Ġsatisfaction":11658,"Ġvoluntary":11659,"ĠMunicipal":11660,"Ġdeportation":11661,"ĠWriter":11662,"ĠVI":11663,"VERTISEMENT":11664,"/.":11665,"ĠSouthampton":11666,"aces":11667,"ĠHelen":11668,"ĠHum":11669,"110":11670,"Ġgarbage":11671,"through":11672,"Ġkingdom":11673,"MT":11674,"augh":11675,"Ġbizarre":11676,"ĠStarting":11677,"Ġwooden":11678,"ĠProgress":11679,"iron":11680,"sten":11681,"ĠSergio":11682,"ĠHR":11683,"Ġturnout":11684,"ĠAmericas":11685,"ĠSara":11686,"Ġagrees":11687,"apper":11688,"Ġbra":11689,"Ġrecycling":11690,"oom":11691,"Ġflee":11692,"Ġdistinct":11693,"IAL":11694,"aha":11695,"Ġfever":11696,"ĠPartnership":11697,"ĠYu":11698,"ĠPixel":11699,"ĠBlock":11700,"ĠMelissa":11701,"igg":11702,"Ġdecides":11703,"ĠNorman":11704,"Ġmas":11705,"held":11706,"ĠPD":11707,"Ġsheer":11708,"ĠDim":11709,"ĠCass":11710,"Ġcolumnist":11711,"ĠBros":11712,"Ġturnaround":11713,"ĠValue":11714,"ĠBachelor":11715,"awn":11716,"Ġassignment":11717,"ested":11718,"ĠJudiciary":11719,"Ġdiamond":11720,"Ġmus":11721,"Ġindigenous":11722,"lines":11723,"Ġ1984":11724,"igroup":11725,"ict":11726,"ĠJaguars":11727,"Ġlun":11728,"Ġprofiles":11729,"Ġcomputing":11730,"ĠBelgian":11731,"ĠLloyd":11732,"ĠGoing":11733,"Ġdisp":11734,"Ġ1987":11735,"eder":11736,"ĠVin":11737,"Ġgovern":11738,"Ġblend":11739,"ĠSebastian":11740,"ĠMidwest":11741,"iga":11742,"Ġspl":11743,"Ġtopping":11744,"Ġnetworking":11745,"ĠEmer":11746,"Ġoxygen":11747,"ĠInterest":11748,"ĠMoy":11749,"Ġtrader":11750,"Ġbay":11751,"Ġsticking":11752,"ĠMovement":11753,"Ġbidding":11754,"tax":11755,"Ġacademy":11756,"ĠMO":11757,"ĠSpirit":11758,"Ġhealing":11759,"wen":11760,"ĠPrix":11761,"cal":11762,"ĠOperating":11763,"Ġinstantly":11764,"ĠTonight":11765,"Ġsacked":11766,"Ġautomation":11767,"umps":11768,"ĠNey":11769,"March"
:11770,"ĠBuck":11771,"Ġconcentration":11772,"Here":11773,"Ġtravelers":11774,"Ġprotective":11775,"ĠMoody":11776,"Ġentrepreneur":11777,"Ġfac":11778,"kowski":11779,"Ġpreparations":11780,"Ġdominate":11781,"Ġspray":11782,"Ġdisturbing":11783,"ĠFraser":11784,"ĠCody":11785,"ashi":11786,"ĠPel":11787,"Ġrisky":11788,"Ġawkward":11789,"ĠVA":11790,"ails":11791,"Ġangle":11792,"Ġundergoing":11793,"Ġalbums":11794,"Ġafterwards":11795,"ĠNaw":11796,"uge":11797,"enter":11798,"ĠSussex":11799,"ĠRecently":11800,"Ġlikelihood":11801,"large":11802,"Ġsnaps":11803,"ibr":11804,"ĠMalcolm":11805,"Ġcru":11806,"Ġaltogether":11807,"Ġsetup":11808,"Ġtorture":11809,"Ġfiber":11810,"Ġquarterbacks":11811,"ĠGetting":11812,"ipping":11813,"ĠNorwegian":11814,"ĠMiles":11815,"ĠArnold":11816,"ĠDisease":11817,"Ġtends":11818,"ife":11819,"ĠCaroline":11820,"Ġnavigate":11821,"Ġbrush":11822,"ĠAssociates":11823,"Ġbath":11824,"ĠCenters":11825,"ĠMC":11826,"Ġtaxpayer":11827,"comp":11828,"Ġaccomplish":11829,"ĠTraffic":11830,"ĠBru":11831,"Ġgreenhouse":11832,"ĠMalaysian":11833,"ĠPur":11834,"ased":11835,"ĠKnicks":11836,"aters":11837,"Ġalt":11838,"ICK":11839,"Ġcalculations":11840,"Ġmindset":11841,"unch":11842,"Ġgu":11843,"Ġsteadily":11844,"Ġfiction":11845,"ĠPap":11846,"forming":11847,"ĠActor":11848,"ĠBerry":11849,"imp":11850,"ĠUpper":11851,"Ġassessed":11852,"Ġlawn":11853,"ĠRoh":11854,"Ġclearance":11855,"funded":11856,"Ġpret":11857,"ĠHom":11858,"VS":11859,"ĠTourism":11860,"ĠRy":11861,"ĠGonz":11862,"ĠStudios":11863,"Ġanchor":11864,"Ġrecognise":11865,"Ġcooperate":11866,"enny":11867,"aza":11868,"ĠMeet":11869,"Ġeventual":11870,"SW":11871,"ĠCounsel":11872,"ĠSave":11873,"Ġlucrative":11874,"Ġslim":11875,"ĠGreens":11876,"Ġchemistry":11877,"ĠSheikh":11878,"Ġbridges":11879,"business":11880,"ĠSaf":11881,"ĠGy":11882,"Ġprotocol":11883,"Ġnephew":11884,"ĠBrands":11885,"ĠCulture":11886,"orship":11887,"Ġ(£":11888,"ĠDell":11889,"astics":11890,"Ġproving":11891,"ĠMann":11892,"aca":11893,"Ġindoor":11894,"ĠUganda":11895,"ĠRomney":11896,"ĠStage":11897,"Ġward":11898,"ĠAmber":11899,"haw":11900,"Ġtw":11901,"Ġbullying":11902,"ĠCAR":11903,"Ġassociates":11904,"ĠHopkins":11905,"Ġsuburb":11906,"Ġaggressively":11907,"Ġpostponed":11908,"Ġbas":11909,"Ġburglary":11910,"ĠFound":11911,"Ġfloors":11912,"Any":11913,"Ġjam":11914,"Ġvisibility":11915,"Ġbenefited":11916,"ĠAud":11917,"aying":11918,"iku":11919,"ĠPas":11920,"ĠGPS":11921,"ĠOwens":11922,"Ġreluctant":11923,"ĠOlivia":11924,"ols":11925,"Ġemotion":11926,"ĠHeavy":11927,"Ġhostile":11928,"Ġfavorites":11929,"Ġfeat":11930,"ĠCord":11931,"ĠGO":11932,"Ġindicted":11933,"idal":11934,"ĠIL":11935,"Ħ":11936,"acer":11937,"ICH":11938,"oda":11939,"Ġrecipients":11940,"Ġtribal":11941,"Ġresist":11942,"ĠCritics":11943,"Ġsang":11944,"ĠMath":11945,"ĠBrighton":11946,"ĠKw":11947,"Ġlimitations":11948,"Ġinterception":11949,"onde":11950,"ĠRobertson":11951,"Ġenjoys":11952,"site":11953,"Ġwings":11954,"ĠCeltic":11955,"Ġrelaxed":11956,"Share":11957,"Ġwarrants":11958,"oco":11959,"Ġcritically":11960,"GC":11961,"Ġcute":11962,"Ġlaying":11963,"itude":11964,"ĠMediterranean":11965,"Ġwatches":11966,"Ġdisagree":11967,"ĠReturn":11968,"ARC":11969,"people":11970,"Ġtwelve":11971,"Ġoverdose":11972,"ĠLot":11973,"ĠFROM":11974,"ĠPeters":11975,"Ġadministrators":11976,"Ġslam":11977,"jar":11978,"OH":11979,"ĠInitiative":11980,"Ġteamed":11981,"ĠMajority":11982,"June":11983,"ĠPlaza":11984,"lake":11985,"Ġglimpse":11986,"Ġrings":11987,"Ġos":11988,"Ġmentor":11989,"have":11990,"Ġlanguages":11991,"Ġuncle":11992,"agu":11993,"ĠWine":11994,"ĠCategory":11995,"ĠIng":11996,"Ġcontests":11997,"ĠRosen":11
998,"ĠWhatever":11999,"Ġdenying":12000,"ean":12001,"Ġspec":12002,"Ġgrad":12003,"Ġtenants":12004,"show":12005,"ĠGregory":12006,"Ġcontention":12007,"Ġunanimously":12008,"ĠPin":12009,"fa":12010,"ĠPink":12011,"Ġswitched":12012,"acre":12013,"ĠTrading":12014,"VP":12015,"ĠMaple":12016,"Neill":12017,"Ġdiscounts":12018,"alls":12019,"Ġsounded":12020,"Ġrumours":12021,"ĠCre":12022,"hall":12023,"ĠTele":12024,"Ġthankful":12025,"Ġsurveyed":12026,"UB":12027,"Ġdignity":12028,"Ġnod":12029,"Ġmisleading":12030,"ĠTX":12031,"ĠBurke":12032,"Ġmounting":12033,"Ġskies":12034,"Ġbesides":12035,"ĠGarrett":12036,"tha":12037,"Ġintelligent":12038,"Ġtanks":12039,"apping":12040,"ĠRat":12041,"aint":12042,"Ġentertain":12043,"ĠAbdullah":12044,"Ġsink":12045,"ĠLan":12046,"ĠManufacturing":12047,"NFL":12048,"Ġthemes":12049,"ĠHaven":12050,"ĠDavies":12051,"ĠKerr":12052,"ĠLen":12053,"Ġcourtroom":12054,"Ġfailures":12055,"Ġlately":12056,"ĠElectronics":12057,"Ġgorgeous":12058,"Ġnotification":12059,"Ġ2030":12060,"aved":12061,"Ġdeer":12062,"economic":12063,"ĠStatistics":12064,"Ġconfrontation":12065,"Ġgovernors":12066,"ĠHaram":12067,"ĠLGBTQ":12068,"Ġprocessed":12069,"ĠDuchess":12070,"Ġdowns":12071,"Ġpork":12072,"Ġhumor":12073,"ocese":12074,"Ġneeding":12075,"Ġmidterm":12076,"ĠOval":12077,"Ġcorners":12078,"Ġtablets":12079,"eds":12080,"vere":12081,"Ġattacker":12082,"Paul":12083,"pee":12084,"ĠAlice":12085,"Ġrenowned":12086,"Ġ09":12087,"ocking":12088,"Ġcreditors":12089,"ĠPedro":12090,"ĠPhone":12091,"Ġsurveys":12092,"ĠWelsh":12093,"Ġcow":12094,"Ġbuilds":12095,"Ġ000":12096,"ĠAzerbaijan":12097,"ĠYad":12098,"Ġinfant":12099,"Ġmotorists":12100,"Ġpoorly":12101,"Ġmedications":12102,"Ġstupid":12103,"ĠCastro":12104,"user":12105,"antly":12106,"alty":12107,"ĠCond":12108,"issa":12109,"ĠIvan":12110,"Ġcostume":12111,"Ġ08":12112,"Ġhence":12113,"Ġdangers":12114,"Ġbullish":12115,"Life":12116,"Ġflavor":12117,"ĠCharleston":12118,"Ġbikes":12119,"Ġworkshops":12120,"Ġarranged":12121,"Ġcontender":12122,"Ġsequel":12123,"ĠPlant":12124,"Ġdonor":12125,"Ġfactories":12126,"rict":12127,"ellen":12128,"Ġrobots":12129,"ĠWor":12130,"ĠDirectors":12131,"ĠPeru":12132,"Ġqueen":12133,"ĠTimothy":12134,"ĠToo":12135,"Ġobservers":12136,"Ġears":12137,"Ġbel":12138,"link":12139,"uns":12140,"Ġhomers":12141,"Ġadjacent":12142,"Ġconfidential":12143,"Ġstunned":12144,"iden":12145,"illed":12146,"ESS":12147,"Ġconvenient":12148,"ĠLindsey":12149,"por":12150,"upp":12151,"Ġborrow":12152,"ĠAhmad":12153,"ORT":12154,"Ġrelate":12155,"ĠSelf":12156,"ĠVanguard":12157,"utter":12158,"ĠBranch":12159,"ĠBolton":12160,"bat":12161,"Ġoutright":12162,"fighters":12163,"ĠBed":12164,"Ġpes":12165,"inski":12166,"Ġgunshot":12167,"Ġprinting":12168,"ĠSent":12169,"vern":12170,"Ġharvest":12171,"Ġbubble":12172,"Ġrefund":12173,"Ġfuels":12174,"Ġdive":12175,"Ġdiplomat":12176,"Ġpile":12177,"ĠVery":12178,"rot":12179,"ĠSearch":12180,"ĠJoyce":12181,"ĠPruitt":12182,"ĠLevel":12183,"ĠBP":12184,"ĠLac":12185,"had":12186,"Ġexpenditure":12187,"ĠMadd":12188,"Ġpockets":12189,"ĠClippers":12190,"ĠDear":12191,"ĠGive":12192,"Ġhal":12193,"Ġvertical":12194,"Ġwholesale":12195,"what":12196,"ĠSpringfield":12197,"ayed":12198,"ĠSom":12199,"Ġsecrets":12200,"Ġcharts":12201,"iar":12202,"ibility":12203,"LAND":12204,"Ġbearing":12205,"Ġprom":12206,"Ġtab":12207,"Ġsheets":12208,"ĠGL":12209,"Ġendless":12210,"opening":12211,"ĠOwen":12212,"Ġunderneath":12213,"ĠErik":12214,"ĠDACA":12215,"Ġsteering":12216,"Ġfootprint":12217,"ĠRoma":12218,"ĠDucks":12219,"ĠEllen":12220,"ĠProfessional":12221,"ĠGardens":12222,"Ġgoalie":12223,"Ġshine":12224,"Ġturmoil":12225,"Ġhunger":12226
,"ĠâĢĭ":12227,"active":12228,"hey":12229,"Ġblessed":12230,"ason":12231,"oping":12232,"ĠThousands":12233,"Ġdose":12234,"ĠLor":12235,"Ġevolved":12236,"Ġcharities":12237,"ĠPE":12238,"ĠRub":12239,"ws":12240,"Ġmist":12241,"ĠShen":12242,"Ġbiological":12243,"ĠTweet":12244,"Ġcollections":12245,"Ġsubstantially":12246,"inner":12247,"Ġbattled":12248,"ĠCong":12249,"Hold":12250,"wp":12251,"Ġwells":12252,"Ġsake":12253,"Ġunrest":12254,"ĠKurt":12255,"Ġripped":12256,"itation":12257,"Ġneighbourhood":12258,"Ġinv":12259,"Ġcad":12260,"ĠCuban":12261,"ĠWealth":12262,"Ġtuition":12263,"Ġdeclaring":12264,"sch":12265,"orne":12266,"Ġwondered":12267,"ĠChaff":12268,"Ġdealer":12269,"ĠNumber":12270,"Mobile":12271,"Ġscratch":12272,"Ġprepares":12273,"ĠSens":12274,"ĠIstanbul":12275,"ĠPanama":12276,"ĠCay":12277,"Ġallocation":12278,"itutional":12279,"Ġhar":12280,"ĠNazi":12281,"ĠSund":12282,"Ġwarehouse":12283,"Ġbackyard":12284,"ĠIll":12285,"Ġunlawful":12286,"ĠReform":12287,"Ġbasement":12288,"ĠHi":12289,"ĠPictures":12290,"Ġtransfers":12291,"ĠSell":12292,"Ġfluid":12293,"Ġambitions":12294,"wife":12295,"Ġintensive":12296,"Ġsteals":12297,"Ġfestive":12298,"ĠHayes":12299,"Ġrestoration":12300,"Ġbranded":12301,"Journal":12302,"Ġmacro":12303,"Ġconsole":12304,"ĠMelania":12305,"ĠRahul":12306,"Ġdisposal":12307,"Ġcult":12308,"Ġpetrol":12309,"Ġtires":12310,"Ġkidnapping":12311,"Ġ115":12312,"Ġswap":12313,"ĠSud":12314,"Ġblown":12315,"ĠHindu":12316,"ĠBeckham":12317,"ĠGul":12318,"Ġfixture":12319,"Ġwisdom":12320,"Ġmines":12321,"fort":12322,"Ġrivers":12323,"ĠCyber":12324,"Ġtouches":12325,"race":12326,"Ġrelax":12327,"Ġcrashes":12328,"Ġconstituency":12329,"Ġ1979":12330,"Ġbureau":12331,"Ġinterface":12332,"Ġdetected":12333,"ĠBio":12334,"Ġhighlighting":12335,"ames":12336,"Ġcorresponding":12337,"great":12338,"Ġgray":12339,"Ġadvantages":12340,"ĠME":12341,"ĠAbbas":12342,"Ġnaked":12343,"rington":12344,".),":12345,"ĠFace":12346,"third":12347,"Ġtranscript":12348,"ples":12349,"Good":12350,"ĠArctic":12351,"Ġtolerance":12352,"reat":12353,"green":12354,"ĠMik":12355,"Ġoutreach":12356,"Ġrolls":12357,"Ġgen":12358,"Ġsupplied":12359,"Ġguarantees":12360,"aug":12361,"Ġsemif":12362,"ounds":12363,"running":12364,"Ġfitting":12365,"ĠRisk":12366,"iveness":12367,"family":12368,"Ġti":12369,"ĠIsaac":12370,"Ġdump":12371,"ĠPatricia":12372,"Ġpassport":12373,"ĠRhode":12374,"Who":12375,"log":12376,"Ġstat":12377,"Ġrat":12378,"ango":12379,"SB":12380,"ĠMaur":12381,"Ġsmiling":12382,"Ġstrikeouts":12383,"Ġpupils":12384,"Ġcomplications":12385,"ĠAdvanced":12386,"ĠMonetary":12387,"ĠTall":12388,"ĠALL":12389,"Ġcontributor":12390,"ĠAdvertising":12391,"Ġhorrific":12392,"Ġcompeted":12393,"ĠKenneth":12394,"Ġhailed":12395,"Ġbones":12396,"Ġbolster":12397,"ĠBoss":12398,"Ġhospitalized":12399,"ĠTelegraph":12400,"ĠIndependence":12401,"Ġdr":12402,"ĠHang":12403,"Ġdocumented":12404,"Ġsubtle":12405,"invest":12406,"Ġbounced":12407,"ĠMAN":12408,"Ġprofession":12409,"Ń":12410,"Ġexcellence":12411,"ĠInspector":12412,"ĠBL":12413,"Ġdisrupt":12414,"ĠWinston":12415,"ĠCommunist":12416,"ĠSharon":12417,"Ġmechanical":12418,"Ġtreats":12419,"Ġdesperately":12420,"ĠIndy":12421,"ĠGi":12422,"ĠComposite":12423,"ĠHeath":12424,"aser":12425,"ĠCardiff":12426,"ilit":12427,"Ġeased":12428,"Ġprospective":12429,"Ġcommissioned":12430,"Ġtire":12431,"Ġalign":12432,"Ġgesture":12433,"Ġweakened":12434,"URE":12435,"SN":12436,"Ġnationals":12437,"Ġrelies":12438,"ĠIRS":12439,"ĠCount":12440,"Ġmedicines":12441,"Ġcongress":12442,"Ġstranger":12443,"Qu":12444,"lessly":12445,"ĠQueens":12446,"ĠAlleg":12447,"uing":12448,"ĠWy":12449,"ĠMiguel":12450,"idi":1
2451,"Ġcivic":12452,"ĠPetro":12453,"endo":12454,"Obviously":12455,"Ġreflection":12456,"ĠStop":12457,"ĠFitzgerald":12458,"placed":12459,"shore":12460,"Ġcorrectly":12461,"ĠNE":12462,"amy":12463,"ĠCT":12464,"some":12465,"ĠMb":12466,"oi":12467,"ĠHogan":12468,"ĠInnovation":12469,"ĠVilla":12470,"ĠCAN":12471,"ĠCemetery":12472,"into":12473,"Ġquestionable":12474,"Ġcreator":12475,"rug":12476,"Ġsemifinals":12477,"mission":12478,"Ġcle":12479,"ĠWaters":12480,"ĠNixon":12481,"ĠBT":12482,"Ġassuming":12483,"ĠJer":12484,"ĠClay":12485,"pack":12486,"ĠCool":12487,"may":12488,"Ġdecor":12489,"Ġspike":12490,"ĠSomalia":12491,"ĠKarn":12492,"ĠDamascus":12493,"Shares":12494,"Ġsus":12495,"ĠMoss":12496,"Ġ1985":12497,"Ġsuperintendent":12498,"ĠResults":12499,"Ġspends":12500,"prom":12501,"Ġshipped":12502,"Ġlaundering":12503,"ĠLeslie":12504,"Ġmeteor":12505,"Ġabandon":12506,"Ġdeliberately":12507,"ĠSentinel":12508,"Ġfascinating":12509,"Ġenrollment":12510,"ĠExperts":12511,"ĠSimilarly":12512,"ĠCuomo":12513,"bor":12514,"Ġune":12515,"neutral":12516,"Ġhamstring":12517,"Ġnegotiated":12518,"zes":12519,"ĠLeo":12520,"ĠDoctor":12521,"Ġcurriculum":12522,"ĠFocus":12523,"Ġtravels":12524,"Ġbeverage":12525,"ĠIncluding":12526,"tz":12527,"type":12528,"ĠRange":12529,"Ġfloods":12530,"Ġcoached":12531,"Ġdominance":12532,"letico":12533,"ĠRafael":12534,"Ġpredictions":12535,"Ġprosperity":12536,"ĠCav":12537,"Ġclinics":12538,"ĠBanking":12539,"ĠComing":12540,"ears":12541,"ĠKaepernick":12542,"ĠBlvd":12543,"Ġretained":12544,"isions":12545,"Ġko":12546,"Ġensemble":12547,"Ġprecise":12548,"Ġcompact":12549,"MD":12550,"ĠJet":12551,"ached":12552,"ĠTru":12553,"ĠBass":12554,"ĠIcon":12555,"Ġexcluding":12556,"sur":12557,"Ġconstruct":12558,"Ġvoiced":12559,"pan":12560,"Ġinability":12561,"Ġexc":12562,"Ġmate":12563,"Ġtrailing":12564,"Ġsuccessive":12565,"Ġbets":12566,"Ġgauge":12567,"Ġminorities":12568,"ĠIND":12569,"ĠVel":12570,"ĠGP":12571,"oid":12572,"bon":12573,"Ġpred":12574,"Ġdash":12575,"Ġperformer":12576,"Ġoccasional":12577,"aken":12578,"mes":12579,"America":12580,"Ġliver":12581,"Sp":12582,"Big":12583,"Ġwildfires":12584,"ĠJackie":12585,"ĠLed":12586,"ĠFinland":12587,"Ġjurors":12588,"olic":12589,"urance":12590,"ĠEdge":12591,"open":12592,"Ġscenarios":12593,"Ġglory":12594,"entry":12595,"ĠCoffee":12596,"rep":12597,"ĠChand":12598,"ĠVas":12599,"ĠIslamabad":12600,"Ġbur":12601,"ĠFle":12602,"ĠEdition":12603,"Ġshoe":12604,"ï¸ı":12605,"**":12606,"tle":12607,"ĠEb":12608,"keeping":12609,"ĠBasketball":12610,"ĠVon":12611,"ĠCF":12612,"MENT":12613,"amm":12614,"ĠFernando":12615,"Ġcompares":12616,"ĠDouble":12617,"Ġconvictions":12618,"Ġatop":12619,"Ġcops":12620,"Ġremembers":12621,"Ġlacking":12622,"dom":12623,"itate":12624,"ĠBeauty":12625,"Ġdevelops":12626,"ĠGor":12627,"Ġfunctional":12628,"ĠCOUNTY":12629,"ĠUpon":12630,"Ġsprint":12631,"Ġinjection":12632,"Ġminors":12633,"ĠTamil":12634,"ĠGat":12635,"101":12636,"ety":12637,"Ġdrum":12638,"Ġtasked":12639,"Ġpact":12640,"Ġ170":12641,"MR":12642,"ĠRamos":12643,"Ġcandy":12644,"Sc":12645,"iced":12646,"Ġsupermarket":12647,"Ġworrying":12648,"Ġsellers":12649,"ĠTag":12650,".:":12651,"Ġmixture":12652,"oting":12653,"Bl":12654,"ĠLl":12655,"ĠJal":12656,"ican":12657,"ĠBid":12658,"country":12659,"ĠStrategy":12660,"Ġadverse":12661,"Ġplunged":12662,"ĠMit":12663,"Ġstark":12664,"aton":12665,"Ġbooking":12666,"Tr":12667,"Ġcontainers":12668,"Ġvintage":12669,"ĠPit":12670,"Ġsurfaced":12671,"Ġindependently":12672,"Ġdetection":12673,"ĠBeyon":12674,"Ġcasualties":12675,"Ġstabbing":12676,"oved":12677,"Ġbarred":12678,"Ġthereby":12679,"Ġpartnered":12680,"Ġposing":12681,"ĠSha
nnon":12682,"ĠChapel":12683,"Ġtechnically":12684,"uous":12685,"»":12686,"ometer":12687,"Ġwildfire":12688,"share":12689,"heart":12690,"Ġammunition":12691,"Ġthrive":12692,"ĠStre":12693,"GP":12694,"cé":12695,"ĠMonaco":12696,"goal":12697,"ĠUm":12698,"ĠHSBC":12699,"ĠHilton":12700,"ĠViv":12701,"ĠKell":12702,"Ġdecisive":12703,"Ġmotive":12704,"amo":12705,"feld":12706,"ĠWH":12707,"iry":12708,"ulu":12709,"ĠSchneider":12710,"Ġcampaigning":12711,"Ġseparately":12712,"igo":12713,"ĠED":12714,"ĠRamirez":12715,"Ġmetro":12716,"ĠPatel":12717,"ĠChi":12718,"ĠAudi":12719,"Ġcharacteristics":12720,"Ġrestart":12721,"Ġkeyboard":12722,"ĠSD":12723,"his":12724,"biz":12725,"ĠSoft":12726,"ĠGrammy":12727,"Ġcontested":12728,"Ġweekends":12729,"Ġ112":12730,"Ġcycling":12731,"Ġhealthier":12732,"ija":12733,"Ġheader":12734,"Ġemploy":12735,"İ":12736,"Ġshortages":12737,"ĠAsk":12738,"ĠIvanka":12739,"Ġpartisan":12740,"Ġflowing":12741,"Ġcave":12742,"ENS":12743,"Ġups":12744,"read":12745,"ouch":12746,"Ġ102":12747,"Ġforming":12748,"bot":12749,"bie":12750,"Ġenrolled":12751,"Ġconcussion":12752,"Ġaffidavit":12753,"Ġmysterious":12754,"uries":12755,"ĠMang":12756,"Ġauthentic":12757,"Ġmetrics":12758,"ĠTwins":12759,"Ġprep":12760,"IJ":12761,"Ġdesired":12762,"ĠDiv":12763,"wall":12764,"ĠTab":12765,"Ġcompet":12766,"Ġrelied":12767,"Ġinequality":12768,"Ġmanual":12769,"ĠBucks":12770,"agging":12771,"Ġcorporation":12772,"Ġbanner":12773,"Ġgraphics":12774,"Ġaccurately":12775,"ĠMeeting":12776,"Ġconsult":12777,"ser":12778,"Ġprotesting":12779,"Ġhurting":12780,"omed":12781,"tes":12782,"Ġrode":12783,"Ġstartups":12784,"Ġhanding":12785,"ĠNest":12786,"Ġconsistency":12787,"anned":12788,"dem":12789,"ĠLyon":12790,"ĠCompetition":12791,"Ġtricky":12792,"Ġcos":12793,"ĠBengals":12794,"arry":12795,"Ġunderwent":12796,"ĠKit":12797,"à":12798,"uploads":12799,"Ġskate":12800,"Ġ''":12801,"Ġjun":12802,"ĠContent":12803,"focused":12804,"lat":12805,"ĠExp":12806,"ought":12807,"Ġnightmare":12808,"ĠExpect":12809,"Ġprecisely":12810,"ĠMonica":12811,"Ġlobbying":12812,"ĠChester":12813,"ĠInvest":12814,"Former":12815,"Ġimminent":12816,"ĠNL":12817,"Ġcomparing":12818,"ĠChes":12819,"ede":12820,"ĠNobel":12821,"mers":12822,"ĠKin":12823,"ĠBoko":12824,"ount":12825,"Ġthoroughly":12826,"Ġscattered":12827,"sharing":12828,"markets":12829,"ĠMis":12830,"Ġambition":12831,"Ġpreference":12832,"Ġeffectiveness":12833,"rio":12834,"Ġheavyweight":12835,"Ġovert":12836,"anya":12837,"ĠKanye":12838,"ishi":12839,"Ġrewards":12840,"uled":12841,"bach":12842,"Ġemphasized":12843,"Ġapologize":12844,"ĠRecent":12845,"!!":12846,"Ġanimated":12847,"ĠExxon":12848,"Ġfruits":12849,"Ġstripped":12850,"fold":12851,"ĠIndonesian":12852,"ller":12853,"Ġdementia":12854,"Ġkidney":12855,"Ġhalted":12856,"years":12857,"Ġconcerts":12858,"Ġrefers":12859,"ĠFri":12860,"Your":12861,"irl":12862,"Ġleap":12863,"jud":12864,"ĠHugh":12865,"ĠFO":12866,"Ġsore":12867,"Ġkil":12868,"ĠMate":12869,"cci":12870,"Ġsetback":12871,"Ġtightening":12872,"keeper":12873,"ĠAlbany":12874,"Ġpolicymakers":12875,"Ġdisorders":12876,"ĠCBC":12877,"ĠDiaz":12878,"Ġmaps":12879,"Ġroutinely":12880,"Ġverify":12881,"Ġbash":12882,"ĠJinping":12883,"Ġdisasters":12884,"ĠMonroe":12885,"ĠLouise":12886,"JP":12887,"ĠNevertheless":12888,"Ġconcessions":12889,"ĠPog":12890,"going":12891,"ĠFifth":12892,"ĠJill":12893,"ICT":12894,"ĠFM":12895,"ĠSugar":12896,"ĠBarb":12897,"Ġmidway":12898,"Ġtin":12899,"ĠPic":12900,"ĠPL":12901,"Ġleaks":12902,"Ġgrief":12903,"Ġtattoo":12904,"`":12905,"Ġment":12906,"ĠNu":12907,"Ġmarry":12908,"Ġdiving":12909,"Ġ1982":12910,"Ġcoin":12911,"ĠPoc":12912,"Ġstarred":12913,"ĠRivers
ide":12914,"Ġsidelined":12915,"Ġminers":12916,"STON":12917,"Ġbelongs":12918,"ĠSantos":12919,"ĠTechnical":12920,"aco":12921,"Ġadvise":12922,"Ġstreams":12923,"Ġcooler":12924,"ĠHE":12925,"Ġordering":12926,"ĠTask":12927,"ĠACT":12928,"ĠAnton":12929,"Ġcertification":12930,"ĠLeafs":12931,"ĠTS":12932,"ĠSerbia":12933,"azi":12934,"inks":12935,"ĠEST":12936,"Ġrelay":12937,"°":12938,"Ġdisappearance":12939,"ĠRomania":12940,"Ġoven":12941,"Ġowed":12942,"ĠStrip":12943,"ulated":12944,"UC":12945,"ITE":12946,"bling":12947,"Then":12948,"ppy":12949,"Ġunlimited":12950,"Ġcalories":12951,"Ġmerchandise":12952,"Ġblonde":12953,"ĠSpicer":12954,"performing":12955,"Ġimpl":12956,"Ġplates":12957,"Ġmosque":12958,"Ġdemon":12959,"Ġought":12960,"Ġdumped":12961,"Ġtracked":12962,"even":12963,"Ġstabil":12964,"imet":12965,"ĠLiga":12966,"ugh":12967,"ther":12968,"agar":12969,"Ġarchitect":12970,"Ġallocated":12971,"ĠJoey":12972,"Ġmarathon":12973,"master":12974,"ĠBert":12975,"Ġast":12976,"ĠEbola":12977,"ĠConservation":12978,"nic":12979,"Ġparallel":12980,"Ġinmate":12981,"Ġlocate":12982,"Ġdistribute":12983,"guard":12984,"Ġtackling":12985,"ential":12986,"Ġvi":12987,"Ġcups":12988,"Ġrhythm":12989,"Ġendured":12990,"ĠHub":12991,"ois":12992,"ĠLiberals":12993,"ĠRedskins":12994,"ĠEP":12995,"ĠKnox":12996,"fr":12997,"Ġmassacre":12998,"oka":12999,"Ġcompl":13000,"raft":13001,"ĠPublished":13002,"Ġattraction":13003,"ĠStephens":13004,"ility":13005,"ĠPul":13006,"ĠCapt":13007,"Ġexploded":13008,"Ġexceeded":13009,"lying":13010,"Ġcal":13011,"Mart":13012,"Ġpaintings":13013,"inate":13014,"ĠBrendan":13015,"Ġfortune":13016,"onductor":13017,"Ġphysicians":13018,"ĠStudy":13019,"ĠBul":13020,"ĠModern":13021,"HD":13022,"ĠBour":13023,"Ġtying":13024,"Ġ1967":13025,"Ġlighter":13026,"Ġtoss":13027,"inspired":13028,"Ġgreeted":13029,"Ġcycl":13030,"Ġverified":13031,"Ġmerit":13032,"sign":13033,"lder":13034,"Ġdebts":13035,"ĠSnyder":13036,"Ġamendments":13037,"Ġindicators":13038,"ĠDortmund":13039,"then":13040,"ĠListen":13041,"ĠFB":13042,"ref":13043,"ĠIoT":13044,"ĠBrewers":13045,"ĠLeadership":13046,"ĠNicolas":13047,"ĠBody":13048,"Ġsam":13049,"ĠAdvisor":13050,"Ġcord":13051,"Ġabuses":13052,"ĠPortuguese":13053,"Ġflown":13054,"VR":13055,"Ġconsumed":13056,"Ġreass":13057,"Ġalien":13058,"Ġrivalry":13059,"ĠREPORT":13060,"ĠRush":13061,"Ġdirecting":13062,"Ġsearches":13063,"ĠHP":13064,"ĠRoll":13065,"ĠFay":13066,"ĠClare":13067,"Ġhaul":13068,"Ġriot":13069,"Ġsettlements":13070,"Ġnorm":13071,"Ġaccelerated":13072,"ĠLok":13073,"Ġclever":13074,"Ġhyd":13075,"Ġstats":13076,"ĠHull":13077,"kers":13078,"Ġbuys":13079,"uter":13080,"Ġfue":13081,"https":13082,"UD":13083,"Ġisolation":13084,"Ġsuspend":13085,"ĠRules":13086,"ĠCircle":13087,"ĠHopefully":13088,"played":13089,"âĢ³":13090,"ĠPRE":13091,"sim":13092,"edd":13093,"ĠProperties":13094,"Ġbeans":13095,"Ġrevive":13096,"ĠBir":13097,"oug":13098,"Ġmob":13099,"Ġshowdown":13100,"iman":13101,"Ġpap":13102,"Ġvol":13103,"wu":13104,"Ġdiver":13105,"Ġpill":13106,"ĠMarlins":13107,"ĠLamar":13108,"Ġpersistent":13109,"Ġcondolences":13110,"ĠThor":13111,"Ab":13112,"Ġimpress":13113,"ĠRaptors":13114,"Ġreferences":13115,"Ġstiff":13116,"ĠBash":13117,"eding":13118,"Ġmurders":13119,"ĠGene":13120,"ĠManila":13121,"Ġbrokers":13122,"Ms":13123,"start":13124,"ĠDhabi":13125,"etz":13126,"Ġsubmission":13127,"ĠSchmidt":13128,"ĠPersonal":13129,"ĠBeverly":13130,"ĠMovie":13131,"ĠLamb":13132,"Ġplacement":13133,"Ġfolk":13134,"Ġfrequency":13135,"Ġplanted":13136,"Ġtwins":13137,"prov":13138,"rec":13139,"Ġpermanently":13140,"Ġcoordination":13141,"ĠCart":13142,"Ġobstacles":13143,"Ġliterature":13144,"Ġ
tu":13145,"Ġchill":13146,"ĠReserved":13147,"Ġlovers":13148,"ĠOutside":13149,"Ġslideshow":13150,"ĠGru":13151,"Ġty":13152,"Ġsalad":13153,"Ġlaboratory":13154,"ĠHolt":13155,"Ġ103":13156,"urb":13157,"ĠOrganisation":13158,"ĠAndrews":13159,"Ġrecipient":13160,"arch":13161,"Ġbleeding":13162,"ĠPand":13163,"Ġoverturned":13164,"Ġlistened":13165,"Ġclause":13166,"Ġnationalist":13167,"Ġresumed":13168,"ĠCout":13169,"ĠPride":13170,"Ġlayers":13171,"ĠBella":13172,"Ġreversed":13173,"Ġpriest":13174,"ĠFX":13175,"Ġalbeit":13176,"Ġhalfway":13177,"Ġcotton":13178,"ĠCarey":13179,"ĠTE":13180,"OCK":13181,"Ġbuck":13182,"ributes":13183,"ea":13184,"Ġfancy":13185,"ĠBuc":13186,"Ġbans":13187,"uters":13188,"Ġliabilities":13189,"ĠSou":13190,"ĠBernie":13191,"Ġintervene":13192,"food":13193,"ĠNDP":13194,"Ġinsist":13195,"Ġcontracted":13196,"hawk":13197,"),\"":13198,"ĠDawn":13199,"Ġmol":13200,"Ġcommissioners":13201,"Ġstranded":13202,"Ġoverwhelmed":13203,"Ġrecipes":13204,"Ġva":13205,"Ġrad":13206,"Ġscare":13207,"rez":13208,"Ġeliminating":13209,"Ġresc":13210,"ĠBreak":13211,"chn":13212,"Ġdelight":13213,"iot":13214,"Ġfreely":13215,"TI":13216,"ĠBluetooth":13217,"ĠMonth":13218,"ĠFlor":13219,"ĠFreddie":13220,"Ġtrailed":13221,"Ġinvestigative":13222,"Ġimposing":13223,"Ġattracting":13224,"awk":13225,"ĠSherman":13226,"Ġsucceeded":13227,"Ġvent":13228,"Ġreconciliation":13229,"ĠCel":13230,"ĠThroughout":13231,"ĠDowntown":13232,"ĠBrother":13233,"Ġtraditions":13234,"Ġmir":13235,"Ġstamp":13236,"tery":13237,"etti":13238,"isch":13239,"tic":13240,"Ġbanning":13241,"loss":13242,"ĠSpeedway":13243,"Ġstalled":13244,"ĠEN":13245,"ASH":13246,"thing":13247,"ĠAppeals":13248,"rac":13249,"Ġdistress":13250,"ĠConservatives":13251,"ĠPremium":13252,"usa":13253,"Ġslump":13254,"imm":13255,"ĠSupp":13256,"ĠWong":13257,"Ġdistant":13258,"Ġ104":13259,"Ġtide":13260,"ĠNorfolk":13261,"ĠYang":13262,"Ġsmashed":13263,"ĠBarrett":13264,"inho":13265,"Ġrobbed":13266,"ĠFarmers":13267,"filled":13268,"BT":13269,"Ġautumn":13270,"Ġtemple":13271,"ĠJacobs":13272,"Ġprecipitation":13273,"ĠHours":13274,"ĠFlight":13275,"Ġbeside":13276,"ĠOre":13277,"!)":13278,"ĠTurnbull":13279,"Ġpig":13280,"Ġcooling":13281,"Ġservers":13282,"oriented":13283,"Ġlocks":13284,"ĠSears":13285,"aving":13286,"ĠQuick":13287,"ĠGlob":13288,"ĠMining":13289,"Ġhorizon":13290,"arians":13291,"ĠOm":13292,"writing":13293,"Ġbelieving":13294,"Ġbon":13295,"Ġmounted":13296,"Ġpunt":13297,"ucci":13298,"uzz":13299,"cul":13300,"Ġkiss":13301,"ĠOnt":13302,"ĠCyprus":13303,"Ġrelying":13304,"Ġpiano":13305,"Ġcure":13306,"Ġcontinuously":13307,"ĠNobody":13308,"ĠBund":13309,"osis":13310,"ĠAurora":13311,"ĠBach":13312,"ĠKendall":13313,"Ġechoed":13314,"iable":13315,"Ġconscious":13316,"Ġmonster":13317,"omo":13318,"proof":13319,"ĠNate":13320,"Ġfilmmaker":13321,"ĠNaj":13322,"Ġvendor":13323,"ĠFoot":13324,"ĠChang":13325,"ĠFest":13326,"Ġselfie":13327,"Ġenters":13328,"ĠConor":13329,"ĠMosul":13330,"ĠWHAT":13331,"Ġwa":13332,"ĠGamb":13333,"osta":13334,"Ġcautioned":13335,"ĠTucker":13336,"ĠAirways":13337,"Ġvisitor":13338,"Ġ·":13339,"ĠRevolution":13340,"aching":13341,"Ġearliest":13342,"ĠQuality":13343,"Ġshorts":13344,"ube":13345,"ĠOperation":13346,"ĠSabha":13347,"Ġstrengths":13348,"ikes":13349,"Ġsexy":13350,"Ġrot":13351,"ibles":13352,"Ġcolours":13353,"THE":13354,"ailed":13355,"Ġwoke":13356,"ĠEmbassy":13357,"Ġinfamous":13358,"rov":13359,"State":13360,"âĢ¦.":13361,"Ġpond":13362,"Ġcapt":13363,"fore":13364,"De":13365,"Ġedited":13366,"self":13367,"Hey":13368,"Ġportrait":13369,"ĠManufact":13370,"ĠStand":13371,"Ġcontenders":13372,"':":13373,"acker":13374,"Ġwithdrawn":1337
5,"ĠBraves":13376,"ĠHosp":13377,"changing":13378,"ĠBag":13379,"Ġadjustment":13380,"ĠCousins":13381,"ĠAAP":13382,"Ġfi":13383,"Ġoutdoors":13384,"Ġlacked":13385,"BM":13386,"ĠWHO":13387,"ĠPST":13388,"ĠLuck":13389,"Ġassisting":13390,"ĠGround":13391,"ĠTeen":13392,"ĠOle":13393,"Ġembarrassing":13394,"ĠWalt":13395,"ĠVision":13396,"ĠFal":13397,"ĠZoo":13398,"ĠWorth":13399,"ĠFloyd":13400,"ĠGujarat":13401,"Ġtipped":13402,"Ġfam":13403,"ĠDad":13404,"Ġworship":13405,"Ġtyre":13406,"Ġrebuilding":13407,"Ġqualities":13408,"ĠLives":13409,"Ġbeats":13410,"Ġ450":13411,"Ġexisted":13412,"ĠGeorg":13413,"Ġpoured":13414,"rows":13415,"ĠOx":13416,"ĠSid":13417,"Ġmac":13418,"Ġteaches":13419,"ĠEli":13420,"alla":13421,"Ġdownside":13422,"ĠBend":13423,"non":13424,"ĠArmenia":13425,"Ġcultures":13426,"ĠMae":13427,"Ġduration":13428,"ĠAthletics":13429,"Ġjuvenile":13430,"Ġlid":13431,"Ġbankers":13432,"Ġoverview":13433,"wy":13434,"Ġorbit":13435,"Vs":13436,"because":13437,"Ps":13438,"ĠFran":13439,"Ġtouring":13440,"Ġwary":13441,"Ġ106":13442,"Ġlaser":13443,"ĠVij":13444,"âĦ¢":13445,"Ġsurrender":13446,"press":13447,"rees":13448,"NO":13449,"ĠShortly":13450,"ĠKor":13451,"edu":13452,"Ġhatred":13453,"Ġtee":13454,"Ġfamously":13455,"Ġkeeper":13456,"ND":13457,"Ġreduces":13458,"HC":13459,"Ġhay":13460,"Ġunnamed":13461,"ĠTes":13462,"Ġattackers":13463,"ĠFew":13464,"ĠRichards":13465,"Ġ1968":13466,"Ġspeeches":13467,"Ġcybersecurity":13468,"ĠInfrastructure":13469,"Ġ07":13470,"ENCE":13471,"uties":13472,"Ġanxious":13473,"ĠGang":13474,"Ġannouncements":13475,"lette":13476,"oret":13477,"ĠRockies":13478,"ĠEmployees":13479,"ĠThrones":13480,"Ġhugely":13481,"Ġclin":13482,"ĠHob":13483,"Ġfraction":13484,"ĠOfficial":13485,"ĠMariners":13486,"ĠElse":13487,"Ġsanctuary":13488,"ĠPhotograph":13489,"Ġreopen":13490,"lf":13491,"hm":13492,"vest":13493,"Ġspeeding":13494,"Ġtooth":13495,"ĠShi":13496,"ĠTitle":13497,"ĠMes":13498,"ĠJobs":13499,"fair":13500,"ĠDanish":13501,"ĠMalik":13502,"Ġlaughed":13503,"Ġnavy":13504,"ĠActress":13505,"ĠWilliamson":13506,"overs":13507,"Ġreckless":13508,"Ġjo":13509,"otic":13510,"Ġassaulting":13511,"Ġpri":13512,"ĠPi":13513,"Ġlesser":13514,"Ġtit":13515,"Ġdat":13516,"Ġnail":13517,"ĠMarathon":13518,"ĠGren":13519,"ĠDol":13520,"Ġjointly":13521,"Ġamended":13522,"mine":13523,"ĠBashar":13524,"ĠHyundai":13525,"Ġuncovered":13526,"Ġeducated":13527,"atti":13528,"pres":13529,"ĠBRE":13530,"Ġya":13531,"Bank":13532,"odd":13533,"lit":13534,"ĠLinks":13535,"Ġswitching":13536,"itte":13537,"ĠSind":13538,"erved":13539,"Ġ**":13540,"Ġpositively":13541,"Ġfrankly":13542,"Ġrevenge":13543,"ĠTrinity":13544,"ĠCDC":13545,"Ġthreatens":13546,"Ġhammer":13547,"NET":13548,"ĠMut":13549,"Ġsy":13550,"Ġunidentified":13551,"icken":13552,"Ġdrills":13553,"Ġtense":13554,"Ġforeigners":13555,"OST":13556,"Ġethical":13557,"ĠDurham":13558,"ĠQual":13559,"Ġterritories":13560,"Ġid":13561,"hor":13562,"enders":13563,"Mc":13564,"OV":13565,"percent":13566,"Ġdom":13567,"Ġupward":13568,"Ġamb":13569,"Ġvisas":13570,"zan":13571,"Ãĥ":13572,"Ġundocumented":13573,"Ġsuburbs":13574,"Ġhydro":13575,"ĠJob":13576,"ĠAdelaide":13577,"oya":13578,"ĠSR":13579,"ĠMick":13580,"Ġconsolidation":13581,"Ġemotionally":13582,"ĠHop":13583,"Her":13584,"Ġloses":13585,"ĠMoto":13586,"eled":13587,"Ġregulated":13588,"ental":13589,"Ġencountered":13590,"Ġhop":13591,"ĠTrafford":13592,"Ġsticks":13593,"Ġveto":13594,"Ġexpose":13595,"Ġstretched":13596,"fin":13597,"inance":13598,"chair":13599,"ĠGareth":13600,"ĠPil":13601,"ĠHammond":13602,"Ġserial":13603,"omy":13604,"Ġcellphone":13605,"ĠClara":13606,"Ġreacted":13607,"ĠNic":13608,"ĠHomes":13609,"ĠBro
adcasting":13610,"ĠFut":13611,"ĠSupply":13612,"assing":13613,"ĠNewman":13614,"Ġcharitable":13615,"ĠClayton":13616,"Ġsovereignty":13617,"Ġconvincing":13618,"ĠPrincipal":13619,"ĠHigher":13620,"ĠCut":13621,"ĠCarrie":13622,"ĠSpot":13623,"Sometimes":13624,"ĠJar":13625,"ĠConsider":13626,"ieu":13627,"Ġrefinery":13628,"Ġbloody":13629,"wheel":13630,"Ġcryptocurrencies":13631,"Fund":13632,"ĠSunderland":13633,"ĠEvents":13634,"âĢĭ":13635,"Ġaccidentally":13636,"deep":13637,"Ġfranc":13638,"bec":13639,"ĠHartford":13640,"Ġstellar":13641,"wright":13642,"kick":13643,"UG":13644,"ĠBeast":13645,"Ġrefusal":13646,"ĠRoberto":13647,"ĠDixon":13648,"ĠDiane":13649,"name":13650,"asts":13651,"ĠCharter":13652,"Ġfueled":13653,"Ġcontents":13654,"Ġaccessing":13655,"Ġtroubles":13656,"Ġtops":13657,"Ġdebuted":13658,"icating":13659,"Ġinvestigator":13660,"Ġsubscribing":13661,"Ġcoordinated":13662,"ĠFil":13663,"six":13664,"teen":13665,"Ġwithdrew":13666,"ĠGilbert":13667,"Ġ1983":13668,"arsity":13669,"Ġimagination":13670,"Ġhandgun":13671,"ĠAlibaba":13672,"Ġbug":13673,"Ġ107":13674,"ĠCOMP":13675,"ĠSomething":13676,"Ġreliability":13677,"ĠFCC":13678,"ĠFowler":13679,"Ġsingled":13680,"nom":13681,"Ġknocking":13682,"Ġmeddling":13683,"Ġdetermining":13684,"reports":13685,"Ġshade":13686,"ĠSN":13687,"anto":13688,"Ġcomplaining":13689,"ĠNan":13690,"WS":13691,"Ġyoungsters":13692,"Il":13693,"ĠKaw":13694,"ĠProp":13695,"ĠCell":13696,"ĠHurricanes":13697,"Ġpublicity":13698,"ĠXin":13699,"rial":13700,"ICO":13701,"Ġsupervision":13702,"ĠSpotify":13703,"ĠNewport":13704,"Ġprince":13705,"anche":13706,"Ġsubscriber":13707,"ĠVic":13708,"ACT":13709,"ĠRaf":13710,"ĠActing":13711,"Ġcollusion":13712,"pet":13713,"isl":13714,"Ġcommerce":13715,"Health":13716,"ĠAbraham":13717,"pri":13718,"Ġlightweight":13719,"Ġinsurer":13720,"Like":13721,"Ġhelmet":13722,"Ġevac":13723,"look":13724,"ĠNaval":13725,"160":13726,"ĠFleet":13727,"vol":13728,"Ġexpired":13729,"ĠKlein":13730,"ĠEmmy":13731,"ABLE":13732,"ĠMorocco":13733,"ĠTrip":13734,"uted":13735,"Ġnos":13736,"ĠVista":13737,"mas":13738,"ĠRocky":13739,"ĠFlint":13740,"enberg":13741,"ĠBrow":13742,"Ġsignatures":13743,"Ġpolar":13744,"ajo":13745,"Ġendorsement":13746,"Ġreservations":13747,"LIN":13748,"anny":13749,"elli":13750,"last":13751,"Ġoversee":13752,"cm":13753,"ĠOilers":13754,"Are":13755,"Ġjudiciary":13756,"onte":13757,"ĠTrack":13758,"Ġsupervisor":13759,"erk":13760,"isher":13761,"Ġintact":13762,"Ġslid":13763,"icals":13764,"paid":13765,"ĠMAR":13766,"lement":13767,"ĠLiu":13768,"ĠLarge":13769,"ĠWings":13770,"pect":13771,"ĠRum":13772,"Ġanalyzed":13773,"Ġemploys":13774,"arte":13775,"ims":13776,"ĠEventually":13777,"Ġaffiliated":13778,"Ġhospitality":13779,"ĠSprint":13780,"Ġresolutions":13781,"Ġliquor":13782,"ĠNAFTA":13783,"ANY":13784,"Ġradiation":13785,"ĠProv":13786,"Ġpause":13787,"ĠTMZ":13788,"Ġelbow":13789,"Ġresilience":13790,"ĠParents":13791,"mus":13792,"ĠSafe":13793,"Ġinterpretation":13794,"Ġraced":13795,"IND":13796,"KR":13797,"Ġhinted":13798,"ĠErin":13799,"ĠBahrain":13800,"Ġcredentials":13801,"eless":13802,"Ġprocurement":13803,"ĠWebb":13804,"ĠLowe":13805,"ĠNak":13806,"ĠLearning":13807,"zh":13808,"Ġdipped":13809,"ĠSuite":13810,"Ġmisdemeanor":13811,"ALE":13812,"Ġstrengthened":13813,"ĠSophie":13814,"Ġconfirms":13815,"Ġrac":13816,"gey":13817,"Ġshootout":13818,"Ġble":13819,"Ġcircles":13820,"ĠChef":13821,"Ġcomprised":13822,"ĠSantiago":13823,"Ġfeud":13824,"beat":13825,"Ġstaffers":13826,"Ġacute":13827,"ski":13828,"Ġpolled":13829,"ĠKur":13830,"ĠJen":13831,"ĠUltimately":13832,"anded":13833,"ĠHoney":13834,"Ġannounces":13835,"Ġamateur":13836,"around"
:13837,"Ġfunctioning":13838,"group":13839,"ĠSqu":13840,"Where":13841,"Ġvoid":13842,"ĠSandra":13843,"isers":13844,"Ġhelicopters":13845,"ĠGym":13846,"ĠWol":13847,"mouth":13848,"Ġsubjected":13849,"ici":13850,"ually":13851,"ĠWash":13852,"ĠLindsay":13853,"ĠVers":13854,"Ġjumps":13855,"Ġneglect":13856,"ĠKuwait":13857,"fund":13858,"ĭ":13859,"ather":13860,"lly":13861,"ei":13862,"Although":13863,".''":13864,"Ġunhappy":13865,"Ġpills":13866,"Ġmagical":13867,"Ġdro":13868,"Ġinviting":13869,"ĠJohnston":13870,"oving":13871,"450":13872,"ĠMerc":13873,"Ġadmitting":13874,"Ġinsisting":13875,"ĠCru":13876,"ĠResource":13877,"oir":13878,"Ġcomplexity":13879,"ĠRoth":13880,"ĠCher":13881,"July":13882,"raf":13883,"Ġaggregate":13884,"Ġhelm":13885,"uclear":13886,"olan":13887,"Ġoffenses":13888,"ĠWolves":13889,"ĠFu":13890,"ĠPierce":13891,"Ġemailed":13892,"ĠStra":13893,"Ġpedestrians":13894,"ĠER":13895,"ĠConway":13896,"Ġblowing":13897,"CLOSE":13898,"hab":13899,"ĠGreene":13900,"Ġconfessed":13901,"ĠTorres":13902,"ĠHolocaust":13903,"Ġrepay":13904,"Ġdemonstrates":13905,"ĠPool":13906,"gent":13907,"Ġdeleted":13908,"Ġ$$":13909,"ĠSO":13910,"Ġdri":13911,"ĠNeg":13912,"ĠVP":13913,"ĠPF":13914,"ĠPrep":13915,"Ġorganizing":13916,"icker":13917,"Ġmanufactured":13918,"enson":13919,"adas":13920,"Ġwines":13921,"Ġmachinery":13922,"Ġspecialists":13923,"ĠDetective":13924,"ĠDL":13925,"Op":13926,"Ġquicker":13927,"ĠPenguins":13928,"Engine":13929,"zone":13930,"Ġsequence":13931,"ĠLost":13932,"Ġwarmer":13933,"ĠEthiopia":13934,"Ġaffirmed":13935,"fest":13936,"resses":13937,"Ġsoap":13938,"Ġbooth":13939,"Ġnotorious":13940,"amin":13941,"Ġpursued":13942,"ĠCer":13943,"ĠSB":13944,"Ġlivestock":13945,"Ġtrace":13946,"Ġrespects":13947,"arden":13948,"April":13949,"Ġ128":13950,"ĠSaid":13951,"ennial":13952,"Ġnamely":13953,"ĠBot":13954,"Ġ108":13955,"ĠLem":13956,"nell":13957,"Ġconfirming":13958,"Ġlogged":13959,"Ġprofound":13960,"elo":13961,"ĠChambers":13962,"RT":13963,"Ġnewer":13964,"Ġsideline":13965,"ĠCardinal":13966,"este":13967,"Ġnarrowly":13968,"Ġcompromised":13969,"Ġpolicing":13970,"Ġporn":13971,"Ġarc":13972,"Ġlearnt":13973,"INE":13974,"step":13975,"ĠDomin":13976,"Ġwaist":13977,"Ġboycott":13978,"mitted":13979,"iffs":13980,"ground":13981,"ĠMaterials":13982,"Ġceasefire":13983,"Right":13984,"ĠZen":13985,"estyle":13986,"Thank":13987,"ĠOnePlus":13988,"ĠMLS":13989,"Ġconstituents":13990,"oster":13991,"ĠProsecutor":13992,"Ġpriorit":13993,"ĠDebbie":13994,"ĠExpand":13995,"uv":13996,"Ġintegrate":13997,"Ġimmun":13998,"Ġdisciplinary":13999,"ĠImm":14000,"Ġja":14001,"Ġgardens":14002,"ĠHim":14003,"obe":14004,"Ġhitter":14005,"Ġbullets":14006,"Ġevolving":14007,"ĠScientists":14008,"Michael":14009,"ĠDO":14010,"Ġunbelievable":14011,"Ġlooming":14012,"Ġdownturn":14013,"Ġmentality":14014,"Ġreopened":14015,"Ġash":14016,"ĠChapman":14017,"Ġloop":14018,"ĠUT":14019,"ĠTier":14020,"Ġunaware":14021,"Ġgratitude":14022,"Ġperforms":14023,"olk":14024,"Ġ\"(":14025,"Ġlacks":14026,"Ġinstructed":14027,"ĠRecreation":14028,"sample":14029,"Ġrequesting":14030,"Canada":14031,"Ġsupposedly":14032,"ĠHardy":14033,"Ġholder":14034,"change":14035,"ĠDominic":14036,"ĠXavier":14037,"Ġlig":14038,"Ġcandid":14039,"ĠRab":14040,"Ġconferences":14041,"ĠBurton":14042,"Dr":14043,"Ġmunicipalities":14044,"Ġcrushed":14045,"Ġseekers":14046,"ĠCitizens":14047,"Ġheightened":14048,"ĠCasino":14049,"Ġdesktop":14050,"Ġwhoever":14051,"ĠImpact":14052,"Ġcocktail":14053,"Ġphilanthrop":14054,"ĠSAN":14055,"ĠPreston":14056,"Ġobesity":14057,"Ġrestrict":14058,"ĠKab":14059,"ĠProvidence":14060,"Ġscar":14061,"ĠChart":14062,"Ġbosses":14063,"ĠRate":1406
4,"Ġsav":14065,"pay":14066,"Ġtransplant":14067,"ĠNoble":14068,"child":14069,"Ġconclusions":14070,"FI":14071,"Ġsack":14072,"Ġexperimental":14073,"holder":14074,"oca":14075,"herty":14076,"ĠMT":14077,"Ġcatcher":14078,"LY":14079,"Ġgrams":14080,"reet":14081,"Ġadaptation":14082,"Ġhumble":14083,"Ġbot":14084,"Ġidentical":14085,"ication":14086,"ifer":14087,"ĠCrow":14088,"Ġregain":14089,"ĠLightning":14090,"Ġkg":14091,"Ġcomposed":14092,"Ġcorrespondent":14093,"Ġreunion":14094,"Ġobserve":14095,"Ġcomprising":14096,"Ġimpeachment":14097,"Ġresh":14098,"Ġlemon":14099,"ĠSnap":14100,"Ġproprietary":14101,"een":14102,"ourt":14103,"Ġdetective":14104,"Ġlabels":14105,"Ġcorridor":14106,"ĠClinic":14107,"Ġarra":14108,"ĠPearl":14109,"Ġinformal":14110,"ĠUnd":14111,"ĠVenezuelan":14112,"Ġpeninsula":14113,"Ġdefeating":14114,"Ġsyndrome":14115,"iere":14116,"Ġspite":14117,"bag":14118,"aran":14119,"Ġspecialized":14120,"ĠAA":14121,"ĠLyn":14122,"Ġinstrumental":14123,"Smith":14124,"Ġpivotal":14125,"Ġnightclub":14126,"ĠCob":14127,"Ġcolorful":14128,"Ġartwork":14129,"Ġ1981":14130,"Ġdawn":14131,"erville":14132,"uated":14133,"ief":14134,"Ġlinking":14135,"ĠOw":14136,"Ġappreci":14137,"Ġreductions":14138,"elling":14139,"Ġsalmon":14140,"bb":14141,"ĠPhillip":14142,"yle":14143,"Ġassure":14144,"Ġdiscretion":14145,"Ġefficiently":14146,"ĠMau":14147,"abil":14148,"Ġintentionally":14149,"Ġactivated":14150,"Ġimmense":14151,"ĠStrategic":14152,"Ġcheating":14153,"ĠTrend":14154,"ĠSamantha":14155,"Ġcomple":14156,"Ġhack":14157,"ĠSerie":14158,"ĠText":14159,"Ġstylish":14160,"ĠFaith":14161,"ĠGST":14162,"Ġexterior":14163,"Ġblessing":14164,"Ġblanket":14165,"Ġcooked":14166,"Ġretaliation":14167,"Ġtro":14168,"Ġshelves":14169,"rose":14170,"ĠGram":14171,"Ġsho":14172,"ĠArgentine":14173,"Ġclerk":14174,"specific":14175,"Ġagreeing":14176,"Ġstandout":14177,"black":14178,"Ġtrending":14179,"Ġviolate":14180,"Get":14181,"ño":14182,"ĠOpt":14183,"ĠFrankfurt":14184,"ĠFranco":14185,"eness":14186,"Ġlining":14187,"Ġzoo":14188,"oil":14189,"lia":14190,"rab":14191,"Ġorganize":14192,"Ġwoods":14193,"Ġscan":14194,"Ġurgency":14195,"Ġoccurring":14196,"Ġreliance":14197,"Ġconcepts":14198,"Ġeligibility":14199,"0000":14200,"ĠBrief":14201,"Ġabusive":14202,"ĠBench":14203,"Ġrub":14204,"ĠDil":14205,"Ġmount":14206,"Ġmaturity":14207,"ĠNut":14208,"nee":14209,"enc":14210,"Ġgunfire":14211,"ĠKill":14212,"Ġgates":14213,"Ġflower":14214,"iol":14215,"Ġshaped":14216,"Ġundoubtedly":14217,"Ġbackgrounds":14218,"ĠComplex":14219,"\":{\"":14220,"Ġnaming":14221,"Ġmonument":14222,"Ġoh":14223,"Ġembedded":14224,"Ġbang":14225,"ĠKro":14226,"Ġaggression":14227,"ĠMits":14228,"During":14229,"ĠEp":14230,"iners":14231,"ĠAnaheim":14232,"Ġrom":14233,"Ġoutgoing":14234,"Ġfulfill":14235,"Ġreminds":14236,"Ġren":14237,"à¤":14238,"ĠSue":14239,"Ġrefresh":14240,"Ġlif":14241,"Ġfil":14242,"ĠLead":14243,"Ġregulate":14244,"ĠTeachers":14245,"Ġclarify":14246,"obs":14247,"Ġblasted":14248,"ĠAx":14249,"Ġflavors":14250,"Ġmega":14251,"Ġhurdles":14252,"Ġinspector":14253,"ĠSalvador":14254,"Ġprescribed":14255,"Ġrenovation":14256,"OUR":14257,"Ġutil":14258,"ĠBradford":14259,"Ġwasted":14260,"Ġlineman":14261,"Ġpalm":14262,"icate":14263,"Ġoverseeing":14264,"otted":14265,"ĠRapids":14266,"Ġjustified":14267,"aby":14268,"Ġextends":14269,"Ġoath":14270,"bow":14271,"ĠRivera":14272,"Jan":14273,"ĠImran":14274,"Ġforests":14275,"ĠShel":14276,"ĠBrun":14277,"Ġaerial":14278,"ĠNOW":14279,"PAR":14280,"Ġbeverages":14281,"ettel":14282,"Ġfragile":14283,"Ġcodes":14284,"Į":14285,"abel":14286,"Watch":14287,"road":14288,"Ġdismissal":14289,"ĠRosa":14290,"Ġcrunch":14291,"²"
:14292,"Ġinnovations":14293,"Ġhabitat":14294,"Ġforefront":14295,"ĠKoch":14296,"ĠChevrolet":14297,"Ġwheelchair":14298,"Ġconsiderably":14299,"Ġexpenditures":14300,"Ġtexts":14301,"Ġprompt":14302,"Ġskating":14303,"Ġpetroleum":14304,"ĠICC":14305,"Ġvit":14306,"fit":14307,"Ġprolonged":14308,"ĠLucy":14309,"Ġcho":14310,"Ġrocked":14311,"ĠBrom":14312,"Ġfreed":14313,"Ġyours":14314,"ĠEden":14315,"Ġmonitored":14316,"asted":14317,"Ġoversees":14318,"ieri":14319,"Ġideology":14320,"ĠFine":14321,"tering":14322,"Top":14323,"Ġdamp":14324,"uta":14325,"Ġlethal":14326,"Ġpurple":14327,"udge":14328,"ĠChemical":14329,"ĠPetersburg":14330,"Ġwarns":14331,"Ġcollectively":14332,"Ġâ":14333,"Ġplaintiffs":14334,"ĠBoris":14335,"Ġsheep":14336,"oves":14337,"ĠAuthor":14338,"Ġcampuses":14339,"Ġdestroying":14340,"Ġgloves":14341,"Ġcease":14342,"Ġdelegates":14343,"Ġpreceded":14344,"realDonaldTrump":14345,"Ġforwards":14346,"erton":14347,"ĠBuzzFeed":14348,"Ġoccupation":14349,"ĠLegion":14350,"Ġstir":14351,"Ġshale":14352,"Ġterrific":14353,"Ġnewborn":14354,"Ġstandoff":14355,"OWN":14356,"Ġmuscles":14357,"ĠHerman":14358,"ĠLiz":14359,"ĠExperience":14360,"ĠSuccess":14361,"ĠHispanic":14362,"ĠCCTV":14363,"Ġcomplement":14364,"ĠBing":14365,"Ġprem":14366,"ĠJohannes":14367,"Ġdent":14368,"itar":14369,"ĠHein":14370,"ĠNicola":14371,"Ġconcludes":14372,"ĠKhal":14373,"Ġparish":14374,"Ġshaking":14375,"ĠSchw":14376,"mod":14377,"ĠLil":14378,"ña":14379,"ĠBog":14380,"ĠFight":14381,"Ġgre":14382,"Ġfel":14383,"Ġheal":14384,"err":14385,"TM":14386,"airo":14387,"health":14388,"Ġswings":14389,"Ġtier":14390,"anka":14391,"ribune":14392,"emouth":14393,"ĠBloom":14394,"Ġowing":14395,"Tech":14396,"Ġdough":14397,"Ġbatch":14398,"ĠLion":14399,"ĠZamb":14400,"Ġcrashing":14401,"ĠXL":14402,"ppers":14403,"ĠDoctors":14404,"ĠSor":14405,"video":14406,"Ġcigarettes":14407,"ĠBoxing":14408,"Ġconstitute":14409,"Ġconcentrate":14410,"ĠArmenian":14411,"Ġsemester":14412,"position":14413,"emic":14414,"ĠNYC":14415,"ĠCampus":14416,"Ġalternate":14417,"Ġexped":14418,"Ġpublishers":14419,"2015":14420,"Ġunanimous":14421,"ĠPrevious":14422,"Ġwellness":14423,"ĠCreative":14424,"edy":14425,"AGE":14426,"ĠCavs":14427,"Ġ1978":14428,"Ġfu":14429,"ĠTata":14430,"ĠChoice":14431,"Ġwoes":14432,"ĠCable":14433,"Ġ~":14434,"ĠGem":14435,"Ġconsolidated":14436,"ĠManitoba":14437,"Cloud":14438,"Ġrounded":14439,"ĠVentura":14440,"Ġshark":14441,"Ġdresses":14442,"Ġtraction":14443,"eda":14444,"Ġdiv":14445,"Ġdental":14446,"Wh":14447,"ĠGig":14448,"ĠBoyd":14449,"ĠTransit":14450,"Ġtelevised":14451,"SON":14452,"ĠVince":14453,"Ġcloses":14454,"apt":14455,"ĠWheeler":14456,"ĠTyson":14457,"Ġforensic":14458,"Ġpunished":14459,"Ġseas":14460,"Ġnavigation":14461,"Ġprecedent":14462,"Ġextremist":14463,"Ġcomposite":14464,"PO":14465,"Ġsurvivor":14466,"ĠVale":14467,"gars":14468,"HT":14469,"ĠRiyadh":14470,"Ġrevival":14471,"ĠPayne":14472,"Ġcollaborative":14473,"ĠCustomers":14474,"ĠPf":14475,"Ġproves":14476,"erve":14477,"Ġelev":14478,"ĠPaper":14479,"Ġchore":14480,"Ġthriller":14481,"Ġstraw":14482,"cock":14483,"Gu":14484,"Ġaligned":14485,"ĠChronicle":14486,"Ġshouting":14487,"Ġ1976":14488,"Ġlightning":14489,"Ġworlds":14490,"ĠOpening":14491,"enton":14492,"ĠAna":14493,"ĠGol":14494,"ĠTechn":14495,"lis":14496,"Ġorientation":14497,"ĠArri":14498,"ĠPG":14499,"ross":14500,"Ġsank":14501,"LOS":14502,"ĠAllison":14503,"Ġsmiles":14504,"USD":14505,"Ġkits":14506,"Bar":14507,"ĠBri":14508,"Ġounces":14509,"ĠNielsen":14510,"eno":14511,"Ġ109":14512,"Ġnorms":14513,"Ġskip":14514,"180":14515,"Ġmonitors":14516,"2012":14517,"Ġincorporate":14518,"Ġmechanisms":14519,"ĠHack":1452
0,"ĠBomb":14521,"ĠGavin":14522,"ĠNatalie":14523,"Ġdiscusses":14524,"Ġassembled":14525,"Ġcognitive":14526,"owner":14527,"Ġgenuinely":14528,"Ġdisappear":14529,"ĠAK":14530,"Ġstal":14531,"Ġsoup":14532,"ĠFinn":14533,"Ġcares":14534,"Ġfinest":14535,"Ġtuned":14536,"ende":14537,"ĠStefan":14538,"Ġaccompanying":14539,"î":14540,"Maybe":14541,"Ġoffender":14542,"TT":14543,"Ġ212":14544,"Ġvolleyball":14545,"needed":14546,"Ġquo":14547,"Ġdim":14548,"ĠHistorical":14549,"ĠLance":14550,"gmail":14551,"ĠGate":14552,"Ġdemonstrators":14553,"Ġdy":14554,"cia":14555,"ĠSteele":14556,"ĠJoan":14557,"ĠKerala":14558,"KA":14559,"ĠElectoral":14560,"Ġpaths":14561,"ø":14562,"Ne":14563,"Ġaccepts":14564,"Ġlowering":14565,"Ġportions":14566,"ĠValencia":14567,"Ġfestivals":14568,"Ġgeneric":14569,"usk":14570,"ĠVernon":14571,"ĠOrioles":14572,"Ġrenewal":14573,"Ġbelonged":14574,"Ġbreathe":14575,"Ġ220":14576,"Ġrecruited":14577,"Ġlogic":14578,"Ġrecreation":14579,"Ġverbal":14580,"ĠHaz":14581,"double":14582,"Ġfavourites":14583,"Ġfundamentals":14584,"ĠSoc":14585,"360":14586,"SO":14587,"Ġalerted":14588,"Ġbriefed":14589,"ĠBruno":14590,"Ġseating":14591,"Ġfreight":14592,"ĠAmer":14593,"Ġwished":14594,"table":14595,"growth":14596,"ĠWent":14597,"Ġhilarious":14598,"Ġthroat":14599,"bet":14600,"gon":14601,"Ġample":14602,"hee":14603,"ĠHood":14604,"ĠIceland":14605,"ĠAnkara":14606,"iang":14607,"Ġpracticing":14608,"azer":14609,"Ġleaf":14610,"Ġhottest":14611,"Ġmarginal":14612,"Ġrevelations":14613,"ĠPrices":14614,"ĠLar":14615,"times":14616,"Ġhandles":14617,"ĠNaz":14618,"Ġinstitute":14619,"Ġtranslate":14620,"ĠJP":14621,"Ġsoared":14622,"Ġconsume":14623,"ĠTap":14624,"ĠCelebrity":14625,"ĠMayweather":14626,"ĠOracle":14627,"Ġmor":14628,"ANA":14629,"Ġpaperwork":14630,"aste":14631,"Ġdil":14632,"Ġdecorated":14633,"Ġpromotional":14634,"ĠMerrill":14635,"Ġappliances":14636,"ĠCOP":14637,"Ġlips":14638,"ĠBrennan":14639,"ĠMile":14640,"ĠNetworks":14641,"ĠComment":14642,"ĠIb":14643,"ĠAgg":14644,"IDE":14645,"Ġinitiate":14646,"Ġknockout":14647,"Ġbargain":14648,"Ġaccordingly":14649,"bee":14650,"ĠGerald":14651,"Ġproblematic":14652,"Ġtrap":14653,"Ġfinalists":14654,"addy":14655,"would":14656,"Ġstrictly":14657,"ĠRamsey":14658,"Ġdownward":14659,"Ġextract":14660,"Ġfamed":14661,"ĠOUT":14662,"Ġinduct":14663,"ĠAuckland":14664,"Ġpoetry":14665,"mos":14666,"ĠGuinea":14667,"management":14668,"ohan":14669,"ĠGuide":14670,"aily":14671,"umping":14672,"Ġenacted":14673,"ĠEye":14674,"vision":14675,"umi":14676,"aped":14677,"Ġbicycle":14678,"ĠHouth":14679,"ĠNAS":14680,"Ġtapped":14681,"wer":14682,"otti":14683,"EA":14684,"Ġsurprises":14685,"ĠUpdate":14686,"ĠPun":14687,"ĠMiz":14688,"ĠOro":14689,"Ġcostumes":14690,"title":14691,"Ġsurviving":14692,"According":14693,"themed":14694,"ĠPeoples":14695,"Se":14696,"Ġassociations":14697,"hett":14698,"Time":14699,"Ġessay":14700,"Ġmu":14701,"ĠScore":14702,"ĠSpani":14703,"ĠSEE":14704,"Ġmales":14705,"Ġrage":14706,"EU":14707,"ĠYellow":14708,"rupt":14709,"Ġapparel":14710,"Ġsweat":14711,"Ġnearest":14712,"zman":14713,"Ġanticipation":14714,"Ġinjuring":14715,"Ġousted":14716,"chan":14717,"ĠAlert":14718,"Ġber":14719,"atal":14720,"Com":14721,"Ġ04":14722,"Ġafterward":14723,"edge":14724,"ĠBooker":14725,"lex":14726,"ĠWhole":14727,"Ġtoughest":14728,"ĠMaharashtra":14729,"lier":14730,"ĠTennis":14731,"Ġhandy":14732,"ĠMetal":14733,"ĠiTunes":14734,"ĠDiscovery":14735,"Ġcompassion":14736,"ĠLIVE":14737,"Ġeconomically":14738,"Ġendangered":14739,"GO":14740,"Ġmound":14741,"word":14742,"ĠTouch":14743,"ogo":14744,"Ġincomes":14745,"when":14746,"ĠAside":14747,"Ġscandals":14748,"Ġfunctionality":
14749,"ĠAer":14750,"Ġcouncils":14751,"Ġdenial":14752,"140":14753,"Ġimplied":14754,"Ġoutfits":14755,"Ġsuited":14756,"Ġ1973":14757,"ĠPizza":14758,"Ġdebates":14759,"record":14760,"Ġhype":14761,"ĠRus":14762,"ĠRobbie":14763,"Ġtouted":14764,"ĠSharp":14765,"Ġbeings":14766,"Ġslavery":14767,"encies":14768,"ĠRooney":14769,"Ġnan":14770,"Ġraids":14771,"Ġinstructor":14772,"Market":14773,"Ġshook":14774,"Ġdeliberate":14775,"ĠNorthwestern":14776,"ĠEss":14777,"Ġwhatsoever":14778,"ĠConfederate":14779,"YS":14780,"ĠCameroon":14781,"ĠFlip":14782,"Yeah":14783,"Ġwashing":14784,"mand":14785,"ĠLex":14786,"Ġissuance":14787,"Ġniche":14788,"Ġfold":14789,"ĠWendy":14790,"Ġhy":14791,"Ġbucket":14792,"ĠVW":14793,"ĠCairo":14794,"ĠSK":14795,"ĠKang":14796,"Ġintake":14797,"Ġhills":14798,"anz":14799,"©":14800,"ugu":14801,"ĠFortunately":14802,"ĠMarqu":14803,"Ġimprisonment":14804,"oking":14805,"Ġdistributors":14806,"zie":14807,"Ġstip":14808,"ĠWire":14809,"Ġcouncillors":14810,"Ġsue":14811,"ĠRegardless":14812,"ĠEnc":14813,"Ġbaking":14814,"ĠVenture":14815,"Ġintriguing":14816,"Ġupheld":14817,"ĠActive":14818,"Ġgenes":14819,"ĠDawson":14820,"ĠPreviously":14821,"ĠRac":14822,"Ġmetric":14823,"Files":14824,"ĠiPhones":14825,"ĠWelcome":14826,"Ġburns":14827,"ĠScreen":14828,"ashes":14829,"ĠApr":14830,"Ġtheories":14831,"san":14832,"ĠRenault":14833,"ĠSinger":14834,"Ġfounders":14835,"Russian":14836,"ĠBelfast":14837,"Ġimagined":14838,"ĠPlanet":14839,"ĠCatalan":14840,"ĠRochester":14841,"Ġevolve":14842,"ĠOT":14843,"Ġpassword":14844,"Ġhomelessness":14845,"Ġbacklog":14846,"Ġpresenter":14847,"Ġfal":14848,"ISH":14849,"ĠEM":14850,"icked":14851,"Ġunlock":14852,"city":14853,"Ġnegotiation":14854,"Ġdancers":14855,"dan":14856,"ĠCOL":14857,"VC":14858,"boat":14859,"Ġoverly":14860,"deal":14861,"lander":14862,"Ġdiss":14863,"ICS":14864,"Ġfifty":14865,"Ġowe":14866,"Ġprisons":14867,"ifications":14868,"wo":14869,"ĠAu":14870,"Ġapiece":14871,"ĠCourtney":14872,"Ġ1975":14873,"Ġsurpass":14874,"Ġidentities":14875,"Ġintegral":14876,"Ġdocumentation":14877,"Ġelegant":14878,"ĠIg":14879,"Ġdear":14880,"Ġ113":14881,"ĠGupta":14882,"Ġcontentious":14883,"rish":14884,"Ġclues":14885,"Ġadditions":14886,"Ġep":14887,"rus":14888,"Ġcentered":14889,"ĠPhillies":14890,"father":14891,"Ġborough":14892,"Ġbuttons":14893,"Ġdeported":14894,"ĠREC":14895,"ĠAlready":14896,"eh":14897,"hur":14898,"Ġupbeat":14899,"omen":14900,"Ġdetailing":14901,"Ġwr":14902,"Ġvaried":14903,"ĠEconomics":14904,"Ġensures":14905,"ĠCivic":14906,"Ġunpaid":14907,"sold":14908,"ĠHil":14909,"ĠMult":14910,"ĠRising":14911,"ĠMini":14912,"Ġneuro":14913,"Ġpenal":14914,"Ġneighbour":14915,"ĠChavez":14916,"Ġjew":14917,"ĠVIP":14918,"Connor":14919,"ĠTalking":14920,"Ġcorrection":14921,"Ġstandpoint":14922,"roads":14923,"ĠWool":14924,"Ġverification":14925,"Ġmic":14926,"olf":14927,"Ġexemption":14928,"Ġfilter":14929,"Ġballoon":14930,"leases":14931,"ician":14932,"ĠSpr":14933,"Ġtoe":14934,"Ġunconstitutional":14935,"Ġmanslaughter":14936,"Ġtossed":14937,"ĠMeg":14938,"ATIONS":14939,"ACK":14940,"ĠRouge":14941,"ĠHansen":14942,"ĠHook":14943,"Out":14944,"ĠHorse":14945,"ĠBath":14946,"ĠAlways":14947,"Ġincorporated":14948,"Ġconjunction":14949,"ĠFit":14950,"Ġexamining":14951,"Ġwallet":14952,"Ġensured":14953,"Ġacclaimed":14954,"ippers":14955,"Ġbeneficiaries":14956,"Ġunexpectedly":14957,"Ġexploit":14958,"ĠWillie":14959,"Ġcomb":14960,"ĠWalton":14961,"rica":14962,"icky":14963,"Ġate":14964,"ĠPadres":14965,"Ġrib":14966,"Ġsnacks":14967,"ĠFernandez":14968,"ĠMachine":14969,"ction":14970,"Ġillnesses":14971,"ĠHoffman":14972,"ĠSpaceX":14973,"Ġju":14974,"Ġswift":14975,"Ġem
bark":14976,"ĠRailway":14977,"Ġmeasuring":14978,"agers":14979,"arsh":14980,"Ġessence":14981,"angle":14982,"Ġolive":14983,"ĠCommander":14984,"iggs":14985,"Ġrewarded":14986,"Ġdispatched":14987,"Ġplayground":14988,"½":14989,"ĠProgramme":14990,"Ġstudios":14991,"Ġskeptical":14992,"ĠOlymp":14993,"ĠKeys":14994,"ĠSunshine":14995,"amba":14996,"ĠDonna":14997,"Ġlightly":14998,"Ġobtaining":14999,"Ġpoisoning":15000,"Ġaz":15001,"Ġ1972":15002,"Ġunconscious":15003,"ECT":15004,"Ġlied":15005,"ĠKaz":15006,"Ġ06":15007,"ĠMoving":15008,"Ġnum":15009,"oral":15010,"Ġassessments":15011,"Ġscholarships":15012,"Ġevacuate":15013,"ĠSunni":15014,"Ġquake":15015,"Ġfort":15016,"ques":15017,"ĠAlonso":15018,"Ġthread":15019,"Ġsqueeze":15020,"arat":15021,"oly":15022,"ĠAlphabet":15023,"uting":15024,"icio":15025,"ĠRetirement":15026,"ither":15027,"Ġasleep":15028,"Ġpairs":15029,"Ġmanufacture":15030,"ĠHazard":15031,"Ġsidewalk":15032,"Ġwears":15033,"ĠCraft":15034,"emen":15035,"ieth":15036,"Ġbypass":15037,"ĠLancaster":15038,"Ġflour":15039,"charge":15040,"ĠCLICK":15041,"Ġpotatoes":15042,"ĠKarachi":15043,"Ġvalley":15044,"Ġsights":15045,"Ġfallout":15046,"ords":15047,"BN":15048,"Ġsunshine":15049,"Ġundertaken":15050,"Ġcontestants":15051,"Ġaccomplishments":15052,"Ġconditioning":15053,"Ġcel":15054,"ĠHalifax":15055,"Ġaccent":15056,"***":15057,"Ġpitchers":15058,"Ġadopting":15059,"Ġjustices":15060,"Ġrip":15061,"ince":15062,"Ġelimination":15063,"Ġaerospace":15064,"ĠBeer":15065,"ĠBasin":15066,"Ġunwanted":15067,"goers":15068,"isco":15069,"ĠTwin":15070,"ĠDesert":15071,"rix":15072,"Ġdarkness":15073,"ĠDunn":15074,"City":15075,"pop":15076,"Ġ1969":15077,"ataka":15078,"Ġtal":15079,"Ġautism":15080,"ĠMcLaren":15081,"ĠUEFA":15082,"Ġclassrooms":15083,"ĠLeave":15084,"Americans":15085,"las":15086,"Ġqui":15087,"Ġundefeated":15088,"otto":15089,"ĠNRA":15090,"ĠPorsche":15091,"Ġnuts":15092,"oys":15093,"ĠMethodist":15094,"Ġatt":15095,"Ġtweeting":15096,"children":15097,"eller":15098,"Ġinquiries":15099,"Ġmillennials":15100,"ĠWembley":15101,"INS":15102,"Ġautopsy":15103,"ĠElon":15104,"ĠHicks":15105,"ugg":15106,"Ġwreck":15107,"ĠComcast":15108,"Ġstones":15109,"public":15110,"ĠKem":15111,"bedroom":15112,"ļ":15113,"itated":15114,"Ġsemic":15115,"uman":15116,"Cal":15117,"ANN":15118,"ĠGaz":15119,"Ġundisclosed":15120,"ĠPlanned":15121,"ĠYale":15122,"ĠIST":15123,"lies":15124,"ĠStanding":15125,"Ġrelieved":15126,"EO":15127,"Ġgraduating":15128,"park":15129,"ĠâĢķ":15130,"Ġpensions":15131,"rave":15132,"ĠWonder":15133,"AZ":15134,"Ġcosting":15135,"Ġeditors":15136,"Ġtotaled":15137,"Ġspacecraft":15138,"meter":15139,"Ġ02":15140,"ĠNikki":15141,"sworth":15142,"ĠCrit":15143,"asha":15144,"Ġknees":15145,"Ġhats":15146,"uity":15147,"ĠPanther":15148,"Ġtan":15149,"ĠBuzz":15150,"ĠGlad":15151,"ĠPleasant":15152,"SM":15153,"Ġtricks":15154,"Ġplac":15155,"ĠDanielle":15156,"Ġours":15157,"Ġwashed":15158,"haven":15159,"Ġdrain":15160,"ĠUttar":15161,"Ġapple":15162,"Ġjunk":15163,"Ġturkey":15164,"ĠDug":15165,"Ġdiplomacy":15166,"Ġempire":15167,"Ġpinch":15168,"Ġferry":15169,"ĠDustin":15170,"Ġ03":15171,"Ġelder":15172,"Everything":15173,"ĠProgressive":15174,"ution":15175,"VI":15176,"dam":15177,"Ġlever":15178,"ĠAustralians":15179,"Ġconsequence":15180,"itan":15181,"Ġcondemn":15182,"Ġneg":15183,"ĠOverview":15184,"Ġsuccesses":15185,"Ġprobable":15186,"ĠMirror":15187,"mor":15188,"verse":15189,"Ġevaluating":15190,"ĠBes":15191,"Ġimm":15192,"Ġharness":15193,"Ġresilient":15194,"ĠBuild":15195,"Ġstraightforward":15196,"ADE":15197,"Ġgrandparents":15198,"Ġmarched":15199,"ĠKiev":15200,"Ġchiefs":15201,"oha":15202,"Ġvest":15203,"kn
":15204,"enda":15205,"ĠSev":15206,"Ġbatters":15207,"ĠJos":15208,"ĠQue":15209,"ĠCourse":15210,"ĠCorner":15211,"ĠMess":15212,"Ġmourn":15213,"keepers":15214,"ĠRegina":15215,"Everybody":15216,"Ġtrajectory":15217,"Ġdefenseman":15218,"ĠArticles":15219,"Ġspur":15220,"ĠPhD":15221,"Ġpipes":15222,"Ġduck":15223,"Ġcombining":15224,"ĠHit":15225,"ĠGeorgetown":15226,"ĠBee":15227,"Cor":15228,"Ġcomposition":15229,"Ġconnects":15230,"ĠMARK":15231,"taker":15232,"Ġcertainty":15233,"Ġhefty":15234,"ĠHezbollah":15235,"ĠShip":15236,"Ġmalicious":15237,"AI":15238,"Ġbits":15239,"Ġstyl":15240,"Ġimpaired":15241,"ĠCBI":15242,"Despite":15243,"othe":15244,"ĠRyder":15245,"ĠAlf":15246,"ifa":15247,"Ind":15248,"Ġblaming":15249,"ĠToledo":15250,"EW":15251,"ĠEssex":15252,"iated":15253,"ĠAberdeen":15254,"ANCE":15255,"Ġpossess":15256,"Ġsuperhero":15257,"Ġoverhead":15258,"quet":15259,"ĠRicky":15260,"Ġdock":15261,"ĠTelecom":15262,"Ġshelf":15263,"³":15264,"Ġmaritime":15265,"Ġportrayed":15266,"ĠYesterday":15267,"Ġcollided":15268,"Ġcookies":15269,"ĠCul":15270,"Ġindexes":15271,"Ġnaval":15272,"oval":15273,"105":15274,"ĠWeber":15275,"chief":15276,"arma":15277,"ĠRey":15278,"Ġauditor":15279,"ĠMarion":15280,"ĠMartha":15281,"ĠSally":15282,"Ġsedan":15283,"ĠAlison":15284,"nce":15285,"Es":15286,"ĠParade":15287,"Ġpharmacy":15288,"ĠKre":15289,"loe":15290,"cks":15291,"Ġmitigate":15292,"Ġdesigning":15293,"Ġ2024":15294,"Ġportable":15295,"Ġimproves":15296,"ĠAMD":15297,"Ġexcluded":15298,"CON":15299,"ĠOscars":15300,"Ġfixtures":15301,"comb":15302,"ĠBerg":15303,"Ġbother":15304,"Ġboring":15305,"Ġobservation":15306,"ĠCad":15307,"Ġrecordings":15308,"ĠCultural":15309,"Ġweaken":15310,"Ġaccuse":15311,"ĠAbd":15312,"abor":15313,"115":15314,"uffle":15315,"Ġhighways":15316,"atham":15317,"empt":15318,"ĠDeer":15319,"ĠEDT":15320,"ĠWait":15321,"athan":15322,"Ġaccumulated":15323,"Ġguilt":15324,"Ġexempt":15325,"Ġdiluted":15326,"ĠJamal":15327,"Ġshit":15328,"cross":15329,"Ġeve":15330,"Ġshirts":15331,"Ġsatisfy":15332,"ĠPaulo":15333,"AH":15334,"sic":15335,"ĠChloe":15336,"ĠCities":15337,"ĠSwansea":15338,"Ġprecision":15339,"ĠTracy":15340,"ping":15341,"Ġcontinually":15342,"Ġdemographic":15343,"Ġcliff":15344,"Ġjaw":15345,"isted":15346,"ĠDevelop":15347,"ĠAJ":15348,"Ġaisle":15349,"ĠLionel":15350,"Ġpredominantly":15351,"Ġmel":15352,"Ġlifelong":15353,"hs":15354,"Ġshouted":15355,"lad":15356,"Ġdest":15357,"Ġpacks":15358,"ĠKath":15359,"ĠCruise":15360,"fired":15361,"oder":15362,"hua":15363,"Ġgoodbye":15364,"Ġinterfere":15365,"eca":15366,"Ġré":15367,"atum":15368,"itas":15369,"ĠLodge":15370,"ĠWald":15371,"Ġmidday":15372,"umble":15373,"asting":15374,"©":15375,"ĠLeg":15376,"ĠNepal":15377,"Ġchased":15378,"idge":15379,"Ġconv":15380,"Ġfraudulent":15381,"Ġopera":15382,"Ġshr":15383,"ĠUniverse":15384,"ĠJerome":15385,"Ġ1977":15386,"ĠDancing":15387,"ĠRS":15388,"±":15389,"eks":15390,"Ġchic":15391,"Ġpunish":15392,"Ġpropose":15393,"arin":15394,"ĠChop":15395,"ĠAhead":15396,"ĠGallagher":15397,"ĠBangkok":15398,"ĠShelby":15399,"ĠNS":15400,"Ġcheek":15401,"onia":15402,"Ġrelegation":15403,"ĠHind":15404,"ĠCory":15405,"Ġfingerprint":15406,"Ġstrive":15407,"Ġmm":15408,"igs":15409,"Ġholy":15410,"Ġfavored":15411,"ĠSomeone":15412,"ĠLatino":15413,"ĠPatt":15414,"Ġchallenger":15415,"ĠCotton":15416,"Sw":15417,"itten":15418,"ĠXI":15419,"ĠStat":15420,"ĠDIS":15421,"Ġautomakers":15422,"Ġevaluated":15423,"ĠArc":15424,"Ġpersuade":15425,"Af":15426,"Ġreunited":15427,"Ġabs":15428,"Ġbride":15429,"Ġpurely":15430,"uce":15431,"uded":15432,"Ġsettling":15433,"Ġlodged":15434,"Ġfixing":15435,"Ġsuccession":15436,"ĠAlfred":15437,"ĠAlvarez
":15438,"mac":15439,"ĠFont":15440,"Ġcontra":15441,"affle":15442,"Ġcopied":15443,"Ġmasses":15444,"ĠElections":15445,"ĠThan":15446,"Ġsoaring":15447,"jay":15448,"Ġsuing":15449,"Ġconcentrated":15450,"Ġconvey":15451,"Ġ240":15452,"gs":15453,"ĠNeal":15454,"Ġnasty":15455,"ĠLB":15456,"odi":15457,"ĠSergei":15458,"Ġthumb":15459,"Ġservants":15460,"Ġrevelation":15461,"Ġdischarge":15462,"ĠBright":15463,"ĠBent":15464,"ĠChrysler":15465,"mill":15466,"ĠImagine":15467,"Ġreceptions":15468,"Ġpersonalities":15469,"Ġsilly":15470,"ĠLoc":15471,"ĠZero":15472,"HI":15473,"rice":15474,"Ġgar":15475,"far":15476,"enh":15477,"ĠBiden":15478,"ĠEntreprene":15479,"Ġassumption":15480,"Ġnicely":15481,"ĠEither":15482,"|":15483,"ĠNW":15484,"ĠKens":15485,"ĠNolan":15486,"Ġowning":15487,"atures":15488,"ĠPastor":15489,"ĠRegistration":15490,"Ġexperiments":15491,"Ġassurance":15492,"Ġhashtag":15493,"oint":15494,"ĠBin":15495,"Ġqualification":15496,"center":15497,"Ġausterity":15498,"ĠPers":15499,"Ġscoop":15500,"Ġpros":15501,"ĠFields":15502,"Ġfur":15503,"ĠJas":15504,"Ġplanting":15505,"security":15506,"ĠTrain":15507,"ĠKathy":15508,"demand":15509,"ĠLev":15510,"Ġtut":15511,"tier":15512,"QU":15513,"Ġexploitation":15514,"Ġignoring":15515,"ĠSex":15516,"Ġadapted":15517,"Ġdisastrous":15518,"Ġempower":15519,"Ġcreators":15520,"ĠLay":15521,"ĠDragon":15522,"ĠWyn":15523,"Ġ1974":15524,"acious":15525,"performance":15526,"ĠTiffany":15527,"isting":15528,"Ġindividually":15529,"ĠLeading":15530,"ĠSask":15531,"Ġcatastrophic":15532,"Ġpunched":15533,"ĠVienna":15534,"Ġsurgical":15535,"Gr":15536,"odo":15537,"Ġgem":15538,"ĠMinority":15539,"Ġmice":15540,"ĠHistoric":15541,"ĠKot":15542,"caster":15543,"Ġsuff":15544,"journal":15545,"Ġpresumably":15546,"ĠBit":15547,"inary":15548,"Ġbre":15549,"Ġenhancing":15550,"Ġgru":15551,"ĠRunning":15552,"hardt":15553,"Ġtroubling":15554,"Ġpumps":15555,"ĠProspect":15556,"etic":15557,"Ġmartial":15558,"Ġcouncillor":15559,"atra":15560,"ths":15561,"ĠSark":15562,"ĠChamp":15563,"scoring":15564,"ĠWel":15565,"rup":15566,"Ġterrifying":15567,"ĠCatch":15568,"Ġinspections":15569,"Ġpornography":15570,"bra":15571,"ĠKeeping":15572,"Ġbanker":15573,"angers":15574,"ĠCrimea":15575,"ĠDisclosure":15576,"iba":15577,"Ġturf":15578,"Ġschedules":15579,"ĠJorge":15580,"ĠAcross":15581,"Ġsolving":15582,"Ġsensation":15583,"ĠWW":15584,"cial":15585,"atz":15586,"Ġlion":15587,"Ġcertificates":15588,"itive":15589,"ĠWes":15590,"ĠPrison":15591,"ĠPlayStation":15592,"duty":15593,"Ġvariable":15594,"Ġstrangers":15595,"istrates":15596,"vs":15597,"Ġreigning":15598,"Ġsliding":15599,"ĠShin":15600,"Ġtelecommunications":15601,"Ġinstalling":15602,"Ġrecogn":15603,"Ġsubway":15604,"too":15605,"ĠMcKin":15606,"ĠStoke":15607,"Ġsensitivity":15608,"bas":15609,"Ġsan":15610,"Ġ(-":15611,"ĠSuarez":15612,"Ġaverages":15613,"ammu":15614,"ĠFen":15615,"Ġrefined":15616,"outh":15617,"Ġcob":15618,"ĠLaz":15619,"essa":15620,"Ġpositioning":15621,"Three":15622,"Ġoils":15623,"Ġassaults":15624,"Ġcompanion":15625,"ĠFlash":15626,"ĠMam":15627,"ĠTill":15628,"Ġblues":15629,"ĠJae":15630,"ĠPier":15631,"Ġbedrooms":15632,"ĠHawkins":15633,"ĠCornell":15634,"Ġanswering":15635,"Ġsec":15636,"Ġrecognizes":15637,"Red":15638,"ĠJamaica":15639,"Ġinsurgents":15640,"Ġbrace":15641,"Ġra":15642,"ĠTai":15643,"ocation":15644,"ignment":15645,"Ġreasonably":15646,"inating":15647,"Ġbonuses":15648,"Ġsandwich":15649,"Ġinadequate":15650,"Ġdelicate":15651,"Ġadorable":15652,"Ġpalace":15653,"Ġsmallest":15654,"Ġpractically":15655,"ĠCrosby":15656,"Ġlevy":15657,"Ġlend":15658,"boards":15659,"shaped":15660,"Ġvulnerability":15661,"ĠKelley":15662,"Ġspons
orship":15663,"ract":15664,"Ġslew":15665,"Ġfederation":15666,"ĠLal":15667,"acies":15668,"ĠFamilies":15669,"Ġproposing":15670,"Ġhyp":15671,"elected":15672,"inkle":15673,"ĠSays":15674,"ĠApollo":15675,"ĠWis":15676,"imer":15677,"Ġcombines":15678,"Ġtim":15679,"ĠQuestion":15680,"Ġborrowers":15681,"Ġswiftly":15682,"ĠMagn":15683,"Ġheadphones":15684,"Russia":15685,"Ġtongue":15686,"Ġbye":15687,"nn":15688,"Ġseller":15689,"ĠWord":15690,"Tom":15691,"ĠDevin":15692,"ĠSurrey":15693,"Ġquad":15694,"Ġcourthouse":15695,"gi":15696,"ĠGrill":15697,">":15698,"Ġrational":15699,"ĠFlames":15700,"ĠCham":15701,"Ġvacuum":15702,"ĠRays":15703,"Ġescalating":15704,"Ġouter":15705,"Ġstretches":15706,"ĠSpeed":15707,"Ġnegatively":15708,"Ġabsorb":15709,"ĠAustrian":15710,"Ġslice":15711,"ĠDiet":15712,"Ġbun":15713,"Ġtactical":15714,"ĠCBD":15715,"Ġedges":15716,"Ġnest":15717,"Ġstrained":15718,"ulates":15719,"ĠTina":15720,"Net":15721,"ķ":15722,"ĠGos":15723,"God":15724,"White":15725,"Ġproudly":15726,"usion":15727,"ĠArlington":15728,"ĠNear":15729,"ĠMaxwell":15730,"Ġbomber":15731,"Ġcared":15732,"Ġapprovals":15733,"Ġexams":15734,"ĠEconomy":15735,"Ġposters":15736,"ĠHampton":15737,"ĠPere":15738,"ĠContract":15739,"Ġhoused":15740,"Ġinstruction":15741,"ĠJess":15742,"Ġacre":15743,"Ġcongestion":15744,"ĠGener":15745,"Ġdioxide":15746,"Ġvar":15747,"ĠAlexandria":15748,"ĠSpider":15749,"Ġcoins":15750,"Ġ225":15751,"Ġterritorial":15752,"ĠSPD":15753,"Ġfloat":15754,"null":15755,"Ġcalculate":15756,"ĠDin":15757,"eto":15758,"Ġcows":15759,"Ġpunct":15760,"Ġexpire":15761,"Ġkidnapped":15762,"Ġcou":15763,"Ġattitudes":15764,"ĠLeh":15765,"ĠHero":15766,"ĠKabul":15767,"Ġcubic":15768,"Ġdigits":15769,"ĠRES":15770,"Ġpipelines":15771,"icide":15772,"ĠSingle":15773,"Ġhurts":15774,"ĠMaz":15775,"ĠPak":15776,"Ġslate":15777,"Ġmultimedia":15778,"ADA":15779,"Mexico":15780,"ĠRelease":15781,"chard":15782,"Ġgarlic":15783,"ĠFletcher":15784,"Ġaforementioned":15785,"Ġ05":15786,"ĠParkway":15787,"Ġfirefighter":15788,"Ġcounseling":15789,"utions":15790,"Cap":15791,"Ġconsultants":15792,"ĠMeh":15793,"ouring":15794,"ĠDI":15795,"mic":15796,"phones":15797,"Ġencounters":15798,"ĠHapp":15799,"Ġcartoon":15800,"flight":15801,"Ġundertake":15802,"ĠHans":15803,"Ġplunge":15804,"ĠParenthood":15805,"Ġkickoff":15806,"ĠCelsius":15807,"ĠRas":15808,"ĠDund":15809,"ounce":15810,"Ġpurse":15811,"Ġmortality":15812,"Ġbrains":15813,"Ġconglomerate":15814,"ĠObserver":15815,"ĠSector":15816,"ĠApparently":15817,"Ġblank":15818,"iston":15819,"Ġweighs":15820,"gro":15821,"ĠPaw":15822,"ĠCOM":15823,"ĠPurdue":15824,"Ġnetted":15825,"ĠLinux":15826,"Mike":15827,"Ġfaithful":15828,"Ġmagazines":15829,"Ġheadquartered":15830,"ĠIps":15831,"Ġindications":15832,"Look":15833,"ĠElite":15834,"Ġsupreme":15835,"Ġchunk":15836,"ĠSz":15837,"ĠVine":15838,"rise":15839,"ĠYas":15840,"general":15841,"ĠOpera":15842,"Ġpriests":15843,"Assad":15844,"Ġaunt":15845,"Ġwhopping":15846,"enzie":15847,"Ġvegan":15848,"Ġinflux":15849,"ĠConsult":15850,"Ġwaiver":15851,"Having":15852,"inning":15853,"Ġproximity":15854,"Ġclassical":15855,"ĠIslanders":15856,"Ġadvertisers":15857,"ĠCe":15858,"ĠSochi":15859,"Ġmemoir":15860,"ĠPlaying":15861,"yers":15862,"Ġstud":15863,"Ġobservations":15864,"Ġadmire":15865,"Ġhiking":15866,"Ġbatter":15867,"Ġconfusing":15868,"Ġprecaution":15869,"kil":15870,"clusive":15871,"opoulos":15872,"ĠWestbrook":15873,"ĠTanzania":15874,"ĠCedar":15875,"usted":15876,"Ġdestructive":15877,"ĠIndies":15878,"osi":15879,"ĠAmid":15880,"Ġintercepted":15881,"Ġpartnering":15882,"Ġsubstances":15883,"ĠSuns":15884,"Ġpromotes":15885,"bird":15886,"Gen":15887,"aper":15888,"
ĠEy":15889,"Ġterrain":15890,"Ġ1930":15891,"zon":15892,"Ġbreed":15893,"broken":15894,"uchin":15895,"ĠPrim":15896,"ĠRoland":15897,"Ġfitted":15898,"Ġprotects":15899,"Ġ114":15900,"RP":15901,"Ġdisrupted":15902,"ĠBaylor":15903,"oren":15904,"ĠKeen":15905,"Ġmansion":15906,"Ġgrassroots":15907,"ĠVictory":15908,"Ġbarn":15909,"Ġdepreciation":15910,"oped":15911,"immer":15912,"Ġgarnered":15913,"ĠLip":15914,"ĠTob":15915,"Ġcreatures":15916,"ooter":15917,"Ġconsortium":15918,"obi":15919,"ĠMonster":15920,"arks":15921,"turn":15922,"Ġsketch":15923,"Ġpredicting":15924,"Ġminimize":15925,"ĠEthan":15926,"anson":15927,"ĠAdjusted":15928,"ĠHornets":15929,"ĠNZ":15930,"ĠKathleen":15931,"ĠKier":15932,"ĠMercury":15933,"Ġghost":15934,"Ġhaw":15935,"ĠDemand":15936,"ĠCollection":15937,"ĠFortune":15938,"Ġcruel":15939,"Ġfurious":15940,"ĠKun":15941,"ĠSalem":15942,"Ġunsuccessful":15943,"ĠLomb":15944,"ĠFury":15945,"ahi":15946,"Ġenthusiastic":15947,"Ġsurgeries":15948,"ACE":15949,"Ġroller":15950,"ĠStamford":15951,"Being":15952,"Dec":15953,"check":15954,"Ġaffection":15955,"Ġgifted":15956,"Ġenerg":15957,"Ġvarying":15958,"ĠCharl":15959,"Ġsolved":15960,"ĠNV":15961,"Ġlaptops":15962,"Ġkindness":15963,"mart":15964,"ĠPenny":15965,"Ġ116":15966,"ĠFeder":15967,"ĠCisco":15968,"Ġeducators":15969,"Ġminim":15970,"Ġgangs":15971,"Ġfestivities":15972,"ĠOriginal":15973,"yre":15974,"rying":15975,"Ġtighter":15976,"ĠMalta":15977,"Ġshield":15978,"interest":15979,"Ġbuoy":15980,"Ġsupplement":15981,"ĠSof":15982,"Ġok":15983,"Ġprosecuted":15984,"Ġinterventions":15985,"Ġseize":15986,"Ġcaravan":15987,"ĠCarlson":15988,"ĠEnterprises":15989,"ĠChristina":15990,"ĠWellington":15991,"Ġaltered":15992,"TP":15993,"Ġexpresses":15994,"Ġcomfortably":15995,"Ġstaffing":15996,"afa":15997,"itu":15998,"saving":15999,"Ġinflammation":16000,"hatt":16001,"ĠMiranda":16002,"icious":16003,"Ġgrabbing":16004,"ĠANY":16005,"Ġobjections":16006,"Ġdot":16007,"cle":16008,"Ġrelates":16009,"Ġtribe":16010,"Ġboarding":16011,"ĠEpisode":16012,"ĠEnjoy":16013,"arding":16014,"Ġathletics":16015,"Ġflies":16016,"Ġmortgages":16017,"ruct":16018,"Ġink":16019,"ĠKC":16020,"ĠSecondary":16021,"Ġfer":16022,"ĠQaeda":16023,"OA":16024,"Frank":16025,"track":16026,"ĠChandler":16027,"Ġenv":16028,"ĠLeaders":16029,"ĠKemp":16030,"Ġunsafe":16031,"sponsored":16032,"San":16033,"ĠUsers":16034,"PE":16035,"ĠAccount":16036,"otta":16037,"ĠMix":16038,"ĠCindy":16039,"En":16040,"Ġ175":16041,"Ġoverlooked":16042,"Ġpublications":16043,"Ġrewarding":16044,"Ġexplicit":16045,"Ġnotch":16046,"Ġspecifics":16047,"Ġdesignation":16048,"ĠAppeal":16049,"Ġcontingent":16050,"Ġcage":16051,"ĠKol":16052,"ĠJohns":16053,"ĠReach":16054,"ĠTin":16055,"ĠAfricans":16056,"Ġprec":16057,"ĠRural":16058,"ĠDw":16059,"Ġuphold":16060,"Ġsuffers":16061,"Ġweed":16062,"inst":16063,"Ġcancellation":16064,"ĠShaun":16065,"Ġleve":16066,"Ġdivisive":16067,"Ġhel":16068,"Ġfatigue":16069,"ĠSchwartz":16070,"ĠKirst":16071,"Ġarise":16072,"Ġgrandson":16073,"ĠLawson":16074,"Ġcollaborate":16075,"Ġparticipant":16076,"ĠBryce":16077,"Ġinfield":16078,"mid":16079,"Ġut":16080,"Ġnotices":16081,"Ġsneak":16082,"ĠPAR":16083,"Chris":16084,"Ġutilize":16085,"ĠByron":16086,"ĠZhang":16087,"PF":16088,"Ġoverwhelmingly":16089,"Ġvegetable":16090,"Ġabsurd":16091,"ĠChem":16092,"etime":16093,"Ġenvoy":16094,"Ġlover":16095,"length":16096,"Ġrevolutionary":16097,"ĠYam":16098,"Ġshutting":16099,"mt":16100,"super":16101,"ĠToby":16102,"ĠCoca":16103,"Ġproposition":16104,"Ġembracing":16105,"Ġversatile":16106,"ĠWalking":16107,"Ġillicit":16108,"Ġnude":16109,"Ġunpredictable":16110,"take":16111,"Ġgotta":16112,"ĠXiaomi":161
13,"Ġinstit":16114,"ĠPep":16115,"ĠPearson":16116,"Ġrejection":16117,"stead":16118,"Ġmut":16119,"Ġoutspoken":16120,"ĠBaghdad":16121,"ĠFly":16122,"Ġwholly":16123,"ĠRM":16124,"ĠFa":16125,"Ġcleaner":16126,"frey":16127,"ĠHab":16128,"ĠLiber":16129,"Ġwhereabouts":16130,"Ġchefs":16131,"Ġalumni":16132,"Ġstopp":16133,"dd":16134,"forward":16135,"rast":16136,"ĠNash":16137,"ĠCort":16138,"Ġpotent":16139,"Ġmold":16140,"Ġdistinctive":16141,"chip":16142,"ĠBrunswick":16143,"Ġpopulist":16144,"Ġplagued":16145,"eka":16146,"ĠIOC":16147,"ugs":16148,"ĠDob":16149,"Ġmagn":16150,"asser":16151,"hew":16152,"Ġcapturing":16153,"oos":16154,"Ġcrystal":16155,"Ġalarming":16156,"Ġ135":16157,"iating":16158,"Ġnap":16159,"umar":16160,"ĠExpl":16161,"Ġupgrading":16162,"Ġdecl":16163,"Ġoverturn":16164,"ARK":16165,"linked":16166,"ĠContinued":16167,"Ġslumped":16168,"ĠGaga":16169,"iful":16170,"ĠPosted":16171,"ĠRecommended":16172,"Ġsnake":16173,"Ġexplosives":16174,"Ġhind":16175,"Ġcontempt":16176,"Ġmock":16177,"NBA":16178,"Ġstall":16179,"Ġorganisers":16180,"Ġingredient":16181,"Ġblockbuster":16182,"ĠStream":16183,"ĠLeah":16184,"Pic":16185,"Ġventures":16186,"oman":16187,"Ġweakening":16188,"Ġmaximize":16189,"Ġdigging":16190,"uez":16191,"Ġdistinction":16192,"ĠMali":16193,"Ġcontaminated":16194,"Ġhij":16195,"Ġcrafts":16196,"Fl":16197,"Ġcloset":16198,"ĠRapp":16199,"Ġtowers":16200,"Ġamenities":16201,"Ġopioids":16202,"Ġcontend":16203,"load":16204,"ĠJol":16205,"ĠBooks":16206,"Ġsim":16207,"Ġthrilling":16208,"Ġmeter":16209,"ĠMultiple":16210,"Ġarbitration":16211,"Ġcracked":16212,"Pl":16213,"Ġphotographers":16214,"Te":16215,"ĠSidd":16216,"Ġexplored":16217,"170":16218,"Ġpleasant":16219,"ĠCapitals":16220,"ĠRi":16221,"ĠRandall":16222,"overed":16223,"Ġchar":16224,"ĠEverybody":16225,"ĠPolitics":16226,"Ġmoisture":16227,"Ġthriving":16228,"ĠScotia":16229,"arded":16230,"imb":16231,"ĠFantasy":16232,"Ġcemetery":16233,"ĠPath":16234,"eur":16235,"ĠSec":16236,"ĠPlatform":16237,"Ġdeparted":16238,"ĠVIDEO":16239,"ĠPant":16240,"ĠSyn":16241,"Ġ230":16242,"bleacher":16243,"live":16244,"Ġprob":16245,"Ġgymn":16246,"Ġjudged":16247,"orns":16248,"Ġstemming":16249,"umbling":16250,"ĠHew":16251,"ĠCheryl":16252,"Ġconsciousness":16253,"cos":16254,"ĠTate":16255,"CNN":16256,"Ġrecognizing":16257,"meg":16258,"Ġpant":16259,"ulk":16260,"MM":16261,"ĠPrescott":16262,"ĠMarcel":16263,"anas":16264,"Ġhappier":16265,"mag":16266,"ĠLov":16267,"Ġspreads":16268,"ĠSample":16269,"Ġpopped":16270,"HR":16271,"ĠMitt":16272,"Ġ00":16273,"Ġlabeled":16274,"Ġaspirations":16275,"?)":16276,"Ġloads":16277,"ĠBritt":16278,"hurst":16279,"ĠTeams":16280,"Ġextremists":16281,"ĠClement":16282,"lings":16283,"shirts":16284,"cheon":16285,"ĠDEL":16286,"ĠLocation":16287,"Ġpresentations":16288,"ĠFalcon":16289,"Ġtoddler":16290,"kl":16291,"Ġprone":16292,"Ġcommemor":16293,"ĠStanton":16294,"201":16295,"Ġranges":16296,"Ġfielder":16297,"Ġattends":16298,"rade":16299,"Ġproactive":16300,"Ġhostage":16301,"ĠGriffith":16302,"ockey":16303,"ĠAdding":16304,"ĠAFL":16305,"gas":16306,"istics":16307,"Ġsurgeon":16308,"Ġtsunami":16309,"2014":16310,"Ġconstraints":16311,"cu":16312,"Ġsurrendered":16313,"azed":16314,"ĠAirbnb":16315,"650":16316,"zed":16317,"Ġinjustice":16318,"dog":16319,"full":16320,"ĠHear":16321,"Ġsprawling":16322,"Ġhomeland":16323,"ĠSG":16324,"anced":16325,"Ġpools":16326,"ĠCE":16327,"Ġbeers":16328,"AE":16329,"ĠJac":16330,"Ġrecurring":16331,"Writing":16332,"Ġgenius":16333,"ĠFrost":16334,"Ġgrounded":16335,"Ġallege":16336,"lessness":16337,"Ġjumper":16338,"Ġvicious":16339,"Ġsecretly":16340,"Ġhacked":16341,"ĠAmsterdam":16342,"ibu":16343,"
Ġ1971":16344,"ĠRosenstein":16345,"nick":16346,"arge":16347,"Ġladder":16348,"elled":16349,"Ġsatellites":16350,"Ġassassination":16351,"ĠDepot":16352,"built":16353,"Ġunrelated":16354,"maid":16355,"ĠDod":16356,"ĠVanderbilt":16357,"Ġboundary":16358,"ĠStafford":16359,"ĠBry":16360,"Ġtribunal":16361,"Ġoutings":16362,"Ġquantity":16363,"imming":16364,"ĠBlacks":16365,"Br":16366,"eri":16367,"uffed":16368,"Ġexplicitly":16369,"ĠBieber":16370,"AKING":16371,"Ġphotographed":16372,"ĠPolit":16373,"Ġpremature":16374,"hered":16375,"ĠVi":16376,"Ġmarsh":16377,"casters":16378,"ĠKra":16379,"Ġdried":16380,"Ġcafe":16381,"eting":16382,"Ġshaping":16383,"aram":16384,"orf":16385,"Ġrichest":16386,"Ġhurricanes":16387,"Ġcommands":16388,"Gl":16389,"anth":16390,"Ġstunt":16391,"Ġyearly":16392,"Ġdefeats":16393,"Ġconsultancy":16394,"call":16395,"Ġlag":16396,"adh":16397,"ĠPalestine":16398,"Ġcustomized":16399,"ĠScar":16400,"ĠWesley":16401,"ready":16402,"Ġpersist":16403,"Ġpacking":16404,"ono":16405,"Ġdischarged":16406,"Ġpouring":16407,"sburg":16408,"Ġreconsider":16409,"ĠMethod":16410,"enez":16411,"cill":16412,"Ġsecular":16413,"pers":16414,"Ġple":16415,"ELS":16416,"ĠMine":16417,"Ġpushes":16418,"Us":16419,"Ġframes":16420,"ĠNets":16421,"ĠSiem":16422,"ĠHitler":16423,"kill":16424,"Ġrented":16425,"Ġcharm":16426,"Ġpulls":16427,"ĠTide":16428,"Ġinsufficient":16429,"itted":16430,"Care":16431,"iera":16432,"Ġcouch":16433,"aders":16434,"ext":16435,"ĠCitizen":16436,"Ġlogical":16437,"ĠMeadows":16438,"ĠDenis":16439,"ĠDrivers":16440,"Ġrepublic":16441,"Ġadvising":16442,"Ġparamedics":16443,"insky":16444,"illard":16445,"encia":16446,"Ġkh":16447,"Ġrh":16448,"Ġfinalized":16449,"Ġreins":16450,"ĠFarrell":16451,"Ġsteer":16452,"Ġproxy":16453,"unes":16454,"ĠSoul":16455,"ĠCopper":16456,"ĠKenyan":16457,"amped":16458,"conference":16459,"sted":16460,"ĠLon":16461,"Ġreplay":16462,"ĠBle":16463,"Ġvibe":16464,"Ġportfolios":16465,"sea":16466,"Ġbeautifully":16467,"Ġairs":16468,"ĠRap":16469,"ĠKatrina":16470,"Ġberth":16471,"gold":16472,"ĠIsaiah":16473,"iques":16474,"elson":16475,"Ġrelentless":16476,"ĠHighland":16477,"ĠPhilippe":16478,"ĠFol":16479,"Ġenduring":16480,"enz":16481,"Ġaer":16482,"icing":16483,"ĠHTC":16484,"Ġdoping":16485,"ĠAlb":16486,"Ġsom":16487,"icia":16488,"Ġcoroner":16489,"Ġdamn":16490,"Ġ119":16491,"Ġwiped":16492,"ĠAuditor":16493,"hern":16494,"ĠJew":16495,"endra":16496,"osp":16497,"ĠRory":16498,"Ġshapes":16499,"ĠPablo":16500,"Ġforemost":16501,"ĠHos":16502,"ĠCunningham":16503,"145":16504,"ĠRecovery":16505,"!!!":16506,"western":16507,"Ġimaging":16508,"ĠRookie":16509,"ĠMTV":16510,"Ġunc":16511,"ĠSporting":16512,"Ġpatrons":16513,"ĠCoverage":16514,"ĠObservatory":16515,"Ġfishermen":16516,"ĠProvince":16517,"ĠAston":16518,"ĠOsh":16519,"ĠWeekend":16520,"Ġrecruits":16521,"Ġdensity":16522,"FM":16523,"ĠGorsuch":16524,"ĠErie":16525,"lining":16526,"Ġshowcased":16527,"ĠRubio":16528,"Ġchaotic":16529,"Ġattractions":16530,"Ġhug":16531,"ĠHerbert":16532,"ĠRespond":16533,"Ġhappily":16534,"Ġtor":16535,"ĠOTHER":16536,"runner":16537,"ĠShakespeare":16538,"Ġstretching":16539,"ĠJudy":16540,"wyn":16541,"ĠCafe":16542,"Ġgreens":16543,"ĠHend":16544,"Ġglam":16545,"iation":16546,"ĠKingston":16547,"Ġincremental":16548,"Live":16549,"ĠBraun":16550,"USS":16551,"reb":16552,"Ġimperative":16553,"Ġsympathy":16554,"Ġrefuge":16555,"Ġadministered":16556,"rance":16557,"ĠLiberia":16558,"Ġmobil":16559,"heads":16560,"Ġinevitably":16561,"ĠEugene":16562,"ĠBerkshire":16563,"ĠHarbour":16564,"ĠTrends":16565,"TB":16566,"Ġdeficits":16567,"Ġlistings":16568,"Ġreadings":16569,"Ġtumor":16570,"Ġoffic":16571,"opy":165
72,"Ġdistracted":16573,"Ġappropriately":16574,"ĠWillis":16575,"Ġskirt":16576,"ĠTea":16577,"Ġshades":16578,"Ġbargaining":16579,"Ġretention":16580,"ĠConcert":16581,"ĠMeteor":16582,"ĠCustom":16583,"Ġinputs":16584,"ĠSah":16585,"enta":16586,"Love":16587,"ĠBurg":16588,"ĠCynthia":16589,"ĠMoses":16590,"ubb":16591,"Ġpeoples":16592,"dh":16593,"ĠFro":16594,"bean":16595,"Ġcigarette":16596,"tta":16597,"umm":16598,"Ġphenomenal":16599,"Ġyelling":16600,"Ġinaug":16601,"Ġconven":16602,"ĠGore":16603,"request":16604,"Ġcolonial":16605,"ĠAleppo":16606,"Ġdemolition":16607,"Ġamounted":16608,"Ġstaggering":16609,"Ġclips":16610,"Ġinconsistent":16611,"ĠMilton":16612,"ĠWireless":16613,"ĠReno":16614,"ĠPerkins":16615,"Ġunusually":16616,"Ġmemor":16617,"Ġhectares":16618,"Ġlat":16619,"central":16620,"ĠDig":16621,"ĠMarina":16622,"ĠPartner":16623,"daily":16624,"your":16625,"Reilly":16626,"Ġpope":16627,"phy":16628,"Ġassessing":16629,"ĠRodrigo":16630,"wi":16631,"Ġcompatible":16632,"imate":16633,"Ġgentle":16634,"ĠRhodes":16635,"Brexit":16636,"ieve":16637,"Ġbreaches":16638,"Ġchopped":16639,"Ġcancers":16640,"VEL":16641,"Ġsluggish":16642,"ĠUltra":16643,"ĠUl":16644,"Ġcrises":16645,"ONE":16646,"ĠEquipment":16647,"Ġcater":16648,"Ġadjourn":16649,"Ġreadily":16650,"ĠRolling":16651,"ĠBott":16652,"inel":16653,"ĠRule":16654,"Ġgrind":16655,"ĠHussain":16656,"ussie":16657,"Ġdepressed":16658,"ĠImperial":16659,"ongo":16660,"Ġuniforms":16661,"Ġ117":16662,"Ġchambers":16663,"ĠDum":16664,"ifi":16665,"ĠBetty":16666,"ĠTA":16667,"Ġpromotions":16668,"itary":16669,"Ġcried":16670,"Ġbranding":16671,"ĠBahamas":16672,"ĠDat":16673,"Ġantibiotics":16674,"ĠAus":16675,"Ġumbrella":16676,"Ġgradual":16677,"Ġaltercation":16678,"Ġlure":16679,"ĠJakarta":16680,"Ġunified":16681,"chin":16682,"ettes":16683,"ĠRwanda":16684,"ulations":16685,"Ġbrink":16686,"Ġbroadcasting":16687,"ĠArtist":16688,"Ġrecon":16689,"Ġaqu":16690,"ĠServ":16691,"999":16692,"ĠParticipants":16693,"ĠVentures":16694,"fight":16695,"Ġactivism":16696,"Ġstructured":16697,"Ġportal":16698,"Ġtendency":16699,"ĠAssociate":16700,"Ġcalf":16701,"ĠOrd":16702,"ĠTi":16703,"ĠFrancois":16704,"uary":16705,"ĠVik":16706,"urchase":16707,"Ġfried":16708,"Ġbooming":16709,"Ġparticles":16710,"amas":16711,"INA":16712,"Super":16713,"supp":16714,"urring":16715,"ĠWatts":16716,"affer":16717,"ĠDEC":16718,"Ġroadway":16719,"border":16720,"Ġsequ":16721,"entially":16722,"ieg":16723,"Ġcamping":16724,"Ġ750":16725,"Ġcycles":16726,"ĠReese":16727,"ĠFellow":16728,"isters":16729,"ĠVehicle":16730,"kies":16731,"ĠJonas":16732,"Ġfoundations":16733,"ĠNigel":16734,"Ġstab":16735,"Ġcongressman":16736,"ĠWichita":16737,"antes":16738,"Ġprogression":16739,"Ġditch":16740,"lik":16741,"Ġsid":16742,"Ġele":16743,"ĠMund":16744,"Ġstairs":16745,"lete":16746,"Ġlingering":16747,"Ġsadly":16748,"Ġay":16749,"Em":16750,"Ġdeadliest":16751,"soon":16752,"Ġtangible":16753,"Ġabusing":16754,"Ġcomprises":16755,"vil":16756,"ĠBun":16757,"Ġdoubling":16758,"Ġcommun":16759,"Ġslogan":16760,"Ġloading":16761,"Ġshallow":16762,"Ġattributes":16763,"Che":16764,"Ġcheering":16765,"Ġrefuses":16766,"cam":16767,"bes":16768,"hon":16769,"ĠSpartans":16770,"cept":16771,"ĠComputer":16772,"ĠCanberra":16773,"ĠWARNING":16774,"Ġstuffed":16775,"block":16776,"ĠJennings":16777,"ĠAU":16778,"atin":16779,"Ġom":16780,"Ġbachelor":16781,"Ġprediction":16782,"ĠWinner":16783,"agne":16784,"Ġrob":16785,"ĠKatherine":16786,"Ġli":16787,"ĠHumph":16788,"ĠPEOPLE":16789,"IRO":16790,"Cola":16791,"Ġguitarist":16792,"isen":16793,"ĠHighlights":16794,"Ġwelcomes":16795,"Ġprisoner":16796,"Ġpsychology":16797,"Ġextradition":16798,"Ġrou
":16799,"ĠLund":16800,"Ġthoughtful":16801,"RY":16802,"orman":16803,"Alex":16804,"Ġlaughter":16805,"Ġfumble":16806,"Ġsynthetic":16807,"Ġdigit":16808,"ĠRoc":16809,"ĠFactory":16810,"ellery":16811,"ishment":16812,"ilar":16813,"ĠEarl":16814,"ĠSutton":16815,"ĠJur":16816,"ĠAllan":16817,"ĠKoreans":16818,"uki":16819,"Ġculinary":16820,"PU":16821,"Stock":16822,"stars":16823,"ĠDayton":16824,"beck":16825,"Ġinstability":16826,"ĠBring":16827,"Ġbreeding":16828,"Ġmiracle":16829,"bons":16830,"Ġdonating":16831,"ĠKick":16832,"ĠSag":16833,"afi":16834,"Ġharassed":16835,"asm":16836,"Their":16837,"inity":16838,"Ġacademics":16839,"Ġstatute":16840,"ĠAmit":16841,"Ġpressured":16842,"east":16843,"\"),":16844,"iso":16845,"220":16846,"Ġairplane":16847,"ĠMcCabe":16848,"ctions":16849,"ĠMesa":16850,"Ġsensational":16851,"ĠFE":16852,"ĠNeigh":16853,"Ġbribery":16854,"Ġflaws":16855,"Ġfemales":16856,"Ġmisses":16857,"ĠColor":16858,"ĠVietnamese":16859,"ĠMental":16860,"Unfortunately":16861,"ĠPont":16862,"Ġ1940":16863,"dry":16864,"ĠGazette":16865,"ĠAns":16866,"Ġwhistle":16867,"Ġsymbolic":16868,"Ġpossessions":16869,"ĠDriver":16870,"Ġbracket":16871,"ĠReign":16872,"oji":16873,"Ġoct":16874,"Ġtube":16875,"ĠFelix":16876,"Ġtranslated":16877,"Ġpromptly":16878,"ĠErnest":16879,"arth":16880,"Ġdumb":16881,"Ġinfluences":16882,"taking":16883,"Ġprivat":16884,"erers":16885,"Ġmalware":16886,"Ġpredictable":16887,"Ġtighten":16888,"Ġheights":16889,"Ġfairness":16890,"facing":16891,"Ġrematch":16892,"Ġpoet":16893,"Ġfundamentally":16894,"Ġcoveted":16895,"Ġlivelihood":16896,"ĠABOUT":16897,"Ġsourced":16898,"Ġdeferred":16899,"Ġslashed":16900,"ĠSchultz":16901,"Ġtriggering":16902,"ĠShiv":16903,"Ġlithium":16904,"ahead":16905,"Ġleisure":16906,"Ġbackpack":16907,"ilateral":16908,"ĠNuclear":16909,"ĠLeone":16910,"ĠNice":16911,"Ġenthusiasts":16912,"September":16913,"Ġenroll":16914,"ĠWear":16915,"erey":16916,"angs":16917,"such":16918,"Ġunpopular":16919,"Ġdisciplined":16920,"Ġshrinking":16921,"ĠBrewing":16922,"ĠReally":16923,"Ġdirective":16924,"175":16925,"Ġnotifications":16926,"Ġfortunes":16927,"ĠHour":16928,"ĠGan":16929,"ĠChurchill":16930,"ĠDodge":16931,"ĠJeep":16932,"Ġsour":16933,"Ġderived":16934,"Ġft":16935,"riv":16936,"Ġlaundry":16937,"Ġfentanyl":16938,"ĠSioux":16939,"achi":16940,"workers":16941,"Ġworkload":16942,"rooms":16943,"ĠQU":16944,"ĠTruth":16945,"Ġdefenses":16946,"Ġdunk":16947,"IJ":16948,"Ġderby":16949,"ĠMotion":16950,"ĠMayo":16951,"ĠIke":16952,"Ġpreferences":16953,"Ġped":16954,"elman":16955,"moon":16956,"Ġshoots":16957,"ĠNoel":16958,"Ġmilit":16959,"ĠCambodia":16960,"ĠMLA":16961,"Ġhonoured":16962,"fast":16963,"Ġalgorithms":16964,"Ġstormed":16965,"NT":16966,"Benz":16967,"Ġvaccines":16968,"Ġmarching":16969,"Ġ118":16970,"ĠWilmington":16971,"GM":16972,"coin":16973,"Ġunderwater":16974,"ĠClearly":16975,"Ġorgans":16976,"mir":16977,"Ġdenounced":16978,"pless":16979,"imal":16980,"ĠKom":16981,"Ġfatalities":16982,"Ġyoungster":16983,"Ġthirty":16984,"Ġinternally":16985,"222":16986,"Ġdemonstrating":16987,"Ġbusiest":16988,"Ġperpetrators":16989,"Ġstun":16990,"Both":16991,"ĠMcCoy":16992,"gn":16993,"ĠDalton":16994,"ĠDAY":16995,"Ġsacred":16996,"Ġconsuming":16997,"Ġ(+":16998,"ĠPioneer":16999,"ĠApplications":17000,"ĠBolt":17001,"ĠBarkley":17002,"ĠExpo":17003,"ĠLore":17004,"ĠPrivacy":17005,"ĠHarley":17006,"Ġtractor":17007,"Ġtenth":17008,"ĠHaiti":17009,"ÃŃn":17010,"ĠTVs":17011,"ĠCathedral":17012,"Ġunite":17013,"Ġbinding":17014,"oks":17015,"ĠJenny":17016,"Ġcaller":17017,"ĠIngram":17018,"ĠPrairie":17019,"Ġrunoff":17020,"Ġasserted":17021,"icit":17022,"ĠSie":17023,"102":17024,"ĠMB":170
25,"Ġobstruction":17026,"Ġgroom":17027,"Ġtolerate":17028,"Ġcans":17029,"forth":17030,"Ġvillain":17031,"Ġdefining":17032,"ĠFrenchman":17033,"otte":17034,"Ġcontr":17035,"clock":17036,"onder":17037,"Ġprolific":17038,"ĠElectronic":17039,"ĠSak":17040,"annie":17041,"ASS":17042,"Ġmultinational":17043,"Associated":17044,"IZ":17045,"ĠBelle":17046,"Ġmand":17047,"asis":17048,"Mac":17049,"Ġpretend":17050,"ĠCommunication":17051,"Ġheartbreaking":17052,"ĠShepherd":17053,"ĠBIG":17054,"mph":17055,"ĠShield":17056,"ĠLiv":17057,"ĠStatus":17058,"Ġbikini":17059,"Ġranch":17060,"Ġpeacefully":17061,"ITCH":17062,"bourne":17063,"ĠVariety":17064,"Ġstationed":17065,"Ġhed":17066,"Ġexhausted":17067,"Ġsurpassed":17068,"Ġcatalyst":17069,"Ġsmuggling":17070,"uating":17071,"Ġ123":17072,"Ġdup":17073,"ĠSul":17074,"conf":17075,"jit":17076,"Ġmaiden":17077,"asta":17078,"ĠCalvin":17079,"borne":17080,"Ġgrim":17081,"Ġtort":17082,"cott":17083,"olas":17084,"NR":17085,"Ġbreakout":17086,"ĠHun":17087,"ĠGuatemala":17088,"Ġhistorian":17089,"ĠLawyers":17090,"ĠDisplay":17091,"Ġobstruct":17092,"ĠOsborne":17093,"Ġtherapies":17094,"ĠAub":17095,"Ġinjunction":17096,"stroke":17097,"Ġseafood":17098,"Ġhazardous":17099,"ĠWolver":17100,"ĠViolence":17101,"ĠBillion":17102,"ĠLetter":17103,"ĠWorldwide":17104,"Real":17105,"Ġexpires":17106,"Ġflawed":17107,"European":17108,"Ġrigorous":17109,"ĠSimilar":17110,"ĠSurface":17111,"ĠEF":17112,"mys":17113,"ĠFunds":17114,"ographer":17115,"Ġtribes":17116,"Ġspouse":17117,"Ġunsure":17118,"aways":17119,"Ġtrainers":17120,"arie":17121,"ĠZar":17122,"ĠComedy":17123,"ĠLit":17124,"ĠNoon":17125,"Ġgallon":17126,"Ġconsulate":17127,"ĠBras":17128,"iology":17129,"onies":17130,"ĠBelichick":17131,"ĠRoot":17132,"ĠLux":17133,"ĠSed":17134,"ĠTos":17135,"Ġinherited":17136,"tw":17137,"Ġdeaf":17138,"Ġdriveway":17139,"jah":17140,"ĠScientific":17141,"ĠNottingham":17142,"both":17143,"awan":17144,"Ġnut":17145,"ĠLebanese":17146,"ĠAAA":17147,"ĠSuzuki":17148,"ĠBU":17149,"ells":17150,"Ġspecify":17151,"ĠNotes":17152,"Ġvoluntarily":17153,"ĠMolly":17154,"Ġoutskirts":17155,"Ġbehaviors":17156,"Ġmilitia":17157,"Ġsplash":17158,"Ġpersonalized":17159,"ĠFiat":17160,"ĠKind":17161,"ĠTruck":17162,"py":17163,"ĠWIN":17164,"dist":17165,"itational":17166,"APP":17167,"ĠPelicans":17168,"ĠGam":17169,"mel":17170,"Ġmandated":17171,"Ġbalances":17172,"ĠWizards":17173,"iary":17174,"ĠAvailable":17175,"Ġkay":17176,"jin":17177,"eyed":17178,"Ġsterling":17179,"Ġconcealed":17180,"ĠFedEx":17181,"ĠPO":17182,"ĠJacqu":17183,"anted":17184,"eme":17185,"ĠDefensive":17186,"manship":17187,"Ġreliever":17188,"Ġshortstop":17189,"Ġphot":17190,"ĠGain":17191,"ĠConcern":17192,"due":17193,"Ġalgorithm":17194,"fell":17195,"ĠMountains":17196,"icians":17197,"Ġhonoring":17198,"Ġuploaded":17199,"Ġtore":17200,"GH":17201,"orde":17202,"ĠCoin":17203,"ĠAven":17204,"Ġliterary":17205,"Before":17206,"Ġtactic":17207,"Ġsocially":17208,"ĠSik":17209,"Ġthermal":17210,"Ġhor":17211,"price":17212,"Ġrooted":17213,"arrow":17214,"Ġcirculating":17215,"Ġlaughs":17216,"ĠLines":17217,"lig":17218,"Ġjudgement":17219,"....":17220,"Ġsewer":17221,"Ġdancer":17222,"ĠPens":17223,"Ġsig":17224,"ische":17225,"wives":17226,"Ġgran":17227,"ĠBron":17228,"ĠHyde":17229,"yards":17230,"Ġcandidacy":17231,"Ġhey":17232,"Ġcontributors":17233,"ĠUpdated":17234,"Ġ190":17235,"Ġhalls":17236,"Ġemphas":17237,"ĠCherry":17238,"Ġrim":17239,"Ġbilled":17240,"Ġbaked":17241,"ĠPopular":17242,"lb":17243,"Ġgravity":17244,"Under":17245,"Ġreservation":17246,"organ":17247,"ĠPict":17248,"ĠWhitney":17249,"Ġonboard":17250,"NEY":17251,"ĠBreaking":17252,"Ġflagged":17253,"r
ar":17254,"ĠBasic":17255,"ĠDomestic":17256,"ĠPent":17257,"Ġvigilant":17258,"Ġzoning":17259,"Fire":17260,"Ġcorrected":17261,"isbury":17262,"ĠLaure":17263,"ĠDevon":17264,"print":17265,"ĠTopics":17266,"ĠFuel":17267,"Ġcirculation":17268,"ĠPratt":17269,"Ġskiing":17270,"Ġtornado":17271,"dep":17272,"ĠUnless":17273,"ifting":17274,"Ġfool":17275,"should":17276,"Ġinspectors":17277,"Ġprotested":17278,"Ġba":17279,"ussia":17280,"Ġspun":17281,"grass":17282,"phone":17283,"Ġpotato":17284,"ĠBehind":17285,"cil":17286,"Ġconcession":17287,"Ġapplause":17288,"ĠChin":17289,"Ġceremonies":17290,"pit":17291,"Ġtraumatic":17292,"Ġbasics":17293,"Ġparameters":17294,"ĠMoz":17295,"ĠAIDS":17296,"Ph":17297,"Ġjudging":17298,"Ġlecture":17299,"Ġmunicipality":17300,"Ġcardiac":17301,"ogan":17302,"pir":17303,"could":17304,"Channel":17305,"Ġshattered":17306,"ĠAV":17307,"continental":17308,"chie":17309,"ibi":17310,"ĠOy":17311,"Mon":17312,"ĠCN":17313,"WC":17314,"Ġdistributor":17315,"ĠSavannah":17316,"Ġcleaned":17317,"ĠFlores":17318,"Ġembarrassed":17319,"Ġclay":17320,"Ġvolcano":17321,"Ġstressful":17322,"Ġsummoned":17323,"ĠSeg":17324,"Ġstatistical":17325,"ĠShak":17326,"Ġadequately":17327,"worthy":17328,"fighting":17329,"alan":17330,"Ġnecessity":17331,"Ġresidency":17332,"Ġsober":17333,"arius":17334,"ĠTaj":17335,"mount":17336,"wards":17337,"Ġaesthetic":17338,"Coin":17339,"ĠDew":17340,"were":17341,"SK":17342,"Ġpowerhouse":17343,"Ġcleanup":17344,"ĠWITH":17345,"ĠHers":17346,"ĠRao":17347,"ĠFlyers":17348,"Ġdominating":17349,"issued":17350,"ĠMcGr":17351,"Ġinsurgency":17352,"Ġburial":17353,"ĠPlains":17354,"ensive":17355,"ĠPresent":17356,"Mo":17357,"Ġnerves":17358,"Ġsmoothly":17359,"staff":17360,"Ġrestoring":17361,"ĠGeneration":17362,"Ġcommuters":17363,"ĠLegend":17364,"ĠGad":17365,"lied":17366,"Ġissuer":17367,"ĠDozens":17368,"Ġphases":17369,"ĠWu":17370,"ĠTunisia":17371,"ĠPacers":17372,"Ġdur":17373,"ĠIG":17374,"annon":17375,"sided":17376,"Ġvo":17377,"ĠNI":17378,"Ġvitamin":17379,"Ġsoc":17380,"Ġimmunity":17381,"Ġgenerates":17382,"ĠMcGu":17383,"Ġexplores":17384,"Ġassistants":17385,"Ġstems":17386,"ushed":17387,"ĠZak":17388,"ĠOwners":17389,"Ġvariant":17390,"ardy":17391,"ĠNewark":17392,"ĠCatalonia":17393,"Ġautonomy":17394,"Ġgreet":17395,"Ġawait":17396,"ĠLuckily":17397,"ĠTicket":17398,"ĠSTOR":17399,"asy":17400,"Ġincorrect":17401,"Ġconsisting":17402,"Ġperspectives":17403,"ĠQuint":17404,"Ġtotaling":17405,"Ġnortheastern":17406,"Ġcharacterized":17407,"Ġsurfaces":17408,"nation":17409,"Ġprevents":17410,"ĠSho":17411,"Ġelectorate":17412,"Ġshortfall":17413,"chy":17414,"aws":17415,"ĠAddress":17416,"Ġdefensively":17417,"quel":17418,"chester":17419,"Ġterr":17420,"ahu":17421,"lined":17422,"ĠNev":17423,"unn":17424,"Def":17425,"pc":17426,"ĠSig":17427,"Ġnonetheless":17428,"ĠSundays":17429,"ĠBAS":17430,"Ġpolicemen":17431,"ĠGoal":17432,"apa":17433,"Ġrope":17434,"Ġoutage":17435,"ĠPaso":17436,"Ġsadness":17437,"ĠGrowing":17438,"ĠKyr":17439,"Ġale":17440,"ĠBreitbart":17441,"ĠVia":17442,"ĠBrig":17443,"idence":17444,"Ġ145":17445,"quire":17446,"Ġdistraction":17447,"ĠOdd":17448,"ĠSimply":17449,"ĠNin":17450,"Ġcompetent":17451,"ded":17452,"iper":17453,"ĠKaty":17454,"ĠSolomon":17455,"Ġfeeds":17456,"ĠMort":17457,"ĠRica":17458,"affe":17459,"Ġcooperating":17460,"Ġarrivals":17461,"Ġdelete":17462,"ĠAth":17463,"Ġtrustees":17464,"Ġtub":17465,"Ġsaga":17466,"otes":17467,"ĠCJ":17468,"Ġexited":17469,"stakes":17470,"Ġinflu":17471,"2000":17472,"ĠDonovan":17473,"ĠNur":17474,"Ġoutline":17475,"Ġaudition":17476,"oked":17477,"ĠJag":17478,"money":17479,"Ġcardiovascular":17480,"song":17481,"ĠOften":174
82,"ĠGoff":17483,"ĠOaks":17484,"Will":17485,"acon":17486,"Ġ?":17487,"Har":17488,"ĠLambert":17489,"atoon":17490,"ĠAF":17491,"ĠMavericks":17492,"nia":17493,"ĠChennai":17494,"\"},\"":17495,"Ġpairing":17496,"mad":17497,"ause":17498,"ĠRide":17499,"111":17500,"ĠFallon":17501,"ĠHyder":17502,"ĠPiper":17503,"Ġfilmmakers":17504,"icon":17505,"ĠBeau":17506,"Ġbutt":17507,"lot":17508,"Ġrifles":17509,"Ġsunglasses":17510,"ĠTRA":17511,"Ġmagnetic":17512,"arty":17513,"ĠYo":17514,"ĠWeight":17515,"?!":17516,"ether":17517,"Ġaspir":17518,"Ġhunters":17519,"Ġcontamination":17520,"Ben":17521,"political":17522,"],\"":17523,"ĠBever":17524,"Ġmonuments":17525,"won":17526,"auc":17527,"Ġexpressions":17528,"Ġlakes":17529,"iao":17530,"abin":17531,"Ġpleading":17532,"Ġdiscounted":17533,"Ġdisappoint":17534,"ĠTW":17535,"craft":17536,"Ġsocieties":17537,"ĠAugusta":17538,"Ġbott":17539,"Ġmarker":17540,"ĠWrestling":17541,"CBC":17542,"athy":17543,"ĠAZ":17544,"Ġfabulous":17545,"valued":17546,"Ġoptical":17547,"Ġshaken":17548,"OSS":17549,"ĠImp":17550,"ĠAUD":17551,"inals":17552,"Ġrevital":17553,"Ġcontroller":17554,"Ġgrasp":17555,"uling":17556,"ĠFrederick":17557,"ague":17558,"bull":17559,"ĠLadies":17560,"Ġdisruptive":17561,"Ġbenefiting":17562,"Ġverge":17563,"ĠDak":17564,"Ġgrabs":17565,"ĠPAC":17566,"GN":17567,"ĠMcMahon":17568,"rob":17569,"ĠEspecially":17570,"ĠChrome":17571,"ĠBundesliga":17572,"104":17573,"Ġliberty":17574,"ĠSF":17575,"Ġvarieties":17576,"East":17577,"Ġgrowers":17578,"Ġsocialist":17579,"Ġunemployed":17580,"AMI":17581,"Ġtotals":17582,"ĠGib":17583,"Ġdefect":17584,"ĠOrtiz":17585,"ĠPerfect":17586,"Ġpraying":17587,"ISS":17588,"Ġul":17589,"Ġthrust":17590,"osc":17591,"ĠOtherwise":17592,"Ġobsessed":17593,"Ġ650":17594,"ĠWebsite":17595,"Ġspectators":17596,"ĠScout":17597,"ĠBoone":17598,"ĠDillon":17599,"Ġabortions":17600,"lect":17601,"utz":17602,"Ġvillagers":17603,"Ġaccelerating":17604,"Ġslap":17605,"Ġvague":17606,"Ġjurisdictions":17607,"League":17608,"ĠUruguay":17609,"Ġobstacle":17610,"Ġmanufactures":17611,"Ġcampaigned":17612,"ĠAdvance":17613,"ĠNort":17614,"emer":17615,"Ġ1964":17616,"Ġirre":17617,"Ġprog":17618,"ĠFeatured":17619,"Ġcommute":17620,"Ġhandset":17621,"akis":17622,"ĠArs":17623,"tail":17624,"iker":17625,"Ġcrafted":17626,"Ġupl":17627,"ĠMarcos":17628,"Looking":17629,"Ġseated":17630,"ĠBoat":17631,"Ġreadiness":17632,"ĠLLP":17633,"otechnology":17634,"facebook":17635,"ĠScouts":17636,"ĠEar":17637,"ĠAdv":17638,"ĠDemocracy":17639,"NI":17640,"oci":17641,"ĠSnapdragon":17642,"Saturday":17643,"ĠPra":17644,"ĠCoastal":17645,"ĠVoters":17646,"ĠLeigh":17647,"ohn":17648,"orry":17649,"Ġtechnicians":17650,"armed":17651,"Ġshrink":17652,"Ġspinning":17653,"agram":17654,"320":17655,"liner":17656,"ĠContest":17657,"ĠCountries":17658,"Ġfarewell":17659,"ĠCW":17660,"aris":17661,"Ġstorytelling":17662,"Ġpasser":17663,"Ġsailing":17664,"control":17665,"Ġdissent":17666,"ĠRih":17667,"Ġedit":17668,"Ġspoilers":17669,"itched":17670,"ĠBentley":17671,"Ġcant":17672,"mn":17673,"ĠMacy":17674,"Ġindefinitely":17675,"Ġvill":17676,"Ġmeth":17677,"ĠEL":17678,"Ġoptional":17679,"Ġremark":17680,"ĠVanessa":17681,"ã":17682,"Ġmasks":17683,"ĠProvincial":17684,"Ġculprit":17685,"ĠTol":17686,"Ġsnack":17687,"ĠInfinity":17688,"ĠPub":17689,"Ġbrakes":17690,"Ġclar":17691,"Ġinception":17692,"love":17693,"Ġwonders":17694,"Ġforged":17695,"ĠCEOs":17696,"Ġspecifications":17697,"irst":17698,"ension":17699,"ĠMarin":17700,"det":17701,"Ġordeal":17702,"ĠFeed":17703,"December":17704,"Ġstrokes":17705,"fect":17706,"orial":17707,"Ġshowcasing":17708,"Ġstack":17709,"UAL":17710,"ĠAlexandra":17711,"Ġpoison":1
7712,"ĠFry":17713,"ĠCars":17714,"Ġprototype":17715,"ĠUSDA":17716,"ĠIF":17717,"flows":17718,"Ġtailored":17719,"ĠGear":17720,"Ġmyth":17721,"Ġplatinum":17722,"seven":17723,"founded":17724,"encing":17725,"ĠTip":17726,"ĠMald":17727,"Ġgeopolitical":17728,"112":17729,"Ġenqu":17730,"ĠNR":17731,"ĠNadu":17732,"leen":17733,"ĠTat":17734,"Ġcolon":17735,"ĠSize":17736,"Ġvis":17737,"Ġbere":17738,"ĠAnnie":17739,"ĠWatkins":17740,"Ġpumping":17741,"cur":17742,"ĠBates":17743,"Ġslug":17744,"miss":17745,"Ġforecasting":17746,"source":17747,"Ġacknowledges":17748,"Ġprosecute":17749,"Ġtestament":17750,"Ġcum":17751,"ems":17752,"Ġsocks":17753,"ĠSame":17754,"Ġcompetitiveness":17755,"Ġdefinitive":17756,"Ġintensified":17757,"Ġsatisfying":17758,"Ġphysics":17759,"ĠHarden":17760,"Ġsubsidy":17761,"Men":17762,"ĠPaddock":17763,"Ġworkouts":17764,"ĠSaw":17765,"Ġcrisp":17766,"ĠBezos":17767,"ĠVote":17768,"Ġguiding":17769,"anged":17770,"Ġstaple":17771,"ŀ":17772,"ules":17773,"ĠAvengers":17774,"Ġoptim":17775,"ĠBuffett":17776,"Ġtimetable":17777,"oust":17778,"HE":17779,"ĠGrab":17780,"Have":17781,"cca":17782,"Ġwaived":17783,"Ġretaining":17784,"Ġaber":17785,"Ġoffline":17786,"Ġvigil":17787,"books":17788,"ĠRein":17789,"Ġacknowledging":17790,"ĠDoyle":17791,"Ġproteins":17792,"Ġmixing":17793,"ĠAlcohol":17794,"ĠJD":17795,"Ġsyn":17796,"Ġthieves":17797,"Ġhomemade":17798,"Ġfeminist":17799,"ĠRoosevelt":17800,"ĠCoal":17801,"Ġwishing":17802,"ĠSIGN":17803,"ĠLad":17804,"Ġempathy":17805,"ĠBrooke":17806,"ĠMash":17807,"inations":17808,"''":17809,"ulators":17810,"Ġdrastically":17811,"Ġfloral":17812,"ĠGuild":17813,"Ġundercover":17814,"ĠLaboratory":17815,"ĠRank":17816,"Ġrestraining":17817,"Ġparagraph":17818,"Ġpersona":17819,"ĠEmployment":17820,"ogs":17821,"ĠGw":17822,"ĠMedal":17823,"Ġwildly":17824,"fare":17825,"ĠCNBC":17826,"photo":17827,"Ġtransforming":17828,"Ġtermination":17829,"still":17830,"INT":17831,"Ġbal":17832,"ĠEconom":17833,"ĠLarson":17834,"Ġheck":17835,"Ġquantitative":17836,"Ġemergence":17837,"esta":17838,"Ġknot":17839,"Ġwhale":17840,"ĠðŁĺ":17841,"Ġperimeter":17842,"Ġempowerment":17843,"Ġmg":17844,"Ġrents":17845,"Ġrefreshing":17846,"Ġleasing":17847,"Ġpatents":17848,"andi":17849,"Ġfathers":17850,"Ġunse":17851,"Ġprocessors":17852,"Down":17853,"Ġreversal":17854,"veh":17855,"andal":17856,"ĠKov":17857,"Blue":17858,"Ġspecializes":17859,"Link":17860,"ĠConsidering":17861,"ĠEdmund":17862,"Ġneo":17863,"agger":17864,"rg":17865,"Ġseverity":17866,"Ġcour":17867,"RL":17868,"ĠTeresa":17869,"Ġgallons":17870,"Ġacquitted":17871,"Ġaccompl":17872,"Ġcracks":17873,"Ġsciences":17874,"Club":17875,"Ġpredicts":17876,"ĠVu":17877,"Ġhints":17878,"ĠZack":17879,"Ġrefurb":17880,"Ġdestabil":17881,"ĠSamar":17882,"ĠInfo":17883,"fs":17884,"Ġratios":17885,"Ġinherent":17886,"ĠContinental":17887,"Ġtreasure":17888,"Ġcaucus":17889,"Ġenact":17890,"orporated":17891,"ineries":17892,"Ġtastes":17893,"main":17894,"Ġsq":17895,"ickson":17896,"corruption":17897,"ulture":17898,"ĠGoodman":17899,"ĠLing":17900,"ĠSup":17901,"Ġexposing":17902,"immers":17903,"Ġresponds":17904,"heimer":17905,"Air":17906,"ĠFigures":17907,"Ġlongstanding":17908,"ĠAnalytics":17909,"Ġenforced":17910,"Ġnickname":17911,"Ġclinch":17912,"ĠCarpenter":17913,"ĠPharma":17914,"Ġconstructive":17915,"Ġgel":17916,"ĠSham":17917,"ĠTOP":17918,"ĠDerrick":17919,"ör":17920,"birds":17921,"ĠTong":17922,"ĠBatman":17923,"ĠRouhani":17924,"ĠOlive":17925,"ĠRiv":17926,"Ġdessert":17927,"Ġguides":17928,"Ġsag":17929,"Ġchemotherapy":17930,"Ġslept":17931,"ĠFranc":17932,"ĠDunk":17933,"writers":17934,"ĠÃĹ":17935,"Ġ401":17936,"Ġoutfielder":17937,"ĠHamburg":179
38,"izu":17939,"Ġscr":17940,"Ġcomparisons":17941,"Ġwhites":17942,"Ġtraits":17943,"Ġcollateral":17944,"LEY":17945,"ideshow":17946,"Ġstatutory":17947,"Ġruin":17948,"Ġsituated":17949,"tem":17950,"Ġinject":17951,"rage":17952,"550":17953,"Ġfactions":17954,"ĠNaomi":17955,"cutting":17956,"Ġcommunicating":17957,"Ġrailroad":17958,"Ġsparking":17959,"Ġrespiratory":17960,"ĠWebster":17961,"ĠCarbon":17962,"Ġundertaking":17963,"Ġcomposer":17964,"ĠFigure":17965,"Ġspecified":17966,"Video":17967,"uber":17968,"Ġsexuality":17969,"lected":17970,"ĠBurger":17971,"ĠCards":17972,"SR":17973,"ĠLie":17974,"Ġrecount":17975,"Ġexceeding":17976,"Ġquoting":17977,"ĠJama":17978,"ĠVictorian":17979,"Ġsway":17980,"ĠGes":17981,"ĠSI":17982,"ĠKazakhstan":17983,"Ġaccusation":17984,"etr":17985,"Ah":17986,"Ġproc":17987,"Ġlamb":17988,"ĠMorales":17989,"ĠLily":17990,"Ġderail":17991,"Ġcontributes":17992,"iddle":17993,"ĠConcord":17994,"Ġelectr":17995,"Ġequip":17996,"Ġquantum":17997,"Ġthereafter":17998,"Ġarrange":17999,"Ġraided":18000,"ĠMove":18001,"ĠSang":18002,"ĠGaming":18003,"Ġbiology":18004,"ĠAmnesty":18005,"Ġdemise":18006,"ĠBarton":18007,"Ġqualifier":18008,"ANI":18009,"Ġundersc":18010,"Ġroyalty":18011,"ĠINC":18012,"Ġsne":18013,"ariat":18014,"ĠWan":18015,"Ġcluster":18016,"quin":18017,"Ġwhales":18018,"ĠFear":18019,"ĠBrew":18020,"Ġdeport":18021,"airs":18022,"Ġcensus":18023,"OUS":18024,"Ġrespectful":18025,"bone":18026,"Ġwaivers":18027,"friend":18028,"Ġsystemic":18029,"ĠDion":18030,"James":18031,"ĠAdmission":18032,"Ġstigma":18033,"ĠTIME":18034,"Ġunderpin":18035,"ĠWitnesses":18036,"Ġdigs":18037,"Ġgenocide":18038,"Ġstaging":18039,"rolled":18040,"Ġspecially":18041,"oop":18042,"Ġbaseline":18043,"ĠRF":18044,"avis":18045,"Ġvocals":18046,"COL":18047,"LD":18048,"Ġimpending":18049,"ĠCaldwell":18050,"Ġaluminium":18051,"Ġstra":18052,"ĠTayyip":18053,"Ġadmissions":18054,"falls":18055,"Ġrealizing":18056,"oen":18057,"ĠRV":18058,"ĠMog":18059,"Ġadvocating":18060,"ĠPepper":18061,"lived":18062,"ĠWick":18063,"Facebook":18064,"ĠSpect":18065,"Ġshout":18066,"Ġfractured":18067,"vet":18068,"Ġ1966":18069,"Ġcompensate":18070,"ĠVolume":18071,"Ġcategor":18072,"ĠHuntington":18073,"Free":18074,"OUGH":18075,"local":18076,"Sch":18077,"uti":18078,"Ġburger":18079,"Ġbush":18080,"Ġimpacting":18081,"Ġfrost":18082,"tti":18083,"ĠFresno":18084,"onz":18085,"shaw":18086,"ĠLibyan":18087,"Ġassert":18088,"ĠLegacy":18089,"ĠIE":18090,"ĠKinder":18091,"ĠHorizon":18092,"Ġtum":18093,"Ġsignaled":18094,"ĠFors":18095,"Ġspeedy":18096,"rang":18097,"ĠFT":18098,"Ġselecting":18099,"Ġpale":18100,"WD":18101,"Ġprobability":18102,"OUND":18103,"istrate":18104,"Ġsens":18105,"ocating":18106,"Ġinterpret":18107,"Ġpuzzle":18108,"Ġinland":18109,"Ġmanipulation":18110,"Sal":18111,"Ġfulfilling":18112,"ĠMcMaster":18113,"Make":18114,"jun":18115,"giving":18116,"ĠNiagara":18117,"Ġscholars":18118,"ALT":18119,"ĠSteam":18120,"omin":18121,"ĠSau":18122,"ĠDowning":18123,"Ġgy":18124,"ĠTit":18125,"ĠLav":18126,"ĠPepsi":18127,"Ġdumping":18128,"ĠDetect":18129,"ĠTDs":18130,"ĠKob":18131,"ĠSY":18132,"Ġpioneer":18133,"Ġ_":18134,"Ġclarified":18135,"ĠTests":18136,"opic":18137,"ĠMN":18138,"ĠBowman":18139,"umin":18140,"Ġwidow":18141,"Ġrallying":18142,"ĠPull":18143,"Ġprojection":18144,"Ġescalation":18145,"Ġlibraries":18146,"ĠFounder":18147,"ĠHugo":18148,"ĠStyle":18149,"Ġfreelance":18150,"Ġlisteners":18151,"Ġdiscovering":18152,"ĠPlans":18153,"Ġfranchises":18154,"ĠPam":18155,"Ġfarther":18156,"UI":18157,"opers":18158,"103":18159,"ublished":18160,"keys":18161,"aky":18162,"Ġinnov":18163,"¦":18164,"ĠDrum":18165,"Ġwraps":18166,"ĠCongressman":
18167,"ĠVenus":18168,"fake":18169,"ĠBronx":18170,"ĠDinner":18171,"faced":18172,"Ġbackward":18173,"inge":18174,"Ġarsenal":18175,"ĠAce":18176,"uden":18177,"fre":18178,"Ġspa":18179,"ĠSaunders":18180,"ĠMatter":18181,"ĠSpons":18182,"Ġconsultations":18183,"ĠRuss":18184,"Ġsculpture":18185,"Ġuncommon":18186,"Nov":18187,"pg":18188,"otherapy":18189,"Ġgol":18190,"ĠBlazers":18191,"Ġadvises":18192,"ĠRegulatory":18193,"ĠBoyle":18194,"Äģ":18195,"Ġcuisine":18196,"Ġencouragement":18197,"yp":18198,"eny":18199,"ĠOrchestra":18200,"ĠChicken":18201,"Ġ1965":18202,"ĠPret":18203,"ĠCooperation":18204,"ĠDevices":18205,"ĠRodney":18206,"ĠHonduras":18207,"ĠEgg":18208,"Ġchurn":18209,"Ġclutch":18210,"ĠBernstein":18211,"Ġain":18212,"Ġformidable":18213,"ĠFacility":18214,"Ġpag":18215,"mons":18216,"bol":18217,"Ġliteracy":18218,"Ġsubmissions":18219,"ĠHulu":18220,"ĠConstitutional":18221,"ĠIsh":18222,"ĠPaula":18223,"olve":18224,"Ġabundance":18225,"ĠAla":18226,"ĠEcuador":18227,"Ġreconstruction":18228,"Ġcrush":18229,"reek":18230,"ĠÂŃ":18231,"ibo":18232,"Ġpracticed":18233,"Ġpac":18234,"rett":18235,"Ġpasta":18236,"Ġresp":18237,"ĠFlag":18238,"pal":18239,"Ġcommenting":18240,"Ġrecap":18241,"âĢĶâĢĶ":18242,"ĠToy":18243,"ĠMeredith":18244,"Ġreceipt":18245,"Ġseparating":18246,"ĠMap":18247,"Ġmogul":18248,"ĠBurlington":18249,"Ġger":18250,"Ġcoordinate":18251,"grad":18252,"Ġescalated":18253,"Ġproceeded":18254,"turned":18255,"Ġupt":18256,"hum":18257,"ĠWere":18258,"Whether":18259,"Ġenjoyable":18260,"energy":18261,"Ġprohibit":18262,"Ġhurdle":18263,"Ġdivorced":18264,"Ġcommentator":18265,"GT":18266,"ATH":18267,"Ġtravellers":18268,"Ġpopulated":18269,"ĠVo":18270,"ĠRebels":18271,"Ġspurred":18272,"Ġideological":18273,"Ġelephant":18274,"keyes":18275,"Pat":18276,"Ġlinger":18277,"Ġreps":18278,"Ġcocktails":18279,"ĠKristen":18280,"istically":18281,"Ġgunmen":18282,"Ġ1920":18283,"Ġquart":18284,"National":18285,"Ġexceptions":18286,"kat":18287,"priced":18288,"ĠHarold":18289,"ĠPistons":18290,"Ġcompounds":18291,"Ġmouse":18292,"Ġexhibits":18293,"ĠBurk":18294,"Ġclassmates":18295,"Ġcirculated":18296,"Ġattributable":18297,"ĠBaton":18298,"Ġorganizer":18299,"Ġdurable":18300,"Ġsingers":18301,"ĠOman":18302,"Ġhydrogen":18303,"Ġslash":18304,"Ġaccidental":18305,"ĠAbrams":18306,"KS":18307,"itty":18308,"Ġrust":18309,"Ġselections":18310,"porting":18311,"ĠEmanuel":18312,"XX":18313,"ĠThornton":18314,"Ġcolumns":18315,"Ġsentiments":18316,"fun":18317,"Ġplight":18318,"ĠSister":18319,"ĠMaggie":18320,"hya":18321,"Daniel":18322,"Ġplung":18323,"orio":18324,"ĠYorker":18325,"ĠSaturdays":18326,"Ġloc":18327,"aye":18328,"illon":18329,"ĠConsulting":18330,"pled":18331,"ĠZin":18332,"ĠFarms":18333,"ĠGiuliani":18334,"ĠMIN":18335,"ĠHanson":18336,"ĠComplete":18337,"ourke":18338,"oche":18339,"ĠJord":18340,"Ġprofessors":18341,"ĠWILL":18342,"ĠCron":18343,"Ġdorm":18344,"Ġcracking":18345,"tur":18346,"ORS":18347,"Ant":18348,"Ġdeduction":18349,"ĠSIM":18350,"igue":18351,"ĠValent":18352,"ĠEthereum":18353,"ĠSunny":18354,"ĠExtra":18355,"ivan":18356,"ĠFo":18357,"Ġleases":18358,"ibe":18359,"Ġ1800":18360,"Ġslapped":18361,"emaker":18362,"Ġfa":18363,"rien":18364,"ĠPeriod":18365,"ĠES":18366,"ĠBlu":18367,"Ġpreserving":18368,"Ġsmarter":18369,"mans":18370,"Ġgest":18371,"zu":18372,"nu":18373,"Ġdivest":18374,"roc":18375,"ĠFlood":18376,"Given":18377,"ĠNorton":18378,"Ġgranting":18379,"Ġdealings":18380,"Ġgeographic":18381,"esa":18382,"Ġcub":18383,"Ġcriticizing":18384,"ĠCub":18385,"Ġsurroundings":18386,"ĠInternal":18387,"Ġsle":18388,"Ġcrushing":18389,"ĠPP":18390,"izations":18391,"ĠAbdel":18392,"Joe":18393,"ĠVisitors":18394,"
ĠCarly":18395,"INGTON":18396,"ĠGC":18397,"ĠWB":18398,"Ġgently":18399,"·":18400,"though":18401,"ĠAlto":18402,"Ġresting":18403,"ĠPerson":18404,"ĠTon":18405,"Ġbore":18406,"ĠClar":18407,"Ġmot":18408,"Ġbathrooms":18409,"ĠTypically":18410,"Ġdisconnect":18411,"Ġtightly":18412,"ĠHarvest":18413,"ĠHed":18414,"ĠGermans":18415,"atar":18416,"Ġkeynote":18417,"Ġimproper":18418,"fil":18419,"Ġintens":18420,"iev":18421,"Ġmedi":18422,"Ġtenant":18423,"Ġfootsteps":18424,"uli":18425,"Ġlegalization":18426,"106":18427,"ĠLexington":18428,"folio":18429,"Ġ½":18430,"ĠRita":18431,"Ġbattered":18432,"inka":18433,"ĠJavaScript":18434,"ĠMusical":18435,"ĠTalent":18436,"Ġlounge":18437,"Ġintimidation":18438,"ikh":18439,"ĠFam":18440,"Ġtherapeutic":18441,"Ġbalancing":18442,"Ġrocky":18443,"liners":18444,"ĠPredators":18445,"Ġregistering":18446,"Ġdiligence":18447,"ĠRover":18448,"ĠDot":18449,"Ġterminated":18450,"ĠEdu":18451,"Ġcharming":18452,"ĠPLAY":18453,"ĠFact":18454,"ĠCi":18455,").\"":18456,"ĠWrestle":18457,"hun":18458,"Ġopenings":18459,"Ġfou":18460,"Ġ126":18461,"spe":18462,"ĠAW":18463,"Ġbud":18464,"ĠTemper":18465,"ĠOrthodox":18466,"Ġprogressed":18467,"tre":18468,"Ġtasting":18469,"Ġscrutin":18470,"ĠLima":18471,"Ġlayout":18472,"Ġlitter":18473,"ijk":18474,"ĠParkinson":18475,"ĠAnfield":18476,"Ġdevelopmental":18477,"Ġheaven":18478,"ĠWoodward":18479,"index":18480,"Ġpistol":18481,"Ġreson":18482,"ĠWS":18483,"Ġemb":18484,"ĠLap":18485,"ĠPle":18486,"lington":18487,"ĠSit":18488,"Ġabruptly":18489,"ĠSenegal":18490,"ĠYates":18491,"aceutical":18492,"ĠJak":18493,"ĠHastings":18494,"iste":18495,"ĠDB":18496,"ĠAgent":18497,"Ġpreservation":18498,"ĠLank":18499,"ĠSuffolk":18500,"Ġboo":18501,"essed":18502,"Ġempowering":18503,"enne":18504,"Ġrecycled":18505,"Ġstrateg":18506,"Ġbrake":18507,"135":18508,"ĠStef":18509,"ĠFlake":18510,"ĠGregg":18511,"ĠRent":18512,"Ġinstallment":18513,"FW":18514,"ĠCran":18515,"obo":18516,"ml":18517,"ĠJade":18518,"Ġaccuses":18519,"ĠNvidia":18520,"Ġburg":18521,"High":18522,"Ġbothered":18523,"ĠBenn":18524,"Ġinterrupted":18525,"Ġtrek":18526,"Ġserv":18527,"Ġpatron":18528,"Ġdictator":18529,"owa":18530,"jad":18531,"ĠTulsa":18532,"Ġboil":18533,"Ġdisplaying":18534,"Ġcinem":18535,"awaited":18536,"¸":18537,"Ġreacts":18538,"ĠDee":18539,"ĠGron":18540,"igation":18541,"Ġservic":18542,"capt":18543,"Ġinsane":18544,"ĠVeteran":18545,"umen":18546,"End":18547,"ĠCream":18548,"Ġextremism":18549,"ĠMalone":18550,"Col":18551,"Ġsafeguard":18552,"Ġtomatoes":18553,"die":18554,"Ġchamp":18555,"zero":18556,"ĠPRES":18557,"Ġchoir":18558,"Ġpediatric":18559,"Ġprivileged":18560,"Ġdownstream":18561,"Business":18562,"ĠFighting":18563,"atable":18564,"Ġsums":18565,"Ġinsult":18566,"arten":18567,"ĠWikiLeaks":18568,"Ġpads":18569,"Ġretali":18570,"ĠHunts":18571,"Ġindie":18572,"ĠShields":18573,"ĠMortgage":18574,"oses":18575,"ampton":18576,"ĠVideos":18577,"ĠPER":18578,"itionally":18579,"ĠKimmel":18580,"sum":18581,"trade":18582,"acity":18583,"marked":18584,"ĠAngus":18585,"Ġtemper":18586,"Ġseizure":18587,"Ġfictional":18588,"utton":18589,"eva":18590,"Rs":18591,"Ġintra":18592,"ĠRequest":18593,"ppe":18594,"ĠeBay":18595,"ĠUSS":18596,"Ġ1500":18597,"Ġpossessing":18598,"Ġbacon":18599,"ĠSexual":18600,"ĠBuff":18601,"Ġslaughter":18602,"Ġjur":18603,"zhou":18604,"suit":18605,"ĠCha":18606,"ĠBuk":18607,"crime":18608,"ĠEasy":18609,"ĠChain":18610,"aq":18611,"ĠPall":18612,"flation":18613,"225":18614,"oup":18615,"109":18616,"ĠMcKenzie":18617,"Ġclearer":18618,"ĠDogs":18619,"oration":18620,"Ġsubs":18621,"Follow":18622,"ĠShirley":18623,"Ġadjusting":18624,"ĠEFF":18625,"Ġflipped":18626,"Ġconform":186
27,"ĠLaurent":18628,"Ġcircular":18629,"ĠNOR":18630,"Ġmort":18631,"Ġtexture":18632,"avour":18633,"Ġflex":18634,"ĠHedge":18635,"ðŁĺ":18636,"Ġtrophies":18637,"ĠINV":18638,"Ġboast":18639,"ĠTyr":18640,"ĠNichols":18641,"ĠSpa":18642,"Ġcheered":18643,"Ġprey":18644,"reach":18645,"Ġbreached":18646,"ĠRegions":18647,"ĠLyft":18648,"ĠTul":18649,"ĠKore":18650,"Ġendure":18651,"ĠCover":18652,"\").":18653,"ĠSavage":18654,"ère":18655,"reens":18656,"Ġnic":18657,"sector":18658,"Ġweaknesses":18659,"Ġreboot":18660,"Ġ210":18661,"Ġimagery":18662,"ĠFrem":18663,"Ġclue":18664,"ĠLars":18665,"Ġfaction":18666,"hetic":18667,"Ġallied":18668,"ĠMarvin":18669,"Ġmethodology":18670,"ĠTN":18671,"Ġutter":18672,"Ġ270":18673,"ĠVolvo":18674,"oline":18675,"ĠACLU":18676,"Ġindirect":18677,"Ġminer":18678,"ĠBale":18679,"ĠStrange":18680,"ĠFuller":18681,"Ġexpelled":18682,"ĠTropical":18683,"Ġremotely":18684,"ĠTIM":18685,"Ġinnocence":18686,"Ġconfined":18687,"Ġfares":18688,"Ġprevalent":18689,"Ġdesp":18690,"House":18691,"azar":18692,"Ġgestures":18693,"ĠCES":18694,"ĠDM":18695,"eal":18696,"ĠÐ":18697,"Ġburnt":18698,"Ġframed":18699,"ĠDani":18700,"Ġhol":18701,"ĠCannes":18702,"ĠHayden":18703,"Ġwardrobe":18704,"ĠAssange":18705,"ĠSamp":18706,"bay":18707,"sky":18708,"ĠHence":18709,"ĠGrizzlies":18710,"rates":18711,"laws":18712,"ĠMandela":18713,"ĠHoover":18714,"rics":18715,"charged":18716,"Ġexclude":18717,"Ġpassive":18718,"Ġcontinuation":18719,"Ġblunt":18720,"Ġvac":18721,"ĠEmerging":18722,"rench":18723,"tv":18724,"ĠHollow":18725,"ĠOC":18726,"Ġadvisors":18727,"Ġrendered":18728,"ĠBernardino":18729,"ĠSupporters":18730,"ronic":18731,"Ġchancellor":18732,"Ġ1963":18733,"Ġuranium":18734,"Ġak":18735,"ĠOptions":18736,"ermott":18737,"ĠBerger":18738,"ibia":18739,"Ġexplosions":18740,"Ġimpairment":18741,"Ġhail":18742,"Ġalley":18743,"Ġcruelty":18744,"ĠClarence":18745,"Ġvariations":18746,"Ġrealm":18747,"Ġrenovations":18748,"ĠNorwich":18749,"Ġbelongings":18750,"Ġmerchants":18751,"ĠMinisters":18752,"ĠDodd":18753,"Ġviewer":18754,"Ġneutrality":18755,"quer":18756,"ĠPrinceton":18757,"dead":18758,"arest":18759,"GET":18760,"ĠCanadiens":18761,"ĠIgn":18762,"clear":18763,"Mal":18764,"ĠBridges":18765,"ĠHayward":18766,"Ġremarked":18767,"ingle":18768,"Ġsob":18769,"Ġdepart":18770,"beans":18771,"Ġpreserved":18772,"ĠFairfax":18773,"Ġforgot":18774,"ĠBeh":18775,"Rob":18776,"Ġcooperative":18777,"ullah":18778,"Ġmates":18779,"Ġrang":18780,"Ġthigh":18781,"Ġabducted":18782,"Ġchaired":18783,"ĠHearts":18784,"Ġidentifies":18785,"ĠBuckingham":18786,"ijn":18787,"ĠJab":18788,"Ġclashed":18789,"feed":18790,"sites":18791,"ĠCareer":18792,"exp":18793,"ĠBuccaneers":18794,"scape":18795,"Ġupdating":18796,"Ġintentional":18797,"ĠGuam":18798,"ĠBreakfast":18799,"ĠHag":18800,"Media":18801,"Ġtapping":18802,"Ġpics":18803,"Ġeaten":18804,"Ġpremise":18805,"Kim":18806,"ĠStorage":18807,"Ġextensively":18808,"Ġoutrageous":18809,"ĠSadly":18810,"Global":18811,"¢":18812,"leaning":18813,"CM":18814,"Ġeasiest":18815,"ument":18816,"Ġ122":18817,"Ġdaunting":18818,"ISE":18819,"Ġsunset":18820,"Ġreset":18821,"Ġbent":18822,"Trust":18823,"ĠCaleb":18824,"ĠRut":18825,"ĠBast":18826,"ETS":18827,"iencies":18828,"Ġpu":18829,"ature":18830,"Ġrealities":18831,"omi":18832,"Ġsoda":18833,"Ġunveil":18834,"ĠGoldberg":18835,"opes":18836,"Ġuprising":18837,"ĠMR":18838,"Ġendorse":18839,"Ġsail":18840,"Ġconverting":18841,"Ġglamorous":18842,"ĠHollande":18843,"108":18844,"isky":18845,"Ġcushion":18846,"240":18847,"Ġadventures":18848,"Ġantitrust":18849,"ĠStockholm":18850,"pace":18851,"ĠVald":18852,"ĠTransfer":18853,"ERT":18854,"ĠMcInt":18855,"Ġsurging":18856
,"ogn":18857,"Ġlauded":18858,"ĠZam":18859,"ĠRough":18860,"TOR":18861,"Ġwed":18862,"Ġorigins":18863,"ĠEld":18864,"oso":18865,"Ġsupplying":18866,"ĠPetty":18867,"ĠTwe":18868,"ĠDenise":18869,"ĠBec":18870,"Ġbehave":18871,"Ġ121":18872,"estone":18873,"ĠBoulder":18874,"ĠBlackhawks":18875,"ĠWyatt":18876,"Ġfiguring":18877,"ĠDeborah":18878,"agi":18879,"significant":18880,"Ġasthma":18881,"Ġmessy":18882,"mpire":18883,"Ġax":18884,"Ġaspiring":18885,"ĠNH":18886,"ĠGina":18887,"heavy":18888,"ĠVick":18889,"ÃŃs":18890,"something":18891,"Ġbodily":18892,"Ġunauthorized":18893,"ĠActually":18894,"ĠOH":18895,"Ġmicrophone":18896,"allah":18897,"Ġrampant":18898,"Ġrelocated":18899,"Ġwidening":18900,"ĠCait":18901,"nel":18902,"ĠBlackBerry":18903,"Ġprofessionally":18904,"ĠInterestingly":18905,"Ġbarbecue":18906,"Ġresisting":18907,"ĠNunes":18908,"disc":18909,"Ġgroundbreaking":18910,"orable":18911,"ĠRegulation":18912,"Ġborrowed":18913,"Ġleaking":18914,"Ġlengths":18915,"Ġunveiling":18916,"houses":18917,"Ġ155":18918,"ĠBillboard":18919,"icion":18920,"Times":18921,"ĠZoe":18922,"ĠAbby":18923,"bus":18924,"ĠMinutes":18925,"ributed":18926,"Ġparap":18927,"Ġfertil":18928,"ABC":18929,"ĠIsle":18930,"Ġtherapist":18931,"Ġgubernatorial":18932,"ĠAust":18933,"ĠLoan":18934,"Bo":18935,"ĠNRL":18936,"rag":18937,"Clear":18938,"Ġrevision":18939,"Ġflesh":18940,"BD":18941,"iji":18942,"Ġproductions":18943,"Ġcoconut":18944,"ĠMcCorm":18945,"ĠDash":18946,"Ġgeography":18947,"hearted":18948,"Ġarson":18949,"Ġgoaltender":18950,"Ġbelly":18951,"Ġqualifications":18952,"ĠActiv":18953,"Ġhooked":18954,"ĠHungarian":18955,"Ġprotocols":18956,"inking":18957,"Ġfronts":18958,"ĠKuala":18959,"ĠToys":18960,"ĠFitness":18961,"Ġwarfare":18962,"Ġoutp":18963,"ĠQuestions":18964,"Ġwel":18965,"ĠShan":18966,"ĠMorton":18967,"ĠRomero":18968,"Ġglance":18969,"ĠTay":18970,"Ġsneakers":18971,"ĠSymphony":18972,"Ġinspect":18973,"enna":18974,"Nobody":18975,"Ġscrapped":18976,"ĠDeVos":18977,"ĠDominican":18978,"Ġplanets":18979,"anova":18980,"Ġnotify":18981,"Ġincurred":18982,"Ġunders":18983,"Ġdetainees":18984,"ĠMarriott":18985,"electric":18986,"ĠKes":18987,"union":18988,"ĠWatt":18989,"ATING":18990,"Ġslipping":18991,"Ġraft":18992,"Ġresisted":18993,"Ġcred":18994,"tern":18995,"Ġflurry":18996,"Line":18997,"Ġconsulted":18998,"Ġanalyzing":18999,"107":19000,"ĠWide":19001,"¶":19002,"human":19003,"ĠFEMA":19004,"Ġsmash":19005,"Ġcorps":19006,"Ġbarric":19007,"Ġcollar":19008,"ĠTB":19009,"without":19010,"ĠCanucks":19011,"Ġneedle":19012,"ĠSidney":19013,"ĠLauderdale":19014,"Ġglove":19015,"ilee":19016,"pic":19017,"Ġbenef":19018,"ĠHydro":19019,"ĠDisc":19020,"ĠArg":19021,"Ġtermin":19022,"Ġsympath":19023,"Ġpest":19024,"ĠCoff":19025,"Ġadvancement":19026,"social":19027,"pol":19028,"ĠEmails":19029,"Ġstacked":19030,"ibly":19031,"ĠAlbion":19032,"Ġfist":19033,"hero":19034,"ĠMarian":19035,"asia":19036,"Ġtownship":19037,"Ġslick":19038,"Ġmodeling":19039,"achers":19040,"ĠArgent":19041,"ĠSUN":19042,"arde":19043,"Ġpinned":19044,"Ġhitters":19045,"Ġdare":19046,"ictions":19047,"arily":19048,"Ġsting":19049,"Ġprimaries":19050,"appointed":19051,"Ġformats":19052,"Ġglitter":19053,"Ġpatches":19054,"Ġstrategically":19055,"Ġaka":19056,"Ġyielded":19057,"BY":19058,"Ġjeopard":19059,"ĠVand":19060,"Ġcrowned":19061,"Ġoccupants":19062,"Ġtanker":19063,"ĠVisa":19064,"Great":19065,"Ġseasoned":19066,"ĠAviv":19067,"Ġfiery":19068,"Ġderivatives":19069,"Ġdiverted":19070,"Ġacqu":19071,"Ġsandwiches":19072,"ĠLorenzo":19073,"Ġpardon":19074,"ĠBarber":19075,"ĠAgricultural":19076,"ĠPhilly":19077,"Ġregrets":19078,"ĠMillions":19079,"ĠFrazier":19080,"Ġtreasury":
19081,"ĠKenn":19082,"Ġdestined":19083,"olved":19084,"Back":19085,"leader":19086,"lyss":19087,"ĠReyes":19088,"001":19089,"bags":19090,"ĠStandards":19091,"ĠExcellence":19092,"ĠMaid":19093,"ĠAnthem":19094,"FIELD":19095,"Ġrevived":19096,"ĠQuad":19097,"Ġdistinguished":19098,"Ġweighted":19099,"Ġritual":19100,"Ġinvites":19101,"wana":19102,"iture":19103,"ĠCI":19104,"ĠMAY":19105,"Ġunfairly":19106,"ĠKP":19107,"ĠMidlands":19108,"Ġmint":19109,"uers":19110,"Ġcatalog":19111,"arant":19112,"Ġlosers":19113,"Ġscheduling":19114,"esar":19115,"Ġtransferring":19116,"Ġbankrupt":19117,"Ġmethamphetamine":19118,"ĠEsk":19119,"ĠTreatment":19120,"ĠResponse":19121,"Ġhomework":19122,"ĠBald":19123,"Ġembarrassment":19124,"Ġpoorest":19125,"ĠPlatinum":19126,"ĠFac":19127,"Ġunleashed":19128,"Ġbrighter":19129,"002":19130,"Ġdisl":19131,"ĠLowry":19132,"ived":19133,"ĠDemon":19134,"ĠNonetheless":19135,"arro":19136,"ĠCONT":19137,"ifted":19138,"ĠFreder":19139,"isson":19140,"Ġrout":19141,"ARA":19142,"Ġswinging":19143,"Oct":19144,"Ġliable":19145,"Ġleaning":19146,"Ġlungs":19147,"380":19148,"ĠProcess":19149,"ĠCov":19150,"terrorism":19151,"Ġresistant":19152,"Ġpumped":19153,"Ġtripled":19154,"Semitism":19155,"ĠMia":19156,"Ġpenetration":19157,"ĠLutheran":19158,"BU":19159,"odes":19160,"Ġspanning":19161,"utch":19162,"Trans":19163,"ĠVolunteers":19164,"Ġpathway":19165,"Ġinfectious":19166,"Ġdrastic":19167,"ĠEngineers":19168,"Ġprincess":19169,"acts":19170,"usting":19171,"utive":19172,"achel":19173,"DO":19174,"Ġpave":19175,"ĠHerrera":19176,"Ġnearing":19177,"help":19178,"Ġembarked":19179,"Ġmodes":19180,"ĠDriving":19181,"Ġopting":19182,"Best":19183,"Ġbehavioral":19184,"Ġcables":19185,"App":19186,"otion":19187,"ĠExt":19188,"ĠSinclair":19189,"ĠInsp":19190,"Ġsinking":19191,"Next":19192,"ĠLumpur":19193,"ĠShadow":19194,"Donald":19195,"itals":19196,"Ġmentions":19197,"floor":19198,"Ġconsiderations":19199,"ĠSquad":19200,"ĠPlate":19201,"dos":19202,"Friday":19203,"Hopefully":19204,"arre":19205,"Ġalum":19206,"\":\"/":19207,"Ġfet":19208,"anza":19209,"Ġdign":19210,"ĠNguyen":19211,"ĠRutgers":19212,"ĠSew":19213,"Ġfilters":19214,"ofi":19215,"Ġunavailable":19216,"ranking":19217,"Ġrefining":19218,"ĠUNC":19219,"Ġmax":19220,"yll":19221,"Ġhandsome":19222,"Ġutterly":19223,"See":19224,"ĠStores":19225,"Ke":19226,"ĠAdvoc":19227,"ordon":19228,"umbles":19229,"Ġbugs":19230,"olar":19231,"ĠCork":19232,"Ġtoken":19233,"Ġauthorization":19234,"Ġconscience":19235,"Ġrepl":19236,"edi":19237,"owitz":19238,"iven":19239,"Ġlieu":19240,"Ġlifts":19241,"Lean":19242,"Ġmagnificent":19243,"ĠFilms":19244,"onents":19245,"Ġ***":19246,"Green":19247,"ĠAdvocate":19248,"ĠArrow":19249,"Ġblows":19250,"Ġexploited":19251,"fly":19252,"ĠAmar":19253,"ĠNOTICE":19254,"Ġsincere":19255,"found":19256,"ĠRud":19257,"Ġcy":19258,"ĠHeidi":19259,"Ġempowered":19260,"Ġweakest":19261,"ĠKru":19262,"Credit":19263,"aunted":19264,"Ġexotic":19265,"aning":19266,"Ġaw":19267,"ĠMulti":19268,"Ġanimation":19269,"850":19270,"ĠCounter":19271,"ĠNit":19272,"alli":19273,"Ġcapitalize":19274,"Ġexecuting":19275,"Ġdescent":19276,"ovi":19277,"ĠKimberly":19278,"headed":19279,"Ġmentioning":19280,")-":19281,"ĠSpecifically":19282,"ayette":19283,"ihad":19284,"ĠIss":19285,"Ġdisagreed":19286,"ĠKum":19287,"Ġurges":19288,"Ġpermitting":19289,"Ġpy":19290,"isp":19291,"Ġhygiene":19292,"Ġmourning":19293,"Ġcyclists":19294,"cats":19295,"FER":19296,"cycl":19297,"Ġnewcomers":19298,"Ġplead":19299,"Ġmend":19300,"secret":19301,"fan":19302,"Ġtranslates":19303,"unit":19304,"ĠTank":19305,"drive":19306,"ĠSite":19307,"Ġacceleration":19308,"ĠEnrique":19309,"ĠElaine":1931
0,"Ġstaring":19311,"Ġbackwards":19312,"Ġot":19313,"Ġvot":19314,"ĠHK":19315,"Ġfian":19316,"ĠLockheed":19317,"Ġmanifest":19318,"ĠZurich":19319,"pad":19320,"ĠRav":19321,"flow":19322,"Ġmoms":19323,"ĠSolid":19324,"ĠReady":19325,"aughlin":19326,"Ġreminding":19327,"ĠCOR":19328,"Ġoptimal":19329,"ĠCrisis":19330,"Ġcholesterol":19331,"ĠGerard":19332,"Ġfest":19333,"Ġsanction":19334,"Ġdragging":19335,"inent":19336,"ĠBravo":19337,"Ġamend":19338,"aval":19339,"Ġpoem":19340,"Ġinvasive":19341,"Ġlandsc":19342,"leigh":19343,"Ġheadache":19344,"ĠMuse":19345,"ĠTurning":19346,"girl":19347,"cess":19348,"Ġfalsely":19349,"Ġplaintiff":19350,"Ġheavier":19351,"Ġrumored":19352,"Ġeleven":19353,"ĠConsumers":19354,"ĠOriginally":19355,"ĠStatement":19356,"bors":19357,"Ġrevoked":19358,"ĠOmaha":19359,"Fox":19360,"ĠKle":19361,"Ġvault":19362,"Ġoutdated":19363,"umes":19364,"ĠArk":19365,"Ġapologised":19366,"Ġrockets":19367,"ĠMarines":19368,"Ġcaptures":19369,"ĠMW":19370,"ĠWalters":19371,"ĠFactor":19372,"Ġensuing":19373,"ĠSession":19374,"oons":19375,"Ġ132":19376,"gt":19377,"ĠPoints":19378,"Ġexhaust":19379,"ĠOsaka":19380,"heed":19381,"Ġhandic":19382,"amber":19383,"inging":19384,"Ġll":19385,"Ġescorted":19386,"Ġfloated":19387,"Ġmerge":19388,"Ġcompliment":19389,"ĠVC":19390,"Ġinsulin":19391,"ĠDebt":19392,"ça":19393,"Ġpens":19394,"Ġassertion":19395,"Ġredevelopment":19396,"moderate":19397,"Ġleftist":19398,"ĠBA":19399,"Ġherd":19400,"Ġinsecurity":19401,"liter":19402,"Ġcommence":19403,"ĠCaucus":19404,"Ġnovels":19405,"ĠChevron":19406,"Ġerosion":19407,"ĠNicholson":19408,"ĠRoof":19409,"ĠVolunteer":19410,"Ġcompelled":19411,"Ġcongratulated":19412,"ĠPanel":19413,"Ġov":19414,"idelity":19415,"Ġspect":19416,"Ġbee":19417,"ĠAssistance":19418,"Ġterrified":19419,"iew":19420,"Ġweekday":19421,"ĠHiggins":19422,"special":19423,"ubs":19424,"anton":19425,"Ġbribes":19426,"Ġneat":19427,"ĠCliff":19428,"Ġdisqualified":19429,"ĠND":19430,"Ġvers":19431,"andra":19432,"Ġgraft":19433,"value":19434,"Ġportray":19435,"Ġdaytime":19436,"ksh":19437,"Ġconsist":19438,"Ġhonesty":19439,"ĠTimber":19440,"ĠNich":19441,"Ġinvented":19442,"ĠBuch":19443,"Ġskull":19444,"Ġtags":19445,"Ġ124":19446,"ighth":19447,"Ġrelaxing":19448,"Online":19449,"Ġsanctioned":19450,"Sport":19451,"ĠCove":19452,"Ġcomics":19453,"MW":19454,"AMA":19455,"mother":19456,"Home":19457,"ĠCustomer":19458,"Ġstrides":19459,"ĠWins":19460,"Ġrollout":19461,"ĠWeaver":19462,"Ġshuttle":19463,"Ġsteak":19464,"Ġglorious":19465,"ĠToll":19466,"Ġtrustee":19467,"Ġinstallations":19468,"ĠOpportunity":19469,"Ġoper":19470,"horse":19471,"Ġaided":19472,"irus":19473,"Ġsleek":19474,"Ġyelled":19475,"ĠSocialist":19476,"Ġapplaud":19477,"ĠWah":19478,"Ġdevote":19479,"Ġdh":19480,"Ġarchitectural":19481,"ĠMAC":19482,"centric":19483,"ĠSense":19484,"illas":19485,"ĠArchbishop":19486,"glass":19487,"Ġallowance":19488,"Ġbundle":19489,"andon":19490,"eight":19491,"ĠKare":19492,"haus":19493,"ĠAndreas":19494,"Ġdoll":19495,"RAM":19496,"Ġvolunteering":19497,"ĠRaleigh":19498,"Ġbees":19499,"Ġnickel":19500,"Ġgenerosity":19501,"Ġhomeowner":19502,"ĠLieutenant":19503,"Ġlandfall":19504,"ĠRenew":19505,"ĠGiving":19506,"ĠContribut":19507,"aret":19508,"ulf":19509,"Ġreinforce":19510,"ĠSalv":19511,"ĠVenice":19512,"Ġfreedoms":19513,"ĠTools":19514,"Ġ1962":19515,"ĠWarm":19516,"majority":19517,"Ġpleas":19518,"oding":19519,"plant":19520,"Ġtow":19521,"ĠBlanc":19522,"ĠPipeline":19523,"ĠMoor":19524,"Ġrefrain":19525,"ĠExplore":19526,"language":19527,"cers":19528,"ĠWT":19529,"sent":19530,"ĠNun":19531,"Ġplastics":19532,"acas":19533,"Ġdisruptions":19534,"Ġdiscomfort":19535,"enko":19536,"Ġ
imprisoned":19537,"Copyright":19538,"Ġmyriad":19539,"Ġparenting":19540,"Ġspree":19541,"NBC":19542,"Ġonion":19543,"ĠIsraelis":19544,"ĠRA":19545,"Ġrelocate":19546,"113":19547,"ĠHir":19548,"ĠDre":19549,"ĠDry":19550,"ĠONE":19551,"ĠAdministrator":19552,"Ġprints":19553,"ĠGret":19554,"Ġundergraduate":19555,"ĠLif":19556,"avers":19557,"ĠCarney":19558,"Ġapex":19559,"Ġlenses":19560,"Ġliberals":19561,"gb":19562,"ĠWhereas":19563,"Ġcountryside":19564,"amine":19565,"ĠTerminal":19566,"Ġintr":19567,"ĠTrey":19568,"ALS":19569,"Ġcontinental":19570,"Ġselfies":19571,"FILE":19572,"ĠUnity":19573,"Ġauthoritarian":19574,"Ġoriginated":19575,"ĠExcept":19576,"yna":19577,"Ġmonet":19578,"Ġundermining":19579,"ĠGS":19580,"pi":19581,"iq":19582,"Ġslides":19583,"ĠSummary":19584,"Ġpains":19585,"cluding":19586,"Ġequation":19587,"locked":19588,"Ġfraternity":19589,"Ġwithstand":19590,"Ġdevastation":19591,"Ġdemo":19592,"late":19593,"Ġpunches":19594,"Ġgeared":19595,"nen":19596,"ĠBowie":19597,"attle":19598,"Ġpolitic":19599,"ĠGle":19600,"mented":19601,"ĠCoordinator":19602,"Ġupwards":19603,"ĠMega":19604,"angled":19605,"Ġengineered":19606,"Ġluggage":19607,"ĠWen":19608,"ĠSergeant":19609,"Ġkindergarten":19610,"ĠPortsmouth":19611,"uddin":19612,"ket":19613,"oba":19614,"Ġoscill":19615,"esse":19616,"ĠOlson":19617,"ĠBorough":19618,"Ġsupplements":19619,"ĠEvening":19620,"ANE":19621,"Ġlava":19622,"Ġgearing":19623,"setting":19624,"urgical":19625,"asty":19626,"ĠDaytona":19627,"Ġbrewery":19628,"Ġpledges":19629,"rounder":19630,"ulous":19631,"ĠHancock":19632,"rex":19633,"Ġram":19634,"Ġproceeding":19635,"ĠMurdoch":19636,"Ġdowngrade":19637,"Ġstatues":19638,"Ġdebated":19639,"ĠSleep":19640,"Ġ144":19641,"ĠRuby":19642,"ĠFi":19643,"123":19644,"ĠArabic":19645,"Ġlasts":19646,"ĠIvy":19647,"ĠWid":19648,"rown":19649,"stick":19650,"?'\"":19651,"ĠSTEM":19652,"Ġsensible":19653,"htar":19654,"Ġharbor":19655,"Ġcra":19656,"ĠAlbum":19657,"ĠCarnival":19658,"Ġimplies":19659,"agement":19660,"ĠInitially":19661,"Ġchooses":19662,"Jeff":19663,"ĠHig":19664,"Ġtam":19665,"Ġlump":19666,"ucks":19667,"Ġrepatri":19668,"ĠMercy":19669,"zza":19670,"Ġ365":19671,"ĠRicardo":19672,"ogram":19673,"Ġundergone":19674,"system":19675,"Ġtel":19676,"ĠKee":19677,"ully":19678,"istas":19679,"Ġgrains":19680,"ĠTomorrow":19681,"ĠRC":19682,"ĠTurk":19683,"Ġfreshmen":19684,"ĠAway":19685,"ĠSach":19686,"ĠUltimate":19687,"Ġoffensively":19688,"ismo":19689,"Ġteaser":19690,"ĠJud":19691,"Ġlegitimacy":19692,"opt":19693,"ĠCobb":19694,"Ġrejecting":19695,"ĠSolo":19696,"ĠArcher":19697,"Ġsoutheastern":19698,"ĠPlain":19699,"ĠLoss":19700,"Ġminerals":19701,"ĠMari":19702,"Ġscrambling":19703,"ĠPeak":19704,"Ġhavoc":19705,"rings":19706,"Ġunofficial":19707,"ĠHaj":19708,"director":19709,"ĠCanal":19710,"ĠNSA":19711,"ĠEaton":19712,"ĠPART":19713,"ĠCommissioners":19714,"Ġwellbeing":19715,"resa":19716,"Ġunderstandable":19717,"dates":19718,"ĠSorry":19719,"Ġastonishing":19720,"Ġrevise":19721,"ĠEc":19722,"ĠLack":19723,"endi":19724,"endale":19725,"also":19726,"Ġcolder":19727,"Ġheel":19728,"Ġcellular":19729,"Conn":19730,"ĠThur":19731,"Ġmassage":19732,"olla":19733,"clus":19734,"Ġtoilets":19735,"ĠCelebr":19736,"Ġtackled":19737,"Ġchorus":19738,"ETA":19739,"anca":19740,"ĠOLED":19741,"Ġpunk":19742,"ĠBrain":19743,"ĠNuggets":19744,"Ġseamless":19745,"make":19746,"atted":19747,"ĠRog":19748,"ĠPatch":19749,"Ġruined":19750,"Ins":19751,"Ġconsolidate":19752,"Ġgospel":19753,"ĠCaption":19754,"Ġoverweight":19755,"Ġscreened":19756,"ĠKraft":19757,"ĠBain":19758,"breaker":19759,"ĠFeinstein":19760,"ĠDoc":19761,"Ġdeepest":19762,"ĠOL":19763,"Ġtunes":19764,"Ġright
ly":19765,"ĠLanc":19766,"ĠBrotherhood":19767,"Ġpoultry":19768,"ĠPure":19769,"Ġstimulate":19770,"Ġdiscourse":19771,"ĠStark":19772,"Ġmuseums":19773,"ention":19774,"Ġtaxation":19775,"ĠAkron":19776,"ayer":19777,"ĠKirby":19778,"farm":19779,"oser":19780,"Ġcommend":19781,"Ġunarmed":19782,"ensions":19783,"Ġsuperst":19784,"Ġoceans":19785,"Ġmisuse":19786,"LO":19787,"ĠByrne":19788,"ĠMaritime":19789,"Ġdense":19790,"Ġexcuses":19791,"Ġsuppose":19792,"ĠMarks":19793,"Ġrainy":19794,"Ġreplicate":19795,"Ġboutique":19796,"ĠRenaissance":19797,"jas":19798,"icted":19799,"Ġreferenced":19800,"ĠTir":19801,"ĠHatch":19802,"ĠCry":19803,"ĠPayPal":19804,"Ġfulfil":19805,"ĠHawaiian":19806,"come":19807,"ĠThirty":19808,"Ġ260":19809,"ĠYak":19810,"Ġangles":19811,"Ġlandlord":19812,"Ġlavish":19813,"Women":19814,"ĠNT":19815,"Ġreinforced":19816,"Ġprevail":19817,"ĠCommunities":19818,"Ġfootwear":19819,"Ġassurances":19820,"Ġlb":19821,"Ġairing":19822,"Ġresorts":19823,"ĠFiji":19824,"ĠShay":19825,"Ġprevailing":19826,"many":19827,"Ġimpe":19828,"ĠDul":19829,"Ġsymbols":19830,"zb":19831,"ĠCere":19832,"Ġapplauded":19833,"Ġsoundtrack":19834,"Ġdrunken":19835,"ĠEuropeans":19836,"Ġherds":19837,"moving":19838,"WR":19839,"ĠHindi":19840,"Ġwaking":19841,"Jo":19842,"Andrew":19843,"rosse":19844,"ĠLegislative":19845,"Ġdisgrace":19846,"Nothing":19847,"ĠBulgaria":19848,"Ġhumidity":19849,"Ġtranslation":19850,"Ġmeasurements":19851,"Ġvying":19852,"ĠBrid":19853,"Max":19854,"Ġdir":19855,"unci":19856,"Ġdefines":19857,"Ġperfection":19858,"ancers":19859,"Matt":19860,"ĠShinzo":19861,"ĠPresidents":19862,"Ġginger":19863,"onna":19864,"existing":19865,"rika":19866,"enced":19867,"ĠBray":19868,"Ġgall":19869,"Ġdisrespect":19870,"ĠCumber":19871,"Ġcontestant":19872,"ucky":19873,"anticipated":19874,"abled":19875,"LLOW":19876,"Bel":19877,"ĠKear":19878,"Ġstoryline":19879,"Ġrigs":19880,"ĠScots":19881,"ĠChap":19882,"ĠThankfully":19883,"Ġcommunist":19884,"ĠAdviser":19885,"Ġregist":19886,"Ġannoying":19887,"ĠDVD":19888,"Ġethic":19889,"ĠFilipino":19890,"ĠAdidas":19891,"Ġbilling":19892,"Ġalleviate":19893,"Ġsmoked":19894,"Ġhazard":19895,"EV":19896,"Ag":19897,"baum":19898,"Ġdoses":19899,"Ġoutcry":19900,"Ġinclined":19901,"Ġpsychologist":19902,"itzer":19903,"January":19904,"Ġmornings":19905,"aught":19906,"Ġsurreal":19907,"ĠCannon":19908,"avy":19909,"ĠCris":19910,"cf":19911,"Ġinterpreted":19912,"Ġpersecution":19913,"vation":19914,"Ġupfront":19915,"ĠWaste":19916,"Ġmills":19917,"Ġbombings":19918,"ĠHeaven":19919,"ĠFlat":19920,"Ġboxer":19921,"Ġavenues":19922,"Invest":19923,"ĠZika":19924,"Ġbackstage":19925,"idas":19926,"eston":19927,"ead":19928,"Ġbishops":19929,"Ġrender":19930,"Ġfootballer":19931,"Ġspilled":19932,"Only":19933,"Ġsaddened":19934,"ĠAbove":19935,"inator":19936,"tro":19937,"onen":19938,"ĠAMC":19939,"Ġstringent":19940,"Ġfooting":19941,"ĠGhost":19942,"Ġtexting":19943,"ĠCPI":19944,"ĠUW":19945,"Ġaccol":19946,"iries":19947,"ĠFlex":19948,"ĠCarolyn":19949,"Andre":19950,"Ġsiege":19951,"Muslim":19952,"Ġautomobile":19953,"reci":19954,"Ġdean":19955,"atre":19956,"Ġwax":19957,"Ġwo":19958,"ĠDuffy":19959,"Ġfiance":19960,"Ġfib":19961,"Ġeagle":19962,"ĠCatal":19963,"Ġinfants":19964,"Ġsubmitting":19965,"Ġdownhill":19966,"Ġstaffer":19967,"ĠLights":19968,"Ġeater":19969,"ĠCaliforn":19970,"Ġsupervisors":19971,"ĠPy":19972,"Ġcondemnation":19973,"Ġsci":19974,"Ġhated":19975,"Ġtil":19976,"ĠLavrov":19977,"Ġsab":19978,"Ġmotors":19979,"Ġlogging":19980,"ĠOwn":19981,"Ġpi":19982,"Ġrepeating":19983,"ĠDOJ":19984,"enary":19985,"ĠChow":19986,"fat":19987,"Ġbalcony":19988,"orie":19989,"NING":19990,"ĠUnified":19991,"Nei
l":19992,"Bill":19993,"ĠSims":19994,"uten":19995,"LV":19996,"ĠEMS":19997,"Ġsip":19998,"Ġreplaces":19999,"ichi":20000,"ĠFig":20001,"ĠCharity":20002,"Ġpeek":20003,"Ġrack":20004,"Ġcousins":20005,"Ġresolving":20006,"Ġthrone":20007,"ĠEngine":20008,"ĠChak":20009,"Ġlamented":20010,"Ġwipe":20011,"Ġnutrients":20012,"ĠChat":20013,"AMP":20014,"ĠOprah":20015,"uming":20016,"serving":20017,"Ġfir":20018,"Ġlandlords":20019,"neck":20020,"Ġupload":20021,"Ġunspecified":20022,"Ġicy":20023,"´":20024,"Ġze":20025,"Ġprohibits":20026,"ĠFI":20027,"Res":20028,"ĠEff":20029,"hell":20030,"umbo":20031,"Ġreceipts":20032,"Ġoperatives":20033,"stant":20034,"Ġwives":20035,"ĠCinema":20036,"Ġnegligence":20037,"Ġgases":20038,"ĠLau":20039,"Ġbrew":20040,"August":20041,"never":20042,"Ġpenned":20043,"Ġincomplete":20044,"ĠZh":20045,"esi":20046,"Ġranged":20047,"apolis":20048,"Ġwithdrawing":20049,"ĠLevi":20050,"ĠLevy":20051,"ĠDaly":20052,"Ġdelaying":20053,"ĠMSNBC":20054,"ĠCyrus":20055,"ĠNutrition":20056,"NN":20057,"Ġwinding":20058,"Ġglow":20059,"ĠMY":20060,"Ġgoodwill":20061,"ĠMON":20062,"Ġslots":20063,"ĠNina":20064,"ĠFIR":20065,"ĠLTE":20066,"ĠInnov":20067,"dev":20068,"ctic":20069,"Ġanalyses":20070,"ĠBangalore":20071,"Ġtales":20072,"Ġovercame":20073,"ĠThurs":20074,"Ġcherry":20075,"ĠNou":20076,"ĠFlowers":20077,"1000":20078,"updated":20079,"rieve":20080,"ĠBeautiful":20081,"iak":20082,"Ġplayback":20083,"Ġheadset":20084,"Ġashamed":20085,"Min":20086,"Ġadm":20087,"ĠLucky":20088,"ĠTucson":20089,"Ġentirety":20090,"ranging":20091,"ĠVance":20092,"kered":20093,"image":20094,"ĠGord":20095,"War":20096,"Ġsimilarities":20097,"dig":20098,"ĠJude":20099,"Ġlonely":20100,"hra":20101,"ĠStaples":20102,"ĠACA":20103,"Ġmeasurement":20104,"Ġcooper":20105,"ATER":20106,"ĠMeng":20107,"Ġbarring":20108,"190":20109,"ĠBatt":20110,"Ġreproductive":20111,"ĠRowe":20112,"Ġsubsid":20113,"Ġslogans":20114,"ugar":20115,"ĠKeller":20116,"ingham":20117,"fuel":20118,"Ġhid":20119,"afe":20120,"Ġindul":20121,"cash":20122,"Ġstressing":20123,"ĠMIT":20124,"Ġtrump":20125,"ancer":20126,"ĠPes":20127,"ĠMint":20128,"Ġcrossover":20129,"ĠWeiss":20130,"ĠElvis":20131,"ĠPermanent":20132,"ĠKhalid":20133,"Ġunjust":20134,"Ġexceptionally":20135,"Ġfut":20136,"Ġavid":20137,"ĠEthics":20138,"Ġutilized":20139,"Ġfeasibility":20140,"Ġcatering":20141,"Press":20142,"wayne":20143,"October":20144,"Ġfavors":20145,"Ġobsession":20146,"Ġmelt":20147,"Ġmug":20148,"ĠMK":20149,"Ġapples":20150,"Ġvine":20151,"cliffe":20152,"Ġgrat":20153,"Ġspells":20154,"ounced":20155,"Ġdecree":20156,"issy":20157,"Team":20158,"Ġdeploying":20159,"Feb":20160,"Ġmiserable":20161,"Ġwat":20162,"ĠBust":20163,"ĠNorris":20164,"ĠTimberwolves":20165,"Ġangered":20166,"ĠArn":20167,"oft":20168,"rome":20169,"Ġadvertisements":20170,"onal":20171,"Ġnun":20172,"Ġtorque":20173,"Ġslave":20174,"Ġnonsense":20175,"Ġcoy":20176,"Ġcites":20177,"Game":20178,"Ġarchitects":20179,"playing":20180,"Ġgener":20181,"Ġsocio":20182,"Ġmeditation":20183,"Ġforgive":20184,"Ġsmiled":20185,"%),":20186,"Ġpers":20187,"ĠSoph":20188,"Ġoccupy":20189,"atton":20190,"Ġwitnessing":20191,"Ġapologise":20192,"Ġpredecessors":20193,"ĠCassidy":20194,"Ġtallied":20195,"NER":20196,"Ġtract":20197,"ĠHolder":20198,"ĠPav":20199,"Ġjackets":20200,"Mel":20201,"raud":20202,"Ġexercising":20203,"ĠChung":20204,"ĠAmin":20205,"athi":20206,"ĠMem":20207,"Ġracked":20208,"Ġcarved":20209,"ĠMickey":20210,"ĠLafayette":20211,"Ġgrill":20212,"ĠINFORMATION":20213,"usc":20214,"ĠPromotion":20215,"yson":20216,"istry":20217,"Ġfulfilled":20218,"Ġrestraint":20219,"Ġpopping":20220,"ĠSlater":20221,"Ġmercy":20222,"aden":20223,"Ġsubm
arine":20224,"ĠBowling":20225,"dogs":20226,"ĠSwe":20227,"Ġnoticeable":20228,"Ġbis":20229,"ĠPremiership":20230,"Ġspat":20231,"ĠTow":20232,"ĠWand":20233,"Ġmechanics":20234,"while":20235,"ĠBenson":20236,"Ġmolecules":20237,"Ġcrosses":20238,"Ġrecalling":20239,"ĠCertainly":20240,"HAM":20241,"Ġsever":20242,"ĠRudy":20243,"ĠDUI":20244,"OLD":20245,"ĠTobacco":20246,"Ġsubdued":20247,"Ġquota":20248,"TF":20249,"Ġflats":20250,"Ġemphasize":20251,"Ġbelts":20252,"ĠOpinion":20253,"Ġpiled":20254,"ĠSpark":20255,"ĠElias":20256,"Ġclassification":20257,"ĠHands":20258,"ĠCV":20259,"Ġtoast":20260,"Ġcandle":20261,"atching":20262,"short":20263,"ĠDup":20264,"Ġult":20265,"bats":20266,"Ġmarketers":20267,"ĠAvery":20268,"ĠColbert":20269,"ĠIk":20270,"ĠVac":20271,"ĠJackets":20272,"Ġmerits":20273,"eli":20274,"PORT":20275,"Ġelevator":20276,"irming":20277,"effective":20278,"Ġgroceries":20279,"Ġhi":20280,"ĠINTER":20281,"ĠSAP":20282,"ĠNYPD":20283,"ĠKY":20284,"Ġangel":20285,"Ġspectacle":20286,"ré":20287,"ĠRoche":20288,"Ġinsects":20289,"Ġcommenced":20290,"ĠFoley":20291,"Ġdarker":20292,"ĠUg":20293,"ĠMostly":20294,"Ġtermed":20295,"uci":20296,"ĠExec":20297,"ĠBrittany":20298,"Ġharmony":20299,"Ġadvocated":20300,"Ġparcel":20301,"ĠHots":20302,"Ġmonarch":20303,"ĠSiri":20304,"odge":20305,"ĠPag":20306,"Ġprogressing":20307,"grounds":20308,"Ġonstage":20309,"Ġwarmth":20310,"ĠWon":20311,"Ġviolates":20312,"ĠSaudis":20313,"Ġbumper":20314,"Ġpatrols":20315,"ĠBarron":20316,"Ġindoors":20317,"Ġtar":20318,"Each":20319,"Val":20320,"Ġapplicant":20321,"ĠCater":20322,"Ġclassics":20323,"ĠThreat":20324,"Ġwrapping":20325,"ĠIdlib":20326,"anking":20327,"Did":20328,"adia":20329,"ĠRig":20330,"ĠBram":20331,"ĠLaurie":20332,"ĠHair":20333,"ĠCannabis":20334,"Ġdaylight":20335,"ĠNorm":20336,"ĠRip":20337,"sin":20338,"unta":20339,"Pass":20340,"ĠAcad":20341,"ĠCummings":20342,"Ġtheirs":20343,"ĠDistribution":20344,"especially":20345,"Ġgrilled":20346,"Ġaffiliates":20347,"ĠVander":20348,"ĠCath":20349,"ĠProductions":20350,"ĠTrek":20351,"230":20352,"Ġcasinos":20353,"ĠCain":20354,"atu":20355,"idget":20356,"ĠWinds":20357,"Ġunanswered":20358,"Ġintercept":20359,"ĠMarty":20360,"Ġrefin":20361,"Ġlieutenant":20362,"cas":20363,"Chief":20364,"average":20365,"ilot":20366,"Ġscrimmage":20367,"ĠMud":20368,"speaking":20369,"ĠFranken":20370,"ĠTories":20371,"Ġabstract":20372,"awar":20373,"ĠTerms":20374,"dal":20375,"ĠFur":20376,"Ġhumour":20377,"rh":20378,"Ġsitu":20379,"aed":20380,"ĠFIN":20381,"Ġtranscripts":20382,"approved":20383,"ĠParsons":20384,"Ġpigs":20385,"Ġrepayment":20386,"ĠARM":20387,"ĠElliot":20388,"ĠLevine":20389,"Ġtagged":20390,"pun":20391,"ĠDwight":20392,"Ġconfiguration":20393,"sis":20394,"ĠAdult":20395,"Ġearthquakes":20396,"Ġcreature":20397,"ĠMRI":20398,"Ġmach":20399,"Ġprescriptions":20400,"cover":20401,"Ġministries":20402,"Ġinaccurate":20403,"ĠLabs":20404,"ĠMGM":20405,"Ġtomato":20406,"Ġeng":20407,"Ġopposes":20408,"owan":20409,"Ġmapping":20410,"Ġconsum":20411,"online":20412,"eters":20413,"code":20414,"Aug":20415,"Point":20416,"branded":20417,"pling":20418,"ĠCalder":20419,"Oper":20420,"ĠMiddles":20421,"Ġchampagne":20422,"ĠTues":20423,"Ġsampling":20424,"Ġenergetic":20425,"rano":20426,"ĠStyles":20427,"Ġneglected":20428,"ĠDamon":20429,"Ġendanger":20430,"Ġsouthwestern":20431,"ĠATM":20432,"ĠDuck":20433,"engers":20434,"Ġdan":20435,"yth":20436,"Ġbou":20437,"ĠDecl":20438,"Gold":20439,"Ġprojecting":20440,"Google":20441,"ĠHussein":20442,"Ġaccomplishment":20443,"itarian":20444,"Ġgossip":20445,"ĠRai":20446,"ril":20447,"ĠSke":20448,"Ġpsychiatric":20449,"ĠMacBook":20450,"ĠAdobe":20451,"ĠHodg":20452,"
Ġaccompany":20453,"Ġadvertised":20454,"Ġreminiscent":20455,"Ġgeographical":20456,"Ġconvertible":20457,"IK":20458,"CTV":20459,"Ġcommunal":20460,"Ġchim":20461,"Ġselfish":20462,"Ġdrilled":20463,"Ġtortured":20464,"Ġblacks":20465,"noon":20466,"Ġmanifesto":20467,"ĠRichie":20468,"acco":20469,"Im":20470,"Ġdebit":20471,"ĠSNP":20472,"perfect":20473,"gard":20474,"ĠRatio":20475,"Ġstubborn":20476,"Ġaccumulation":20477,"Ġcongregation":20478,"Ġkissing":20479,"Ġkillers":20480,"ĠAbbey":20481,"von":20482,"ĠFuj":20483,"ĠIsabel":20484,"NB":20485,"ĠNish":20486,"ĠJulius":20487,"ĠZimmer":20488,"Ġuncover":20489,"dar":20490,"isle":20491,"ĠCompar":20492,"Ġcounselor":20493,"ĠSok":20494,"ĠCumm":20495,"ĠHip":20496,"Ġurgently":20497,"Ġrentals":20498,"Ġapproving":20499,"Ġirrigation":20500,"Ġprostate":20501,"ĠJudicial":20502,"ĠSubmit":20503,"ĠTanner":20504,"attack":20505,"emb":20506,"Ġreclaim":20507,"Ġec":20508,"Ġbrutality":20509,"Ġcommanding":20510,"Ġreasoning":20511,"Roy":20512,"ĠElect":20513,"ĠMobil":20514,"anding":20515,"Ġmirrors":20516,"Israel":20517,"Ġpavement":20518,"Ġoverdue":20519,"ĠMd":20520,"street":20521,"Ġthrill":20522,"pora":20523,"azon":20524,"Ġbrewing":20525,"enge":20526,"ĠDisaster":20527,"Ġbuilder":20528,"ods":20529,"utsch":20530,"Ġterminals":20531,"ĠBaird":20532,"enburg":20533,"Ġhast":20534,"Ġbrass":20535,"Ġparental":20536,"enture":20537,"ĠConduct":20538,"Ġexpands":20539,"luck":20540,"mur":20541,"ĠBj":20542,"Ġadministrations":20543,"ĠOlivier":20544,"oux":20545,"Ġnarrowed":20546,"winner":20547,"Ġmakeshift":20548,"ĠVAT":20549,"ĠJavier":20550,"-,":20551,"Ġsystematic":20552,"Ġenforcing":20553,"emin":20554,"ĠAudio":20555,"United":20556,"gener":20557,"ĠKara":20558,"ivas":20559,"ĠPretty":20560,"ĠLob":20561,"Ġpetitions":20562,"ĠMercer":20563,"ampa":20564,"product":20565,"Ġdistributing":20566,"Ġtunnels":20567,"Ġcondo":20568,"ĠRSS":20569,"ĠCarlo":20570,"Ġpumpkin":20571,"Ġsto":20572,"Ġassumes":20573,"oway":20574,"hiba":20575,"lection":20576,"Ġgam":20577,"ĠAires":20578,"Ġtransmitted":20579,"Ġtrousers":20580,"Ġcheers":20581,"ĠJensen":20582,"Ġemer":20583,"Ġsimpler":20584,"Ġcolored":20585,"ĠSustainable":20586,"Ġinstruct":20587,"Ġpoles":20588,"Ġsupervised":20589,"Ġinteg":20590,"ĠMoreno":20591,"boarding":20592,"igrant":20593,"ĠYoga":20594,"Ġenvironmentally":20595,"Ġsacrifices":20596,"Ġshores":20597,"Ġ127":20598,"Ġestranged":20599,"Ġintoxicated":20600,"Ġemergencies":20601,"ĠKosovo":20602,"yang":20603,"Ġfastball":20604,"Ġpackaged":20605,"LAN":20606,"Ġhurry":20607,"ĠManny":20608,"Ġporch":20609,"Ġcuriosity":20610,"ĠKend":20611,"thouse":20612,"ĠTou":20613,"mun":20614,"Ġwaving":20615,"Ġpasswords":20616,"ĠSwan":20617,"Ġprefers":20618,"ĠCorrections":20619,"aic":20620,"Ġejected":20621,"Ġdossier":20622,"ĠChal":20623,"Ġfacto":20624,"Ġspine":20625,"leck":20626,"Ġrestriction":20627,"Ġdisagreement":20628,"grown":20629,"ĠEdgar":20630,"Ġquantities":20631,"ĠRapid":20632,"Ġpals":20633,"Ġspared":20634,"Ġremarkably":20635,"ructure":20636,"Ġbackers":20637,"ĠGoals":20638,"cles":20639,"rolling":20640,"ĠBlasio":20641,"Ġorchestra":20642,"ologies":20643,"ĠRise":20644,"Power":20645,"Ġuptick":20646,"atha":20647,"ĠMob":20648,"Ġshotgun":20649,"downs":20650,"ĠBorg":20651,"Ġmorale":20652,"Call":20653,"wave":20654,"ĠDuc":20655,"Ġunwilling":20656,"oad":20657,"Ġbusinessmen":20658,"Ġrefriger":20659,"Ġgamers":20660,"Ġcele":20661,"Ġprecip":20662,"Ġrenegoti":20663,"OY":20664,"ĠPharm":20665,"Ġresponsive":20666,"Ġservant":20667,"eye":20668,"Ġraping":20669,"vas":20670,"Ġgroin":20671,"ĠMelvin":20672,"ĠKurds":20673,"Ġstricter":20674,"ĠMum":20675,"ients":20676,"Ġst
andalone":20677,"Ġforums":20678,"Ġcommemorate":20679,"Far":20680,"ĠTelegram":20681,"Ġscreenings":20682,"ĠLeonardo":20683,"ighton":20684,"ĠDOWN":20685,"Ġmodule":20686,"Ġremedy":20687,"Ġ280":20688,"Su":20689,"ĠBecker":20690,"ĠGast":20691,"prem":20692,"ĠInto":20693,"oyle":20694,"114":20695,"Ġadhere":20696,"Report":20697,"ĠJaneiro":20698,"ĠKry":20699,"Pakistan":20700,"Ġrobotic":20701,"ande":20702,"Ġoverlooking":20703,"ĠTreaty":20704,"Ġrect":20705,"yne":20706,"Ġbattlefield":20707,"ĠGeoff":20708,"Ġearns":20709,"ĠMiner":20710,"Ġteased":20711,"Ġexemptions":20712,"Ġvacancy":20713,"oku":20714,"Ġvulnerabilities":20715,"ĠRou":20716,"Ġobserv":20717,"Ġoverlook":20718,"Ġcorrespond":20719,"Ġtheatrical":20720,"Ġrobotics":20721,"ĠCompl":20722,"ĠPasadena":20723,"laden":20724,"Ġvastly":20725,"olit":20726,"Ġjustification":20727,"Ġtampering":20728,"ĠSutherland":20729,"ĠMens":20730,"Ġinvisible":20731,"uren":20732,"ĠAshton":20733,"owl":20734,"Ġdisqual":20735,"ĠEva":20736,"Ġfriction":20737,"ĠIrvine":20738,"Ġaliens":20739,"ĠPension":20740,"ĠAssets":20741,"ĠBenedict":20742,"ittal":20743,"Ġsword":20744,"Ġunderwear":20745,"ĠFarmer":20746,"Ġtimber":20747,"Ġdependence":20748,"ĠTang":20749,"Ġ165":20750,"ĠNazis":20751,"Ġpunching":20752,"ĠGloria":20753,"usat":20754,"Ġluxurious":20755,"chuk":20756,"ĠCot":20757,"Ġregained":20758,"Ġreassure":20759,"Ġhello":20760,"Ġante":20761,"Ġnegotiators":20762,"Add":20763,"paced":20764,"ér":20765,"Ġdemolished":20766,"Ann":20767,"joy":20768,"ĠJenna":20769,"Apple":20770,"Ġdisturbance":20771,"Ġcommissions":20772,"ĠPolitico":20773,"along":20774,"Ġnem":20775,"Ġauctions":20776,"ruck":20777,"ĠOD":20778,"ofer":20779,"Play":20780,"Ġcarn":20781,"vez":20782,"Ġtents":20783,"Ġcongratulate":20784,"ĠLiquid":20785,"ĠCoyotes":20786,"uku":20787,"ĠAllah":20788,"Ġbend":20789,"Ġcanvas":20790,"ĠClifford":20791,"Ġvolunteered":20792,"Luc":20793,"bp":20794,"ĠCensus":20795,"ĠShot":20796,"Ġanonymously":20797,"ĠAnglo":20798,"ĠBayer":20799,"ĠAber":20800,"ĠCorrectional":20801,"Ġhardship":20802,"ĠBuenos":20803,"ĠDaw":20804,"Ġbaskets":20805,"Ġupstairs":20806,"Ġmindful":20807,"ĠLCD":20808,"ĠBlackburn":20809,"ĠHale":20810,"477":20811,"Ġcircus":20812,"ĠDragons":20813,"Ġrubble":20814,"rb":20815,"Ġheadaches":20816,"aunt":20817,"itus":20818,"Ġscaled":20819,"ĠComic":20820,"asio":20821,"ĠNordic":20822,"Per":20823,"Ġbombers":20824,"ilitation":20825,"Ġindirectly":20826,"ĠHod":20827,"andan":20828,"operation":20829,"Ġpuppy":20830,"ĠMats":20831,"Ġstewards":20832,"roup":20833,"Ġmemorandum":20834,"Ġpatio":20835,"const":20836,"ĠBold":20837,"ĠKaiser":20838,"Following":20839,"Ġcompat":20840,"Ġsidewalks":20841,"ĠFitzpatrick":20842,"Ġsunlight":20843,"ĠLever":20844,"ĠBecky":20845,"icles":20846,"ĠProbably":20847,"Ġgarner":20848,"ĠTomas":20849,"Ġblankets":20850,"uga":20851,"jiang":20852,"Ġrevel":20853,"ĠHutch":20854,"llers":20855,"Ġtrimmed":20856,"ĠSTR":20857,"ĠKR":20858,"ĠPike":20859,"ĠASS":20860,"Bay":20861,"Ġdiagnostic":20862,"ĠSteph":20863,"Ġtoured":20864,"ĠAvoid":20865,"vic":20866,"Without":20867,"ĠClinical":20868,"Ġblo":20869,"undo":20870,"ĠBoise":20871,"Ġspeculated":20872,"ĠProt":20873,"vention":20874,"Ġscholar":20875,"ĠSta":20876,"Featured":20877,"ĠPrev":20878,"Ġpenny":20879,"ĠHath":20880,"rawn":20881,"Ġrenovated":20882,"ĠFried":20883,"itol":20884,"uddle":20885,"Ġinquest":20886,"Ġmetropolitan":20887,"lights":20888,"Ġtempo":20889,"onom":20890,"ĠImport":20891,"Asia":20892,"Ġowes":20893,"Ġmagistrate":20894,"ĠFriedman":20895,"Ġcontacting":20896,"Ġstrains":20897,"Ġhomage":20898,"Ġlent":20899,"ception":20900,"git":20901,"Ġlively":20902,"Ġscra
":20903,"WW":20904,"ön":20905,"rill":20906,"Jack":20907,"ĠShank":20908,"iani":20909,"Ġdecreasing":20910,"MON":20911,"ĠSupervisor":20912,"ĠCats":20913,"ĠFusion":20914,"Ġracially":20915,"ĠTara":20916,"ĠPurchase":20917,"ĠRally":20918,"ĠGraph":20919,"ĠHello":20920,"hest":20921,"ĠVarg":20922,"Ġdrowned":20923,"ĠThu":20924,"ĠWet":20925,"ĠEug":20926,"Ġrainbow":20927,"Ġtelev":20928,"ĠAmir":20929,"Based":20930,"Ġcookie":20931,"uding":20932,"Ġcontracting":20933,"Ġobjected":20934,"Ġfork":20935,"acent":20936,"ĠTil":20937,"ĠLilly":20938,"ĠEur":20939,"Ġhormone":20940,"Ġnails":20941,"ĠFischer":20942,"Ġpier":20943,"EMENT":20944,"Ġeruption":20945,"visory":20946,"Ġspeculate":20947,"apan":20948,"ĠJub":20949,"ĠHuckabee":20950,"string":20951,"stay":20952,"Ġsustaining":20953,"VM":20954,"Ġpriv":20955,"Ġclos":20956,"Ġdownloaded":20957,"ĠIv":20958,"Ġfinanced":20959,"ĠSao":20960,"ĠEverett":20961,"rene":20962,"ĠWo":20963,"ĠPiet":20964,"Ġengulfed":20965,"Ġexiting":20966,"uni":20967,"horn":20968,"Ġgrav":20969,"ection":20970,"Ġdrainage":20971,"Ġfuelled":20972,"Ġorganizational":20973,"bike":20974,"ĠAreas":20975,"Ġpoliceman":20976,"ĠFirm":20977,"ĠSlide":20978,"Ġrand":20979,"ĠJedi":20980,"Ge":20981,"really":20982,"Manchester":20983,"ĠWise":20984,"parent":20985,"Ġlad":20986,"Ġurine":20987,"ĠColombian":20988,"geon":20989,"Ġ1961":20990,"Mania":20991,"Ġgraph":20992,"Ġcod":20993,"fred":20994,"Ġeffic":20995,"ĠGateway":20996,"asket":20997,"Ġdiminished":20998,"Mass":20999,"Ġ205":21000,"Long":21001,"Ġgranddaughter":21002,"Ġshining":21003,"Semitic":21004,"Ġarising":21005,"Ġ330":21006,"ĠDU":21007,"ĠZah":21008,"Ġexclusion":21009,"ĠClaus":21010,"Ġven":21011,"oine":21012,"ĠAPI":21013,"reve":21014,"Ġmilitias":21015,"Ġfro":21016,"Ġwaved":21017,"ĠLuxembourg":21018,"Ġdiamonds":21019,"Ġstabilize":21020,"Ġqueue":21021,"ĠSponsor":21022,"Ġeldest":21023,"ĠLud":21024,"Ġwasting":21025,"Ġdimension":21026,"Ġmotorcycles":21027,"ucker":21028,"ĠTav":21029,"Ġsupremacy":21030,"Take":21031,"ĠCPU":21032,"cup":21033,"Ġdisregard":21034,"Ġenvelope":21035,"ĠCah":21036,"Ġproposes":21037,"ĠMaurice":21038,"Ġhobby":21039,"Ġharmon":21040,"Ġribbon":21041,"ĠOrigin":21042,"Ġbuilders":21043,"Ġconj":21044,"Ġcert":21045,"eat":21046,"ĠStern":21047,"ulia":21048,"vals":21049,"cling":21050,"Ġprovocative":21051,"Ġsofter":21052,"Ġ1948":21053,"Ġremod":21054,"ĠSob":21055,"Ġmaxim":21056,"Ġblueprint":21057,"oit":21058,"ĠGarner":21059,"Ġfibre":21060,"search":21061,"ĠWrite":21062,"270":21063,"Ġclergy":21064,"ĠPalo":21065,"obile":21066,"Mad":21067,"Ġclown":21068,"Ġtraced":21069,"280":21070,"ĠAlberto":21071,"Ġdrums":21072,"ĠFridays":21073,"ĠStrat":21074,"stated":21075,"ĠStevenson":21076,"Pr":21077,"Ġboasted":21078,"ĠBrees":21079,"ĠDonn":21080,"ĠMaya":21081,"Ġrelieve":21082,"Ġ1080":21083,"Ġcheapest":21084,"Ġuniquely":21085,"Ġjungle":21086,"Ġprevalence":21087,"Ġoutfield":21088,"ĠMaps":21089,"Ġaccustomed":21090,"pac":21091,"Ġcombinations":21092,"ĠSoros":21093,"stad":21094,"Ġket":21095,"Ġdisgusting":21096,"ĠOFF":21097,"irs":21098,"Ġbiased":21099,"Ġpaved":21100,"iked":21101,"utterstock":21102,"ocal":21103,"Ġsurround":21104,"ĠGuang":21105,"Ġspear":21106,"ĠBellev":21107,"ortun":21108,"Rec":21109,"acho":21110,"Ġfrightening":21111,"Ġtyres":21112,"normal":21113,"ĠYan":21114,"ĠWarsaw":21115,"ĠBod":21116,"ourse":21117,"199":21118,"Ver":21119,"erent":21120,"Ġsparkling":21121,"Ġchanting":21122,"Ġ1945":21123,"Ġturbo":21124,"Ġhazards":21125,"IRE":21126,"ĠRonnie":21127,"Ġsplitting":21128,"ĠMatte":21129,"roph":21130,"Ġtended":21131,"Ġvandalism":21132,"alis":21133,"SY":21134,"Ġoversaw":21135,"Happy":2113
6,"ĠTC":21137,"275":21138,"Ġeco":21139,"ĠKers":21140,"Ġextensions":21141,"ĠFlan":21142,"ĠCena":21143,"ĠDowns":21144,"Ġdrummer":21145,"Ġawaited":21146,"ĠACL":21147,"Ġlegends":21148,"ĠRollins":21149,"hend":21150,"Ġdeparting":21151,"Ġtha":21152,"Ġunre":21153,".(":21154,"Ġfaded":21155,"Ġretirees":21156,"vid":21157,"Ġentrants":21158,"ĠStella":21159,"arer":21160,"Ġteaspoon":21161,"ĠSheridan":21162,"irc":21163,"ĠRelief":21164,"ĠButt":21165,"Ġris":21166,"Ġundermined":21167,"Ġsunk":21168,"Sam":21169,"kamp":21170,"riot":21171,"rating":21172,"Ġclubhouse":21173,"Ġpeaked":21174,"ĠSki":21175,"Ġairstrikes":21176,"Ġconce":21177,"ĠCPR":21178,"Ġesp":21179,"ĠWave":21180,"ĠColiseum":21181,"outheastern":21182,"Ġtrou":21183,"Ġfeather":21184,"ĠSoy":21185,"ĠBihar":21186,"Ġintervened":21187,"mits":21188,"colored":21189,"330":21190,"Ġprocession":21191,"apeake":21192,"ité":21193,"riel":21194,"Ġmart":21195,"afer":21196,"ĠGuests":21197,"ĠPie":21198,"Ġshiny":21199,"ĠSixers":21200,"ĠRoads":21201,"Ġkicker":21202,"ĠCrimes":21203,"Ġfrontier":21204,"ansen":21205,"November":21206,"smith":21207,"ĠLaun":21208,"fried":21209,"weet":21210,"ĠGrass":21211,"Ġsanitation":21212,"ĠEat":21213,"ĠParts":21214,"ĠTun":21215,"amar":21216,"ĠJupiter":21217,"ĠFS":21218,"Ġunsc":21219,"ĠDone":21220,"Ġleveraging":21221,"Ġtucked":21222,"Ġineffective":21223,"Ġriots":21224,"wei":21225,"ĠAttend":21226,"Ġpertaining":21227,"amen":21228,"monds":21229,"Ġmism":21230,"serious":21231,"ĠViol":21232,"rous":21233,"Ġ129":21234,"uebl":21235,"umption":21236,"tri":21237,"ĠWedding":21238,"Ġtroopers":21239,"ĠTHR":21240,"olving":21241,"leys":21242,"Med":21243,"Ġseparatists":21244,"Ġimper":21245,"ĠFrontier":21246,"Ġwhit":21247,"ĠMutual":21248,"Ġrested":21249,"Ġunhealthy":21250,"gang":21251,"Ġresearching":21252,"ĠColonel":21253,"Ġaffordability":21254,"ĠRegarding":21255,"ĠWend":21256,"ĠMellon":21257,"Ġplots":21258,"Ġcanal":21259,"PER":21260,"ĠShopping":21261,"etry":21262,"Ġoccurrence":21263,"Ġgraves":21264,"BF":21265,"ĠKau":21266,"indust":21267,"Ġbeard":21268,"uate":21269,"ĠProdu":21270,"ĠSomali":21271,"ishers":21272,"ĠFell":21273,"ĠHutchinson":21274,"Ġhust":21275,"Ġillustration":21276,"Ġ//":21277,"Ġsharks":21278,"Ġcoincidence":21279,"Ġremake":21280,"Ġmural":21281,"course":21282,"ĠSultan":21283,"arse":21284,"Ġwhip":21285,"ĠPodcast":21286,"Ġtightened":21287,"Ġdenim":21288,"Ġlandfill":21289,"future":21290,"Ġsuperv":21291,"Hand":21292,"Ġpraising":21293,"ĠEly":21294,"ĠGust":21295,"ĠMayer":21296,"Ġorphan":21297,"Ġrepaired":21298,"ĠPir":21299,"Ġspiral":21300,"husband":21301,"ienne":21302,"iatric":21303,"Ġmarriages":21304,"Ġhorn":21305,"plain":21306,"ĠLum":21307,"ession":21308,"ĠFeatures":21309,"Ġbreakup":21310,"Ġentrepreneurship":21311,"rina":21312,"Ġembargo":21313,"Ġcapitalism":21314,"ĠMinor":21315,"Ġpromo":21316,"Ġexcel":21317,"Japan":21318,"Ġworsening":21319,"Ġstumbled":21320,"Ġpins":21321,"Ġswipe":21322,"Ġexile":21323,"Ġseparatist":21324,"ĠBian":21325,"Ġrelocation":21326,"Ġcommanders":21327,"Ġdowned":21328,"Ġblogger":21329,"packed":21330,"ĠSchn":21331,"Ġwaterfront":21332,"ĠYus":21333,"Ġnegotiator":21334,"Ġfavourable":21335,"Iran":21336,"oulder":21337,"Ġcance":21338,"Ġvind":21339,"angel":21340,"Ġauthenticity":21341,"Ġtowel":21342,"bul":21343,"ĠNeville":21344,"ĠBuddhist":21345,"fields":21346,"uly":21347,"Ġniece":21348,"Ġcorrections":21349,"Ġassignments":21350,"ĠSchl":21351,"Ġharmed":21352,"375":21353,"Ġwounding":21354,"ĠPosition":21355,"Ġsupermarkets":21356,"Ġdisclosures":21357,"Ġ185":21358,"esp":21359,"ĠMcCull":21360,"ĠMale":21361,"Ġsailors":21362,"mis":21363,"ĠSophia":21364
,"Ġunfolded":21365,"owell":21366,"ĠScarborough":21367,"Ġentrepreneurial":21368,"118":21369,"ogy":21370,"ĠLikewise":21371,"Ġswung":21372,"Ġdrawings":21373,"Ġdrafting":21374,"ĠSimple":21375,"ĠFilip":21376,"arf":21377,"Ġfade":21378,"Ġmerged":21379,"ĠLeaf":21380,"sun":21381,"Ġflame":21382,"Ġindices":21383,"ĠCreate":21384,"ittle":21385,"ĠWer":21386,"ĠMond":21387,"Ġoz":21388,"ĠSmoke":21389,"Ġreplies":21390,"ĠDH":21391,"Ġjud":21392,"ĠFalk":21393,"Ġ---":21394,"Ġconstitutes":21395,"Ġtheat":21396,"119":21397,"Ġintermediate":21398,"vill":21399,"ĠGow":21400,"ĠHut":21401,"ł":21402,"155":21403,"ĠLocated":21404,"ĠDoor":21405,"Ġsliced":21406,"aru":21407,"Ġtearing":21408,"defense":21409,"oyer":21410,"Ġprodu":21411,"Ġseminar":21412,"asso":21413,"Ġpeaks":21414,"Ġconceal":21415,"Ġcrypto":21416,"Ġsetbacks":21417,"ĠAlicia":21418,"ĠFAA":21419,"Ġcontinuity":21420,"Ġcatastrophe":21421,"Ġbeg":21422,"Ġscales":21423,"apixel":21424,"Ġsalon":21425,"Ste":21426,"Ġlesbian":21427,"Ġanticip":21428,"Ġutilization":21429,"Ġchickens":21430,"Ġspinal":21431,"ĠJuliet":21432,"ĠFas":21433,"prising":21434,"ĠSalvation":21435,"Ġ138":21436,"Ġutilizing":21437,"âĢ¢":21438,"ĠMessenger":21439,"Ġrebellion":21440,"ĠAlexand":21441,"Ġinsect":21442,"Ġribs":21443,"ĠBild":21444,"Ġmonopoly":21445,"Queen":21446,"ĠNaples":21447,"Ġ133":21448,"Ġhourly":21449,"Ġego":21450,"Ġpencil":21451,"ĠPew":21452,"Ġdesirable":21453,"vant":21454,"ĠLAT":21455,"Ġperpet":21456,"lish":21457,"Ġ201":21458,"Ġdistances":21459,"Ġdistressed":21460,"Work":21461,"Ġtattoos":21462,"Ġstereotypes":21463,"istent":21464,"ĠCoral":21465,"fo":21466,"Ġpayable":21467,"Ġakin":21468,"ĠLis":21469,"ĠFinding":21470,"Ġsusceptible":21471,"ĠKiw":21472,"Ġforgiveness":21473,"ĠMoment":21474,"ĠDmitry":21475,"Ġrenov":21476,"Ġquint":21477,"ĠWaterloo":21478,"ĠReality":21479,"Ġstray":21480,"ĠBeaver":21481,"Ġbites":21482,"Ġelusive":21483,"Ġvirtue":21484,"Ġgadgets":21485,"Ġlandslide":21486,"ĠHealthy":21487,"Ġpits":21488,"Donnell":21489,"Ġirony":21490,"uct":21491,"Ġpractitioners":21492,"Ġreck":21493,"governmental":21494,"Ġatomic":21495,"Ġmotiv":21496,"Ġpolic":21497,"Ġcommunicated":21498,"ĠHS":21499,"Ġcriticize":21500,"Ġsynerg":21501,"Del":21502,"ĠRoe":21503,"Ġinspirational":21504,"ĠWarning":21505,"pel":21506,"Ġnevertheless":21507,"Ġdespair":21508,"Ġ(.":21509,"Ġfearing":21510,"Ġgrop":21511,"tree":21512,"Ġtrusts":21513,"Ġinterviewing":21514,"amic":21515,"Ġscor":21516,"ject":21517,"Another":21518,"pose":21519,"Ġdepicted":21520,"ĠPhotography":21521,"ĠLenovo":21522,"ĠEpic":21523,"ĠBoot":21524,"GI":21525,"enses":21526,"Class":21527,"arity":21528,"Ġservicing":21529,"ĠHann":21530,"Ġawe":21531,"Ġoverdoses":21532,"ĠFinnish":21533,"Ġpav":21534,"ĠPCs":21535,"SEC":21536,"ĠStro":21537,"Ġattracts":21538,"Ġapprehended":21539,"128":21540,"Ġunstable":21541,"ĠOutdoor":21542,"Ġcloth":21543,"ĠUlster":21544,"Ġvisually":21545,"Ġsculpt":21546,"Ġsufficiently":21547,"ĠKendrick":21548,"Ġengages":21549,"Ġknives":21550,"ĠGut":21551,"Ġarbit":21552,"osition":21553,"Ġemoji":21554,"Ġpinpoint":21555,"Ġremembering":21556,"rence":21557,"ĠVish":21558,"Ġimproperly":21559,"Ġranc":21560,"Ġupstream":21561,"Ġcheckpoint":21562,"Ġrash":21563,"eson":21564,"Ġtoes":21565,"260":21566,"Ġinvalid":21567,"Ġonions":21568,"Ġlashed":21569,"ĠDong":21570,"Ġprovisional":21571,"ĠFern":21572,"Ġirresponsible":21573,"actively":21574,"ĠKnown":21575,"Ġben":21576,"ĠBlank":21577,"Ġactresses":21578,"paying":21579,"Ġsyrup":21580,"isman":21581,"Ġeducating":21582,"Sunday":21583,"ifiable":21584,"Post":21585,"Ġcalculation":21586,"Ġhesitate":21587,"ĠIncreasing":21588,"Ġreeling":21
589,"ĠDairy":21590,"ensing":21591,"Ġmaternity":21592,"Ø":21593,"./":21594,"ĠElm":21595,"Ġweddings":21596,"ĠYard":21597,"117":21598,"ĠRocket":21599,"OF":21600,"Ġtreasurer":21601,"Ġrattled":21602,"ĠDrop":21603,"arel":21604,"ĠFulton":21605,"ĠGiant":21606,"ĠFloor":21607,"Jet":21608,"ikk":21609,"ĠBucs":21610,"ostics":21611,"reme":21612,"ĠRouse":21613,"Ġdeliber":21614,"ĠEle":21615,"Ġconducts":21616,"ĠBlog":21617,"connected":21618,"Ġprayed":21619,"Ġcolourful":21620,"Ġaugmented":21621,"Ġbatted":21622,"Ġrelevance":21623,"ĠRomanian":21624,"acqu":21625,"ĠChel":21626,"ĠClo":21627,"ĠGraves":21628,"Ġchees":21629,"ĠGibbs":21630,"CLE":21631,"Ġfertility":21632,"Ġambul":21633,"Ġspecs":21634,"ĠIRA":21635,"ĠBooth":21636,"ithe":21637,"ĠPlayoff":21638,"ammed":21639,"Ġcollaborating":21640,"Ġlunar":21641,"Ġconfronting":21642,"Ġattribute":21643,"King":21644,"riz":21645,"Ġcasualty":21646,"acia":21647,"waters":21648,"Ġpaving":21649,"Ġcaregivers":21650,"nor":21651,"Ġreacting":21652,"ĠHash":21653,"Ġsqueezed":21654,"Ġexert":21655,"ĠMichele":21656,"ĠConc":21657,"ĠHep":21658,"Ġsewage":21659,"wart":21660,"GY":21661,"Ġdiscourage":21662,"ĠFir":21663,"Ġtextile":21664,"ĠSpice":21665,"ĠFah":21666,"Ġcomplainant":21667,"Ġinstinct":21668,"camp":21669,"ĠEdison":21670,"ĠVIDEOS":21671,"LM":21672,"ĠSands":21673,"About":21674,"Ġdisk":21675,"brid":21676,"Ġmuted":21677,"ACC":21678,"Ġwre":21679,"event":21680,"Ġicons":21681,"Express":21682,"udes":21683,"ĠBeatles":21684,"color":21685,"ĠHaas":21686,"ĠWolfe":21687,"ĠYOUR":21688,"Ġaccessibility":21689,"ĠCornwall":21690,"Ġing":21691,"Ġatrocities":21692,"weather":21693,"ĠDominion":21694,"ĠMIL":21695,"ĠLara":21696,"Ġunravel":21697,"Ġmaneuver":21698,"Ġfoam":21699,"ribe":21700,"CI":21701,"Ġcandles":21702,"acs":21703,")(":21704,"coon":21705,"ĠPurple":21706,"ĠGovernors":21707,"ĠKeystone":21708,"ĠYuk":21709,"file":21710,"Ġviol":21711,"gery":21712,"370":21713,"train":21714,"Ġgunshots":21715,"olin":21716,"Ġviruses":21717,"ĠTex":21718,"hours":21719,"Ġprev":21720,"ĠRid":21721,"ected":21722,"ĠVog":21723,"riers":21724,"Ġmurdering":21725,"ĠIz":21726,"Ġdeliberations":21727,"arming":21728,"unda":21729,"Ġrink":21730,"ĠDrugs":21731,"idered":21732,"Ġforge":21733,"Ġexpansive":21734,"VIEW":21735,"ĠBots":21736,"Ġswitches":21737,"KO":21738,"atten":21739,"Ġvariants":21740,"ĠVirtual":21741,"ĠCoch":21742,"yon":21743,"ĠKai":21744,"Ġbullied":21745,"iday":21746,"version":21747,"Ġlib":21748,"ĠCec":21749,"igated":21750,"ĠTRUMP":21751,"ĠPod":21752,"Ġtoppled":21753,"Ġeyeing":21754,"ĠPatients":21755,"techn":21756,"Ġhampered":21757,"Ġavert":21758,"ĠScheme":21759,"ĠCorm":21760,"Ġpony":21761,"Ġzoom":21762,"abo":21763,"Ġsleeves":21764,"lane":21765,"ĠLester":21766,"ĠDane":21767,"Ġcough":21768,"Ġsignings":21769,"HER":21770,"Ġsibling":21771,"Ġredemption":21772,"Ġstockp":21773,"ĠAlgeria":21774,"Ġpadd":21775,"ĠBrenda":21776,"uchi":21777,"Ġtransporting":21778,"Ġspeculative":21779,"ĠSek":21780,"abal":21781,"Ġshipment":21782,"oker":21783,"Ġwarranty":21784,"atan":21785,"Ġblister":21786,"ĠCelebration":21787,"Ġwal":21788,"Ġlac":21789,"Ġprioritize":21790,"ression":21791,"BP":21792,"Ġcollaborated":21793,"ĠNewsletter":21794,"ĠDamian":21795,"ĠResidential":21796,"Ġgra":21797,"Ġfeasible":21798,"ĠCrest":21799,"ĠBean":21800,"ĠSturgeon":21801,"ĠTale":21802,"ĠContin":21803,"ĠMush":21804,"Ġrocking":21805,"ĠMane":21806,"ĠHumane":21807,"resistant":21808,"ĠFra":21809,"highest":21810,"fts":21811,"Ġamassed":21812,"ĠPavilion":21813,"ĠSkin":21814,"Ġunfold":21815,"Ġresur":21816,"ĠPET":21817,"model":21818,"Ġemploying":21819,"Ġrude":21820,"Ġirrelevant":21821,"ang
u":21822,"Page":21823,"PN":21824,"igator":21825,"ĠReb":21826,"ĠArrest":21827,"ĠGund":21828,"Ġmalls":21829,"zhen":21830,"wed":21831,"Ġdaring":21832,"Ġfactual":21833,"ĠGent":21834,"Ġinforming":21835,"ĠStri":21836,"ĠLounge":21837,".]":21838,"ĠTribunal":21839,"ĠMoines":21840,"Ġshadows":21841,"generated":21842,"fulness":21843,"Ġheartfelt":21844,"ĠLivingston":21845,"ĠClerk":21846,"Ġnationalism":21847,"ĠMiche":21848,"balls":21849,"anos":21850,"agle":21851,"Ġprejudice":21852,"Ġevenly":21853,"Ġswearing":21854,"Ġexits":21855,"Ġcondemning":21856,"Ġvanilla":21857,"club":21858,"ĠFunding":21859,"ĠDover":21860,"Ġhots":21861,"Ġfres":21862,"Ġgoodness":21863,"ĠMcKay":21864,"Ġbulls":21865,"avia":21866,"129":21867,"Ġ1947":21868,"Ġdefamation":21869,"ĠMoran":21870,"irms":21871,"ĠFitz":21872,"ĠRossi":21873,"urated":21874,"Ġvariation":21875,"ĠBauer":21876,"ĠSchro":21877,"Ġcolony":21878,"ĠParliamentary":21879,"ikan":21880,"Ġstirring":21881,"ĠSheldon":21882,"Ġaccessory":21883,"ĠUtilities":21884,"Ġnab":21885,"Ġpract":21886,"Ġherein":21887,"ĠRole":21888,"ĠMant":21889,"Ġpharm":21890,"Ġ215":21891,"ĠNGO":21892,"ĠAnything":21893,"ĠMacedonia":21894,"Ġbree":21895,"ĠWTO":21896,"Chicago":21897,"ĠProtect":21898,"quarters":21899,"ĠGrassley":21900,"ĠInteractive":21901,"ĠInterview":21902,"Ġ550":21903,"Ġastronauts":21904,"Ġfreak":21905,"ĠIntegrated":21906,"Ġindict":21907,"Ġgenerators":21908,"acio":21909,"Kevin":21910,"Ġvaccination":21911,"Ġblockade":21912,"ĠSons":21913,"Ġcapita":21914,"ĠAnita":21915,"ĠExport":21916,"ĠNex":21917,"ĠAram":21918,"Ġzinc":21919,"Ġrevamped":21920,"Ġselective":21921,"Ġmanipulate":21922,"ĠBedford":21923,"ĠBattery":21924,"Ġqualifiers":21925,"lean":21926,"Ġscrew":21927,"film":21928,"ror":21929,"ĠEllison":21930,"ombo":21931,"ĠOst":21932,"165":21933,"Ġslaves":21934,"ĠPayton":21935,"Ġbarg":21936,"Ġrugged":21937,"ĠWinn":21938,"ĠHammer":21939,"ĠUPS":21940,"Euro":21941,"Ġunfamiliar":21942,"Ġdistract":21943,"Ġbuffer":21944,"ledge":21945,"Ġtrunk":21946,"Ġ320":21947,"122":21948,"Ġdilemma":21949,"Ġpra":21950,"Ġutmost":21951,"Ġcampaigners":21952,"icular":21953,"eful":21954,"�":21955,"ĠHQ":21956,"neau":21957,"Ġsir":21958,"test":21959,"Company":21960,"Ġrescind":21961,"ardon":21962,"MG":21963,"Gov":21964,"ĠRaz":21965,"Ġrod":21966,"fed":21967,"Ġpsych":21968,"Ġunin":21969,"ĠArbor":21970,"Ġnewcomer":21971,"ĠEdwin":21972,"raising":21973,"quist":21974,"Ġdiscoveries":21975,"Steve":21976,"Ġscramble":21977,"js":21978,"Ġacoustic":21979,"Ġdeterioration":21980,"Ġobserving":21981,"ĠWinning":21982,"ĠSaban":21983,"idy":21984,"Ġoverd":21985,"Ġscouting":21986,"Ġpunitive":21987,"ĠShelter":21988,"Ġmocked":21989,"Ġdreamed":21990,"Ġinvaluable":21991,"LP":21992,"standard":21993,"Ġrecounted":21994,"ĠSabres":21995,"points":21996,"Ġfringe":21997,"ĠBarker":21998,"alian":21999,"ĠPROV":22000,"Ġcartel":22001,"Ġovercrowd":22002,"tain":22003,"Year":22004,"ĠWelfare":22005,"ĠChr":22006,"Ġintroduces":22007,"ĠDoing":22008,"ĠGlover":22009,"Ġdeteriorating":22010,"Par":22011,"Ġattendant":22012,"ĠMold":22013,"ĠFlying":22014,"ovan":22015,"Ġoptimize":22016,"Ġchapters":22017,"Ġdull":22018,"gay":22019,"ĠATP":22020,"ĠKah":22021,"ainer":22022,"feet":22023,"Ġjoking":22024,"Ġdisadvantage":22025,"Rep":22026,"Ġtwisted":22027,"Ġslain":22028,"Ġcomprise":22029,"Ġrestricting":22030,"Ġdispos":22031,"Ġshaky":22032,"Ġembattled":22033,"owe":22034,"conscious":22035,"oken":22036,"Ġmistaken":22037,"ĠDra":22038,"Ġreservoir":22039,"Ġspate":22040,"Scott":22041,"avor":22042,"Ġqual":22043,"amel":22044,"hunt":22045,"ĠChevy":22046,"Ġclaw":22047,"Ġwitch":22048,"ĠZimmerman":22049,"arium":
22050,"Ġrubbish":22051,"Ġstrings":22052,"Ġdoc":22053,"Ġplaque":22054,"ĠCyr":22055,"Ġflourish":22056,"Ġworthwhile":22057,"Ġbanners":22058,"ĠLemon":22059,"ĠRainbow":22060,"Ġconsisted":22061,"ĠHOW":22062,"Ñ":22063,"Ġblogs":22064,"CLUS":22065,"eely":22066,"Ġbeast":22067,"ĠMai":22068,"Ġhostility":22069,"eros":22070,"Ġforeseeable":22071,"ĠCorker":22072,"ĠWEEK":22073,"visors":22074,"ressive":22075,"ĠViktor":22076,"Ġbureaucracy":22077,"Ġ256":22078,"ĠFeel":22079,"ĠAdventure":22080,"Ġefficacy":22081,"ĠInstitution":22082,"ĠHarbaugh":22083,"ĠPractice":22084,"ĠChristianity":22085,"Thanks":22086,"Ġfridge":22087,"idel":22088,"Ġeff":22089,"Ġvein":22090,"terms":22091,"Ġignorance":22092,"Ġscream":22093,"Ġwit":22094,"ĠRousse":22095,"ĠWillow":22096,"Ġhallway":22097,"former":22098,"Ġshooters":22099,"ĠReporting":22100,"Ġgal":22101,"Ġsavvy":22102,"rand":22103,"Ġremed":22104,"ĠBaron":22105,"inar":22106,"Ġseizures":22107,"ĠThorn":22108,"ĠProtesters":22109,"ĠRevolutionary":22110,"think":22111,"ĠCabrera":22112,"Four":22113,"ĠRudd":22114,"Ġprost":22115,"ĠBottom":22116,"Port":22117,"nas":22118,"ifax":22119,"Wire":22120,"Ġtokens":22121,"antis":22122,"ĠSOU":22123,"ĠMilk":22124,"asters":22125,"Ġshrimp":22126,"Ġcakes":22127,"blue":22128,"ifty":22129,"View":22130,"adium":22131,"fen":22132,"zyk":22133,"ĠEmil":22134,"Ġdismay":22135,"Ġtilt":22136,"aska":22137,"Young":22138,"Ġpredators":22139,"Ġovershadowed":22140,"mitt":22141,"ĠSemin":22142,"ĠSchiff":22143,"ĠClarkson":22144,"212":22145,"210":22146,"Ġvanished":22147,"Ġmesh":22148,"ĠBurnett":22149,"ĠMent":22150,"ĠBlind":22151,"ĠPatriot":22152,"ĠVil":22153,"Ġflick":22154,"ĠTowns":22155,"ĠWhites":22156,"Ġspice":22157,"ĠMode":22158,"Ġnominate":22159,"Ġwrest":22160,"ĠAshes":22161,"Ġrows":22162,"ĠClint":22163,"Ġgentleman":22164,"utan":22165,"athlon":22166,"ĠIntermediate":22167,"hews":22168,"Ġoffended":22169,"ĠPaige":22170,"ĠFinch":22171,"ĠAboriginal":22172,"positive":22173,"Stop":22174,"Ġrenting":22175,"Ġ[âĢ¦]":22176,"ĠHert":22177,"Ġvegetation":22178,"apes":22179,"ĠCanon":22180,"appa":22181,"Ġabst":22182,"ĠKatz":22183,"Ġsurfing":22184,"aghan":22185,"ĠPresidency":22186,"Ġscaling":22187,"ĠSas":22188,"Ġpeanut":22189,"Ġrecommending":22190,"cious":22191,"endez":22192,"eker":22193,"ĠKamp":22194,"Ġsitcom":22195,"Ġcrust":22196,"women":22197,"ĠJes":22198,"ĠWhe":22199,"ĠWarwick":22200,"Ġepit":22201,"ĠAlc":22202,"Ġdictate":22203,"ĠSPORTS":22204,"ĠLanguage":22205,"Ġindicative":22206,"ĠMacDonald":22207,"Ġreorgan":22208,"Ġ`":22209,"ARS":22210,"Ġliberation":22211,"Ġbless":22212,"Ġreflective":22213,"Ġà¤":22214,"Ġdesires":22215,"ĠHank":22216,"ĠLaunch":22217,"Ġrotating":22218,"ĠStones":22219,"Ġcoordinating":22220,"ĠZeit":22221,"Ġskepticism":22222,"ĠAlam":22223,"ĠTrout":22224,"ĠSMS":22225,"ĠCrescent":22226,"ĠTeacher":22227,"Ġfury":22228,"Ġeyebrows":22229,"onga":22230,"ĠPilot":22231,"ĠRutherford":22232,"Ġinterstate":22233,"established":22234,"Ġbaggage":22235,"Ġ131":22236,"riks":22237,"mil":22238,"Ġneon":22239,"Ġqueer":22240,"ourced":22241,"ĠKash":22242,"ĠEleven":22243,"illes":22244,"ĠOpportun":22245,"Ġstre":22246,"Washington":22247,"ĠDifferent":22248,"Ġexempl":22249,"Ġboarded":22250,"Ġrogue":22251,"ĠDNC":22252,"rone":22253,"Ġreversing":22254,"nine":22255,"ĠIvory":22256,"itating":22257,"uve":22258,"Ġfracture":22259,"255":22260,"ĠAssessment":22261,"Ġsubjective":22262,"Ġfluct":22263,"ĠJaguar":22264,"Ġstride":22265,"Ġreapp":22266,"ĠGrow":22267,"against":22268,"ĠMedina":22269,"scenes":22270,"ĠNieto":22271,"Ġsou":22272,"ĠFleming":22273,"Ġnarcotics":22274,"ĠBere":22275,"ĠBub":22276,"ĠAck":22277,"Ġvinyl":22278,
"ĠCopy":22279,"ĠGarland":22280,"ĠDuty":22281,"Ġinn":22282,"Ġmerchant":22283,"Ġactivate":22284,"Ġglowing":22285,"ettle":22286,"ĠBran":22287,"Ġsilk":22288,"anco":22289,"TL":22290,"ĠFurn":22291,"Ġwithheld":22292,"Ġpulse":22293,"ĠGU":22294,"BUS":22295,"ĠHyper":22296,"Ġpicnic":22297,"Ġpositives":22298,"ĠParamount":22299,"Ġ737":22300,"Ġenlisted":22301,"ĠValerie":22302,"false":22303,"ĠChocolate":22304,"ĠSTAR":22305,"Ġdescended":22306,"Ġtasty":22307,"ĠDaesh":22308,"ĠNed":22309,"Ġcomplimentary":22310,"Ġdepicting":22311,"ĠHavana":22312,"college":22313,"Ġtraces":22314,"Ġundue":22315,"ĠSisters":22316,"aum":22317,"ĠCourier":22318,"ĠOng":22319,"ĠSparks":22320,"ongs":22321,"ĠYong":22322,"URR":22323,"los":22324,"Ġhorsepower":22325,"confidence":22326,"ĠPett":22327,"ĠMeasure":22328,"Ġmarches":22329,"zig":22330,"ĠTOR":22331,"Ġexported":22332,"ĠRak":22333,"ĠInvestigations":22334,"Ġterminate":22335,"ĠTian":22336,"Ġmasters":22337,"ĠDS":22338,"Ġoutraged":22339,"ĠCups":22340,"ĠWeir":22341,"exec":22342,"Ġjourneys":22343,"Ġabide":22344,"Ġavail":22345,"ĠStreets":22346,"Ġfixes":22347,"Ġcocoa":22348,"Ġabundant":22349,"Ġhubs":22350,"mort":22351,"Ġrobberies":22352,"ĠBark":22353,"Ġprecautions":22354,"Ġhammered":22355,"ometric":22356,"mith":22357,"ĠMcCann":22358,"ĠJaw":22359,"ĠQuest":22360,"ĠMcF":22361,"Ġlob":22362,"Ġlegalized":22363,"Ġquirky":22364,"Ġtrailers":22365,"ĠIndividual":22366,"Ġcumulative":22367,"Ġenlarge":22368,"Ġconvoy":22369,"olen":22370,"got":22371,"landers":22372,"Ġscanner":22373,"Ġscans":22374,"ĠEg":22375,"prof":22376,"Ġhosp":22377,"ĠColo":22378,"Ġerr":22379,"Ġdeval":22380,"ĠUsually":22381,"Ġbul":22382,"ummy":22383,"Ġtandem":22384,"occupied":22385,"Ġmandates":22386,"ĠSwim":22387,"121":22388,"ussed":22389,"EF":22390,"Ġfries":22391,"Until":22392,"rc":22393,"Ġbadge":22394,"Ġstrips":22395,"Ġmagnet":22396,"Ġarchive":22397,"stan":22398,"ĠDeadline":22399,"Ġdisposable":22400,"Ġbob":22401,"Ġnorthwestern":22402,"Jul":22403,"ĠSAL":22404,"Ġinfluencing":22405,"Ġdevil":22406,"ĠEllie":22407,"cms":22408,"ingo":22409,"888":22410,"Ġcosmetic":22411,"Also":22412,"Ġyacht":22413,"Ġlazy":22414,"Ġmerc":22415,"Ġabsorbed":22416,"harm":22417,"116":22418,"Ġsubpoena":22419,"Ġcounters":22420,"ĠLori":22421,"Ġrandomly":22422,"nea":22423,"waves":22424,"Ġrelie":22425,"ĠKiss":22426,"Ġchassis":22427,"Ġbakery":22428,"Images":22429,"ĠHolden":22430,"Ġamazed":22431,"Ġalignment":22432,"ĠPowers":22433,"Ġlabelled":22434,"Ġstaunch":22435,"Ġsignaling":22436,"Ġsenate":22437,"Ġunconventional":22438,"ĠAlternative":22439,"Ġambassadors":22440,"ĠVPN":22441,"atics":22442,"Ġmosquito":22443,"ĠScholarship":22444,"Ġhelpless":22445,"alone":22446,"ZA":22447,"chel":22448,"Ġconstituencies":22449,"ĠCafé":22450,"Ġhatch":22451,"ĠRupert":22452,"Ġrendering":22453,"Ġreinstated":22454,"Ġinterval":22455,"Texas":22456,"ĠAHL":22457,"February":22458,"review":22459,"Ġgle":22460,"Ġfals":22461,"Ġmarkers":22462,"Ġgovernmental":22463,"ĠPos":22464,"Ġarose":22465,"every":22466,"Ġrulings":22467,"obar":22468,"Govern":22469,"gren":22470,"isan":22471,"Ġmarketed":22472,"Click":22473,"Ġord":22474,"Ġballoons":22475,"asers":22476,"ĠHorton":22477,"pub":22478,"ĠAerospace":22479,"Ġflank":22480,"Ġmolecular":22481,"bour":22482,"nuts":22483,"Ġalliances":22484,"Ġbenchmarks":22485,"ocate":22486,"stadt":22487,"ĠGoodwin":22488,"lap":22489,"ĠFactors":22490,"Never":22491,"ĠNem":22492,"Ġroadside":22493,"orth":22494,"Ġexhibited":22495,"ĠPearce":22496,"ĠOlsen":22497,"Ġpostal":22498,"ĠLiberation":22499,"reen":22500,"mary":22501,"Ġropes":22502,"Ġlarg":22503,"Ġgob":22504,"boys":22505,"ĠSax":22506,"Ġreimburse
ment":22507,"ĠVie":22508,"ĠCatholics":22509,"ĠMartial":22510,"Ġpremiered":22511,"Ġawaits":22512,"ĠUnderstanding":22513,"ĠBelarus":22514,"ĠVor":22515,"ogi":22516,"iaz":22517,"Ġvictorious":22518,"Ġancestors":22519,"Ġwreckage":22520,"Ġoppression":22521,"ĠChildhood":22522,"Ġwidth":22523,"ĠPlymouth":22524,"ĠFifty":22525,"Ġoccupancy":22526,"etts":22527,"ĠFiscal":22528,"lifting":22529,"ĠTraditional":22530,"Ġnostalgia":22531,"Law":22532,"Ġlays":22533,"Ġarresting":22534,"Ġanticipating":22535,"Ġinsults":22536,"ĠExtension":22537,"Ġgenerator":22538,"ummer":22539,"Ġageing":22540,"Ġbouncing":22541,"ember":22542,"ĠWAR":22543,"ĠNico":22544,"ĠWow":22545,"ĠRaven":22546,"flower":22547,"ĠCrim":22548,"bh":22549,"Ġundo":22550,"Ġburgers":22551,"roud":22552,"ĠAtkinson":22553,"ĠYEAR":22554,"Ġpoorer":22555,"ICA":22556,"ĠSchedule":22557,"Ġstronghold":22558,"ĠMillennium":22559,"Ġ###":22560,"ilda":22561,"ĠGH":22562,"Ġupscale":22563,"aldi":22564,"ĠResolution":22565,"Ġswelling":22566,"Ġgrieving":22567,"ĠNile":22568,"ĠTig":22569,"ERY":22570,"ooth":22571,"BALL":22572,"Ġballet":22573,"Ġbucks":22574,"ĠUV":22575,"akin":22576,"Ġchilling":22577,"Ġdatabases":22578,"ĠGD":22579,"section":22580,"Ġhires":22581,"Ġmul":22582,"Ġsen":22583,"ĠTownsend":22584,"Ġinspected":22585,"ilic":22586,"Ġdiscriminatory":22587,"fol":22588,"Ġalcoholic":22589,"ĠHoff":22590,"Carl":22591,"Ġvicinity":22592,"lein":22593,"ĠEco":22594,"ĠGovern":22595,"Ġsecrecy":22596,"aned":22597,"ĠDUP":22598,"Ġ570":22599,"Ġsow":22600,"Ġstalls":22601,"Ġinsulting":22602,"ĠDT":22603,"Ġinforms":22604,"fitting":22605,"ĠDepending":22606,"ĠMelanie":22607,"ĠThom":22608,"path":22609,"Ġadmired":22610,"Peter":22611,"idents":22612,"ielding":22613,"ĠShanahan":22614,"TD":22615,"Things":22616,"sn":22617,"Ġconstituted":22618,"Ġ137":22619,"Ġderailed":22620,"ĠBonnie":22621,"Ġgraffiti":22622,"Ġearnest":22623,"Ġcompliant":22624,"blown":22625,"Ġalle":22626,"prise":22627,"Ġfocal":22628,"Ġgentlemen":22629,"ĠTalks":22630,"Ġpassports":22631,"Ġdeprived":22632,"Ġdude":22633,"ĠNath":22634,"Ġgoverned":22635,"Ġsac":22636,"Ġcastle":22637,"qv":22638,"Ġtolerated":22639,"ĠSci":22640,"close":22641,"ĠDynamics":22642,"Ġflashing":22643,"yk":22644,"ĠConsolid":22645,"Ġinherently":22646,"ĠForrest":22647,"Gene":22648,"Public":22649,"Ġloser":22650,"runners":22651,"Ġprudent":22652,"Ġpioneering":22653,"ĠHowe":22654,"ĠButter":22655,"ĠArabian":22656,"acha":22657,"ĠBBQ":22658,"ĠMineral":22659,"Ġdestiny":22660,"Ġretrieve":22661,"ĠBav":22662,"reth":22663,"oby":22664,"ĠGrid":22665,"Ġgrievances":22666,"ĠTips":22667,"Ġadamant":22668,"Ġdiets":22669,"Ġmilestones":22670,"Ġcollects":22671,"ĠLaboratories":22672,"ĠWC":22673,"Ġpostp":22674,"Ġdams":22675,"ĠOEM":22676,"Ġrumor":22677,"Ġlocking":22678,"Ġemission":22679,"Ġqueries":22680,"Jones":22681,"Ġlang":22682,"ĠAcqu":22683,"ĠMedium":22684,"ĠTreasurer":22685,"Sept":22686,"FB":22687,"Ġintegrating":22688,"Ġbolstered":22689,"Ġincorporating":22690,"encers":22691,"Ġirregularities":22692,"Ġnom":22693,"iod":22694,"ĠAi":22695,"Ġsor":22696,"anked":22697,"Ġrehears":22698,"fig":22699,"ĠBug":22700,"hoff":22701,"Ġtrooper":22702,"Ġgalaxy":22703,"amon":22704,"ĠAtlas":22705,"Ġsolicit":22706,"Ġsings":22707,"ĠInstructions":22708,"ĠMig":22709,"thinking":22710,"ĠCostco":22711,"Ġbreasts":22712,"Ġportraits":22713,"ĠCock":22714,"Ġsubscriptions":22715,"Ġpine":22716,"Ġhaunted":22717,"ĠMED":22718,"eer":22719,"ega":22720,"ĠZa":22721,"ENN":22722,"ĠWinners":22723,"aith":22724,"safe":22725,"Ġ143":22726,"ĠWeston":22727,"ĠLansing":22728,"ĠLaurel":22729,"ocrat":22730,"ograph":22731,"Ġmatchups":22732,"ĠFriend":22733,"
Ġdigest":22734,"Ġdimensions":22735,"azing":22736,"Ġtipping":22737,"Ġenrich":22738,"gart":22739,"argo":22740,"Ġoutbreaks":22741,"Ġsalvage":22742,"ĠErica":22743,"Ġmodules":22744,"ĠPDF":22745,"ĠGoods":22746,"oots":22747,"2011":22748,"Ġinterrupt":22749,"Ġradi":22750,"ĠSimone":22751,"vell":22752,"ĠSV":22753,"extremely":22754,"Ġstadiums":22755,"ĠRox":22756,"Ġconflicting":22757,"Ġyouthful":22758,"ĠUM":22759,"series":22760,"Ġded":22761,"Ġfielding":22762,"Pre":22763,"itled":22764,"Ġstreamed":22765,"Ġapprentices":22766,"ĠAlec":22767,"ĠGap":22768,"ĠPrem":22769,"Ġleased":22770,"Ġdeepening":22771,"Ġbounds":22772,"Ġrethink":22773,"ĠVoting":22774,"ĠScha":22775,"blood":22776,"ĠReeves":22777,"Ġbells":22778,"Ġcollector":22779,"ĠCrimson":22780,"ĠWheat":22781,"207":22782,"ĠHB":22783,"ĠBCC":22784,"Ġsync":22785,"ĠAnders":22786,"Ġthanking":22787,"Ġlayoffs":22788,"Ġfoolish":22789,"Ġcustod":22790,"Ġelephants":22791,"Ġcorrelation":22792,"ĠHarding":22793,"ĠGPU":22794,"ĠBarnett":22795,"Ġol":22796,"Ġalarms":22797,"Ġfluctuations":22798,"shop":22799,"Ġcommentators":22800,"ĠAlpine":22801,"Ġmur":22802,"Ġbiotech":22803,"Ġunlocked":22804,"ouri":22805,"roe":22806,"ĠPayment":22807,"ĠPOL":22808,"ĠGuest":22809,"Ġphrases":22810,"ĠBuilt":22811,"erves":22812,"Ġnutritional":22813,"205":22814,"ourage":22815,"Related":22816,"Come":22817,"ĠSAT":22818,"Ġgatherings":22819,"Ġsquads":22820,"Ġorganising":22821,"Ġministerial":22822,"Ġkilomet":22823,"ĠJump":22824,"ĠStrength":22825,"ĠFerr":22826,"Ġillustrated":22827,"ĠOber":22828,"Ġextrad":22829,"Ġlimitation":22830,"idis":22831,"ĠMonths":22832,"ifts":22833,"Ġmotives":22834,"Ġmaternal":22835,"Ġbait":22836,"Ġadversity":22837,"Twitter":22838,"ĠUni":22839,"Ġgrappling":22840,"Ġbowls":22841,"ĠHib":22842,"ĠCopenhagen":22843,"Ġsergeant":22844,"Ġintro":22845,"Ġscrambled":22846,"ĠExc":22847,"Ġshowcases":22848,"Ġplotting":22849,"Ġsym":22850,"ĠNah":22851,"berries":22852,"itching":22853,"conn":22854,"istle":22855,"ĠBeginning":22856,"asley":22857,"ĠMeadow":22858,"ĠCra":22859,"Ġsupremacist":22860,"Ġsweats":22861,"production":22862,"innon":22863,"ovo":22864,"Ġscept":22865,"Ġdrowning":22866,"ĠEh":22867,"Ġdecorations":22868,"Ġsympathetic":22869,"raction":22870,"Ġ195":22871,"ripp":22872,"ĠNotice":22873,"charging":22874,"ĠDIY":22875,"ĠJin":22876,"Ġskinny":22877,"Ġmaj":22878,"Ġwhisk":22879,"Ġcongreg":22880,"RAL":22881,"Ġvolley":22882,"Ġestablishments":22883,"Ġcite":22884,"Miss":22885,"Int":22886,"iola":22887,"ĠBare":22888,"KING":22889,"ools":22890,"private":22891,"Ġflaw":22892,"Ġwires":22893,"Ġideals":22894,"oub":22895,"Ġ\"'":22896,"ĠCompet":22897,"ĠStatements":22898,"ĠHDR":22899,"rm":22900,"Ġbegging":22901,"uffs":22902,"Ġdispatch":22903,"Ġskipped":22904,"Ġlabs":22905,"hawks":22906,"Ġexpl":22907,"Ġpatriotic":22908,"ussions":22909,"Ġportrayal":22910,"ĠBudapest":22911,"ĠCod":22912,"Ġextingu":22913,"smart":22914,"Ġburdens":22915,"ĠDrama":22916,"Ġaltitude":22917,"Ġpursuant":22918,"à¥":22919,"atari":22920,"cot":22921,"Ġhotline":22922,"ooters":22923,"ĠRolls":22924,"Ġjeopardy":22925,"oids":22926,"Ġpageant":22927,"149":22928,"Ġdistinguish":22929,"support":22930,"ĠHighlands":22931,"ĠErnst":22932,"ĠHole":22933,"pering":22934,"ĠHasan":22935,"Ġrece":22936,"Ġirregular":22937,"Ġdisturbed":22938,"Ġcoupon":22939,"ĠElijah":22940,"oise":22941,"Ġfriendships":22942,"girlfriend":22943,"Ġrampage":22944,"arers":22945,"Ġdispens":22946,"assion":22947,"Ġtentative":22948,"ĠExploration":22949,"fashioned":22950,"ĠInstit":22951,"Ġthemed":22952,"ĠKurdistan":22953,"ĠCAL":22954,"ĠSweeney":22955,"Ġransom":22956,"Ġstamps":22957,"ĠSchwe":22958,"ĠLuc
ia":22959,"124":22960,"omore":22961,"Ġmotivate":22962,"ĠWorcester":22963,"wald":22964,"CAR":22965,"iken":22966,"andro":22967,"ffic":22968,"ĠRehab":22969,"Ġgrou":22970,"Ġcontrollers":22971,"ĠHai":22972,"nz":22973,"Ġartillery":22974,"ĠMish":22975,"Ġregistry":22976,"Ġfrontman":22977,"ĠCharg":22978,"orneys":22979,"ĠPRESS":22980,"Ġperceptions":22981,"ĠMcGee":22982,"AU":22983,"mg":22984,"Off":22985,"ĠNGOs":22986,"chemical":22987,"Ġbrun":22988,"ĠHav":22989,"Ġlace":22990,"Ġ202":22991,"Ġdefer":22992,"Ġinjected":22993,"Ġgluten":22994,"ĠRin":22995,"ĠAvalanche":22996,"Ġcorpor":22997,"ĠPamela":22998,"Ġfills":22999,"ĠReve":23000,"ĠMonument":23001,"Ġnationalists":23002,"ĠIQ":23003,"adden":23004,"ĠLoop":23005,"Ġ134":23006,"Reg":23007,"click":23008,"bush":23009,"ĠKub":23010,"ipes":23011,"Ġtoggle":23012,"ĠRae":23013,"Ġburgl":23014,"Ġholistic":23015,"ronics":23016,"Ġprominence":23017,"jack":23018,"Ġfinan":23019,"icates":23020,"Ġvel":23021,"important":23022,"Thursday":23023,"chet":23024,"Ġrefunds":23025,"ĠElder":23026,"ĠOwner":23027,"Ġtakeaway":23028,"Pe":23029,"ĠToro":23030,"Tim":23031,"fix":23032,"before":23033,"ĠMotorola":23034,"Ġlev":23035,"Term":23036,"ĠSne":23037,"Ġmisinformation":23038,"ĠSinai":23039,"Ġnitrogen":23040,"Ġ203":23041,"Ġescaping":23042,"Ġjunction":23043,"ĠSantana":23044,"ĠYemeni":23045,"Ġwhipped":23046,"ĠStephenson":23047,"Ġattire":23048,"ĠBard":23049,"atically":23050,"ĠFaul":23051,"ĠSym":23052,"resh":23053,"ĠMG":23054,"Sub":23055,"ĠCarmen":23056,"Ġig":23057,"ĠSanford":23058,"ĠYa":23059,"cycle":23060,"Ġencryption":23061,"ĠScal":23062,"ĠChest":23063,"ĠMadonna":23064,"agin":23065,"ĠDHS":23066,"ĠCed":23067,"YR":23068,"Ġtruce":23069,"ĠBike":23070,"Ġfoes":23071,"ĠSlovakia":23072,"adal":23073,"Rain":23074,"OPE":23075,"Ġlockdown":23076,"Ġunilateral":23077,"Ġoverseen":23078,"Ġblames":23079,"Ġbarrage":23080,"aan":23081,"uds":23082,"ĠRust":23083,"ĠHC":23084,"cox":23085,"ĠAllied":23086,"ĠJosé":23087,"pected":23088,"Ġunp":23089,"Ġsomeday":23090,"Ġdeductions":23091,"icial":23092,"ĠPRO":23093,"ĠIntern":23094,"Ġhemp":23095,"Ġkilograms":23096,"Ġnets":23097,"ĠBACK":23098,"early":23099,"outed":23100,"Ġrelegated":23101,"Ġ1958":23102,"ĠMustang":23103,"Ġgamble":23104,"Ġprostitution":23105,"ĠPapa":23106,"Ġinexpensive":23107,"GHz":23108,"Ġjerseys":23109,"Ġmisery":23110,"VIS":23111,"ĠRAW":23112,"Ġthri":23113,"Ġaffiliation":23114,"small":23115,"Ġflashed":23116,"Ġcoastline":23117,"Ġgard":23118,"Ġsv":23119,"Ġwaits":23120,"itton":23121,"London":23122,"Ġaccus":23123,"ĠCharge":23124,"Ġincub":23125,"Ġwanna":23126,"ĠAwareness":23127,"abies":23128,"ĠUh":23129,"Ġpersuaded":23130,"ĠThames":23131,"Ġcurated":23132,"Ī":23133,"Ġbrutally":23134,"Ġrooftop":23135,"Ġoy":23136,"Ġ1900":23137,"bery":23138,"Ġuphill":23139,"Ġinteracting":23140,"Ġchilly":23141,"ERE":23142,"Ġcapsule":23143,"ĠSaul":23144,"ocker":23145,"Ġdeserving":23146,"ĠBowen":23147,"ĠReaders":23148,"ĠWriters":23149,"Ġartifacts":23150,"ĠRanger":23151,"reau":23152,"Ġimperson":23153,"Ġhears":23154,"ĠMaher":23155,"neg":23156,"Ġmantra":23157,"Ġmull":23158,"Ġelders":23159,"ĠAmtrak":23160,"Ġspouses":23161,"ĠHak":23162,"Ġopenness":23163,"Ġprevailed":23164,"Ġfortnight":23165,"Pal":23166,"ride":23167,"Ġillustrate":23168,"dominated":23169,"trust":23170,"ī":23171,"ĠFemale":23172,"ĠSlim":23173,"Ġdesc":23174,"ĠKathryn":23175,"Ġdeepen":23176,"TAIN":23177,"eredith":23178,"Ġchanted":23179,"ĠHector":23180,"bread":23181,"ĠIsa":23182,"Ġvolcanic":23183,"Ġah":23184,"owners":23185,"aquin":23186,"Ġmelting":23187,"Ġpreschool":23188,"ocus":23189,"ĠMast":23190,"ĠMyr":23191,"Ġsuppress":23192,"Ġvers
atility":23193,"ĠNEC":23194,"Ġhoax":23195,"Ġmutually":23196,"ĠNeb":23197,"ĠWheel":23198,"kit":23199,"abl":23200,"again":23201,"ĠSonny":23202,"rift":23203,"Ġsweater":23204,"Ġinund":23205,"ĠTaco":23206,"ĠBout":23207,"Ġnonprofits":23208,"Ġmodify":23209,"Ġprofessionalism":23210,"ĠGould":23211,"ĠGuerrero":23212,"Ġterribly":23213,"ĠBenz":23214,"Ġcountered":23215,"Ġbean":23216,"ĠPhelps":23217,"Ġprowess":23218,"bc":23219,"Ġfeast":23220,"Ġ5000":23221,"Ġrevisit":23222,"Ġchin":23223,"agent":23224,"Ġtones":23225,"Ġextraction":23226,"ĠPosts":23227,"oin":23228,"Ġattain":23229,"Ġgardening":23230,"earned":23231,"ĠOtto":23232,"player":23233,"Ġscams":23234,"ĠHonolulu":23235,"ĠAppro":23236,"ĠHIGH":23237,"Ġdwell":23238,"Islam":23239,"leaders":23240,"Ġlegisl":23241,"expl":23242,"ĠChoi":23243,"Ġfrenzy":23244,"Ġcommercially":23245,"Ġlbs":23246,"Ġgateway":23247,"ĠAndersen":23248,"emia":23249,"lez":23250,"Ġresidences":23251,"office":23252,"ĠHelsinki":23253,"olia":23254,"Ġwolf":23255,"Ġstyling":23256,"ĠJunction":23257,"ĠPeyton":23258,"udo":23259,"ĠDorothy":23260,"Ġfreshly":23261,"ĠJulio":23262,"ĠSunset":23263,"ĠMadden":23264,"Ġissu":23265,"Ġsounding":23266,"sports":23267,"Ġmassively":23268,"ĠRahman":23269,"Ġpresided":23270,"Instead":23271,"Ġ136":23272,"ĠHowell":23273,"beit":23274,"Ġprosperous":23275,"Ġwrongly":23276,"ĠRaqqa":23277,"ĠCes":23278,"Ġbuddy":23279,"Ġchatting":23280,"Ġfencing":23281,"Ġtant":23282,"ocated":23283,"ALK":23284,"Ġsnapping":23285,"euro":23286,"Ryan":23287,"ĠRecogn":23288,"ucked":23289,"Ġpurported":23290,"ĠCann":23291,"Ġintimidating":23292,"Ġrulers":23293,"ĠMarse":23294,"Art":23295,"ĠAadhaar":23296,"Ġvows":23297,"Ġhunter":23298,"ourmet":23299,"ĠVarious":23300,"2009":23301,"anie":23302,"Ġcompassionate":23303,"ĠParking":23304,"Ġmalaria":23305,"Ġamnesty":23306,"Ġworsened":23307,"ĠTitan":23308,"Ġcrossings":23309,"drug":23310,"Ġaddicted":23311,"Ġremorse":23312,"ĠDestiny":23313,"Dear":23314,"Ġhur":23315,"Ġimplicated":23316,"Ġplayful":23317,"Ġripe":23318,"Ġsizable":23319,"Ġcrab":23320,"Ġliqu":23321,"Ġdrib":23322,"Ġcontraction":23323,"cro":23324,"ĠGus":23325,"Ġdoomed":23326,"Ġmog":23327,"ĠMonitor":23328,"Count":23329,"Ġsadd":23330,"Ġwrestler":23331,"Ġrestraints":23332,"Ġraging":23333,"185":23334,"Ġtapes":23335,"Ġmitigation":23336,"ocratic":23337,"Ġvib":23338,"ĠSnowden":23339,"aldo":23340,"Ġweights":23341,"Ġ1959":23342,"ucc":23343,"ĠCoc":23344,"Log":23345,"ĠStev":23346,"Ġdealership":23347,"Ġtrademarks":23348,"iru":23349,"Ġbeneficiary":23350,"Ġlegislator":23351,"Ġdeadlines":23352,"Ġcosmetics":23353,"ĠTammy":23354,"ĠCombined":23355,"Ġeducator":23356,"athon":23357,"Ġcombo":23358,"fu":23359,"appropriate":23360,"nington":23361,"ĠLiberties":23362,"missions":23363,"opard":23364,"ĠMondays":23365,"Ġfetch":23366,"Ġhers":23367,"jon":23368,"ukes":23369,"zek":23370,"Ġvetting":23371,"yet":23372,"Ġfacilitating":23373,"ĠStras":23374,"character":23375,"ĠHeads":23376,"Ġclim":23377,"ĠAlbuquerque":23378,"Ġbind":23379,"Ġconcluding":23380,"ĠBasically":23381,"rail":23382,"ĠTCU":23383,"ĠDepression":23384,"Ġhem":23385,"ĠHue":23386,"Ġpand":23387,"Ġscoreboard":23388,"Av":23389,"Ġidol":23390,"compl":23391,"Ġredesign":23392,"ĠJarrett":23393,"Ġfavoured":23394,"ĠINS":23395,"Ġpropelled":23396,"Ġevasion":23397,"Ġwidened":23398,"Ġwastewater":23399,"nard":23400,"responsive":23401,"Ġdemographics":23402,"engine":23403,"ĠBrewer":23404,"ĠBaxter":23405,"ront":23406,"ĠColon":23407,"Ġpromoter":23408,"Ġgenres":23409,"ovsky":23410,"build":23411,"urate":23412,"ĠCohn":23413,"design":23414,"Ġturbulent":23415,"Ġcurtain":23416,"310":23417,"ĠLamp":23418,
"ĠBonds":23419,"church":23420,"Ġdeterrent":23421,"Ġdictatorship":23422,"acement":23423,"haul":23424,"Ġspir":23425,"Ġconceived":23426,"Ġstern":23427,"sit":23428,"Ġsingular":23429,"ĠYog":23430,"Ġconditional":23431,"Ġide":23432,"lund":23433,"Ġautop":23434,"ĠBEST":23435,"ĠJed":23436,"Ġrationale":23437,"Ġalarmed":23438,"Ġshovel":23439,"ĠProb":23440,"ĠMao":23441,"ĠBurgess":23442,"Ġ1953":23443,"above":23444,"ĠManson":23445,"Ġdismal":23446,"ĠFrankie":23447,"Ġtempted":23448,"Ġunderdog":23449,"ribing":23450,"ENCY":23451,"ĠDele":23452,"Las":23453,"places":23454,"Ġnotoriously":23455,"ĠAkin":23456,"Ġglut":23457,"Ġseamlessly":23458,"Ġrecess":23459,"written":23460,"ĠTJ":23461,"occ":23462,"ĠTerritory":23463,"ĠAIR":23464,"ĠDiagn":23465,"Ġvacancies":23466,"Ġcultivation":23467,"ĠAless":23468,"Ġrenamed":23469,"ĠMahmoud":23470,"bright":23471,"Ġvisibly":23472,"Ġnas":23473,"erred":23474,"ĠCarn":23475,"Ġtriggers":23476,"Ġpunishing":23477,"Ġluc":23478,"ĠBett":23479,"Ġbeam":23480,"ĠCheng":23481,"aina":23482,"Ġdetermines":23483,"ĠGerry":23484,"Ġshocks":23485,"Ġstainless":23486,"Ġdefects":23487,"ĠCinem":23488,"Ġtorrent":23489,"Ġresurgence":23490,"Ġcoral":23491,"Ġblitz":23492,"ĠGel":23493,"Ġstemmed":23494,"gur":23495,"Ġlymph":23496,"zzo":23497,"Ġspearheaded":23498,"Ġlicences":23499,"';":23500,"Ġarbitrary":23501,"ĠUzbek":23502,"Ġthief":23503,"reaching":23504,"Ġcand":23505,"ĠEA":23506,"ĠParaly":23507,"ĠEmerson":23508,"ĠSergey":23509,"ĠScher":23510,"ĠWr":23511,"rowing":23512,"Ġ3000":23513,"Ġmighty":23514,"elight":23515,"mAh":23516,"Ġcelebr":23517,"ĠConclusion":23518,"ĠCathy":23519,"Ġpolished":23520,"uddled":23521,"ewski":23522,"Ġfucking":23523,"Ġinterfering":23524,"Ġlandscapes":23525,"Ġfearful":23526,"ĠDetention":23527,"%).":23528,"ĠTT":23529,"Ġbleak":23530,"Ġindebted":23531,"Ġcheat":23532,"Ġconsolation":23533,"ĠPace":23534,"raine":23535,"Ġhonorary":23536,"420":23537,"Ġtechnician":23538,"ĠComprehensive":23539,"Ġfences":23540,"Ġwearable":23541,"ĠMarilyn":23542,"stru":23543,"Ġdrained":23544,"ĠGibraltar":23545,"lag":23546,"Ġdisorderly":23547,"Ġproclaimed":23548,"Ġcapacities":23549,"Ġretains":23550,"ĠVid":23551,"oshi":23552,"ĠEid":23553,"Ġanalytical":23554,"ominium":23555,"ĠExaminer":23556,"ĠNAACP":23557,"ocol":23558,"rev":23559,"ĠRim":23560,"ĠWoody":23561,"ĠMcKenna":23562,"ĠLennon":23563,"ĠEmploy":23564,"Fort":23565,"psy":23566,"Ġsphere":23567,"oday":23568,"ĠChick":23569,"ĠCompared":23570,"ĠIranians":23571,"ĠAccountability":23572,"itchie":23573,"ĠDickinson":23574,"Ġflock":23575,"Ġeclips":23576,"Ġnat":23577,"anke":23578,"ĠNeighborhood":23579,"Ġ141":23580,"Ġscarce":23581,"Ġcreations":23582,"lists":23583,"Ġuseless":23584,"Ġcriticisms":23585,"Ġruler":23586,"ĠHick":23587,"arya":23588,"worker":23589,"alam":23590,"Angelo":23591,"otle":23592,"Ġnewsletters":23593,"Ġerected":23594,"Ġzip":23595,"ĠBirthday":23596,"Ġdogged":23597,"Ġdanced":23598,"Ġconfession":23599,"Ġvomiting":23600,"ickers":23601,"Ġfox":23602,"Ġdeduct":23603,"Ġstresses":23604,"poll":23605,"ĠRadar":23606,"Ġengagements":23607,"Ġexaminer":23608,"Ġopportun":23609,"Ġlongevity":23610,"Ġbanana":23611,"carbon":23612,"uo":23613,"ĠLT":23614,"Ġsynagogue":23615,"Ġblackmail":23616,"INK":23617,"Ġfle":23618,"ĠGutierrez":23619,"Ġracket":23620,"Ġevenings":23621,"Ġdietary":23622,"ĠKok":23623,"Ġfaulty":23624,"Ġabandoning":23625,"ĠFlow":23626,"quest":23627,"estead":23628,"Ġbir":23629,"Ġsuicidal":23630,"ĠGift":23631,"ĠMissing":23632,"ĠMazda":23633,"ĠRib":23634,"ĠJourney":23635,"Ġconcede":23636,"Ġbrushed":23637,"Tw":23638,"andowski":23639,"ĠYun":23640,"Bride":23641,"zai":23642,"awatts":23643
,"Ġcha":23644,"Ġspans":23645,"SF":23646,"Ġshells":23647,"planned":23648,"ĠGeographic":23649,"ĠVent":23650,"Ġfav":23651,"Ġinterrogation":23652,"Ġvaries":23653,"ĠPlat":23654,"operative":23655,"avid":23656,"Ġgreatness":23657,"ĠStrait":23658,"ĠSelling":23659,"Ġlawful":23660,"Ġlyn":23661,"Ġfunnel":23662,"Ġpundits":23663,"ties":23664,"Ġpneumonia":23665,"Ġcommencement":23666,"Ġbrisk":23667,"fires":23668,"ĠHTML":23669,"ĠSevent":23670,"Ġhistor":23671,"Ġ147":23672,"olls":23673,"Ġpian":23674,"Little":23675,"Ġcommercials":23676,"Ġdeteriorated":23677,"Ġbasin":23678,"Ġprohibition":23679,"Ġrestrictive":23680,"Ġtom":23681,"ĠPulse":23682,"vale":23683,"Ġmim":23684,"ĠLyons":23685,"ĠTrinidad":23686,"data":23687,"195":23688,"ĠPain":23689,"vor":23690,"ĠDirectorate":23691,"Wow":23692,"essential":23693,"Ġemerges":23694,"ĠDoors":23695,"Ġunde":23696,"Ġarchives":23697,"ĠIX":23698,"ĠAman":23699,"oric":23700,"ĠOper":23701,"nothing":23702,"Ġ142":23703,"igr":23704,"rust":23705,"ĠBYU":23706,"ĠBom":23707,"Ġrift":23708,"ĠAbs":23709,"ĠJenn":23710,"Ġrookies":23711,"hoe":23712,"Ġunderage":23713,"eden":23714,"Ġroasted":23715,"Ġenrol":23716,"Ġerased":23717,"Ġfreeway":23718,"Sil":23719,"Ġplanner":23720,"Ġconfess":23721,"ĠDual":23722,"ĠHeadquarters":23723,"bottom":23724,"Ġstatistic":23725,"ĠPush":23726,"Ġanim":23727,"ITT":23728,"Ġexecutions":23729,"Hub":23730,"ĠStick":23731,"Ġobscure":23732,"oven":23733,"Ġcoats":23734,"unc":23735,"Morning":23736,"Ġnit":23737,"mie":23738,"Ġcurves":23739,"gew":23740,"ĠAnniversary":23741,"members":23742,"ĠAbsolutely":23743,"Ġapt":23744,"otional":23745,"ĠGin":23746,"izo":23747,"Ġpretending":23748,"arak":23749,"Ġorganise":23750,"Ġroyalties":23751,"ĠCamden":23752,"Ġsausage":23753,"Inst":23754,"Ġchalk":23755,"ĠSurf":23756,"ĠSunrise":23757,"Ġmoder":23758,"aido":23759,"loving":23760,"lus":23761,"Ġoblig":23762,"Ġmotions":23763,"Ġclarification":23764,"ĠOM":23765,"Ġbishop":23766,"Ġexhibitions":23767,"ĠRifle":23768,"ĠPhot":23769,"ĠHM":23770,"ATIONAL":23771,"Ġwid":23772,"Ġreside":23773,"ĠPV":23774,"OOK":23775,"ĠTue":23776,"Ġ1200":23777,"Ġ1957":23778,"Ġespionage":23779,"ĠAPPLIC":23780,"Ġblasts":23781,"fter":23782,"Ġimmensely":23783,"ĠLots":23784,"Ġinflammatory":23785,"anging":23786,"Ġtumultuous":23787,"identified":23788,"Ġstead":23789,"ĠAch":23790,"Ãī":23791,"Ġbub":23792,"hler":23793,"olution":23794,"Ġshun":23795,"Ġnull":23796,"Ġunused":23797,"ĠObs":23798,"Ġinsol":23799,"ĠAttack":23800,"ertain":23801,"Ġdefiant":23802,"Through":23803,"ĠArmour":23804,"Ġsimulation":23805,"UCK":23806,"Ġinfluenza":23807,"Ġonset":23808,"Ġbored":23809,"Ġsouls":23810,"Ġreferees":23811,"Ġcollaborations":23812,"ĠLer":23813,"Ġcreepy":23814,"Ġanaly":23815,"ĠEffect":23816,"orting":23817,"Card":23818,"Ġdice":23819,"Ġharvesting":23820,"235":23821,"sty":23822,"ĠMcCartney":23823,"Ġsalute":23824,"UMP":23825,"Ġherb":23826,"ĠAbuse":23827,"ĠRamadan":23828,"Ġsuck":23829,"trained":23830,"ĠPhysical":23831,"iren":23832,"anches":23833,"erie":23834,"Ġhangs":23835,"Ġcataly":23836,"Ġintuitive":23837,"assi":23838,"Ġtechn":23839,"Ġjugg":23840,"Ġgameplay":23841,"Ġapolog":23842,"Ġfifteen":23843,"Ġgalleries":23844,"Ġoutlines":23845,"patient":23846,"ĠPotential":23847,"Ġethnicity":23848,"Ġharbour":23849,"Ġoverthrow":23850,"ĠLung":23851,"Ġwarehouses":23852,"ĠMonitoring":23853,"Ġmentors":23854,"Ġsized":23855,"Ġenvisioned":23856,"Ġgin":23857,"DT":23858,"Ġpropel":23859,"ĠKul":23860,"ference":23861,"estic":23862,"ĠLego":23863,"Ġdinners":23864,"ĠMoe":23865,"designed":23866,"ĠSusp":23867,"ĠBrick":23868,"qua":23869,"IDS":23870,"ĠBam":23871,"athe":23872,"Ġslices":23873,"Ġb
ottled":23874,"thy":23875,"producing":23876,"ĠTerror":23877,"professional":23878,"ĠKis":23879,"erto":23880,"ĠVehicles":23881,"Ġbeforehand":23882,"Ġdetrimental":23883,"weights":23884,"Ġallowances":23885,"Williams":23886,"ĠSyrians":23887,"ĠSto":23888,"Ġcozy":23889,"reditation":23890,"ensen":23891,"ĠSard":23892,"Ġroy":23893,"ooting":23894,"ĠReserv":23895,"ominated":23896,"emate":23897,"ĠTot":23898,"ĠCarnegie":23899,"ĠThib":23900,"ĠMarshal":23901,"Ġ152":23902,"Ġmayors":23903,"inery":23904,"ĠFiona":23905,"ĠCadillac":23906,"ivated":23907,"Ġeagerly":23908,"ĠOffensive":23909,"Ġastronaut":23910,"ĠVital":23911,"Ġcane":23912,"Ġquitting":23913,"ĠLone":23914,"Ġcensorship":23915,"ĠWelch":23916,"ĠUd":23917,"Ġmarquee":23918,"ĠDip":23919,"Ġwhereby":23920,"Ġtiger":23921,"gem":23922,"Ġconserv":23923,"Ġpresumed":23924,"ĠEntry":23925,"ffer":23926,"ĠProceed":23927,"Ġbrawl":23928,"ĠJaime":23929,"Ġecho":23930,"Ġadvancements":23931,"Ġtransitional":23932,"erick":23933,"Ġbully":23934,"anan":23935,"Ġreinvent":23936,"ĠLetters":23937,"Ġbricks":23938,"ĠSmy":23939,"Ġtowering":23940,"gging":23941,"299":23942,"orian":23943,"dimensional":23944,"ĠForty":23945,"ĠSinn":23946,"ushi":23947,"ĠSurveillance":23948,"enabled":23949,"ĠMous":23950,"ĠVive":23951,"Marcus":23952,"Ġvom":23953,"Ġcreek":23954,"Ġlime":23955,"Ġseismic":23956,"ĠFork":23957,"Ġembroiled":23958,"marks":23959,"Ġherald":23960,"ĠSonia":23961,"âĢ¦\"":23962,"wired":23963,"Ġobliged":23964,"ĠProjects":23965,"lde":23966,"ĠRiders":23967,"Ġovercoming":23968,"Mail":23969,"ĠLawn":23970,"ĠHawk":23971,"figure":23972,"ĠWritten":23973,"Ġens":23974,"Ġspacious":23975,"target":23976,"ĠRecep":23977,"ĠSAM":23978,"Ġentertained":23979,"Ġignited":23980,"ĠCENT":23981,"ogenic":23982,"Ġunatt":23983,"Ġexceeds":23984,"Ġ--------------------------------":23985,"Ġpillars":23986,"ĠBorders":23987,"ickey":23988,"Ġextinction":23989,"Ġviability":23990,"Ġtumors":23991,"ĠWilkinson":23992,"ĠKEY":23993,"Ġbins":23994,"ĠReported":23995,"Sm":23996,"ĠExclusive":23997,"ĠChilean":23998,"info":23999,"Ġwilderness":24000,"did":24001,"absolutely":24002,"pillar":24003,"Ġelites":24004,"ĠPreview":24005,"ixie":24006,"Mont":24007,"ribut":24008,"dream":24009,"Ġplanners":24010,"ĠSomerset":24011,"Ġenvis":24012,"ĠStall":24013,"Ġelevate":24014,"ographies":24015,"rama":24016,"Ha":24017,"Ġamidst":24018,"oho":24019,"Ġrejects":24020,"Jim":24021,"Ġmarginally":24022,"Ġusher":24023,"arez":24024,"ĠHawth":24025,"Ġsprink":24026,"ĠOffer":24027,"Ġanchored":24028,"ucking":24029,"ĠGarn":24030,"ĠConserv":24031,"Ġsocietal":24032,"Ġbrowsing":24033,"Ġbidder":24034,"burgh":24035,"ĠRunner":24036,"Ġtrendy":24037,"verts":24038,"imposed":24039,"ĠPatton":24040,"lements":24041,"Ġspicy":24042,"Ġswe":24043,"ĠStrike":24044,"Ġclam":24045,"ĠYankee":24046,"ĠKT":24047,"ĠGreenwood":24048,"ĠWays":24049,"Ġ2050":24050,"Ġattach":24051,"ĠShim":24052,"Ġmeltdown":24053,"Ġassemble":24054,"ĠUPDATE":24055,"Ġscout":24056,"Brown":24057,"ĠKobe":24058,"Ġpostpone":24059,"liness":24060,"allo":24061,"rief":24062,"ĠGerm":24063,"ĠFD":24064,"ĠReggie":24065,"ĠUnivers":24066,"ĠShepard":24067,"Ġcancell":24068,"ĠRomeo":24069,"ĠWarrior":24070,"ench":24071,"ifier":24072,"Ġprivileges":24073,"Ġsenses":24074,"Ġimpoverished":24075,"ĠPostal":24076,"encer":24077,"ĠConrad":24078,"Ġprinter":24079,"Ġinflicted":24080,"ĠGamble":24081,"ĠHeroes":24082,"132":24083,"Ġrevisions":24084,"Ġunsuccessfully":24085,"ĠHeisman":24086,"Ġstamped":24087,"inding":24088,"ĠLuna":24089,"Ġreinvest":24090,"ducers":24091,"ĠPassword":24092,"Leod":24093,"Ġcompounded":24094,"',\"":24095,"ogging":24096,"Ġprobing":24097,"
ĠPBS":24098,"ĠMU":24099,"ĠWhenever":24100,"Ġsped":24101,"ĠCompetitive":24102,"isans":24103,"opa":24104,"Ġcleric":24105,"Ġvivid":24106,"à¸":24107,"126":24108,"Ġinconvenience":24109,"udi":24110,"Ġimmersive":24111,"Ġdiversion":24112,"Ġlogs":24113,"Ġspying":24114,"inct":24115,"Ġlitres":24116,"Ġmetallic":24117,"identally":24118,"FX":24119,"Ġloudly":24120,"Ġnursery":24121,"Ġcollectors":24122,"ĠKart":24123,"Ġescalate":24124,"Ġringing":24125,"Ġprocedural":24126,"Ġdisrupting":24127,"ĠEthiopian":24128,"ĠCFL":24129,"Ġillustrates":24130,"Ġperks":24131,"official":24132,"325":24133,"Ġmillennial":24134,"Ġbreadth":24135,"Ġmelted":24136,"Ġ850":24137,"ĠBake":24138,"donald":24139,"ĠGrac":24140,"Ġseeded":24141,"ĠDiscount":24142,"idates":24143,"Ġdrift":24144,"Ġcaptive":24145,"Ġseriousness":24146,"Ġrepercussions":24147,"Ġdisciplines":24148,"Ġthesis":24149,"Ġsleeve":24150,"ses":24151,"Monday":24152,"Ġthwart":24153,"ĠLic":24154,"Ġquadru":24155,"ĠPresbyterian":24156,"Ġreactors":24157,"ĠSuzanne":24158,"ewater":24159,"Ġlam":24160,"Ġbreastfeeding":24161,"Ġrats":24162,"ĠArtists":24163,"Ġdomestically":24164,"Ġdecom":24165,"ĠArms":24166,"basketball":24167,"Ġscrub":24168,"ĠTeddy":24169,"beh":24170,"ĠBetsy":24171,"ĠNursing":24172,"Ġdescriptions":24173,"127":24174,"gil":24175,"itional":24176,"Ġchampioned":24177,"ĠCalling":24178,"Ġrealization":24179,"ĠBuddy":24180,"hou":24181,"ĠDire":24182,"ĠHuff":24183,"Ġlipstick":24184,"Ray":24185,"Ġflare":24186,"belt":24187,"Ġbrightest":24188,"Ġmalfunction":24189,"ĠManor":24190,"Ġsaturated":24191,"rays":24192,"ĠDW":24193,"ixed":24194,"ĠSlovenia":24195,"seen":24196,"ĠCause":24197,"arios":24198,"ASE":24199,"Ġrend":24200,"ĠTBA":24201,"Ġlecturer":24202,"attering":24203,"Ġaffluent":24204,"CEO":24205,"Ġbreathtaking":24206,"ĠGiles":24207,"irth":24208,"ĠPhilips":24209,"Ġposture":24210,"ĠTSA":24211,"heit":24212,"Ġmenace":24213,"ricks":24214,"ĠAden":24215,"ĠReich":24216,"iggle":24217,"ĠShutterstock":24218,"Ġcourageous":24219,"edia":24220,"Staff":24221,"Ġdivert":24222,"ĠCir":24223,"Ġguessing":24224,"apers":24225,"ĠBritons":24226,"lé":24227,"Ġconvened":24228,"ĠSerbian":24229,"Ġricher":24230,"Ġcock":24231,"Ġdeposited":24232,"company":24233,"Ġdelic":24234,"sensitive":24235,"tank":24236,"ĠPatty":24237,"mia":24238,"onomous":24239,"cn":24240,"Ġclamp":24241,"ĠAcademic":24242,"Ġprosecuting":24243,"ĠTransparency":24244,"Ġdeflation":24245,"Ġdashboard":24246,"ĠDress":24247,"Ġlin":24248,"mu":24249,"ĠGoodell":24250,"Ġlav":24251,"ĠTwelve":24252,"Ġflavour":24253,"Ġfiercely":24254,"Ġbloom":24255,"ĠHaf":24256,"ĠGrad":24257,"LET":24258,"ĠSeeing":24259,"oxide":24260,"Ġmenus":24261,"char":24262,"adoes":24263,"combe":24264,"Street":24265,"ĠRidley":24266,"Ġdepicts":24267,"ĠPred":24268,"ÑĢ":24269,"British":24270,"Ġbumps":24271,"Ġlamp":24272,"ĠDesmond":24273,"ĠPB":24274,"Ġfrag":24275,"tin":24276,"ĠSharing":24277,"Ġdesperation":24278,"Ġcommuter":24279,"igrants":24280,"ĠShapiro":24281,"Ġkinda":24282,"Ġimpartial":24283,"ĠJewel":24284,"Ġcongratulations":24285,"Ġcompost":24286,"Ġadmiration":24287,"Ġpaycheck":24288,"ĠAnonymous":24289,"enger":24290,"Mer":24291,"ĠGospel":24292,"ĠEth":24293,"ĠMH":24294,"Ġfem":24295,"ĠTrial":24296,"Ġdepths":24297,"ĠApplied":24298,"Ġgrit":24299,"Ġerase":24300,"sid":24301,"comm":24302,"}":24303,"Ġretreated":24304,"Ġanalysed":24305,"ĠRegular":24306,"ĠPesh":24307,"ICAL":24308,"pei":24309,"ĠReilly":24310,"ĠTrib":24311,"Ġbooths":24312,"Ġdrank":24313,"Ġcoma":24314,"Ġharvested":24315,"ĠCHAR":24316,"Ġbutterfly":24317,"Ġsailed":24318,"ĠDrink":24319,"eping":24320,"ATCH":24321,"ĠLegends":24322,"Ġinsured":24323,
"Ġwholes":24324,"ĠBis":24325,"ĠShea":24326,"ighter":24327,"Ġsnakes":24328,"ĠGunn":24329,"ĠPoss":24330,"Ġdispar":24331,"Ġbombshell":24332,"Ġscanning":24333,"340":24334,"choice":24335,"cool":24336,"\"âĢĶ":24337,"ĠTheo":24338,"rine":24339,"ĠJacques":24340,"Ġdisadvantaged":24341,"Ġparamount":24342,"igate":24343,"stat":24344,"anski":24345,"Ġoutsourcing":24346,"Ġpopulous":24347,"Ġbinge":24348,"ĠOrganic":24349,"urban":24350,"Ġyogurt":24351,"Ġretweet":24352,"osen":24353,"cially":24354,"215":24355,"Ġeditions":24356,"Ġburgeoning":24357,"efully":24358,"ĠThousand":24359,"Ġreplacements":24360,"ĠAmazing":24361,"rator":24362,"icy":24363,"Ġintensify":24364,"Sen":24365,"ĠQuincy":24366,"powers":24367,"ĠAur":24368,"ĠZion":24369,"stal":24370,"Ġpillar":24371,"ĠErit":24372,"ĠPerform":24373,"aston":24374,"Eric":24375,"Ġunh":24376,"IFF":24377,"950":24378,"ĠEngineer":24379,"ĠLands":24380,"Ġdubious":24381,"fy":24382,"ĠWI":24383,"ĠSv":24384,"ĠHendricks":24385,"ĠKod":24386,"Ġoutlining":24387,"ĠCorrespond":24388,"amus":24389,"worst":24390,"arter":24391,"coni":24392,"Ġhierarchy":24393,"ĠTHAT":24394,"Ġexce":24395,"Ġrailways":24396,"Ġmasked":24397,"lene":24398,"Ġoutset":24399,"Ġavalanche":24400,"Ġnicknamed":24401,"Ġ702":24402,"Lee":24403,"Ġ139":24404,"ĠSixth":24405,"365":24406,"nda":24407,"Ġaccountant":24408,"Ġobese":24409,"Ġgrape":24410,"Ġimpunity":24411,"ĠYorkers":24412,"Ġguardian":24413,"icity":24414,"Ġcentrist":24415,"Ġwaterways":24416,"ursed":24417,"Ġhopeless":24418,"header":24419,"Ġtack":24420,"Ġric":24421,"umn":24422,"Ġvalve":24423,"Ġtread":24424,"ĠCST":24425,"Ġhepatitis":24426,"ctor":24427,"ĠRED":24428,"Ġsolitary":24429,"NW":24430,"Ġceremonial":24431,"Ġfoe":24432,"Ġling":24433,"Jason":24434,"ĠLisbon":24435,"Ġ1955":24436,"ĠHeller":24437,"Ġkin":24438,"essen":24439,"Ġturbines":24440,"shi":24441,"Ġlodge":24442,"Ġveterinary":24443,"ĠBoll":24444,"ĠConfederation":24445,"ĠJournalists":24446,"Ġtug":24447,"ĠStarr":24448,"Ġpiles":24449,"Way":24450,"adel":24451,"orean":24452,"Ġoft":24453,"Ġshortcomings":24454,"ĠSheila":24455,"Ġbackbone":24456,"III":24457,"ĠDarwin":24458,"ĠTunis":24459,"Ġsuspicions":24460,"Ġdisagreements":24461,"Ġ247":24462,"illery":24463,"'\"":24464,"Ġsegregation":24465,"ohl":24466,"Ġinstincts":24467,"ĠPoo":24468,"nih":24469,"parency":24470,"uddy":24471,"esting":24472,"asses":24473,"ĠIntroduction":24474,"ĠSirius":24475,"Local":24476,"orous":24477,"Ġrehearsal":24478,"Ġdemol":24479,"Ġtraffickers":24480,"Ġupsetting":24481,"Ġheir":24482,"death":24483,"ĠMoments":24484,"Los":24485,"Ġatmospheric":24486,"aints":24487,"ĠDianne":24488,"Ġlikewise":24489,"ĠMing":24490,"auga":24491,"Ġfirsthand":24492,"Ġnarratives":24493,"ĠAstron":24494,"ĠExtreme":24495,"Ġhorns":24496,"ĠSana":24497,"Ġrecapt":24498,"ĠMist":24499,"ĠRandolph":24500,"connect":24501,"Ġindecent":24502,"Ġforty":24503,"Ġjihadists":24504,"azes":24505,"Ġdread":24506,"Ġgrapes":24507,"Ġremoves":24508,"Ġscreamed":24509,"ĠCrus":24510,"ikers":24511,"Ġsnapshot":24512,"ĠCalls":24513,"Cons":24514,"Ġlettuce":24515,"ĠPig":24516,"urable":24517,"jured":24518,"ILY":24519,"ĠJessie":24520,".).":24521,"Pay":24522,"Tra":24523,"----------------":24524,"ĠUnits":24525,"ĠPlayboy":24526,"Ġarthritis":24527,"Ġafforded":24528,"insk":24529,"ĠFake":24530,"ĠLies":24531,"ĠBaltic":24532,"oyal":24533,"ĠVest":24534,"Ġrusher":24535,"Ġincorporates":24536,"ĠMM":24537,"ĠDru":24538,"ĠWare":24539,"ĠSammy":24540,"ĠGob":24541,"ĠRuk":24542,"Ġ146":24543,"ĠCrowd":24544,"Ġduel":24545,"irts":24546,"Ġsourcing":24547,"hp":24548,"ĠJava":24549,"bred":24550,"ĠRefer":24551,"Ġuninsured":24552,"Ġslope":24553,"256":245
54,"Ġregulating":24555,"Ġfundra":24556,"Ġinserted":24557,"ĠNickel":24558,"ĠConsumption":24559,"ĠRomo":24560,"Atlantic":24561,"Ġenclave":24562,"Ġpegged":24563,"Ġdirects":24564,"mbudsman":24565,"ĠDES":24566,"Ob":24567,"Ġlimbs":24568,"Ġbury":24569,"ILA":24570,"Ġstew":24571,"Ġbreeze":24572,"Ġabrupt":24573,"ĠGott":24574,"ĠClaude":24575,"Ġgenetically":24576,"Ġrigid":24577,"ĠDudley":24578,"ĠNer":24579,"registered":24580,"Ġentrenched":24581,"Ġextortion":24582,"ĠNurs":24583,"Ġcontingency":24584,"etter":24585,"Ġrejo":24586,"Ġprotagonist":24587,"Ġcounselling":24588,"ĠVit":24589,"aware":24590,"ĠMonsanto":24591,"GG":24592,"Ġincarcerated":24593,"Ġabduction":24594,"Ġreferencing":24595,"Germany":24596,"uates":24597,"reck":24598,"Ġtram":24599,"Ġchron":24600,"Ġmish":24601,"ĠVes":24602,"ĠTire":24603,"Ġvandal":24604,"ĠCrazy":24605,"ĠLifetime":24606,"ĠSpectrum":24607,"celer":24608,"Ġmotto":24609,"hang":24610,"Ġblade":24611,"gel":24612,"Ġbiography":24613,"Ġallegiance":24614,"hod":24615,"hap":24616,"ptic":24617,"acle":24618,"ĠBlade":24619,"ĠBoh":24620,"Ġ149":24621,"Ġchang":24622,"Ġcanned":24623,"Ġfacilitated":24624,"actor":24625,"iologist":24626,"Ġrebuilt":24627,"Ġawake":24628,"Ġmayoral":24629,"ĠEuros":24630,"Ġdangerously":24631,"MK":24632,"Ġreplica":24633,"Ġcoinc":24634,"blog":24635,"ĠEra":24636,"Ġrelinqu":24637,"quite":24638,"ondon":24639,"rosso":24640,"tun":24641,"Ġtouchscreen":24642,"Ġpops":24643,"ousing":24644,"efficient":24645,"Ġ148":24646,"Ġconced":24647,"although":24648,"Ġ1956":24649,"Ġmortar":24650,"ĠCave":24651,"ĠJung":24652,"urer":24653,"Ġillusion":24654,"ĠBerman":24655,"intend":24656,"Ġcoping":24657,"Dem":24658,"tion":24659,"estation":24660,"ĠSounds":24661,"Ġnavigating":24662,"Ġsperm":24663,"Ġreligions":24664,"Ġfol":24665,"Ġheroic":24666,"FD":24667,"Ġhesitant":24668,"asure":24669,"Ġredeem":24670,"Adam":24671,"Ġfireplace":24672,"vertis":24673,"ĠSung":24674,"290":24675,"iland":24676,"ĠUpdates":24677,"OTUS":24678,"ĠPTSD":24679,"Ġhelmets":24680,"\"?":24681,"Ġslashing":24682,"Ġscouts":24683,"Ġspelling":24684,"ĠInitial":24685,"draw":24686,"Ġchallengers":24687,"Ġsupremacists":24688,"Ġpilgrims":24689,"Ġasc":24690,"ĠFill":24691,"ĠPau":24692,"Ġjewel":24693,"ĠMalt":24694,"icip":24695,"Ġinhabitants":24696,"Ġmetre":24697,"ahar":24698,"Comp":24699,"atches":24700,"inv":24701,"Ġcyclist":24702,"ĠQC":24703,"Ġmanually":24704,"ĠAnchorage":24705,"Ġdiscarded":24706,"Ġconsolid":24707,"Ġnavig":24708,"ĠAnimals":24709,"ĠPole":24710,"esson":24711,"Ġ1954":24712,"Ġsorted":24713,"Ġmadness":24714,"ĠBrigade":24715,"ĠGenesis":24716,"Ġdismissing":24717,"ĠPanasonic":24718,"Ġdizz":24719,"ĠEducational":24720,"ĠKO":24721,"ĠPill":24722,"ĠGIF":24723,"Ġbol":24724,"Ġwards":24725,"Ġcontroversies":24726,"Chinese":24727,"Ġantics":24728,"Ġreliant":24729,"ĠMoff":24730,"Ġethanol":24731,"Ġtorch":24732,"rights":24733,"ĠHabit":24734,"arton":24735,"rera":24736,"ĠSasha":24737,"abella":24738,"Ġproliferation":24739,"Ġsincerely":24740,"communication":24741,"ĠNay":24742,"ĠChattanooga":24743,"ounces":24744,"ĠNXT":24745,"ĠEmir":24746,"Ġmanipulated":24747,"Ġharassing":24748,"wat":24749,"Ġbouts":24750,"Book":24751,"Ġhovering":24752,"ĠScan":24753,"ship":24754,"ĠAngola":24755,"ĠLC":24756,"Ġruins":24757,"Ġsexist":24758,"zar":24759,"Ġpledging":24760,"ober":24761,"Ġembold":24762,"Ġobjection":24763,"Ġboasting":24764,"MIN":24765,"Ġherbs":24766,"Ġgears":24767,"ĠIc":24768,"stre":24769,"him":24770,"Ġhomicides":24771,"cki":24772,"castle":24773,"counter":24774,"ĠCAS":24775,"ĠReasons":24776,"ĠDeclaration":24777,"Ġsimplify":24778,"Ġfared":24779,"Ġescort":24780,"Ġkidn":24781,
"ĠHamm":24782,"Ġnailed":24783,"Ġaccommodations":24784,"Ġmodifications":24785,"rible":24786,"Ġwool":24787,"EDIT":24788,"2010":24789,"Ġauthentication":24790,"Ġgoat":24791,"hom":24792,"Ġfederally":24793,"ĠRath":24794,"Ġspiked":24795,"Ġmisrepresent":24796,"Ġavenue":24797,"Ġbroadcasts":24798,"ĠEstonia":24799,"ennes":24800,"ĠMare":24801,"ption":24802,"ĠKag":24803,"Ġcircumstance":24804,"orrow":24805,"isons":24806,"ĠCollabor":24807,"Ġstroll":24808,"ĠCPS":24809,"soft":24810,"iral":24811,"apo":24812,"usky":24813,"poke":24814,"Ġwoo":24815,"ĠElena":24816,"ĠLastly":24817,"Ġlinemen":24818,"Canadian":24819,"ĠAnyway":24820,"Ġsubstantive":24821,"ĠCurt":24822,"Ġard":24823,"ĠYosh":24824,"ĠBuchanan":24825,"Ġrevolving":24826,"Ġspecials":24827,"Ġshrine":24828,"Ġlumber":24829,"Ġorchestrated":24830,"kie":24831,"azy":24832,"Ġexpiration":24833,"ĠDaryl":24834,"ĠPatri":24835,"better":24836,"2020":24837,"ĠFav":24838,"ĠOP":24839,"OTT":24840,"Ġflush":24841,"ĠSikh":24842,"Ġecosystems":24843,"ĠBET":24844,"eared":24845,"audio":24846,"ĠFahrenheit":24847,"police":24848,"Ġincarceration":24849,"Ġerupt":24850,"ĠDamien":24851,"ĠHague":24852,"ulz":24853,"ĠAgents":24854,"ĠBanner":24855,"Ġconductor":24856,"ĠAjax":24857,"arson":24858,"Ġrests":24859,"Ġeurozone":24860,"Ġfelon":24861,"Ġcurator":24862,"morning":24863,"Ġevidenced":24864,"ĠNeh":24865,"Ġmattress":24866,"Ġtast":24867,"Ġfueling":24868,"ĠOccup":24869,"Ġbake":24870,"ĠZac":24871,"meaning":24872,"Ill":24873,"ĠHau":24874,"ĠLaden":24875,"Ġbald":24876,"Mary":24877,"oky":24878,"atri":24879,"Ġtracker":24880,"OTA":24881,"catching":24882,"ĠUnderground":24883,"ĠHuffPost":24884,"ĠAtkins":24885,"oglu":24886,"Ġauthorised":24887,"Ġroutines":24888,"ĠHof":24889,"veland":24890,"Ġlangu":24891,"Ġprot":24892,"ĠHyd":24893,"integ":24894,"Ġbravery":24895,"Ġviolin":24896,"Ġdelightful":24897,"Ġticks":24898,"iton":24899,"Ġreap":24900,"Ġoversized":24901,"ĠPitch":24902,"Ġprized":24903,"Ġfusion":24904,"fact":24905,"acting":24906,"Ġfullback":24907,"Ġpolite":24908,"Ġswear":24909,"Ġconfiscated":24910,"ĠStud":24911,"Ġfielded":24912,"rito":24913,"covered":24914,"financial":24915,"bill":24916,"HK":24917,"OTOS":24918,"loaded":24919,"Ġmarble":24920,"ĠDiplom":24921,".âĢĶ":24922,"Ġeats":24923,"Ġbackfield":24924,"Ġtimeframe":24925,"Ġvegetarian":24926,"Ġswaps":24927,"ĠMines":24928,"igor":24929,"ĠLenn":24930,"ĠDP":24931,"ordered":24932,"ĠShark":24933,"Ġquant":24934,"erence":24935,"Ġashes":24936,"ĠBuckley":24937,"ophobia":24938,"Ġwarranted":24939,"Rose":24940,"Ġunreasonable":24941,"ĠJav":24942,"Ġpalette":24943,"Ġjoints":24944,"Ġadvent":24945,"Ġnoteworthy":24946,"ĠNicol":24947,"ĠChristensen":24948,"Ġplummeted":24949,"ayers":24950,"Ġdefends":24951,"Ġcontended":24952,"ĠCongratulations":24953,"kish":24954,"ĠHannity":24955,"Ġgroundwater":24956,"ĠKramer":24957,"Ġerect":24958,"Ġappet":24959,"ĠKardash":24960,"Ġexacerbated":24961,"Ġexplanations":24962,"vious":24963,"eport":24964,"---":24965,"icism":24966,"ĠNatasha":24967,"ĠGeoffrey":24968,"estro":24969,"Article":24970,"Ġincidence":24971,"Ġprovoked":24972,"elf":24973,"Ġinsistence":24974,"ĠOUR":24975,"Ġfertilizer":24976,"Ġstickers":24977,"ĠGators":24978,"ĠLanding":24979,"ĠDON":24980,"sta":24981,"ĠRobbins":24982,"Ġpixels":24983,"ĠHoy":24984,"imated":24985,"ĠÃī":24986,"â":24987,"Ġsimpl":24988,"Other":24989,"245":24990,"Ġforcibly":24991,"'.\"":24992,"Ġsmashing":24993,"Ġmosquitoes":24994,"Ġpaints":24995,"Ġdebating":24996,"enty":24997,"ĠIB":24998,"leaf":24999,"ĠDah":25000,"Ġreferral":25001,"pired":25002,"Ġbrunch":25003,"gie":25004,"Ġvict":25005,"ribute":25006,"Ġbloggers":25007,"Ġgum":2
5008,"ĠAdmiral":25009,"France":25010,"ĠPK":25011,"ĠSaturn":25012,"Ġinflated":25013,"WAR":25014,"Ġscenic":25015,"usal":25016,"their":25017,"Ġcontends":25018,"Ġpathways":25019,"inis":25020,"Ġawarding":25021,"Ġmisled":25022,"Ġeternal":25023,"Ġexaminations":25024,"Ġpoker":25025,"Ġsafest":25026,"Ġchildcare":25027,"aday":25028,"Ġpreceding":25029,"ĠCollective":25030,"Ġrespectable":25031,"ographical":25032,"Ġoak":25033,"00000":25034,"ĠCorridor":25035,"oran":25036,"133":25037,"Ġmushrooms":25038,"gaard":25039,"ĠOmega":25040,"ĠNaturally":25041,"anim":25042,"Ġcaptains":25043,"Ġtang":25044,"Ġlobbyists":25045,"ĠSug":25046,"Ġsucc":25047,"249":25048,"ENG":25049,"134":25050,"Ġsolic":25051,"ĠAdded":25052,"ĠSuicide":25053,"ĠFULL":25054,"ĠStrauss":25055,"ĠDiesel":25056,"Ġtempting":25057,"acist":25058,"ĠDelivery":25059,"Ġquiz":25060,"ĠPARK":25061,"Ġcollisions":25062,"Ġrestrained":25063,"purpose":25064,"ĠChanges":25065,"Ġabsentee":25066,"Ġprobes":25067,"hib":25068,"Ġcul":25069,"Ġpetty":25070,"Ġnecess":25071,"Ġcues":25072,"OME":25073,"Ġinadvertently":25074,"urity":25075,"ĠStuff":25076,"FG":25077,"Ġwrestlers":25078,"Ġpaste":25079,"ĠRoku":25080,"Ġcardboard":25081,"aires":25082,"Ġvariables":25083,"ĠSaras":25084,"ĠFif":25085,"Ġinvests":25086,"ĠDiscover":25087,"ĠFix":25088,"Thomas":25089,"ĠLunch":25090,"lv":25091,"camera":25092,"Step":25093,"Ġresumes":25094,"ĠSacred":25095,"ĠShooting":25096,"Ġnoble":25097,"Ġslopes":25098,"Ġont":25099,"Ġtwists":25100,"Very":25101,"Ġbigotry":25102,"ĠTib":25103,"Ġmos":25104,"Ġwarrior":25105,"Ġbroadcasters":25106,"Ġubiquitous":25107,"ameda":25108,"Ġchess":25109,"Special":25110,"Ġconver":25111,"Ġdeleg":25112,"endant":25113,"Ġfoil":25114,"Ġlush":25115,"Ġtaxed":25116,"Mag":25117,"ahs":25118,"Ġtablespoons":25119,"scription":25120,"clamation":25121,"ĠCertain":25122,"ĠDiversity":25123,"Ġhairst":25124,"ĠBrewery":25125,"Ġshedding":25126,"Cla":25127,"Ġpenis":25128,"ĠMurder":25129,"Park":25130,"uner":25131,"iments":25132,"ĠOVER":25133,"hus":25134,"Ġtabloid":25135,"Chart":25136,"Ġvouchers":25137,"ĠCoord":25138,"Ġmethane":25139,"ĠFisheries":25140,"ĠKham":25141,"includes":25142,"ĠSuperman":25143,"ensed":25144,"isure":25145,"Amazon":25146,"Ġvacated":25147,"heet":25148,"Ġroast":25149,"Ġlegalize":25150,"ĠTut":25151,"Ġsignage":25152,"init":25153,"Ġthefts":25154,"202":25155,"Ġstatic":25156,"Ġchants":25157,"Bob":25158,"Ġdiscretionary":25159,"Ġendurance":25160,"Ġcollegiate":25161,"Ġcorridors":25162,"Ġslack":25163,"ĠLash":25164,"Az":25165,"Series":25166,"Ġnonpartisan":25167,"ĠMcGill":25168,"Ġuneven":25169,"ulsive":25170,"eu":25171,"Ġpil":25172,"Ġfisheries":25173,"Ġonslaught":25174,"fiction":25175,"holding":25176,"Ġcheated":25177,"Ġtraumat":25178,"lasting":25179,"Ġmultitude":25180,"ĠThr":25181,"ĠBreast":25182,"Ġ1600":25183,"ĠMatth":25184,"Ġdiminish":25185,"ĠFTC":25186,"Ġgram":25187,"ĠResident":25188,"Ġfading":25189,"Ġmarginalized":25190,"ĠLite":25191,"ĠCarlton":25192,"Ġerad":25193,"Welcome":25194,"ĠFaw":25195,"iddy":25196,"Ġparticip":25197,"Ġcz":25198,"Ġtexted":25199,"Ġsuites":25200,"ĠForever":25201,"Ġrendition":25202,"rait":25203,"ĠPrague":25204,"Ġsponsoring":25205,"Ġcompos":25206,"ĠBeacon":25207,"144":25208,"Ġpupil":25209,"Ġintricate":25210,"Ġathleticism":25211,"Ġoptimization":25212,"Ġloot":25213,"polit":25214,"ĠOtt":25215,"Whatever":25216,"uno":25217,"ĠConstable":25218,"esville":25219,"Ġlookout":25220,"ĠAircraft":25221,"Ġspo":25222,"Ġcorrobor":25223,"Ġhiatus":25224,"ĠKnowing":25225,"ĠHamp":25226,"Ġspe":25227,"Ġstoring":25228,"Ġshakes":25229,"uran":25230,"Ġsickness":25231,"Ġliber":25232,"ĠAdministrative":252
33,"Ġpleasing":25234,"ĠEqual":25235,"ĠConversation":25236,"Ġalgae":25237,"Ġlobbyist":25238,"ĠHelena":25239,"ptions":25240,"Ġfaire":25241,"ĠGone":25242,"ĠWiggins":25243,"Robert":25244,"Ġlistens":25245,"ĠDaisy":25246,"Ġsticky":25247,"sale":25248,"ĠMarijuana":25249,"ĠSSD":25250,"ĠTool":25251,"once":25252,"ĠHarmon":25253,"mobile":25254,"Ġdetain":25255,"Money":25256,"Ġflawless":25257,"forced":25258,"Ġguru":25259,"Ġairspace":25260,"ĠArchie":25261,"ĠGender":25262,"ĠMeat":25263,"abilities":25264,"ĠBD":25265,"Open":25266,"Ġoutsider":25267,"issue":25268,"Ġlearns":25269,"natural":25270,"Ġvinegar":25271,"ĠSUB":25272,"ĠRecon":25273,"blers":25274,"Ġsniff":25275,"Ġsuppression":25276,"Ġsaf":25277,"urger":25278,"Ġbunker":25279,"asaki":25280,"ĠSpartan":25281,"ĠTok":25282,"Ġrav":25283,"Ġfoc":25284,"Sean":25285,"etric":25286,"Ġballpark":25287,"ĠHerb":25288,"ĠBM":25289,"ĠPublishing":25290,"Ġroadmap":25291,"pered":25292,"Ġpredator":25293,"ĠBlockchain":25294,"Ġvalidity":25295,"ĠGlou":25296,"ĠYamaha":25297,"Ġadop":25298,"Ġswamp":25299,"Ġcomplied":25300,"Ky":25301,"Greg":25302,"casts":25303,"john":25304,"ĠBosnia":25305,"Ġcinematic":25306,"ĠTavern":25307,"Ġfrustrations":25308,"eryl":25309,"Ġfairy":25310,"UNCH":25311,"ĠTus":25312,"Corp":25313,"ĠNug":25314,"closed":25315,"Ġexercised":25316,"urden":25317,"Ġdigitally":25318,"137":25319,"ĠVictims":25320,"Ġreluctance":25321,"ELL":25322,"ĠTribe":25323,"chall":25324,"Ġwhiskey":25325,"ogl":25326,"Ġmater":25327,"ĠBac":25328,"Ġapartheid":25329,"ĠMBA":25330,"mot":25331,"ĠIre":25332,"®,":25333,"ĠChic":25334,"Ġtimed":25335,"ĠDome":25336,"efer":25337,"Ġobserver":25338,"unky":25339,"ĠKant":25340,"Ġundrafted":25341,"Ġsimplicity":25342,"onds":25343,"Ġstoked":25344,"Ġ1949":25345,"Ġransomware":25346,"ĠPow":25347,"ĠAngelo":25348,"ĠAmbrose":25349,"adjusted":25350,"Guard":25351,"138":25352,"ĠKaplan":25353,"stri":25354,"Ġcries":25355,"NF":25356,"atro":25357,"Ġavocado":25358,"illian":25359,"Ġsculptures":25360,"Ġelevation":25361,"Ġinspires":25362,"Ġgenerals":25363,"arb":25364,"chell":25365,"ĠJournalism":25366,"ĠHybrid":25367,"ĠCaller":25368,"vec":25369,"Lu":25370,"Ġresemble":25371,"bys":25372,"erving":25373,"antz":25374,"Ġwiden":25375,"vised":25376,"Ev":25377,"Ġdiagn":25378,"ĠMakes":25379,"Ġcer":25380,"ĠPats":25381,"single":25382,"sche":25383,"struct":25384,"Ġdissolved":25385,"Ġtimeout":25386,"Ġenhancement":25387,"CF":25388,"Ġindust":25389,"ĠDed":25390,"ĠZo":25391,"CB":25392,"Ġpesticides":25393,"ĠRubin":25394,"George":25395,"opal":25396,"Ġmotel":25397,"critical":25398,"Ġcollapsing":25399,"ĠShal":25400,"tex":25401,"Ġcomplementary":25402,"Ġoust":25403,"ĠFlu":25404,"Ġexporting":25405,"Ġdifferential":25406,"north":25407,"ĠFG":25408,"Ġspoon":25409,"sha":25410,"Ġdismantle":25411,"elta":25412,"Ġjar":25413,"space":25414,"Smart":25415,"mere":25416,"Ð":25417,"ĠGillespie":25418,"Lo":25419,"ĠMead":25420,"capacity":25421,"ĠIssue":25422,"050":25423,"ĠVall":25424,"Ġdisgr":25425,"Ġmeme":25426,"Ġpard":25427,"Ġcompensated":25428,"ĠKet":25429,"major":25430,"ĠBren":25431,"Ġheed":25432,"131":25433,"Ġcm":25434,"Ġdazzling":25435,"ĠCheese":25436,"Ġmonumental":25437,"Ġyielding":25438,"Read":25439,"Ġgrinding":25440,"Ang":25441,"Ġdefiance":25442,"Ġintimidated":25443,"Ġ310":25444,"Ġoutsiders":25445,"houn":25446,"Ma":25447,"ĸ":25448,"ĠForget":25449,"ĠSans":25450,"Ġunfolding":25451,"ĠSap":25452,"ĠLak":25453,"Ġsectarian":25454,"ĠDaddy":25455,"oxy":25456,"hitting":25457,"Ġdetectors":25458,"ĠRee":25459,"Ġbroaden":25460,"Ġslaying":25461,"Ġsuspending":25462,"Ġinvestig":25463,"Tuesday":25464,"Ġantibiotic":25465,"ĠShiite":25466
,"igi":25467,"ĠExternal":25468,"ĠPhotographer":25469,"Ġerratic":25470,"NJ":25471,"ĠDock":25472,"Ġoutweigh":25473,"rants":25474,"Ġlobster":25475,"Ġreactor":25476,"Ġunrealistic":25477,"ĠAudrey":25478,"ĠYor":25479,"Anyone":25480,"Ġfraught":25481,"е":25482,"ĠWester":25483,"fc":25484,"ĠDunham":25485,"ĠLug":25486,"allow":25487,"139":25488,"Ġparity":25489,"Ġhorizontal":25490,"ijuana":25491,"Ġcivilization":25492,"ĠGins":25493,"Ġsmokers":25494,"ĠDiabetes":25495,"Five":25496,"ĠDG":25497,"Ġunderscores":25498,"Ġelabor":25499,"ĠLub":25500,"ĠDevil":25501,"Ġ154":25502,"ĠGuarant":25503,"ĠPandora":25504,"Ġexcav":25505,"Ġaccuser":25506,"Ġrevolt":25507,"Ġinstructors":25508,"Ġire":25509,"ographic":25510,"ĠCLE":25511,"Ġexpedition":25512,"ould":25513,"Ġstriving":25514,"south":25515,"onis":25516,"ĠSwed":25517,"MY":25518,"ĠLevin":25519,"Ġcarp":25520,"ĠArchitects":25521,"Ġ{":25522,"Ġcovert":25523,"Ġcooled":25524,"ĠStaten":25525,"Ġspecializing":25526,"ĠHazel":25527,"Ġlen":25528,"ighty":25529,"Ġbrilliantly":25530,"Phil":25531,"Ġlament":25532,"Australia":25533,"203":25534,"Ġticking":25535,"Ġadjud":25536,"Ġroommate":25537,"ĠSheet":25538,"capital":25539,"167":25540,"Ġendeavor":25541,"Ġaver":25542,"Ġdues":25543,"ĠCycl":25544,"oried":25545,"Va":25546,"loading":25547,"Ġpremie":25548,"Ġregimes":25549,"ĠAly":25550,"Ġperennial":25551,"Ġconsoles":25552,"Ġironic":25553,"ichael":25554,"Ġvigorously":25555,"Ġtransmit":25556,"gary":25557,"eking":25558,"Ġjails":25559,"ĠEpiscopal":25560,"eddy":25561,"Ġidle":25562,"Ġsafeguards":25563,"Ġdwindling":25564,"NOR":25565,"torn":25566,"ĠEvangel":25567,"ĠPlastic":25568,"ĠTerm":25569,"Ġforwarded":25570,"avage":25571,"Ġrefrigerator":25572,"arna":25573,"ĠGuinness":25574,"ĠCandy":25575,"Ġbotched":25576,"seller":25577,"Ġpul":25578,"grades":25579,"oshenko":25580,"earth":25581,"nette":25582,"Ġtraps":25583,"Ġtarn":25584,"Ġmilitar":25585,"ĠAriel":25586,"Ġtubes":25587,"ulo":25588,"Water":25589,"edin":25590,"Ġmarvel":25591,"chenko":25592,"ĠElk":25593,"spect":25594,"coe":25595,"ĠIllustrated":25596,"Ġruthless":25597,"etermined":25598,"Ġdys":25599,"Ġbreaching":25600,"gee":25601,"Nick":25602,"Ġcruiser":25603,"Ġciv":25604,"Ġdou":25605,"Ġ;":25606,"deb":25607,"ĠAsheville":25608,"Ġbiting":25609,"Ġyo":25610,"Courtesy":25611,"Ġroses":25612,"ĠConsequently":25613,"Ġrevis":25614,"Ġconfinement":25615,"next":25616,"produced":25617,"Ġmoratorium":25618,"Ġkne":25619,"eties":25620,"Ġplethora":25621,"Ġceleb":25622,"FIN":25623,"Ġdepartures":25624,"ĠWynne":25625,"abilia":25626,"ĠCourts":25627,"olis":25628,"Ġcereal":25629,"Ġblended":25630,"333":25631,"ĠLun":25632,"Ġrepe":25633,"Ġmathematics":25634,"Ġpharmacies":25635,"Center":25636,"Ġwhist":25637,"pine":25638,"Ġperm":25639,"Ġcustomary":25640,"Ġhormones":25641,"Ġcleansing":25642,"Ġconfidentiality":25643,"Ġmascot":25644,"Ġslippery":25645,"Ġmediation":25646,"Ġpodcasts":25647,"Ġcoating":25648,"Ġconveyed":25649,"Ġgir":25650,"ĠNurse":25651,"DM":25652,"Ġlured":25653,"orted":25654,"Ġolig":25655,"ritz":25656,"ĠINF":25657,"Ġtirelessly":25658,"Ġdoorstep":25659,"Ġtomb":25660,"Ġwithholding":25661,"irling":25662,"Ġhog":25663,"Ġ156":25664,"Ġgau":25665,"chem":25666,"raid":25667,"Ġtrolls":25668,"Ġ182":25669,"ĠColumb":25670,"Ġtissues":25671,"Ġnaive":25672,"Ġlect":25673,"Central":25674,"Sign":25675,"168":25676,"Ġbribe":25677,"ĠDoll":25678,"ĠTripoli":25679,"Ġfunk":25680,"Ġplaza":25681,"Ġmechanic":25682,"mem":25683,"Ġmonkey":25684,"grid":25685,"Ġtainted":25686,"ĠNicaragua":25687,"pelling":25688,"ĠXia":25689,"ammers":25690,"Ġorth":25691,"ICAN":25692,"Ġrant":25693,"Ġdiary":25694,"ĠHarrington":25695,
"Ġimply":25696,"Qaeda":25697,"Ġworsen":25698,"Ġcrafting":25699,"ĠShir":25700,"Ġcoincided":25701,"Ġsnatched":25702,"ileen":25703,"sei":25704,"Ġsurgeons":25705,"directed":25706,"Ġcompulsory":25707,"Ġnowadays":25708,"ĠLI":25709,"ĠRebel":25710,"Ġlions":25711,"ĠJR":25712,"scar":25713,"ĠRespons":25714,"Ġscroll":25715,"ĠErd":25716,"iety":25717,"\";":25718,"ĠBone":25719,"ĠRumble":25720,"ĠKS":25721,"ĠLaur":25722,"kell":25723,"ĠBirds":25724,"agic":25725,"Ġsimmer":25726,"Ġrunaway":25727,"Ġ162":25728,"auna":25729,"Ġdialog":25730,"Ġlouder":25731,"esque":25732,"RR":25733,"Ġbloss":25734,"Ġcaliber":25735,"nery":25736,"Ġhauled":25737,"Ġbacterial":25738,"ĠVanity":25739,"ĠPrograms":25740,"omew":25741,"ĠMama":25742,"Ġarr":25743,"Ġdod":25744,"ĠJarvis":25745,"ĠFIRST":25746,"Ġinjections":25747,"ĠBallard":25748,"Ġmedically":25749,"angan":25750,"ĠNewfoundland":25751,"Ġfracking":25752,"Ġbast":25753,"outing":25754,"Ġmercury":25755,"Ġwatershed":25756,"ĠAmateur":25757,"Ġ153":25758,"escal":25759,"Ġpainter":25760,"creat":25761,"Ġperceive":25762,"Ġgent":25763,"attacks":25764,"worked":25765,"Ġimporting":25766,"Indian":25767,"Ġconvict":25768,"clad":25769,"Ġbudding":25770,"Ġambient":25771,"ĠWitness":25772,"letes":25773,"Ġbuffet":25774,"Ġneedles":25775,"Ġcoding":25776,"Ġchoke":25777,"Ġcorrespondence":25778,"Ġgods":25779,"Ġdances":25780,"Ġsteadfast":25781,"cert":25782,"Ġroaming":25783,"between":25784,"weak":25785,"Jer":25786,"jandro":25787,"Ġdiscouraged":25788,"Ġfruition":25789,"ĠØ":25790,"ĠKop":25791,"ULL":25792,"efe":25793,"imble":25794,"obb":25795,"ulla":25796,"Ġaccredited":25797,"Ġlectures":25798,"bil":25799,"why":25800,"Ġgreeting":25801,"ĠBoost":25802,"Ġmailed":25803,"Ġtroop":25804,"Ġfrig":25805,"Ġrese":25806,"Ġscratched":25807,"Stars":25808,"ĠRailroad":25809,"ĠIdol":25810,"Ġsuccumbed":25811,"ĠWeeks":25812,"ffe":25813,"Ġjihadist":25814,"ITION":25815,"Ġthreads":25816,"ĠGenerally":25817,"Ġmedieval":25818,"Ġquotas":25819,"ĠFerry":25820,"rique":25821,"Ġprod":25822,"ĠEduc":25823,"rive":25824,"Ġensued":25825,"Cy":25826,"Ġinfring":25827,"Ġprank":25828,"Ġfrontline":25829,"Ġcompletes":25830,"upe":25831,"Ġmanageable":25832,"Ġpoems":25833,"otten":25834,"igne":25835,"threat":25836,"ĠDri":25837,"ĠLINK":25838,"Calif":25839,"ĠDos":25840,"ulent":25841,"Ġaids":25842,"Ġslips":25843,"umped":25844,"Ġstyled":25845,"Ġdisproportionately":25846,"ĠDish":25847,"ĠUncle":25848,"andel":25849,"Ġrecharge":25850,"rators":25851,"ĠSPR":25852,"Ġguarded":25853,"ĠGreatest":25854,"ĠSkills":25855,"ĠNob":25856,"ĠDesk":25857,"ĠCros":25858,"Ġwrit":25859,"Ġquery":25860,"ORTS":25861,"Ġbundled":25862,"Ġgib":25863,"Ġeth":25864,"iesta":25865,"Ġevade":25866,"dict":25867,"straight":25868,"Met":25869,"present":25870,"Ġdiff":25871,"Ġdere":25872,"ĠSpl":25873,"Ġrepr":25874,"ĠBeard":25875,"Ġvain":25876,"Ġappointing":25877,"ĠVisual":25878,"caps":25879,"gado":25880,"ĠRican":25881,"ĠPose":25882,"endor":25883,"Ġ222":25884,"ĠLear":25885,"Ġconstructing":25886,"Dan":25887,"ĠSpears":25888,"ĠTherapy":25889,"pta":25890,"Ġrehabilit":25891,"Ġrisked":25892,"ĠGuer":25893,"HF":25894,"Ġ301":25895,"Ġliking":25896,"Ġmodular":25897,"eree":25898,"ĠMAT":25899,"ĠHomeless":25900,"Ġstove":25901,"erd":25902,"hash":25903,"ĠAchilles":25904,"ĠBeta":25905,"Ġincl":25906,"Ġgunned":25907,"ĠCrab":25908,"ĠMara":25909,"Ġinvaded":25910,"ulatory":25911,"ATA":25912,"angering":25913,"onso":25914,"Ġallocate":25915,"Ġgarment":25916,"itudes":25917,"ĠHuang":25918,"Ġstaples":25919,"ĠAlban":25920,"Ġtrough":25921,"Ġupright":25922,"tie":25923,"Ġexploits":25924,"ĠVaughan":25925,"ĠDarrell":25926,"Ġassortment":25927,"ĠChill
":25928,"Ġlearners":25929,"aqu":25930,"Ġexplode":25931,"ĠChong":25932,"bt":25933,"opl":25934,"Ġaltern":25935,"Ġ151":25936,"fur":25937,"ULT":25938,"HOU":25939,"ĠMemory":25940,"Ġboosts":25941,"ynes":25942,"priv":25943,"Ġtimeless":25944,"Ġcurtail":25945,"ĠCary":25946,"ĠHud":25947,"Ġexclus":25948,"Ġ275":25949,"Ġfry":25950,"ĠVera":25951,"Ġdefied":25952,"ĠDust":25953,"Ġenvision":25954,"ĠPhilipp":25955,"Ġenhancements":25956,"ĠLIB":25957,"ggy":25958,"ĠAzure":25959,"esis":25960,"Ġcharismatic":25961,"Ġcoincide":25962,"inged":25963,"ĠChoose":25964,"Ġsizeable":25965,"136":25966,"Ġpronounce":25967,"ĠPositive":25968,"Ġideally":25969,"Ġechoes":25970,"Ġcottage":25971,"Ġencrypted":25972,"Prime":25973,"Ġá":25974,"Ġflashes":25975,"Group":25976,"Ġ501":25977,"heat":25978,"atility":25979,"ĠTesting":25980,"pex":25981,"WT":25982,"154":25983,"annah":25984,"Ġcompromising":25985,"Ġinactive":25986,"Ġdisparity":25987,"Ġgruesome":25988,"ĠFeather":25989,"ĠMandal":25990,"Ġthereof":25991,"ĠProducer":25992,"Ġprofiling":25993,"Ġlogistical":25994,"Ġcornerstone":25995,"ĠClaudia":25996,"Congress":25997,"ĠDill":25998,"ophone":25999,"Ġcameo":26000,"ĠCutler":26001,"Ġcraz":26002,"throw":26003,"ĠKasich":26004,"Ġexploiting":26005,"ĠSeas":26006,"agles":26007,"ĠGeological":26008,"ĠStub":26009,"ĠUps":26010,"MER":26011,"Ġmem":26012,"itution":26013,"Ġunderstandably":26014,"Ġcontractual":26015,"warming":26016,"qi":26017,"Sky":26018,"whelming":26019,"Ġcurse":26020,"ĠAren":26021,"Ġ265":26022,"ĠGree":26023,"Ġpresiding":26024,"Works":26025,"stones":26026,"Ġappalling":26027,"plex":26028,"dj":26029,"aunting":26030,"Ġimag":26031,"Ġsexism":26032,"ĠVert":26033,"ĠRag":26034,"ĠBliss":26035,"posium":26036,"div":26037,"Ġexperimenting":26038,"Ass":26039,"Lago":26040,"worthiness":26041,"ĠBerk":26042,"ĠDisneyland":26043,"Ġexaggerated":26044,"iliation":26045,"ĠFP":26046,"Ġprincipals":26047,"Miami":26048,"ropri":26049,"PLE":26050,"iona":26051,"ĠPokemon":26052,"apse":26053,"Ġbubbles":26054,"INC":26055,"ĠCaps":26056,"ĠBrowne":26057,"sing":26058,"Ġcafé":26059,"Ġceilings":26060,"frame":26061,"ĠIrwin":26062,"ATS":26063,"dated":26064,"Ġprotester":26065,"Ġtaps":26066,"ĠOslo":26067,"Ù":26068,"Ġconcentrations":26069,"Ġdistributions":26070,"Ġglucose":26071,"ĠRudolph":26072,"Ġtowels":26073,"Ġâĸº":26074,"Ġneighbourhoods":26075,"Ġinduction":26076,"Ġglaring":26077,"Ġannexation":26078,"Ġunsustainable":26079,"ĠTend":26080,"Ġthumbs":26081,"iegel":26082,"cript":26083,"gor":26084,"closure":26085,"thought":26086,"Ġpaddle":26087,"Ġemulate":26088,"Ġdiameter":26089,"Ġtailor":26090,"ĠCorpor":26091,"icable":26092,"ĠPrin":26093,"Ġadminister":26094,"ĠJudd":26095,"ĠColleg":26096,"aund":26097,"ĠPond":26098,"ĠNOTE":26099,"Ġcombating":26100,"Ġinvention":26101,"ĠOculus":26102,"ĠRepl":26103,"iscal":26104,"Ġtrilogy":26105,"anian":26106,"ATT":26107,"ĠCoke":26108,"DL":26109,"ĠLup":26110,"living":26111,"Ġadvertise":26112,"ĠConnie":26113,"amping":26114,"Ġsung":26115,"ORY":26116,"ĠTet":26117,"Ġsplits":26118,"Ġreconnect":26119,"Ġlou":26120,"mut":26121,"ulator":26122,"Ġstrap":26123,"Ġswallow":26124,"rote":26125,"Ġexec":26126,"ffen":26127,"ĠCombine":26128,"ĠTreat":26129,"Ġsorrow":26130,"ĠNotably":26131,"ĠSever":26132,"rette":26133,"Ġwherein":26134,"Ġtransitioning":26135,"Ġtrout":26136,"Ġcockpit":26137,"Ġcrawl":26138,"Ġferv":26139,"Ġliquids":26140,"Ġtsp":26141,"atell":26142,"Ġmeasles":26143,"Ġjug":26144,"Ac":26145,"ĠKD":26146,"ĠMoose":26147,"Ġvans":26148,"chain":26149,"ĠPapua":26150,"plet":26151,"Wednesday":26152,"lynn":26153,"chery":26154,"budget":26155,"Tony":26156,"ĠBacon":26157,"Ġstirred":26158,
"ĠSpecialist":26159,"Ġcounterfeit":26160,"а":26161,"Ġdifferentiate":26162,"Ġmuscular":26163,"ĠTheodore":26164,"Ġlooms":26165,"ĠXX":26166,"ottage":26167,"Ġbenches":26168,"ĠMunicip":26169,"Po":26170,"ĠHeck":26171,"Ġscars":26172,"ĠNim":26173,"ÙĬ":26174,"ĠIngredients":26175,"Ġecological":26176,"ĠAWS":26177,"Ġdispose":26178,"Ġmattered":26179,"Ġ720":26180,"Ġpatriotism":26181,"ĠGrind":26182,"Ġcurved":26183,"opia":26184,"ĠLiqu":26185,"Ġevangelical":26186,"tto":26187,"ĠMaterial":26188,"ĠShowtime":26189,"ĠBS":26190,"Ġcheckpoints":26191,"Ġcrippling":26192,"ĠBalance":26193,"stress":26194,"bearing":26195,"Ġ216":26196,"ĠGuards":26197,"Ġlinebackers":26198,"Ġoffending":26199,"Ġsands":26200,"umbnail":26201,"atorial":26202,"Ġliberties":26203,"ĠGW":26204,"ĠPulitzer":26205,"ĠAlvin":26206,"ĠFAC":26207,"ĠStrategies":26208,"Ġreiter":26209,"ĠRestaur":26210,"ĠLithuania":26211,"ĠSwanson":26212,"terror":26213,"ĠMaurit":26214,"Ġparadise":26215,"zzle":26216,"owment":26217,"ĠWP":26218,"Ġsodium":26219,"Ġfuturistic":26220,"Ġdots":26221,"Anthony":26222,"Though":26223,"Ġstripes":26224,"Ġorig":26225,"ultz":26226,"Ġ340":26227,"KK":26228,"umer":26229,"ivery":26230,"Ġplacebo":26231,"Ġdemocrat":26232,"Ġsubmerged":26233,"ĠHidden":26234,"pieces":26235,"Ġasteroid":26236,"ĠGraphic":26237,"Ġadvert":26238,"sil":26239,"Ġdreaming":26240,"Ġnationality":26241,"Ġfostering":26242,"daughter":26243,"ĠSavings":26244,"Ġmischief":26245,"ĠClair":26246,"ĠBundy":26247,"Ġblatant":26248,"Ġtabs":26249,"qa":26250,"severe":26251,"attered":26252,"Ġgreed":26253,"Ġresembles":26254,"Ġnominal":26255,"Ġineligible":26256,"wealth":26257,"fax":26258,"payers":26259,"Ġdisplacement":26260,"itute":26261,"Ġunpleasant":26262,"ĠPom":26263,"lif":26264,"edo":26265,"ĠNP":26266,"Inter":26267,"Ġcohort":26268,"ĠStacy":26269,"ĠDai":26270,"Ġhistories":26271,"alin":26272,"273":26273,"Ġdram":26274,"ĠKand":26275,"Ġexpectancy":26276,"ansson":26277,"Ġlimbo":26278,"ĠPolar":26279,"Ġdivine":26280,"oused":26281,"Ġshel":26282,"ĠProblem":26283,"achment":26284,"Ġâĸł":26285,"shoot":26286,"antam":26287,"ĠHerz":26288,"Ġ157":26289,"Ġpreventive":26290,"keye":26291,"Sing":26292,"Ġcharacteristic":26293,"Ġcasually":26294,"ĠTaiwanese":26295,"md":26296,"ĠHubbard":26297,"imon":26298,"Ġsect":26299,"148":26300,"Ġmartyr":26301,"stud":26302,"Ġcongrat":26303,"ĠSWAT":26304,"ĠTheory":26305,"INAL":26306,"opping":26307,"ply":26308,"ĠKindle":26309,"uu":26310,"ĠLith":26311,"kaya":26312,"ĠActivity":26313,"uously":26314,"ĠJeb":26315,"tell":26316,"ĠSpin":26317,"ĠExplorer":26318,"Ġfolded":26319,"ĠCanterbury":26320,"ĠStur":26321,"Ġminiature":26322,"Ġmultif":26323,"ĠPressure":26324,"angling":26325,"ĠOverse":26326,"Ġresides":26327,"Ġimpressions":26328,"Ġauthored":26329,"265":26330,"Ġallergies":26331,"143":26332,"ĠJi":26333,"Ġsticker":26334,"ĠAccord":26335,"Ġcaste":26336,"Ġseparates":26337,"ĠFein":26338,"Daily":26339,"179":26340,"ĠScores":26341,"ĠAuction":26342,"hea":26343,"Ġdisclosing":26344,"ĠTacoma":26345,"Ġverse":26346,"ĠBeg":26347,"Ġfabrics":26348,"aez":26349,"Ġattachment":26350,"isy":26351,"Christ":26352,"Ġaddictive":26353,"Ġvir":26354,"Week":26355,"ĠPlum":26356,"croft":26357,"itivity":26358,"ĠExhibition":26359,"Ġbruised":26360,"Ġmimic":26361,"rers":26362,"Ġanal":26363,"Ġunintended":26364,"Ġpall":26365,"atts":26366,"ĠWarn":26367,"Ġslows":26368,"WH":26369,"Ġembro":26370,"nec":26371,"Ġ168":26372,"285":26373,"ologic":26374,"Ġhob":26375,"ĠPeel":26376,"Mill":26377,"eps":26378,"Ġrobbers":26379,"ĠDahl":26380,"semble":26381,"omics":26382,"toe":26383,"ĠLoch":26384,"Ġreproduction":26385,"ĠCullen":26386,"Ġimplants":26387,"
Ġwow":26388,"ĠSTATE":26389,"vt":26390,"Ġdepleted":26391,"Ġbreweries":26392,"Ġhateful":26393,"Ġgast":26394,"Ġhollow":26395,"Ġradically":26396,"ographed":26397,"ĠFog":26398,"onian":26399,"ĠSequ":26400,"Ġdisrespectful":26401,"Dis":26402,"ĠExper":26403,"pron":26404,"ĠAmelia":26405,"ĠSage":26406,"bath":26407,"Ġtransformative":26408,"Ġtremendously":26409,"Ġpillow":26410,"ĠNormal":26411,"Cont":26412,"ĠMedic":26413,"educated":26414,"Ġredesigned":26415,"Ġkneeling":26416,"Ġinh":26417,"Ġroofs":26418,"Ġhandmade":26419,"Ġprotracted":26420,"ĠIsn":26421,"ĠCapacity":26422,"Ġsquash":26423,"ĠVega":26424,"Ġfats":26425,"ĠCertified":26426,"ointed":26427,"Ġpricey":26428,"ĠBasil":26429,"Ġfreezer":26430,"Ġscent":26431,"Ġpizz":26432,"ĠArd":26433,"Ġdistractions":26434,"Ġviolently":26435,"ĠHess":26436,"Ġfunc":26437,"Ġundert":26438,"Ġrejuven":26439,"Ġdisbelief":26440,"cluded":26441,"named":26442,"ĠFailure":26443,"kus":26444,"Ġhostages":26445,"ĠSahara":26446,"Ġ1944":26447,"Leary":26448,"ĠPrel":26449,"enza":26450,"ĠAlly":26451,"ĠKak":26452,"Ġcounselors":26453,"ĠGale":26454,"ĠHok":26455,"ĠSold":26456,"Ġhacker":26457,"Ġhun":26458,"Ġbung":26459,"Ġdeclares":26460,"Ġinfringement":26461,"OOD":26462,"Ġdoub":26463,"jam":26464,"Ġallergy":26465,"ĠShipping":26466,"Ġmedic":26467,"Ġaccommod":26468,"Ġdocumenting":26469,"Ġcompanions":26470,"Ġmodelling":26471,"Ġcarriage":26472,"ĠCherokee":26473,"Ġtresp":26474,"Ġtaxable":26475,"ĠActivities":26476,"ĠCrane":26477,"bots":26478,"ĠRusso":26479,"Ġstocked":26480,"ervation":26481,"Ġcoffin":26482,"aign":26483,"guards":26484,"Ġonwards":26485,"Ġfrank":26486,".*":26487,"unic":26488,"Ġcens":26489,"enic":26490,"ruit":26491,"rained":26492,"Ġadapting":26493,"aments":26494,"Ġstagnant":26495,"azaar":26496,"ĠHarlem":26497,"Ġ158":26498,"ysis":26499,"Ġbraking":26500,"Ġdipping":26501,"Ġclan":26502,"ĠShu":26503,"Ġprops":26504,"qualified":26505,"Ġmistakenly":26506,"ĠStalin":26507,"Ġaddicts":26508,"ĠCALL":26509,"ropolis":26510,"aten":26511,"pec":26512,"ĠDro":26513,"ĠFellowship":26514,"ĠSupporting":26515,"loc":26516,"uben":26517,"499":26518,"Bro":26519,"Ġpots":26520,"Ġchunks":26521,"wr":26522,"ĠColonial":26523,"ĠArchitecture":26524,"Ġconstrained":26525,"Ġenvelop":26526,"ĠIronically":26527,"aban":26528,"Ġapparatus":26529,"Ġcue":26530,"Ġborne":26531,"ĠRoz":26532,"ilton":26533,"Ġtheoretical":26534,"ĠWatching":26535,"Ġfuck":26536,"ĠSilk":26537,"ĠSTE":26538,"bler":26539,"ĠPOST":26540,"ĠUpton":26541,"Ġsummons":26542,"ĠCum":26543,"ĠKL":26544,"Ġrelaxation":26545,"ĠDuff":26546,"Ġincumb":26547,"ĠRedd":26548,"Ġstature":26549,"Ġcanv":26550,"added":26551,"Ġremedies":26552,"ĠISO":26553,"ĠDecker":26554,"Ġafloat":26555,"Ġstartling":26556,"ĠBethlehem":26557,"Ġrealizes":26558,"find":26559,"ĠAra":26560,"Ġphased":26561,"arov":26562,"Ġhalting":26563,"ĠWindow":26564,"Ġdentist":26565,"Ġtumble":26566,"Ġvalidation":26567,"Ġcarve":26568,"ĠIPS":26569,"Ġirrit":26570,"ĠEssential":26571,"Ġfluids":26572,"rons":26573,"Ġimplant":26574,"Ġnuisance":26575,"ĠShelley":26576,"ĠGemini":26577,"Ġpharmac":26578,"iction":26579,"Ġtaped":26580,"ĠGovernments":26581,"ruly":26582,"Ġscant":26583,"Ġprominently":26584,"Ġreim":26585,"unning":26586,"arted":26587,"ĠMatters":26588,"Ġ1918":26589,"ĠPros":26590,"atel":26591,"ĠBattalion":26592,"onduct":26593,"talk":26594,"ĠTinder":26595,"ĠInstant":26596,"ĠKern":26597,"Ġbuckets":26598,"ĠGroups":26599,"Ġmetaphor":26600,"cloud":26601,"ĠString":26602,"Ohio":26603,"Ġcaffeine":26604,"Old":26605,"Ġdefinite":26606,"ĠNikola":26607,"ĠLords":26608,"icol":26609,")?":26610,"Ġenjoyment":26611,"Ġfamine":26612,"Ġdefinitions":26613,"ĠJem
":26614,"Check":26615,"Ġaiding":26616,"ĠMé":26617,"Ġrenewables":26618,"Ġsightings":26619,"footed":26620,"Box":26621,"Ġgoats":26622,"Ġshack":26623,"AX":26624,"ĠMonk":26625,"ĠGraduate":26626,"Ġmeats":26627,"handle":26628,"147":26629,"rys":26630,"Ġunsub":26631,"Pont":26632,"uble":26633,"440":26634,"Ġeyel":26635,"thro":26636,"Ġcreep":26637,"^^^^":26638,"Ġpopcorn":26639,"Ġcompression":26640,"sal":26641,"ouf":26642,"Ġrepairing":26643,"Think":26644,"Ġdoubtful":26645,"ĠLooks":26646,"Ġtaller":26647,"Ġsul":26648,"sf":26649,"give":26650,"ĠGau":26651,"Ġrevered":26652,"EMBER":26653,"Ġsloppy":26654,"ersen":26655,"Ġvitamins":26656,"ĠImprovement":26657,"Ġprogresses":26658,"Ġdiploma":26659,"semb":26660,"ustain":26661,"Ġchant":26662,"Ġbumped":26663,"Ġsabotage":26664,"nant":26665,"Ġrabbit":26666,"Ġdividing":26667,"ĠDefender":26668,"Ġlik":26669,"Ġirrespective":26670,"cade":26671,"ĠSter":26672,"touch":26673,"EMA":26674,"Ġparted":26675,"ĠBAR":26676,"hung":26677,"Ġannoyed":26678,"Ġhinder":26679,"Ġexamines":26680,"oan":26681,"ĠBoe":26682,"Ġaggreg":26683,"ĠChu":26684,"ĠUCS":26685,"IGHTS":26686,"pez":26687,"ĠUNESCO":26688,"Ġwindshield":26689,"Martin":26690,"Ġwithhold":26691,"does":26692,"Ġbruising":26693,"Ġdeterior":26694,"bourg":26695,"ĠTowers":26696,"JD":26697,"England":26698,"Ġequivalents":26699,"Ġrazor":26700,"Ġreassuring":26701,"Ġident":26702,"Ġ208":26703,"reath":26704,"ceans":26705,"Ġpatrolling":26706,"eve":26707,"pots":26708,"itative":26709,"Ġsided":26710,"Ġsofa":26711,"Ġunborn":26712,"Ġaug":26713,"Ġperpetual":26714,"effect":26715,"represented":26716,"Ġrails":26717,"ĠSummers":26718,"ĠMOR":26719,"ĠSlow":26720,"ĠExpert":26721,"Ġshameful":26722,"Ġaudits":26723,"Sl":26724,"ĠBurr":26725,"adow":26726,"ĠWAY":26727,"anic":26728,"ĠIslamists":26729,"ĠStranger":26730,"pse":26731,"amaz":26732,"ĠPeggy":26733,"ĠSeventh":26734,"Ġscreenplay":26735,"ĠGriff":26736,"Ireland":26737,"142":26738,"Ġneural":26739,"ĠFernand":26740,"ainment":26741,"ĠMigration":26742,"ureen":26743,"ĠSCH":26744,"Sullivan":26745,"ĠWag":26746,"ĠREG":26747,"Ġ420":26748,"inky":26749,"ĠNewspaper":26750,"School":26751,"Ok":26752,"ĠKrishna":26753,"Ġ480":26754,"erald":26755,"Ġskipping":26756,"Ġharrowing":26757,"158":26758,"rogen":26759,"Ġbetrayal":26760,"Ġculmination":26761,"ĠCirc":26762,"Ġ211":26763,"stro":26764,"ĠTrace":26765,"Ġheaviest":26766,"td":26767,"ĠHenri":26768,"epend":26769,"RB":26770,"arella":26771,"umbai":26772,"Ġcrem":26773,"ĠDistribut":26774,"ruff":26775,"Ġscreams":26776,"Ġscathing":26777,"girls":26778,"Ġtiles":26779,"ĠEvil":26780,"usp":26781,"Ġknowledgeable":26782,"Ġrestitution":26783,"ĠWiFi":26784,"Ġitiner":26785,"exper":26786,"oris":26787,"ĠPokémon":26788,"iane":26789,"produ":26790,"ĠAchievement":26791,"Ġbrunt":26792,"ĠSurgery":26793,"Ġpragmatic":26794,"Ber":26795,"ĠKejriwal":26796,"cus":26797,"Ġconsensual":26798,"acet":26799,"ĠSecondly":26800,"Ġdivul":26801,"uca":26802,"Ġbusted":26803,"emies":26804,"ĠMou":26805,"Ġ217":26806,"Ġexcludes":26807,"ĠSamoa":26808,"Ġlofty":26809,"ĠSic":26810,"ĠRemem":26811,"dn":26812,"Ġeradicate":26813,"Ġpies":26814,"Ġscenery":26815,"ATTLE":26816,"ĠWAS":26817,"Ġinnovate":26818,"ĠEverest":26819,"Ġsynonymous":26820,"izen":26821,"Ġeuth":26822,"ĠFIA":26823,"ITIES":26824,"ĠSuddenly":26825,"Ġforay":26826,"pell":26827,"ÄŁ":26828,"licensed":26829,"Ġfra":26830,"Ġblasting":26831,"autical":26832,"ĠBlizzard":26833,"orer":26834,"Ġchili":26835,"ĠSylvia":26836,"except":26837,"tec":26838,"ĠResistance":26839,"young":26840,"usions":26841,"iotic":26842,"ĠDreams":26843,"ĠArchives":26844,"Ġunleash":26845,"ĠPract":26846,"Ġlikened":26847,"
Ġga":26848,"Ġdisappearing":26849,"Ġunnoticed":26850,"Ġfrightened":26851,"arms":26852,"ĠCAD":26853,"Ġcoloured":26854,"ĠSigns":26855,"oing":26856,"Ġvodka":26857,"ruption":26858,"otions":26859,"isal":26860,"ĠBecome":26861,"Ġswoop":26862,"reating":26863,"Ġchoking":26864,"Ġunforgettable":26865,"258":26866,"packs":26867,"345":26868,"ĠAutumn":26869,"Ġther":26870,"399":26871,"ĠFaculty":26872,"Ġ1933":26873,"ĠNormally":26874,"orge":26875,"ĠTess":26876,"ĠChrom":26877,"Ġscripts":26878,"Ġbiking":26879,"Act":26880,"Ġgrazing":26881,"ĠLabrador":26882,"ĠLey":26883,"Ġwandering":26884,"Ġfend":26885,"ĠPolk":26886,"ĠKeane":26887,"ĠBeef":26888,"elope":26889,"ĠApproximately":26890,"Ġ1952":26891,"personal":26892,"Ġhistorians":26893,"ĠMcDonnell":26894,"must":26895,"LES":26896,"iking":26897,"Ġtherm":26898,"Ġhumane":26899,"Ġcrowdfunding":26900,"ĠBenefits":26901,"Land":26902,"Ġanalog":26903,"agency":26904,"ĠCrowley":26905,"Ġbirths":26906,"Ġobj":26907,"Ġfren":26908,"ĠSalmon":26909,"bies":26910,"Ġreve":26911,"216":26912,"Ġbetrayed":26913,"Ġinduced":26914,"acles":26915,"Ġtrad":26916,"Ġforgiven":26917,"Ġearners":26918,"208":26919,"Ġxen":26920,"Ġunle":26921,"Ġnecklace":26922,"Ġgravel":26923,"Ġsalads":26924,"Ġgrooming":26925,"California":26926,"Ġpossessed":26927,"Ġproclamation":26928,"Ġsequences":26929,"ream":26930,"FOX":26931,"arkin":26932,"ĠTRAN":26933,"Ġpurs":26934,"ĠLoans":26935,"Ġsacrificed":26936,"Ġiceberg":26937,"Phill":26938,"Ġgalvan":26939,"Ġsmugglers":26940,"formation":26941,"onson":26942,"ĠVaughn":26943,"Ġdoctrine":26944,"ĠEyes":26945,"Ġunmanned":26946,"states":26947,"Ġdetermin":26948,"almost":26949,"Ġeviction":26950,"Ġtid":26951,"ARR":26952,"Ġcooks":26953,"Bad":26954,"ĠCamb":26955,"Ġlinear":26956,"229":26957,"ĠCooke":26958,"ĠPurch":26959,"join":26960,"ĠCult":26961,"ĠRefugee":26962,"Ġslamming":26963,"ĠðŁij":26964,"Ġpedal":26965,"ĠVeronica":26966,"Ġlandowners":26967,"ĠYel":26968,"ĠWorkshop":26969,"antic":26970,"Ġdysfunction":26971,"Ġ229":26972,"Ġculturally":26973,"Ġinfuri":26974,"ĠEck":26975,"sem":26976,"Ġwired":26977,"ĠWerner":26978,"lov":26979,"ĠJasper":26980,"Ġvehemently":26981,"ĠSpy":26982,"lift":26983,"ĠNab":26984,"ĠPound":26985,"ĠHanna":26986,"Ġleveled":26987,"WOOD":26988,"tm":26989,"ĠKitt":26990,"Ġconve":26991,"nat":26992,"Ġjog":26993,"IVER":26994,"Ġmemes":26995,"Ġseaw":26996,"ector":26997,"Ġsprayed":26998,"Ġvaccinated":26999,"Europe":27000,"Ġmustard":27001,"ĠMahm":27002,"Ġ214":27003,"Research":27004,"iminary":27005,"Ġconcerted":27006,"Detroit":27007,"Ġkios":27008,"Ġplummet":27009,"Ġvisuals":27010,"247":27011,"Ġ228":27012,"development":27013,"ĠPascal":27014,"acial":27015,"ĠSeasons":27016,"ĠTL":27017,"480":27018,"ĠReader":27019,"Ġexpulsion":27020,"Ġchoked":27021,"Ġdevotion":27022,"ĠSTAT":27023,"urred":27024,"Ġfascinated":27025,"Ġstealth":27026,"NL":27027,"Ġbooster":27028,"Kat":27029,"ĠPriebus":27030,"Ġaux":27031,"ĠHate":27032,"ĠThing":27033,"Ġabnormal":27034,"Ġcalmly":27035,"Ġdedicate":27036,"cause":27037,"Ġisolate":27038,"ĠPai":27039,"Ġsuspensions":27040,"Ġpoisoned":27041,"ission":27042,"Ġprohibiting":27043,"353":27044,"banks":27045,"Ġkissed":27046,"ĠBegin":27047,"atis":27048,"LI":27049,"Ġshaft":27050,"ĠGuth":27051,"ĠBoo":27052,"Ġcinnamon":27053,"Ġverbally":27054,"ĠRabbi":27055,"Ġmonsters":27056,"done":27057,"ĠClyde":27058,"Ġspar":27059,"ĠCage":27060,"ĠPersons":27061,"305":27062,"ĠMons":27063,"Ġjealous":27064,"Ġswirling":27065,"know":27066,"Ġprote":27067,"Ġcruising":27068,"Ġduly":27069,"Ġchapel":27070,"Ġgroove":27071,"bps":27072,"ĠKelvin":27073,"iom":27074,"aer":27075,"bomb":27076,"Christian":27077,"Ġgigs":
27078,"+.":27079,"ĠWei":27080,"Ġfarmland":27081,"otally":27082,"Ġequitable":27083,"ĠCBO":27084,"chool":27085,"amara":27086,"Ġwealthiest":27087,"ĠMeans":27088,"Ġ235":27089,"ĠUk":27090,"steps":27091,"raham":27092,"nerg":27093,"Ġclad":27094,"Ġsled":27095,"ĠMorrow":27096,"152":27097,"ĠRece":27098,"Ġplausible":27099,"Ġbisexual":27100,"artments":27101,"Ġveh":27102,"ĠLoft":27103,"bly":27104,"ĠCONC":27105,"automatic":27106,"Ġmasterpiece":27107,"ĠSpringer":27108,"Ġtendencies":27109,"Ro":27110,"Ġresentment":27111,"Ġadversely":27112,"Ġbandwidth":27113,"ĠDAV":27114,"Ġtun":27115,"Ġpuppies":27116,"ĠBundes":27117,"ĠHort":27118,"ĠGarfield":27119,"Ġenlist":27120,"Ġmont":27121,"gd":27122,"Ġrooting":27123,"Dream":27124,"Ġfulfillment":27125,"chal":27126,"182":27127,"prop":27128,"159":27129,"Ġcourtyard":27130,"iard":27131,"ĠSle":27132,"Ġoperative":27133,"Ġpublishes":27134,"ĠProposition":27135,"Ġcritique":27136,"Ġredist":27137,"wang":27138,"ĠNep":27139,"DD":27140,"Ġbonding":27141,"141":27142,"ĠAssault":27143,"-'":27144,"Ġlodging":27145,"itters":27146,"cigarettes":27147,"Ġ__":27148,"ĠLaf":27149,"GF":27150,"ĠAnat":27151,"ĠStephan":27152,"214":27153,"ĠKass":27154,"Ġviz":27155,"Ġpiling":27156,"Ġfugitive":27157,"ĠCurrency":27158,"ĠCrypto":27159,"Ġfaux":27160,"ĠPing":27161,"ĠLia":27162,"igl":27163,"Ġadversaries":27164,"ĠYPG":27165,"ĠComb":27166,"ĠYar":27167,"heny":27168,"Ġoverhe":27169,"Fest":27170,"emy":27171,"Ever":27172,"Ġ370":27173,"Ġsecretive":27174,"ĠSEN":27175,"ĠMEM":27176,"PRESS":27177,"ĠBirth":27178,"kos":27179,"Ġprecarious":27180,"irting":27181,"ĠUI":27182,"Ġoccupying":27183,"olute":27184,"Ġperiodic":27185,"eon":27186,"iens":27187,"ĠRH":27188,"Win":27189,"Ġplaybook":27190,"Ġexodus":27191,"ĠSkinner":27192,"Ġorderly":27193,"ĠVed":27194,"ouses":27195,"Ġescal":27196,"Ġbenign":27197,"Ġbots":27198,"ĠWhis":27199,"Ġappra":27200,"FOR":27201,"ĠChromebook":27202,"_____":27203,"990":27204,"athed":27205,"Ġspirited":27206,"illi":27207,"Ġbicycles":27208,"orse":27209,"ifestyle":27210,"orno":27211,"ĠDept":27212,"JA":27213,"Ġnausea":27214,"Ġpervasive":27215,"velop":27216,"commun":27217,"ĠUniversities":27218,"Ġremnants":27219,"Ġdisarm":27220,"ĠBoots":27221,"Ġprin":27222,"...\"":27223,"quila":27224,"Ġcautiously":27225,"uper":27226,"onto":27227,"din":27228,"Ġvelocity":27229,"Ġconspiring":27230,"ĠMX":27231,"Ġemphasizing":27232,"Ġâĸ":27233,"ĠStam":27234,"Ġspices":27235,"Ġairplanes":27236,"uty":27237,"culture":27238,"ĠPetr":27239,"Ġglor":27240,"ĠExcel":27241,"ĠSpeech":27242,"Ġharmless":27243,"ĠPend":27244,"ĠCrossing":27245,"ĠDocument":27246,"Ġramifications":27247,"ĠCroatian":27248,"ĠKiller":27249,"Ġmultim":27250,"Ġdiscontinued":27251,"Ġcherished":27252,"ĠMaker":27253,"aspers":27254,"ĠBlooming":27255,"ĠMata":27256,"offic":27257,"Ġsettlers":27258,"ĠPlenty":27259,"ĠInstitutes":27260,"ĠArpaio":27261,"Pool":27262,"ĠSubst":27263,"Ġ380":27264,"Ġdecidedly":27265,"ollah":27266,"Den":27267,"ĠJiang":27268,"ĠAmos":27269,"Grand":27270,"ĠTurns":27271,"meyer":27272,"Ġconducive":27273,"Ġpoignant":27274,"abortion":27275,"Ġnotebook":27276,"Ġshelling":27277,"common":27278,"ĠPavel":27279,"Ġhumid":27280,"Ġinappropriately":27281,"????":27282,"Ġsoar":27283,"Ġdynasty":27284,"Ġresearched":27285,"ĠYon":27286,"Ġmaple":27287,"Ġwedge":27288,"mass":27289,"ĠTM":27290,"USE":27291,"eln":27292,"Ġgloss":27293,"rigan":27294,"steen":27295,"ĠDeV":27296,"Ġdebacle":27297,"Christmas":27298,"Ġtweaks":27299,"grab":27300,"Ġprofoundly":27301,"Ġcampaigner":27302,"ĠSeal":27303,"Ġiteration":27304,"Ġsigh":27305,"Ġunfounded":27306,"Ġframing":27307,"Ġrecognizable":27308,"Ġseizing":
27309,"legal":27310,"Ġproportions":27311,"omers":27312,"rek":27313,"Ġscreenshot":27314,"itsu":27315,"ĠOG":27316,"ĠYing":27317,"ĠMississ":27318,"295":27319,"Ġlandsl":27320,"Ġpsychiatrist":27321,"sov":27322,"arine":27323,"Ju":27324,"Ġflo":27325,"apple":27326,"hof":27327,"wig":27328,"ĠENT":27329,"Ġenthusiast":27330,"Such":27331,"ĠArtificial":27332,"happy":27333,"oton":27334,"ĠFram":27335,"ĠRemove":27336,"Ġsmear":27337,"Ġjer":27338,"Ġtopp":27339,"Ġimbalance":27340,"ĠWords":27341,"Ġcoffers":27342,"olina":27343,"Ġrigged":27344,"uction":27345,"idding":27346,"Ġdispensaries":27347,"Ġdermat":27348,"Ġshutter":27349,"idental":27350,"Ġcontinu":27351,"Ġhumility":27352,"Ġbulbs":27353,"Ġ207":27354,"lass":27355,"ĠBeirut":27356,"ĠUlt":27357,"urry":27358,"NEWS":27359,"Ġfeminine":27360,"Ġsimulated":27361,"Ġcharger":27362,"mom":27363,"ĠCreed":27364,"Ġwolves":27365,"essions":27366,"created":27367,"ifiers":27368,"Ġdissemin":27369,"ĠDarling":27370,"umann":27371,"Ġmarrying":27372,"Ġshred":27373,"avin":27374,"Ġbudgetary":27375,"Ġmedicinal":27376,"ulin":27377,"seys":27378,"agues":27379,"Ġextracted":27380,"ĠFlower":27381,"Ġcontinents":27382,"ĠWish":27383,"Ġdivides":27384,"ĠDing":27385,"Ġinsulation":27386,"respect":27387,"ĠABS":27388,"Ġreconcile":27389,"keep":27390,"ILD":27391,"Ġgenome":27392,"Ġ410":27393,"ĠSweep":27394,"Ġharass":27395,"Ġfrantic":27396,"ĠEE":27397,"dad":27398,"Ġaperture":27399,"rought":27400,"Ġhugs":27401,"Ġdrying":27402,"Ġoverrun":27403,"Space":27404,"Ġperiodically":27405,"Ġbrightness":27406,"atched":27407,"kee":27408,"ĠITS":27409,"ĠSpokane":27410,"ĠSeaf":27411,"Ġdesks":27412,"ĠEisen":27413,"ĠOPS":27414,"Ġcider":27415,"Ġacceler":27416,"ĠAthlet":27417,"2008":27418,"ĠGuid":27419,"ĠManip":27420,"Ġmould":27421,"Ġmisguided":27422,"Ġbrow":27423,"Ġmanagerial":27424,"Ġhugged":27425,"Ġfurnish":27426,"ĠHarmony":27427,"ĠHebrew":27428,"Ġtyph":27429,"Ġdecreases":27430,"Ġimpetus":27431,"Ġcontagious":27432,"Ġunch":27433,"209":27434,"Ġswell":27435,"ĠHuffington":27436,"Ġpubs":27437,"Ġadequ":27438,"amoto":27439,"rir":27440,"Ġpristine":27441,"Ġanx":27442,"ĠSecure":27443,"Ġenrichment":27444,"ĠVAL":27445,"Ġsummed":27446,"Ġconfidently":27447,"ĠProfit":27448,"ĠFrog":27449,"ĠLena":27450,"ĠFUN":27451,"Ġbruises":27452,"Ġuproar":27453,"coll":27454,"ĠImpro":27455,"Ġflair":27456,"146":27457,"ĠBrend":27458,"Ġ166":27459,"Ġenhances":27460,"ĠDent":27461,"Ġdegener":27462,"Ġproponents":27463,"ĠInspired":27464,"Ġramps":27465,"Ġwisely":27466,"Western":27467,"Ġtart":27468,"Ġsteered":27469,"Ġtreason":27470,"dropping":27471,"Ġtransc":27472,"ĠScarlett":27473,"ĠEzekiel":27474,"Ġpivot":27475,"esame":27476,"Show":27477,"Ġdiscontent":27478,"ĠJudith":27479,"ĠPutting":27480,"Ġblessings":27481,"Ġhardcore":27482,"Ġtray":27483,"Ġdiscern":27484,"oley":27485,"ouk":27486,"Ġwil":27487,"Ġintolerance":27488,"157":27489,"ĠRelative":27490,"ĠLynd":27491,"Ġwhistleblower":27492,"Ġincon":27493,"ĠTao":27494,"Ġindefinite":27495,"Ġguardians":27496,"Ġagon":27497,"ĠInstruments":27498,"Ġexistential":27499,"AAF":27500,"vind":27501,"Ġbrazen":27502,"condition":27503,"Ġratified":27504,"fam":27505,"ĠHin":27506,"ĠMichaels":27507,"204":27508,"ĠKats":27509,"ITS":27510,"ISON":27511,"prone":27512,"Ġboiling":27513,"Ġprolong":27514,"Ġnoticing":27515,"resident":27516,"brance":27517,"ĠFolk":27518,"Ġdesserts":27519,"uton":27520,"Web":27521,"ĠLongh":27522,"ĠReef":27523,"Going":27524,"ĠCarb":27525,"Sur":27526,"complete":27527,"ĠSloan":27528,"ĠClubs":27529,"ĠSadd":27530,"Ġshrugged":27531,"Ġedible":27532,"ĠTyp":27533,"thal":27534,"ĠRocks":27535,"ĠClive":27536,"Ġkidding":27537,"ĠCrom":27
538,"ĠTurks":27539,"ĠWak":27540,"Ġeyewitness":27541,"ĠHass":27542,"collar":27543,"Ġsucceeding":27544,"Ġinsert":27545,"Ġ224":27546,"ĠBret":27547,"Ġneurological":27548,"Ġrewrite":27549,"imil":27550,"ultimate":27551,"ĠJeremiah":27552,"Ġliaison":27553,"Ġpedd":27554,"direct":27555,"ĠYi":27556,"ĠMAD":27557,"ĠOrion":27558,"oyd":27559,"ĠLOC":27560,"release":27561,"Ġinvestigates":27562,"ĠApache":27563,"û":27564,"ĠVend":27565,"Ġcynical":27566,"ĠHelm":27567,"ĠMovies":27568,"tops":27569,"Ġsinister":27570,"Ġunparalleled":27571,"Ġspikes":27572,"Ġoverlap":27573,"enstein":27574,"Ġhypocrisy":27575,"Plus":27576,"Ġexpansions":27577,"Ġvow":27578,"Ġdetonated":27579,"Ġfellowship":27580,"Ġsolicitor":27581,"ĠNewtown":27582,"mony":27583,"ĠLod":27584,"ĠDevelopers":27585,"ateg":27586,"ibus":27587,"Ġcrumbling":27588,"ĠWein":27589,"ĠKlan":27590,"gio":27591,"ĠPhys":27592,"ĠAntarctica":27593,"368":27594,"Ġseam":27595,"Ġautomobiles":27596,"ĠTEAM":27597,"bern":27598,"Ġmanic":27599,"Ġsanct":27600,"Ġequals":27601,"Est":27602,"Ġincentiv":27603,"ĠHawking":27604,"nin":27605,"Ġresonate":27606,"bid":27607,"Ġtelescope":27608,"endon":27609,"ĠVacc":27610,"Ġregretted":27611,"Ġ1300":27612,"ĠForestry":27613,"BOOK":27614,"Ġgroundwork":27615,"Ġessays":27616,"ĠIndo":27617,"Pierre":27618,"ĠChau":27619,"Ġapologies":27620,"killers":27621,"ĠMoroccan":27622,"0001":27623,"336":27624,"Ra":27625,"Ġparcels":27626,"Ġleaned":27627,"Ġthankfully":27628,"ĠSplit":27629,"Ġlobbied":27630,"ĠDegree":27631,"Ġrisking":27632,"assy":27633,"Ġsupplemental":27634,"little":27635,"Ġeclectic":27636,"Ġ206":27637,"ealing":27638,"206":27639,"Ġrepo":27640,"Ġhose":27641,"ayn":27642,"lux":27643,"Ġbeliever":27644,"')":27645,"ĠHide":27646,"vance":27647,"ĠEinstein":27648,"Ġdepos":27649,"Ġfray":27650,"Ġki":27651,"Ġinternship":27652,"ĠHou":27653,"Vis":27654,"Ġstare":27655,"ĠBreed":27656,"option":27657,"Ġvisionary":27658,"Ġmins":27659,"Ġbitten":27660,"ancies":27661,"ĠShake":27662,"Ġtemplate":27663,"Ġliner":27664,"Ġmuster":27665,"appro":27666,"ĠMubarak":27667,"esty":27668,"mong":27669,"actory":27670,"Ġheadphone":27671,"ĠPrec":27672,"Ġwaive":27673,"Ron":27674,"ĠHearing":27675,"Ġimperfect":27676,"Ġsealing":27677,"Ġlocating":27678,"Ġculminated":27679,"chio":27680,"channel":27681,"lust":27682,"ĠLowell":27683,"woods":27684,"Ġsoak":27685,"Ġforbidden":27686,"Ġdetached":27687,"unct":27688,"ĠHunger":27689,"ĠPatient":27690,"ĠPolo":27691,"Saharan":27692,"Jon":27693,"athered":27694,"ĠSignal":27695,"Six":27696,"Ġstatistically":27697,"ITH":27698,"artment":27699,"ĠCU":27700,"Ġhates":27701,"qual":27702,"Ġcapitalist":27703,"ATES":27704,"ĠDesc":27705,"Ġhandcuffed":27706,"Ġindulge":27707,"ĠReligious":27708,"German":27709,"housing":27710,"Ġdismantling":27711,"Ġconventions":27712,"dain":27713,"chairs":27714,"Ġloos":27715,"Ġknowingly":27716,"Var":27717,"Ġhusbands":27718,"eez":27719,"asion":27720,"ĠIssa":27721,"Ġswollen":27722,"Ġ1946":27723,"Ġheadlined":27724,"Chelsea":27725,"Ġignorant":27726,"Ġperipheral":27727,"Note":27728,"Ġaxe":27729,"Ġnicotine":27730,"ĠSanctuary":27731,"Ġ1917":27732,"Ġwithdrawals":27733,"uits":27734,"Hot":27735,"Ġreimburse":27736,"probably":27737,"ĠAdapt":27738,"industrial":27739,"answer":27740,"orus":27741,"ĠMell":27742,"Talk":27743,"Ġcontemplating":27744,"omas":27745,"Ġtaxis":27746,"Ġencompasses":27747,"rations":27748,"ĠLatvia":27749,"Ġhumiliating":27750,"Ġloft":27751,"tight":27752,"rium":27753,"Ġlogin":27754,"ĠBulletin":27755,"Ġturtles":27756,"EAR":27757,"349":27758,"Radio":27759,"ĠBord":27760,"151":27761,"kk":27762,"pocket":27763,"Ġdove":27764,"348":27765,"Ġtemptation":27766,"
ĠCoy":27767,"those":27768,"ĠDest":27769,"ishly":27770,"rn":27771,"Ġmammals":27772,"ĠTub":27773,"arial":27774,"ĠPersian":27775,"Ġdaddy":27776,"Zen":27777,"Ġps":27778,"Ġ]":27779,"Field":27780,"adiq":27781,"Ġmeaningless":27782,"Ġprimer":27783,"Ġ1942":27784,"Ġ!":27785,"625":27786,"Ġfashionable":27787,"ĠTheft":27788,"ĠHAVE":27789,"christ":27790,"Ġperil":27791,"Ġrepealing":27792,"Ġbuff":27793,"Ġodor":27794,"Ġstalking":27795,"ĠDems":27796,"iences":27797,"Ġunilaterally":27798,"odies":27799,"ĠQuite":27800,"Ġbloodshed":27801,"Ġinfect":27802,"Ġreminders":27803,"Ġchop":27804,"Ġevapor":27805,"877":27806,"Ġhorrified":27807,"ĠFruit":27808,"rams":27809,"Ġinsecure":27810,"cester":27811,"ĠNationwide":27812,"Ġmocking":27813,"Ret":27814,"Ġcomplying":27815,"sav":27816,"Ġali":27817,"Family":27818,"Ĩ":27819,"Ġdishonest":27820,"Ġincorrectly":27821,"LOAD":27822,"ĠGand":27823,"ourcing":27824,"obby":27825,"ĠPetersen":27826,"Something":27827,"Ġravaged":27828,"limited":27829,"Ġrituals":27830,"ĠKnowledge":27831,"ĠUtility":27832,"Ġdoom":27833,"Ġsheds":27834,"ĠGael":27835,"ĠMillennials":27836,"ĠMonthly":27837,"Ġdomination":27838,"Ġrapport":27839,"spot":27840,"ĠPrest":27841,"ĠHA":27842,"ushes":27843,"Ġtact":27844,"Richard":27845,"Ġgritty":27846,"Does":27847,"ĠTNT":27848,"Ġdownfall":27849,"Wood":27850,"ĠPrediction":27851,"ĠPour":27852,"ĠFraud":27853,"ĠSyndrome":27854,"166":27855,"Ġliteral":27856,"Ġaddict":27857,"ĠLoud":27858,"hens":27859,"ĠAccounts":27860,"distance":27861,"Ġclassmate":27862,"Ġsalv":27863,"Ġunlucky":27864,"Ġpartying":27865,"ĠKou":27866,"ĠSNAP":27867,"%-":27868,"Ġdelegate":27869,"Ġstrikers":27870,"ĠSlate":27871,"Ġarticulate":27872,"390":27873,"Ġinqu":27874,"Ġdiscredit":27875,"ĠPriv":27876,"ploy":27877,"ĠMarketplace":27878,"ĠTune":27879,"visor":27880,"Ġwrestle":27881,"Ġkindly":27882,"ĠCollect":27883,"Ġcirc":27884,"ĠRemain":27885,"Ġ192":27886,"contin":27887,"Ġ325":27888,"Ġsevered":27889,"isations":27890,"Ġmuddy":27891,"Ġtaxing":27892,"ĠRepresent":27893,"ĠSty":27894,"rology":27895,"ĠJudges":27896,"ĠBronze":27897,"ĠApplic":27898,"Ġarrow":27899,"consuming":27900,"ĠFeaturing":27901,"Ġspies":27902,"Ġnoises":27903,"ĠColony":27904,"lost":27905,"Ġopp":27906,"Ġdeem":27907,"ĠGarc":27908,"icent":27909,"ptroller":27910,"liest":27911,"Ġoutward":27912,"ĠUser":27913,"Ġintimidate":27914,"156":27915,"Ġjab":27916,"ANGE":27917,"Jay":27918,"ĠPoverty":27919,"ACA":27920,"Ġrife":27921,"Ġfaint":27922,"ĠAcceler":27923,"tall":27924,"ĠUNITED":27925,"ĠFighter":27926,"ĠGilmore":27927,"Ġsod":27928,"amura":27929,"Ġpredictive":27930,"Ġpolish":27931,"ĠDD":27932,"Ġfabricated":27933,"ĠDag":27934,"Ġfatty":27935,"Ġplague":27936,"Ġexhib":27937,"ĠAdvent":27938,"Ġ1941":27939,"ERSON":27940,"initely":27941,"Ġloneliness":27942,"ĠEquality":27943,"Ġuntrue":27944,"Ġonlook":27945,"Ġfragmented":27946,"ruce":27947,"Ġdistrust":27948,"Ġscal":27949,"ĠCors":27950,"Ġrobbing":27951,"cultural":27952,"clusion":27953,"ĠObi":27954,"sels":27955,"ĠEvidence":27956,"ĠSac":27957,"Ġfragments":27958,"Ġflipping":27959,"ĠRabbit":27960,"Ġdisproportionate":27961,"ĠCreat":27962,"Ġlabeling":27963,"ĠGri":27964,"Ġ161":27965,"ĠEditors":27966,"holm":27967,"adr":27968,"Ĭ":27969,"tailed":27970,"Ġrenters":27971,"Ġnoodles":27972,"Ġcompetence":27973,"Ġpanc":27974,"uration":27975,"Ġacids":27976,"Ġconfid":27977,"rival":27978,"AAA":27979,"kson":27980,"Ġrecreate":27981,"153":27982,"Ġ164":27983,"ĠOlympia":27984,"ĠUnlimited":27985,"ĠShock":27986,"ĠTeaching":27987,"ĠHouses":27988,"resso":27989,"ĠMaw":27990,"Ġreplen":27991,"Ġprotestors":27992,"bey":27993,"Ġsurve":27994,"Ġemphasizes":27995,"223":279
96,"ĠEsther":27997,"ĠNikol":27998,"Ġprosecutions":27999,"ĠFreed":28000,"Ġposs":28001,"OTE":28002,"ĠPrayer":28003,"Ġsquarely":28004,"Ġtir":28005,"adv":28006,"Ġbogus":28007,"Ġwrongful":28008,"Ġembell":28009,"Ġseldom":28010,"Ġpossesses":28011,"Er":28012,"ĠAlternatively":28013,"Ġinstituted":28014,"rr":28015,"Ġvocational":28016,"eval":28017,"ĠComics":28018,"Ġstumbling":28019,"335":28020,"Ġdragon":28021,"vine":28022,"services":28023,"Ġcrit":28024,"irens":28025,"Ġlayered":28026,"orb":28027,"Ġdominates":28028,"ĠMarx":28029,"period":28030,"avering":28031,"Ġbrigade":28032,"Ġchem":28033,"ĠEvolution":28034,"ĠSuk":28035,"Ġ209":28036,"ĠMalk":28037,"Ġtallest":28038,"recogn":28039,"ĠCraw":28040,"Ġell":28041,"ĠCaesar":28042,"php":28043,"ĠSurvivors":28044,"sd":28045,"itsch":28046,"ambo":28047,"Ġashore":28048,"acular":28049,"rost":28050,"Ġmurderer":28051,"Ġcasts":28052,"ĠEconomist":28053,"ĠWeapons":28054,"Ġnostalgic":28055,"Skip":28056,"REAM":28057,"Pa":28058,"Ġjournals":28059,"ĠSitting":28060,"Union":28061,"Att":28062,"ĠMaxim":28063,"Ġpurportedly":28064,"Ġrespecting":28065,"ĠMAX":28066,"seed":28067,"Ġjuicy":28068,"ĠGallup":28069,"Ġmileage":28070,"adier":28071,"Ġbod":28072,"DER":28073,"Ġsummers":28074,"icult":28075,"ipl":28076,"ĠDeng":28077,"Ġsmells":28078,"Ġivory":28079,"Ġ255":28080,"Id":28081,"DEN":28082,"Ġ159":28083,"Due":28084,"ĠLighting":28085,"ĠSurely":28086,"Ġsund":28087,"ĠKessler":28088,"immigrant":28089,"Ġtragedies":28090,"ĠOxy":28091,"ĠFixed":28092,"ĠBalk":28093,"Ġoriented":28094,"pher":28095,"Ġkitchens":28096,"Ġhips":28097,"Ġtweak":28098,"Ġtuna":28099,"ĠCla":28100,"Ġdislike":28101,"ussy":28102,"Ġoutnumbered":28103,"Ġplumbing":28104,"Ġcogn":28105,"ĠThrow":28106,"ĠTER":28107,"urally":28108,"ĠMurd":28109,"Ġcreamy":28110,"Ġresiding":28111,"otics":28112,"Ġfingerprints":28113,"!,":28114,"Ġpaused":28115,"ĠMilo":28116,"Ġhomosexuality":28117,"Ġresponsibly":28118,"iop":28119,"UCT":28120,"Ġsucceeds":28121,"ĠCRE":28122,"ĠThatcher":28123,"Ġcurrents":28124,"Ġarises":28125,"Ġwaterproof":28126,"Ġamp":28127,"ĠClaims":28128,"177":28129,"Ġsubpoen":28130,"Ġvig":28131,"ĠNeuro":28132,"Ġblur":28133,"ĠPaint":28134,"campus":28135,"Ġtoughness":28136,"ĠButton":28137,"Neal":28138,"ĠDEN":28139,"ĠNir":28140,"ĠAxel":28141,"EEP":28142,"Ġpint":28143,"Ġagile":28144,"odor":28145,"Ġessentials":28146,"ĠMov":28147,"ĠVenezuel":28148,"Ġexchanging":28149,"ĠNegative":28150,"Mil":28151,"Key":28152,"Ġbuzzing":28153,"ĠStew":28154,"Ġrebuke":28155,"Ġdepl":28156,"ĠKoz":28157,"Ġ163":28158,"Ġshines":28159,"NZ":28160,"Ġcarnage":28161,"cases":28162,"Ġwarmed":28163,"ĠGreenwich":28164,"College":28165,"Ġneedy":28166,"301":28167,"ĠMü":28168,"culation":28169,"Ġ440":28170,"425":28171,"atories":28172,"Ġsatisfactory":28173,"ĠFib":28174,"ĠElim":28175,"developed":28176,"Ġvacations":28177,"Ġpeculiar":28178,"Ġvets":28179,"onest":28180,"ĠPug":28181,"Ġlifestyles":28182,"zzi":28183,"Ġprovoke":28184,"bah":28185,"arger":28186,"ĠVirt":28187,"Sales":28188,"annel":28189,"ĠMeth":28190,"ivating":28191,"Ġrevoke":28192,"ĠAgenda":28193,"ĠIch":28194,"Ġsensit":28195,"ĠAzerbai":28196,"ĠBombay":28197,"Ġuncon":28198,"river":28199,"Ġapr":28200,"actic":28201,"ĠSubaru":28202,"Ġbanquet":28203,"Ġcontradict":28204,"tek":28205,"Football":28206,"igent":28207,"Ġreintrodu":28208,"ĠInsight":28209,"Ġsystematically":28210,"Ġboun":28211,"ĠFishing":28212,"Ġstri":28213,"ĠOB":28214,"Ġstair":28215,"Wall":28216,"ĠAllow":28217,"Ġcaramel":28218,"169":28219,"Ġcafes":28220,"Ġcalcium":28221,"Ġ169":28222,"Ġportraying":28223,"Ġdiscriminate":28224,"Ġunrestricted":28225,"Ġmant":28226,"Ġscarcity":28227,"Ġfe
minism":28228,"ĠJJ":28229,"ĠOversight":28230,"ĠCue":28231,"Ġinexperienced":28232,"Ġdrafts":28233,"Ġ1939":28234,"nm":28235,"forest":28236,"ĠHonour":28237,"Ġceramic":28238,"Ġdownstairs":28239,"Ġboon":28240,"Ġmorality":28241,"Ġhorrifying":28242,"Rad":28243,"justice":28244,"Ġmosques":28245,"Ġcurfew":28246,"Ġsurrogate":28247,"Ġreimb":28248,"enth":28249,"pressure":28250,"beam":28251,"Ġwhirlwind":28252,"ĠRecession":28253,"ĠTours":28254,"Ġclusters":28255,"ĠQuant":28256,"Jonathan":28257,"project":28258,"Ġ777":28259,"ĠNOAA":28260,"abis":28261,"Ġdeficiencies":28262,"Ġsuicides":28263,"Ġfoothold":28264,"ĠYah":28265,"imeter":28266,"URN":28267,"Ġcultivate":28268,"Ġnoisy":28269,"Ġ1951":28270,"Ġpressuring":28271,"ĠDeals":28272,"ĠProphet":28273,"ĠWikipedia":28274,"INESS":28275,"ĠShine":28276,"ĠCalled":28277,"ĠSole":28278,"ĠZhou":28279,"Ġasphalt":28280,"armac":28281,"ĠScorp":28282,"ĠUnknown":28283,"ĠPAT":28284,"Heart":28285,"Ġguessed":28286,"Ġsushi":28287,"Ġheartbeat":28288,"Ġconcent":28289,"eret":28290,"plin":28291,"Ġweeds":28292,"Ġbombed":28293,"ĠTerrorism":28294,"Rich":28295,"Ġblades":28296,"Ġhaunt":28297,"Ġstorefront":28298,"Ġthwarted":28299,"access":28300,"ĠLydia":28301,"LINE":28302,"Ġpregnancies":28303,"Ġripping":28304,"ĠBelieve":28305,"spoken":28306,"inian":28307,"sed":28308,"ĠBrass":28309,"econom":28310,"current":28311,"Ġvoc":28312,"Ġmodeled":28313,"Ġpeppers":28314,"otech":28315,"ĠOption":28316,"Connell":28317,"isel":28318,"Ġcompel":28319,"Ġjuveniles":28320,"ĠNET":28321,"ĠEXP":28322,"Ġparadigm":28323,"Des":28324,"Ġ204":28325,"employed":28326,"Ġdurability":28327,"Ġ245":28328,"Ġbillionaires":28329,"violent":28330,"ĠCooperative":28331,"TOP":28332,"ĠGarry":28333,"ĠSoldiers":28334,"Ġdared":28335,"Ġvoucher":28336,"Ġblends":28337,"gue":28338,"Ġadventurous":28339,"Ġorganisms":28340,"Ġgaze":28341,"Ġcrap":28342,"Coach":28343,"omon":28344,"ĠWheels":28345,"ĠGrayson":28346,"Ġrecy":28347,"grave":28348,"Ġallergic":28349,"Ġreef":28350,"Ġbeginnings":28351,"ĠRuff":28352,"Ġclout":28353,"structed":28354,"315":28355,"ĠGeorgian":28356,"say":28357,"Ġsprings":28358,"ĠAsus":28359,"Ġrepaid":28360,"ĠGuys":28361,"ticket":28362,"Ġunb":28363,"ĠCertificate":28364,"ĠSTORY":28365,"cin":28366,"Ġpassions":28367,"Ġmediocre":28368,"Ġlackluster":28369,"vernight":28370,"kids":28371,"ĠWife":28372,"politics":28373,"ĠHimal":28374,"oddy":28375,"ensus":28376,"ĠGustav":28377,"binding":28378,"ĠIndividuals":28379,"Ġmaize":28380,"Ġhoop":28381,"ĠChanging":28382,"Ġlessen":28383,"Ġarranging":28384,"ĠFukushima":28385,"ĠTrying":28386,"ĠMage":28387,"Ġskeleton":28388,"ĠTec":28389,"289":28390,"Ġrecl":28391,"ĠFIL":28392,"Gs":28393,"ĠOdyssey":28394,"ĠProcessing":28395,"ilion":28396,"Ġsubsidized":28397,"Ġabdomen":28398,"Ġanalyse":28399,"music":28400,"clean":28401,"Ġunfinished":28402,"Ġdownloads":28403,"Ġmorally":28404,"Ġ218":28405,"Ġtrib":28406,"Keep":28407,"ĠSER":28408,"FY":28409,"Ġaust":28410,"Ġdiscovers":28411,"ĠGROUP":28412,"ĠMachines":28413,"Ġeroded":28414,"Ġominous":28415,"Ġbrightly":28416,"IME":28417,"Ġwicked":28418,"ĠTrou":28419,"Ġvisions":28420,"Kay":28421,"reported":28422,"Ġbog":28423,"ĠQuin":28424,"ĠSigma":28425,"urned":28426,"ixon":28427,"Ġharming":28428,"Ġcheckout":28429,"inet":28430,"much":28431,"Ġcherish":28432,"ĠByrd":28433,"ĠSamson":28434,"WP":28435,"orders":28436,"boa":28437,"Ġbron":28438,"oki":28439,"ĠRR":28440,"Ġsuitcase":28441,"Ġfeathers":28442,"ĠChristy":28443,"Islamic":28444,"Ġamusement":28445,"ĠISS":28446,"intensive":28447,"Qaida":28448,"Ġneurons":28449,"Ġwagon":28450,"ĠTek":28451,"Ġdolls":28452,"ĠShoot":28453,"Ġunderestimate":28454,"Ġ
streamlined":28455,"Ġfractures":28456,"Ġcathedral":28457,"Ġeliminates":28458,"helle":28459,"Ġcitrus":28460,"risis":28461,"Ġimpecc":28462,"istries":28463,"ĠHog":28464,"vote":28465,"pas":28466,"Ġassign":28467,"ĠSongs":28468,"ĠMiracle":28469,"kas":28470,"zynski":28471,"Ġcrane":28472,"Ġadulthood":28473,"ĠBenefit":28474,"ĠGrimes":28475,"Ġpayday":28476,"ablished":28477,"Ġcenterpiece":28478,"Ġhassle":28479,"ĠAppalachian":28480,"follow":28481,"Ġ290":28482,"ĠRL":28483,"ĠDoe":28484,"Ġacclaim":28485,"Ġlevied":28486,"Ġtossing":28487,"Ġcarrots":28488,"ĠDarius":28489,"161":28490,"Ġoffspring":28491,"ĠJury":28492,"ĠTPP":28493,"CAP":28494,"Ġenvironmentalists":28495,"Ġrays":28496,"267":28497,"Ser":28498,"Ġcaptivity":28499,"Ġappellate":28500,"ĠElectricity":28501,"ĠEnough":28502,"232":28503,"Ġfisher":28504,"Ġbrilliance":28505,"Ġpraises":28506,"aunch":28507,"Ġsolicitation":28508,"Ġadolescent":28509,"Ġinferior":28510,"checks":28511,"Set":28512,"Ġmutations":28513,"ĠLatinos":28514,"ĠLicense":28515,"ĠAme":28516,"hirt":28517,"ĠChun":28518,"Ġdeeds":28519,"ldon":28520,"Ġmammoth":28521,"Ġturtle":28522,"rule":28523,"Ken":28524,"Ġvoyage":28525,"gram":28526,"Ġconquer":28527,"Ġretaliate":28528,"ĠPJ":28529,"ĠViking":28530,"Ġsafegu":28531,"ordinary":28532,"ĠArbit":28533,"ĠDigest":28534,"Die":28535,"Ġbureaucratic":28536,"Ġhonorable":28537,"Ġcafeteria":28538,"ĠRAF":28539,"ĠPlaces":28540,"ĠKlu":28541,"Cam":28542,"ĠBiology":28543,"ĠCycling":28544,"imore":28545,"Ġstripping":28546,"Ġwarriors":28547,"Ġbursting":28548,"Ġlapse":28549,"Ġversa":28550,"Ġclicked":28551,"ogh":28552,"Ġ\"âĢ¦":28553,"Ġdiligently":28554,"ĠMiy":28555,"ĠCorpus":28556,"Ġredef":28557,"Ġ176":28558,"ĠInstrument":28559,"ĠOECD":28560,"Ġstro":28561,"Ġmicrowave":28562,"Santa":28563,"Ġpars":28564,"Social":28565,"iffe":28566,"itability":28567,"Equ":28568,"Ġnud":28569,"legged":28570,"ĠTud":28571,"lav":28572,"Ġinterpreter":28573,"alcohol":28574,"Ġimposition":28575,"Ġdwelling":28576,"Ġ1400":28577,"].\"":28578,"ĠIw":28579,"RM":28580,"Ġ555":28581,"Ġparalyzed":28582,"mind":28583,"rans":28584,"adin":28585,"French":28586,"Ġliar":28587,"Represent":28588,"Ġstrapped":28589,"orate":28590,"Ġrigging":28591,"Ġinterrog":28592,"Ġsparse":28593,"ento":28594,"ĠThem":28595,"Ġbaseless":28596,"Ġbuildup":28597,"Ġundecided":28598,"isms":28599,"Ġabduct":28600,"Ġflowed":28601,"Ġprestige":28602,"Ġhacks":28603,"Ġpanicked":28604,"Cast":28605,"ĠKrish":28606,"umat":28607,"Ġantique":28608,"Ġbitters":28609,"Ġentitlement":28610,"Ġstandby":28611,"Ten":28612,"said":28613,"ĠConditions":28614,"events":28615,"Ġobey":28616,"Ġshortest":28617,"etting":28618,"Ġconcentrating":28619,"ĠNeeds":28620,"234":28621,"Ġintrigued":28622,"enting":28623,"ĠXen":28624,"ĠAlger":28625,"seekers":28626,"anish":28627,"Ġ172":28628,"âĢij":28629,"Ġsilicon":28630,"Ġstandardized":28631,"ĠFountain":28632,"essel":28633,"Ġapproves":28634,"Ġsucked":28635,"gone":28636,"ĠBriggs":28637,"brother":28638,"Ġartisan":28639,"ĠContinuing":28640,"vir":28641,"Ġsubmarines":28642,"ĠInk":28643,"program":28644,"ĠNexus":28645,"ĠCoco":28646,"Ġconceptual":28647,"Ġmatt":28648,"aughters":28649,"Ġbaths":28650,"Ġbeaut":28651,"ĠEmerald":28652,"ĠParties":28653,"248":28654,"completely":28655,"esan":28656,"Ġdiarrhea":28657,"Ġ1100":28658,"borg":28659,"ĠBroken":28660,"Ġreiterate":28661,"Ġsorting":28662,"ONS":28663,"Ġ177":28664,"Ġadmin":28665,"ĠMandatory":28666,"Ġsymptom":28667,"Ġpaced":28668,"Remember":28669,"Ġabdominal":28670,"Ġswapped":28671,"Ġtransitions":28672,"IFA":28673,"pretty":28674,"ĠJC":28675,"Ġallotted":28676,"ĠShows":28677,"Arthur":28678,"Ġsoften":28679,"doze
n":28680,"Mah":28681,"Ġextinguished":28682,"Ġreelection":28683,"Ġdeployments":28684,"Ġsturdy":28685,"Ġdownright":28686,"Ġjams":28687,"ĠOptim":28688,"Ġhumiliation":28689,"cd":28690,"Ġbunk":28691,"sie":28692,"NAT":28693,"ilies":28694,"Ġimplying":28695,"Ġ<":28696,"Ġhomepage":28697,"242":28698,"Ġey":28699,"Ġdict":28700,"Ġslender":28701,"Ġforehead":28702,"ĠCecil":28703,"Ġshrunk":28704,"ĠExit":28705,"Ġexpressly":28706,"Ġseals":28707,"ĠThiel":28708,"umni":28709,"Ġdamning":28710,"ĠVS":28711,"ulum":28712,"BBC":28713,"URES":28714,"Ġinhal":28715,"Ġfont":28716,"Ġworkplaces":28717,"ĠPUBLIC":28718,"ĠHorror":28719,"Bs":28720,"arta":28721,"ĠBread":28722,"Ġstret":28723,"Ġethos":28724,"Ġstabilized":28725,"Ġconvers":28726,"ĠInqu":28727,"Ġjudgments":28728,"ĠContemporary":28729,"221":28730,"Ġzombie":28731,"VD":28732,"Ġmisunderstanding":28733,"Ġspam":28734,"ĠPapers":28735,"Ġcrocod":28736,"ENA":28737,"ĠJuven":28738,"ĠAbram":28739,"Ġbursts":28740,"atto":28741,"Ġturbulence":28742,"tty":28743,"sexual":28744,"Ġwaning":28745,"community":28746,"Government":28747,"Ġtranspl":28748,"??":28749,"Getting":28750,"ĠRare":28751,"prime":28752,"Ġlooting":28753,"Ġvalidate":28754,"ĠCreating":28755,"ĠCorruption":28756,"Ġspit":28757,"ĠFavorite":28758,"Kar":28759,"Ġadaptive":28760,"ĠART":28761,"Ġtorso":28762,"ĠIdent":28763,"Ġsubdivision":28764,"azo":28765,"Ġconsequently":28766,"Ġrotate":28767,"ĠWit":28768,"Ġestab":28769,"managed":28770,"ĠBound":28771,"Ġskim":28772,"198":28773,"ĠCorona":28774,"ĠâĿ":28775,"Ġwording":28776,"buck":28777,"iph":28778,"patrick":28779,"Help":28780,"flying":28781,"Ġracer":28782,"Ġfisherman":28783,"____":28784,"ackers":28785,"Ġpersisted":28786,"Ġmyths":28787,"Ġgarn":28788,"ologue":28789,"ĠApprentice":28790,"Ġhereby":28791,"Ġvulgar":28792,"ĠGinger":28793,"Ġtrait":28794,"ĠIdea":28795,"Ġfigur":28796,"ĠSchwarzenegger":28797,"ĠSafari":28798,"178":28799,"ĠAsians":28800,"775":28801,"ĠTriangle":28802,"Ġdemons":28803,"ĠOv":28804,"Ġanime":28805,"Broad":28806,"Ġmolecule":28807,"Ġdeposition":28808,"Ġbiodiversity":28809,"modern":28810,"Ġwallets":28811,"NH":28812,"planes":28813,"rats":28814,"ĠSeed":28815,"Ġ174":28816,"umed":28817,"Ġtouting":28818,"gre":28819,"ĠSEAL":28820,"Ġperpetrator":28821,"ĠGerrard":28822,"Ġallocations":28823,"Ġworsh":28824,"payment":28825,"bett":28826,"ĠIssues":28827,"ennis":28828,"eering":28829,"ĠMV":28830,"yi":28831,"hak":28832,"Ġ167":28833,"Ġorchestr":28834,"224":28835,"Ġsup":28836,"Ġleukemia":28837,"osures":28838,"575":28839,"Ġnoticeably":28840,"Ġparamilitary":28841,"ĠTHERE":28842,"Ġwaged":28843,"igrated":28844,"Ġdocumentaries":28845,"Ġsenseless":28846,"Ġbark":28847,"Ġgenetics":28848,"ĠAlbania":28849,"ĠCrypt":28850,"ĠSEO":28851,"Ġnightly":28852,"Ġfaults":28853,"279":28854,"ĠFerdinand":28855,"ĠSylv":28856,"Ġcalam":28857,"ĠMuller":28858,"ĠSpielberg":28859,"Boy":28860,"ĠUrs":28861,"Ġrug":28862,"Ġcolonies":28863,"ĠFunk":28864,"Ġlyric":28865,"ĠATT":28866,"anni":28867,"ĠNB":28868,"Ġthorn":28869,"Ġpertinent":28870,"188":28871,"Ġpartic":28872,"Head":28873,"Pad":28874,"Palestinian":28875,"ĠBarg":28876,"anical":28877,"beaut":28878,"onge":28879,"Ġgigantic":28880,"travel":28881,"Ġdownloading":28882,"Contin":28883,"whe":28884,"plane":28885,"Wil":28886,"IDA":28887,"Ele":28888,"ĠPAL":28889,"Ġbeams":28890,"ĠProud":28891,"ramer":28892,"Ġindependents":28893,"Ġtranslator":28894,"ĠBrah":28895,"ĠTrooper":28896,"aylor":28897,"pson":28898,"Ġguise":28899,"Ġdiffering":28900,"Ġtopple":28901,"ichen":28902,"ĠSeymour":28903,"deg":28904,"ĠMixed":28905,"Ġinvoluntary":28906,"Ġcountdown":28907,"ĠNarc":28908,"ĠAdults":28909,"Ġcoaste
r":28910,"Ġ342":28911,"ĠAcquisition":28912,"mone":28913,"Ġpenchant":28914,"Brian":28915,"Gh":28916,"Pres":28917,"enei":28918,"Ġreefs":28919,"ĠMaver":28920,"Ġdevised":28921,"ĠIMP":28922,"vict":28923,"Ġagility":28924,"ĠPayments":28925,"respected":28926,"Ġtuning":28927,"ĠFACE":28928,"actions":28929,"Ġyell":28930,"ĠLeaving":28931,"Ġsnowy":28932,"Saudi":28933,"Ġformations":28934,"Ġairborne":28935,"Ġdeed":28936,"ooks":28937,"Ġnamesake":28938,"Ġpunishable":28939,"Ġagg":28940,"oths":28941,"ĠFamous":28942,"ĠDeposit":28943,"Ġinduce":28944,"189":28945,"Ġhesitation":28946,"ĠBrowse":28947,"ople":28948,"reys":28949,"henko":28950,"Ġsecretaries":28951,"Ġintersections":28952,"Ġdiminishing":28953,"ints":28954,"Ġ1934":28955,"ĠInvestigative":28956,"ĠMexicans":28957,"ĠMahar":28958,"ibur":28959,"Ġstocking":28960,"gross":28961,"Ġasbestos":28962,"Ġagitation":28963,"ĠBST":28964,"Overall":28965,"Ġheats":28966,"ĠSpan":28967,"Ġimped":28968,"Ġtrusting":28969,"Pet":28970,"Ġegregious":28971,"Ġcomedians":28972,"zin":28973,"WIN":28974,"Ġchats":28975,"Ġexploding":28976,"ĠTort":28977,"Ġembraces":28978,"Ġneut":28979,"verson":28980,"ouncing":28981,"ĠFiber":28982,"Ġbaker":28983,"Ġunstoppable":28984,"ĠDial":28985,"cars":28986,"Marc":28987,"164":28988,"volt":28989,"Ġceased":28990,"EFF":28991,"Ġpromoters":28992,"Ġcircuits":28993,"Ġexcise":28994,"Ġseminars":28995,"ĠTiny":28996,"ĠImportant":28997,"ĠTup":28998,"Ġoutburst":28999,"ĠSOC":29000,"ĠWWII":29001,"Ġmerging":29002,"highly":29003,"ĠGmail":29004,"ozy":29005,"ĠKB":29006,"Ġlaboratories":29007,"knit":29008,"ĠClosed":29009,"Ġsurrounds":29010,"ĠVet":29011,"Ġcere":29012,"vard":29013,"ĠDeadpool":29014,"text":29015,"Ġinfusion":29016,"Ġcuc":29017,"ĠAtl":29018,"Ġbustling":29019,"ĠSettings":29020,"Ġ193":29021,"ryan":29022,"184":29023,"186":29024,"Ġswat":29025,"rane":29026,"Ġepidem":29027,"lando":29028,"Ġtestifying":29029,"Ġmoistur":29030,"ĠTens":29031,"Ġexemplary":29032,"ĠPump":29033,"Ġforcefully":29034,"ĠFare":29035,"Ġcomplicate":29036,"Fe":29037,"Di":29038,"ĠThy":29039,"Ġcompartment":29040,"ĠFiesta":29041,"Would":29042,"fitted":29043,"Ġcull":29044,"Ġcomedic":29045,"cyl":29046,"Ġwhichever":29047,"stic":29048,"Ġ213":29049,"Ġspills":29050,"Ġplasma":29051,"Ġdisguise":29052,"ĠCompass":29053,"ĠImmun":29054,"Ġscarf":29055,"Ġdisperse":29056,"Ġreckon":29057,"ĠTaste":29058,"root":29059,"ĠGAME":29060,"xx":29061,"Ġhomophobic":29062,"Ġdimin":29063,"/#":29064,"Ġ178":29065,"Ġgems":29066,"lio":29067,"informed":29068,"ample":29069,"XT":29070,"Ġrepression":29071,"ĠTakes":29072,"Ġhabitats":29073,"Ġmountainous":29074,"ĠMcH":29075,"ENC":29076,"Mobil":29077,"Ġreel":29078,"ĠTI":29079,"Ġauthorize":29080,"ĠAccept":29081,"ĠMetall":29082,"CCC":29083,"Ġwetlands":29084,"ĠWitch":29085,"heading":29086,"Ġintervals":29087,"ĠWitt":29088,"hene":29089,"Ġcomforting":29090,"ollen":29091,"ERN":29092,"ooky":29093,"etch":29094,"Ġassailant":29095,"announced":29096,"elin":29097,"plate":29098,"920":29099,"eating":29100,"induced":29101,"ĠIgor":29102,"ĠAmph":29103,"Ġpatented":29104,"posing":29105,"Ġextraordinarily":29106,"Ġfearless":29107,"mortem":29108,"ĠDraw":29109,"ĠRend":29110,"Son":29111,"ridden":29112,"ĠAdvantage":29113,"Ġ305":29114,"Ġroared":29115,"Str":29116,"Ġradioactive":29117,"Ġslur":29118,"ĠRear":29119,"affles":29120,"ĠPon":29121,"Ġost":29122,"umbs":29123,"ĠSlack":29124,"athom":29125,"baby":29126,"213":29127,"ĠSpending":29128,"ĠAccordingly":29129,"Ġclocks":29130,"archs":29131,"Ġsmugg":29132,"Ġmastermind":29133,"ĠKlaus":29134,"alpha":29135,"Ġspoiled":29136,"264":29137,"Pod":29138,"Ġflared":29139,"Ġcomposure":29140,"ĠCAM":
29141,"Ġrestruct":29142,"Ġtasted":29143,"ĠKimber":29144,"Ġupheaval":29145,"CHAR":29146,"ĠGeo":29147,"itations":29148,"Ġbegged":29149,"UX":29150,"Authorities":29151,"ĠEngel":29152,"ĠHOME":29153,"Ġratt":29154,"Ġquickest":29155,"475":29156,"ĠSting":29157,"ĠICO":29158,"yu":29159,"Ġdefy":29160,"Prince":29161,"cards":29162,"Ġovertake":29163,"Ġretrieved":29164,"ĠNavajo":29165,"Ġpastry":29166,"ĠLange":29167,"Ġentrusted":29168,"ĠCull":29169,"aler":29170,"Ġdinosaurs":29171,"Ġbragging":29172,"ĠAlley":29173,"meier":29174,"ĠAssuming":29175,"Ġana":29176,"omatic":29177,"Brend":29178,"acted":29179,"Ġexhaustive":29180,"Ġunfit":29181,"Several":29182,"gap":29183,"Ġtet":29184,"228":29185,"Sk":29186,"302":29187,"Ġdeflect":29188,"Ġ179":29189,"226":29190,"Ġadorned":29191,"ĠSpread":29192,"Ġthirds":29193,"ĠSemi":29194,"Ġdescend":29195,"Ġaccumulate":29196,"Ġflavours":29197,"Ġinvoked":29198,"ĠAnge":29199,"Ġprofess":29200,"unks":29201,"ĠKickstarter":29202,"ENTS":29203,"ĠRw":29204,"Ġchatter":29205,"ĠPOS":29206,"Ġcollaborators":29207,"ĠEW":29208,"ĠMarkus":29209,"Ġimpair":29210,"Ġbolt":29211,"Ġglue":29212,"Ġloosely":29213,"ĠSUM":29214,"Ġhydraulic":29215,"Ġpredatory":29216,"Charles":29217,"cond":29218,"Ġspawned":29219,"Fr":29220,"174":29221,"Ġtame":29222,"Ġaggrav":29223,"Ġchrist":29224,"true":29225,"ivable":29226,"Ġhen":29227,"ĠKut":29228,"Ġskyrocket":29229,"Ġeg":29230,"Ġveterinarian":29231,"ĠStats":29232,"Kit":29233,"Ġbiologist":29234,"Spe":29235,"Ġantenna":29236,"Ġsust":29237,"fill":29238,"Ġpayload":29239,"227":29240,"Ġlivestream":29241,"ORN":29242,"ĠAbel":29243,"Ġdeception":29244,"ussen":29245,"Britain":29246,"partisan":29247,"Ġbrowse":29248,"Ġmelan":29249,"172":29250,"ĠNumerous":29251,"ĠMansion":29252,"Ġassailants":29253,"£":29254,"olerance":29255,"Ġdirectives":29256,"ĠInteg":29257,"zers":29258,"Ġduct":29259,"ĠHonestly":29260,"ĠImmediately":29261,"ixty":29262,"Ġdiagnose":29263,"Ġimplication":29264,"ĠiPads":29265,"testers":29266,"riots":29267,"Ġrespons":29268,"XP":29269,"pes":29270,"875":29271,"Ġ199":29272,"ĠPoe":29273,"303":29274,"Ġailments":29275,"ĠCarrier":29276,"Ġeject":29277,"Ġrestroom":29278,"Drive":29279,"manufact":29280,"Ġcompens":29281,"Ġglossy":29282,"Ġrecovers":29283,"Ġthinner":29284,"Ġdescendants":29285,"antle":29286,"Beaut":29287,"competitive":29288,"ĠRobotics":29289,"Ġpretext":29290,"233":29291,"Ġflanked":29292,"ĠâĻ":29293,"Ġguts":29294,"Ġwee":29295,"Ġaccents":29296,"mc":29297,"Ġgrapp":29298,"ĠNathaniel":29299,"ĠMikhail":29300,"Ġobligated":29301,"Ġmanoeuv":29302,"Ġechoing":29303,"Ġ189":29304,"ĠDevice":29305,"isd":29306,"Ġloopholes":29307,"Ġbehold":29308,"ĠMerry":29309,"Ġfunn":29310,"Ġnuanced":29311,"667":29312,"ELY":29313,"ĠTasmania":29314,"ĠSaddam":29315,"Ġquizz":29316,"military":29317,"cient":29318,"Ġoutlaw":29319,"ĠAudit":29320,"ĠBoom":29321,"Ġcrim":29322,"asured":29323,"ĠApps":29324,"ĠKush":29325,"onica":29326,"Ġamput":29327,"signed":29328,"ĠMEN":29329,"ĠRosenberg":29330,"Ġvide":29331,"ĠDirection":29332,"Ġfountain":29333,"TW":29334,"ĠCARE":29335,"Ġreassured":29336,"Food":29337,"Ġdepressing":29338,"ĠWhilst":29339,"reatment":29340,"Ġspelled":29341,"Ġhipp":29342,"ĠPeach":29343,"hound":29344,"Harry":29345,"Ġcatalogue":29346,"ĠCommun":29347,"Ġnurture":29348,"rush":29349,"ĠPopulation":29350,"ĠNTS":29351,"ĠElectrical":29352,"rounded":29353,"Ġblending":29354,"Ġ223":29355,"alities":29356,"ilation":29357,"eas":29358,"estate":29359,"Ġnarrowing":29360,"ĠTreasure":29361,"192":29362,"Ġwhims":29363,"Ġrobber":29364,"Ġsoaked":29365,"nian":29366,"Ġcongest":29367,"ĠYosemite":29368,"notes":29369,"icer":29370,"ĠGuardians"
:29371,"ĠFrozen":29372,"Ġ187":29373,"Ġhandcuffs":29374,"Someone":29375,"Ġenshr":29376,"gency":29377,"ĠCube":29378,"Ġprinters":29379,"Ġundercut":29380,"ĠSolution":29381,"rosis":29382,"ĠHumanity":29383,"Ġsucks":29384,"ĠSick":29385,"Tax":29386,"Ġtablespoon":29387,"ĠTrin":29388,"ĠArchive":29389,"Mom":29390,"ĠSAY":29391,"Ġdrifting":29392,"ĠFarage":29393,"Ġforging":29394,"WM":29395,"ĠEleanor":29396,"USH":29397,"Ġemph":29398,"Ġcareless":29399,"Ġspew":29400,"Ġinsensitive":29401,"Ġawhile":29402,"Ġcit":29403,"opened":29404,"ĠFem":29405,"Ġvapor":29406,"Ġdownt":29407,"ylene":29408,"Ġclut":29409,"Ġculp":29410,"1990":29411,"Ġdisgruntled":29412,"Students":29413,"uttering":29414,"gyn":29415,"vre":29416,"Ġrapes":29417,"division":29418,"ĠCalendar":29419,"tal":29420,"icts":29421,"caliber":29422,"ĠFighters":29423,"ĠUnc":29424,"163":29425,"ĠRogue":29426,"Ġregistrations":29427,"Ġundermines":29428,"ĠPunch":29429,"Ġdramas":29430,"176":29431,"Ġslider":29432,"ĠFlore":29433,"ر":29434,"Ġbru":29435,"inelli":29436,"Ġdisparities":29437,"ا":29438,"Ġreferrals":29439,"ĠCharges":29440,"Ġbreeds":29441,"ĠMEP":29442,"288":29443,"Ġmouths":29444,"Ġsideways":29445,"Ġbelievers":29446,"ppard":29447,"Ġhotter":29448,"Ġunderestimated":29449,"Ġjelly":29450,"525":29451,"ĠCMS":29452,"ĠWeiner":29453,"Ġguarding":29454,"Ġampl":29455,"ĠKidd":29456,"UF":29457,"orient":29458,"max":29459,"Ash":29460,"Ġwander":29461,"Ġ..........":29462,"ĠDempsey":29463,"ĠToken":29464,"chat":29465,"Justin":29466,"equipped":29467,"ĠBI":29468,"Ġsins":29469,"Ġnond":29470,"ursion":29471,"Ġcoc":29472,"Ġmailing":29473,"ĠArchitect":29474,"Ġhaunting":29475,"Ġpont":29476,"Ġascertain":29477,"Ġwig":29478,"Ġskysc":29479,"Ġarg":29480,"ĠItalians":29481,"/?":29482,"Ġ----------------------------------------------------------------":29483,"ĠPrecision":29484,"EPA":29485,"Ġhotly":29486,"Ġcircumvent":29487,"ĠEcc":29488,"Ġmerch":29489,"akov":29490,"Ġunab":29491,"heres":29492,"Ġsubcommittee":29493,"ĠDiscuss":29494,"ĠChallenger":29495,"crafted":29496,"Ġcanine":29497,"osphere":29498,"Ġspider":29499,"Ġteachings":29500,"atos":29501,"Ġuniversally":29502,"Ġturbine":29503,"ĠLO":29504,"ĠMAG":29505,"Ġpassers":29506,"Ġroundup":29507,"Ġdenounce":29508,"ĠSpiegel":29509,"until":29510,"Ġshaved":29511,"Ġdisdain":29512,"Nazi":29513,"Ġnewfound":29514,"Ġspontaneous":29515,"Ġmash":29516,"ĠDispatch":29517,"Ġsunrise":29518,"ogged":29519,"Ġfuss":29520,"Ġeas":29521,"acci":29522,"ĠTarg":29523,"Ġhash":29524,"lict":29525,"Ġmisc":29526,"ĠSched":29527,"guy":29528,"linger":29529,"warm":29530,"ipel":29531,"ĠGork":29532,"Ġdispatcher":29533,"Ġ315":29534,"Ġfinely":29535,"Ġreliably":29536,"Ġrupt":29537,"Ġnegligent":29538,"Ġendorsements":29539,"ĠOrient":29540,"Ġelectro":29541,"haired":29542,"Ġphysique":29543,"wine":29544,"Ġadolescents":29545,"Ġ184":29546,"alth":29547,"Ġvalidated":29548,"izzard":29549,"ĠPeck":29550,"Ġemblem":29551,"status":29552,"ĠJungle":29553,"orius":29554,"Ġeccentric":29555,"Ġfolding":29556,"poor":29557,"ĠTHC":29558,"appers":29559,"Ġscripted":29560,"239":29561,"ĠPreferred":29562,"digital":29563,"Ġsharper":29564,"Ġportrays":29565,"rative":29566,"238":29567,"Ġ183":29568,"Ġuneasy":29569,"ĠRI":29570,"Ġvil":29571,"171":29572,"Ġspoil":29573,"ĠPricing":29574,"ĠHardware":29575,"Ġ188":29576,"Ġhorrendous":29577,"Ġostensibly":29578,"nah":29579,"Ġgadget":29580,"ADS":29581,"coat":29582,"Ġexhausting":29583,"Ġdraining":29584,"arate":29585,"ĠBulgarian":29586,"emo":29587,"Ġhier":29588,"Ġguitars":29589,"ieties":29590,"assed":29591,"ĠYaz":29592,"Ġaggress":29593,"ĠBG":29594,"vik":29595,"Ġneatly":29596,"Ġpixel":29597,"Ġin
timacy":29598,"ĠRug":29599,"Ġ512":29600,"Ġnarrated":29601,"Ġmast":29602,"ĠNos":29603,"ĠHung":29604,"reciation":29605,"ĠChandra":29606,"Ġbios":29607,"ĠEnded":29608,"lique":29609,"ĠCambod":29610,"Ġworrisome":29611,"ĠEQ":29612,"Ġnovelist":29613,"ĠDynamic":29614,"ĠMIC":29615,"Ġdisposed":29616,"Ġbrackets":29617,"Ġhaircut":29618,"ĠLana":29619,"Ġlull":29620,"Ġbillboard":29621,"ĠReverend":29622,"ĠNAV":29623,"borgh":29624,"Ġadrenaline":29625,"Ġseeming":29626,"ĠPCB":29627,"ĠBridgewater":29628,"Ġsquirrel":29629,"262":29630,"write":29631,"Ġstabilization":29632,"wild":29633,"Ġsecession":29634,"Ġpacket":29635,"AMES":29636,"licted":29637,"Ġmalnutrition":29638,"claimed":29639,"Ġcharred":29640,"Ġtragically":29641,"Published":29642,"Ġrepealed":29643,"ĠSawyer":29644,"ĠMormon":29645,"resolution":29646,"ĠSaud":29647,"Henry":29648,"Ġdiscontin":29649,"Ġsnag":29650,"danger":29651,"Ġmixes":29652,"Ġupbringing":29653,"Ġlimb":29654,"ĠFantastic":29655,"Sim":29656,"ĠAugustine":29657,"ĠGreeks":29658,"cod":29659,"ĠHistorically":29660,"mire":29661,"register":29662,"ĠKund":29663,"Ġdebilitating":29664,"Chat":29665,"ĠTau":29666,"ï":29667,"lower":29668,"pie":29669,"Ġ430":29670,"Ġnascent":29671,"Ġ375":29672,"Ġbum":29673,"WI":29674,"Netflix":29675,"whether":29676,"Ġdearly":29677,"eff":29678,"PRES":29679,"Ġlandmarks":29680,"Ġculminating":29681,"Ġmigrate":29682,"balanced":29683,"Ġregulars":29684,"Ġmodification":29685,"Ġdips":29686,"ĠRedmond":29687,"ationally":29688,"atsu":29689,"Ġphilosophical":29690,"Ġtyping":29691,"Ġunreal":29692,"Ġboiled":29693,"Ġblight":29694,"Ġdru":29695,"ĠGaddafi":29696,"Ġnour":29697,"Ġsequential":29698,"Ġaugment":29699,"ĠEuras":29700,"ĠWiley":29701,"endar":29702,"Ġacronym":29703,"esteem":29704,"ĠMajesty":29705,"Ġgrips":29706,"Ġobsolete":29707,"nos":29708,"Made":29709,"ogie":29710,"ĠLiver":29711,"ĠDonetsk":29712,"Ġdynam":29713,"tel":29714,"bring":29715,"Ġknit":29716,"Ġfirepower":29717,"Ġprepaid":29718,"ĠRaphael":29719,"Ġsensing":29720,"720":29721,"WN":29722,"Nor":29723,"puted":29724,"Ġbureaucrats":29725,"ĠAdjust":29726,"Ġintensely":29727,"Ġsunscreen":29728,"Ho":29729,"ĠYelp":29730,"ĠPU":29731,"ĠSerge":29732,"ĠCyp":29733,"ELF":29734,"ĠGuns":29735,"Ġteamwork":29736,"ĠBib":29737,"ĠMaintenance":29738,"perate":29739,"Ġwiping":29740,"Ġcharcoal":29741,"ordan":29742,"International":29743,"Ġbehaving":29744,"Ġsoftened":29745,"ĠIncreased":29746,"Ġunfl":29747,"470":29748,"Ġinformative":29749,"Ġnovelty":29750,"Ġavoidance":29751,"Ġteasing":29752,"matic":29753,"Ġmaid":29754,"ĠPell":29755,"Ġcounterterrorism":29756,"ĠGabe":29757,"ications":29758,"ĠConnection":29759,"ĠInquiry":29760,"isin":29761,"orama":29762,"Ġcorpse":29763,"Ġpractitioner":29764,"itto":29765,"UA":29766,"Ġforestry":29767,"Ġlic":29768,"Ġrevolves":29769,"Ġcalculating":29770,"Ġpuppet":29771,"ulously":29772,"ĠPebble":29773,"Dep":29774,"Ġupholding":29775,"Ġcarving":29776,"Ġwartime":29777,"Ġenvy":29778,"Ġencro":29779,"ĠPunk":29780,"ĠAdminist":29781,"ucha":29782,"Ġbattleground":29783,"Ġlol":29784,"uable":29785,"Ġunheard":29786,"ĠSpur":29787,"phony":29788,"Ġcarc":29789,"ĠSut":29790,"Ġpollutants":29791,"Cr":29792,"Ġvigorous":29793,"355":29794,"ĠMarriage":29795,"Ġstaffed":29796,"fecture":29797,"ĠArabs":29798,"supported":29799,"Ġmanpower":29800,"ĠSatellite":29801,"None":29802,"Ġqueues":29803,"Ġinsightful":29804,"Ġinterchange":29805,"Rel":29806,"Ġsolemn":29807,"Ġsmuggled":29808,"upt":29809,"Ġ171":29810,"Ġparallels":29811,"intelligence":29812,"punk":29813,"Ġrecycle":29814,"Ġdecorative":29815,"Ġshar":29816,"arrell":29817,"iances":29818,"ĠBolivia":29819,"Ġstrengthens":29820
,"430":29821,"Ġhardships":29822,"Ġsignalling":29823,"Ġunthinkable":29824,"READ":29825,"Ġtad":29826,"picked":29827,"Ġarmor":29828,"Ġcores":29829,"ĠMatrix":29830,"Ġdj":29831,"Ġevolutionary":29832,"ĠBermuda":29833,"OE":29834,"organized":29835,"Ġrelentlessly":29836,"sol":29837,"ĠMamm":29838,"Ġpounding":29839,"Weather":29840,"Ġrab":29841,"Ġsweets":29842,"funding":29843,"ĠHUD":29844,"ĠSoldier":29845,"reed":29846,"released":29847,"Ġcontainment":29848,"alid":29849,"ĠNikon":29850,"Ġcervical":29851,"Ġign":29852,"Ġalias":29853,"Ġoptimized":29854,"Ġasserting":29855,"ĠAFTER":29856,"Ġflatt":29857,"Ġdinosaur":29858,"ĠRefugees":29859,"ĠAnch":29860,"Ġadjustable":29861,"Ġroaring":29862,"Ġpilgrimage":29863,"Ġcowboy":29864,"Ġentails":29865,"ractions":29866,"EY":29867,"undy":29868,"ĠKuh":29869,"inges":29870,"ĠTerra":29871,"ĠEscape":29872,"Ġrundown":29873,"Ġstriped":29874,"KN":29875,"ocations":29876,"IDENT":29877,"IGH":29878,"Ġavoids":29879,"Moh":29880,"ĠLS":29881,"lbs":29882,"ĠAttempt":29883,"Ġtriangle":29884,"Ġclimax":29885,"Ġhp":29886,"Ġallot":29887,"learning":29888,"ĠJFK":29889,"Justice":29890,"OUT":29891,"ĠHER":29892,"ĠLect":29893,"Ġtrench":29894,"edar":29895,"Ġreservoirs":29896,"uid":29897,"rf":29898,"162":29899,"Ġinterfered":29900,"Ġemit":29901,"these":29902,"444":29903,"ĠLeather":29904,"essing":29905,"ĠEighth":29906,"uckle":29907,"Breaking":29908,"Ġunresolved":29909,"Ġgoose":29910,"252":29911,"platform":29912,"atus":29913,"Ġcomplexion":29914,"ĠBUS":29915,"Ġstruct":29916,"middle":29917,"Sat":29918,"ĠWHERE":29919,"LB":29920,"redible":29921,"vered":29922,"Louis":29923,"ĠBaz":29924,"Eye":29925,"safety":29926,"Ġhypothetical":29927,"Ġbowel":29928,"Ġuntouched":29929,"312":29930,"ĠPric":29931,"Ġastounding":29932,"meet":29933,"Aaron":29934,"ĠWoo":29935,"236":29936,"ĠShape":29937,"Ġdrifted":29938,"Ġtile":29939,"ĠGrim":29940,"Ġundeniable":29941,"Ġ..":29942,"Ġradius":29943,"Ġovarian":29944,"ĠSeriously":29945,"verning":29946,"Ġassertions":29947,"oxic":29948,"231":29949,"ĠViz":29950,"Jackson":29951,"ĠSno":29952,"Ġboycot":29953,"okingly":29954,"ousse":29955,"proclaimed":29956,"Ġblazing":29957,"Ġinefficient":29958,"Ġfig":29959,"Ġbooze":29960,"259":29961,"agus":29962,"statement":29963,"Ġlocom":29964,"Ġtacos":29965,"Ġmemos":29966,"gender":29967,"ĠOrt":29968,"263":29969,"Ġintervening":29970,"Soc":29971,"University":29972,"ĠPis":29973,"ĠReturns":29974,"ĠPAN":29975,"Ġultrasound":29976,"Ġcoherent":29977,"tracking":29978,"rieved":29979,"383":29980,"Ġqualitative":29981,"uld":29982,"ĠGiovanni":29983,"Ġstorylines":29984,"Ġdarkest":29985,"Ġvelvet":29986,"RIP":29987,"Ġcompatibility":29988,"Ġtroll":29989,"CN":29990,"Found":29991,"ĠOu":29992,"Ġtease":29993,"Ġvested":29994,"Ġprovocation":29995,"Ġimprovised":29996,"Ġactivation":29997,"unte":29998,"ĠMonteneg":29999,"ĠJOHN":30000,"ĠReact":30001,"Ġpolluted":30002,"217":30003,"Ġmushroom":30004,"Ġdisconnected":30005,"ĠVoices":30006,"asu":30007,"Ġsensory":30008,"REE":30009,"Ġmonarchy":30010,"Ġ173":30011,"doing":30012,"involved":30013,"ĠJonah":30014,"Ġtoxins":30015,"Ġtv":30016,"Ġacademia":30017,"IQ":30018,"Mor":30019,"ĠStraight":30020,"ĠRN":30021,"ĠâĹı":30022,"Ġpear":30023,"187":30024,"Ġendeavors":30025,"ĠTurbo":30026,"Ġducks":30027,"ĠRamsay":30028,"Ġoutpatient":30029,"Ġcomprehend":30030,"UNE":30031,"Ġbriefings":30032,"total":30033,"Ġmigr":30034,"always":30035,"Ġmoot":30036,"ĠRider":30037,"Ġbiblical":30038,"Form":30039,"Ġcurry":30040,"Ġexquisite":30041,"385":30042,"244":30043,"Ġattendants":30044,"Ġcabinets":30045,"nton":30046,"Baby":30047,"Honestly":30048,"ĠFIRE":30049,"211":30050,"itech":300
51,"ĠProsper":30052,"Ġchops":30053,"odic":30054,"Rod":30055,"job":30056,"orset":30057,"ĠAry":30058,"obic":30059,"ĠNil":30060,"isable":30061,"Ġorche":30062,"Ġtrivial":30063,"ĠZy":30064,"ĠXP":30065,"Ġendorsing":30066,"ĠLIM":30067,"adish":30068,"237":30069,"ĠLaws":30070,"heid":30071,"ĠSignature":30072,"ĠVern":30073,"ĠBland":30074,"ansk":30075,"Ġrepository":30076,"ĠPetra":30077,"Enter":30078,"Ġtruths":30079,"Ġbordering":30080,"Ġpenn":30081,"Ġsimplified":30082,"zn":30083,"ĠCree":30084,"Ġ181":30085,"Hi":30086,"ĠGreenberg":30087,"Ġprematurely":30088,"ĠSass":30089,"Ġwrecked":30090,"Ġheinous":30091,"415":30092,"Turn":30093,"zl":30094,"amental":30095,"ĠBraz":30096,"fing":30097,"ĠAngle":30098,"ĠPhantom":30099,"agra":30100,"ĠShack":30101,"Ġhomegrown":30102,"Ġalright":30103,"AME":30104,"ĠKN":30105,"Ġclicks":30106,"Ġmanned":30107,"ĠScope":30108,"Ġextras":30109,"Ġclinicians":30110,"321":30111,"African":30112,"Ġjuices":30113,"Ġrefere":30114,"****":30115,"ambling":30116,"since":30117,"Ġvoic":30118,"QB":30119,"ĠAtmospheric":30120,"Mat":30121,"Ġperpetrated":30122,"ĠSteps":30123,"Fit":30124,"Ġsilenced":30125,"Ġbonded":30126,"Ġquantify":30127,"Houston":30128,"ocracy":30129,"Ġfreeing":30130,"pipe":30131,"corn":30132,"rones":30133,"ooked":30134,"ĠSuz":30135,"Ġunaccount":30136,"196":30137,"Ġlogos":30138,"ĠFurious":30139,"ĠSpart":30140,"urst":30141,"itri":30142,"ĠZub":30143,"ĠActual":30144,"Ġslee":30145,"Ġgag":30146,"Ġmetabolism":30147,"ĠDesigned":30148,"Ġpedigree":30149,"Ġcoolest":30150,"âĿ":30151,"iuses":30152,"ĠYellowstone":30153,"Ġinformant":30154,"Ġushered":30155,"ĠGarg":30156,"thel":30157,"Hop":30158,"Ġrepetitive":30159,"flag":30160,"Ġunmarked":30161,"ĠBrave":30162,"Ġincur":30163,"reading":30164,"ppel":30165,"lah":30166,"ateurs":30167,"286":30168,"ĠAtomic":30169,"Ġappliance":30170,")'":30171,"traditional":30172,"Ġdads":30173,"Ġregimen":30174,"Ġinfrared":30175,"Ġdotted":30176,"Ġtails":30177,"Ġhorrors":30178,"uments":30179,"Ġdub":30180,"lighting":30181,"Ġunearthed":30182,"assisted":30183,"ĠSpiel":30184,"trial":30185,"Ġpersever":30186,"MAX":30187,"Ġicing":30188,"Energy":30189,"Ġ1943":30190,"move":30191,"Error":30192,"Ġliter":30193,"ĠCly":30194,"Ari":30195,"Ġgranite":30196,"Ġcropped":30197,"ĠRD":30198,"ĠREM":30199,"TX":30200,"Ġdispleasure":30201,"ĠComfort":30202,"Ġunsettling":30203,"Ġscratching":30204,"866":30205,"eton":30206,"560":30207,"Ġcommonplace":30208,"Ġreproduced":30209,"ggie":30210,"Ġschooling":30211,"Ġreprim":30212,"Ġdarling":30213,"huge":30214,"ĠDante":30215,"cp":30216,"heastern":30217,"Ġeduc":30218,"Digital":30219,"Ġwrath":30220,"Ġwatering":30221,"ĠTail":30222,"Ġdegradation":30223,"530":30224,"usive":30225,"ĠXu":30226,"ĠAH":30227,"Ġclassy":30228,"ĠSET":30229,"Ġcriminally":30230,"dependent":30231,"ĠAlps":30232,"Ġnotwithstanding":30233,"Ġfamiliarity":30234,"ĠAPP":30235,"aurus":30236,"gments":30237,"Mid":30238,"Ġepilepsy":30239,"Ġresemblance":30240,"brush":30241,"Ġ333":30242,"Ġliberated":30243,"ĠBeng":30244,"ĠLans":30245,"Ġtraff":30246,"ihu":30247,"establish":30248,"Ġcort":30249,"Rick":30250,"Ġplugged":30251,"onement":30252,"ĠAccounting":30253,"Ġreconstruct":30254,"Pop":30255,"Ġincapable":30256,"aho":30257,"ĠDexter":30258,"Ġpitted":30259,"Ġbathing":30260,"Ġdun":30261,"Ġexplor":30262,"ĠMidnight":30263,"Ġactiv":30264,"iann":30265,"likely":30266,"acons":30267,"owicz":30268,"Ġnegativity":30269,"Ġfreel":30270,"ewitness":30271,"Ġinj":30272,"Stephen":30273,"Ġshredded":30274,"Ġprepar":30275,"Script":30276,"Ġcorrectional":30277,"Ġcommits":30278,"hai":30279,"activity":30280,"Imp":30281,"Ġstumble":30282,"Ġcache":30
283,"ĠPromise":30284,"Ġprecinct":30285,"Ġmulticultural":30286,"Ġsubstitutes":30287,"Ġshortened":30288,"ovable":30289,"Ġfasting":30290,"Ġinfused":30291,"Ġbulldo":30292,"alm":30293,"Ġadjoining":30294,"Ġmultiplayer":30295,"ĠAlien":30296,"Ġpund":30297,"ethyl":30298,"Ġbliss":30299,"ĠDecision":30300,"Ġbab":30301,"Ġangrily":30302,"another":30303,"oled":30304,"ainted":30305,"ĠPriest":30306,"Ġdraped":30307,"ĠPersonally":30308,"Ġstomp":30309,"ĠWolfgang":30310,"Ġoste":30311,"itches":30312,"Ġhoops":30313,"ĠJO":30314,"Ġsche":30315,"ĠZan":30316,"Ġcleans":30317,"Ġclimbs":30318,"Ġelectronically":30319,"243":30320,"ocy":30321,"gall":30322,"ĠREAL":30323,"Ġmurky":30324,"Ġmodernization":30325,"tub":30326,"Really":30327,"Ġlax":30328,"Ġdoubted":30329,"yden":30330,"ĠPrevent":30331,"UTERS":30332,"Ġoverride":30333,"ĠSAF":30334,"Ġcoun":30335,"Ġexcerpts":30336,"Ġmotivations":30337,"Ġdecency":30338,"Ġastronomers":30339,"orical":30340,"Ġaltering":30341,"Ġ232":30342,"described":30343,"omic":30344,"Ġexh":30345,"Ġknocks":30346,"ĠRiot":30347,"ĠPurs":30348,"equal":30349,"pleting":30350,"llan":30351,"ĠSOL":30352,"iator":30353,"ILE":30354,"ĠWM":30355,"Ġdefences":30356,"Ġforearm":30357,"Toronto":30358,"526":30359,"Ġacne":30360,"Ġthirteen":30361,"itiz":30362,"akable":30363,"charges":30364,"Ġinaction":30365,"Ġbred":30366,"Ġdeficiency":30367,"Ġintrigue":30368,"opoly":30369,"ĠCamer":30370,"ĠMelt":30371,"Ġunlawfully":30372,"Ġpenetrate":30373,"ĠUsed":30374,"ĠDirty":30375,"Ġexcerpt":30376,"ĠYen":30377,"ĠCARD":30378,"Ġcher":30379,"ĠChallenges":30380,"ieves":30381,"Ġambush":30382,"Data":30383,"eeks":30384,"Ġgiveaway":30385,"Ġpawn":30386,"Ġtransf":30387,"renched":30388,"Ġmoderately":30389,"Ġnumbered":30390,"ĠIntegrity":30391,"ĠHOU":30392,"ĠHDMI":30393,"Royal":30394,"LT":30395,"ĠDirk":30396,"izon":30397,"Ġ227":30398,"Ġdisagrees":30399,"ĠNinth":30400,"Ġincrement":30401,"ĠGlory":30402,"suff":30403,"Ġartery":30404,"ĠEmployee":30405,"bum":30406,"ĠEditorial":30407,"Kh":30408,"ĠPremiere":30409,"ĠWeld":30410,"ĠIncluded":30411,"Ġmathematical":30412,"Ġexponentially":30413,"Ġhandwritten":30414,"ĠMAS":30415,"Ġindiscrim":30416,"Ġnutrient":30417,"ĠSelection":30418,"Ġ219":30419,"hyd":30420,"Ġdeton":30421,"æ":30422,"dark":30423,"ĠFidel":30424,"Ġmonkeys":30425,"Ġnutritious":30426,"Ġheadlights":30427,"oller":30428,"piring":30429,"ĠDefenders":30430,"Ġdrown":30431,"elong":30432,"Ġfloats":30433,"graduate":30434,"Ġprosper":30435,"ĠNamed":30436,"ĠEating":30437,"ECK":30438,"establishment":30439,"XM":30440,"Ġsoaking":30441,"278":30442,"Ġlistener":30443,"Ġsimultaneous":30444,"olutions":30445,"payer":30446,"Ġcustomize":30447,"ĠROCK":30448,"Ġaltar":30449,"ĠExercise":30450,"anky":30451,"ĠProfession":30452,"sever":30453,"ĠMerchant":30454,"RF":30455,"ĠCombat":30456,"Ġlegality":30457,"fledged":30458,"Ġdiapers":30459,"lves":30460,"Ġlur":30461,"Ġignores":30462,"ĠProtocol":30463,"Ġrepresentations":30464,"ĠBlumenthal":30465,"ĠLime":30466,"romptu":30467,"Ġbesieged":30468,"dl":30469,"Ġsighting":30470,"ĠParm":30471,"ĠServer":30472,"ĠBenghazi":30473,"estival":30474,"Ġplaylist":30475,"ĠUng":30476,"ĠQuantum":30477,"Ġcompromises":30478,"ĠSurvivor":30479,"ĠMobility":30480,"Ġbounty":30481,"ophers":30482,"ISA":30483,"need":30484,"uese":30485,"Ġorn":30486,"218":30487,"Ġ530":30488,"Ġbuddies":30489,"Ġagendas":30490,"ĠFeldman":30491,"ĠÃĸ":30492,"ĠBMC":30493,"ĠServe":30494,"Ent":30495,"ĠKH":30496,"ĠINT":30497,"Ġlittered":30498,"Ġvisitation":30499,"mist":30500,"Ġdupl":30501,"Ġrouted":30502,"ĠAmount":30503,"Dev":30504,"ĠConv":30505,"Ġslams":30506,"ĠVeterinary":30507,"bold":30508,"Ġ186":30
509,"ĠDOT":30510,"builder":30511,"Ġdecay":30512,"ĠHemp":30513,"pelled":30514,"Ġmankind":30515,"Tonight":30516,"Ġeffortlessly":30517,"ĠBUT":30518,"Ġhostilities":30519,"formerly":30520,"alon":30521,"ĠCrash":30522,"humane":30523,"Ġmayhem":30524,"ĠBudd":30525,"Ġdisinformation":30526,"Ġ226":30527,"Ġprototypes":30528,"__":30529,"IVERS":30530,"izzy":30531,"ĠMight":30532,"ĠPip":30533,"pour":30534,"INO":30535,"ĠLL":30536,"Ġwiret":30537,"Ġresorted":30538,"ĠTanaka":30539,"ĠDOES":30540,"Earlier":30541,"HO":30542,"Ġmoniker":30543,"ĠFang":30544,"ĠHua":30545,"bered":30546,"adding":30547,"194":30548,"STR":30549,".\")":30550,"cop":30551,"ĠFlags":30552,"ĠColleges":30553,"ĠUz":30554,"Ġsparks":30555,"Ġparadox":30556,"Marie":30557,"Strong":30558,"Ġstrawberry":30559,"Ġnurturing":30560,"Ġfax":30561,"Tor":30562,"killer":30563,"burse":30564,"Ġattachments":30565,"Ġpup":30566,"Ġexhaustion":30567,"Ġwhisky":30568,"isu":30569,"ologically":30570,"iership":30571,"Ġlamps":30572,"Ġshuff":30573,"Ġcentralized":30574,"ĠNeedless":30575,"Ġgrenade":30576,"Ġrouter":30577,"Ġoptics":30578,"ivering":30579,"Ġpioneers":30580,"ĠHug":30581,"Ġhandguns":30582,"010":30583,"Ġbailed":30584,"uana":30585,"197":30586,"Ġdistorted":30587,"ĠEssentially":30588,"ĠSilent":30589,"Ġcomparative":30590,"Music":30591,"ĠMUS":30592,"Bur":30593,"ĠComet":30594,"ĠWinchester":30595,"IGN":30596,"Mod":30597,"ĠCandidate":30598,"Ġdysfunctional":30599,"ĠCeleb":30600,"Ġhitch":30601,"api":30602,"Ġidiot":30603,"Ġunsupported":30604,"gat":30605,"inker":30606,"Ġredevelop":30607,"Ġdwind":30608,"Ġforgetting":30609,"ĠRost":30610,"Ġremembrance":30611,"Na":30612,"mopolitan":30613,"Ġberries":30614,"Ġmarital":30615,"Vol":30616,"ĠClosing":30617,"ĠHindus":30618,"itism":30619,"Ġrover":30620,"Ġmysteries":30621,"ĠNig":30622,"ucing":30623,"Ġfabrication":30624,"Ġgarments":30625,"Ġwield":30626,"ĠCompton":30627,"357":30628,"Ġoxide":30629,"chron":30630,"ĠThought":30631,"Ġcomed":30632,"ĠEpstein":30633,"ĠBART":30634,"orative":30635,"ĠKahn":30636,"adan":30637,"APH":30638,"cum":30639,"Ġloophole":30640,"ĠGoPro":30641,"osit":30642,"Ġspecification":30643,"ĠAPR":30644,"Ġdrains":30645,"Ġconserve":30646,"ĠMorse":30647,"Ġcalorie":30648,"ĠCheney":30649,"station":30650,"Ġevangel":30651,"Ġspraying":30652,"lections":30653,"Ġenclosure":30654,"Ġcommanded":30655,"ĠOrganizations":30656,"Ġimb":30657,"mins":30658,"ĠTobias":30659,"Ve":30660,"ĠNau":30661,"183":30662,"ĠGuantanamo":30663,"173":30664,"Ġrequisite":30665,"Ġderivative":30666,"Ġpopulism":30667,"Ġcultivated":30668,"lord":30669,"uler":30670,"ĠDEA":30671,"inally":30672,"Ġdemonstr":30673,"trip":30674,"ĠFirefox":30675,"246":30676,"confirmed":30677,"Anne":30678,"Ġtamp":30679,"ĠHousehold":30680,"amous":30681,"Meet":30682,"Ġdashed":30683,"pire":30684,"Ġinex":30685,"Ġloosen":30686,"272":30687,"famous":30688,"ĠHeard":30689,"Ġhindsight":30690,"Ġdepot":30691,"ĠCutting":30692,"ĠMouse":30693,"Ġgeological":30694,"number":30695,"OUN":30696,".,\"":30697,"Ġmoderation":30698,"ĠUNHCR":30699,"Ġdomains":30700,"eco":30701,"Ġcrater":30702,"Ġ510":30703,"kid":30704,"Ġcylinders":30705,"ĠClasses":30706,"Kn":30707,"Ġcarcin":30708,"ĠHunting":30709,"irit":30710,"ARP":30711,"anting":30712,"ĠMarino":30713,"ĠRESP":30714,"ifle":30715,"Ġ239":30716,"fman":30717,"Ġtheoretically":30718,"Ġdistraught":30719,"Ġstaircase":30720,"Ġexpel":30721,"Ġlord":30722,"Ġbehaviours":30723,"Ġprescribing":30724,"ographs":30725,"ĠNewly":30726,"Ġpatiently":30727,"Ġskyline":30728,"udos":30729,"Ġrepertoire":30730,"Ġhover":30731,"mint":30732,"Ġclears":30733,"Ġkale":30734,"ĠSco":30735,"ĠCoulter":30736,"Ġpancreat":3
0737,"pu":30738,"995":30739,"Ġincompetent":30740,"2007":30741,"Ġgripping":30742,"enable":30743,"Ġreinforcing":30744,"ĠFee":30745,"education":30746,"ĠKuro":30747,"Ġbowed":30748,"Ġshave":30749,"ĠMean":30750,"xi":30751,"Ġinciting":30752,"atters":30753,"Ġecstatic":30754,"hog":30755,"Ġclauses":30756,"Ġsubt":30757,"Ġbehaved":30758,"tains":30759,"Liverpool":30760,"Ġstrives":30761,"ĠKev":30762,"ĠFramework":30763,"defined":30764,"Ġrecounts":30765,"array":30766,"tips":30767,"Ġartificially":30768,"fits":30769,"Clearly":30770,"mediate":30771,"Ġunseen":30772,"Ġthugs":30773,"ĠLent":30774,"Ġ1938":30775,"Ġgenital":30776,"ĠSonic":30777,"ĠWarehouse":30778,"pler":30779,"Ġunm":30780,"Ġpackets":30781,"ĠMET":30782,"ealous":30783,"ographers":30784,"Ġlabou":30785,"Core":30786,"+,":30787,"parable":30788,"Ġstrat":30789,"Ġinvitations":30790,"Ġsouven":30791,"Ġbillboards":30792,"ĠRegulations":30793,"Ġdwarf":30794,"Ġtoler":30795,"Ġprose":30796,"Ġestates":30797,"Ġmetabolic":30798,"ĠSuff":30799,"ĠFirstly":30800,"Ġpolio":30801,"Ġchick":30802,"ĠDaughter":30803,"Ġsubstant":30804,"ĠIdentity":30805,"umbers":30806,"ĠFacts":30807,"Ġfrust":30808,"Ġdissip":30809,"ĠDeck":30810,"Hy":30811,"ĠBirch":30812,"Ġhurled":30813,"democracy":30814,"nered":30815,"eper":30816,"Ġcerebral":30817,"181":30818,"Ġhalves":30819,"abit":30820,"balance":30821,"ĠTibet":30822,"Ġhandheld":30823,"ĠDough":30824,"Ġprogrammed":30825,"hw":30826,"Ġoutlawed":30827,"ĠSerious":30828,"Ġironically":30829,"Ġmanipulating":30830,")\"":30831,"juries":30832,"Ġfragrance":30833,"crete":30834,"ĠHHS":30835,"cience":30836,"Ġcosmic":30837,"Ġforeclosure":30838,"Ġpercentages":30839,"Bus":30840,"Ġenticing":30841,"extra":30842,"ĠShy":30843,"ĠÂ¥":30844,"Ġheadsets":30845,"imensional":30846,"Ġlux":30847,"Ġresidual":30848,"Ġmantle":30849,"ĠSJ":30850,"ĠPeaks":30851,"ĠFinger":30852,"Ġunfolds":30853,"anity":30854,"Ġresettlement":30855,"ĠWeak":30856,"ĠBeen":30857,"Ġ198":30858,"Ġangels":30859,"ĠFarn":30860,"peace":30861,"Ġcapac":30862,"Ġhue":30863,"Ġlust":30864,"traumatic":30865,"laun":30866,"Ġstrawberries":30867,"Ġherbal":30868,"Ġconversions":30869,"ĠHeld":30870,"Ġprescribe":30871,"Its":30872,"ĠDartmouth":30873,"Ġfashioned":30874,"460":30875,"BLE":30876,"international":30877,"Ġlumin":30878,"Ġplantation":30879,"ilde":30880,"490":30881,"Ġeuph":30882,"Ġdisgust":30883,"Ġaspire":30884,"medical":30885,"Ġsocialism":30886,"Ġdissolve":30887,"Wal":30888,"Ġadmittedly":30889,"Ġsewing":30890,"ĠAcer":30891,"Ġtul":30892,"Ġfacilit":30893,"Ġgrandma":30894,"ĠFeeling":30895,"Ġobst":30896,"ĠFranz":30897,"ĠPalin":30898,"ĠIncrease":30899,"gets":30900,"ĠImam":30901,"âĢİ":30902,"Ġcoincides":30903,"urrence":30904,"Ġlifes":30905,"Lab":30906,"Ham":30907,"angelo":30908,"Wild":30909,"Ġvetoed":30910,"Ġventilation":30911,"olid":30912,"Summer":30913,"Ġfacade":30914,"neys":30915,"ĠWOM":30916,"ĠBenny":30917,"ĠMarried":30918,"squ":30919,"ĠReflect":30920,"return":30921,"elia":30922,"olding":30923,"Ġrefine":30924,"ĠMadness":30925,"innacle":30926,"posts":30927,"287":30928,"fruit":30929,"274":30930,"icator":30931,"ĠVoy":30932,"Ġunsett":30933,"Ġfant":30934,"Ġtreaties":30935,"Ġcrystals":30936,"Ġhijacked":30937,"words":30938,"ĠReleased":30939,"Save":30940,"Ġcannon":30941,"Ġanomaly":30942,"Ġbeacon":30943,"Ġcrippled":30944,"Ġbundles":30945,"Ġuntreated":30946,"Ġhappiest":30947,"Ġgalaxies":30948,"Ġoccupational":30949,"416":30950,"Dar":30951,"Ġcrank":30952,"Ġappropriation":30953,"asking":30954,"mens":30955,"Ġdetector":30956,"Ġskewed":30957,"Ġpoke":30958,"254":30959,"Ġhypertension":30960,"apolog":30961,"Ġevaluations":30962,"blocks":30963,
"Ġpow":30964,"GEN":30965,"Ġscalp":30966,"Ġarrogant":30967,"AIDS":30968,"ority":30969,"Ġredirect":30970,"Ġderogatory":30971,"Ġlateral":30972,"495":30973,"rolley":30974,"brew":30975,"Ġbabys":30976,"Ġmuff":30977,"ĠRequ":30978,"Ġdime":30979,"Ġwonderfully":30980,"Ġtreasures":30981,"ĠNES":30982,"Ġponds":30983,"Ġimpulse":30984,"Ġdetecting":30985,"Ġgrin":30986,"Ġbrid":30987,"Ġshoved":30988,"Ġpurge":30989,"irteen":30990,"OTHER":30991,"ÙĦ":30992,"irsch":30993,"ĠOcc":30994,"193":30995,"Ġfodder":30996,"wrote":30997,"meric":30998,"posal":30999,"Ġwinters":31000,"ĠJuice":31001,"hub":31002,"Ġcontrasting":31003,"Brazil":31004,"Ġflashy":31005,"uffer":31006,"technology":31007,"Children":31008,"Ġcatapult":31009,"owsky":31010,"ĠEclipse":31011,"abeth":31012,"ĠParticip":31013,"Ġlaud":31014,"ĠQuiet":31015,"Ġsimulations":31016,"Ġsacrificing":31017,"Ġpreaching":31018,"Ġvoicing":31019,"itizen":31020,"Ġgn":31021,"Ġsans":31022,"Ġ285":31023,"ĠRobot":31024,"Ġ1936":31025,"Ġsham":31026,"ĠKislyak":31027,"ĠGCC":31028,"tale":31029,"ĠShades":31030,"Ġsediment":31031,"Ġconveniently":31032,"Give":31033,"mounted":31034,"Ġpeel":31035,"Jun":31036,"ĠEisenhower":31037,"Ġdiplom":31038,"ĠPreservation":31039,"Ġaffirm":31040,"Ġtaboo":31041,"ĠGarr":31042,"ĠApply":31043,"prim":31044,"Ġausp":31045,"Ġtextbook":31046,"Ġforfeit":31047,"icides":31048,"Ġundis":31049,"DJ":31050,"Ġ\"...":31051,"ĠXperia":31052,"Ġfurry":31053,"Australian":31054,"Ġpreach":31055,"Ġparamed":31056,"Ġ196":31057,"agos":31058,"ĠRIP":31059,"Ġ408":31060,"ĠQuarterly":31061,"ĠQuentin":31062,"Ġdeft":31063,"ĠVlad":31064,"massive":31065,"apore":31066,"Ġquestionnaire":31067,"secution":31068,"ĠTunnel":31069,"ĠAssist":31070,"BILITY":31071,"everything":31072,"vich":31073,"Ġcomparatively":31074,"heng":31075,"ETH":31076,"ĠiPod":31077,"Ġinsurgent":31078,"Ġtestosterone":31079,"191":31080,"Ġmoons":31081,"Ġgripped":31082,"Ġstrang":31083,"pects":31084,"ĠSERVICE":31085,"Ġnumb":31086,"Ġmeasurable":31087,"Ġdismantled":31088,"Ġdepict":31089,"Ġretake":31090,"Light":31091,"Ġaquatic":31092,"useum":31093,"judicial":31094,"Ġ****":31095,"Ġrosters":31096,"certain":31097,"Ġhypothesis":31098,"2002":31099,"Snow":31100,"Ġpounded":31101,"ĠZel":31102,"ĠTrem":31103,"iversity":31104,"219":31105,"Jen":31106,"ĠAdventures":31107,"Ġcylinder":31108,"Ġbanging":31109,"Ġbalk":31110,"analy":31111,"ĠHust":31112,"ookie":31113,"ĠReturning":31114,"Ġpods":31115,"analysis":31116,"ĠTruman":31117,"Ġorg":31118,"Ġsar":31119,"Ġdred":31120,"ĠTelecommunications":31121,"ĠSven":31122,"carry":31123,"ĠLOVE":31124,"Ġparting":31125,"asar":31126,"utations":31127,"itic":31128,"Ġactu":31129,"Ġbananas":31130,"ĠNights":31131,"410":31132,"Still":31133,"Ġtweaked":31134,"went":31135,"Ġtoddlers":31136,"irted":31137,"Ġpaed":31138,"ĠWink":31139,"Ġviewpoint":31140,"ĠHelic":31141,"Ġhandshake":31142,"Ġpoaching":31143,"Ġrounding":31144,"268":31145,"ĠNVIDIA":31146,"Ġsquat":31147,"Ġtowed":31148,"Ġhandler":31149,"Ġconspir":31150,"Ġadditionally":31151,"CENT":31152,"ĠÃľ":31153,"article":31154,"ĠTough":31155,"NM":31156,"Rem":31157,"Ġstunts":31158,"ILS":31159,"ĠLM":31160,"Connect":31161,"ĠParagu":31162,"Ġcomplexities":31163,"Ġhugging":31164,"Ġabolish":31165,"ricting":31166,"ĠItems":31167,"Ġtemples":31168,"ĠSeat":31169,"ĠRubber":31170,"Ġindic":31171,"ĠVitamin":31172,"Ġcitations":31173,"Ġarmored":31174,"---------------":31175,"ĠNeo":31176,"ippy":31177,"Que":31178,"Ġrag":31179,"Ġlov":31180,"630":31181,"Ġadept":31182,"orbit":31183,"253":31184,"412":31185,"Ġbutterflies":31186,"Ġoutl":31187,"ĠCycle":31188,"Ġaesthetics":31189,"ĠTwitch":31190,"405":31191,"factor":311
92,"ðŁij":31193,"ĠCircus":31194,"Posted":31195,"Ġintroductory":31196,"ĠStack":31197,"atoes":31198,"Ġfurn":31199,"ĠHond":31200,"Ġbipolar":31201,"ĠAging":31202,"inches":31203,"Ġincompetence":31204,"Ġaloud":31205,"Imagine":31206,"Ġsepar":31207,"Ġmanip":31208,"ophobic":31209,"inion":31210,"bek":31211,"Ġquer":31212,"ĠArmen":31213,"Ġhumorous":31214,"Ġmundane":31215,"Ġapologizing":31216,"Ġpioneered":31217,"Ġ303":31218,"282":31219,"Ġcalming":31220,"orious":31221,"760":31222,"Ġstitches":31223,"Ġthrottle":31224,"Ġspinach":31225,"urities":31226,"ĠCologne":31227,"Ġripple":31228,"Cs":31229,"Cent":31230,"Should":31231,"Ġaffinity":31232,"amount":31233,"ĠMISS":31234,"Ġsage":31235,"Ġamusing":31236,"Ġsnatch":31237,"clair":31238,"ĠGuess":31239,"bench":31240,"ĠMoj":31241,"nuclear":31242,"Ġfid":31243,"ĠVM":31244,"ĠGN":31245,"brainer":31246,"Ġcurled":31247,"Ġbushes":31248,"icably":31249,"Ġcreeping":31250,"Ġveil":31251,"ĠALS":31252,"ESPN":31253,"ulsion":31254,"ĠGTX":31255,"ĠANN":31256,"Ġcomplicit":31257,"assault":31258,"IOR":31259,"Ġpolymer":31260,"Ġestimating":31261,"277":31262,"alog":31263,"Ġglimps":31264,"Ġreinforces":31265,"Ġtextbooks":31266,"Ġdictated":31267,"ĠReyn":31268,"latable":31269,"ĠOrth":31270,"520":31271,"Ġtrickle":31272,"ĠWrong":31273,".[":31274,"ĠDesigner":31275,"304":31276,"ĠInner":31277,"Ġrave":31278,"ppa":31279,"ĠGim":31280,"Ġswath":31281,"Ġcarts":31282,"atlantic":31283,"Ġpersists":31284,"ĠDeveloper":31285,"Ġgoodies":31286,"isive":31287,"Inf":31288,"ĠSaving":31289,"loop":31290,"tions":31291,"Ġabusers":31292,"Ġclot":31293,"Ġmesmer":31294,"Ġdeg":31295,"Ġskirts":31296,"257":31297,"Ġunreliable":31298,"ĠCOMM":31299,"Ġ194":31300,"Ġfledgling":31301,"administ":31302,"Israeli":31303,"ĠBarbie":31304,"ĠJeanne":31305,"Ġgenerously":31306,"ĠStruct":31307,"ĠZap":31308,"Ġvetted":31309,"ĠViolet":31310,"Ġ),":31311,"Ġembarrass":31312,"bang":31313,"ĠProvider":31314,"getting":31315,"alg":31316,"Ġunconditional":31317,"ĠHulk":31318,"ĠWad":31319,"utation":31320,"Ġpointless":31321,"Ġdeprivation":31322,"Ġstarving":31323,"ĠImpossible":31324,"ĠStir":31325,"Ġknack":31326,"anse":31327,"Ġsecurely":31328,"Ġply":31329,"395":31330,"Pack":31331,"liv":31332,"Ġridden":31333,"alks":31334,"308":31335,"male":31336,"Ġbitterly":31337,"Ġirrational":31338,"Members":31339,"ported":31340,"qq":31341,"ractor":31342,"Ġinflict":31343,"ĠBoehner":31344,"Ġthickness":31345,"Ġdome":31346,"ĠInflu":31347,"Ġheap":31348,"Ġmirrored":31349,"Ġconstituent":31350,"Ġfertile":31351,"Ġvaping":31352,"266":31353,"riages":31354,"Ġembassies":31355,"Ġpersu":31356,"ĠMacArthur":31357,"issions":31358,"Main":31359,"aths":31360,"onne":31361,"circ":31362,"Ġsweating":31363,"quartered":31364,"Ġsax":31365,"Ġ540":31366,"Ġreputable":31367,"Ġsatire":31368,"Ġpastors":31369,"ventional":31370,"Mic":31371,"female":31372,"Ġpity":31373,"appropri":31374,"voc":31375,"hei":31376,"Ġimperial":31377,"Ġcorrective":31378,"Ġresent":31379,"Ġtempered":31380,"Ġdiffers":31381,"Hamilton":31382,"Ġsaddle":31383,"Ġgrenades":31384,"ĠQuart":31385,"onymous":31386,"til":31387,"Ġdepiction":31388,"Ġdisreg":31389,"Ġpetitioner":31390,"Ġfret":31391,"ĠEns":31392,"Emer":31393,"540":31394,"opathy":31395,"vertisements":31396,"Ġsketches":31397,"venth":31398,"Ġautomate":31399,"Ġjihad":31400,"iping":31401,"Ġtert":31402,"ĠSop":31403,"ships":31404,"Ġdeceptive":31405,"ĠPryor":31406,"ĠGorge":31407,"ĠMeridian":31408,"rero":31409,"affected":31410,"Ġlame":31411,"660":31412,"rub":31413,"Hello":31414,"ĠNumbers":31415,"269":31416,"Ġmarg":31417,"Fran":31418,"640":31419,"Ġcath":31420,"winter":31421,"ĠMosque":31422,"Ġreckoning"
:31423,"ĠImaging":31424,"Ġmutation":31425,"ĠMild":31426,"Ġkidnap":31427,"Ġnav":31428,"Ġferocious":31429,"Ġdusty":31430,"Cele":31431,"ĠFoss":31432,"Ġregrett":31433,"lymp":31434,"Ġcoli":31435,"Ġstereo":31436,"Ġforesee":31437,"alties":31438,"Ġresusc":31439,"Full":31440,"wash":31441,"ĠINST":31442,"ĠPars":31443,"Ġcoated":31444,"ĠHT":31445,"Ġdiscord":31446,"Ġreforming":31447,"CAN":31448,"Ġblink":31449,"Ġlubric":31450,"Ġmishand":31451,"ensible":31452,"existent":31453,"secondary":31454,"ĠDoesn":31455,"terrorist":31456,"Ġriff":31457,"custom":31458,"ĠDET":31459,"Ġreusable":31460,"ĠCRA":31461,"ĠScalia":31462,"Ġaccelerator":31463,"Ġpropag":31464,"ĠMID":31465,"ework":31466,"Ġlooted":31467,"oscope":31468,"eners":31469,"ruction":31470,"Ġbarr":31471,"Ġviewership":31472,"Ġlends":31473,"obil":31474,"ĠRoots":31475,"ĠCame":31476,"ibel":31477,"Ġglobalization":31478,"lab":31479,"information":31480,"Ġcoordin":31481,"Ġglitch":31482,"Ġworms":31483,"Ġslurs":31484,"Ġcontemplated":31485,"ĠPenal":31486,"Ġ191":31487,"Ġ221":31488,"Ġexposes":31489,"Ġ248":31490,"ĠASP":31491,"Ġdependency":31492,"urga":31493,"pdf":31494,"Ġvibr":31495,"clone":31496,"ossible":31497,"ĠUtt":31498,"serv":31499,"ĠLevant":31500,"maybe":31501,"MU":31502,"ĠLunar":31503,"Ġbystanders":31504,"Ġcapitals":31505,"Ġpreacher":31506,"thin":31507,"Ġunderscore":31508,"Ġ('":31509,"Ġmedd":31510,"Ġautobiography":31511,"Ġpersistence":31512,"Ġarming":31513,"Ġappalled":31514,"Ġcontradictory":31515,"Ġreciproc":31516,"Ġtakedown":31517,"tan":31518,"Ġnecessities":31519,"itans":31520,"ĠAlas":31521,"Ġsegregated":31522,"ĠResponsibility":31523,"ĠSHOW":31524,"ISIS":31525,"Ġpengu":31526,"Ġumb":31527,"ĠHO":31528,"HB":31529,"ĠChou":31530,"Ġalluded":31531,"Ġharms":31532,"bara":31533,"ĠWOR":31534,"Sorry":31535,"Ġstarvation":31536,"Ġspilling":31537,"Ġcarb":31538,"annis":31539,"ĠGarrison":31540,"Ġmillionaire":31541,"ifling":31542,"ĠCancel":31543,"Ġimprint":31544,"Ġborrower":31545,"455":31546,"ĠCic":31547,"Ġexposures":31548,"dest":31549,"Ġunn":31550,"Ġ802":31551,"Ġadherence":31552,"prints":31553,"Ġweary":31554,"Ġwaging":31555,"Ġ1937":31556,"ĠKepler":31557,"%;":31558,"Ġdefective":31559,"ĠReps":31560,"ĠGranted":31561,"Ġdisco":31562,"ĠRanking":31563,"erno":31564,"Ġarchaeological":31565,"sq":31566,"Ġcapit":31567,"Ġfleets":31568,"Ġinventor":31569,"iffin":31570,"Ġspotting":31571,"ĠSHARES":31572,"309":31573,"Hard":31574,"save":31575,"241":31576,"ĠThinking":31577,"XY":31578,"Ġhavens":31579,"Ġmessed":31580,"crop":31581,"Ġperme":31582,"Ġtimelines":31583,"ĠGarage":31584,"Ġplateau":31585,"together":31586,"fox":31587,"Ġfailings":31588,"ĠTight":31589,"ĠPhysics":31590,"ĠScholars":31591,"Ġpans":31592,"Fall":31593,"Ġhull":31594,"GER":31595,"Ġbourbon":31596,"ceived":31597,"Ġsteroids":31598,"Ġhamb":31599,"Ġinterpretations":31600,"Ġcush":31601,"Chair":31602,"Ġinformational":31603,"aryn":31604,"Ġwoven":31605,"Ġamen":31606,"Bre":31607,"Ġrefreshed":31608,"York":31609,"ĠBlast":31610,"Editor":31611,"Ġmotivating":31612,"ĠReason":31613,"Florida":31614,"Ġdreaded":31615,"Ġstationary":31616,"Ġbil":31617,"doors":31618,"Ġslightest":31619,"Ġcombustion":31620,"Ġfascination":31621,"Ġstraps":31622,"scribed":31623,"Ġexhibiting":31624,"Ġsimplest":31625,"Gar":31626,"Ġprogressives":31627,"claim":31628,"ocket":31629,"Ġexoner":31630,"ĠNETWORK":31631,"Brad":31632,"Ġ197":31633,"Ġnightmares":31634,"Ġillust":31635,"among":31636,"ĠGreenpeace":31637,"Ġoval":31638,"Ġblocker":31639,"3000":31640,"ĠMemor":31641,"Ġmids":31642,"Ġconfuse":31643,"YN":31644,"cow":31645,"Ġdispensary":31646,"telling":31647,"Ġentail":31648,"Ġneurolog":31649,"
Ġbroth":31650,"Ġpron":31651,"ĠAnswer":31652,"thank":31653,"Ġintersect":31654,"Ġclinging":31655,"ĠKilling":31656,"Ġcohesion":31657,"Ġcategorized":31658,"Ġtangled":31659,"ĠASC":31660,"Arsenal":31661,"ĠAutomatic":31662,"580":31663,"sac":31664,"Ġshady":31665,"consumer":31666,"hetically":31667,"NV":31668,"Ġoverl":31669,"holes":31670,"ĠDonation":31671,"tera":31672,"score":31673,"library":31674,"Ġsmoother":31675,"Ġcoasts":31676,"Ġintercourse":31677,"Ġunfavorable":31678,"erb":31679,"Hel":31680,"Ġbiases":31681,"Ġinheritance":31682,"Ġsuppressed":31683,"ĠRecommend":31684,"iculture":31685,"ighting":31686,"inguished":31687,"idences":31688,"operated":31689,"Ġhors":31690,"Ġshrug":31691,"aila":31692,"ĠConsortium":31693,"Ġveins":31694,"uria":31695,"ĠSmithsonian":31696,"ĠAX":31697,")âĢĶ":31698,"given":31699,"JC":31700,"Ġreneg":31701,"Ġprincip":31702,"Ġextinct":31703,"Golden":31704,"ASON":31705,"Ġstatutes":31706,"292":31707,"ĠGOOD":31708,"ĠGreenland":31709,"ĠRasmussen":31710,"ATHER":31711,"Ġdeserted":31712,"ĠHitchcock":31713,"Ġqualifies":31714,"Ġdreadful":31715,"Ġsupers":31716,"Ġtendon":31717,"oter":31718,"ĠFate":31719,"Ġrestrooms":31720,"igating":31721,"Sher":31722,"Name":31723,"orph":31724,"ĠCritical":31725,"rox":31726,"Ġdefunct":31727,"Ġcanoe":31728,"Ġbiscuits":31729,"Ġwomb":31730,"808":31731,"istar":31732,"Ġroar":31733,"aundering":31734,"iewicz":31735,"ĠNM":31736,"ĠChamberlain":31737,"Ġ233":31738,"ĠCoat":31739,"Ġ999":31740,"aft":31741,"Ġlurking":31742,"ĠPist":31743,"Ġfollower":31744,"Ġcareg":31745,"ÙĨ":31746,"ĠThin":31747,"ZZ":31748,"ĠGI":31749,"ĠVintage":31750,"Ġpainstaking":31751,"Ġgloom":31752,"Ġtbsp":31753,"Ġwhim":31754,"ĠMask":31755,"rugged":31756,"Ġwritings":31757,"stantial":31758,"luence":31759,"ordable":31760,"akia":31761,"Ġassassinated":31762,"Wind":31763,"Ġdemeanor":31764,"Night":31765,"rape":31766,"ĠBringing":31767,"Ġshields":31768,"ĠAntarctic":31769,"Ġfruitful":31770,"ĠBuster":31771,"ĠLois":31772,"Ġ302":31773,"Style":31774,"ĠRIS":31775,"Ġdissatisfaction":31776,"ulp":31777,"ĠLaser":31778,"Ġdisposition":31779,"ĠAnk":31780,"Ġabsorbing":31781,"276":31782,"Ġvolcan":31783,"Ġleftover":31784,"yah":31785,"ĠVaj":31786,"Ġunsolved":31787,"oland":31788,"Ġstained":31789,"Ġpathetic":31790,"ylan":31791,"Ġknots":31792,"immigration":31793,"ieving":31794,"Coming":31795,"Commerce":31796,"ĠHurt":31797,"drawn":31798,"Ġaxis":31799,"Ġdye":31800,"ĠNora":31801,"ĠPortal":31802,"Ġsuspense":31803,"ĠExactly":31804,"Ġpowering":31805,"ĠClock":31806,"Ġdrawer":31807,"ĠSpike":31808,"Ġhallmark":31809,"aber":31810,"ĠTrainer":31811,"UV":31812,"Ġredundant":31813,"Tour":31814,"Ġdesignate":31815,"Ġredress":31816,"ĠUb":31817,"cake":31818,"oded":31819,"Ġkings":31820,"iates":31821,"Ġcoupons":31822,"Ġextremes":31823,"Elect":31824,"Ġcitation":31825,"Ġdirectory":31826,"Ġtranspired":31827,"cele":31828,"gence":31829,"5000":31830,"ostic":31831,"Ġraining":31832,"ĠSight":31833,"videos":31834,"phthal":31835,"llor":31836,"Ġappraisal":31837,"Ġdetox":31838,"Ġelecting":31839,"Ġordinances":31840,"Ġlifespan":31841,"Ref":31842,"Ġilluminated":31843,"Ġforfe":31844,"Making":31845,"ĠWorst":31846,"ĠTP":31847,"Ġfullest":31848,"ĠISIL":31849,"ĠRates":31850,"Ġyeast":31851,"sett":31852,"ĠYok":31853,"innie":31854,"edition":31855,"ĠGoldstein":31856,"Ġunaff":31857,"god":31858,"Ġzo":31859,"rums":31860,"Ġopaque":31861,"ĠHist":31862,"Yesterday":31863,"AMS":31864,"aband":31865,"005":31866,"illary":31867,"ĠSplash":31868,"Ġaccrued":31869,"Ell":31870,"Ġnominating":31871,"ĠBroadcast":31872,"ĠWhip":31873,"ARM":31874,"Ġunnecessarily":31875,"brown":31876,"429":31877,"ansky":3
1878,"Ġextravagant":31879,"Malley":31880,"wage":31881,"Ġexempted":31882,"Ġtypo":31883,"Ġesports":31884,"ĠStru":31885,"ĠPython":31886,"Ġsaint":31887,"ĠCSI":31888,"ĠPowder":31889,"Ġdisguised":31890,"ĠSubway":31891,"Ġprecursor":31892,"ĠWizard":31893,"Johnson":31894,"icas":31895,"Ġdefaults":31896,"!).":31897,"ebra":31898,"jected":31899,"Ġunaccompanied":31900,"HH":31901,"Ġproced":31902,"clinical":31903,"Ġmitigating":31904,"ĠSoup":31905,"ĠFunny":31906,"344":31907,"Hall":31908,"Ġscalable":31909,"Ġshimmer":31910,"Ġunderstatement":31911,"zeb":31912,"icus":31913,"Ġretract":31914,"IDER":31915,"ieft":31916,"iii":31917,"ĠEmperor":31918,"Ġvoltage":31919,"343":31920,"Rest":31921,"ĠButcher":31922,"Ġlaced":31923,"Ġsalty":31924,"Ġfourteen":31925,"Ġoxy":31926,"Ġraged":31927,"Ġforg":31928,"Ġcaveat":31929,"Ġponder":31930,"process":31931,"Ġghosts":31932,"ĠGoose":31933,"didn":31934,"stood":31935,"amation":31936,"Ġvillains":31937,"contract":31938,"Ġbooted":31939,"ĠDidn":31940,"ĠSalon":31941,"Ġlewd":31942,"ĠFritz":31943,"Ġorganis":31944,"Ġpuzzles":31945,"ĠRX":31946,"Ġcurtains":31947,"ĠPackage":31948,"Ġrebate":31949,"Ġspokes":31950,"Ġoccupant":31951,"Ġfooled":31952,"appy":31953,"Ġyourselves":31954,"Ġmaths":31955,"Ġ630":31956,"bos":31957,"ĠHeb":31958,"APS":31959,"Ġbulletin":31960,"Ġpests":31961,"Ġlum":31962,"ĠHAS":31963,"users":31964,"idated":31965,"Ġpalpable":31966,"ĠFeature":31967,"ĠPKK":31968,"Ġdetriment":31969,"Ġbamboo":31970,"Ġimmersed":31971,"ĠDud":31972,"Ġion":31973,"icc":31974,"ĠIris":31975,"ĠBeats":31976,"Ġimprobable":31977,"Ġfuner":31978,"Ġsprung":31979,"ĠLieberman":31980,"ĠSTA":31981,"venge":31982,"Ġtreacherous":31983,"Ġpreced":31984,"Ġsniper":31985,"ĠGOLD":31986,"ĠSUR":31987,"Nic":31988,"ĠROB":31989,"Camp":31990,"Ġhooks":31991,"oling":31992,"Ġbolst":31993,"339":31994,"heter":31995,"Ġbracelet":31996,"Ġbreat":31997,"307":31998,"ĠTrader":31999,"ĠPixar":32000,"hist":32001,"Ġmenacing":32002,"Ġgrizz":32003,"294":32004,"Ġillustrious":32005,"Ġtransact":32006,"Ġspoiler":32007,"ĠWORK":32008,"Road":32009,"Ġblackout":32010,"Ġencomp":32011,"proven":32012,"ĠFriendship":32013,"Ġentrances":32014,"Ġprofessions":32015,"Ġinsin":32016,"Ġrecorder":32017,"Ġformulation":32018,"govern":32019,"Ġpainfully":32020,"ĠRepe":32021,"eeds":32022,"cru":32023,"ĠDir":32024,"Ġtriumphant":32025,"Ġignition":32026,"xy":32027,"Ġintrusion":32028,"ĠEAR":32029,"RES":32030,"Ġration":32031,"ĠTaken":32032,"Ġcages":32033,"Ġpeg":32034,"Ġcommem":32035,"680":32036,"ĠRite":32037,"Ġfolder":32038,"Ġvertically":32039,"Ġcheeks":32040,"pick":32041,"Ġcrispy":32042,"Ġsqueezing":32043,"ĠBene":32044,"ĠTrailer":32045,"ĠKM":32046,"acceptable":32047,"ĠSetting":32048,"Ġsupernatural":32049,"ĠEz":32050,"Ġvenom":32051,"ĠFrey":32052,"Ġpulp":32053,"Had":32054,"centered":32055,"metics":32056,"Kent":32057,"ĠDOI":32058,"kr":32059,"ĠWHEN":32060,"Ġtakeoff":32061,"isf":32062,"uko":32063,"Ġquasi":32064,"Ġveggies":32065,"Ġpesticide":32066,"Ġstimulating":32067,"Ġacknowledgement":32068,"Ġattained":32069,"ĠBackground":32070,"281":32071,"317":32072,"ĠTrees":32073,"Ġdetractors":32074,"Ġannouncer":32075,"Ġjoyful":32076,"ĠElf":32077,"istration":32078,"phi":32079,"Ġprogressively":32080,"mini":32081,"Ġcontraception":32082,"asca":32083,"ishops":32084,"Ġmisunderstood":32085,"Ġinitiating":32086,"ĠConversely":32087,"338":32088,"080":32089,"idation":32090,"ĠGoes":32091,"Ġimprov":32092,"Ġswapping":32093,"Vict":32094,"Ġdevoid":32095,"fighter":32096,"ĠMori":32097,"Ġvoy":32098,"ĠElev":32099,"ĠAim":32100,"Ġtrustworthy":32101,"Leg":32102,"675":32103,"ĠPossible":32104,"Crunch":32105,"ĠRings":32106,"Ġph
ony":32107,"Ġbladder":32108,"ĠChall":32109,"Spot":32110,"oak":32111,"Was":32112,"ĠFAM":32113,"ĠAGA":32114,"ĠFifa":32115,"Ġenclosed":32116,"Ġanthrop":32117,"faith":32118,"ĠAux":32119,"Ġgracious":32120,"roller":32121,"Ġdowntime":32122,"swing":32123,"Ġcamouflage":32124,"ĠCosts":32125,"Ġliv":32126,"ricular":32127,"ĠUran":32128,"Ġdisapproval":32129,"Ġpropriet":32130,"bits":32131,"Ġmafia":32132,"ĠSCHOOL":32133,"ĠPrepar":32134,"button":32135,"Almost":32136,"Ġpastoral":32137,"ĠDove":32138,"Hol":32139,"Ġimposes":32140,"ĠDram":32141,"lys":32142,"ĠSAS":32143,"Ġwiring":32144,"271":32145,"ĠModels":32146,"Ġoutpost":32147,"etics":32148,"Ġinsulted":32149,"ĠMongolia":32150,"Ġoverth":32151,"Haw":32152,"ĠHomer":32153,"itta":32154,"raining":32155,"Ġevidently":32156,"raphic":32157,"impact":32158,"Ġfranch":32159,"Ġ2100":32160,"Ġapproximate":32161,"Ġcartoons":32162,"Ġbackups":32163,"umbing":32164,"Ġforceful":32165,"ĠShad":32166,"Ġsurges":32167,"Ġperf":32168,"Ġdele":32169,"Ġquieter":32170,"ĠHorowitz":32171,"ĠDX":32172,"anners":32173,"ĠNinja":32174,"ĠScript":32175,"ĠElise":32176,"collect":32177,"Ġgrading":32178,"ĠBethesda":32179,"Kids":32180,"ĠTelephone":32181,"Ġpreferring":32182,"Ġreconcil":32183,"Ġmango":32184,"ĠHail":32185,"ĠCitizenship":32186,"Master":32187,"cular":32188,"Ġstuffing":32189,"ĠAlive":32190,"ALLY":32191,"Ġchi":32192,"ĠDynam":32193,"ĠRosenthal":32194,"Ġpurity":32195,"Ġtemp":32196,"ĠHAL":32197,"employ":32198,"Ġplentiful":32199,"ĠComed":32200,"Ġstacks":32201,"ĠHuge":32202,"ĠOlder":32203,"Ġsclerosis":32204,"ONY":32205,"Ġfilmmaking":32206,"chance":32207,"Cry":32208,"Ġworkflow":32209,"ĠPersonnel":32210,"awed":32211,"ĠColumn":32212,"Ġuncomp":32213,"Ġdiscriminated":32214,"Ġpts":32215,"Ġallev":32216,"ĠKinn":32217,"meal":32218,"Ġnovice":32219,"Ġcrest":32220,"Ġhearty":32221,"Ġlowers":32222,"inqu":32223,"ĠPlayoffs":32224,"ĠHyp":32225,"Ġautos":32226,"Ġindec":32227,"Ġnighttime":32228,"Ġreflex":32229,"306":32230,"disciplinary":32231,"ophe":32232,"contact":32233,"Ġachievable":32234,"Ġslab":32235,"ĠMessage":32236,"ĠVMware":32237,"ĠDia":32238,"REG":32239,"Ġconfisc":32240,"ĠMechan":32241,"Ġphenomena":32242,"Ġsequencing":32243,"Ġshaming":32244,"Ġcompilation":32245,"ĠAges":32246,"Ġmastered":32247,"Ġagony":32248,"Ġrestrain":32249,"ĠLyme":32250,"Which":32251,"ĠBarney":32252,"ĠConcept":32253,"Ġsuperheroes":32254,"ĠPsychology":32255,"Ġreminis":32256,"violence":32257,"Lead":32258,"Da":32259,"VEN":32260,"ERC":32261,"ĠVoter":32262,"Ġbetray":32263,"Ġsavage":32264,"driver":32265,"IFT":32266,"Chain":32267,"angler":32268,"'-":32269,"lain":32270,"ĠRatt":32271,"bis":32272,"iverse":32273,"Ġdensely":32274,"Ġuncom":32275,"Ġunsuspecting":32276,"Ġstimulation":32277,"diff":32278,"Ġskins":32279,"ĠRiding":32280,"ategic":32281,"ĠUnderstand":32282,"occup":32283,"ĠCooking":32284,"Ġschizophrenia":32285,"ĠKoen":32286,"Ġcomrades":32287,"HY":32288,"Ġfab":32289,"ĠRowling":32290,"Allen":32291,"ĠJUL":32292,"Ġembryos":32293,"UU":32294,"ĠCAT":32295,"Ġtidy":32296,"finger":32297,"ĠCake":32298,"Ġrightfully":32299,"religious":32300,"Ġ407":32301,"Gal":32302,"408":32303,"Ġgrievance":32304,"Ġswallowed":32305,"251":32306,"283":32307,"ĠBarcl":32308,"opter":32309,"Ġpedoph":32310,"Ġcured":32311,"Ġestablishes":32312,"increasing":32313,"tics":32314,"articles":32315,"Ġunethical":32316,"authored":32317,"Ġanchors":32318,"ĠContra":32319,"Ġventured":32320,"ĠCoh":32321,"Ġpuff":32322,"heddar":32323,"Ġomission":32324,"Ġdich":32325,"ceed":32326,"Ġscares":32327,"Ġdoctoral":32328,"293":32329,"ĠUnt":32330,"Ġdop":32331,"ĠInjury":32332,"ificantly":32333,"ĠRift":32334,"ĠOrders":
32335,"Ġmobilize":32336,"particularly":32337,"Ġchilled":32338,"Reports":32339,"redibly":32340,"ĠGuru":32341,"Ġvalleys":32342,"Ġtextures":32343,"Ġreuse":32344,"roit":32345,"unts":32346,"Ġirreversible":32347,"Ġwarships":32348,"Ġpus":32349,"Ġpeeled":32350,"Ġthirst":32351,"Ġgrapple":32352,"busters":32353,"Ġnort":32354,"ĠDates":32355,"Safe":32356,"Ġbirthplace":32357,"hemoth":32358,"Ġvile":32359,"Ġ306":32360,"Ram":32361,"activated":32362,"ĠAero":32363,"Ġbutcher":32364,"ĠKnock":32365,"Ġdisturb":32366,"Ġtotality":32367,"tted":32368,"Ġlegit":32369,"cking":32370,"nikov":32371,"Ġfavoring":32372,"lang":32373,"Ġrightful":32374,"orum":32375,"!!!!":32376,"ĠMinute":32377,"Ġpostings":32378,"Java":32379,"510":32380,"Ġmicrobes":32381,"Ġsixteen":32382,"entimes":32383,"Ġbulb":32384,"Ġgoalt":32385,"Ġhumiliated":32386,"ansom":32387,"roach":32388,"Ġgrouping":32389,"hari":32390,"Ġcler":32391,"Ġstared":32392,"ĠSymptoms":32393,"Ġbasil":32394,"Whenever":32395,"ĠWhoever":32396,"Oil":32397,"ĠJericho":32398,"ĠAlm":32399,"Pol":32400,"Hur":32401,"Ġupro":32402,"ĠSpo":32403,"hammer":32404,"Mur":32405,"ĠTorch":32406,"Ġfrequencies":32407,"ĠExpansion":32408,"Ġparalysis":32409,"igon":32410,"ĠSail":32411,"Ġsilently":32412,"Ġrevolver":32413,"Ġstockpile":32414,"Ġpessimistic":32415,"ESA":32416,"Ġdisclaim":32417,"Ġdemocracies":32418,"ĠTales":32419,"ĠAngry":32420,"ĠWhitman":32421,"ĠOri":32422,"Ġtransitioned":32423,"behind":32424,"ĠLAN":32425,"Ġcav":32426,"ĠJazeera":32427,"KC":32428,"ĠInspect":32429,"irty":32430,"ĠAin":32431,"ĠOrig":32432,"Ġobscene":32433,"Ġdormant":32434,"Ġharb":32435,"ĠWiz":32436,"ĠAdolf":32437,"Ġvic":32438,"Ġdenouncing":32439,"Ġye":32440,"aques":32441,"Ġomn":32442,"Ġassemblies":32443,"nosis":32444,"Ġadmon":32445,"Ġanguish":32446,"Ġvag":32447,"YE":32448,"ĠMacro":32449,"Ġrubbing":32450,"Ġreplicated":32451,"Moon":32452,"ĠGuitar":32453,"Ġcentimeters":32454,"amily":32455,"ĠAmes":32456,"Ġchlorine":32457,"Perhaps":32458,"Ġpartisans":32459,"soc":32460,"Ġvagina":32461,"Ġtrove":32462,"ĠYES":32463,"Ġtherapists":32464,"Ġnods":32465,"Ġhanged":32466,"Ġridge":32467,"Ġhaz":32468,"ĠmacOS":32469,"Ġske":32470,"ĠShia":32471,"Ġsteril":32472,"Ġalmond":32473,"ĠRockefeller":32474,"Ġintrinsic":32475,"Certainly":32476,"Ġsublime":32477,"Earn":32478,"abet":32479,"Ġframeworks":32480,"ogical":32481,"ilst":32482,"ipal":32483,"Ġrescuing":32484,"ĠWatergate":32485,"Ġ231":32486,"ĠNano":32487,"ighthouse":32488,"olph":32489,"Ġ312":32490,"Ġhealed":32491,"ĠTomb":32492,"Ġsubst":32493,"Ġsulph":32494,"ĠNewsp":32495,"ĠLama":32496,"venue":32497,"387":32498,"productive":32499,"ĠNEED":32500,"minus":32501,"ĠPages":32502,"cand":32503,"ĠClover":32504,"ĠForensic":32505,"ryn":32506,"ogle":32507,"ocr":32508,"Ġvaccinations":32509,"cies":32510,"ĠMek":32511,"Ġunaffected":32512,"Ġfetal":32513,"ĠDino":32514,"Ġhemisphere":32515,"Ġfroze":32516,"ĠPeg":32517,"Ġmicroscope":32518,"Ġmoderates":32519,"ĠGEN":32520,"ĠHawai":32521,"Ġstagn":32522,"Absolutely":32523,"practice":32524,"IBLE":32525,"cture":32526,"ĠAshe":32527,"Ġcondoms":32528,"Ġpoked":32529,"training":32530,"Ġintermedi":32531,"347":32532,"Ġcardinal":32533,"ĠSpoon":32534,"Ġsupp":32535,"Ġpreviews":32536,"Service":32537,"ĠBeam":32538,"Ġtranscend":32539,"Fresh":32540,"Sure":32541,"Ġ4000":32542,"idential":32543,"ĠCoinbase":32544,"Ġworkings":32545,"ĠPI":32546,"Ġpassionately":32547,"Ġdecisively":32548,"ĠInspection":32549,"Ġinvoke":32550,"Ġstain":32551,"Ġcleaners":32552,"Ġregulates":32553,"Ġshone":32554,"ĠEVERY":32555,"istance":32556,"map":32557,"Ġredu":32558,"Ġoccupies":32559,"Ġprocure":32560,"acket":32561,"roman":32562,"Ġilleg
":32563,"Ġleaps":32564,"yond":32565,"Ġyarn":32566,"ĠLTD":32567,"ĠCONTR":32568,"ĠRestoration":32569,"ĠCDs":32570,"Ġdrinkers":32571,"ĠJordanian":32572,"Ġabl":32573,"Ġdisparate":32574,"Ġprimed":32575,"ĠFirearms":32576,"artz":32577,"Ġindispensable":32578,"Ter":32579,"Ġfright":32580,"Ġmarkedly":32581,"Ġroam":32582,"ĠJurassic":32583,"Ġfeder":32584,"Ġpepp":32585,"ĠDV":32586,"Ġpancakes":32587,"sweet":32588,"Ġunmatched":32589,"Ġassembling":32590,"Ultimately":32591,"Ġendeavour":32592,"Ġluckily":32593,"Ġbitch":32594,"Ġelegance":32595,"eers":32596,"drop":32597,"credit":32598,"Ġscourge":32599,"ĠMinimum":32600,"Ġimpatient":32601,"Ġhunted":32602,"ĠGoddard":32603,"Kal":32604,"Ġmined":32605,"Ġcalves":32606,"Ġ234":32607,"Ġplank":32608,"Ġinjecting":32609,"ĠKaufman":32610,"ĠCompliance":32611,"tone":32612,"Ġ345":32613,"Ġdazz":32614,"ĠClarks":32615,"Ġcomprehens":32616,"Ġpist":32617,"Ġrhythms":32618,"Ġreserv":32619,"337":32620,"ĠIDF":32621,"Ġshouts":32622,"midt":32623,"323":32624,"Ġsoothing":32625,"Ġadministr":32626,"Ġgloomy":32627,"Ġfutile":32628,"ĠProhibition":32629,"upon":32630,"ĠAnglic":32631,"seeking":32632,"Ġdodge":32633,"Ds":32634,"ĠGrants":32635,"editor":32636,"ĠInquis":32637,"Ġ1929":32638,"decl":32639,"ĠPorts":32640,"ĠCure":32641,"ĠDPRK":32642,"oct":32643,"Ġvocabulary":32644,"Ġcling":32645,"298":32646,"Ġpeac":32647,"Ġantibodies":32648,"dor":32649,"ĠWorse":32650,"Ġsmelled":32651,"Ġleash":32652,"MED":32653,"Ġdisinteg":32654,"Ġtruthful":32655,"Ġsalesman":32656,"Ġsquares":32657,"susp":32658,"Ġcraving":32659,"Ġwizard":32660,"moral":32661,"ĠQué":32662,"Anything":32663,"Ġfalsehood":32664,"ARI":32665,"Ġcoworkers":32666,"Ġthy":32667,"outher":32668,"Ġbrushing":32669,"ĠProtest":32670,"ĠMF":32671,"abba":32672,"lead":32673,"ĠExhibit":32674,"Ga":32675,"ĠFranks":32676,"Ġdictates":32677,"illegal":32678,"Ġrelayed":32679,"Ġploy":32680,"ĠاÙĦ":32681,"ĠDocuments":32682,"Ġtint":32683,"ĠYuan":32684,"Ġdepended":32685,"Mir":32686,"ĠIntrodu":32687,"Ġrecourse":32688,"oqu":32689,"ĠTED":32690,"Ġdifferentiated":32691,"ĠWalls":32692,"Ġsentimental":32693,"Ġantis":32694,"retion":32695,"comes":32696,"ĠWORLD":32697,"Ġcoax":32698,"ĠTatt":32699,"ĠGingrich":32700,"2006":32701,"ĠBrut":32702,"Second":32703,"posed":32704,"shots":32705,"Ġ313":32706,"idian":32707,"alking":32708,"Ġdens":32709,"Ġgif":32710,"akings":32711,"Ġkeywords":32712,"Ġchast":32713,"Ġadversary":32714,"Ġnick":32715,"iasis":32716,"ĠLegisl":32717,"Ġcoff":32718,"ĠOriental":32719,"ĠMorg":32720,"ĠHAR":32721,"Ġlegalizing":32722,"Ġbanter":32723,"ĠTart":32724,"ĠTRI":32725,"Ġantagon":32726,"ĠGF":32727,"oler":32728,"ĠUFO":32729,"Therefore":32730,"ĠOsama":32731,"ĠStructure":32732,"apps":32733,"Ġpee":32734,"ĠSomehow":32735,"ĠOverwatch":32736,"ĠCasual":32737,"Ġdishon":32738,"SEE":32739,"ctive":32740,"andering":32741,"ĠTransformation":32742,"Andy":32743,"ĠFever":32744,"Ġspectator":32745,"Ġlash":32746,"Ġprotector":32747,"apy":32748,"Ġexhilar":32749,"aroo":32750,"Ġmamm":32751,"Ġbystand":32752,"acky":32753,"Ġdigestive":32754,"Ġamplified":32755,"Ġalpha":32756,"continue":32757,"Low":32758,"Ġdisgusted":32759,"356":32760,"script":32761,"Ġgenerational":32762,"ĠPassenger":32763,"sight":32764,"Ġcout":32765,"Ġhone":32766,"ulse":32767,"Ġignite":32768,"284":32769,"gow":32770,"Ġbinary":32771,"Ġincess":32772,"Review":32773,"607":32774,"ĠSurprise":32775,"Ġirritation":32776,"ĠBarth":32777,"ĠGum":32778,"Ġvideot":32779,"ĠFres":32780,"asons":32781,"Ġcollaborator":32782,"fal":32783,"ĠGon":32784,"Ġsettles":32785,"regular":32786,"Ġmiscarriage":32787,"cube":32788,"Ġsubord":32789,"ĠRegistered":32790,"Ġnotions":32791
,"zzy":32792,"Ġrevert":32793,"OFF":32794,"Ġhasht":32795,"ĠPNG":32796,"Ġunimaginable":32797,"builders":32798,"Taylor":32799,"ĠPAY":32800,"Ġ).":32801,"Ġ238":32802,"ĠLAST":32803,"MAS":32804,"Ġillustrations":32805,"Ġparody":32806,"Ġdispersed":32807,"ĠRoses":32808,"Ġestimation":32809,"ĠGets":32810,"Patrick":32811,"CHA":32812,"Ġmisdem":32813,"agate":32814,"alter":32815,"Ġgeo":32816,"Ġenormously":32817,"Ġarrogance":32818,"Ġpert":32819,"Ġmeta":32820,"ĠJuno":32821,"iov":32822,"imov":32823,"Ġchores":32824,"acan":32825,"Paris":32826,"313":32827,"Lewis":32828,"Ġwillingly":32829,"ERA":32830,"Ġencaps":32831,"ilk":32832,"Ġnodes":32833,"Ġenzyme":32834,"want":32835,"Ġtolerant":32836,"Ġcondos":32837,"Ġasserts":32838,"Ġcanon":32839,"Ġscanned":32840,"bishop":32841,"Ġperched":32842,"util":32843,"ĠBonus":32844,"create":32845,"ĠFuk":32846,"Ġmotif":32847,"Ġcontemplate":32848,"ĠBEN":32849,"imir":32850,"Ġacadem":32851,"uvian":32852,"ĠIdeas":32853,"ĠCY":32854,"Ġants":32855,"Ġprostitutes":32856,"2005":32857,"Spring":32858,"ĠBarrel":32859,"ĠAunt":32860,"ĠLudwig":32861,"ĠHerm":32862,"PRO":32863,"obiles":32864,"rack":32865,"STER":32866,"ucket":32867,"Ġmun":32868,"Ġ419":32869,"ICES":32870,"Ġcardio":32871,"Ġtrenches":32872,"Nation":32873,"yahoo":32874,"Ġburd":32875,"Ġnost":32876,"Ġappropriations":32877,"ĠChili":32878,"Josh":32879,"GW":32880,"Ġoppressed":32881,"ĠBEFORE":32882,"Ġmurderous":32883,"Pen":32884,"achable":32885,"Ġrive":32886,"Ġculmin":32887,"Ġdefin":32888,"ĠMord":32889,"idate":32890,"ĠChim":32891,"ource":32892,"ĠElectro":32893,"orthy":32894,"Ġcalendars":32895,"regation":32896,"Ġretrospect":32897,"ĠTribal":32898,"ĠHes":32899,"Ġcran":32900,"Ġcreditor":32901,"Ġfibers":32902,"note":32903,"idays":32904,"ĠSebast":32905,"ĠKitty":32906,"Ġplainly":32907,"ĠLAPD":32908,"Ġtrumpet":32909,"ĠAppropriations":32910,"Hill":32911,"ĠVeget":32912,"296":32913,"lated":32914,"othes":32915,"ibrarian":32916,"Listen":32917,"nex":32918,"WHO":32919,"Ġshampoo":32920,"Ġclaimants":32921,"Ġisol":32922,"Ġunchecked":32923,"Ġmov":32924,"umo":32925,"ĠLens":32926,"Ġdiscreet":32927,"Ġrespectfully":32928,"Ġreclaimed":32929,"ĠHatt":32930,"thus":32931,"ĠFlo":32932,"Ġsumm":32933,"phas":32934,"ĠHaitian":32935,"Ġstrife":32936,"Ġabound":32937,"verted":32938,"Ġpatronage":32939,"449":32940,"Ġprelim":32941,"ĠZhu":32942,"ĠRevel":32943,"adic":32944,"Ġminded":32945,"ĠStability":32946,"Ġresembling":32947,"Ġvending":32948,"ischer":32949,"Ġkisses":32950,"Ġsuperiority":32951,"Ġinfinite":32952,"ISC":32953,"880":32954,"Ġappease":32955,"VO":32956,"404":32957,"ECH":32958,"gam":32959,"River":32960,"metal":32961,"determination":32962,"Cook":32963,"Ġbuds":32964,"Ġ(%)":32965,"ĠCreated":32966,"Ġstrut":32967,"Ġ425":32968,"Ġverte":32969,"ĠOrb":32970,"Ġweaving":32971,"261":32972,"Ġflyers":32973,"spons":32974,"ĠCovenant":32975,"570":32976,"Ġintangible":32977,"ĠBJ":32978,"ĠStead":32979,"ĠBrune":32980,"pain":32981,"independent":32982,"Ball":32983,"witch":32984,"ĠIon":32985,"Ġpupp":32986,"Cash":32987,"ĠConvert":32988,"Ġimpede":32989,"broad":32990,"onew":32991,"Ġsynergy":32992,"Ġcoined":32993,"620":32994,"ivalent":32995,"ĠInfect":32996,"ĠAqua":32997,"Together":32998,"ĠChemistry":32999,"ĠURL":33000,"ampion":33001,"Ġdeclarations":33002,"Ġaffirmative":33003,"umper":33004,"ĠTarant":33005,"Ġstereotype":33006,"Ġbookstore":33007,"incre":33008,"Ġchipset":33009,"Ġangst":33010,"Jose":33011,"laus":33012,"Ġheater":33013,"ipers":33014,"Ġeminent":33015,"hook":33016,"sticks":33017,"ĠCoul":33018,"Ġmildly":33019,"SG":33020,"Ġworm":33021,"Ġdisable":33022,"Ġperfume":33023,"ISTER":33024,"Ġgathers":33025,
"ĠLotus":33026,"hyp":33027,"actus":33028,"Ġdistinctly":33029,"fifth":33030,"!),":33031,"ĠCrunch":33032,"Ġcohesive":33033,"Ġfortunately":33034,"Ġninety":33035,"Ġcartels":33036,"empl":33037,"Direct":33038,"Ġcommuting":33039,"ĠSX":33040,"ractive":33041,"Ġtranslating":33042,"ĠAQ":33043,"Ġslay":33044,"abuse":33045,"ĠProc":33046,"ĠCantor":33047,"ĠTas":33048,"Sir":33049,"Thom":33050,"ĠCHRIST":33051,"Ġreceptive":33052,"ĠCornel":33053,"Arab":33054,"Ġgrammar":33055,"Ġhandlers":33056,"Ġalloy":33057,"Ġthinly":33058,"adem":33059,"Ġproponent":33060,"ĠPVC":33061,"Ġstump":33062,"tom":33063,"rets":33064,"iciency":33065,"780":33066,"Ġ311":33067,"ĠClapper":33068,"ITAL":33069,"Ùħ":33070,"Ġnarrator":33071,"Ġblond":33072,"Ġintermittent":33073,"Ġcollabor":33074,"646":33075,"Ġmetast":33076,"Ġregeneration":33077,"ĠLegendary":33078,"Ġgenitals":33079,"Ġbartender":33080,"atson":33081,"Okay":33082,"Ġpassages":33083,"Ġsubstituted":33084,"orr":33085,"ALTH":33086,"Ġartic":33087,"Ġascent":33088,"Ġmatured":33089,"Ġterminology":33090,"served":33091,"ĠDeliver":33092,"Ġattic":33093,"anges":33094,"Ġrenaissance":33095,"Ġbleed":33096,"claimer":33097,"onse":33098,"Sec":33099,"Ġparticle":33100,"aneous":33101,"ateur":33102,"Ġzeal":33103,"ĠPets":33104,"Working":33105,"ĠRespect":33106,"Ġsermon":33107,"ĠProvided":33108,"Ġfilibuster":33109,"Ġabolished":33110,"reviewed":33111,"cription":33112,"Ġrevers":33113,"atered":33114,"435":33115,"Ġwhe":33116,"ometown":33117,"UFC":33118,"products":33119,"Winter":33120,"Ġ304":33121,"Ġsporadic":33122,"orough":33123,"EB":33124,"ĠAgric":33125,"ĠMTA":33126,"wic":33127,"Ġpowerless":33128,"Ġcarrot":33129,"ww":33130,"Ġabsorption":33131,"ĠTyphoon":33132,"Turkey":33133,"Ġproclaim":33134,"Ġhikers":33135,"Ġpractise":33136,"/$":33137,"Ġfingertips":33138,"Ġbaff":33139,"vu":33140,"Ġans":33141,"plug":33142,"Ġacquaintance":33143,"itement":33144,"ihar":33145,"Ġreluctantly":33146,"Ġforc":33147,"Ġguarant":33148,"ĠWanted":33149,"Walk":33150,"addle":33151,"unders":33152,"Fred":33153,"Ġtides":33154,"ĠBai":33155,"Ġcountering":33156,"raper":33157,"ursions":33158,"ĠFlav":33159,"pared":33160,"raised":33161,"Ñı":33162,"ĠDiff":33163,"Ġreload":33164,"ourses":33165,"ĠBurning":33166,"Ġwand":33167,"Ġledger":33168,"Ġcoughing":33169,"ĠLoren":33170,"Nazis":33171,"Ġcompile":33172,"Eight":33173,"icultural":33174,"yy":33175,"Ġ1932":33176,"Run":33177,"AIN":33178,"Ġattractiveness":33179,"ĠOmn":33180,"Ġconfer":33181,"compliance":33182,"Ġembed":33183,"Steven":33184,"2001":33185,"Ġdecre":33186,"Ġprompts":33187,"ĠHare":33188,"Ġleaping":33189,"Ġslaughtered":33190,"Ġforfeiture":33191,"342":33192,"Charl":33193,"CDC":33194,"ographically":33195,"Ġduplicate":33196,"Ġdistracting":33197,"examination":33198,"Ġpeas":33199,"Ġcatchy":33200,"Ġdives":33201,"ĠAda":33202,"Hay":33203,"Ġenthusiastically":33204,"Ġfunky":33205,"kay":33206,"EVA":33207,"Ġpsychologists":33208,"Ġancestry":33209,"iyah":33210,"ifter":33211,"nob":33212,"518":33213,"rouse":33214,"Ġchord":33215,"Ġcone":33216,"Ġbarracks":33217,"ĠRoyale":33218,"ĠIntegration":33219,"Ġtrolling":33220,"ĠSynt":33221,"andals":33222,"ĠGrain":33223,"ĠNeck":33224,"618":33225,"Ġrapist":33226,"pins":33227,"Ġwitty":33228,"Ġdehydration":33229,"arlane":33230,"Ġimmoral":33231,"Ġaccum":33232,"ĠMcAuliffe":33233,"slow":33234,"Ġinjust":33235,"Ġ1700":33236,"Ġcarbs":33237,"Ġintel":33238,"Non":33239,"isks":33240,"Tre":33241,"Ġinterviewer":33242,"sam":33243,"Ġdelve":33244,"Ġadmirable":33245,"ĠROM":33246,"ĠHispanics":33247,"Ġimpart":33248,"Ġunderrated":33249,"Ġvictimized":33250,"ĠPsych":33251,"ppings":33252,"Ġ610":33253,"pole":33
254,"Ġdiner":33255,"ĠScale":33256,"Ġunforeseen":33257,"surprisingly":33258,"opus":33259,"ĠCOURT":33260,"Ġjuggling":33261,"ĠFacilities":33262,"Aid":33263,"ĠHPV":33264,"Ġcrawling":33265,"flu":33266,"etary":33267,"ĠHarriet":33268,"329":33269,"ĠSod":33270,"ĠBiological":33271,"birth":33272,"ribed":33273,"Ġpulses":33274,"396":33275,"eways":33276,"ĠAlma":33277,"nov":33278,"015":33279,"ricane":33280,"agna":33281,"Ak":33282,"ĠClaim":33283,"Ġpref":33284,"Ġinterfaces":33285,"ĠADHD":33286,"604":33287,"ZE":33288,"venture":33289,"Ġascend":33290,"ĠGou":33291,"Ġpriceless":33292,"redo":33293,"kw":33294,"Conf":33295,"Ġmah":33296,"Ġpoets":33297,"Ġstalk":33298,"Ġencamp":33299,"Ġhopped":33300,"Ġmelody":33301,"JECT":33302,"eming":33303,"Ġbewild":33304,"aternal":33305,"uchs":33306,"dit":33307,"ĠTransmission":33308,"Lake":33309,"Ġatoms":33310,"ĠThoughts":33311,"ilts":33312,"volume":33313,"Ġsocioeconomic":33314,"atisf":33315,"Ġnarr":33316,"zinski":33317,"ymes":33318,"episode":33319,"Ġinherit":33320,"Ġintending":33321,"Ġarenas":33322,"uras":33323,"burning":33324,"334":33325,"teenth":33326,"Ġsophistication":33327,"Ġscreenshots":33328,"Ġautistic":33329,"lip":33330,"paper":33331,"Ġmonopol":33332,"799":33333,"forms":33334,"ocrats":33335,"Ġpineapple":33336,"Ġbegs":33337,"Ġpersecuted":33338,"Ġsubscribed":33339,"Ġelic":33340,"ĠPRESIDENT":33341,"297":33342,"Ġpreferential":33343,"Ġpyramid":33344,"Ġconvergence":33345,"Ġwob":33346,"Project":33347,"ĠAluminum":33348,"ĠJPM":33349,"ĠBAT":33350,"Ġdolphins":33351,"018":33352,"healthy":33353,"ĠCG":33354,"ĠEffective":33355,"worm":33356,"ĠEas":33357,"olicited":33358,"ĠUSE":33359,"ĠCaval":33360,"Ġswirl":33361,"Ġspaghetti":33362,"Ġinward":33363,"Republican":33364,"Ġpublicized":33365,"Ġeconomical":33366,"Ġsalsa":33367,"ĠTitanic":33368,"dot":33369,"Ġcontro":33370,"ĠBangl":33371,"iban":33372,"ĠKlux":33373,"Ġhinges":33374,"610":33375,"Ġvalves":33376,"profits":33377,"Wonder":33378,"Ġorient":33379,"Ġsque":33380,"Ġprivatization":33381,"Obama":33382,"Thousands":33383,"ĠTasman":33384,"Ġmaze":33385,"eem":33386,"Ġsurvives":33387,"istant":33388,"Ġenriched":33389,"Ġencl":33390,"Ġcompliments":33391,"ĠShoes":33392,"Ġinsanity":33393,"consider":33394,"agog":33395,"Ġbaffled":33396,"Ġ°":33397,"ĠWordPress":33398,"qus":33399,"usual":33400,"stall":33401,"Deb":33402,"ĠRothschild":33403,"Ġesche":33404,"Ġsoph":33405,"Ġambiguous":33406,"negative":33407,"Ġdiscouraging":33408,"Alexander":33409,"319":33410,"Ġsummon":33411,"ipation":33412,"000000":33413,"Ġminimalist":33414,"Ġenraged":33415,"777":33416,"Ġplanetary":33417,"Ġthroughput":33418,"Ġtemperament":33419,"ĠNIC":33420,"ileged":33421,"minster":33422,"ĠPLEASE":33423,"Ġexagger":33424,"ĠDescription":33425,"Ġagitated":33426,"Ġimmortal":33427,"Ġrenders":33428,"Ġcharisma":33429,"sequ":33430,"Ġmajorities":33431,"Ġfreaking":33432,"ĠAdvice":33433,"Ġembodies":33434,"stable":33435,"Ġcustomization":33436,"started":33437,"ĠAutism":33438,"Ġparticipates":33439,"ĠUTC":33440,"Marco":33441,"Ġoddly":33442,"Ġantiqu":33443,"ĠPear":33444,"ĠFey":33445,"Ġcertify":33446,"Ġdisillusion":33447,"ĠPhysicians":33448,"obl":33449,"855":33450,"Ġelim":33451,"Ġ335":33452,"Ol":33453,"ĠSear":33454,"Ġnuances":33455,"past":33456,"Sa":33457,"ĠSlov":33458,"Ġfiltered":33459,"Ġanalogy":33460,"Ġformulate":33461,"Ġarmies":33462,"Ġpuls":33463,"fters":33464,"ilipp":33465,"ĠHOT":33466,"485":33467,"ĠAfghans":33468,"Ġtopical":33469,"ĠBunny":33470,"seeing":33471,"Ġeloqu":33472,"Ġkidneys":33473,"ĠDEM":33474,"pent":33475,"Ġhus":33476,"stores":33477,"ĠProtestant":33478,"Comm":33479,"label":33480,"Kings":33481,"ĠPurpo
se":33482,"âĢ¦..":33483,"Ġaccumulating":33484,"calling":33485,"Ġgiveaways":33486,"Ġpredicament":33487,"Ġtyp":33488,"Ġtraveler":33489,"003":33490,"impro":33491,"fac":33492,"Ġmapped":33493,"itious":33494,"Ġmasculinity":33495,"Ġtantal":33496,"ĠDJs":33497,"Ġviewpoints":33498,"Burn":33499,"ĠWii":33500,"pak":33501,"ĠEB":33502,"Ġhinge":33503,"Ġfacets":33504,"Ġphotographic":33505,"Ġcompiling":33506,"Ġdecks":33507,"Ġarticulated":33508,"Federal":33509,"crim":33510,"llah":33511,"Ġfiasco":33512,"ĠLIST":33513,"oute":33514,"ĠDraper":33515,"ĠLaos":33516,"Ġclimbers":33517,"raph":33518,"ĠDek":33519,"WAY":33520,"Ġgreets":33521,"Ġoppressive":33522,"otor":33523,"otiation":33524,"\":[":33525,"Record":33526,"mining":33527,"Town":33528,"Ġfavorably":33529,"ĠYoutube":33530,"William":33531,"Ġlan":33532,"âĢ²":33533,"ĠSpec":33534,"Ġtranquil":33535,"ĠClient":33536,"oln":33537,"celona":33538,"Ġrealistically":33539,"Ġmisplaced":33540,"ĠBie":33541,"bye":33542,"Yo":33543,"465":33544,"ĠMadagascar":33545,"oplan":33546,"arist":33547,"Ġconfines":33548,"Ġï":33549,"awks":33550,"Ġpiracy":33551,"Ġunwelcome":33552,"Intel":33553,"Ġparanoid":33554,"CLAIM":33555,"Ġblush":33556,"united":33557,"Ġmotivational":33558,"ĠVII":33559,"Ġdiabetic":33560,"Ġantiv":33561,"Ġdissect":33562,"Ġbestselling":33563,"Ġfluffy":33564,"ĠRemote":33565,"Ġvert":33566,"Correct":33567,"Ġcolossal":33568,"Ġcontrasts":33569,"Ġcirca":33570,"ĠDamage":33571,"Ġunrel":33572,"Ġdiscrepancy":33573,"ĠCIS":33574,"ĠCLASS":33575,"ilty":33576,"Ġsynopsis":33577,"emed":33578,"cakes":33579,"ibal":33580,"inea":33581,"ienced":33582,"Ġimplicit":33583,"ĠLOOK":33584,"Ġsilhouette":33585,"affiliated":33586,"ĠHalo":33587,"377":33588,"Ġlyr":33589,"ĠVide":33590,"herent":33591,"Ġbadges":33592,"plays":33593,"orea":33594,"Ġjammed":33595,"cancer":33596,"ĠYep":33597,"racted":33598,"ĠDisability":33599,"Ġfooth":33600,"friends":33601,"Ġbloated":33602,"Bet":33603,"ĠAntioch":33604,"Ġintrodu":33605,"Ġannexed":33606,"ivism":33607,"ĠFlickr":33608,"pants":33609,"Ġinterruption":33610,"645":33611,"ĠIly":33612,"ĠOss":33613,"ĠAMA":33614,"Ġpolitely":33615,"Ġnatives":33616,"Ġrushes":33617,"enges":33618,"ĠHarm":33619,"Ġdestroyer":33620,"ĠEstimates":33621,"Ġtransforms":33622,"Ġinvariably":33623,"Ġcac":33624,"iency":33625,"599":33626,"Ġconstitutionally":33627,"Ġrappers":33628,"ĠSettlement":33629,"icz":33630,"Ġhardened":33631,"citizens":33632,"Ġcircling":33633,"Ġtrapping":33634,"Ġguaranteeing":33635,"690":33636,"agher":33637,"Ġarcade":33638,"Ġfanc":33639,"Ġslapping":33640,"OPS":33641,"Ġmasse":33642,"Ġpudding":33643,"Jac":33644,"ĠGraphics":33645,"Ġuptake":33646,"?,":33647,"Fair":33648,"ĠSatan":33649,"uffy":33650,"ĠGuatem":33651,"ĠTransaction":33652,"Ġunlocking":33653,"ĠLINE":33654,"Ġapprehens":33655,"Ġglean":33656,"291":33657,"Ġexacerbate":33658,"ĠTrave":33659,"ĠTrop":33660,"Supp":33661,"Ġqueens":33662,"cart":33663,"Ġscrolling":33664,"Ġox":33665,"cone":33666,"Matthew":33667,"ĠDIRECT":33668,"Ġbacker":33669,"Ġthyroid":33670,"Sarah":33671,"ĠEDIT":33672,"ĠActivision":33673,"352":33674,"Ġreinforcements":33675,"Ġding":33676,"Ġplush":33677,"Ġpeanuts":33678,"ĠFant":33679,"ĠPediatrics":33680,"Ġaccommodating":33681,"ĠPractices":33682,"Answer":33683,"racial":33684,"ĠConstant":33685,"740":33686,"strength":33687,"apist":33688,"Ġsynthes":33689,"ĠLeap":33690,"ĠFabric":33691,"Ġbrainstorm":33692,"obia":33693,"Ġconception":33694,"Ġtuberculosis":33695,"Ġmajestic":33696,"ĠTitus":33697,"ĠTee":33698,"Ġlikeness":33699,"ĠSEA":33700,"lite":33701,"Ġ950":33702,"sufficient":33703,"Ġtrem":33704,"Ġharshly":33705,"Ġredacted":33706,"Ġwelding":3370
7,"Ġperplex":33708,"Ġpoetic":33709,"Ġinsignificant":33710,"Ġware":33711,"Ġwandered":33712,"Ġmete":33713,"ĠSTART":33714,"Ġweaponry":33715,"opsy":33716,"shadow":33717,"Ġobsc":33718,"hare":33719,"ĠOPEN":33720,"Ġdiligent":33721,"Girls":33722,"Ġinitials":33723,"Start":33724,"ĠBrookings":33725,"ombs":33726,"Ġlashes":33727,"essor":33728,"Ġgravy":33729,"ĠUbuntu":33730,"Tree":33731,"Ġ435":33732,"Ġcellar":33733,"Ġaquarium":33734,"ĠPodesta":33735,"361":33736,"ĠController":33737,"Ġeru":33738,"reasonable":33739,"Ġpermissions":33740,"725":33741,"Ġadministering":33742,"Ġflirt":33743,"Ġfleeting":33744,"asive":33745,"Ġsubcontract":33746,"Ġfascist":33747,"Ġcabbage":33748,"science":33749,"Ġboiler":33750,"ioned":33751,"Ġintegrates":33752,"Ġresidue":33753,"KEY":33754,"Ġwi":33755,"Ġsquared":33756,"Unless":33757,"Ġmute":33758,"ĠTuc":33759,"Ġverb":33760,"Gary":33761,"Ġexperimentation":33762,"fee":33763,"chini":33764,"Ġmarrow":33765,"ĠBalt":33766,"Ġnodded":33767,"tn":33768,"Ġmissionary":33769,"OTO":33770,"Ġoptimum":33771,"555":33772,"Ġwhipping":33773,"aunts":33774,"ĠScene":33775,"Ġcharacterize":33776,"Ġretrospective":33777,"Ġutilizes":33778,"Ġhastily":33779,"older":33780,"ĠPW":33781,"Ġsleepy":33782,"020":33783,"ĠAcid":33784,"Ġridiculously":33785,"Ġgigg":33786,"649":33787,"Ġcrus":33788,"ĠShame":33789,"ĠTorn":33790,"finding":33791,"IPS":33792,"Ġplat":33793,"ometers":33794,"Ġamphib":33795,"ellow":33796,"ĠSpecies":33797,"commercial":33798,"Ġvirgin":33799,"Ġdarn":33800,"Ġsorely":33801,"Ġrespondent":33802,"Ġray":33803,"ĠCONS":33804,"Ġunequivocally":33805,"server":33806,"Ġdrip":33807,"ĠRazor":33808,"Ban":33809,"ĠHMS":33810,"Ġhijab":33811,"ĠMuss":33812,"Ġsandy":33813,"Ġaversion":33814,"Ġoverarching":33815,"Ġultr":33816,"ĠIraqis":33817,"Ġuninterrupted":33818,"Ġrouting":33819,"Ġundone":33820,"independence":33821,"gra":33822,"ysics":33823,"inflammatory":33824,"cussion":33825,"ĠDefinitely":33826,"Ġelastic":33827,"peer":33828,"ĠGiov":33829,"ĠMandarin":33830,"Ġscratches":33831,"Ġphysicist":33832,"Ġbestowed":33833,"usually":33834,"OULD":33835,"igration":33836,"Human":33837,"Dead":33838,"osph":33839,"bott":33840,"doctoral":33841,"Ġbending":33842,"Ġconfigurations":33843,"psych":33844,"db":33845,"ĠUD":33846,"Ġarteries":33847,"orically":33848,"Ġblasphemy":33849,"jj":33850,"checking":33851,"adian":33852,"IRD":33853,"ĠDialogue":33854,"Ġshielded":33855,"ĠVox":33856,"Dave":33857,"Ġturb":33858,"ĠMassive":33859,"ĠBMI":33860,"ĠNF":33861,"uced":33862,"ickle":33863,"ishable":33864,"Ġembody":33865,"ÙĪ":33866,"Senior":33867,"ĠResult":33868,"try":33869,"egu":33870,"401":33871,"ĠLoyal":33872,"Ġperilous":33873,"Ġdissu":33874,"Ġmythology":33875,"ĠWax":33876,"Jesus":33877,"ĠMotorsport":33878,"Ġadvis":33879,"ĠAki":33880,"ISM":33881,"tested":33882,"Ġplag":33883,"Ġriches":33884,"ĠOCT":33885,"ĠLocke":33886,"BG":33887,"Ġ460":33888,"rawl":33889,"ĠTermin":33890,"Ġ295":33891,"Ġchopping":33892,"KT":33893,"Ġconverts":33894,"Ask":33895,"alse":33896,"ĠKeynes":33897,"Ġrefuted":33898,"Ġrabbits":33899,"Ġbilingual":33900,"urse":33901,"ĠSalad":33902,"odiac":33903,"Ġsolidly":33904,"Dam":33905,"Ġpp":33906,"rities":33907,"Rah":33908,"itness":33909,"Ġsixty":33910,"332":33911,"cold":33912,"Ġhindered":33913,"Ġclipped":33914,"Ġreceptor":33915,"ĠHoms":33916,"Ġdusk":33917,"Ġarchae":33918,"LR":33919,"Ġrods":33920,"Ġ257":33921,"ĠSith":33922,"ĠPumpkin":33923,"ellation":33924,"ĠWD":33925,"Ġdecriminal":33926,"Ġusable":33927,"Ġcheerful":33928,"ĠInform":33929,"Ġbrushes":33930,"vier":33931,"ĠBrush":33932,"590":33933,"boost":33934,"guided":33935,"ĠMJ":33936,"Ġsatirical":33937,"ortion
":33938,"efficiency":33939,"Ġstrands":33940,"ĠWilde":33941,"Ġreproduce":33942,"verage":33943,"Ġlug":33944,"Ġhist":33945,"offer":33946,"Ġcollapses":33947,"Ġclerks":33948,"Ġairstrike":33949,"IPP":33950,"iscover":33951,"Ġnefarious":33952,"Ġstripe":33953,"Ġbona":33954,"ocon":33955,"Ġpunishments":33956,"ITED":33957,"ĠAltern":33958,"testing":33959,"Ġeerie":33960,"erous":33961,"Ġcaves":33962,"Ġcondemns":33963,"ĠDropbox":33964,"inese":33965,"axis":33966,"ĠRegistry":33967,"ĠMong":33968,"Ġbullies":33969,"Ġdocks":33970,"ĠAlter":33971,"rella":33972,"446":33973,"ĠDare":33974,"Ġvirtues":33975,"Ġdont":33976,"Value":33977,"ENE":33978,"received":33979,"Ġseaf":33980,"476":33981,"ilon":33982,"ĠKits":33983,"Ġrarity":33984,"Ġnurt":33985,"skin":33986,"ĠUL":33987,"ĠRegiment":33988,"terior":33989,"hate":33990,"ĠEstimated":33991,"ĠSilence":33992,"Ġorganism":33993,"ĠSigned":33994,"ĠIA":33995,"bite":33996,"Ġthicker":33997,"Ġeyeb":33998,"Ġjournalistic":33999,"ĠDisp":34000,"margin":34001,"Dri":34002,"Ġcomplexes":34003,"Ġimaginary":34004,"Ġrefuel":34005,"Ġmeticulous":34006,"Dub":34007,"Ġhaze":34008,"860":34009,"Ġproverbial":34010,"Ġozone":34011,"cale":34012,"resent":34013,"Ġdiscrete":34014,"boats":34015,"Ġ343":34016,"ĠRET":34017,"Ġsailor":34018,"hair":34019,"gear":34020,"Ġmalt":34021,"Ġpeach":34022,"ĠRabb":34023,"699":34024,"318":34025,"ĠVerge":34026,"Fin":34027,"ĠMighty":34028,"ierce":34029,"403":34030,"Ġdisenfranch":34031,"bass":34032,"nice":34033,"Ġsinks":34034,"ĠLaugh":34035,"367":34036,"ĠZur":34037,"Ġtravers":34038,"ĠMystery":34039,"onsense":34040,"ĠMonarch":34041,"Ġleapt":34042,"ergy":34043,"porate":34044,"display":34045,"ilet":34046,"Ġendemic":34047,"Bern":34048,"Ġpulmonary":34049,"Ġbroch":34050,"ĠManziel":34051,"Lyn":34052,"Repe":34053,"lda":34054,"hands":34055,"Ġtroublesome":34056,"Jordan":34057,"UTION":34058,"ĠALP":34059,"ĠLEG":34060,"Ġreconnaissance":34061,"ĠRNA":34062,"letters":34063,"ĠYounger":34064,"ĠLW":34065,"ĠSensor":34066,"388":34067,"Ġwielding":34068,"spr":34069,"Ġancestral":34070,"331":34071,"OTH":34072,"ĠAxis":34073,"irement":34074,"ĠCompact":34075,"voice":34076,"Ġpercussion":34077,"Ġendeav":34078,"Kate":34079,"ĠJACK":34080,"ĠMagnus":34081,"Ġinterconnected":34082,"ĠTraff":34083,"demon":34084,"Ġardent":34085,"ĠSomers":34086,"andum":34087,"346":34088,"heartedly":34089,"ayne":34090,"Design":34091,"melon":34092,"ĠCarib":34093,"Ġ1935":34094,"intention":34095,"cape":34096,"cend":34097,"organic":34098,"373":34099,"ĠRevival":34100,"ĠBLACK":34101,"Ġaspiration":34102,"yellow":34103,"bodied":34104,"Ġcrave":34105,"ĠIntelligent":34106,"ĠUnique":34107,"tab":34108,"386":34109,"ĠNess":34110,"Official":34111,"Stay":34112,"Ġcreat":34113,"iliary":34114,"rified":34115,"ĠPok":34116,"Ġabolition":34117,"Ka":34118,"ĠCourage":34119,"ĠDickens":34120,"rophic":34121,"ĠFAR":34122,"Ġfurnished":34123,".âĢĵ":34124,"rete":34125,"Ġvaginal":34126,"hner":34127,"ĠLONG":34128,"imates":34129,"ĠLiter":34130,"ĠMeasures":34131,"ĠBelg":34132,"\"-":34133,"ĠRaider":34134,"enario":34135,"rification":34136,"ĠFISA":34137,"ĠStab":34138,"Ġnar":34139,"mund":34140,"Tenn":34141,"Ġwakes":34142,"Ġcharg":34143,"okers":34144,"assment":34145,"Ġsiph":34146,"Ġludicrous":34147,"670":34148,"Ġcompositions":34149,"Ġpinnacle":34150,"ĠRankings":34151,"ĠTelescope":34152,"secure":34153,"Ġib":34154,"Ġaptly":34155,"paste":34156,"ĠJUST":34157,"RD":34158,"herry":34159,"sung":34160,"Ġmig":34161,"naires":34162,"Ġmigrated":34163,"Base":34164,"Ġamazingly":34165,"Ġunregulated":34166,"published":34167,"ĠPIT":34168,"ĠMissile":34169,"extreme":34170,"ĠAlone":34171,"skilled":3417
2,"ĠRamp":34173,"Ġcamer":34174,"Ġflyer":34175,"Ġbrewers":34176,"ĠReference":34177,"ĠMOV":34178,"ĠLep":34179,"Ġentitle":34180,"ivals":34181,"ĠPIN":34182,"Ġbatches":34183,"Ġunexplained":34184,"Ġenergies":34185,"Ġblurred":34186,"enged":34187,"orig":34188,"WF":34189,"olves":34190,"ĠPicks":34191,"ĠTwice":34192,"arranted":34193,"Ġmembrane":34194,"ĠMoonlight":34195,"Ġsulfur":34196,"Ġpurposely":34197,"Ġfumes":34198,"Ġ(#":34199,"onics":34200,"ivities":34201,"rollers":34202,"Ġflattering":34203,"felt":34204,"Ġintoxication":34205,"Bridge":34206,"ĠFallout":34207,"Ġcreatively":34208,"Ġpsychologically":34209,"Ġdespicable":34210,"gae":34211,"820":34212,"VERS":34213,"Ġtidal":34214,"Ġcarbohydrates":34215,"strip":34216,"Ġgravitational":34217,"Ġfeds":34218,"ĠZhao":34219,"legates":34220,"Ġ307":34221,"String":34222,"ĠRepair":34223,"Ġ1928":34224,"orses":34225,"atography":34226,"Boston":34227,"Ġasymm":34228,"ĠSomebody":34229,"Van":34230,"ĠSovereign":34231,"Ġnotoriety":34232,"Ġsimulate":34233,"ĠDiscussion":34234,"ĠTransition":34235,"Ġcopying":34236,"antage":34237,"ĠRodrig":34238,"Ġindifference":34239,"Ġ580":34240,"Ġastronomical":34241,"Ġscrews":34242,"840":34243,"inates":34244,"ĠStreaming":34245,"Ġentit":34246,"ĠLiterature":34247,"369":34248,"805":34249,"OTS":34250,"о":34251,"img":34252,"inness":34253,"Ġreverber":34254,"Ġpartition":34255,"Short":34256,"Ġmoist":34257,"Ġspoof":34258,"ĠDesire":34259,"orce":34260,"Ġcrammed":34261,"Ġunfor":34262,"Pan":34263,"ingen":34264,"Ġrelat":34265,"Mother":34266,"ĠGn":34267,"altern":34268,"Ġresurg":34269,"Ġcramped":34270,"ĠCitadel":34271,"Ġlaureate":34272,"Ġanalys":34273,"Ġnuns":34274,"ĠTie":34275,"activ":34276,"ĠSurprisingly":34277,"ĠProtective":34278,"ĠRedemption":34279,"Ġendlessly":34280,"Ġfists":34281,"spl":34282,"ĠKron":34283,"ĠExamples":34284,"Especially":34285,"Ġprejud":34286,"ĠSchwar":34287,"Ġ237":34288,"ĠPlants":34289,"ĠUNDER":34290,"Ġlasers":34291,"Ġsher":34292,"Ġgoddess":34293,"Ġwipes":34294,"409":34295,"ĠGTA":34296,"Ġhybrids":34297,"rowd":34298,"ĠMILL":34299,"ĠNUM":34300,"ĠGeek":34301,"ĠTWO":34302,"ĠTimbers":34303,"Ġresembled":34304,"ĠGRE":34305,"Bring":34306,"Ġcompressed":34307,"ĠOral":34308,"379":34309,"Ġwrench":34310,"LCS":34311,"Ġhomosexual":34312,"Kelly":34313,"Ġhump":34314,"ĠSicily":34315,"Ġperished":34316,"aos":34317,"doesn":34318,"scrib":34319,"Charlie":34320,"Ġshuffle":34321,"372":34322,"cedented":34323,"402":34324,"Ġtiers":34325,"Ġinteracted":34326,"ĠHG":34327,"ĠJere":34328,"ĠBRA":34329,"ĠDOC":34330,"things":34331,"Ġfaiths":34332,"Ġgirlfriends":34333,"Ġfortified":34334,"develop":34335,"ĠKus":34336,"iability":34337,"rase":34338,"iotics":34339,"ĠChern":34340,"boxes":34341,"abol":34342,"idan":34343,"emon":34344,"ĠJudaism":34345,"ĠSituation":34346,"ĠGrimm":34347,"Ġgou":34348,"ĠVictim":34349,"backer":34350,"Ġanimosity":34351,"ĠHorizons":34352,"ĠKazakh":34353,"Ġgrossly":34354,"ĠTac":34355,"yg":34356,"366":34357,"Ġcheaply":34358,"Ġformulated":34359,"ĠDangerous":34360,"offensive":34361,"Ġsauces":34362,"Ġkeyboards":34363,"666":34364,"Ġcanopy":34365,"Inc":34366,"astered":34367,"iesel":34368,"Ġadv":34369,"currency":34370,"Ġscapego":34371,"plings":34372,"ĠBDS":34373,"Ġstrangely":34374,"today":34375,"ĠEgyptians":34376,"Ġcoron":34377,"often":34378,"ĠTransformers":34379,"ĠAfterwards":34380,"reated":34381,"Ġpoisonous":34382,"Ġgeographically":34383,"Ġmell":34384,"Cross":34385,"Ġdeductible":34386,"ĠZionist":34387,"Ġcutter":34388,"ĠRP":34389,"ĠImag":34390,"Ġoverflow":34391,"358":34392,"ĠADD":34393,"bones":34394,"Ġflattened":34395,"ĠGREEN":34396,"Ġlaure":34397,"haps":34398,"ĠCell
ular":34399,"kens":34400,"363":34401,"ĠSmash":34402,"ĠSpeak":34403,"ĠMaiden":34404,"Ġgreedy":34405,"ĠManit":34406,"Ġfacet":34407,"ĠGPA":34408,"Ġracks":34409,"popular":34410,"322":34411,"ĠBars":34412,"avement":34413,"359":34414,"Ġpomp":34415,"Ġregisters":34416,"Fs":34417,"ĠLoving":34418,"ĠTaxi":34419,"concert":34420,"ĠArchae":34421,"Ġcurls":34422,"ĠSpit":34423,"ĠLIFE":34424,"Ġinvade":34425,"rolog":34426,"wreck":34427,"Ġconflicted":34428,"Ġ970":34429,"Ġexiled":34430,"Ġchew":34431,"udging":34432,"Ġexper":34433,"ĠFt":34434,"rius":34435,"ĠXer":34436,"~":34437,"Ġbandwagon":34438,"Fore":34439,"Cat":34440,"Ġoverflowing":34441,"Ġradios":34442,"Much":34443,"Ġfacilitates":34444,"ĠCaf":34445,"ĠQing":34446,"Use":34447,"Ġmang":34448,"Ġpissed":34449,"ĠOuter":34450,"within":34451,"ĠSchr":34452,"ĠSherlock":34453,"Ġ336":34454,"Ġcasc":34455,"chens":34456,"incent":34457,"Ġcultivating":34458,"ampions":34459,"Ġwasteful":34460,"adays":34461,"sets":34462,"ĠLF":34463,"watching":34464,"Ġabandonment":34465,"ĠJesuit":34466,"Ġlegislatures":34467,"regnancy":34468,"ĠColt":34469,"Ġinterns":34470,"Ġundertook":34471,"ĠIPA":34472,"ĠInstall":34473,"nsics":34474,"washer":34475,"Ġbeginners":34476,"ĠDiseases":34477,"Ġlimp":34478,"ĠESA":34479,"Basically":34480,"Ġprud":34481,"LED":34482,"Ġgrease":34483,"ousel":34484,"Ġrotten":34485,"ĠCele":34486,"facts":34487,"ĠLouie":34488,"ĠISI":34489,"481":34490,"Ġsett":34491,"Ġtoug":34492,"ĠReck":34493,"OUNT":34494,"ĠFou":34495,"Ġinhibitor":34496,"gru":34497,"bane":34498,"1980":34499,"ĠPanc":34500,"Ġsuperficial":34501,"Ġauthoritative":34502,"ĠVOL":34503,"790":34504,"Ġcrusade":34505,"airy":34506,"Ġemphatically":34507,"Ġflourishing":34508,"Ġ416":34509,"Ġheroine":34510,"inx":34511,"Ġanch":34512,"stretched":34513,"ĠRegener":34514,"ĠAncient":34515,"evaluate":34516,"Ġantibody":34517,"ĠEston":34518,"ĠAeg":34519,"Ġboldly":34520,"TN":34521,"ĠPercentage":34522,"Ġ747":34523,"Ġrapt":34524,"ĠEdited":34525,"Earth":34526,"phal":34527,"ĠXXX":34528,"arling":34529,"ĠReligion":34530,"Ġ503":34531,"forces":34532,"Ġendpoint":34533,"Miller":34534,"Ba":34535,"Ġdisappears":34536,"andre":34537,"Ġconnector":34538,"407":34539,"ĠTOUR":34540,"aura":34541,"ĠRazer":34542,"UPDATE":34543,"Ġcalib":34544,"original":34545,"ĠMonkey":34546,"Ir":34547,"Ġexacerb":34548,"killing":34549,"Ġforb":34550,"native":34551,"Ġpoking":34552,"Ġveiled":34553,"mails":34554,"Ġalphabet":34555,"Ġawkwardly":34556,"ĠNames":34557,"Ġspiders":34558,"ĠParam":34559,"ĠColour":34560,"Ġunification":34561,"ĠPione":34562,"Ġoffend":34563,"Ġscoff":34564,"ĠSAR":34565,"ĠBuildings":34566,"edes":34567,"ĠAke":34568,"Ġfirmware":34569,"Madison":34570,"policy":34571,"ĠComputing":34572,"ĠRW":34573,"Ġfluent":34574,"Ġdece":34575,"Ġswore":34576,"Ġrestaur":34577,"Ġpresses":34578,"ophon":34579,"Ġphilosopher":34580,"ften":34581,"Ġintruder":34582,"Ġleng":34583,"ĠCowboy":34584,"cled":34585,"Ġmeticulously":34586,"ĠPair":34587,"ĠEND":34588,"Ġcapsules":34589,"Ġauxiliary":34590,"Ġverses":34591,"Ġsheltered":34592,"Ġexplorer":34593,"ĠWolverine":34594,"auts":34595,"Ġinhibitors":34596,"ĠPeng":34597,"ĠValve":34598,"imar":34599,"Ġchuck":34600,"ĠRecording":34601,"Ġardu":34602,"Test":34603,"Ġinterven":34604,"Ġchrome":34605,"months":34606,"tap":34607,"ĠManz":34608,"format":34609,"ĠBalkans":34610,"Ġannex":34611,"uder":34612,"ĠAAC":34613,"Ġdisturbances":34614,"354":34615,"asms":34616,"ĠTad":34617,"puting":34618,"Ġfateful":34619,"imen":34620,"Ġaudi":34621,"ĠNewsweek":34622,"Around":34623,"Ġretribution":34624,"Ġsugars":34625,"Ġescapes":34626,"Ġlegitim":34627,"ĠProof":34628,"Ġmisogyn":34629,"cit":34
630,"Ġclutching":34631,"exist":34632,"Ġrevol":34633,"Ġdiscs":34634,"discrimination":34635,"Ġstout":34636,"aline":34637,"ĠRandom":34638,"364":34639,"Ġapprehension":34640,"Ġmockery":34641,"Ġfossils":34642,"ĠStress":34643,"Ġbenefic":34644,"exc":34645,"lude":34646,"Small":34647,"Ġgh":34648,"Ġobserves":34649,"ĠSUP":34650,"Ġbrewer":34651,"ĠESP":34652,"Ġomitted":34653,"multiple":34654,"Ġminimizing":34655,"Ġtaco":34656,"Ġindifferent":34657,"medi":34658,"available":34659,"Ġ252":34660,"Ġsanity":34661,"ĠCookie":34662,"mostly":34663,"near":34664,"NASA":34665,"Ġlowly":34666,"seless":34667,"Ġobsess":34668,"itous":34669,"Dispatch":34670,"Ġcanyon":34671,"Ġbriefs":34672,"Say":34673,"ĠNato":34674,"ĠSpend":34675,"Ġ242":34676,"ĠEthernet":34677,"Ġmatte":34678,"ĠStim":34679,"hetics":34680,"Ġflourished":34681,"389":34682,"ĠMcA":34683,"695":34684,"Ġoverr":34685,"Ġtorment":34686,"Ġpirate":34687,"ĠJohann":34688,"roversial":34689,"ĠUnemployment":34690,"breakers":34691,"ĠMessages":34692,"tones":34693,"Ġtagging":34694,"Ġfrog":34695,"Jewish":34696,"Ġmessenger":34697,"Ġexasper":34698,"ernaut":34699,"Ġnarrower":34700,"ĠCatalyst":34701,"ĠSecrets":34702,"Ġadj":34703,"ĠFug":34704,"Ġaura":34705,"Ġtherape":34706,"mber":34707,"Ġcaliphate":34708,"Ġretreating":34709,"ĠComput":34710,"Ġburying":34711,"Ġail":34712,"Ġgriev":34713,"lins":34714,"825":34715,"tten":34716,"ifully":34717,"ĠTrials":34718,"igma":34719,"Ġ1914":34720,"Ġcoordinates":34721,"ocusing":34722,"ĠFeng":34723,"ĠWhale":34724,"Ġshorten":34725,"Ġcorrectness":34726,"evil":34727,"network":34728,"Ġreactive":34729,"assuming":34730,"ĠLaksh":34731,"games":34732,"Ġruining":34733,"excluding":34734,"annels":34735,"º":34736,"Ġrubbed":34737,"aleb":34738,"flex":34739,"iped":34740,"ĠLimit":34741,"allowed":34742,"ĠDMV":34743,"ĠLD":34744,"Ġstamina":34745,"conduct":34746,"Ġmislead":34747,"lib":34748,"ĠEminem":34749,"Ġpayoff":34750,"Ġkernel":34751,"Ġsweeps":34752,"Ġsonic":34753,"ĠKodi":34754,"unique":34755,"Ġsurrog":34756,"Michigan":34757,"Ġattest":34758,"Ġdummy":34759,"ĠStellar":34760,"ĠSquadron":34761,"ĠHait":34762,"ĠSpirits":34763,"605":34764,"ĠHemisphere":34765,"legram":34766,"ĠRack":34767,"opol":34768,"Ġfreshwater":34769,"cession":34770,"Ġabort":34771,"ĠLOG":34772,"Ġfuzzy":34773,"Ġcrystall":34774,"illation":34775,"ĠFreddy":34776,"Ġsalvation":34777,"Ġjuxtap":34778,"weekly":34779,"usha":34780,"456":34781,"Ġ660":34782,"ĠGlacier":34783,"Ġnegatives":34784,"Ġillegitimate":34785,"ĠProtein":34786,"Moore":34787,"Der":34788,"Ġinfancy":34789,"Again":34790,"ALD":34791,"Leon":34792,"ĠIdeally":34793,"fresh":34794,"730":34795,"Ġgamb":34796,"Ġscrewed":34797,"wow":34798,"Ġembodied":34799,"ĠCinderella":34800,"341":34801,"ĠPiano":34802,"Ġbroccoli":34803,"Ġmats":34804,"ĠZheng":34805,"cream":34806,"anut":34807,"ĠZig":34808,"Columb":34809,"ĠTibetan":34810,"Death":34811,"Ġstren":34812,"ĠVertical":34813,"Ġratification":34814,"Ġprincipally":34815,"ELD":34816,"Ġforbid":34817,"Ġamalg":34818,"blind":34819,"auri":34820,"stery":34821,"Ġbarley":34822,"FBI":34823,"ĠHex":34824,"925":34825,"Domin":34826,"oat":34827,"Ġswayed":34828,"ĠKKK":34829,"ĠTaxes":34830,"Ġker":34831,"eeper":34832,"ĠAwakens":34833,"ĠPix":34834,"ĠKING":34835,"dc":34836,"Ren":34837,"Ġlegitimately":34838,"ĠTriumph":34839,"ĠSites":34840,"ĠSai":34841,"tl":34842,"painted":34843,"ĠWaiting":34844,"starting":34845,"parents":34846,"ĠDuo":34847,"eele":34848,"upper":34849,"ĠInvestig":34850,"Ġeighteen":34851,"Ġcorrelated":34852,"ĠCascade":34853,"acca":34854,"ĠAlph":34855,"ĠPolic":34856,"ĠEVs":34857,"Ġworthless":34858,"ĠIndust":34859,"auld":34860,"ĠYiannopoulo
s":34861,"ĠEzra":34862,"Ġmorphed":34863,"Ġoriginating":34864,"mania":34865,"Ġsparing":34866,"Ġextrem":34867,"cre":34868,"ults":34869,"mare":34870,"classified":34871,"Ġparachute":34872,"Ġmistrust":34873,"ONT":34874,"Mind":34875,"Ġthru":34876,"707":34877,"ĠTwain":34878,"Ġmelodies":34879,"ĠDanger":34880,"ĠDPS":34881,"Ġderive":34882,"Ġdissolution":34883,"Ġchildbirth":34884,"Ġ415":34885,"fork":34886,"solid":34887,"loads":34888,"ĠCGI":34889,"378":34890,"ĠShed":34891,"Face":34892,"Ġcomet":34893,"iceps":34894,"ĠReduction":34895,"Fly":34896,"jp":34897,"ĠAnimation":34898,"Luke":34899,"Ġabiding":34900,"Ġdevise":34901,"ĠAe":34902,"Ġflux":34903,"Ġbras":34904,"Ġfracturing":34905,"Ġinventive":34906,"ĠGranger":34907,"Ġsap":34908,"inducing":34909,"Ġreviewers":34910,"Officers":34911,"ĠWHY":34912,"Ġamplify":34913,"Ġentr":34914,"Ġslit":34915,"457":34916,"Ġreformed":34917,"ĠPhi":34918,"Ġtempt":34919,"Ġcontradiction":34920,"585":34921,"ĠMaced":34922,"371":34923,"kinson":34924,"robe":34925,"ĠHunters":34926,"astern":34927,"criminal":34928,"jew":34929,"Ġdecentralized":34930,"bands":34931,"Ġavatar":34932,"ĠBarrier":34933,"Ġcharacterization":34934,"student":34935,"Ġgays":34936,"Ġspecialize":34937,"ĠJudging":34938,"Ġinitiation":34939,"Ġshove":34940,"Ġpirates":34941,"Ġfictitious":34942,"ĠPoker":34943,"ĠElsa":34944,"ĠTECH":34945,"handedly":34946,"Ġglued":34947,"Ġclinically":34948,"Ġinaccessible":34949,"Ġderegulation":34950,"Ġprohib":34951,"Ġdangling":34952,"Ġnoses":34953,"Ġstash":34954,"اØ":34955,"ESH":34956,"Ġmonstrous":34957,"Ġcrept":34958,"ĠCharm":34959,"Ġbeh":34960,"Ġshuts":34961,"Ġ236":34962,"imedia":34963,"445":34964,"Du":34965,"Ġafar":34966,"ĠRout":34967,"Ġflares":34968,"Utah":34969,"Ġ808":34970,"Ġjewels":34971,"2004":34972,"Ġrecal":34973,"Gas":34974,"ĠExcellent":34975,"Ġpitfalls":34976,"ĠDrawing":34977,"viously":34978,"angered":34979,"changes":34980,"Ġpasture":34981,"talking":34982,"Ġinequ":34983,"Ġbicycl":34984,"Cost":34985,"423":34986,"bard":34987,"Ġanterior":34988,"ecast":34989,"CHR":34990,"397":34991,"masters":34992,"706":34993,"ĠFinish":34994,"Yet":34995,"study":34996,"ĠCogn":34997,"Ġloaf":34998,"Ġspatial":34999,"ĠParad":35000,"batch":35001,"Ġvents":35002,"Ġspins":35003,"ĠAddiction":35004,"Ġcondone":35005,"Ġproble":35006,"English":35007,"ĠRomans":35008,"ĠSaying":35009,"ĠKling":35010,"Universal":35011,"ivist":35012,"Ġskirm":35013,"Ġ2500":35014,"Ġ263":35015,"aired":35016,"ĠMartian":35017,"ĠCompensation":35018,"lation":35019,"ĠSalam":35020,"LGBT":35021,"ĠDart":35022,"strike":35023,"vasive":35024,"ILLE":35025,"Ġimaginative":35026,"ĠEuph":35027,"Financial":35028,"Ġholog":35029,"orah":35030,"crit":35031,"ĠOswald":35032,"512":35033,"ĠUri":35034,"Ġdiscrepancies":35035,"Ġbeads":35036,"ĠShots":35037,"Mem":35038,"Ġhunts":35039,"Ġsubtly":35040,"Ġ470":35041,"ĠVigil":35042,"Ġsew":35043,"ĠBurma":35044,"igm":35045,"ighed":35046,"swe":35047,"Ġ251":35048,"Ġdeceit":35049,"Ġphysi":35050,"iflower":35051,"ĠCert":35052,"Ġchewing":35053,"rax":35054,"ĠMER":35055,"icient":35056,"Les":35057,"Ġ390":35058,"Ġperjury":35059,"Ġfiltering":35060,"770":35061,"Ġpoppy":35062,"Ġbland":35063,"ĠNasa":35064,"Ġorbiting":35065,"ĠRipple":35066,"otal":35067,"ĠRyu":35068,"ĠShap":35069,"ĠJian":35070,"Ġpiv":35071,"ĠNeptune":35072,"rary":35073,"Ġunavoidable":35074,"Ġguideline":35075,"Ġwaterfall":35076,"inators":35077,"ĠLogic":35078,"ĠPlug":35079,"role":35080,"Ġalterations":35081,"ĠSett":35082,"ĠFeld":35083,"Ġfreezes":35084,"Ġbedrock":35085,"ĠVIEW":35086,"ovation":35087,"Ġneedless":35088,"ĠIU":35089,"ignant":35090,"ĠConfeder":35091,"316":35092,"fine":35093
,"Ġjars":35094,"gotten":35095,"Bron":35096,"Ġmindfulness":35097,"imating":35098,"Ġhysteria":35099,"Ġhurried":35100,"Ġinfantry":35101,"ĠNYU":35102,"tags":35103,"Penn":35104,"Ġtracing":35105,"ĠSwing":35106,"ĠIo":35107,"Ġreckoned":35108,"ĠRecall":35109,"ĠVersion":35110,"314":35111,"Ġecology":35112,"Ġarmoured":35113,"Ġresonance":35114,"970":35115,"Ġvigilance":35116,"Ġrede":35117,"ĠBohem":35118,"Ġchau":35119,"ĠDevi":35120,"Ġtru":35121,"))":35122,"Put":35123,"Ġflavored":35124,"ĠClown":35125,"Senate":35126,"ĠScandinavian":35127,"mable":35128,"Residents":35129,"ĠFranchise":35130,"Ġprecincts":35131,"Prem":35132,"ĠNeutral":35133,"coal":35134,"Ġdelinqu":35135,"Mus":35136,"UME":35137,"Ġtedious":35138,"roots":35139,"ĠCondition":35140,"ĠIntercept":35141,"017":35142,"itives":35143,"Ġdefinitively":35144,"Ġobliter":35145,"Ġclandestine":35146,"Ġstagnation":35147,"Ġblindness":35148,"abiding":35149,"Ġremix":35150,"feeding":35151,"Ġunrecogn":35152,"2003":35153,"960":35154,"381":35155,"Ġbulky":35156,"xia":35157,"ivered":35158,"inic":35159,"ĠSoci":35160,"ĠYards":35161,"Ġhides":35162,"Film":35163,"Ġtestim":35164,"Ġblacklist":35165,"Deep":35166,"Standard":35167,"ĠClash":35168,"Ġriddled":35169,"Ġdiseng":35170,"ĠTRE":35171,"ĠIDs":35172,"Ġmigrating":35173,"protect":35174,"Ġgraded":35175,"Ġvaguely":35176,"ĠCharacter":35177,"382":35178,"ĠMOD":35179,"Eng":35180,"Ġmobilized":35181,"Ġsincerity":35182,"Ġ317":35183,"sighted":35184,"ownt":35185,"ĠâĢİ":35186,"umpy":35187,"Ġitching":35188,"ĠVerd":35189,"cook":35190,"Ġsimulator":35191,"players":35192,"Early":35193,"infeld":35194,"Ġmaximizing":35195,"Philipp":35196,"ĠPhotoshop":35197,"Ġdestroys":35198,"Ġbefriend":35199,"Ġfilthy":35200,"ĠIncident":35201,"gha":35202,"Ġcomplicity":35203,"Ġmessing":35204,"YA":35205,"ĠNegro":35206,"adows":35207,"374":35208,"Ġpip":35209,"cean":35210,"Ġ1924":35211,"Sent":35212,"represent":35213,"Ġdeems":35214,"ĠRue":35215,"Ġtitanium":35216,"Ġmanners":35217,"âĢ¦âĢ¦":35218,"bare":35219,"Ġusur":35220,"mma":35221,"ĠPanda":35222,"ulus":35223,"ĠSlav":35224,"324":35225,"ĠMole":35226,"^":35227,"micro":35228,"foreign":35229,"lest":35230,"ocular":35231,"ĠUniv":35232,"ĠFrag":35233,"Ġshepherd":35234,"Ġelectron":35235,"ĠFSA":35236,"Ġunl":35237,"dose":35238,"Ġimmersion":35239,"ĠDeL":35240,"Ġbiomedical":35241,"Anna":35242,"Ġskillet":35243,"Ġrecre":35244,"Ġtrillions":35245,"voy":35246,"Ġnormalized":35247,"radio":35248,"cue":35249,"urbed":35250,"Ġthinkers":35251,"328":35252,"327":35253,"ĠForge":35254,"505":35255,"Ġunbearable":35256,"olini":35257,"Ġdisinfect":35258,"Ġshaving":35259,"Ġtoxicity":35260,"453":35261,"Ġheterosexual":35262,"Baltimore":35263,"Ġstool":35264,"lr":35265,"ĠMk":35266,"Ġantidote":35267,"Dark":35268,"810":35269,"Ġirritated":35270,"ĠSUPPORT":35271,"Chance":35272,"bent":35273,"ĠZelda":35274,"ĠPenguin":35275,"ifled":35276,"Ġarte":35277,"705":35278,"Ġcondol":35279,"izza":35280,"ĠCK":35281,"Ġprojector":35282,"ravings":35283,"Ġ1919":35284,"Ġburner":35285,"ĠSchwarz":35286,"Oregon":35287,"Ġridicule":35288,"Ġinstructional":35289,"Ġ\"#":35290,"ĠDign":35291,"Ġkitten":35292,"Ġconstit":35293,"iration":35294,"Speed":35295,"ecycle":35296,"ĠFalse":35297,"ĠDealer":35298,"Could":35299,"655":35300,"outside":35301,"Ġworldview":35302,"Ġ246":35303,"Ġspitting":35304,"595":35305,"MN":35306,"ĠComes":35307,"ingu":35308,"Ġenzymes":35309,"Ġcompass":35310,"Ġexclaimed":35311,"ĠMalays":35312,"Ġ1916":35313,"Ġcoloring":35314,"Ġrepeats":35315,"Ġsoils":35316,"Ġtrivia":35317,"ĠIsles":35318,"Const":35319,"ĠFiction":35320,"665":35321,"Ġcriminality":35322,"ĠZi":35323,"384":35324,"ĠWildernes
s":35325,"ĠCanary":35326,"ĠVs":35327,"и":35328,"ĠAPIs":35329,"Ġbehest":35330,"Ġeb":35331,"ĠHipp":35332,"Ġpreempt":35333,"Ġevoke":35334,"Ġinept":35335,"tele":35336,"447":35337,"ĠGarmin":35338,"Ġpursuits":35339,"351":35340,"Ġcliché":35341,"ĠJihad":35342,"Ġ308":35343,"ĠSnake":35344,"ĠAnnounce":35345,"Nearly":35346,"!'\"":35347,"Ġ1927":35348,"saw":35349,"Ġabhor":35350,"Plan":35351,"rawled":35352,"ĠRiy":35353,"ensor":35354,"Fal":35355,"quick":35356,"odynamic":35357,"Ġsubstitution":35358,"Ġprovoking":35359,"Operation":35360,"rupulous":35361,"Ġsweetness":35362,"folk":35363,"ĠDefault":35364,"Ġstarved":35365,"ĠPrinting":35366,"urious":35367,"ĠTracker":35368,"them":35369,"Ġleth":35370,"Ġemptied":35371,"Ġfootprints":35372,"ilian":35373,"Ġbattalion":35374,"Ġprophet":35375,"Ġrailing":35376,"Ġhect":35377,"rouch":35378,"lees":35379,"Ġideologies":35380,"Ġ254":35381,"ĠGods":35382,"ĠAvalon":35383,"Ġfrontrunner":35384,"ĠPork":35385,"ĠPipe":35386,"Ġscaven":35387,"Ġming":35388,"Ġerg":35389,"Ġ520":35390,"Ġhatched":35391,"asant":35392,"ĠHI":35393,"Ġpend":35394,"Ġ288":35395,"Prom":35396,"achev":35397,"ĠEcology":35398,"enforcement":35399,"467":35400,"dule":35401,"Ġrealism":35402,"ĠTypes":35403,"USB":35404,"utra":35405,"ĠHiroshima":35406,"Ġcontradicted":35407,"393":35408,"ĠDSL":35409,"Ġtherein":35410,"ĠReconstruction":35411,"Ġ243":35412,"irled":35413,"479":35414,"ĠWhats":35415,"Currently":35416,"ĠPOWER":35417,"ĠHiro":35418,"ĠBreath":35419,"ĠYourself":35420,"Ġlantern":35421,"376":35422,"É":35423,"ĠHumans":35424,"Lady":35425,"Ġdissemination":35426,"ecake":35427,"ĠChao":35428,"flat":35429,"Ġinspecting":35430,"stration":35431,"Ġidentifiable":35432,"CV":35433,"ĠLobby":35434,"function":35435,"Roll":35436,"DIV":35437,"Tell":35438,"Ġfasc":35439,"ĠAOL":35440,"HM":35441,"Keefe":35442,"Ġporous":35443,"Ġsmoot":35444,"existence":35445,"ĠDeg":35446,"Ġdivor":35447,"isner":35448,"allas":35449,"Bloomberg":35450,"Ġdictators":35451,"ĠGeh":35452,"Ġsilicone":35453,"Ġdab":35454,"Ġmashed":35455,"Ġpric":35456,"might":35457,"ĠBLM":35458,"Ġpatriarch":35459,"Microsoft":35460,"ĠAds":35461,"Ġcoronary":35462,"ĠContrary":35463,"Ġdra":35464,"ĠStarted":35465,"Ġbuckle":35466,"lear":35467,"accept":35468,"Within":35469,"bd":35470,"interested":35471,"bia":35472,"POR":35473,"motion":35474,"ĠFounders":35475,"ĠCassandra":35476,"ĠPassion":35477,"Ġbehavioural":35478,"ĠHealing":35479,"Ġmarkings":35480,"Ġsnowball":35481,"Ġridiculed":35482,"phase":35483,"Ġunto":35484,"aque":35485,"uggets":35486,"Ġfrantically":35487,"Ġcoward":35488,"Ġinconvenient":35489,"Taking":35490,"Afee":35491,"Ġtwisting":35492,"930":35493,"ĠSieg":35494,"ĠGit":35495,"Ġcurs":35496,"ĠGlas":35497,"ĠSignificant":35498,"Ġachieves":35499,"Ġpreferably":35500,"Ġcondensed":35501,"Ġfetus":35502,"Ġunivers":35503,"Ġpse":35504,"Access":35505,"Ġintertwined":35506,"been":35507,"quit":35508,"ĠLEGO":35509,"Ġimagining":35510,"454":35511,"Ġplains":35512,"sequently":35513,"pull":35514,"Fast":35515,"Pot":35516,"yles":35517,"AIR":35518,"Ġblatantly":35519,"eki":35520,"ilated":35521,"ĠMembership":35522,"Ġ262":35523,"Ġ}":35524,"Ġexcavation":35525,"Ġethn":35526,"addin":35527,"Ġfoundational":35528,"ceptions":35529,"ĠViet":35530,"exempt":35531,"Ġmicrophones":35532,"Ġ244":35533,"778":35534,"Ġdwar":35535,"attery":35536,"502":35537,"ĠKik":35538,"Ġinspir":35539,"ĠMaximum":35540,"Ġvengeance":35541,"Ġetched":35542,"outine":35543,"552":35544,"Ġunicorn":35545,"gged":35546,".�":35547,"ĠBlackwell":35548,"ĠStatue":35549,"Ġdissidents":35550,"ĠKaine":35551,"Ġdeforestation":35552,"ĠScholar":35553,"Ġpleasantly":35554,"ÑĤ":35555,"398
":35556,"ĠRUN":35557,"arent":35558,"Ġundeniably":35559,"Ġtechnologically":35560,"Ġconsciously":35561,"ĠEther":35562,"Ġproportional":35563,"Ġlaund":35564,"ĠRye":35565,"Ġambiguity":35566,"Ġunmist":35567,"Terror":35568,"ciplinary":35569,"ĠImproved":35570,"hesis":35571,"Ġcooker":35572,"elsen":35573,"Ġguerrilla":35574,"opped":35575,"ATURE":35576,"Ġrequ":35577,"Ġunprepared":35578,"Ġcamel":35579,"Ġfitt":35580,"Sex":35581,"edged":35582,"Ġrecurrent":35583,"ctuary":35584,"ĠCompare":35585,"ĠServing":35586,"Tri":35587,"Ġtransient":35588,"ĠBees":35589,"Ġcovenant":35590,"Ġfantasies":35591,"Ġespresso":35592,"draft":35593,"baugh":35594,"Ġdemocratically":35595,"ĠBans":35596,"ĠManual":35597,"ĠTurtle":35598,"ennett":35599,"achy":35600,"ĠClim":35601,"Ġdescending":35602,"Ġprow":35603,"Ġinconsistencies":35604,"Player":35605,"Ġoblivious":35606,"ĠWonderland":35607,"nav":35608,"aughter":35609,"Ġlod":35610,"Ġ403":35611,"ĠPolaris":35612,"ĠLeia":35613,"ĠInfantry":35614,"Sy":35615,"ĠMeter":35616,"Ġautoimmune":35617,"Ġdiagnoses":35618,"Ġtrespass":35619,"011":35620,"wrong":35621,"ĠGREAT":35622,"Ġtelescopes":35623,"shows":35624,"Pac":35625,"olation":35626,"Ġclerics":35627,"Ġdissenting":35628,"406":35629,"Ġetiquette":35630,"Ġdeterrence":35631,"765":35632,"Ġove":35633,"Has":35634,"Pak":35635,"ा":35636,"ĠNec":35637,"Ġsociology":35638,"witz":35639,"Ġkittens":35640,"Ġcontinual":35641,"Ġoverlapping":35642,"Ġmonks":35643,"ĠMechanical":35644,"Captain":35645,"ocial":35646,"ĠFalling":35647,"ĠCorrection":35648,"ĠTrouble":35649,"Ġslog":35650,"Ġ253":35651,"Ġemanating":35652,"Ġwidest":35653,"PROV":35654,"Japanese":35655,"urat":35656,"Ġboxed":35657,"ĠCases":35658,"Ġjarring":35659,"Fix":35660,"'?":35661,"ĠStrateg":35662,"Republic":35663,"ovy":35664,"362":35665,"ĠMothers":35666,"Ġstreaks":35667,"Ġlocalized":35668,"ĠONLY":35669,"Ġeh":35670,"ĠObject":35671,"Ġstub":35672,"Fre":35673,"ĠScarlet":35674,"Ġmultip":35675,"ĠMaul":35676,"ĠProblems":35677,"cest":35678,"Ġmortal":35679,"Ġarche":35680,"ulet":35681,"Ġfuller":35682,"ĠGER":35683,"Si":35684,"mr":35685,"ĠPowerful":35686,"boxing":35687,"ĠPeer":35688,"Jean":35689,"ĠTF":35690,"Ġplural":35691,"optim":35692,"Jimmy":35693,"ĠFriendly":35694,"Mex":35695,"Ġdepri":35696,"PK":35697,"Ġwaitress":35698,"eph":35699,"arrass":35700,"ikawa":35701,"feel":35702,"Finally":35703,"fourth":35704,"394":35705,"conom":35706,"VT":35707,"Ġeleg":35708,"ivot":35709,"Ġharsher":35710,"ĠPepe":35711,"ĠImpl":35712,"Ġankles":35713,"idity":35714,"ĠPrepare":35715,"Rather":35716,"Ġconservatism":35717,"Ġunquestion":35718,"ribution":35719,"ĠPatent":35720,"ĠDeluxe":35721,"ĠAE":35722,"007":35723,"Ġprag":35724,"bg":35725,"Ġpalate":35726,"Ġintric":35727,"ossom":35728,"Ġspac":35729,"ĠSpotlight":35730,"Seven":35731,"amacare":35732,"ĠGotham":35733,"Ġencompass":35734,"Ġnicer":35735,"ĠLauder":35736,"Ġscaff":35737,"worn":35738,"442":35739,"Ġpropri":35740,"443":35741,"ĠCompos":35742,"ĠIniti":35743,"inth":35744,"Ġrehe":35745,"Prov":35746,"Ġgri":35747,"ossip":35748,"ĠModest":35749,"quiet":35750,"Ġwealthier":35751,"Ġ241":35752,"icum":35753,"Ġcommunism":35754,"Ġhelpers":35755,"Ġbellig":35756,"Ġ405":35757,"uttered":35758,"Ġbitterness":35759,"nl":35760,"474":35761,"Ġvitality":35762,"blank":35763,"ĠLeth":35764,"PAC":35765,"326":35766,"ĠNapoleon":35767,"Ġ299":35768,"ĠReviews":35769,"ĠSect":35770,"Ġstrongh":35771,"ĠTube":35772,"Ġwoodland":35773,"Ġhumming":35774,"411":35775,"Alpha":35776,"Ġundet":35777,"Ġmounts":35778,"Officials":35779,"igning":35780,"830":35781,"ĠStamp":35782,"ubby":35783,"424":35784,"Ġoutlandish":35785,"Ġjerk":35786,"Ġradiant":35787,"
Ġcubes":35788,"Director":35789,"Ġatro":35790,"vous":35791,"Sab":35792,"Ġpretended":35793,"Ġ620":35794,"975":35795,"Sham":35796,"Ġpotassium":35797,"ĠAttention":35798,"gly":35799,"opens":35800,"ĠWorker":35801,"porter":35802,"Ġsplendid":35803,"embed":35804,"Je":35805,"ĠMeal":35806,"Ġsurname":35807,"Usually":35808,"Ġtimer":35809,"Ġweave":35810,"irin":35811,"ĠGenetics":35812,"ensual":35813,"Ġmerry":35814,"Ġapprehend":35815,"utsche":35816,"strate":35817,"Ġsupplementary":35818,"ĠRoundup":35819,"upid":35820,"Ġmiraculous":35821,"ĠHUN":35822,"Ġglaciers":35823,"weed":35824,"ĠSuggest":35825,"XL":35826,"authors":35827,"Ġbarking":35828,"ĠUKIP":35829,"leased":35830,"ĠRAD":35831,"Ġfide":35832,"Ġphen":35833,"Ġscanners":35834,"Parents":35835,"ĠBlaze":35836,"Ġtweaking":35837,"Ġelaborated":35838,"Ġsusp":35839,"iscovered":35840,"Ġthighs":35841,"Ġradicals":35842,"ULTS":35843,"aggressive":35844,"endants":35845,"Hon":35846,"Ġcorrecting":35847,"391":35848,"pps":35849,"ĠTerritories":35850,"Ġconferred":35851,"crazy":35852,"utor":35853,"ĠSurvival":35854,"Ġbrowsers":35855,"ĠConflict":35856,"pn":35857,"Ġdeprive":35858,"riage":35859,"ilan":35860,"à¦":35861,"949":35862,"Congratulations":35863,"radical":35864,"ĠHits":35865,"powerful":35866,"Ġcrypt":35867,"745":35868,"ĠRegistrar":35869,"ophile":35870,"ĠElement":35871,"cooked":35872,"ĠTwilight":35873,"Ġdemos":35874,"IER":35875,"Ġstricken":35876,"Magic":35877,"abby":35878,"ĠSack":35879,"ĠShrine":35880,"Nev":35881,"Probably":35882,"ĠWisdom":35883,"ulpt":35884,"opher":35885,"Ġcolonel":35886,"atl":35887,"Tem":35888,"kun":35889,"ĠIndie":35890,"Putin":35891,"jection":35892,"areth":35893,"ĠBullet":35894,"Ġsmartest":35895,"ĠEsper":35896,"Ġproficiency":35897,"Ġcessation":35898,"Ġmars":35899,"ĠDATA":35900,"sup":35901,"Ġostr":35902,"Jane":35903,"Ġpathogens":35904,"hd":35905,"ĠNK":35906,"Ġhorribly":35907,"regulated":35908,"Ġesteemed":35909,"ĠChinatown":35910,"Ġvibration":35911,"Ġoverboard":35912,"ĠRhod":35913,"Ġfeces":35914,"otation":35915,"Ġcryptic":35916,"Bal":35917,"OPER":35918,"Ġaffirmation":35919,"Ġmenstrual":35920,"Ġuntold":35921,"Ġanecdotes":35922,"ĠHOUSE":35923,"Ġcape":35924,"311":35925,"ittance":35926,"ĠRemy":35927,"ĠWaves":35928,"ĠCOVER":35929,"ordinate":35930,"Ġrestricts":35931,"Samsung":35932,"Ġplantations":35933,"olver":35934,"Better":35935,"ĠExplos":35936,"Ġnasal":35937,"ĠSyri":35938,"ĠPerl":35939,"Ġlatency":35940,"othermal":35941,"Sweet":35942,"ĠRyzen":35943,"ĠYuri":35944,"Ġsmack":35945,"Ġcrow":35946,"aniel":35947,"iological":35948,"Ġmonk":35949,"Ġtutorial":35950,"ĠAure":35951,"Ġcliffs":35952,"ameron":35953,"umers":35954,"ĠMour":35955,"Ġunorthodox":35956,"Ġgulf":35957,"Ġintrusive":35958,"ĠVIII":35959,"ĠFF":35960,"Ġenlarged":35961,"Ġspheres":35962,"ĠCheap":35963,"ĠAmend":35964,"Ġ::":35965,"Ġpacing":35966,"ĠStartup":35967,"ĠDating":35968,"racist":35969,"ĠDivine":35970,"Ġpollen":35971,"ĠMeaning":35972,"ĠLei":35973,"ĠMOT":35974,"ĠARC":35975,"legate":35976,"Ġbrav":35977,"Ross":35978,"redit":35979,"414":35980,"ringe":35981,"perhaps":35982,"SPA":35983,"Southern":35984,"Front":35985,"undrum":35986,"Ġassorted":35987,"ĠDawkins":35988,"ĠWrap":35989,"Ġconsequential":35990,"ĠFuji":35991,"458":35992,"Ġunst":35993,"Bon":35994,"acter":35995,"Trade":35996,"ingers":35997,"ĠClin":35998,"Ġstimul":35999,"arah":36000,"inois":36001,"urdy":36002,"Ġobsessive":36003,"Zone":36004,"Ġprimitive":36005,"unctions":36006,"Ġadapter":36007,"Ġassures":36008,"Daddy":36009,"Ġunsatisf":36010,"441":36011,"Ġ1910":36012,"Ġsecondly":36013,"truth":36014,"RED":36015,"040":36016,"Pope":36017,"venants":36018,"Ġestim":
36019,"Ġhemorrh":36020,"Ġexcruciating":36021,"459":36022,"Ġboils":36023,"ieved":36024,"Storm":36025,"Ġmanifestation":36026,"Ġinsulated":36027,"fb":36028,"Ġclassify":36029,"Mbps":36030,"Ġinclination":36031,"Ġaur":36032,"Ġpolarized":36033,"Ġoccupations":36034,"Secretary":36035,"Ġcustomizable":36036,"scribe":36037,"Ġadjunct":36038,"Ġ1922":36039,"rived":36040,"ocative":36041,"Friends":36042,"Oak":36043,"Ġpsyche":36044,"Ġwrinkles":36045,"anthrop":36046,"Ġcoercion":36047,"enos":36048,"Ġvariability":36049,"hma":36050,"phot":36051,"ĠXander":36052,"ĠDiss":36053,"Ġtigers":36054,"ahoo":36055,"focus":36056,"rical":36057,"grow":36058,"Ġseminal":36059,"Ġdisciples":36060,"Cas":36061,"Hundreds":36062,"Ġscissors":36063,"correct":36064,"Ġfascism":36065,"imoto":36066,"Ġnudity":36067,"charg":36068,"Ġrusty":36069,"ĠLyndon":36070,"Ġanomalies":36071,"onial":36072,"ĠiCloud":36073,"Ġannoy":36074,"Ġdistortion":36075,"Lou":36076,"ĠGiul":36077,"eyes":36078,"870":36079,"uum":36080,"ĠUltr":36081,"Action":36082,"cigarette":36083,"igators":36084,"kj":36085,"Ġ323":36086,"uine":36087,"Score":36088,"Ġmans":36089,"Security":36090,"Ġarom":36091,"ĠBoards":36092,"Ġwrists":36093,"602":36094,"Ġastronomy":36095,"Ġresin":36096,"width":36097,")/":36098,"Ġconcurrent":36099,"unless":36100,"606":36101,"ĠMagnet":36102,"Ġauthorizing":36103,"ĠJunk":36104,"atical":36105,"Ġauthent":36106,"zac":36107,"413":36108,"ĠGrape":36109,"Ġcircled":36110,"Ġooz":36111,"Ġvisceral":36112,"ointment":36113,"Ġincendiary":36114,"ĠBourbon":36115,"Ġgimmick":36116,"vette":36117,"Stan":36118,"Ġdetachment":36119,"488":36120,"Ġmisogyny":36121,"Ġenlight":36122,"utic":36123,"Ġinquire":36124,"ĠBEL":36125,"ascular":36126,"ĠWasserman":36127,"Dallas":36128,"Ġconstellation":36129,"Ġdystopian":36130,"504":36131,"ĠOptical":36132,"Ġsilhou":36133,"Girl":36134,"ĠGong":36135,"ĠHighest":36136,"????????":36137,"Sav":36138,"ocity":36139,"leted":36140,"Ġattrition":36141,"ĠExpedition":36142,"ĠKilled":36143,"501":36144,"ONES":36145,"dat":36146,"Ġglyphosate":36147,"Ġplugs":36148,"Ġlact":36149,"Fla":36150,"fps":36151,"riger":36152,"Ġparagraphs":36153,"Ġinnate":36154,"ĠFoo":36155,"aternity":36156,"ĠGry":36157,"Ġoneself":36158,"642":36159,"Iowa":36160,"oodle":36161,"ĠCoconut":36162,"ĠChess":36163,"ommel":36164,"Ġmagnesium":36165,"Ġairliner":36166,"Ġexceedingly":36167,"ĠCreator":36168,"YouTube":36169,"Ġsleeper":36170,"Ġlonging":36171,"ĠPercy":36172,"Ġmatrix":36173,"Ġâľ":36174,"Ġbarren":36175,"Mrs":36176,"Ġinvading":36177,"Ġincom":36178,"Ġemperor":36179,"Ġip":36180,"irie":36181,"Ġpredictably":36182,"ĠBless":36183,"Ġsuperpower":36184,":-":36185,"Ġpropensity":36186,"easy":36187,"educ":36188,"ĠPolly":36189,"Ġcumbersome":36190,"Ġcollide":36191,"016":36192,"Ġtransports":36193,"Ġscraps":36194,"below":36195,"Ġhairs":36196,"mentation":36197,"Ġevolves":36198,"ĠFallen":36199,"Ġunsurprisingly":36200,"Ġcuff":36201,"Ġ249":36202,"mental":36203,"ĠCamel":36204,"Ġ337":36205,"Clinton":36206,"Ġdecad":36207,"ĠSTEP":36208,"ĠTestament":36209,"Ġirresistible":36210,"ĠACE":36211,"Ġhamm":36212,"ĠTerr":36213,"Ġcaul":36214,"iggins":36215,"Ġproficient":36216,"resp":36217,"Ġheirs":36218,"Ġ321":36219,"dress":36220,"ĠClothing":36221,"Ġ560":36222,"Ġ264":36223,"ĠRobb":36224,"Ġfrail":36225,"Ġoptimizing":36226,"615":36227,"ĠRefuge":36228,"rowth":36229,"washing":36230,"Ġgenders":36231,"indu":36232,"ĠNAT":36233,"Ġleans":36234,"Ġeyed":36235,"Ġhilar":36236,"vice":36237,"wolf":36238,"Ġfatig":36239,"ococ":36240,"ĠCarry":36241,"Community":36242,"Clark":36243,"itably":36244,"sv":36245,"448":36246,"Ġnumer":36247,"Ġ1925":36248,"ĠBehavi
oral":36249,"ĠScream":36250,"Ġgeek":36251,"rake":36252,"ĠTTC":36253,"Ġadditives":36254,"ĠBye":36255,"ylon":36256,"Ġfoliage":36257,"ateral":36258,"rapnel":36259,"Science":36260,"Ġrecollection":36261,"thening":36262,"ĠUbisoft":36263,"ĠLur":36264,"ĠOkinawa":36265,"ĠProvision":36266,"ferred":36267,"ĠGrounds":36268,"Ġhops":36269,"aterial":36270,"Ġacad":36271,"Ġengulf":36272,"ĠApex":36273,"frequency":36274,"relations":36275,"ĠCorvette":36276,"ĠRepeat":36277,"Ġanew":36278,"Ġhes":36279,"ĠLair":36280,"ĠPSP":36281,"foundation":36282,"Band":36283,"ĠPublisher":36284,"Ġreciprocal":36285,"Ġ287":36286,"Ġpir":36287,"Adams":36288,"Ġprostitute":36289,"ĠMecca":36290,"ectomy":36291,"Ġskew":36292,"ĠLol":36293,"Voice":36294,"ĠCalais":36295,"ISION":36296,"rue":36297,"Ġgaping":36298,"prot":36299,"Ġ6000":36300,"Ġtilted":36301,"Ġgoofy":36302,"Stand":36303,"Ġfellows":36304,"Ġcurly":36305,"ĠPOW":36306,"Ġlore":36307,"Ġinhabited":36308,"ĠIdentification":36309,"Metro":36310,"Ġdispel":36311,"Ġinvoking":36312,"Ġdeleting":36313,"Ġstigmat":36314,"ĠDalai":36315,"Ġequate":36316,"Ġmascara":36317,"endered":36318,"ĠNYT":36319,"ĠCommittees":36320,"rians":36321,"ĠOlympus":36322,"ĠQR":36323,"ĠDrinking":36324,"Ġbatt":36325,"andr":36326,"computer":36327,"Senator":36328,"ĠTwist":36329,"ĠNoise":36330,"Ġcheesy":36331,"Ġ1931":36332,"Ġtyranny":36333,"Ġnegligible":36334,"ĠBok":36335,"Ġwebpage":36336,"ĠHEAD":36337,"ĠNovel":36338,"Ġquarry":36339,"Ġexpressive":36340,"Ġforgiving":36341,"Among":36342,"asin":36343,"ĠSuc":36344,"Democrats":36345,"795":36346,"Ġaback":36347,"¨":36348,"ĠNeon":36349,"392":36350,"ĠRNC":36351,"ĠPROC":36352,"sein":36353,"Ros":36354,"Ġemot":36355,"ĠASA":36356,"ĠSeb":36357,"ĠExtended":36358,"atern":36359,"Ġpsychedelic":36360,"Fil":36361,"ĠOrwell":36362,"ĠSOS":36363,"Ġconceive":36364,"Ġhobbies":36365,"Ġspecimens":36366,"ĠTEXT":36367,"sometimes":36368,"Mario":36369,"orpor":36370,"ĠTemporary":36371,"Ġapocalypse":36372,"Ġcounterproductive":36373,"ĠQUEST":36374,"ĠCargo":36375,"Amb":36376,"Ġoptic":36377,"groups":36378,"Ġparanoia":36379,".?":36380,"sounding":36381,"mediately":36382,"System":36383,"ubi":36384,"Ġuttered":36385,"Ġgraphs":36386,"âĢĭâĢĭ":36387,"Ġscientifically":36388,"Ġbluntly":36389,"Ġhopping":36390,"Fun":36391,"ĠSUPER":36392,"Ġrobe":36393,"VB":36394,"ĠQuote":36395,"Ġincarnation":36396,"Ġtreadmill":36397,"Ġ1915":36398,"Ġbart":36399,"669":36400,"Ġhoc":36401,"Ġ309":36402,"Ġimprovis":36403,"Ġhut":36404,"Ġmixer":36405,"ĠCt":36406,"span":36407,"Ġwatered":36408,"Ġpatriot":36409,"Ġdehyd":36410,"laughs":36411,"ĠFancy":36412,"ĠVoc":36413,"Ġintellect":36414,"ĠTid":36415,"Ġnesting":36416,"Tel":36417,"Ġ()":36418,"letter":36419,"ĠSeems":36420,"Ops":36421,"ĠContents":36422,"ript":36423,"hani":36424,"Ġrecru":36425,"Ġpickups":36426,"repair":36427,"Throughout":36428,"bear":36429,"Ġconquered":36430,"656":36431,"Ġmalf":36432,"Ġordained":36433,"755":36434,"ĠReprodu":36435,"brain":36436,"ĠOuts":36437,"ĠWage":36438,"Ru":36439,"________":36440,"ĠLAW":36441,"ĠWass":36442,"Ġcomplication":36443,"Fri":36444,"Ġregener":36445,"Wait":36446,"577":36447,"Ġmisconception":36448,"Ġbombardment":36449,"Ġunloaded":36450,"Ġdictionary":36451,"IU":36452,"025":36453,"etically":36454,"ĠNarr":36455,"repe":36456,"Ġassigning":36457,"Rail":36458,"Ġnotebooks":36459,"Ġingest":36460,"Ġrpm":36461,"Ġalienated":36462,"ĠCredits":36463,"Ġindis":36464,"ĠGathering":36465,"aration":36466,"-+-+-+-+":36467,"Ġori":36468,"Ġsr":36469,"ndra":36470,"Ġlibertarian":36471,"Ġcoerced":36472,"ording":36473,"Ġtranqu":36474,"Ġelbows":36475,"549":36476,"Ġping":36477,"ĠRELE":36478,"ĠYanuk":
36479,"Ġmaneuvers":36480,"ĠTrojan":36481,"IFIED":36482,"ĠViolent":36483,"è":36484,"Ġlest":36485,"Ġarrows":36486,"frog":36487,"anty":36488,"WB":36489,"ĠSeen":36490,"648":36491,"Ġclutter":36492,"ĠBender":36493,"Ġpessim":36494,"ĠTeg":36495,"Asian":36496,"IFIC":36497,"Ġexponential":36498,"Ġsponge":36499,"rite":36500,"ĠDAM":36501,"Ġtacit":36502,"ĠZoom":36503,"Ġolds":36504,"Ġonward":36505,"ĠSandwich":36506,"missible":36507,"isol":36508,"940":36509,"Ġinciner":36510,"ĠTrick":36511,"Ġawakening":36512,"Ġdart":36513,"ĠCouch":36514,"respons":36515,"ĠElephant":36516,"ĠPluto":36517,"ĠTags":36518,"itcher":36519,"644":36520,"702":36521,"Ġelectrons":36522,"ĠMyth":36523,"ĠAad":36524,"Danny":36525,"Ġcraw":36526,"ĠCertification":36527,"Ġtending":36528,"Ġpellets":36529,"Ġamused":36530,"ĠAuschwitz":36531,"ĠAppl":36532,"iris":36533,"ashion":36534,"walking":36535,"Ġabnorm":36536,"Cro":36537,"?:":36538,"ĠIcelandic":36539,"ĠAvailability":36540,"Ġcann":36541,"Opt":36542,"buster":36543,"ĠQuartz":36544,"Executive":36545,"tracks":36546,"igel":36547,"MIT":36548,"ĠTracking":36549,"Ġconditioned":36550,"Ġsampled":36551,"ĠGenius":36552,"Ġsubstit":36553,"ĠSiberia":36554,"Ġfrequ":36555,"historic":36556,"okin":36557,"OWS":36558,"1500":36559,"warts":36560,"ĠEtsy":36561,"licks":36562,"ĠSmooth":36563,"unity":36564,"515":36565,"Ġperk":36566,"aida":36567,"forts":36568,"ĠUA":36569,"RIC":36570,"Spain":36571,"ĠWired":36572,"cuts":36573,"Ġfurnace":36574,"ĠTOTAL":36575,"ĠTables":36576,"662":36577,"Fab":36578,"Ġquaint":36579,"ĠWorlds":36580,"ĠCabin":36581,"atche":36582,"List":36583,"ĠVO":36584,"Ġkeyword":36585,"Ġ258":36586,"Farm":36587,"timer":36588,"ĠVolt":36589,"Build":36590,"pressed":36591,"*,":36592,"Ġ324":36593,"aiman":36594,"TING":36595,"Ġsneaking":36596,"cery":36597,"Ġcrib":36598,"ĠIllust":36599,"later":36600,"Ġcompar":36601,"Ġpropulsion":36602,"647":36603,"ĠTrails":36604,"Ġperiphery":36605,"steel":36606,"Ġvividly":36607,"ĠConver":36608,"eatured":36609,"427":36610,"463":36611,"Ġapprox":36612,"spin":36613,"Ġconfigured":36614,"inside":36615,"razy":36616,"account":36617,"anye":36618,"riend":36619,"Ġbows":36620,"809":36621,"ĠDEF":36622,"ĠRez":36623,"Fans":36624,"ĠDF":36625,"Ġstains":36626,"ĠAtom":36627,"ĠConce":36628,"ĠTOM":36629,"ĠELECT":36630,"Ġdisappro":36631,"019":36632,"afia":36633,"ĠTemperature":36634,"Ġextracts":36635,"fab":36636,"Ġunsur":36637,"Ġseasoning":36638,"Ty":36639,"KB":36640,"Ġposit":36641,"Ġlocality":36642,"1200":36643,"cour":36644,"izons":36645,"hh":36646,"506":36647,"ĠDLC":36648,"iago":36649,"Ġcorpses":36650,"iddling":36651,"Mayor":36652,"Ġsimplistic":36653,"Ġlibel":36654,"Ġalmonds":36655,"Ġswast":36656,"Change":36657,"ĠJoker":36658,"MAR":36659,"ĠScully":36660,"Ġmailbox":36661,"VIDEO":36662,"ĠKyoto":36663,"esley":36664,"ĠIncredible":36665,"youtube":36666,"Ġinequalities":36667,"Ġbolts":36668,"Ġbothering":36669,"Ġattentive":36670,"ĠSparrow":36671,"Ġdiaper":36672,"Ġfanbase":36673,"Ġuncont":36674,"Ap":36675,"ĠQi":36676,"Price":36677,"471":36678,"Ġpearl":36679,"wid":36680,"899":36681,"ĠPony":36682,"casting":36683,"Ġinhabit":36684,"Ġunve":36685,"Ġinsur":36686,"ĠWee":36687,"658":36688,"Ġeffected":36689,"gger":36690,"Ġinstallments":36691,"imilar":36692,"FU":36693,"Ġinfertility":36694,"climate":36695,"HEAD":36696,"fashion":36697,"ĠTHEY":36698,"jc":36699,"Ġsatisf":36700,"ĠGuidelines":36701,"Ġinsure":36702,"ĠRSA":36703,"Ġvirt":36704,"Ġinterpre":36705,"Joshua":36706,"ĠShut":36707,"Ġtestimonies":36708,"Ñģ":36709,"untary":36710,"417":36711,"Ġbeck":36712,"ĠMilky":36713,"ç":36714,"Ġsequels":36715,"Ġ281":36716,"ĠRibbon":36717,"Ġroom
m":36718,"Ġsynchron":36719,"452":36720,"Ġ1926":36721,"Ġhawk":36722,"ĠDisorder":36723,"Ġbackstory":36724,"ĠNum":36725,"Ġoverheard":36726,"technical":36727,"Jud":36728,"aii":36729,"Ġdecon":36730,"ĠRape":36731,"ĠWarrant":36732,"Ġpoop":36733,"spir":36734,"Country":36735,"Ġweld":36736,"Ġabuser":36737,"Ġ------":36738,"material":36739,"Ġpreserves":36740,"spring":36741,"Ġpuzzled":36742,"ĠDebate":36743,"Joseph":36744,"Ġ272":36745,"Blood":36746,"antry":36747,"Ġconverge":36748,"Ġimaginable":36749,"oward":36750,"545":36751,"Ġfug":36752,"Vision":36753,"075":36754,"Ġadoptive":36755,"Ġunknow":36756,"Stream":36757,"Ġaffili":36758,"ĠPUR":36759,"ĠWally":36760,"Ġgamer":36761,"Ġfart":36762,"stice":36763,"Ġcongen":36764,"н":36765,"685":36766,"orst":36767,"ĠATF":36768,"Ġml":36769,"ĠMozilla":36770,"Ġcalmed":36771,"bage":36772,"ĠVault":36773,"arkable":36774,"ĠGuan":36775,"Ġclueless":36776,"umatic":36777,"Ġshameless":36778,"Ġpreached":36779,"Ġmisconceptions":36780,"Ġanthology":36781,"Ġbiomass":36782,"ĠPs":36783,"tails":36784,"Ġexcessively":36785,"Ġextr":36786,"Davis":36787,"Ġgrounding":36788,"Ġshortcuts":36789,"ĠShift":36790,"ĠRew":36791,"ĠIllum":36792,"Ġincite":36793,"sense":36794,"ĠScouting":36795,"otos":36796,"respond":36797,"Ġbeware":36798,"gran":36799,"ĠXV":36800,"JM":36801,"ĠSounders":36802,"Ġ276":36803,"Ġshockingly":36804,"Ġgastrointestinal":36805,"erences":36806,"df":36807,"ĠNG":36808,"Ġdiscredited":36809,"Ġdemoral":36810,"Ġgladly":36811,"Tal":36812,"ĠPredator":36813,"708":36814,"Ġdoi":36815,"Ġdecentral":36816,"illin":36817,"printed":36818,"Ġinflicting":36819,"ribes":36820,"Ġsupper":36821,"abc":36822,"Ġgraz":36823,"980":36824,"Bull":36825,"Ġmillionaires":36826,"Ġvanity":36827,"imony":36828,"Ġbiologists":36829,"Ġalternating":36830,"Ġsleeps":36831,"Force":36832,"ĠPrinc":36833,"ĠTransgender":36834,"Ġ314":36835,"ĠProvide":36836,"enthal":36837,"Ġplum":36838,"Ġresurrect":36839,"CW":36840,"Ġinjure":36841,"ĠPerspective":36842,"ĠBei":36843,"Ġrestless":36844,"aciously":36845,"Ġchlor":36846,"catch":36847,"ĠLuigi":36848,"Ġinconsistency":36849,"Ġwhiff":36850,"Arizona":36851,"ustration":36852,"ĠRaid":36853,"ĠDemons":36854,"ĠVita":36855,":\"":36856,"Ġmigraine":36857,"ĠHamb":36858,"Ġwidget":36859,"451":36860,"Ġrandomized":36861,"etchup":36862,"ĠParticularly":36863,"Ġdiced":36864,"Ġperfected":36865,"roid":36866,"710":36867,"Ġreflections":36868,"Ġantioxidants":36869,"ĠLabel":36870,"Ġ326":36871,"igious":36872,"ĠEucl":36873,"608":36874,"Ġstrand":36875,"ĠDirt":36876,"ĠLift":36877,"suits":36878,"ĠControls":36879,"RAW":36880,"Ġcowardly":36881,"ĠUmb":36882,"Growing":36883,"mington":36884,"Ġ339":36885,"ĠCommit":36886,"Ġnonviolent":36887,"Ġcontaminants":36888,"Ġacrylic":36889,"ĠMAP":36890,"Ġ269":36891,"Ġdegrading":36892,"Ġmiracles":36893,"ĠEstablishment":36894,"despite":36895,"cry":36896,"Ġpauses":36897,"Ġmythical":36898,"Ġtwenties":36899,"Actually":36900,"phan":36901,"recorded":36902,"Ġunwillingness":36903,"engineering":36904,"avored":36905,"Ġdevout":36906,"item":36907,"Ġbunny":36908,"ĠMerchants":36909,"Ġconsumes":36910,"508":36911,"Ġlex":36912,"ĠClause":36913,"Ġchecklist":36914,"Sus":36915,"uther":36916,".#":36917,"Bit":36918,"uay":36919,"bf":36920,"Ġpopulace":36921,"Ġ316":36922,"Ġcombust":36923,"Ġnano":36924,"Ġpopul":36925,"Indust":36926,"Ġcapitalists":36927,"ĠFiles":36928,"Bang":36929,"Ġkosher":36930,"atile":36931,"Ġincrim":36932,"OVER":36933,"Ġmelee":36934,"ymph":36935,"ĠPupp":36936,"evin":36937,"ĠMolecular":36938,"Ġmisinterpret":36939,"vc":36940,"olithic":36941,"ĠSimpsons":36942,"Ġshrew":36943,"Ġselectively":36944,"ĠDrain":369
45,"mittedly":36946,"conservative":36947,"True":36948,"Using":36949,"562":36950,"apon":36951,"Ġapprentice":36952,"Mas":36953,"ĠBattlefield":36954,"Ġfing":36955,"Ġconcoct":36956,"ĠVIS":36957,"ĠHuss":36958,"Ġdetects":36959,"ĠFriedrich":36960,"Ġlatitude":36961,"Custom":36962,"ĠÙ":36963,"ĠBones":36964,"whose":36965,"Ġredirected":36966,"aligned":36967,"ĠNeighbor":36968,"ĠAmen":36969,"ĠMarble":36970,"Beyond":36971,"Ġbiomark":36972,"Ġerroneous":36973,"Atlanta":36974,"Ġmasturb":36975,"ĠAssoci":36976,"Albert":36977,"Ġcigar":36978,"ĠFraz":36979,"ethe":36980,"skinned":36981,"Ford":36982,"throp":36983,"Acc":36984,"Ġtricked":36985,"Ġoverwhelm":36986,"Ġimplements":36987,"ĠGeForce":36988,"Ġbounces":36989,"Ġmoderator":36990,"910":36991,"ĠButterfly":36992,"ĠIllegal":36993,"ĠSubject":36994,"RET":36995,"ĠFreeze":36996,"ĠNewt":36997,"Ġuterus":36998,"696":36999,"Ġ267":37000,"tk":37001,"Ġdodged":37002,"liam":37003,"Ġparasite":37004,"obal":37005,"ĠHubble":37006,"Ġtheology":37007,"âĢĶ\"":37008,"height":37009,"Ale":37010,"employment":37011,"ĠWallet":37012,"cessive":37013,"Ġ404":37014,"Ġsimilarity":37015,"zens":37016,"Ġdumps":37017,"Ġdepress":37018,"Ġlifeless":37019,"535":37020,"oard":37021,"Scotland":37022,"Ġbelievable":37023,"Ġcalculator":37024,"ĠNaked":37025,"Ġremission":37026,"Ġoranges":37027,"ĠSections":37028,"Ġentangled":37029,"Ġuncanny":37030,"Ġteaspoons":37031,"vr":37032,"ĠPorn":37033,"Organ":37034,"Ġbund":37035,"Doug":37036,"ĠGHz":37037,"Major":37038,"abus":37039,"Bell":37040,"avier":37041,"Ġimplanted":37042,"RON":37043,"Fle":37044,"462":37045,"509":37046,"Ġgoggles":37047,"Ġmanuscript":37048,"NOT":37049,"ĠCanaveral":37050,"ĠDID":37051,"Season":37052,"HAEL":37053,"Edge":37054,"appiness":37055,"DIS":37056,"Ġplotted":37057,"Ġwrought":37058,"Ġquarantine":37059,"Ġrearr":37060,"itage":37061,"Ġsocket":37062,"Ġbrig":37063,"Ġunbelievably":37064,"abytes":37065,"TG":37066,"Ġ444":37067,"ĠOffic":37068,"Ġacquaintances":37069,"ĠComparison":37070,"Nine":37071,"ĠFeast":37072,"758":37073,"YC":37074,"Ġfiner":37075,"ĠStrawberry":37076,"Ġeternity":37077,"liament":37078,"urrency":37079,"ĠCortana":37080,"ĠSabbath":37081,"Ġsprinkle":37082,"unker":37083,"ĠUE":37084,"flies":37085,"Ġblender":37086,"Ġacutely":37087,"emark":37088,"ĠAffect":37089,"Politics":37090,"Ġsane":37091,"Ġcorrosion":37092,"Ġspirituality":37093,"Ġredeemed":37094,"Ġingrained":37095,"manager":37096,"joined":37097,"ĠDumb":37098,"ĠHeight":37099,"Ġseventeen":37100,"Ġ640":37101,"Ġreviewer":37102,"Ġwallpaper":37103,"Ġnurs":37104,"Ġsubset":37105,"703":37106,"Ġsymbolism":37107,"Ġdudes":37108,"Ġmismatch":37109,"gans":37110,"please":37111,"ĠKE":37112,"Ġatom":37113,"004":37114,"ionic":37115,"Ġservings":37116,"Ġproxies":37117,"Ġtranscription":37118,"yx":37119,"bowl":37120,"iscovery":37121,"ĠScotch":37122,"brace":37123,"riter":37124,"ĠDesktop":37125,"Ġlimestone":37126,"æ":37127,"Neg":37128,"013":37129,"Ġformulas":37130,"Ġeval":37131,"Ġzombies":37132,"GU":37133,"ĠHermes":37134,"Ġbrist":37135,"Mand":37136,"Ġmastery":37137,"Ġgoverns":37138,"Ġconstrued":37139,"region":37140,"Ġemitted":37141,"Vice":37142,"060":37143,"Jennifer":37144,"mol":37145,"Ġjealousy":37146,"Ġingenuity":37147,"bug":37148,"olitical":37149,"Ġperce":37150,"ĠSapp":37151,"dim":37152,"utral":37153,"Ġinterrogated":37154,"Gate":37155,"Ġamber":37156,"911":37157,"ĠEveryday":37158,"ĠDDR":37159,"ĠBlades":37160,"Ġnifty":37161,"Ġmurderers":37162,"Ġpresumption":37163,"Pitt":37164,"Div":37165,"ĠDestination":37166,"having":37167,"Ġprolifer":37168,"Ġbreaker":37169,"ĠBW":37170,"Ġcourier":37171,"Try":37172,"ĠBUR":37173,"itized":37
174,"Ġcompress":37175,"Ġrepetition":37176,"ĠTik":37177,"Ġdivergence":37178,"Ġcube":37179,"everyone":37180,"ĠPoles":37181,"418":37182,"ĠHighly":37183,"468":37184,"Jeremy":37185,"Ġcontradictions":37186,"Ġmanure":37187,"Sad":37188,"pletion":37189,"626":37190,"Ġ279":37191,"Ġfrivolous":37192,"ĠCanaan":37193,"olor":37194,"Ġincapac":37195,"ĠGentle":37196,"Ġinsomnia":37197,"ĠJing":37198,"688":37199,"ĠViews":37200,"Ġsyll":37201,"486":37202,"antom":37203,"Ġcog":37204,"aintain":37205,"ĠDVDs":37206,"Ġ318":37207,"archy":37208,"Ġreprodu":37209,"Ġconcedes":37210,"Brook":37211,"Ġinterpreting":37212,"Ġextracting":37213,"Ġess":37214,"uning":37215,"ĠMathematics":37216,"iably":37217,"Ġmultit":37218,"ĠActs":37219,"iliated":37220,"Foreign":37221,"Ġflaming":37222,"ĠCoup":37223,"Ġglitches":37224,"Ġdifferentiation":37225,"ihadi":37226,"ĠDrone":37227,"Ġincompatible":37228,"asher":37229,"documented":37230,"agons":37231,"wark":37232,"Ġshielding":37233,"ĠCorrect":37234,"romising":37235,"uned":37236,"Ġconduit":37237,"ĠDiablo":37238,"Ġbeginner":37239,"Ġarchived":37240,"smanship":37241,"ĠTBD":37242,"digy":37243,"Ġ322":37244,"Ġ268":37245,"ĠTears":37246,"ĠPriority":37247,"Italy":37248,"Ġ^":37249,"annot":37250,"different":37251,"Joy":37252,"Ġbreathed":37253,"heon":37254,"Ġracists":37255,"Ġvascular":37256,"Between":37257,"etition":37258,"ĠLikely":37259,"icans":37260,"529":37261,"ĠMonsters":37262,"agy":37263,"Orange":37264,"hide":37265,"SIM":37266,"Ġdeceive":37267,"ĠDAR":37268,"Ġshattering":37269,"Ġow":37270,"peak":37271,"Ġpreferable":37272,"Ġpiping":37273,"ĠLEDs":37274,"ĠCOMMUN":37275,"ĠConstruct":37276,"008":37277,"Ġdissatisfied":37278,"ĠKNOW":37279,"ĠFrame":37280,"ĠToast":37281,"Ġadore":37282,"history":37283,"Soviet":37284,"reporting":37285,"Ġ266":37286,"pract":37287,"ĠSauce":37288,"686":37289,"ievers":37290,"ĠDomain":37291,"ousand":37292,"768":37293,"Cos":37294,"609":37295,"432":37296,"Ġtransl":37297,"oof":37298,"Ġ292":37299,"Turkish":37300,"ĠPOLIT":37301,"Harris":37302,"bj":37303,"Ġrodents":37304,"556":37305,"Ġintellectuals":37306,"Ġinteroper":37307,"ixt":37308,"Ġunbiased":37309,"itia":37310,"Ġ504":37311,"Ġbuttocks":37312,"ĠFlam":37313,"Ġchrom":37314,"Ġ259":37315,"shock":37316,"ĠRJ":37317,"ĠLich":37318,"422":37319,"Ġcondom":37320,"phen":37321,"Ġvigilante":37322,"Ġowl":37323,"Ġdwellings":37324,"Ġarchaeologists":37325,"Ġ680":37326,"RAY":37327,"Ġ1921":37328,"Ġ625":37329,"ĠPLAN":37330,"alde":37331,"030":37332,"abbling":37333,"Wave":37334,"Ni":37335,"Ġfurthe":37336,"JS":37337,"Ġpsycho":37338,"ĠFrançois":37339,"Ġundergrad":37340,"Ġsuccessors":37341,"Ġpadded":37342,"introdu":37343,"Ġreasoned":37344,"Ġvas":37345,"creen":37346,"onsequ":37347,"starter":37348,"Court":37349,"ĠHIS":37350,"Ġplaster":37351,"Ġranger":37352,"Ġ298":37353,"esters":37354,"Ġglare":37355,"ype":37356,"Ġcompute":37357,"Ali":37358,"mallow":37359,"Ġmasculine":37360,"ĠExamination":37361,"improve":37362,"Ġdeclass":37363,"Ġdecoration":37364,"ĠFIG":37365,"abre":37366,"Ġstale":37367,"abling":37368,"ĠRusty":37369,"ĠASAP":37370,"Ġadjusts":37371,"Ġbluff":37372,"density":37373,"Ġdisse":37374,"Ġcensor":37375,"ervatives":37376,"Ġkettle":37377,"Ġskeptics":37378,"fd":37379,"Imm":37380,"461":37381,"Ġadvantageous":37382,"419":37383,"ĠPresents":37384,"482":37385,"ĠRewards":37386,"Ġovershadow":37387,"Alabama":37388,"ĠCPC":37389,"Ġsock":37390,"ĠChurches":37391,"hidden":37392,"Ġcringe":37393,"ĠHOR":37394,"PB":37395,"Pretty":37396,"Hong":37397,"?),":37398,"687":37399,"Ġgrocer":37400,"472":37401,"565":37402,"itent":37403,"Ġpartake":37404,"wait":37405,"usters":37406,"Ġcones":37407,"Ġco
ncurrently":37408,"Ġlevers":37409,"Ġaroma":37410,"ĠDrill":37411,"498":37412,"804":37413,"ithering":37414,"Ġ355":37415,"Ġlegion":37416,"Ġvitri":37417,"Ġcondu":37418,"Angel":37419,"OWER":37420,"Ġ{*":37421,"Simon":37422,"Ġsynthesis":37423,"ĠContainer":37424,"sheet":37425,"Bi":37426,"ĠRaspberry":37427,"Ġ328":37428,"anders":37429,"ĠBlossom":37430,"ĠFINAL":37431,"acid":37432,"Ġborderline":37433,"Aut":37434,"Ġoriginate":37435,"Ġtransm":37436,"Ġbuffalo":37437,"atial":37438,"ĠCraigslist":37439,"Ġcredential":37440,"Ġdisbanded":37441,"Ġunprotected":37442,"ĠZer":37443,"waukee":37444,"diagn":37445,"1999":37446,"doc":37447,"ellig":37448,"Ġwarheads":37449,"ĠADS":37450,"verified":37451,"ĠHAM":37452,"785":37453,"Cu":37454,"Ġenorm":37455,"ĠSkill":37456,"\\":37457,"Ġbashing":37458,"Ġloudspe":37459,"during":37460,"Ġdebunked":37461,"adequ":37462,"Ġuh":37463,"Feed":37464,"ificial":37465,"pred":37466,"ĠPassing":37467,"Kyle":37468,"enance":37469,"ĠMex":37470,"itect":37471,"Ġcavern":37472,"Ġtrop":37473,"ĠEliot":37474,"753":37475,"Ġencountering":37476,"Ġsulf":37477,"Always":37478,"ĠGest":37479,"Ġadditive":37480,"Ġ278":37481,"Ġloops":37482,"liberal":37483,"urion":37484,"ĠRefresh":37485,"ĠDynasty":37486,"Ġsweaty":37487,"Ġsails":37488,"protection":37489,"ĠRooms":37490,"ĠEXT":37491,"few":37492,"ĠPaid":37493,"Ġ377":37494,"Ġcolonialism":37495,"Ġchuckle":37496,"Ġarmour":37497,"Ġsoftly":37498,"661":37499,"Building":37500,"ĠAMER":37501,"Ġbabe":37502,"Ġshif":37503,"Sem":37504,"Ġdisembark":37505,"ĠSubstance":37506,"Stone":37507,"Ġdialect":37508,"ĠAph":37509,"Ġspreadsheet":37510,"ierra":37511,"Ġlineage":37512,"ĠCust":37513,"ĠBabe":37514,"Ġwra":37515,"ĠMafia":37516,"Ġflakes":37517,"ĠEVER":37518,"cong":37519,"ĠCreation":37520,"loo":37521,"ĠAmpl":37522,"ĠSpectre":37523,"012":37524,"geons":37525,"Ġswarm":37526,"ĠPale":37527,"ĠSeek":37528,"itures":37529,"Ġarri":37530,"Ġredistribution":37531,"campaign":37532,"ĠAbility":37533,"579":37534,"ournament":37535,"locks":37536,"Ġnests":37537,"ĠConstantine":37538,"Ġwhisper":37539,"Ġshrouded":37540,"changed":37541,"ĠEnhanced":37542,"Ġ920":37543,"Ġglob":37544,"Tam":37545,"Ġoutwe":37546,"Ġilliter":37547,"Ġsurg":37548,"Nap":37549,"ĠAerial":37550,"iferation":37551,"Egypt":37552,"ERO":37553,"Ġantip":37554,"environment":37555,"machine":37556,"Ġrupture":37557,"treatment":37558,"internal":37559,"Ġinfiltrate":37560,"Ġgratification":37561,"Uber":37562,"Ġunequal":37563,"Ġflav":37564,"Lord":37565,"tein":37566,"ĠLOT":37567,"Ġbullshit":37568,"Ġoriginals":37569,"Ġminced":37570,"Ġmultiply":37571,"ayson":37572,"Ġrecomm":37573,"Ġreceptors":37574,"Ġflashlight":37575,"Ġinhuman":37576,"Future":37577,"Ġpuzzling":37578,"Ġrouters":37579,"Ġuncontroll":37580,"responsible":37581,"Ġcellul":37582,"ĠTablet":37583,"Ġbolted":37584,"Ġpermissible":37585,"adra":37586,"picture":37587,"ODY":37588,"BRE":37589,"Iraq":37590,"Total":37591,"rising":37592,"Ġ273":37593,"nv":37594,"Ġ327":37595,"alysed":37596,"infect":37597,"Ġ1912":37598,"ĠVT":37599,"ĠLazarus":37600,"ictive":37601,"Bu":37602,"ĠNEVER":37603,"ĠCODE":37604,"ĠModified":37605,"fetched":37606,"ĠTrap":37607,"mob":37608,"Ġupkeep":37609,"WARD":37610,"Ġbrewed":37611,"Ġsaliva":37612,"Ġ1923":37613,"Ġsteroid":37614,"rather":37615,"ĠVER":37616,"Ġcontextual":37617,"Ont":37618,"ĠLSD":37619,"agine":37620,"Ġaudible":37621,"ĠMeta":37622,"erek":37623,"aults":37624,"ĠOttoman":37625,"ĠIncludes":37626,"Ġocc":37627,"678":37628,"ipple":37629,"Ġcontrasted":37630,"014":37631,"ĠLenin":37632,"Ġomega":37633,"885":37634,"civil":37635,"Ġoverload":37636,"},\"":37637,"Ġprogrammers":37638,"Ġgeometry":37639,
"?).":37640,"shift":37641,"ĠClancy":37642,"nr":37643,"verb":37644,"Ġ760":37645,"Ġstaggered":37646,"Playing":37647,"ĠSmile":37648,"Ġcomplains":37649,"ĠSloven":37650,"Ġdisobedience":37651,"creator":37652,"Ġly":37653,"incoln":37654,"emp":37655,"Ġcrate":37656,"ĠPledge":37657,"ĠGPUs":37658,"protected":37659,"Vo":37660,"medium":37661,"Ġacet":37662,"603":37663,"478":37664,"469":37665,"Further":37666,"Ġsensed":37667,"Lock":37668,"Ġcrabs":37669,"ĠChains":37670,"ĠNEO":37671,"Ġexperimented":37672,"ĠRhythm":37673,"802":37674,"Ġhormonal":37675,"491":37676,"ĠMedian":37677,"Ġevaluates":37678,"ippi":37679,"Ġremovable":37680,"Ġvector":37681,"ilant":37682,"TERN":37683,"Ġpurch":37684,"ĠBind":37685,"athering":37686,"Ġcords":37687,"Lib":37688,"Ġdamned":37689,"orc":37690,"ĠEverywhere":37691,"Ġgorilla":37692,"ystem":37693,"fail":37694,"Ġecstasy":37695,"allion":37696,"Sea":37697,"Ġuploading":37698,"ĠSpecific":37699,"Ġreinforcement":37700,"cerned":37701,"ĠDollars":37702,"Twenty":37703,"OX":37704,"ADD":37705,"Ġbraces":37706,"Ġraven":37707,"Ġ1890":37708,"Ġcirculate":37709,"udden":37710,"Disney":37711,"ĠNope":37712,"ĠBagg":37713,"ĠBuddha":37714,"rael":37715,"urus":37716,"ĠKarma":37717,"Ġcurl":37718,"Ġflips":37719,"Ġbearer":37720,"Ġmisunderstand":37721,"Ġabras":37722,"ĠAssassin":37723,"Fact":37724,"Ġinterf":37725,"Ġvantage":37726,"ĠGenocide":37727,"Ġdeducted":37728,"Sep":37729,"McC":37730,"Jessica":37731,"ĠBackup":37732,"Ian":37733,"urnal":37734,"Ġlaborers":37735,"438":37736,"ĠContinuous":37737,"ĠNBN":37738,"Cool":37739,"mitting":37740,"ĠNormandy":37741,"Ġpurchaser":37742,"Ġacquainted":37743,"Ġblogging":37744,"route":37745,"marine":37746,"Ġstartled":37747,"6000":37748,"ĠRadical":37749,"kiss":37750,"ĠBlitz":37751,"express":37752,"Ġ601":37753,"hent":37754,"Ġtink":37755,"pires":37756,"launch":37757,"sg":37758,"ĠEffects":37759,"Ġstiffness":37760,"ĠAllies":37761,"Ġthirsty":37762,"Ġmyst":37763,"Ġlogger":37764,"Ġstances":37765,"ĠEvaluation":37766,"090":37767,"Ġproclaiming":37768,"Ġhypocritical":37769,"496":37770,"Ġcaus":37771,"ĠKappa":37772,"ĠLann":37773,"ĠScientist":37774,"Ġempath":37775,"etrical":37776,"lege":37777,"Hom":37778,"Aud":37779,"ĠColors":37780,"ĠStraw":37781,"each":37782,"ĠPatron":37783,"Ġnuance":37784,"send":37785,"ourney":37786,"ĠPhen":37787,"Ġamino":37788,"ĠSeconds":37789,"Sn":37790,"ĠCiv":37791,"Ġconglomer":37792,"Ġ411":37793,"versely":37794,"487":37795,"prises":37796,"Ġ277":37797,"necessary":37798,"Ġdope":37799,"Late":37800,"Ġrake":37801,"ĠBrigham":37802,"ogun":37803,"ĠSTATES":37804,"ĠGaal":37805,"Ġintellig":37806,"Ġglacier":37807,"destruct":37808,"ĠZucker":37809,"484":37810,"Ġ332":37811,"ĠArist":37812,"Ġprotagonists":37813,"Ġgraveyard":37814,"names":37815,"ĠPax":37816,"Ġthresholds":37817,"Seeing":37818,"Ġmunitions":37819,"Ġcontradicts":37820,"684":37821,"Ġ529":37822,"ĠConcent":37823,"ĠBlessed":37824,"Hz":37825,"Ġinhibit":37826,"Ġshenanigans":37827,"ĠSpear":37828,"Ġoverlay":37829,"ritis":37830,"ilus":37831,"Ġvariance":37832,"Ġoverpower":37833,"viol":37834,"erning":37835,"Ġpolarization":37836,"aito":37837,"GV":37838,"493":37839,"Keeping":37840,"Ġpaternity":37841,"ĠHappiness":37842,"oops":37843,"sb":37844,"xit":37845,"ophysical":37846,"Ġconclusive":37847,"Arch":37848,"Ġmiser":37849,"Ġsuffice":37850,"ĠStout":37851,"Ġhrs":37852,"643":37853,"Ġprincipled":37854,"azine":37855,"atorium":37856,"ĠFairy":37857,"Ġinfiltrated":37858,"ĠHier":37859,"ĠMIA":37860,"inders":37861,"Ġrebutt":37862,"Ġxx":37863,"Ġfeats":37864,"izzle":37865,"Ġ780":37866,"668":37867,"Ġrepressive":37868,"ĠYugoslavia":37869,"sole":37870,"704":37871,"Ġ
RPG":37872,"ĠTroll":37873,"packing":37874,"ĠDatabase":37875,"ĠVelvet":37876,"ĠRELEASE":37877,"ablish":37878,"smoking":37879,"ĠBottle":37880,"ĠFully":37881,"ĠLean":37882,"Ġobjectively":37883,"ĠFounding":37884,"ĠClassics":37885,"Ġmosaic":37886,"473":37887,"Ġrooft":37888,"Ġcentrally":37889,"Ġdismissive":37890,"Ġparasites":37891,"009":37892,"Ġcursed":37893,"Ġvex":37894,"Ġeconom":37895,"ĠBore":37896,"enery":37897,"ĠFundamental":37898,"ĠOmni":37899,"489":37900,"714":37901,"Ġforegoing":37902,"Ġfragment":37903,"oros":37904,"070":37905,"ĠFaust":37906,"Ġsucking":37907,"Ġnode":37908,"Ġrighteous":37909,"ĠPowered":37910,"426":37911,"HQ":37912,"Ġchronically":37913,"ĠBAL":37914,"Ġprest":37915,"Ġrapists":37916,"ĠRelationship":37917,"ĠCHR":37918,"Ġlinen":37919,"Ġnumerical":37920,"oters":37921,"Ġiterations":37922,"ttes":37923,"ĠENTER":37924,"Ġrabbi":37925,"Ġhoard":37926,"Ġmerciless":37927,"Ġrobes":37928,"ĠSpray":37929,"Ġadvers":37930,"ilantro":37931,"483":37932,"Ġfungus":37933,"Ġalcoholism":37934,"anasia":37935,"ĠCruiser":37936,"Ġmorals":37937,"cision":37938,"measures":37939,"Ġsabot":37940,"Ġrecol":37941,"ĠSaur":37942,"ĠError":37943,"Ġmysteriously":37944,"sle":37945,"Ġfeminists":37946,"д":37947,"ackle":37948,"ĠMarxist":37949,"Ġselves":37950,"Ġdoorway":37951,"Ġdiscard":37952,"Ġbandits":37953,"ĠDive":37954,"ameless":37955,"TRY":37956,"Ġgull":37957,"Ġrepublican":37958,"sr":37959,"ĠDynamo":37960,"Ġembryo":37961,"MENTS":37962,"ĠLOW":37963,"Ġ319":37964,"Ġgly":37965,"Ġcowork":37966,"Coll":37967,"Ġcris":37968,"ĠBanana":37969,"reality":37970,"Ġmobilization":37971,"unal":37972,"Updated":37973,"Crew":37974,"ĠGideon":37975,"Ġvines":37976,"Ġknitting":37977,"Ġdag":37978,"ĠSurv":37979,"Ġvacc":37980,"Ġimpulses":37981,"Northern":37982,"Ġnanop":37983,"allows":37984,"UTH":37985,"Ġflashbacks":37986,"alsa":37987,"Ġ282":37988,"Ġtransmissions":37989,"ĠAlmighty":37990,"Office":37991,"ĠBride":37992,"ĠBeasts":37993,"othy":37994,"ĠClouds":37995,"ĠDyn":37996,"ĠJolly":37997,"District":37998,"Ġveget":37999,"Ġantit":38000,"ĠSmoking":38001,"hess":38002,"Ġcompose":38003,"Ġreligiously":38004,"ĠHY":38005,"Ġfluorescent":38006,"rame":38007,"ĠMeier":38008,"ĠSQ":38009,"benefit":38010,"Thirty":38011,"559":38012,"ĠCance":38013,"586":38014,"Ġgrouped":38015,"Ġphys":38016,"Ġrebellious":38017,"ĠBASE":38018,"chid":38019,"582":38020,"ĠLessons":38021,"ĠWonderful":38022,"ODE":38023,"uctions":38024,"Ġbarbaric":38025,"rahim":38026,"635":38027,"Ġcloves":38028,"ĠNIH":38029,"ossession":38030,"Employ":38031,"Ġliberate":38032,"Gro":38033,"Ġmagician":38034,"ountain":38035,"FORM":38036,"533":38037,"Ġunpredict":38038,"rity":38039,"Ġfaked":38040,"plets":38041,"ppelin":38042,"Living":38043,"Ġnearer":38044,"Ġsuperiors":38045,"Ur":38046,"Ġheroism":38047,"Ġbearded":38048,"006":38049,"Cole":38050,"1970":38051,"Ġsill":38052,"ĠReduce":38053,"OLOG":38054,"onel":38055,"Billy":38056,"ĠPainter":38057,"ansas":38058,"Ġintermediary":38059,"trump":38060,"ĠMith":38061,"otom":38062,"434":38063,"Ġterrit":38064,"Wa":38065,"Ġsuprem":38066,"Rh":38067,"liction":38068,"ĠDEAD":38069,"Ġbothers":38070,"503":38071,"Ġfrogs":38072,"Ġsprinkled":38073,"Ġnil":38074,"628":38075,"Private":38076,"ĠKGB":38077,"Ġoverriding":38078,"Ġdeceived":38079,"698":38080,"idium":38081,"Ġseeker":38082,"Final":38083,"Ġsubconscious":38084,"Ġwom":38085,"Ġcass":38086,"Ġchicks":38087,"Ġverifying":38088,"ective":38089,"inia":38090,"ĠDetection":38091,"MH":38092,"fortable":38093,"ĠISPs":38094,"Ġcrumble":38095,"ĠRecap":38096,"598":38097,"ummies":38098,"export":38099,"Irish":38100,"Ġlil":38101,"ĠRapt":38102,"ĠRIGHT":38103,"Ġa
necdotal":38104,"Ġpiercing":38105,"deck":38106,"Liber":38107,"Books":38108,"Ġassassin":38109,"Tur":38110,"revolution":38111,"ĠSheep":38112,"ĠPublishers":38113,"EMS":38114,"iosis":38115,"finder":38116,"ĠCuriosity":38117,"ARB":38118,"ĠConvers":38119,"IVES":38120,"clave":38121,"ĠChaos":38122,"ĠMim":38123,"ĠCostume":38124,"Ġtwe":38125,"Ġintim":38126,"757":38127,"berto":38128,"Ġ261":38129,"VPN":38130,"cribed":38131,"ĠVerb":38132,"cb":38133,"Ġaxle":38134,"Ġsandwic":38135,"Ice":38136,"ĠThermal":38137,"654":38138,"709":38139,"ĠPact":38140,"ĠEnsure":38141,"izable":38142,"497":38143,"Ġbloodstream":38144,"Aw":38145,"Ġleakage":38146,"Ġalleg":38147,"ĠMelody":38148,"681":38149,"Austin":38150,"428":38151,"Ġsummarized":38152,"ĠDefendants":38153,"ĠVader":38154,"Ê":38155,"Ġ1880":38156,"Ġassemb":38157,"YOU":38158,"GREEN":38159,"jury":38160,"4000":38161,"Ġvenerable":38162,"Ġcomputational":38163,"Ġperpetuate":38164,"Ġtorpedo":38165,"Ġaborted":38166,"Ġrhetorical":38167,"ĠOvert":38168,"Ġacknowledgment":38169,"essment":38170,"ĠIGN":38171,"ĠSheen":38172,"571":38173,"Ġcontag":38174,"Ġcultiv":38175,"Ġspawn":38176,"mess":38177,"Dur":38178,"Ġvortex":38179,"ixties":38180,"ĠBlow":38181,"Sum":38182,"Åį":38183,"Rom":38184,"ĠRadeon":38185,"Fed":38186,"Ġameric":38187,"ĠAnth":38188,"Ġantic":38189,"Ġfortress":38190,"Cold":38191,"ĠPredict":38192,"Fake":38193,"Ġilluminate":38194,"Find":38195,"Ġintellectually":38196,"Ġgon":38197,"alker":38198,"Ġinvoice":38199,"IELD":38200,"Ġfools":38201,"ĠEnding":38202,"-(":38203,"Ġalk":38204,"ĠControlled":38205,"Ġpurposefully":38206,"ĠChronic":38207,"Ġrele":38208,"ĠOps":38209,"Party":38210,"ethnic":38211,"ĠSpecifications":38212,"ffee":38213,"ĠTeach":38214,"ulas":38215,"Ġenslaved":38216,"onomy":38217,"Ġtenets":38218,"Ġammonia":38219,"Ġ1913":38220,"Ġdripping":38221,"612":38222,"659":38223,"ĠSagan":38224,"Ġinaccur":38225,"Ġabol":38226,"ĠLIKE":38227,"Ġvisualization":38228,"learn":38229,"anon":38230,"cipline":38231,"Ġadaptations":38232,"Ġwaiter":38233,"nergy":38234,"507":38235,"ĠDK":38236,"YD":38237,"Ġpedest":38238,"Sense":38239,"ĠObst":38240,"Ġresurrection":38241,"ĠSPECIAL":38242,"Unlike":38243,"Ġlia":38244,"Ġpersuasive":38245,"iatrics":38246,"ONEY":38247,"esthetic":38248,"494":38249,"zik":38250,"Ġfract":38251,"ĠOutput":38252,"ĠBers":38253,"rozen":38254,"ĠRevis":38255,"Ġdraconian":38256,"Words":38257,"asions":38258,"ĠClintons":38259,"CU":38260,"History":38261,"Ġtwilight":38262,"iform":38263,"Ġdispl":38264,"progress":38265,"ĠIO":38266,"Ġcannibal":38267,"Michelle":38268,"Ġnerv":38269,"Ġcontexts":38270,"ĠHorses":38271,"Ġanatomy":38272,"ĠLegislation":38273,"ĠBloody":38274,"Ġunwittingly":38275,"Ġinquired":38276,"ĠZip":38277,"ĠDesigns":38278,"Ġirritating":38279,"Ġunison":38280,"ĠRG":38281,"aviour":38282,"Ġpseudo":38283,"ĠVenom":38284,"Ġobscured":38285,"Ġner":38286,"uked":38287,"ORGE":38288,"Ġmomentarily":38289,"olyn":38290,"Syrian":38291,"Ġmicroscopic":38292,"Ġmistress":38293,"Less":38294,"Ġawoke":38295,"Ġtutor":38296,"esome":38297,"ollar":38298,"egg":38299,"UTE":38300,"Buzz":38301,"Ġattainment":38302,"Ġdiscriminating":38303,"::":38304,"Ġ525":38305,"azard":38306,"ĠBrist":38307,"oras":38308,"Ġveterin":38309,"jing":38310,"idon":38311,"ĠAustral":38312,"arious":38313,"ĠGrav":38314,"anol":38315,"ĠQuran":38316,"Ġbleach":38317,"588":38318,"ĠOsw":38319,"Ġdiffered":38320,"typ":38321,"ĠSIL":38322,"failed":38323,"436":38324,"Ġpalms":38325,"ĠFail":38326,"idespread":38327,"Ġchap":38328,"ĠIMAGES":38329,"ACP":38330,"matched":38331,"Ġjaws":38332,"MHz":38333,"Nik":38334,"ĠHume":38335,"OSH":38336,"Ġpresume":38337,"secut":3
8338,"ĠDied":38339,"ĠBreat":38340,"gins":38341,"prison":38342,"ĠUR":38343,"ĠROS":38344,"isitions":38345,"Ġpelvic":38346,"exclusive":38347,"522":38348,"689":38349,"FN":38350,"Ġener":38351,"Ġdispers":38352,"Ġcohorts":38353,"shut":38354,"ĠLoad":38355,"needs":38356,"azaki":38357,"inoa":38358,"Inside":38359,"usra":38360,"ighters":38361,"Ġ271":38362,"Ġsubordinate":38363,"ĠHOL":38364,"ĠGlow":38365,"Ġincred":38366,"ĠMadame":38367,"Ġoats":38368,"Ġdeviation":38369,"ĠApproach":38370,"Ġnarc":38371,"bart":38372,"bole":38373,"ĠSHE":38374,"effects":38375,"ĠADA":38376,"Ġmuse":38377,"Squ":38378,"Ġneuroscience":38379,"ĠValues":38380,"engu":38381,"Ġdosage":38382,"Ġwhispers":38383,"Ġnaughty":38384,"ĠFarming":38385,"Recently":38386,"Ġrelapse":38387,"rentice":38388,"UGH":38389,"Ġdarkened":38390,"appings":38391,"ĠSlaughter":38392,"ĠAnim":38393,"Ġovertly":38394,"poses":38395,"Ġdeficient":38396,"Ġnecks":38397,"Iron":38398,"Ġphysiological":38399,"ĠLiang":38400,"Ġlear":38401,"Ġcelestial":38402,"Ġpistols":38403,"Ġeyebrow":38404,"915":38405,"ratch":38406,"cephal":38407,"ĠPSU":38408,"Ġphotograp":38409,"ĠGaul":38410,"Ġuncontrolled":38411,"ĠJoined":38412,"652":38413,"itory":38414,"Ġ274":38415,"GAN":38416,"imester":38417,"essional":38418,"Ø©":38419,"Ġuncons":38420,"THER":38421,"Ġpaternal":38422,"Zero":38423,"ugen":38424,"538":38425,"Ġende":38426,"Ġ505":38427,"movie":38428,"Lind":38429,"Ġscorn":38430,"ulty":38431,"Ġpesky":38432,"Ġ8000":38433,"677":38434,"Ġhomophobia":38435,"ranch":38436,"Ġnarciss":38437,"ĠVoyager":38438,"ĠHELP":38439,"528":38440,"edly":38441,"Ġdetract":38442,"Hope":38443,"787":38444,"ĠMerlin":38445,"Ġgrids":38446,"KI":38447,"Mu":38448,"ĠSelected":38449,"select":38450,"ĠModer":38451,"ĠFeet":38452,"Ġrename":38453,"intensity":38454,"Wilson":38455,"Ġ414":38456,"leave":38457,"Ready":38458,"intuitive":38459,"Ġmeager":38460,"Franc":38461,"DH":38462,"Ġrhy":38463,"ĠPillar":38464,"ĠDOE":38465,"minist":38466,"ĠGrave":38467,"isible":38468,"Ess":38469,"Ġempt":38470,"Ġpatched":38471,"ĠAbortion":38472,"rals":38473,"Ġdow":38474,"Ġcrawled":38475,"igrate":38476,"Virginia":38477,"Ġconting":38478,"Ġorphans":38479,"ĠCrimean":38480,"Ġdyn":38481,"Ġshadowy":38482,"sound":38483,"ailable":38484,"Ġ293":38485,"vm":38486,"Ġaccompanies":38487,"Meanwhile":38488,"JR":38489,"ĠDirections":38490,"Ġadolescence":38491,"Ġpenetrated":38492,"bars":38493,"Rev":38494,"Ta":38495,"ĠSkywalker":38496,"ĠFires":38497,"concept":38498,"ĠSIG":38499,"554":38500,"currently":38501,"Ġ----------------":38502,"ĠWHITE":38503,"767":38504,"rors":38505,"PDF":38506,"Ġcasing":38507,"673":38508,"Ġdisapprove":38509,"1800":38510,"ĠWeed":38511,"Ġinhib":38512,"Ġmorbid":38513,"433":38514,"Ġawfully":38515,"Ts":38516,"Maria":38517,"Ġillusions":38518,"Ġtotalitarian":38519,"ollo":38520,"Ġsuppl":38521,"Ġsarc":38522,"ĠRGB":38523,"Ġlauncher":38524,"Ġbadass":38525,"ĠSyd":38526,"Ġscrape":38527,"ĠCLA":38528,"Ġcircum":38529,"657":38530,"Ġnucleus":38531,"ĠUkip":38532,"Ġmodem":38533,"ĠJou":38534,"adders":38535,"Ġwiser":38536,"thereal":38537,"Ġdemocr":38538,"ĠInvalid":38539,"Mine":38540,"Ġmanifested":38541,"meat":38542,"MORE":38543,"Larry":38544,"acements":38545,"Ġspecimen":38546,"results":38547,"Ġswallowing":38548,"Ġpigeon":38549,"tons":38550,"ĠLose":38551,"Ġquartz":38552,"Ġintraven":38553,"Ġ412":38554,"alyst":38555,"Ġengraved":38556,"client":38557,"ĠADV":38558,"ĠShared":38559,"Ġrites":38560,"Ġhysterical":38561,"ĠHUM":38562,"Cow":38563,"orously":38564,"Ġpleasures":38565,"democratic":38566,"Ġamph":38567,"Ġnib":38568,"rieg":38569,"Ġcalculates":38570,"Ġfrying":38571,"favorite":38572,"Ġantim
":38573,"ĠDoom":38574,"monitor":38575,"Want":38576,"Ġtemplates":38577,"558":38578,"iever":38579,"Photos":38580,",,":38581,"ĠSync":38582,"Ġconfronts":38583,"kept":38584,"dt":38585,"ĠERROR":38586,"ETF":38587,"578":38588,"Ġspor":38589,"718":38590,"ivation":38591,"ĠHaskell":38592,"Ca":38593,"Ġdick":38594,"Ġcivilized":38595,"Ġblah":38596,"enough":38597,"Ġoccup":38598,"Ġ334":38599,"antically":38600,"584":38601,"ĠDolphin":38602,"ĠStarts":38603,"Ġfanatic":38604,"ت":38605,"imag":38606,"Ġmicrobial":38607,"freedom":38608,"cult":38609,"wra":38610,"Ġ423":38611,"RIPT":38612,"601":38613,"BTC":38614,"atmeal":38615,"653":38616,"agogue":38617,"Ġderives":38618,"Wolf":38619,"466":38620,"Susan":38621,"ĠPassage":38622,"ARDS":38623,"Guy":38624,"Council":38625,"Ġerotic":38626,"pure":38627,"ĠMemories":38628,"ĠWikileaks":38629,"elines":38630,"Ġanth":38631,"Capital":38632,"807":38633,"ĠEggs":38634,"cv":38635,"ctors":38636,"Ġshatter":38637,"Ġesteem":38638,"vity":38639,"ĠVulcan":38640,"effic":38641,"ĠBELOW":38642,"Ġplatoon":38643,"Commun":38644,"oustic":38645,"Amy":38646,"Freedom":38647,"ppo":38648,"Ja":38649,"ĠConan":38650,"Ġinsepar":38651,"scene":38652,"Ġurinary":38653,"gain":38654,"Hillary":38655,"ĠTAM":38656,"Hist":38657,"Ġmechan":38658,"ĠRobots":38659,"Leader":38660,"Ġcartridges":38661,"Ġwhistleblowers":38662,"ĠSPL":38663,"Labour":38664,"unction":38665,"Ġfaithfully":38666,"Ġcoarse":38667,"Ġsynth":38668,"ĠLV":38669,"Ġjustifying":38670,"439":38671,"Victoria":38672,"ĠProceedings":38673,"alogy":38674,"Ġmorph":38675,"Ġcove":38676,"Ġlaughable":38677,"ECA":38678,"Ġ670":38679,"aturated":38680,"ĠSouls":38681,"ĠSleeping":38682,"Ly":38683,"ĠRetro":38684,"Ġastroph":38685,"Ġseism":38686,"atherine":38687,"ĠHercules":38688,"Ġfuse":38689,"ĠHL":38690,"Ġunintentionally":38691,"ĠRé":38692,"iery":38693,"Ġconco":38694,"Ġeras":38695,"recent":38696,"Ġlaunchers":38697,"ĠVolcano":38698,"ĠJace":38699,"Ġterminating":38700,"ĠIde":38701,"zee":38702,"asonic":38703,"itone":38704,"Ġnutshell":38705,"Ġbip":38706,"dies":38707,"Ġ286":38708,"Ġnood":38709,"ĠFathers":38710,"alys":38711,"Ġtheor":38712,"???":38713,"548":38714,"674":38715,"efined":38716,"806":38717,"âĻ":38718,"697":38719,"Ġdecap":38720,"ĠFN":38721,"Ġbureaucr":38722,"ĠGoat":38723,"ĠShang":38724,"Ġsemin":38725,"Ġthroats":38726,"Ġmoth":38727,"herer":38728,"Democratic":38729,"ixtures":38730,"impl":38731,"ĠLogo":38732,"ortunate":38733,"Ġclumsy":38734,"Ġinnocuous":38735,"ĠBlend":38736,"abulary":38737,"ĠFaces":38738,"Ġpornographic":38739,"px":38740,"Information":38741,"Ġfluoride":38742,"Ġatroc":38743,"Ġdelta":38744,"whatever":38745,"ossier":38746,"ĠNoir":38747,"ĠYao":38748,"551":38749,"undred":38750,"Ġmillennium":38751,"Ġferal":38752,"Ġconvinc":38753,"cano":38754,"imsy":38755,"angles":38756,"Ġsterile":38757,"ĠMenu":38758,"779":38759,"ĠCrack":38760,"Ġabundantly":38761,"ĠmL":38762,"Ġinfiltration":38763,"ĠDefinition":38764,"733":38765,"oubt":38766,"Ġorbital":38767,"Ġpiss":38768,"Ġbeet":38769,"679":38770,"Ġcounteract":38771,"ĠALE":38772,"ulative":38773,"crew":38774,"Ġliberating":38775,"ĠDull":38776,"Speaking":38777,"Sadly":38778,"Ġmisfortune":38779,"Ġdolphin":38780,"557":38781,"Ġbould":38782,"ĠTorah":38783,"ĠConfederacy":38784,"421":38785,"Ġorbits":38786,"ocused":38787,"beer":38788,"Rand":38789,"ĠORIG":38790,"Ġmuc":38791,"LER":38792,"ĠMisty":38793,"Ġinexpl":38794,"Ġreptiles":38795,"Ġaven":38796,"blocking":38797,"ĠPASS":38798,"Ġarisen":38799,"ĠMock":38800,"Ġops":38801,"Ġshin":38802,"524":38803,"Ġdigestion":38804,"Soft":38805,"irect":38806,"POL":38807,"ĠSpell":38808,"Level":38809,"Ġhex":38810,"Ġbitcoi
ns":38811,"ĠHungry":38812,"VL":38813,"ĠRealm":38814,"RELATED":38815,"Delta":38816,"Pri":38817,"Ġrejoice":38818,"ĠLatter":38819,"LG":38820,"Ġstupidity":38821,"Ġdonkey":38822,"nova":38823,"Vill":38824,"Ġdecomp":38825,"Ġexternally":38826,"Ġsequest":38827,"815":38828,"Ġshortcut":38829,"riminal":38830,"Hun":38831,"EH":38832,"Ġregiment":38833,"Case":38834,"definition":38835,"Ġappendix":38836,"ĠPlayed":38837,"associated":38838,"izens":38839,"ĠVag":38840,"Ġflung":38841,"Ġfru":38842,"Ġcoil":38843,"________________________":38844,"Ġselects":38845,"Ġsolves":38846,"aea":38847,"985":38848,"Tomorrow":38849,"Ġsear":38850,"APE":38851,"492":38852,"Ġenlightened":38853,"Ġnonexistent":38854,"ĠPotato":38855,"Ghost":38856,"Ġrichness":38857,"ĠKarin":38858,"Ġfamilial":38859,"ĠJA":38860,"Regardless":38861,"Ġepis":38862,"GD":38863,"Ġinsanely":38864,"ĠPhill":38865,"Block":38866,"Finding":38867,"omal":38868,"Ġdecipher":38869,"ĠSwap":38870,"derived":38871,"ĠOFFIC":38872,"Support":38873,"Ġnylon":38874,"Ġexaggeration":38875,"Ġevangelicals":38876,"Ġbearings":38877,"587":38878,"Ġlocale":38879,"Ġpowerfully":38880,"Ġappropriated":38881,"itates":38882,"irlfriend":38883,"cule":38884,"ĠSomewhere":38885,"747":38886,"ĠInteresting":38887,"464":38888,"Ġelong":38889,"Ġdegrade":38890,"rafted":38891,"Ġtutorials":38892,"905":38893,"ĠIntervention":38894,"Ġuniqueness":38895,"Ġ284":38896,"Ġexplorers":38897,"Ġnucle":38898,"ĠMillenn":38899,"511":38900,"ĠReneg":38901,"Ġexecut":38902,"urai":38903,"leon":38904,"Ġdeserts":38905,"ĠCig":38906,"Ġsuggestive":38907,"instead":38908,"Ġlousy":38909,"Ġenigmatic":38910,"594":38911,"Know":38912,"rollment":38913,"ipher":38914,"Ġhumanities":38915,"Ġmodifying":38916,".....":38917,"Ġdegraded":38918,"Ġsuppressing":38919,"Ġeman":38920,"abouts":38921,"functional":38922,"ĠOU":38923,"ĠRelax":38924,"786":38925,"esses":38926,"ĠLogin":38927,"spec":38928,"ĠWWF":38929,"Ġ364":38930,"ĠIsis":38931,"Wisconsin":38932,"Ġequival":38933,"ĠCollector":38934,"ibilities":38935,"malink":38936,"acea":38937,"Ġchained":38938,"Ġarist":38939,"Ġdisadvantages":38940,"ĠBrus":38941,"limits":38942,"ĠDmit":38943,"544":38944,"ĠRecipe":38945,"Ġhabitual":38946,".):":38947,"ĠPRODUCT":38948,"772":38949,"Ġrept":38950,"Ġpathology":38951,"Ġresurrected":38952,"uders":38953,"Ġlingu":38954,"Ġdenomination":38955,"Ġfirewall":38956,"scient":38957,"Ġvaliant":38958,"Kansas":38959,"516":38960,"Ġcontemporaries":38961,"Roman":38962,"Ġaccompan":38963,"Ġantennas":38964,"ĠXan":38965,"Ġelectromagnetic":38966,"ĠNek":38967,"alien":38968,"indle":38969,"Ġgraphene":38970,"Ġgraceful":38971,"syn":38972,"ĠBosh":38973,"Ġ1908":38974,"Ġsuccumb":38975,"Technology":38976,"Ġtoxin":38977,"myra":38978,"essert":38979,"Hell":38980,"Gil":38981,"Ġdiarr":38982,"imeters":38983,"Ġexplo":38984,"Ġgeometric":38985,"ĠNavigation":38986,"cern":38987,"Ġprogrammer":38988,"oÄŁan":38989,"Ġdodging":38990,"ĠLU":38991,"573":38992,"inters":38993,"Ġserum":38994,"Ġuber":38995,"Ġmanga":38996,"762":38997,"ĠOccasionally":38998,"437":38999,"ĠTheme":39000,"Ġimmature":39001,"Ġactivating":39002,"ĠTruly":39003,"د":39004,"osion":39005,"Age":39006,"TIME":39007,"Silver":39008,"sand":39009,"ulnerable":39010,"Ġcram":39011,"Large":39012,"ĠAnger":39013,"icators":39014,"431":39015,"ĠHonest":39016,"zip":39017,"Ġdism":39018,"Ġfades":39019,"ĠPik":39020,"Ast":39021,"sequent":39022,"Ġunsigned":39023,"xious":39024,"creation":39025,"Ġ395":39026,"ottenham":39027,"Ġundesirable":39028,"ugal":39029,"ĠDivide":39030,"lp":39031,"563":39032,"ĠPOP":39033,"ĠCET":39034,"session":39035,"Ġoccurrences":39036,"chu":39037,"ĠACS":39038,"ĠPros
ecut":39039,"Ġhypnot":39040,"rely":39041,"ERG":39042,"Ven":39043,"Republicans":39044,"inez":39045,"ĠImplementation":39046,"Ġsprang":39047,"Ġobs":39048,"Defense":39049,"Ġunexpl":39050,"ĠPAGE":39051,"ĠTent":39052,"ĠNeurolog":39053,"Ġintuition":39054,"759":39055,"Ġterrestrial":39056,"Ġmorphine":39057,"Ġ.\"":39058,"ĠHydra":39059,"651":39060,"Ġneoliberal":39061,"683":39062,"Ġabnormalities":39063,"quant":39064,"Ġmonastery":39065,"jac":39066,"ĠReaction":39067,"Ġcontraceptive":39068,"ĠBalls":39069,"Ġapost":39070,"676":39071,"ĠHELL":39072,"approximately":39073,"Ġvibrations":39074,"COR":39075,"ĠCPUs":39076,"Ġcontin":39077,"Ġsemblance":39078,"Ġshorth":39079,"tip":39080,"ĠChips":39081,"makes":39082,"Ġprett":39083,"Ġconspicuous":39084,"ĠAmp":39085,"Ġvisualize":39086,"Hu":39087,"sorry":39088,"nai":39089,"ĠArcade":39090,"rimination":39091,"obin":39092,"Ġvampire":39093,"773":39094,"ĠCaucasus":39095,"Medic":39096,"ĠGitHub":39097,"ĠWicked":39098,"ĠFet":39099,"Krist":39100,"998":39101,"Ġfrontal":39102,"Ġ283":39103,"ndum":39104,"Ġidols":39105,"ĠMSG":39106,"ĠShuttle":39107,"ĠTowards":39108,"Ġsaturation":39109,"Ġ®":39110,"Ġcradle":39111,"eteen":39112,"Ġprejudices":39113,"separ":39114,"ĠSoda":39115,"ynam":39116,"Ġnause":39117,"Ġpenetrating":39118,"ĠVampire":39119,"Ġmole":39120,"Ġgoogle":39121,"earance":39122,"583":39123,"Ġdomin":39124,"727":39125,"Kind":39126,"Ġcust":39127,"manuel":39128,"ĠAstro":39129,"Roger":39130,"JO":39131,"killed":39132,"ĠDisapp":39133,"833":39134,"ĠEQU":39135,"Ġprecedence":39136,"mberg":39137,"641":39138,"ĠRoller":39139,"Ġspecifying":39140,"035":39141,"phil":39142,"Ġpowdered":39143,"Ġblot":39144,"Ġdeline":39145,"Bruce":39146,"536":39147,"Ġpim":39148,"leasing":39149,"vacc":39150,"RN":39151,"Ġspacing":39152,"Ġhangar":39153,"ĠPlot":39154,"537":39155,"legraph":39156,"596":39157,"Ġpolyg":39158,"doi":39159,"ĠNerd":39160,"installed":39161,"ĠSeeds":39162,"ĠPlays":39163,"ĠRomance":39164,"layer":39165,"Ġunsu":39166,"Ġcurric":39167,"Mi":39168,"restrial":39169,"ĠNiño":39170,"ĠProper":39171,"Ġpores":39172,"Giving":39173,"aeus":39174,"Middle":39175,"liber":39176,"Ġcombatants":39177,"ĠBulk":39178,"Ġ502":39179,"Ġstru":39180,"ĠLonely":39181,"Companies":39182,"inence":39183,"Autom":39184,"Ġfearsome":39185,"Ġsummar":39186,"Ġrotated":39187,"ĠPLA":39188,"ĠFAT":39189,"572":39190,"ĠSkies":39191,"iour":39192,"Ġintimately":39193,"amera":39194,"Ġ475":39195,"623":39196,"Ġirrig":39197,"Ġboosters":39198,"Ġtransmitting":39199,"DOWN":39200,"ĠAble":39201,"Ġfuriously":39202,"spirit":39203,"Ġgrun":39204,"Ġbible":39205,"ĠAdmir":39206,"Ġ§":39207,"ĠRaise":39208,"Ġflowering":39209,"uxe":39210,"ravis":39211,"urther":39212,"ĠScientology":39213,"pathy":39214,"Ġruth":39215,"Ġtempor":39216,"Ġwhispered":39217,"ogly":39218,"coord":39219,"chlor":39220,"processing":39221,"iott":39222,"ĠTY":39223,"wik":39224,"abolic":39225,"ĠUnable":39226,"ĠLiterary":39227,"ĠpH":39228,"Eastern":39229,"Craig":39230,"Fear":39231,"Ġinventions":39232,"ĠNost":39233,"Ġafflicted":39234,"ĠSwamp":39235,"INST":39236,"Jerry":39237,"Ġprope":39238,"ĠLancet":39239,"Ġrefres":39240,"ĠPrinciples":39241,"ĠLys":39242,"ERAL":39243,"addock":39244,"Ġcynicism":39245,"Ġmassacres":39246,"roo":39247,"Ġcollagen":39248,"Johnny":39249,"Keith":39250,"Italian":39251,"553":39252,"Dad":39253,"Neither":39254,"cler":39255,"ilers":39256,"Ġassass":39257,"Travel":39258,"672":39259,"Ġeaves":39260,"ATOR":39261,"Ġoily":39262,"581":39263,"ateful":39264,"728":39265,"Ġchiefly":39266,"tical":39267,"enes":39268,"ĠWouldn":39269,"ĠJacket":39270,"ĠSuit":39271,"Ġindustrialized":39272,"ĠNose":39273,"ĠSEC
TION":39274,"Ġredd":39275,"Ġcavity":39276,"Ġconn":39277,"Shield":39278,"Ġtongues":39279,"Ġsuccinct":39280,"views":39281,"ĠMUST":39282,"oliath":39283,"Ġlimitless":39284,"Ġapocalyptic":39285,"ĠAtlantis":39286,"DNA":39287,"ilded":39288,"ĠDresden":39289,"nit":39290,"Ġsubdiv":39291,"gressive":39292,"701":39293,"hops":39294,"alist":39295,"Ġunintentional":39296,"Ġpsychic":39297,"Ġcontrovers":39298,"Ġforeground":39299,"Ġnaïve":39300,"Ġfolders":39301,"icist":39302,"Ġdrawbacks":39303,"ĠToxic":39304,"ophy":39305,"ĠMasonic":39306,"Ġcis":39307,"olated":39308,"Ġdepletion":39309,"Rap":39310,"692":39311,"Ġinver":39312,"ĠFAQ":39313,"Ġmeanings":39314,"Ġbisc":39315,"ĠRage":39316,"Ġresear":39317,"Ep":39318,"Ġunbeat":39319,"ĠComponents":39320,"bub":39321,"ĠInterface":39322,"Isa":39323,"ĠArgon":39324,"Ġdenomin":39325,"Ġmammal":39326,"519":39327,"Ġsizing":39328,"imbabwe":39329,"ĠReplacement":39330,"Georgia":39331,"ĠParticipation":39332,"Ġmelts":39333,"Ġfemin":39334,"514":39335,"Ġseams":39336,"513":39337,"ĠGaw":39338,"Ġbrood":39339,"Mit":39340,"Ġannoyance":39341,"Ġequilibrium":39342,"Ġpatri":39343,"Ġ338":39344,"561":39345,"mentioned":39346,"ĠVotes":39347,"Ġintoler":39348,"Ġstrikingly":39349,"Ġ352":39350,"Ġskeletal":39351,"616":39352,"isition":39353,"Ġfluor":39354,"provided":39355,"517":39356,"Ġclimates":39357,"Ġsensibilities":39358,"ĠFrequ":39359,"onite":39360,"Kenn":39361,"Ġmagnets":39362,"assis":39363,"Ġprerequisite":39364,"Ġ>>>":39365,"Ġscree":39366,"google":39367,"ĠMirage":39368,"Ġevict":39369,"Peace":39370,"Ġmissionaries":39371,"617":39372,"748":39373,"rient":39374,"ĠSTATS":39375,"Bird":39376,"ĠShiva":39377,"ĠBlessing":39378,"Ġredundancy":39379,"Ġphotoc":39380,"ĠOnes":39381,"754":39382,"alert":39383,"urous":39384,"Ġfolklore":39385,"ĠIdeal":39386,"sheets":39387,"according":39388,"Hor":39389,"Cle":39390,"ĠEdit":39391,"671":39392,"olitics":39393,"ĠESC":39394,"Ġparaly":39395,"Ġorgasm":39396,"speak":39397,"ð":39398,"Ġsneaky":39399,"Ġswords":39400,"Ġfandom":39401,"776":39402,"ĠScandinav":39403,"Ġdarts":39404,"546":39405,"cerpt":39406,"ĠGifts":39407,"Ġmagically":39408,"phys":39409,"Laughs":39410,"ĠSour":39411,"ources":39412,"789":39413,"ĠEps":39414,"ository":39415,"uality":39416,"literally":39417,"Ġheavens":39418,"FUL":39419,"Ġie":39420,"ĠISP":39421,"Ġwink":39422,"Ġweeping":39423,"Ġdocking":39424,"ACY":39425,"iece":39426,"Ġsignifies":39427,"guns":39428,"Sac":39429,"Leave":39430,"imation":39431,"Ġunex":39432,"uctive":39433,"ĠFees":39434,"ĠPortable":39435,"ĠInvestigator":39436,"pill":39437,"rehensible":39438,"Ġpotency":39439,"803":39440,"Ġembodiment":39441,"overty":39442,"shine":39443,"REL":39444,"ĠMPH":39445,"ĠPatriarch":39446,"Ġaspirin":39447,"Ġrinse":39448,"Ġinher":39449,"ograms":39450,"ĠTHREE":39451,"qt":39452,"ipples":39453,"Ġdehuman":39454,"Ġslander":39455,"Ġflora":39456,"brow":39457,"Ġblindly":39458,"ectar":39459,"endish":39460,"Ġpigment":39461,"cellent":39462,"Ġyells":39463,"ĠLust":39464,"ĠAttacks":39465,"ĠSyndicate":39466,"otin":39467,"gress":39468,"reenshot":39469,"picking":39470,"Ġacupuncture":39471,"images":39472,"glas":39473,"ĠPolicies":39474,"Ġintestinal":39475,"1998":39476,"ULE":39477,"runs":39478,"ĠNing":39479,"ĠAsuka":39480,"ĠSkull":39481,"Motor":39482,"Ġdefund":39483,"Ġattaching":39484,"ĠBAD":39485,"Ġquarrel":39486,"Child":39487,"Dog":39488,"issan":39489,"irmation":39490,"Ġinline":39491,"ĠLover":39492,"Ġcyan":39493,"entary":39494,"awareness":39495,"Ġtraveller":39496,"âĢIJ":39497,"Ġbeasts":39498,"Ġboobs":39499,"ĠDeadly":39500,"Ġplutonium":39501,"ĠIntellectual":39502,"Jam":39503,"Ġconsec":39504,"663":3
9505,"ĠVegan":39506,"Ġ331":39507,"uron":39508,"ĠHEL":39509,"reements":39510,"Ġclone":39511,"Ġoutputs":39512,"oult":39513,"ĠDOM":39514,"ĠNX":39515,"Ze":39516,"909":39517,"brate":39518,"arations":39519,"ĠJindal":39520,"Ġbooklet":39521,"amide":39522,"Ġscraping":39523,"Sol":39524,"Date":39525,"796":39526,"Ġfulf":39527,"Ġskeletons":39528,"Ġsaints":39529,"ĠCurious":39530,"Han":39531,"Ġrepud":39532,"osity":39533,"ĠGravity":39534,"Ġmetadata":39535,"Focus":39536,"Ġthrott":39537,"ĠProgramming":39538,"Break":39539,"erver":39540,"Ġknight":39541,"yrs":39542,"Ġ376":39543,"sat":39544,"auto":39545,"Ġbroom":39546,"Ġnerd":39547,"Political":39548,"022":39549,"-------------":39550,"oulos":39551,"Ġrelic":39552,"Ġenactment":39553,"rious":39554,"ĠUniform":39555,"Teen":39556,"Colorado":39557,"055":39558,"Ġangled":39559,"bolt":39560,"ĠNeander":39561,"ĠDism":39562,"thanks":39563,"Polit":39564,"ersion":39565,"dro":39566,"install":39567,"Jake":39568,"hz":39569,"Ġ770":39570,"ĠCommodore":39571,"lahoma":39572,"Ġshri":39573,"Ġ....":39574,"Ġ7000":39575,"scope":39576,"Ġgenesis":39577,"Ġresided":39578,"ĠRivals":39579,"Ġsarcastic":39580,"Ġelicit":39581,"Ġmultiplied":39582,"uitous":39583,"Ġoppress":39584,"ĠPROT":39585,"Ġperpetually":39586,"ĠAdds":39587,"Ġbuffers":39588,"Ġmush":39589,"Ġ354":39590,"Ġpresc":39591,"ĠKung":39592,"682":39593,"Education":39594,"Ġpled":39595,"bsp":39596,"Ġconfessions":39597,"Ġrevocation":39598,"Micro":39599,"ĠHobby":39600,"ĠFatal":39601,"STAR":39602,"Ġworkspace":39603,"Ġtransformations":39604,"Ġportals":39605,"orned":39606,"figured":39607,"Ġlinguistic":39608,"pperc":39609,"ergus":39610,"Fel":39611,"ĠIntent":39612,"Ġ289":39613,"Ġdelinquent":39614,"Ġhandwriting":39615,"Ġvap":39616,"576":39617,"redited":39618,"736":39619,"Ġpsychiatry":39620,"GMT":39621,"Ġdisingen":39622,"Ġcrou":39623,"801":39624,"Ġmalice":39625,"itutes":39626,"ĠTiff":39627,"Ġstink":39628,"574":39629,"Story":39630,"Modern":39631,"ĠGly":39632,"Jamie":39633,"Ġadvertis":39634,"Ġhiber":39635,"Ġinfiltr":39636,"Ġelector":39637,"rovers":39638,"ĠFist":39639,"peed":39640,"ĠClassical":39641,"592":39642,"Ġconscientious":39643,"Surv":39644,"Text":39645,"ĠDrunk":39646,"Ġsupplemented":39647,"THIS":39648,"Ġtimid":39649,"Ġstacking":39650,"rites":39651,"Ġrebirth":39652,"Ġbalcon":39653,"Ġyawn":39654,"rosc":39655,"axy":39656,"Hart":39657,"ĠOPER":39658,"996":39659,"Ġrabid":39660,"ĠTick":39661,"Ġgrinning":39662,"elfth":39663,"045":39664,"Ġjustifies":39665,"ĠPirate":39666,"ĠSalary":39667,"Ġmirac":39668,"613":39669,"inately":39670,"ĠLIN":39671,"Ġinadequ":39672,"NPR":39673,"iddled":39674,"storage":39675,"Ġseventy":39676,"onet":39677,"Ġgastro":39678,"FIR":39679,"Ġrodent":39680,"629":39681,"ĠInclude":39682,"ĠCategories":39683,"ĠLiterally":39684,"Ġpree":39685,"aunder":39686,"ĠLOL":39687,"694":39688,"Ġindef":39689,"Ped":39690,"Ġmenstru":39691,"Ġcensored":39692,"Ġconfigure":39693,"Ġoverest":39694,"igenous":39695,"Ġrectangular":39696,"ĠMIS":39697,"ĠMub":39698,"Ġwitches":39699,"izards":39700,"Ġobnoxious":39701,"ĠLoll":39702,"ĠSEM":39703,"Ġspiritually":39704,"Ġcoer":39705,"Ġmodesty":39706,"butt":39707,"Ġedits":39708,"ĠShall":39709,"sburgh":39710,"Ġ1911":39711,"Rex":39712,"manent":39713,"ĠLithuan":39714,"Ġpointers":39715,"ativity":39716,"retch":39717,"Ġcascade":39718,"ĠRagnarok":39719,"ĠPainting":39720,"ĠATL":39721,"Born":39722,"Ġpadding":39723,"whel":39724,"Ġgrotesque":39725,"Ġtheorists":39726,"forcer":39727,"ĠJinn":39728,"Ġrenal":39729,"jamin":39730,"ĠFEC":39731,".\"\"":39732,"redict":39733,"Ġoppos":39734,"opted":39735,"Sel":39736,"ipment":39737,"752":39738,"792":39739
,"Pur":39740,"Ġvolt":39741,"Ġflap":39742,"ĠCASE":39743,"Ġdyed":39744,"orers":39745,"becca":39746,",.":39747,"ifice":39748,"ubes":39749,"Ġyr":39750,"DW":39751,"Ġalteration":39752,"ĠSimpl":39753,"Ġunequiv":39754,"756":39755,"Dou":39756,"Ġplunder":39757,"Ġcommons":39758,"Ġstag":39759,"ĠZeal":39760,"avanaugh":39761,"Self":39762,"none":39763,"EGIN":39764,"Ġflashback":39765,"VAL":39766,"Gab":39767,"ĠCapture":39768,"ĠBrilliant":39769,"ĠDisk":39770,"ĠMood":39771,"Ġhaun":39772,"Ġrotting":39773,"ĠCobra":39774,"Ġpsychopath":39775,"Ġhelper":39776,"Starting":39777,"ĠOrbit":39778,"Ġcaf":39779,"Half":39780,"Volume":39781,"aptop":39782,"ĠSaga":39783,"azor":39784,"593":39785,"774":39786,"ĠCaucasian":39787,"compan":39788,"ĠVERY":39789,"GES":39790,"Ġvomit":39791,"Ġdispro":39792,"ĠMechanics":39793,"Ġ385":39794,"Ġmystical":39795,"AFTA":39796,"Ġbacter":39797,"availability":39798,"Ġhairc":39799,"ĠVec":39800,"rypt":39801,"Ġmanipulative":39802,"shell":39803,"ĠWeird":39804,"jab":39805,"ĠByr":39806,"Bow":39807,"uin":39808,"Ġquot":39809,"MX":39810,"Ġ960":39811,"ĠSharia":39812,"ĠWeapon":39813,"ĠPowerPoint":39814,"Ġstitching":39815,"Ġconstraint":39816,"âľ":39817,"ulic":39818,"597":39819,"omedical":39820,"ĠSupplemental":39821,"ĠSurve":39822,"ĠSubcommittee":39823,"ĠDarkness":39824,"Ġpython":39825,"LU":39826,"Ġ402":39827,"ĠQuan":39828,"ĠModerate":39829,"clusively":39830,"Ġextrap":39831,"Ġlatt":39832,"ĠSTUD":39833,"oslav":39834,"Ġsymb":39835,"battle":39836,"flash":39837,"ĠDeploy":39838,"Ġmicrobiome":39839,"Ġingested":39840,"Ġdistort":39841,"Ġassimil":39842,"Ġmobs":39843,"illet":39844,"Gre":39845,"Ġ294":39846,"Ġforbids":39847,"ĠEfficiency":39848,"ĠClan":39849,"763":39850,"Ġdragons":39851,"States":39852,"ĠMAKE":39853,"ĠBOOK":39854,"ĠRuns":39855,"ĠUX":39856,"EED":39857,"Whoever":39858,"ionics":39859,"worldly":39860,"ĠMermaid":39861,"Ġbenz":39862,"Info":39863,"523":39864,"Ġbiod":39865,"ĠPoison":39866,"ceivable":39867,"Services":39868,"ATIVE":39869,"ĠItem":39870,"Ġdisav":39871,"Ġheter":39872,"Ġasteroids":39873,"ĠWooden":39874,"Ġelectroly":39875,"assadors":39876,"nance":39877,"reflect":39878,"Ġattent":39879,"iphany":39880,"Ġspaceship":39881,"Ġbegg":39882,"algia":39883,"Ax":39884,"Ġidiosyncr":39885,"Ġinserting":39886,"ĠCSS":39887,"ĠLET":39888,"ĠStrikes":39889,"ossibly":39890,"Exp":39891,"Opp":39892,"dden":39893,"Ġplayable":39894,"ĠJM":39895,"Ġlawfully":39896,"ĠBlink":39897,"Ġ413":39898,"Ġoverpowered":39899,"Ġcommenter":39900,"Track":39901,"Ġmethyl":39902,"Ġfermented":39903,"Ġinvaders":39904,"ĠMoves":39905,"Ġcommunicates":39906,"rint":39907,"ĠTray":39908,"jug":39909,"Ġsuperf":39910,"ochet":39911,"ĠJelly":39912,"Ġestrogen":39913,"Dom":39914,"mix":39915,"Gun":39916,"ochemistry":39917,"952":39918,"Ġovere":39919,"ĠPlaintiff":39920,"ĠPilgrim":39921,"ĠSERVICES":39922,"ĠExpend":39923,"ĠFRE":39924,"Ġsmelling":39925,"ĠSpaces":39926,"bris":39927,"Mission":39928,"Ġarter":39929,"Ġautonom":39930,"Lisa":39931,"ĠPercent":39932,"NK":39933,"ĠLimits":39934,"Ġ356":39935,"Recent":39936,"ĠSiberian":39937,"etermin":39938,"nets":39939,"ĠSword":39940,"essee":39941,"Ùĩ":39942,"icycle":39943,"Ġparas":39944,"Ġrud":39945,"Ġscrib":39946,"Ġ1860":39947,"Shop":39948,"orld":39949,"Ġpept":39950,"ENSE":39951,"Ġanimations":39952,"ership":39953,"Search":39954,"ĠUSSR":39955,"washed":39956,"Ġpromulg":39957,"Ġdetainee":39958,"Ġunderest":39959,"ĠAppropri":39960,"Left":39961,"Update":39962,"Wallet":39963,"idently":39964,"ĠBicycle":39965,"Ġgorge":39966,"abyte":39967,"ĠMinecraft":39968,"rike":39969,"997":39970,"Tesla":39971,"Often":39972,"ĠTHESE":39973,"Ġregression":39974,"H
en":39975,"Ġsnippets":39976,"irds":39977,"Ġprinces":39978,"Ġwastes":39979,"ĠWond":39980,"itimate":39981,"ĠMongol":39982,"ĠkW":39983,"Ġidiots":39984,"Ġforeigner":39985,"Upon":39986,"Ġbackdoor":39987,"umph":39988,"ĠSquirrel":39989,"Ġtyped":39990,"Ġblockers":39991,"Vote":39992,"ĠPossibly":39993,"geist":39994,"ĠTRANS":39995,"Ġtitan":39996,"VG":39997,"Ġmicrobi":39998,"Ġinteracts":39999,"Ġmasc":40000,"Ġfinite":40001,"Ġcutoff":40002,"ornings":40003,"Ġprototyp":40004,"Ġcompan":40005,"mology":40006,"ĠBOX":40007,"Cre":40008,"Bot":40009,"grading":40010,"PET":40011,"Ġinsidious":40012,"ĠFranch":40013,"orians":40014,"ĠAUT":40015,"ĠCrush":40016,"589":40017,"question":40018,"anguard":40019,"Ġabsurdity":40020,"?\",":40021,"Hum":40022,"Ġliberalism":40023,"Ġpostwar":40024,"Gener":40025,"Personally":40026,"889":40027,"Bul":40028,"Ġlighthouse":40029,"Ġ291":40030,"VK":40031,"ĠExposure":40032,"Ġsubtract":40033,"ometime":40034,"arbon":40035,"ĠThieves":40036,"anus":40037,"ĠLibertarian":40038,"Raw":40039,"Ġsolvent":40040,"Ġcorros":40041,"Ġsignific":40042,"Ġscholarly":40043,"024":40044,"Ġfetish":40045,"Ġlarvae":40046,"Ġcatast":40047,"Ġtraitor":40048,"ijing":40049,"Demand":40050,"math":40051,"Ġconceivable":40052,"either":40053,"acl":40054,"ĠArrows":40055,"627":40056,"ĠFrankenstein":40057,"entious":40058,"Ġimitation":40059,"amn":40060,"ĠSTOP":40061,"Ġcripp":40062,"zag":40063,"ĠZed":40064,"797":40065,"Along":40066,"Ġwont":40067,"Ġfolds":40068,"Shar":40069,"ĠCommentary":40070,"ĠLibraries":40071,"ĠThunderbolt":40072,"itud":40073,"Toy":40074,"Ġincidentally":40075,"ĠResp":40076,"Ġordinarily":40077,"Ġvanish":40078,"acterial":40079,"Minnesota":40080,"rank":40081,"614":40082,"ĠExam":40083,"Got":40084,"Ġsnipers":40085,"ETHOD":40086,"dirty":40087,"igsaw":40088,"Obs":40089,"ĠAuthors":40090,"Ġillustrating":40091,"782":40092,"864":40093,"Ġblinded":40094,"transfer":40095,"Ġspawning":40096,"ĠDiary":40097,"ĠDNS":40098,"CG":40099,"someone":40100,"Ġcruc":40101,"Morgan":40102,"Learn":40103,"API":40104,"toc":40105,"STAT":40106,"ĠFlame":40107,"aganda":40108,"ĠBenef":40109,"stuff":40110,"SEA":40111,"Ġincest":40112,"Normally":40113,"ĠRU":40114,"Ġarsenic":40115,"isine":40116,"ĠTG":40117,"Type":40118,"regn":40119,"Cass":40120,"Touch":40121,"Site":40122,"Ġpict":40123,"Ġcorrupted":40124,"729":40125,"Ġnineteen":40126,"Ġparaph":40127,"Ġtavern":40128,"Ġretard":40129,"ĠKaf":40130,"Ġcolleg":40131,"bucks":40132,"imum":40133,"ĠCandle":40134,"ĠMisc":40135,"ĠAwesome":40136,"edited":40137,"ĠDN":40138,"otomy":40139,"Ġdisclaimer":40140,"798":40141,"ĠGoodbye":40142,"ucle":40143,"atom":40144,"Judge":40145,"cipl":40146,"Ġinexplicable":40147,"iddler":40148,"781":40149,"Ġempirical":40150,"Veter":40151,"Ġascert":40152,"Ġaest":40153,"Ġlaz":40154,"binary":40155,"Ġ358":40156,"contained":40157,"Ġmultipl":40158,"ocado":40159,"Ġdelusional":40160,"Ġaeros":40161,"udence":40162,"Ġjargon":40163,"estine":40164,"Ġarbitrarily":40165,"Ġprick":40166,"BACK":40167,"amines":40168,"Mess":40169,"Knowing":40170,"ublic":40171,"ĠWarfare":40172,"Ġsignify":40173,"Ġfragmentation":40174,"Tex":40175,"Ġnin":40176,"Ġdise":40177,"882":40178,"hospital":40179,"volent":40180,"Need":40181,"Ġinfer":40182,"Sony":40183,"783":40184,"YING":40185,"Ġinfinity":40186,"ĠFortress":40187,"Ġmustache":40188,"Ġcorresponds":40189,"DX":40190,"Ġunmarried":40191,"ĠCruel":40192,"Ġ1901":40193,"Ġappropri":40194,"ZI":40195,"Ġphosph":40196,"901":40197,"IFE":40198,"Ġ347":40199,"Ġconvoluted":40200,"ĠApost":40201,"htm":40202,"Ġilluminating":40203,"568":40204,"Ġassassinate":40205,"Ġparam":40206,"Ġimpractical":40207,"cedes":4
0208,"ĠProcedure":40209,"ĠMouth":40210,"Battle":40211,"Ġ451":40212,"Sand":40213,"Ġcontamin":40214,"Hour":40215,"Cell":40216,"BIL":40217,"Ġprecon":40218,"ĠScor":40219,"Ġconfig":40220,"ĠMuscle":40221,"Ġhive":40222,"Ġunderworld":40223,"plement":40224,"Ġpostage":40225,"Ġinterpersonal":40226,"Ġpierced":40227,"Ġcharms":40228,"oscopic":40229,"ASC":40230,"ĠDex":40231,"render":40232,"png":40233,"Ġcritiques":40234,"992":40235,"ĠVinyl":40236,"Bear":40237,"idia":40238,"ĠTemp":40239,"Ġcyn":40240,"ĠBCE":40241,"Ġpatriarchal":40242,"Ġantagonist":40243,"ĠGMO":40244,"Ġunnatural":40245,"Race":40246,"imeo":40247,"ĠUkrainians":40248,"Train":40249,"Ġ329":40250,"ritten":40251,"igil":40252,"Lin":40253,"alus":40254,"*****":40255,"olded":40256,"ĠPegasus":40257,"Bas":40258,"photos":40259,"Ġ820":40260,"Ġsquadron":40261,"ESE":40262,"Ġ373":40263,"Uk":40264,"Lost":40265,"Store":40266,"ĠScenes":40267,"JJ":40268,"Ġlick":40269,"Tyler":40270,"cius":40271,"lishing":40272,"ocl":40273,"Ġassoci":40274,"ensitivity":40275,"entanyl":40276,"Rum":40277,"Ġ443":40278,"onding":40279,"Ġpedals":40280,"ĠPsychological":40281,"Ġthro":40282,"Network":40283,"591":40284,"Pick":40285,"Ġchords":40286,"ĠHound":40287,"entials":40288,"faces":40289,"ĠYin":40290,"ugi":40291,"bows":40292,"ĠForms":40293,"886":40294,"Ox":40295,"Ġ351":40296,"Ġmating":40297,"Ġchirop":40298,"916":40299,"Ġexpend":40300,"Ġusefulness":40301,"Marvel":40302,"ĠStretch":40303,"omez":40304,"ĠJS":40305,"Hal":40306,"fle":40307,"ĠCountdown":40308,"ĠLH":40309,"assian":40310,"vd":40311,"ĠTranscript":40312,"ĠExtrem":40313,"idine":40314,"ustainable":40315,"ederal":40316,"ĠOwl":40317,"Ġcreed":40318,"ĠGrateful":40319,"Ġprenatal":40320,"________________________________":40321,"ĠElements":40322,"âĢ¦)":40323,"nesia":40324,"ARGET":40325,"Ġboredom":40326,"Ġdepictions":40327,"verbal":40328,"ĠeSports":40329,"Laura":40330,"ilage":40331,"ĠGalactic":40332,"Investigators":40333,"Ġscattering":40334,"instein":40335,"ĠExperiment":40336,"ĠRecre":40337,"Ġregul":40338,"Ġrelent":40339,"STE":40340,"Ġslicing":40341,"igans":40342,"raped":40343,"ĠDeter":40344,"Ġsmoker":40345,"ĠWikimedia":40346,"pages":40347,"Ted":40348,"713":40349,"Ġpuberty":40350,"Ġhars":40351,"ĠStarter":40352,"patch":40353,"leeve":40354,"Ġ346":40355,"ĠAccessories":40356,"ventions":40357,"ĠSTAND":40358,"ĠUrug":40359,"ĠOccupy":40360,"Ġbinds":40361,"ĠBubble":40362,"Ġincorporation":40363,"Ġstereotypical":40364,"Ġgor":40365,"987":40366,"Ġevils":40367,"tower":40368,"Ġastronomer":40369,"Ble":40370,"ĠNid":40371,"ĠWidow":40372,"Ġpaw":40373,"Ġinnoc":40374,"ĠOWN":40375,"Ġtofu":40376,"drops":40377,"ĠEval":40378,"693":40379,"Collins":40380,"penter":40381,"ĠNib":40382,"Ġsmokes":40383,"Ġ1850":40384,"Ġtechno":40385,"oooo":40386,"ĠUnic":40387,"ĠKirin":40388,"\":[\"":40389,"Ġincrements":40390,"989":40391,"oodoo":40392,"ĠCyborg":40393,"Ġcures":40394,"ĠOW":40395,"ĠAnnex":40396,"behavior":40397,"/-":40398,"Ġbuggy":40399,"onent":40400,"Bey":40401,"Ġsummarize":40402,"putable":40403,"Ġfri":40404,"Gi":40405,"urances":40406,"ĠAppalach":40407,"Ġhegemony":40408,"ĠOrigins":40409,"Ġconnectors":40410,"ĠAST":40411,"object":40412,"ĠSlay":40413,"Arm":40414,"oston":40415,"ĠEVEN":40416,"Ġprophecy":40417,"Bright":40418,"ĠVector":40419,"Marg":40420,"omical":40421,"Holy":40422,"ĠRPM":40423,"ĠReceiver":40424,"Ġtracts":40425,"boss":40426,"Ġblurry":40427,"aspx":40428,"DES":40429,"Ġcess":40430,"ĠAster":40431,"anything":40432,"levard":40433,"unciation":40434,"jong":40435,"Ġiv":40436,"Common":40437,"ĠDistance":40438,"imus":40439,"outheast":40440,"Ġcir":40441,"ĠCato":40442,"Ġinscribed":40
443,"ersed":40444,"Ġanarchy":40445,"Ġplagiar":40446,"Ġthug":40447,"Actor":40448,"ĠTant":40449,"Researchers":40450,"remember":40451,"Ġitch":40452,"Ġrefill":40453,"Ġsucker":40454,"ĠWANT":40455,"RAG":40456,"rencies":40457,"ĠTape":40458,"Ġattaches":40459,"nb":40460,"Tan":40461,"Ġappend":40462,"Ġalas":40463,"951":40464,"panel":40465,"Climate":40466,"icrobial":40467,"Brandon":40468,"ĠFreud":40469,"Ġfungi":40470,"Ġcommenters":40471,"ĠDelicious":40472,"Ġhitherto":40473,"conv":40474,"Ġchemist":40475,"Ġdenominations":40476,"ĠBehavior":40477,"comed":40478,"ĠLantern":40479,"ĠFloating":40480,"magic":40481,"ĠBarbar":40482,"bender":40483,"iliar":40484,"unny":40485,"Ġretracted":40486,"atars":40487,"ĠLovely":40488,"Ġinfinitely":40489,"Ġhumili":40490,"Ġinterestingly":40491,"Ġmunicip":40492,"ĠPanic":40493,"Ġcomprehension":40494,"ĠMassacre":40495,"Ġpersuasion":40496,"enf":40497,"Ġcoded":40498,"higher":40499,"chart":40500,"umbered":40501,"ĠIndigo":40502,"Ġthinker":40503,"Ġgoof":40504,"ĠPetition":40505,"fascist":40506,"absor":40507,"Ġassay":40508,"ĠClassification":40509,"Ġhalluc":40510,"speech":40511,"issues":40512,"Ġinexper":40513,"ĠLibre":40514,"Ġsling":40515,"zech":40516,"Ġpouch":40517,"ĠOffense":40518,"ĠHF":40519,"Fight":40520,"026":40521,"ĠTrident":40522,"fm":40523,"Ġintox":40524,"Ġ465":40525,"colonial":40526,"ovies":40527,"794":40528,"Techn":40529,"undreds":40530,"Ġchildish":40531,"arenthood":40532,"ĠShade":40533,"Host":40534,"Ġdirectional":40535,"reader":40536,"rimp":40537,"ĠEater":40538,"prep":40539,"Ġmeas":40540,"Ġlatch":40541,"inant":40542,"nels":40543,"finished":40544,"application":40545,"Board":40546,"Ġfiller":40547,"ivably":40548,"CAST":40549,"Ġstereotyp":40550,"Ġwarranties":40551,"ĠProbe":40552,"Ġspontaneously":40553,"Ġtropes":40554,"Meg":40555,"ĠHandling":40556,"hemer":40557,"986":40558,"ĠSly":40559,"plates":40560,"Ġmolten":40561,"ĠHIT":40562,"strings":40563,"Ġcentrif":40564,"ĠENG":40565,"Indeed":40566,"Ġ429":40567,"Ġsly":40568,"Ġ490":40569,"Ġhordes":40570,"boot":40571,"691":40572,"ihara":40573,"Ġsubversive":40574,"Russell":40575,"aceous":40576,"wk":40577,"Ġreverence":40578,"Ġingenious":40579,"holiday":40580,"eligible":40581,"ĠTactical":40582,"978":40583,"herence":40584,"Ġgimm":40585,"Ġarchaic":40586,"Ġadam":40587,"Ġ297":40588,"Father":40589,"ĠLerner":40590,"Ġhesitated":40591,"Safety":40592,"Ġawakened":40593,"ueller":40594,"Ġextrater":40595,"Ġmummy":40596,"ĠBuddhism":40597,"Ġ359":40598,"Ġlegions":40599,"Ġprehistoric":40600,"ancouver":40601,"Ġmelancholy":40602,"ĠEnemy":40603,"ĠSyl":40604,"ĠRobo":40605,"verting":40606,"ĠBullets":40607,"essler":40608,"Ġmarvelous":40609,"ĠBened":40610,"Ġsavior":40611,"omever":40612,"Bee":40613,"Ġrapp":40614,"Ġpredomin":40615,"ĠScripture":40616,"Ġsnapshots":40617,"Ġunrem":40618,"Ġsquid":40619,"ĠBuddh":40620,"ĠSantorum":40621,"Internet":40622,"avoid":40623,"Ġunamb":40624,"Ġ296":40625,"Ġnexus":40626,"Ġinterchangeable":40627,"ockets":40628,"Ġfoll":40629,"ĠOPT":40630,"023":40631,"²":40632,"Ġhereditary":40633,"Ġvape":40634,"=\"":40635,"1996":40636,"س":40637,"Emergency":40638,"Ġneb":40639,"Ġisot":40640,"Ġdiam":40641,"stairs":40642,"ĠAppendix":40643,"venient":40644,"Ġinvol":40645,"Ġtheorist":40646,"Ġconqu":40647,"Mich":40648,"ĠSort":40649,"antasy":40650,"dating":40651,"771":40652,"Ġape":40653,"Ġindemn":40654,"ween":40655,"Games":40656,"ascal":40657,"Muslims":40658,"Ġleaflets":40659,"Ġtraverse":40660,"Ġtransgress":40661,"Ġflushed":40662,"893":40663,"lasses":40664,"obos":40665,"ooming":40666,"Ġtou":40667,"mast":40668,"âģ":40669,"751":40670,"Either":40671,"Ġgrate":40672,"urgy":4
0673,"Ġendowed":40674,"ĠRasm":40675,"Nat":40676,"odka":40677,"olon":40678,"iants":40679,"Ġsensations":40680,"Ġsituational":40681,"pox":40682,"Figure":40683,"Ġslime":40684,"Ġ421":40685,"ollow":40686,"Ġanesthesia":40687,"adult":40688,"ĠPiece":40689,"994":40690,"ĠAnalog":40691,"Iv":40692,"flo":40693,"Ġdomest":40694,"Ġcabal":40695,"Ġgarg":40696,"Ġrabb":40697,"REC":40698,"ISTORY":40699,"Friend":40700,"Ġancestor":40701,"ĠLets":40702,"Ġelf":40703,"Ġlobb":40704,"ĠAdren":40705,"silver":40706,"astical":40707,"Ġstitch":40708,"028":40709,"Hug":40710,"Ġmoss":40711,"ompl":40712,"Ġunob":40713,"883":40714,"Ġcortex":40715,"olutely":40716,"052":40717,"Seattle":40718,"restling":40719,"endment":40720,"Ġ366":40721,"ventus":40722,"ĠRated":40723,"ĠClever":40724,"Ġcloak":40725,"phrase":40726,"flake":40727,"Ġphilosophies":40728,"784":40729,"Ġskulls":40730,"wake":40731,"oru":40732,"ĠACTION":40733,"Ġcomprom":40734,"ĠManufacturer":40735,"ĠImprove":40736,"Ns":40737,"ĠRevenge":40738,"lords":40739,"Ġ417":40740,"iddles":40741,"Ġcondesc":40742,"tiny":40743,"Ġchloride":40744,"greg":40745,"ĠREST":40746,"subject":40747,"Ġundes":40748,"ftime":40749,"Ġbottleneck":40750,"ĠZombie":40751,"Ġhabitable":40752,"Ġcigars":40753,"Ġenlarg":40754,"icester":40755,"ðĿ":40756,"regulation":40757,"arters":40758,"Ġformulations":40759,"Ġadhesive":40760,"Ġ344":40761,"pod":40762,"etitive":40763,"Ġcontinuum":40764,"aghd":40765,"Ġ701":40766,"Ġdisband":40767,"Tu":40768,"Ġcivilisation":40769,"ĠPCI":40770,"Ġcrooked":40771,"ammy":40772,"Ġbrim":40773,"Jr":40774,"ĠBunker":40775,"plot":40776,"Ġwielded":40777,"Ġcaricature":40778,"ĠInfinite":40779,"piracy":40780,"aretz":40781,"Ġstares":40782,"incinnati":40783,"agents":40784,"ĠObamaCare":40785,"asuring":40786,"ansion":40787,"Ġastonished":40788,"iovascular":40789,"Bio":40790,"Ġadvisable":40791,"Ġsender":40792,"887":40793,"Led":40794,"DN":40795,"Ġaggregation":40796,"ĠInnocent":40797,"ĠTransactions":40798,"worms":40799,"ĠWorm":40800,"Ġ363":40801,"ĠBiblical":40802,"rared":40803,"Ġgazing":40804,"chant":40805,"Ġsubordinates":40806,"1600":40807,"actually":40808,"olition":40809,"ĠRTX":40810,"ĠPyramid":40811,"alph":40812,"ĠFPS":40813,"Ġerrone":40814,"ĠLR":40815,"Scientists":40816,"Ġincons":40817,"Ġbrittle":40818,"027":40819,"ĠBowser":40820,"Rub":40821,"links":40822,"ĠWik":40823,"ussion":40824,"Marsh":40825,"resents":40826,"Clean":40827,"Ġbrute":40828,"ĠInventory":40829,"1100":40830,"ĠATK":40831,"793":40832,"Ġcaveats":40833,"ĠKnot":40834,"IRT":40835,"ĠCanad":40836,"isma":40837,"entin":40838,"Own":40839,"Ġ455":40840,"Ġlesions":40841,"ĠAres":40842,"ĠKali":40843,"Ġpaws":40844,"Auto":40845,"Ġdiscrim":40846,"044":40847,"ĠCOUN":40848,"Ġ1905":40849,"Ġexperien":40850,"Ġ406":40851,"achelor":40852,"Ġscarcely":40853,"Ġsynchronized":40854,"Rat":40855,"Blake":40856,"Ġrewriting":40857,"Ġcannons":40858,"stem":40859,"Apparently":40860,"Ġleveling":40861,"?]":40862,"Ġfins":40863,"ĠTone":40864,"ogether":40865,"Sound":40866,"Ġmicrosc":40867,"ĠAsylum":40868,"Ġindividuality":40869,"Ġ432":40870,"lease":40871,"Chuck":40872,"Ġhating":40873,"Ġleftists":40874,"ĠPersonality":40875,"ĠBundle":40876,"Dutch":40877,"Ġtransformer":40878,"iami":40879,"ĠTradition":40880,"ĠRecipes":40881,"Ġdiscour":40882,"Viol":40883,"Ext":40884,"ĠOliv":40885,"ashington":40886,"Ġmillennia":40887,"Ġpsychiatrists":40888,"ĠTrilogy":40889,"inction":40890,"Ġdisliked":40891,"088":40892,"954":40893,"Ġoverloaded":40894,"Ġopium":40895,"acus":40896,"resources":40897,"mud":40898,"ometry":40899,"Hit":40900,"Ġguild":40901,"Ġabyss":40902,"884":40903,"ensity":40904,"ĠDifference":40905,"E
lectric":40906,"authent":40907,"Ġdownloadable":40908,"ellar":40909,"ĠSavior":40910,"ĠFRI":40911,"Ġ445":40912,"Ġincidental":40913,"Ġanalogue":40914,"ounters":40915,"ĠBuilder":40916,"Ġnarration":40917,"ategor":40918,"raise":40919,"Ġindoctr":40920,"Aren":40921,"Ġbaptism":40922,"Ġobe":40923,"Ġtubing":40924,"apsed":40925,"Fortunately":40926,"gered":40927,"Pict":40928,"Ġmastering":40929,"ĠHIM":40930,"ĠObesity":40931,"Ġornament":40932,"advant":40933,"ĠCous":40934,"032":40935,"cells":40936,"Ġpreclude":40937,"Ġanecdote":40938,"Ġpatriarchy":40939,"ĠSending":40940,"Pie":40941,"Ġdepressive":40942,"ĠEnds":40943,"712":40944,"zos":40945,"icka":40946,"Ġ1906":40947,"Anti":40948,"vana":40949,"ĠRestrict":40950,"Ġprotr":40951,"Ġusername":40952,"Ġparach":40953,"1997":40954,"imental":40955,"rower":40956,"carb":40957,"033":40958,"Ġobligatory":40959,"Ġwillful":40960,"Ġsnail":40961,"json":40962,"izarre":40963,"Ġmiscar":40964,"Ġdopamine":40965,"л":40966,"Ġapplic":40967,"Ġnervously":40968,"YY":40969,"alez":40970,"ĠSoviets":40971,"ĠMister":40972,"Ġcrates":40973,"Ġheavenly":40974,"Ġdoct":40975,"048":40976,"Ġ2400":40977,"ivia":40978,"adies":40979,"Phone":40980,"asks":40981,"Ġperenn":40982,"Ġcomposing":40983,"Ġraiding":40984,"requent":40985,"ibli":40986,"ĠFeedback":40987,"cellaneous":40988,"ĠContracts":40989,"ĠCasting":40990,"vim":40991,"Cut":40992,"Ġabbrevi":40993,"Ġintest":40994,"ricted":40995,"969":40996,"nostic":40997,"Ġinverted":40998,"ĠEG":40999,"aiden":41000,"ĠClaud":41001,"ĠiP":41002,"urized":41003,"Emily":41004,"Ġ353":41005,"Ġ((":41006,"ammad":41007,"Reb":41008,"plom":41009,"YES":41010,"connection":41011,"ĠWra":41012,"ĠMerch":41013,"Ġether":41014,"Elizabeth":41015,"Chip":41016,"relevant":41017,"URA":41018,"Ġantioxidant":41019,"ĠChron":41020,"Ġtheological":41021,"HCR":41022,"ruits":41023,"Body":41024,"enezuel":41025,"Few":41026,"adder":41027,"Ġinducing":41028,"ĠDarth":41029,"Ġimplicitly":41030,"Ġoverfl":41031,"Ġrelics":41032,"Must":41033,"ĠAnswers":41034,"Ġretina":41035,"ĠSlowly":41036,"ĠShib":41037,"software":41038,"Ġ\"\"":41039,"hack":41040,"Apart":41041,"told":41042,"Ger":41043,"Civil":41044,"problem":41045,"Ġslang":41046,"Ġtactile":41047,"Ġtabl":41048,"ĠAscension":41049,"Ġhumankind":41050,"Howard":41051,"rescent":41052,"ĠReleases":41053,"arijuana":41054,"Christopher":41055,"ĠWarden":41056,"blogspot":41057,"ĠVari":41058,"idency":41059,"ĠHandler":41060,"Round":41061,"MJ":41062,"Ġrhyth":41063,"Tai":41064,"terson":41065,"Ġ,\"":41066,"portation":41067,"ĠOrbital":41068,"Ġfantas":41069,"Ġattribut":41070,"Ġdiagram":41071,"atech":41072,"1992":41073,"ibl":41074,"Woman":41075,"ternally":41076,"Days":41077,"Ġdebunk":41078,"ĠPhant":41079,"ĠOath":41080,"sharp":41081,"Ġclaws":41082,"Lots":41083,"Incre":41084,"Aff":41085,"hooting":41086,"rect":41087,"Ġaltru":41088,"Ġwors":41089,"Ġtho":41090,"Ġ349":41091,"clusions":41092,"Ġpseudonym":41093,"Bec":41094,"Ġphosphorus":41095,"ivic":41096,"Ġ348":41097,"otent":41098,"Ġub":41099,"Ġcoales":41100,"regate":41101,"Ġ1870":41102,"Ġglide":41103,"treated":41104,"ĠSymb":41105,"Ġenchant":41106,"Besides":41107,"stocks":41108,"Ġ388":41109,"--------------":41110,"interpret":41111,"ouple":41112,"Ġdrawback":41113,"ĠRevised":41114,"Ġanat":41115,"Ġpsychosis":41116,"ب":41117,"Ġdiffuse":41118,"Ġaffidav":41119,"elve":41120,"amination":41121,"ĠTackle":41122,"hunter":41123,"env":41124,"Ġchests":41125,"Ġsubter":41126,"Ġconquest":41127,"Ġfidelity":41128,"Ġinfringing":41129,"opathic":41130,"ĠGrip":41131,"ĠKeyboard":41132,"Ġobjectionable":41133,"Ġmetabol":41134,"ĠGö":41135,"Room":41136,"...)":41137,"KEN":411
38,"assic":41139,"Ġgeop":41140,"Tro":41141,"Ġcursing":41142,"Ġdile":41143,"Ġultraviolet":41144,"inarily":41145,"Ġdistilled":41146,"sect":41147,"ĠShooter":41148,"uckles":41149,"Ġdistortions":41150,"Map":41151,"Doctor":41152,"Ġinstalls":41153,"oire":41154,"Ġstarch":41155,"ociation":41156,"Lev":41157,"Ġscripture":41158,"Ġsalient":41159,"ilitating":41160,"wb":41161,"ĠSov":41162,"ĠDamn":41163,"Grey":41164,"Ġ980":41165,"Ġjung":41166,"Ġlicking":41167,"029":41168,"ĠDian":41169,"ĠBabylon":41170,"к":41171,"ĠRomantic":41172,"Ġguesses":41173,"ĠFren":41174,"Generally":41175,"ultural":41176,"istence":41177,"Ġiniti":41178,"Ġ341":41179,"ĠSlave":41180,"ultan":41181,"ĠTrash":41182,"ĠEmpty":41183,"ĠHundred":41184,"ĠDirective":41185,"Anderson":41186,"Advertisement":41187,"RH":41188,"ĠOo":41189,"ĠHik":41190,"peg":41191,"Sup":41192,"ĠXT":41193,"Ġencrypt":41194,"selage":41195,"ĠThrone":41196,"Ġconsecut":41197,"Li":41198,"ĠVirus":41199,"ĠCookies":41200,"SHIP":41201,"Ġflavorful":41202,"odynamics":41203,"animal":41204,"spread":41205,"ĠIPCC":41206,"jobs":41207,"ernand":41208,"ĠHaunted":41209,"Ġintolerable":41210,"ĠLAR":41211,"ixtape":41212,"Ġneur":41213,"Ġcausal":41214,"ĠPsychiatry":41215,"ĠVim":41216,"Ġgenomic":41217,"duration":41218,"ĠUsername":41219,"ategy":41220,"Ġunic":41221,"ĠKILL":41222,"blooded":41223,"Ġcaucuses":41224,"ĠPOLITICO":41225,"Spanish":41226,"Ġobedience":41227,"Ġinconven":41228,"MAT":41229,"Ġbends":41230,"ĠImprovements":41231,"Ġrelig":41232,"ĠForth":41233,"ĠLumia":41234,"uces":41235,"Ġunim":41236,"ĠStatistical":41237,"kb":41238,"auntlet":41239,"ĠDisco":41240,"ĠInstruction":41241,"ooo":41242,"ĠDictionary":41243,"culated":41244,"Adv":41245,"ĠAvatar":41246,"ictional":41247,"Ġcentr":41248,"ifles":41249,"orks":41250,"skill":41251,"Ġlatex":41252,"ĠPagan":41253,"Ġdevast":41254,"Ġprol":41255,"896":41256,"Product":41257,"968":41258,"Ġfrench":41259,"083":41260,"ĠCluster":41261,"cloth":41262,"ĠFilter":41263,"ĠDisorders":41264,"etimes":41265,"Ġinstinctively":41266,"ĠBritann":41267,"Ġaft":41268,"ĠVict":41269,"Ġâĺħ":41270,"Ġperverse":41271,"Ġcontraceptives":41272,"ĠHannibal":41273,"escap":41274,"ĠApostle":41275,"ĠXiao":41276,"ĠMagnum":41277,"Ġphosphate":41278,"Ġ399":41279,"utable":41280,"Ġsten":41281,"Ġwearer":41282,"Ġsmug":41283,"ĠInfluence":41284,"Ġ384":41285,"Truth":41286,"struction":41287,"Ġmaniac":41288,"ĠMagnetic":41289,"ousands":41290,"Ġsemen":41291,"dir":41292,"ĠTornado":41293,"Ġexplos":41294,"1995":41295,"Xi":41296,"Steel":41297,"057":41298,"Barn":41299,"Fan":41300,"ĠChatt":41301,"Chem":41302,"ĠFold":41303,"bees":41304,"1080":41305,"ĠMaze":41306,"ierre":41307,"oeuv":41308,"Cand":41309,"odium":41310,"mmm":41311,"ereo":41312,"Ġreactionary":41313,"Ġacidic":41314,"ĠRemoval":41315,"Ġnont":41316,"031":41317,"ĠTerminator":41318,"ĠVendor":41319,"enemy":41320,"Ġreconstructed":41321,"ĠGalileo":41322,"Ġtesters":41323,"albeit":41324,"uminium":41325,"Ġrite":41326,"ĠInput":41327,"committee":41328,"Ġjour":41329,"gements":41330,"Ġgerm":41331,"Dick":41332,"ĠRequirements":41333,"omsday":41334,"Î":41335,"ISSION":41336,"Ġmolded":41337,"Ġrye":41338,"Attorney":41339,"population":41340,"Ġrepet":41341,"Sync":41342,"breaks":41343,"Ġbanished":41344,"Ġraspberry":41345,"Ġammo":41346,"Ġorthodox":41347,"Ġwebcam":41348,"ĠAsc":41349,"vl":41350,"1989":41351,"Ġdiscipl":41352,"Ġmoreover":41353,"Ġexplodes":41354,"1960":41355,"Ġpropositions":41356,"Protect":41357,"Ġsexes":41358,"physical":41359,"ĠAthena":41360,"ocent":41361,"ĠGothic":41362,"ĠRacial":41363,"istani":41364,"Ġhelium":41365,"ĠPresumably":41366,"Ġperman":41367,"becue":41368,"ĠHW
":41369,"rued":41370,"ĠCNS":41371,"DEP":41372,"ĠManifest":41373,"2500":41374,"ĠMyst":41375,"Economic":41376,"Prot":41377,"Ġledge":41378,"Ġimitate":41379,"ĠTotally":41380,"ĠBeaut":41381,"OIL":41382,"Ġ1440":41383,"Moscow":41384,"ĠSets":41385,"merga":41386,"Ġlesbians":41387,"Walker":41388,"Move":41389,"ĠSOM":41390,"ĠPsy":41391,"strument":41392,"Ġiter":41393,"ĠTosh":41394,"oola":41395,"ĠAntiqu":41396,"ĠShining":41397,"Ġobservational":41398,"VW":41399,"rophe":41400,"034":41401,"Ġcontiguous":41402,"Ġstarve":41403,"sure":41404,"Ġnegate":41405,"Ġmindless":41406,"tf":41407,"Ġdownwards":41408,"046":41409,"riors":41410,"Ġreverted":41411,"ĠAthe":41412,"Bra":41413,"eah":41414,"Rachel":41415,"Hung":41416,"Join":41417,"ĠRaces":41418,"Ġmutant":41419,"Ġuncond":41420,"Ġusability":41421,"NESS":41422,"haust":41423,"036":41424,"Ġobscurity":41425,"Ġimperialism":41426,"Ġemitting":41427,"Ġideologically":41428,"ĠIro":41429,"erva":41430,"ĠIzzy":41431,"ĠLevels":41432,"onym":41433,"ĠConspiracy":41434,"ĠSapphire":41435,"Ul":41436,"Ġhuh":41437,"ochem":41438,"Ġbehaves":41439,"ĠMesh":41440,"Ark":41441,"Ġvec":41442,"ĠActions":41443,"Ġdistinguishing":41444,"ĠTsarnaev":41445,"ĠEndurance":41446,"ederation":41447,"itant":41448,"Ġstreetcar":41449,"041":41450,"ĠAval":41451,"ĠCompanion":41452,"ĠCartoon":41453,"Ġcalculus":41454,"993":41455,"eq":41456,"ĠVanilla":41457,"MAC":41458,"wolves":41459,"fg":41460,"Ġfermentation":41461,"Ġinformants":41462,"Ġsudo":41463,"Ġperipher":41464,"Ġindign":41465,"parts":41466,"detail":41467,"femin":41468,"blade":41469,"Ġinserts":41470,"Ġoffsets":41471,"Ġantidepressants":41472,"Ġphr":41473,"Ġresultant":41474,"biology":41475,"Ġacquies":41476,"UFF":41477,"****************":41478,"ĠPenalty":41479,"Ġrever":41480,"heric":41481,"ĠShadows":41482,"command":41483,"Ġreprint":41484,"089":41485,"empty":41486,"ĠTAG":41487,"stim":41488,"FK":41489,"Ġkins":41490,"uggle":41491,"imura":41492,"wit":41493,"Kill":41494,"Beck":41495,"Ocean":41496,"Ġlabyrinth":41497,"ĠNorse":41498,"IENCE":41499,"Ġ+++":41500,"DoS":41501,"gm":41502,"Ġbarbar":41503,"ĠCeres":41504,"Ġhashing":41505,"eworthy":41506,"Ġrecite":41507,"Ġelectrodes":41508,"Ġconformity":41509,"response":41510,"olate":41511,"Ġ357":41512,"Snap":41513,"Crime":41514,"Ġpointer":41515,"ĠTIT":41516,"Ġdistinctions":41517,"Ġ427":41518,"ĠÙĪ":41519,"abases":41520,"Mars":41521,"ĠSpiritual":41522,"Ġimpuls":41523,"Philadelphia":41524,"1994":41525,"Ġcunning":41526,"Ġfram":41527,"Ġinco":41528,"Ġomnip":41529,"imize":41530,"ervative":41531,"Gy":41532,"Drug":41533,"Ġcarniv":41534,"ĠSailor":41535,"download":41536,"ĠBeetle":41537,"ĠEarthqu":41538,"izontal":41539,"Alan":41540,"Nice":41541,"Prior":41542,"MAG":41543,"Ġautobi":41544,"ĠBrill":41545,"Ġpredominant":41546,"ĠMessiah":41547,"REM":41548,"ĠSlip":41549,"ĠWebs":41550,"ademic":41551,"<":41552,"ĠVessel":41553,"vari":41554,"Code":41555,"Ġbeetle":41556,"projects":41557,"BAT":41558,"Ġpsychotic":41559,"Ġunderside":41560,"Ġrefute":41561,"Considering":41562,"kees":41563,"wd":41564,"priority":41565,"Ġtwentieth":41566,"Ġatheist":41567,"amina":41568,"Ġeuphem":41569,"Ġtripod":41570,"ĠTrayvon":41571,"ĠNON":41572,"2200":41573,"ĠNPC":41574,"ependence":41575,"ĠMHz":41576,"ĠBung":41577,"Ġpane":41578,"Ġaboriginal":41579,"ĠPLUS":41580,"igers":41581,"ĠSexy":41582,"MF":41583,"Chall":41584,"Ay":41585,"ilingual":41586,"adj":41587,"Ġfrown":41588,"successful":41589,"stack":41590,"Ġic":41591,"ĠSeah":41592,"Ġconsequ":41593,"bugs":41594,"ĠScand":41595,"ĠCurve":41596,"Nob":41597,"ĠHoo":41598,"ĠKissinger":41599,"ĠTimeline":41600,"Ġmt":41601,"Description":41602,"YP":
41603,"ĠInstallation":41604,"levision":41605,"Ġanthropology":41606,"itzerland":41607,"iaries":41608,"kward":41609,"robat":41610,"Ġcarbohydrate":41611,"Phot":41612,"оÐ":41613,"ĠSQL":41614,"Disc":41615,"Ġdataset":41616,"ynski":41617,"Ġfiat":41618,"ĠDres":41619,"ĠFavor":41620,"ĠHalls":41621,"Alt":41622,"PART":41623,"Spider":41624,"Ġdisabling":41625,"RG":41626,"Ward":41627,"aturation":41628,"Ġwillfully":41629,"Ġlockout":41630,"ĠShutdown":41631,"956":41632,"Ġcommunists":41633,"Against":41634,"Ore":41635,"ĠRik":41636,"ĠASD":41637,"ĠOnion":41638,"Ġparticulars":41639,"Analy":41640,"checked":41641,"selected":41642,"romy":41643,"ĠAkira":41644,"Ġcongr":41645,"Choice":41646,"Ġbos":41647,"organisms":41648,"Ġfrowned":41649,"Tok":41650,"Bir":41651,"ĠScrib":41652,"Ġrealms":41653,"Ġcoercive":41654,"1993":41655,"021":41656,"âĢĵâĢĵ":41657,"athetic":41658,"rior":41659,"Ġfolly":41660,"ĠAMERICA":41661,"Ġcassette":41662,"953":41663,"Ġabsorbs":41664,"043":41665,"quad":41666,"''.":41667,"ĠExtract":41668,"Ġ424":41669,"Whit":41670,"Dun":41671,"Ġexerted":41672,"Ġbrethren":41673,"ĠChronicles":41674,"eric":41675,"Mot":41676,"Ġendings":41677,"piration":41678,"Ġpredetermined":41679,"ĠAirl":41680,"Ġgasp":41681,"Ġ367":41682,"Ġexclaim":41683,"cation":41684,"sort":41685,"idden":41686,"missive":41687,"ع":41688,"oice":41689,"same":41690,"Ott":41691,"Ġscatter":41692,"Flight":41693,"ĠTOD":41694,"Stra":41695,"amia":41696,"IZE":41697,"Ġcompressor":41698,"ixels":41699,"lethal":41700,"ĠExperimental":41701,"Ing":41702,"knife":41703,"Ġvanishing":41704,"ĠRequired":41705,"Stat":41706,"ĠPlex":41707,"spection":41708,"ĠBakr":41709,"Amazing":41710,"Ġbreaths":41711,"rots":41712,"OSP":41713,"Ġ840":41714,"Wars":41715,"OGR":41716,"Ġ372":41717,"ĠKhe":41718,"inous":41719,"lightly":41720,"ĠRounds":41721,"Ġrefinement":41722,"property":41723,"Ġmetaph":41724,"oultry":41725,"istor":41726,"Ġintestine":41727,"eus":41728,"ĠWilhelm":41729,"ĠBane":41730,"emption":41731,"oubtedly":41732,"ĠVirtue":41733,"'),":41734,"Ħ¢":41735,"Ġappar":41736,"ĠTranslation":41737,"Quite":41738,"Ġphysicists":41739,"Ġpriesthood":41740,"Ġallowable":41741,"Saint":41742,"OSED":41743,"bind":41744,"Ġtorches":41745,"osexual":41746,"Cruz":41747,"ertility":41748,"ĠAES":41749,"Ġascended":41750,"Ġmuzzle":41751,"Ġelectors":41752,"ĠKrug":41753,"Ġcc":41754,"classic":41755,"ĠMace":41756,"Å«":41757,"ĠâĢ¦\"":41758,"ĠTEST":41759,"gomery":41760,"Person":41761,"Ġtranslations":41762,"ĠDys":41763,"ĠConsent":41764,"Ġ361":41765,"alos":41766,"Ġallerg":41767,"ĠWast":41768,"ĠChecks":41769,"cerning":41770,"Ġlizard":41771,"Ġrevolutions":41772,"Ġtether":41773,"Ġminimized":41774,"ĠReverse":41775,"itely":41776,"iguous":41777,"athing":41778,"Flow":41779,"Moving":41780,"Ġ409":41781,"047":41782,"Ġsnug":41783,"Nich":41784,"Ġcartridge":41785,"YL":41786,"Ġforwarding":41787,"umerous":41788,"ĠAbedin":41789,"iolet":41790,"tick":41791,"ĠTransform":41792,"Grant":41793,"Ġsubtitles":41794,"ĠEmin":41795,"ghost":41796,"ĠKurd":41797,"Ġfireball":41798,"compatible":41799,"Ġprojectiles":41800,"amorph":41801,"ĠSatisf":41802,"Ġquirks":41803,"Ġrecept":41804,"spective":41805,"Ġgraphical":41806,"ĠPicard":41807,"ĠAuthent":41808,"ĠSponge":41809,"Army":41810,"ĠLumin":41811,"ĠSOME":41812,"Ġsolitude":41813,"ĠSHOULD":41814,"ĠFasc":41815,"opez":41816,"types":41817,"gallery":41818,"OLOGY":41819,"shake":41820,"Ġ369":41821,"Ġreused":41822,"Ġ378":41823,"Ġexorc":41824,"Ġdocs":41825,"Yu":41826,"ĠGOD":41827,"ocrine":41828,"location":41829,"fif":41830,"Grid":41831,"Ġpowd":41832,"Ġ'[":41833,"Ġposterior":41834,"Thompson":41835,"Table":41836,"oslov":4
1837,"ĠGoddess":41838,"odon":41839,"ĠSTD":41840,"Ġresponsiveness":41841,"stab":41842,"absolute":41843,"Enough":41844,"ĠEssence":41845,"ĠUpgrade":41846,"hematically":41847,"Subscribe":41848,"alsh":41849,"repl":41850,"Ġselector":41851,"ĠLength":41852,"Ġtemporal":41853,"Tele":41854,"ocalyptic":41855,"ĠDeaths":41856,"rl":41857,"Target":41858,"ĠOrn":41859,"ongh":41860,"Ġ1909":41861,"Quest":41862,"Place":41863,"ĠDisabled":41864,"Ġascending":41865,"giene":41866,"ĠMSI":41867,"ivil":41868,"Ġcaval":41869,"Ġintermitt":41870,"Ġsalts":41871,"Apr":41872,"059":41873,"ĠKeeper":41874,"emis":41875,"ĠEternal":41876,"SER":41877,"estones":41878,"Ġrudimentary":41879,"Ġpooled":41880,"ĠAlright":41881,"Ġdiagrams":41882,"ydia":41883,"Jacob":41884,"Ġarchitectures":41885,"ĠUSPS":41886,"Ġfootnote":41887,"ĠBrav":41888,"ĠLeopard":41889,"Ġvirtuous":41890,"ploma":41891,"ĠHIP":41892,"Ġhorizontally":41893,"olith":41894,"Prop":41895,"ĠApocalypse":41896,"Syria":41897,"ĠShowdown":41898,"constitutional":41899,"Independent":41900,"ĠMiliband":41901,"ĠTracks":41902,"adle":41903,"ĠESL":41904,"ĠFIGHT":41905,"Ġjohn":41906,"é":41907,"benef":41908,"eware":41909,"ĠTABLE":41910,"ĠVeg":41911,"ainers":41912,"Ġresolves":41913,"Warren":41914,"ĠRanked":41915,"possibly":41916,"bian":41917,"simple":41918,"Ġuniformly":41919,"ĠSlash":41920,"otton":41921,"ĠAbsent":41922,"agically":41923,"ĠPieces":41924,"Station":41925,"ĠBeware":41926,"ĠDiscrimination":41927,"Ġponies":41928,"Import":41929,"utory":41930,"ĠParas":41931,"Phoenix":41932,"Lat":41933,"UTC":41934,"push":41935,"astically":41936,"urrent":41937,"untarily":41938,"Ġparanormal":41939,"Ġglanced":41940,"Ġmanifestations":41941,"ĠNeuroscience":41942,"irgin":41943,"ROM":41944,"Ġ($)":41945,"Ġ379":41946,"missing":41947,"Ġmercenaries":41948,"Ġenumer":41949,"ĠShant":41950,"Ws":41951,"wered":41952,"Ġbuffs":41953,"ultane":41954,"ĠRohing":41955,"igger":41956,"Ring":41957,"Ġmanifests":41958,"Fat":41959,"ĠReduced":41960,"ĠMinerva":41961,"uart":41962,"ĠArmory":41963,"orange":41964,"igible":41965,"Ġphysiology":41966,"Ut":41967,"Ġparchment":41968,"ĠFired":41969,"trap":41970,"oggle":41971,"mson":41972,"ĠPoster":41973,"Ġbount":41974,"import":41975,"maximum":41976,"Ġ422":41977,"ĠFemin":41978,"Ġnodding":41979,"Ġinscription":41980,"Results":41981,"GRE":41982,"icative":41983,"Ġcognition":41984,"Ġions":41985,"ĠBite":41986,"Ġneutron":41987,"Ġduplication":41988,"ĠZIP":41989,"ĠQuit":41990,"Ġgrasping":41991,"ĠDaylight":41992,"Ġlayouts":41993,"CLA":41994,"reason":41995,"ĠHuh":41996,"Ġpige":41997,"ĠBomber":41998,"Produ":41999,"Ġgland":42000,"ĠAbsolute":42001,"writ":42002,"Ġmassac":42003,"Ġfixation":42004,"device":42005,"yz":42006,"ĠGOT":42007,"ĠDying":42008,"adjust":42009,"grain":42010,"Ġdeform":42011,"Ġtypew":42012,"Ġdagger":42013,"ĠTuring":42014,"ĠBucc":42015,"Heavy":42016,"Ġcommod":42017,"files":42018,"ogeneous":42019,"roth":42020,"Buff":42021,"Ġbookmark":42022,"porary":42023,"Medical":42024,"Um":42025,"Ġtranslucent":42026,"ĠAnxiety":42027,"ĠCorinthians":42028,"optional":42029,"PUT":42030,"Ġcrucifix":42031,"alloween":42032,"ĠVK":42033,"Ġblu":42034,"ĠCorinth":42035,"Mount":42036,"Ġmembranes":42037,"particip":42038,"Ġextraord":42039,"Ġstimulated":42040,"leneck":42041,"Ġspecifies":42042,"Sin":42043,"lash":42044,"Edited":42045,"Ġfused":42046,"Nin":42047,"ĠBungie":42048,"ĠTooth":42049,"WATCH":42050,"Nav":42051,"Initially":42052,"+)":42053,"ĠAncest":42054,"Ġtransmitter":42055,"ĠVolks":42056,"ezvous":42057,"ĠNirvana":42058,"ĠCald":42059,"font":42060,"Und":42061,"remlin":42062,"ichever":42063,"ĠHeal":42064,"shall":42065,"Ġattribu
tion":42066,"authorized":42067,"ĠINTO":42068,"acteria":42069,"ĠTsu":42070,"ĠPlane":42071,"iphate":42072,"igraph":42073,"chev":42074,"Ġinverse":42075,"ifest":42076,"Players":42077,"!!\"":42078,"ĠContrast":42079,"1984":42080,"Ġsevent":42081,"colour":42082,"ĠRational":42083,"virtual":42084,"Ġfec":42085,"ĠETH":42086,"ĠPru":42087,"Õ":42088,"asma":42089,"Cur":42090,"Ġassigns":42091,"Ġridic":42092,"Todd":42093,"ulton":42094,"ĠDefendant":42095,"opsis":42096,"Ġpercentile":42097,"shr":42098,"wagen":42099,"Ġ368":42100,"SIGN":42101,"Screen":42102,"reprene":42103,"Ġerection":42104,"ĠFreak":42105,"ĠStard":42106,"stained":42107,"Ġcla":42108,"fet":42109,"ramids":42110,"QL":42111,"avorable":42112,"ĠTCP":42113,"nown":42114,"ulence":42115,"similar":42116,"Ġlinkage":42117,"ercise":42118,"Path":42119,"LECT":42120,"ĠCollections":42121,"ĠModule":42122,"Ġcs":42123,"Current":42124,"Ġmono":42125,"ĠAlv":42126,"ĠDude":42127,"Ġhypers":42128,"Ġ2600":42129,"surface":42130,"Ġpredictor":42131,"ĠColomb":42132,"Prof":42133,"anqu":42134,"natal":42135,"Ġadultery":42136,"ĠGenerations":42137,"clerosis":42138,"Ġ371":42139,"Ġenlightenment":42140,"onomic":42141,"Ġsatir":42142,"ĠBasics":42143,"Graham":42144,"ĠRove":42145,"Ġadul":42146,"Shut":42147,"ocious":42148,"Ġhandc":42149,"BW":42150,"ĠCognitive":42151,"visible":42152,"Ġinev":42153,"Ġ978":42154,"ĠSupported":42155,"Ġarrays":42156,"Ġalienation":42157,"Weight":42158,"ĠkWh":42159,"Ġwarped":42160,"Ġ386":42161,"lance":42162,"Ġherpes":42163,"ĠPHP":42164,"Ġclaimant":42165,"uitive":42166,"Ġpussy":42167,"Ġcorpus":42168,"ĠAo":42169,"Qual":42170,"ĠXVI":42171,"requ":42172,"Ġsympt":42173,"mination":42174,"Ġhairy":42175,"ĠBattles":42176,"owntown":42177,"Roberts":42178,"Ġnec":42179,"ablo":42180,"AMD":42181,"internet":42182,"Tar":42183,"direction":42184,"ouston":42185,"ĠGlock":42186,"ĠYanukovych":42187,"ogens":42188,"rogram":42189,"otype":42190,"ĠPt":42191,"tenance":42192,"Ġaromatic":42193,"oxin":42194,"Vert":42195,"Ġsociop":42196,"cible":42197,"Db":42198,"________________":42199,"Third":42200,"ĠShips":42201,"!.":42202,"expensive":42203,"WOR":42204,"primary":42205,"Ġ666":42206,"Ġdecaying":42207,"Ġclustered":42208,"Ġbeetles":42209,"ĠHogwarts":42210,"Ġheaders":42211,"ĠJudah":42212,"Ġscen":42213,"Ġcosmos":42214,"ĠGenetic":42215,"blems":42216,"Ġfeeble":42217,"NOW":42218,"NSA":42219,"Ġadminist":42220,"ĠDocker":42221,"portion":42222,"gression":42223,"Ġ1904":42224,"heard":42225,"Ġinhab":42226,"ĠLeaves":42227,"Ġcortisol":42228,"atinum":42229,"unknown":42230,"ĠObserv":42231,"ĠPhilosophy":42232,"Ide":42233,"Ġcopyrighted":42234,"surv":42235,"ĠLocations":42236,"Ġglands":42237,"ĠKnife":42238,"ĠEmber":42239,"ĠUnicorn":42240,"Ġhaste":42241,"Ġkinderg":42242,"ĠTerrit":42243,"ĠKoran":42244,"Ġaval":42245,"addon":42246,"ĠNero":42247,"\"]":42248,"Ġ392":42249,"comfort":42250,"Ġclothed":42251,"ashtra":42252,"mode":42253,"Ġ??":42254,"!\",":42255,"Ġknob":42256,"EMP":42257,"norm":42258,"ĠAgo":42259,"RECT":42260,"Denver":42261,"Ġ1907":42262,"ĠBombs":42263,"Sche":42264,"Ġtriangular":42265,"Ġperv":42266,"rises":42267,"Jes":42268,"Ġcalibration":42269,"Ġts":42270,"Same":42271,"ĠAxe":42272,"ĠMei":42273,"multi":42274,"Ġexerc":42275,"orney":42276,"Ware":42277,"abul":42278,"ĠFior":42279,"Eventually":42280,"ĠGrizz":42281,"Past":42282,"married":42283,"Ġscram":42284,"ĠCache":42285,"posure":42286,"Ġheav":42287,"ĠShirt":42288,"powder":42289,"complex":42290,"Doc":42291,"arus":42292,"Pi":42293,"Ġcurv":42294,"ĠTopic":42295,"Ġ.)":42296,"Ġwills":42297,"philis":42298,"gui":42299,"leground":42300,"Eth":42301,"Strike":42302,"Kid":42303,"Ġdele
gated":42304,"Soon":42305,"Ġwast":42306,"gage":42307,"Ġprosecut":42308,"Ġ374":42309,"opolis":42310,"chest":42311,"ensation":42312,"Ġredes":42313,"Ġpresum":42314,"Portland":42315,"Ġannihil":42316,"yssey":42317,"Ġforks":42318,"Ġvitro":42319,"walker":42320,"ĠPsal":42321,"ĠStealth":42322,"Quick":42323,"ĠBaghd":42324,"ĠDrift":42325,"//":42326,"Ġinvincible":42327,"ĠGAM":42328,"Ġcastles":42329,"Ġbondage":42330,"ĠBalloon":42331,"Amid":42332,"individual":42333,"tis":42334,"ĠGuides":42335,"xe":42336,"Cong":42337,"URI":42338,"ĠHH":42339,"PHOTOS":42340,"ĠASIC":42341,"burst":42342,"ahon":42343,"ĠFIX":42344,"ilib":42345,"Ġ457":42346,"ĠLogged":42347,"à¹":42348,"Creat":42349,"inatory":42350,"column":42351,"ĠAugustus":42352,"suggest":42353,"pret":42354,"ĠParan":42355,"Ġsubsistence":42356,"wx":42357,"×":42358,"aleigh":42359,"dash":42360,"ĠMana":42361,"Ko":42362,"opausal":42363,"Ġbene":42364,"ĠSabb":42365,"ĠGhosts":42366,"Ġ1830":42367,"ĠHats":42368,"ĠHive":42369,"Perfect":42370,"Ġsocialists":42371,"Ġtumult":42372,"EGA":42373,"ĠNAME":42374,"Android":42375,"assembled":42376,"phis":42377,"Stage":42378,"Char":42379,"Double":42380,"Ġinsign":42381,"IED":42382,"perial":42383,"ĠEMP":42384,"mx":42385,"Ġskept":42386,"Ġwifi":42387,"Ġparad":42388,"ĠFrequency":42389,"Dist":42390,"nil":42391,"iots":42392,"å":42393,"Message":42394,"Furthermore":42395,"Ġhideous":42396,"ĠLDL":42397,"ĠFault":42398,"ĠDimensions":42399,"ĠImplement":42400,"fram":42401,"Ġamaz":42402,"ĠIndones":42403,"ĠTile":42404,"Ġlar":42405,"gc":42406,"Ġcorrelate":42407,"Ġensl":42408,"mite":42409,"Ġhomosexuals":42410,"Ġagric":42411,"8000":42412,"Ġcuring":42413,"rament":42414,"Ġrecons":42415,"ocene":42416,"ENTION":42417,"Ġcommunion":42418,"ĠFunction":42419,"iple":42420,"Ġredund":42421,"Ġcalibrated":42422,"Ġcontribut":42423,"ĠHuck":42424,"limit":42425,"ĠFedora":42426,"ĠTsuk":42427,"brates":42428,"Ġ1903":42429,"ozo":42430,"visual":42431,"ĠDiscipline":42432,"chains":42433,"ĠOCD":42434,"Ġexpended":42435,"0002":42436,"Ġsty":42437,"ĠNightmare":42438,"ĠReplace":42439,"ounty":42440,"fn":42441,"1900":42442,"ĠEpidem":42443,"ĠFW":42444,"Ġgul":42445,"ĠTomato":42446,"ĠPerse":42447,"wl":42448,"ĠFormation":42449,"Scan":42450,"cosystem":42451,"Brand":42452,"Ġ398":42453,"Ġcaptives":42454,"Ġ×":42455,"ESCO":42456,"ĠEnder":42457,"lesh":42458,"ĠAscend":42459,"poly":42460,"eous":42461,"Ġhyster":42462,"Murray":42463,"phe":42464,"Ġradiator":42465,"esthes":42466,"Ġopin":42467,"Ġconspic":42468,"intosh":42469,"Ġwitchcraft":42470,"ĠCFR":42471,"ussian":42472,"escent":42473,"locking":42474,"Ġnonsensical":42475,"uala":42476,"ĠSerial":42477,"1991":42478,"ĠCalm":42479,"containing":42480,"Ġstimulates":42481,"Ġ448":42482,"Pir":42483,"ĠâĨĴ":42484,"ĠDiver":42485,"Ġmanuscripts":42486,"ĠGaia":42487,"Ñĥ":42488,"Learning":42489,"Ġnipple":42490,"reads":42491,"Ġandroid":42492,"ĠMeditation":42493,"Ġincomprehensible":42494,"edded":42495,"Ġdescendant":42496,"ĠMorty":42497,"Luckily":42498,"ARCH":42499,"ausible":42500,"Dig":42501,"shared":42502,"ĠClip":42503,"Ġtrope":42504,"Ġnarcissistic":42505,"ventures":42506,"Ġcuriously":42507,"ĠCosmos":42508,"Aust":42509,"Lay":42510,"ĠShard":42511,"ĠRecorded":42512,"Ġ458":42513,"........":42514,"Ġperish":42515,"ĠExample":42516,"luent":42517,"Ġapes":42518,"ĠHitch":42519,"Ġholiest":42520,"Ġamplifier":42521,"minent":42522,"xxxxxxxx":42523,"inite":42524,"Ġgenomes":42525,"ĠGuilty":42526,"mult":42527,"Ġorc":42528,"Ġnipples":42529,"Side":42530,"Ġlogically":42531,"Ġdatasets":42532,"ĠTitanium":42533,"Ġrotor":42534,"undle":42535,"handled":42536,"nexpected":42537,"Ġdw":42538,"Ġdiagon
al":42539,"ĠAnimated":42540,"Ġnumbering":42541,"Forest":42542,"ĠâĨ":42543,"Prin":42544,"Ġchemically":42545,"ĠGithub":42546,"Ġaph":42547,"ĠFaster":42548,"ĠTinker":42549,"ikini":42550,"Dest":42551,"dri":42552,"Manufact":42553,"isance":42554,"Return":42555,"Alert":42556,"elcome":42557,"ĠMMR":42558,"Ġresid":42559,"ĠLIC":42560,"Ġspecificity":42561,"zanne":42562,"Ġanyways":42563,"Ġ426":42564,"Scot":42565,"astery":42566,"Via":42567,"ĠBlocks":42568,"Ġactivates":42569,"Ġabstinence":42570,"Ġchronological":42571,"Soul":42572,"ĠSchne":42573,"Ġwatts":42574,"AUT":42575,"Ġcalcul":42576,"Simply":42577,"Emb":42578,"ceptive":42579,"ĠCatholicism":42580,"obook":42581,"ĠBits":42582,"ĠMbps":42583,"Ġindignation":42584,"Ġshorthand":42585,"Active":42586,"ĠLimbaugh":42587,"ĠCapcom":42588,"adesh":42589,"Ġclipping":42590,"ĠInstructor":42591,"Secret":42592,"___":42593,"Fer":42594,"rawling":42595,"ĠReward":42596,"Ġweep":42597,"Ġmotherboard":42598,"Above":42599,"metry":42600,"ĠPTS":42601,"Ġbombard":42602,"abetes":42603,".--":42604,"Lens":42605,"Comb":42606,"basic":42607,"ĠREALLY":42608,"Later":42609,"Ġ383":42610,"Ġpositional":42611,"olesc":42612,"Ġcrotch":42613,"ĠMDMA":42614,"requently":42615,"ĠPants":42616,"Ġ433":42617,"uctor":42618,"Ġillumination":42619,"ĠÙħ":42620,"ocrin":42621,"Ġpamph":42622,"atio":42623,"etc":42624,"Ġrestores":42625,"ĠProtector":42626,"Develop":42627,"ĠMew":42628,"trop":42629,"ĠSlayer":42630,"Ti":42631,"ĠNotwithstanding":42632,"Match":42633,"LIST":42634,"IDES":42635,"ĠThick":42636,"Ġdisks":42637,"Kin":42638,"Ġghetto":42639,"ĠObjects":42640,"Ġprism":42641,"ĠNether":42642,"Ġvul":42643,"iky":42644,"]:":42645,"ĠDetail":42646,"Ġfucked":42647,"!?":42648,"anium":42649,"Ġlords":42650,"ilities":42651,"ĠEthnic":42652,"static":42653,"$$":42654,"evidence":42655,"Ġmainline":42656,"Ġpeasant":42657,"ĠEnhance":42658,"ĠForced":42659,"virt":42660,"Ġii":42661,"Ġsymm":42662,"Ġconverter":42663,"ularity":42664,"Ġrepent":42665,"num":42666,"ĠScrew":42667,"ĠFTA":42668,"Ġmarines":42669,"hetto":42670,"blow":42671,"Ġado":42672,"ĠTypical":42673,"Ġoverw":42674,"ĠBerm":42675,"keley":42676,"Song":42677,"hao":42678,"valid":42679,"EXT":42680,"ĠProvides":42681,"âĺħâĺħ":42682,"ĠOdin":42683,"Shot":42684,"Ġgamma":42685,"Princ":42686,"asonry":42687,"ĠAccuracy":42688,"Ġcriterion":42689,"Ġdescriptive":42690,"Gall":42691,"gray":42692,"ĠCalcul":42693,"Ġaxes":42694,"ĠCommunists":42695,"ĠRebellion":42696,"Success":42697,"tg":42698,"Ġâĺ":42699,"Ġmultiplier":42700,"ravity":42701,"Thus":42702,"URL":42703,"Ġalternatively":42704,"duction":42705,"Ġsarcast":42706,"ĠCarth":42707,"ĠUSL":42708,"ĠInvisible":42709,"larg":42710,"pleted":42711,"pathic":42712,"Additionally":42713,"ĠCao":42714,"Ġlatent":42715,"ĠSurge":42716,"MEN":42717,"communications":42718,"ĠArray":42719,"Pink":42720,"commit":42721,"isodes":42722,"earcher":42723,"Ukraine":42724,"ĠAnthrop":42725,"incial":42726,"Ġquotations":42727,"adena":42728,"Ġwhining":42729,"Ġretri":42730,"ĠAssass":42731,"elligent":42732,"ĠPERSON":42733,"Py":42734,"Send":42735,"ĠâĪĴ":42736,"DON":42737,"Ġwatt":42738,"description":42739,"POS":42740,"Ġrepro":42741,"destroy":42742,"icidal":42743,"Ġmidrange":42744,"Ġinfographic":42745,"interesting":42746,"category":42747,"Flash":42748,"ĠInvasion":42749,"ĠExodus":42750,"restricted":42751,"Ġinference":42752,"dding":42753,"mingham":42754,"Ġcircumst":42755,"Wi":42756,"ĠHast":42757,"Ġsubjug":42758,"Ġwhispering":42759,"-.":42760,"Ġadren":42761,"ĠPattern":42762,"BOX":42763,"ĠEnhancement":42764,"Exc":42765,"ĠBucket":42766,"ĠGUN":42767,"deen":42768,"ĠHomo":42769,"1985":42770,"Ġclo":427
71,"Ġsnippet":42772,"Ġ1896":42773,"TPP":42774,"Seg":42775,"success":42776,";\"":42777,"ĠMUCH":42778,"Author":42779,"Ġreplication":42780,"Ġhallucinations":42781,"Inv":42782,"ĠAware":42783,"ĠViper":42784,"kai":42785,"frames":42786,"ĠTHANK":42787,"ĠSHA":42788,"wordpress":42789,"Ġbc":42790,"CIA":42791,"arrison":42792,"Ġalloc":42793,"ĠAlz":42794,"letcher":42795,"ĠDaredevil":42796,"iversary":42797,"Ġmanuals":42798,"Catholic":42799,"feat":42800,"Ġkinetic":42801,"JB":42802,"yeah":42803,"ĠLDS":42804,"Ġppm":42805,"ĠADC":42806,"pring":42807,"cence":42808,"Ġclasp":42809,"Ġsetups":42810,"Ġdeity":42811,"ĠIndra":42812,"ĠWander":42813,"Ġantib":42814,"Otherwise":42815,"ombie":42816,"Bitcoin":42817,"ipop":42818,"expression":42819,"Animal":42820,"ĠResurrection":42821,"ĠMoral":42822,"ĠSDK":42823,"Ġwretched":42824,"ogenous":42825,"species":42826,"Ġchuckled":42827,"Thor":42828,"Ġ428":42829,"avery":42830,"ĠPry":42831,"asures":42832,"ĠErn":42833,"apor":42834,"Ġinnumerable":42835,"Ġbaptized":42836,"ĠExplosive":42837,"Ġelves":42838,"idges":42839,"ĠParadox":42840,"Close":42841,"aldehyde":42842,"construct":42843,"Ġvirginity":42844,"Poll":42845,"assin":42846,"Doctors":42847,"Pos":42848,"NECT":42849,"Moreover":42850,"Commercial":42851,"cknowled":42852,"1988":42853,"Ġquotation":42854,"marriage":42855,"ĠBapt":42856,"ĠSina":42857,"ĠGloves":42858,"gian":42859,"Ġconfounding":42860,"URRENT":42861,"Dean":42862,"Brew":42863,"thur":42864,"pty":42865,"immune":42866,"ĠSQU":42867,"Ġcounterfe":42868,"rider":42869,"Ġinferred":42870,"ĠDimension":42871,"ĠToad":42872,"Ġafterlife":42873,"ĠHERO":42874,"Indiana":42875,"seek":42876,"Ġdistinguishes":42877,"ĠQur":42878,"ĠMethods":42879,"combat":42880,"Ġcateg":42881,"ĠStruggle":42882,"teness":42883,"liquid":42884,"Ġblinking":42885,"ĠCONTIN":42886,"iae":42887,"Ġaerobic":42888,"Ġstrugg":42889,"Ġegalitarian":42890,"hello":42891,"orrect":42892,"ĠAbandon":42893,"Ġferment":42894,"Area":42895,"idem":42896,"ĠMania":42897,"Ġjs":42898,"ĠBALL":42899,"Running":42900,"Ġregenerate":42901,"iquid":42902,"Uh":42903,"Crystal":42904,"ĠItal":42905,"ĠHeavenly":42906,"в":42907,"CRIPTION":42908,"Consumer":42909,"dust":42910,"amiliar":42911,"ĠRhino":42912,"Rocket":42913,"Ġreversible":42914,"kok":42915,"ĠSketch":42916,"Ġshotguns":42917,"apses":42918,"Ġdetach":42919,"ĠCells":42920,"artist":42921,"rily":42922,"ĠRestore":42923,"Scar":42924,"Ġevid":42925,"Ġspaced":42926,"ĠContributions":42927,"Ġ418":42928,"ĠMystic":42929,"Ġobfusc":42930,"Russ":42931,"wings":42932,"Pear":42933,"osite":42934,"Nusra":42935,"urations":42936,"ovie":42937,"icago":42938,"ĠConcepts":42939,"Ġstimuli":42940,"Ġaroused":42941,"aughty":42942,"Talking":42943,"ĠPrompt":42944,"Across":42945,"ĠPlaint":42946,"Ġbranching":42947,"Thankfully":42948,"Original":42949,"Esc":42950,"ĠTechnician":42951,"fleet":42952,"usher":42953,"Mos":42954,"livion":42955,"oenix":42956,"Ġhr":42957,"ibble":42958,"Ġindent":42959,"ĠFinished":42960,"Department":42961,"ĠINFO":42962,"Movie":42963,"++":42964,"THING":42965,"Ġtimers":42966,"rocket":42967,"Natural":42968,"lime":42969,"Ġangular":42970,"osure":42971,"Ġdynamically":42972,"Ġpacif":42973,"ĠProcessor":42974,"Ġdisgu":42975,"Ġmoderators":42976,"Ġceases":42977,"Ġinertia":42978,"Ġpaperback":42979,"yton":42980,"ĠHuma":42981,"Ġprohibitions":42982,"Ġgestation":42983,"Bomb":42984,"termin":42985,"Ġcaric":42986,"oS":42987,"tc":42988,"Cop":42989,"raved":42990,"Ġeighty":42991,"ĠEnable":42992,"Ġimplementations":42993,"Ġconquering":42994,"ĠFinder":42995,"window":42996,"Gra":42997,"Ġfonts":42998,"laughter":42999,"Ġcolonization":43000,"ĠDOD":430
01,")!":43002,",)":43003,"ĠGeral":43004,"ĠSpoiler":43005,"ĠComponent":43006,"Ġgist":43007,"hiro":43008,"Ġlicens":43009,"nesses":43010,"Ġkarma":43011,"?\".":43012,"OPA":43013,"Ġsquats":43014,"ĠRAND":43015,"Ġorally":43016,"document":43017,"olars":43018,"Ġpresumptive":43019,"Pers":43020,"OAD":43021,"ufficient":43022,"LESS":43023,"Hidden":43024,"ORK":43025,"xs":43026,"Ġmathematician":43027,"ĠGloss":43028,"Ġannihilation":43029,"Ġmanifold":43030,"Ry":43031,"Thunder":43032,"Yan":43033,"Activ":43034,"Ġworldly":43035,"TED":43036,"marg":43037,"ĠStun":43038,"ryce":43039,"ĠVG":43040,"Isn":43041,"ĠCyn":43042,"Expl":43043,"IRED":43044,"Ġcompr":43045,"Ġindisc":43046,"Boss":43047,"()":43048,"berman":43049,"ĠBegins":43050,"ujah":43051,"ornia":43052,"hetical":43053,"Ġcivilizations":43054,"Ġfundamentalist":43055,"strap":43056,"Forward":43057,"ettlement":43058,"Ġprophetic":43059,"glers":43060,"bending":43061,"Terry":43062,"Ġidi":43063,"Ġtrunc":43064,"Ġcreeps":43065,"intel":43066,"switch":43067,"ailand":43068,"Ġinstaller":43069,"GOP":43070,"Ġ499":43071,"ĠParallel":43072,"Cru":43073,"Ġ\"@":43074,"Ġ396":43075,"ĠUnlock":43076,"Raven":43077,"Corn":43078,"Ġcircadian":43079,"Ġ********************************":43080,"iliate":43081,"ĠFunctional":43082,"Ġpronouns":43083,"ĠSatoshi":43084,"Ġstim":43085,"Gay":43086,"Iss":43087,"ĠThief":43088,"atellite":43089,"Ġshards":43090,"Ġphil":43091,"protein":43092,"Ġalters":43093,"Poor":43094,"Typically":43095,"KER":43096,"ociate":43097,"Ġemits":43098,"recy":43099,"Ġmechanically":43100,"Ġ...\"":43101,"nature":43102,"sys":43103,"ysc":43104,"Ġwavelengths":43105,"pattern":43106,"insured":43107,"Ġparasitic":43108,"ĠLCS":43109,"ĠPACs":43110,"Ġheals":43111,"ĠCCP":43112,"ĠHacker":43113,"Ġpsy":43114,"ĠBeans":43115,"Ġdemonic":43116,"JV":43117,"Ġatmosp":43118,"equality":43119,"Ġairst":43120,"Ġincarn":43121,"ynthesis":43122,"Ġequations":43123,"tch":43124,"ĠHUGE":43125,"ĠChanged":43126,"itatively":43127,"Job":43128,"gaming":43129,"Ġ1899":43130,"ĠMorsi":43131,"Ġconjecture":43132,"riad":43133,"Ġprimates":43134,"ĠArtemis":43135,"ĠThro":43136,"Ġbiologically":43137,"Church":43138,"topia":43139,"recomm":43140,"Ġgradient":43141,"Ġful":43142,"Ġbastard":43143,"CHO":43144,"IUM":43145,"sleep":43146,"Construction":43147,"raints":43148,"vable":43149,"ionage":43150,"Ġcomrade":43151,"Ġpopulate":43152,"Ġnerds":43153,"ĠXie":43154,"result":43155,"ĠImper":43156,"Ġpamphlet":43157,"Ku":43158,"Ġbackend":43159,"ificent":43160,"etus":43161,"Ġdisson":43162,"config":43163,"Ġsuc":43164,"Ġwavelength":43165,"external":43166,"owder":43167,"Ġpredis":43168,"eenth":43169,"Det":43170,"andem":43171,"Ġ1865":43172,"ĠDefeat":43173,"Individual":43174,"Ġretrieving":43175,"stories":43176,"Ġdesolate":43177,"Ġlett":43178,"Ġunpublished":43179,"Ġpassively":43180,"Ġdissertation":43181,"raits":43182,"abee":43183,"ĠResist":43184,"Robin":43185,"Ġbenevolent":43186,"blast":43187,"Offic":43188,"snap":43189,"vernment":43190,"Ġextermin":43191,"wt":43192,"bitious":43193,"hibited":43194,"Insp":43195,"posted":43196,"ĠYugoslav":43197,"rational":43198,"adapt":43199,"ĠAtari":43200,"Ġplugin":43201,"oglobin":43202,"efeated":43203,"ĠHRC":43204,"cko":43205,"ilver":43206,"ĠDestruction":43207,"gewater":43208,"ĠRadiation":43209,"Ġimprison":43210,"origin":43211,"antine":43212,"ĠPublication":43213,"Ġhealer":43214,"istered":43215,"ĠTHEIR":43216,"hazard":43217,"Contract":43218,"Ġmediated":43219,"Ġindexed":43220,"ĠSYSTEM":43221,"Labor":43222,"Blade":43223,"Ġyog":43224,"Champ":43225,"Gordon":43226,"IAS":43227,"Ġnineteenth":43228,"animous":43229,"begin":43230,"ĠHolo":
43231,"Planet":43232,"udding":43233,"default":43234,"ĠOMG":43235,"Ġwond":43236,"wm":43237,"pend":43238,"Extreme":43239,"Ġinterstellar":43240,"ASED":43241,"ĠBerks":43242,"Ġprimal":43243,"Foot":43244,"Ġinadvert":43245,"amboo":43246,"ĠLeica":43247,"Events":43248,"ĠPigs":43249,"RAFT":43250,"ï":43251,"ĠGentleman":43252,"Multiple":43253,"ĠPsychiatric":43254,"Ġdespise":43255,"ĠZionism":43256,"ĠSSL":43257,"shit":43258,"Ġthreaded":43259,"Ġartifact":43260,"Ġmitochondrial":43261,"ĠLayer":43262,"inus":43263,"podcast":43264,"Ġawaken":43265,"Management":43266,"Ġdelusions":43267,"grey":43268,"Ġpseud":43269,"agonal":43270,"ĠHirosh":43271,"Georg":43272,"Dragon":43273,"Stack":43274,"ohm":43275,"Ġvener":43276,"Row":43277,"Ġsandbox":43278,"Ġblinding":43279,"razen":43280,"Ġ389":43281,"Ġcrappy":43282,"Ġlith":43283,"antha":43284,"Ġplurality":43285,"ĠDAC":43286,"inently":43287,"intage":43288,"Ġ1902":43289,"ĠDepend":43290,"Ġelapsed":43291,"==":43292,"ĠGenie":43293,"Bush":43294,"ĠPlanetary":43295,"Bah":43296,"ĠKira":43297,"emn":43298,"Month":43299,"allic":43300,"coded":43301,"VOL":43302,"Ġ[...]":43303,"ĠRampage":43304,"Ġ(*":43305,"Production":43306,"licts":43307,"Ġinoc":43308,"Cour":43309,"Ġspurious":43310,"Ġultras":43311,"ggles":43312,"Ġdelusion":43313,"ĠRacer":43314,"ĠPrism":43315,"FH":43316,"uppet":43317,"Ġcultured":43318,"Ġ436":43319,"aneously":43320,"اÙĦ":43321,"ĠMissions":43322,"monton":43323,"criptions":43324,"ificate":43325,"Cause":43326,"Ġ1898":43327,"ocaust":43328,"Ġbri":43329,"ĠShoals":43330,"ommod":43331,"alted":43332,"ogenesis":43333,"warn":43334,"illus":43335,"vv":43336,"Ġcontam":43337,"ĠLesbian":43338,"Ġcavalry":43339,"ĠPresence":43340,"rehens":43341,"tool":43342,"accessible":43343,"Ġ(~":43344,"ĠLicensed":43345,"Ġprophets":43346,"Ġboulder":43347,"mean":43348,"akura":43349,"Ġunres":43350,"ĠCinnamon":43351,"Leaks":43352,"........................":43353,"Contact":43354,"Ġassassins":43355,"ĠGreenwald":43356,"dk":43357,"amazon":43358,"Ġagreeable":43359,"ernandez":43360,"Easy":43361,"PLA":43362,"ĠBigfoot":43363,"Ġconvent":43364,"Ġempires":43365,"Ġ387":43366,"Ġgrasped":43367,"Ġruby":43368,"Ġreconc":43369,"Warning":43370,"atem":43371,"Ġretrieval":43372,"ĠFDR":43373,"ĠReaper":43374,"orem":43375,"ĠLuo":43376,"hig":43377,"ĠArmor":43378,"tp":43379,"ĠInterpret":43380,"Conservative":43381,"ĠSodium":43382,"Ġbead":43383,"Ġpropagate":43384,"claw":43385,"href":43386,"ĠPaste":43387,"Ġomit":43388,"Boost":43389,"Diamond":43390,"goo":43391,"Ġanomal":43392,"ĠDISTRICT":43393,"Greek":43394,"warning":43395,"Ġdespised":43396,"Karl":43397,"AGES":43398,"Ġserotonin":43399,"ESSION":43400,"_______":43401,"ĠCollider":43402,"auldron":43403,"Ġsquee":43404,"Control":43405,"ffield":43406,"cycles":43407,"Legal":43408,"xa":43409,"minimum":43410,"ĠGeneric":43411,"Circ":43412,"·":43413,"Behind":43414,"guide":43415,"Ground":43416,"roying":43417,"ĠGrail":43418,"Ġthee":43419,"Ġ9000":43420,"Batman":43421,"Brother":43422,"Ġnons":43423,"RW":43424,"saf":43425,"ĠCroat":43426,"tainment":43427,"sci":43428,"Ye":43429,"Range":43430,"Ey":43431,"perature":43432,"ĠDracula":43433,"oreal":43434,"Fighting":43435,"Ġreleg":43436,"Ġcoupling":43437,"Tracker":43438,"tyard":43439,"Mut":43440,"Military":43441,"lamm":43442,"ittens":43443,"ĠCRC":43444,"ĠXiang":43445,"Ġorthodoxy":43446,"ĠGoth":43447,"Ġalgorith":43448,"ĠAthen":43449,"Ġtyrann":43450,"ĠTorrent":43451,"IDs":43452,"ĠGENERAL":43453,"ĠASUS":43454,"rastructure":43455,"Faith":43456,"models":43457,"rentices":43458,"ĠCurse":43459,"Ġcalibr":43460,"attled":43461,"monary":43462,"Ġpenet":43463,"aclysm":43464,"album":
43465,"Ġremnant":43466,"Ġfung":43467,"itiveness":43468,"thodox":43469,"Ġunlocks":43470,"Ġprobabilities":43471,"Ġster":43472,"Ġscrim":43473,"Ġanalytic":43474,"Urban":43475,"âĢĶâĢĶâĢĶâĢĶ":43476,"Craft":43477,"Ġbrut":43478,"1986":43479,"Section":43480,"raged":43481,"arij":43482,"Hero":43483,"ĠHebdo":43484,"ĠEmpress":43485,"Ġvivo":43486,"ĠPublications":43487,"Ġcannabinoids":43488,"arrett":43489,"Ġbounded":43490,"Ġquests":43491,"Ġomin":43492,"ĠRuler":43493,"ĠYue":43494,"ridges":43495,"Ġpeasants":43496,"ĠAlloy":43497,"Desk":43498,"ULAR":43499,"Ġthor":43500,"ĠOvers":43501,"ĠTome":43502,"mk":43503,"Ġ1050":43504,"Ġshroud":43505,"Ġdistribut":43506,"weapons":43507,"ĠAuthorization":43508,"ĠPoke":43509,"ĠAlternate":43510,"scan":43511,"artisan":43512,"ĠGems":43513,"ĠForums":43514,"atonin":43515,"viron":43516,"Rog":43517,"duct":43518,"Ġtabletop":43519,"crow":43520,"/)":43521,"ĠStainless":43522,"ottest":43523,"Ġreborn":43524,"anchez":43525,"cium":43526,"ĠNicarag":43527,"elfare":43528,"Ġupd":43529,"ritic":43530,"bm":43531,"Ġ608":43532,"ĠSlightly":43533,"ĠDrops":43534,"ISO":43535,"ĠiT":43536,"xiety":43537,"ĠGawker":43538,"omination":43539,"ĠReached":43540,"Student":43541,"Drop":43542,"MET":43543,"ĠKubrick":43544,"1950":43545,"ĠTuls":43546,"Ġcomputed":43547,"depending":43548,"ĠCosmetic":43549,"udget":43550,"Lex":43551,"icut":43552,"ĠDepth":43553,"Ġ1893":43554,"ahah":43555,"Ġath":43556,"fights":43557,"thia":43558,"Ġoccult":43559,"Wheel":43560,"ĠSega":43561,"Ġtheolog":43562,"reement":43563,")--":43564,"Ġunus":43565,"ĠGamma":43566,"Looks":43567,"Ġellipt":43568,"Ġairflow":43569,"ĠHimself":43570,"Ġpagan":43571,"ĠRei":43572,"Ġpilgr":43573,"ĠSubmission":43574,"Region":43575,"Ġinsertion":43576,"Ġsket":43577,"Ġsatisfies":43578,"ĠPixie":43579,"Ġcontempl":43580,"abbit":43581,"ĠReplay":43582,"ĠGalile":43583,"ĠGodzilla":43584,"Ġarithmetic":43585,"iasm":43586,"1987":43587,"ĠFeminist":43588,"Liter":43589,"ĠDisable":43590,"ouble":43591,"essors":43592,"Ġfors":43593,"Ġensu":43594,"Putting":43595,"ĠMSM":43596,"Cond":43597,"emade":43598,"Ġindistinguishable":43599,"Magn":43600,"Ġms":43601,"MAL":43602,"ĠBF":43603,"dm":43604,"iltration":43605,"irection":43606,"ĠSpir":43607,"Gb":43608,"ĠIbn":43609,"Abs":43610,"imens":43611,"RNA":43612,"============":43613,"Ġ655":43614,"ĠConversion":43615,"imilation":43616,"igion":43617,"ĠSomew":43618,"mL":43619,"Border":43620,"Ë":43621,"Factor":43622,"Number":43623,"Ġejac":43624,"Cho":43625,"Ġrighteousness":43626,"ĠPATH":43627,"ĠElys":43628,"ouched":43629,"Ġmultic":43630,"Ġfaculties":43631,"ĠEarthquake":43632,"ĠReferences":43633,"ensitive":43634,"Ġimpat":43635,"Ġ................":43636,"buff":43637,"Ġ1895":43638,"colo":43639,"Vi":43640,"Ġubiqu":43641,"ĠChev":43642,"Fish":43643,"ĠBlueprint":43644,"CHQ":43645,"Ġlinem":43646,"ĠFlavor":43647,"Ġcrimson":43648,"ĠAbstract":43649,"arette":43650,"plete":43651,"ranean":43652,"Dash":43653,"Ġdimensional":43654,"Cub":43655,"ttle":43656,"ĠDSM":43657,"Ġinstantaneous":43658,"esy":43659,"Ġepoch":43660,"Brit":43661,"ĠÎ":43662,"ECD":43663,"Ġwarp":43664,"obyl":43665,"ubric":43666,"Ġutilitarian":43667,"Ġsummarizes":43668,"letal":43669,"Ord":43670,"opath":43671,"tained":43672,"ghai":43673,"Ġwhis":43674,"insert":43675,"Ġphon":43676,"rils":43677,"Ġearthly":43678,"ĠAlic":43679,"ĠPCIe":43680,"Ġfurthermore":43681,"ocard":43682,"Ġuter":43683,"ĠAdmin":43684,"ographics":43685,"ĠConstantin":43686,"gravity":43687,"iPhone":43688,"Ġwasteland":43689,"Ġfps":43690,"Tip":43691,"Ġmurm":43692,"paces":43693,"ĠSamurai":43694,"ĠFOIA":43695,"ĠRadiant":43696,"ĠUnreal":43697,"Ġmicrow":43698,"us
terity":43699,"zyme":43700,"itbart":43701,"metadata":43702,"Dat":43703,"ĠMoons":43704,"ĠProtestants":43705,"ungle":43706,"Ġvideog":43707,"pid":43708,"Ġdisple":43709,"aucus":43710,"Ġcoils":43711,"ĠDwar":43712,"fixed":43713,"Alice":43714,"Ġgarrison":43715,"ĠVelocity":43716,"ĠJehovah":43717,"Ġfascists":43718,"ĠCHO":43719,"jl":43720,"Ġmetaphors":43721,"ĠSiege":43722,"scientific":43723,"Ä«":43724,"Slow":43725,"hex":43726,"ĠBlaz":43727,"mediated":43728,"esthesia":43729,"ĠAvg":43730,"Ġbelie":43731,"Carter":43732,"Ġexposition":43733,"azeera":43734,"dial":43735,"Ġbask":43736,"Scale":43737,"Ġdisob":43738,"Ġgore":43739,"Ġhypocr":43740,"Ġphantom":43741,"ĠSynd":43742,"BLIC":43743,"pter":43744,"ĠScorpion":43745,"eor":43746,"ĠRecover":43747,"Ġsummoning":43748,"Ġorb":43749,"jump":43750,"Ġ768":43751,"ĠEnix":43752,"Spons":43753,",...":43754,"Wide":43755,"Ġparse":43756,"Ġdebtor":43757,"Ġpathological":43758,"Ġserpent":43759,"ĠFranç":43760,"reetings":43761,"Ġdeletion":43762,"Ġvolunt":43763,"ĠNotification":43764,"liga":43765,"Disk":43766,"Account":43767,"1979":43768,"Ġsymmetry":43769,"ĠBearing":43770,"ĠABV":43771,"ĠORDER":43772,"rpm":43773,"ĠFuck":43774,"?!\"":43775,"mask":43776,"Grade":43777,"neath":43778,"ocom":43779,"Detect":43780,"ryption":43781,"ĠAura":43782,"Ġinert":43783,"PLAY":43784,"gres":43785,"INTON":43786,"Deal":43787,"fficient":43788,"ĠVoid":43789,"gement":43790,"Ġscorp":43791,"Ġreincarn":43792,"ĠVapor":43793,"Ġ1840":43794,"Yellow":43795,"......":43796,"Ġparameter":43797,"ĠDISTR":43798,"ĠForgotten":43799,"Eat":43800,"izational":43801,"Witness":43802,"ĠDupl":43803,"Ġdogma":43804,"Ġzipper":43805,"ĠZeus":43806,"mage":43807,"ormal":43808,"Ġ\".":43809,"Ġecc":43810,"ĠSlot":43811,"ĠRegist":43812,"Others":43813,"VID":43814,"Windows":43815,"Ġshitty":43816,"ĠLethal":43817,"Monster":43818,"ĠExpression":43819,"tx":43820,"ythm":43821,"Were":43822,"ivalry":43823,"atcher":43824,"ĠFormat":43825,"ĠPlasma":43826,"Phys":43827,"laugh":43828,"Fu":43829,"java":43830,"roma":43831,"ĠIncreases":43832,"Ġlicensee":43833,"Ġmystic":43834,"Ġproto":43835,"ĠLoki":43836,"forcing":43837,"hots":43838,"Ġ->":43839,"Outside":43840,"ĠEndless":43841,"Ġachie":43842,"ĠTurtles":43843,"Ġconvin":43844,"JUST":43845,"Ġimmobil":43846,"ĠCauses":43847,"Ġclich":43848,"xes":43849,"ffiti":43850,"Ġhypot":43851,"Bat":43852,"Ġbigot":43853,"Personal":43854,"ĠPharmac":43855,"Lot":43856,"VERT":43857,"Ġbapt":43858,"idelines":43859,"Ġprox":43860,"MAP":43861,"Spirit":43862,"ĠSlug":43863,"Ġebook":43864,"eches":43865,"ĠAndromeda":43866,"Ġceremon":43867,"1975":43868,"PRE":43869,"Ġasshole":43870,"linear":43871,"Nevertheless":43872,"Ġwillpower":43873,"azel":43874,"Fif":43875,"andise":43876,"Ġextravag":43877,"ĠBuffy":43878,"Ġcorrelations":43879,"ptr":43880,"Progress":43881,"shape":43882,"ĠSymbol":43883,"arag":43884,"ĠContext":43885,"ucer":43886,"1983":43887,"ĠMyster":43888,"Pain":43889,"Login":43890,"mbol":43891,"codes":43892,"RANT":43893,"Ġoverse":43894,"opot":43895,"STEM":43896,"enser":43897,"ĠCosmic":43898,"Spl":43899,"ritional":43900,"ĠPharaoh":43901,"ĠRemix":43902,"xon":43903,"ĠXII":43904,"Ġunman":43905,"Ġimmedi":43906,"Ġmonog":43907,"ĠLX":43908,"Ġabstraction":43909,"ocolate":43910,"ĠDonkey":43911,"Ġ!!":43912,"ĠLIA":43913,"shed":43914,"rules":43915,"Ġcalc":43916,"ĠAutob":43917,"anmar":43918,"eworks":43919,"notations":43920,"Ġtenancy":43921,"ĠPetraeus":43922,"dp":43923,"amphetamine":43924,"ĠCortex":43925,"rw":43926,"Ġprojectile":43927,"Ġintrinsically":43928,"Route":43929,"Ġnegoti":43930,"anuts":43931,"Analysis":43932,"redits":43933,"ĠGG":43934,"thread":43935,"ĠC
hosen":43936,"Years":43937,"otyp":43938,"ĠNCT":43939,"udic":43940,"ochemical":43941,"Neigh":43942,"Ġfishes":43943,"ĠFloat":43944,"Print":43945,"okia":43946,"Ġbarb":43947,"quote":43948,"Lew":43949,"Ġannoun":43950,"istors":43951,"Reading":43952,"ACTION":43953,"Ġintakes":43954,"ĠBeet":43955,"matter":43956,"Swe":43957,"Ther":43958,"Ġtyrant":43959,"ĠPsycho":43960,"ĠDestroy":43961,"Ġesoteric":43962,"Ġbiom":43963,"idious":43964,"Merc":43965,"hran":43966,"ĠBaal":43967,"seconds":43968,"Ġsuperhuman":43969,"ancel":43970,"Ġworshipped":43971,"Ġwebs":43972,"Ġviolet":43973,"ĠMetallic":43974,"eday":43975,"ordering":43976,"Nut":43977,"Ġconstructs":43978,"olescent":43979,"Unit":43980,"otypes":43981,"Ġembryonic":43982,"perm":43983,"Nature":43984,"ĠDecre":43985,"levant":43986,"Ġss":43987,"+(":43988,"ĠDoctrine":43989,"puters":43990,"Ġsaline":43991,"orsche":43992,"1111":43993,"values":43994,"Ġutopian":43995,"ĠBooster":43996,"Technical":43997,"ì":43998,"ĠLIMITED":43999,"nir":44000,"Ġclones":44001,"Performance":44002,"aple":44003,"Ġshudder":44004,"Ġcontempor":44005,"lator":44006,"ĠOops":44007,"Ġammon":44008,"Ġdavid":44009,"Ġbom":44010,"bish":44011,"Ġdetectable":44012,"Ġmultiplying":44013,"Ġreddit":44014,"Prim":44015,"Ġmedial":44016,"Ġsubstrate":44017,"ĠSanskrit":44018,"Spect":44019,"ĠMagical":44020,"Ġarcane":44021,"align":44022,"Ġ1861":44023,"Ġneocons":44024,"Ì":44025,"ĠBounty":44026,"ĠContinent":44027,"Ġhurd":44028,"alions":44029,"Ġgeneralized":44030,"ĠInsect":44031,"Ġsimul":44032,"actual":44033,"advert":44034,"ukong":44035,"Resp":44036,"ĠWarcraft":44037,"Hunter":44038,"hyper":44039,"ĠBreach":44040,"ught":44041,"Ġcomputation":44042,"react":44043,"Feel":44044,"ĠCheong":44045,"Ġslut":44046,"Ġgalactic":44047,"Ġtaunt":44048,"Enjoy":44049,"Ġreprinted":44050,"Word":44051,"ĠHandbook":44052,"amins":44053,"exit":44054,"Wo":44055,"Ġadherents":44056,"Counter":44057,"ĠNode":44058,"ĠTwisted":44059,"Ġgrinned":44060,"universal":44061,"ĠAmon":44062,"Ġaster":44063,"ĠEquip":44064,"!\".":44065,"Ġanalogous":44066,"rients":44067,"alky":44068,"ĠQian":44069,"Ġspont":44070,"docs":44071,"Ġcontemplation":44072,"Ġrevolutionaries":44073,"Ġpreset":44074,"ĠAmendments":44075,"Ġexecutes":44076,"ĠDuration":44077,"Ġcompulsion":44078,"Ġstagger":44079,"ynamic":44080,"blem":44081,"];":44082,"Higher":44083,"Balt":44084,"heast":44085,"Ġcorp":44086,"awei":44087,"Motion":44088,"Mis":44089,"Ġadventurer":44090,"eger":44091,"Ġarsen":44092,"ĠVoltage":44093,"ĠEVENTS":44094,"Salt":44095,"issance":44096,"DK":44097,"Ship":44098,"Ġunwitting":44099,"Ton":44100,"ĠPROGRAM":44101,"Ġtentacles":44102,"erness":44103,"thirst":44104,"Fig":44105,"fty":44106,"ĠTolkien":44107,"Sleep":44108,"ĠExplain":44109,"Pub":44110,"ĠBounce":44111,"ĠDemo":44112,"Ġ1897":44113,"ĠSPI":44114,"intern":44115,"********":44116,"ĠKills":44117,"ĠZombies":44118,"Single":44119,"ratom":44120,"ĠClaw":44121,"hid":44122,"asel":44123,"Shock":44124,"erential":44125,"Ġupgr":44126,"holy":44127,"Ġ\\":44128,"aghetti":44129,"Ġthence":44130,"genic":44131,"papers":44132,"1982":44133,"ravel":44134,"ĠUNIVERS":44135,"Charge":44136,"ĠDelay":44137,"ibrary":44138,"ĠHDD":44139,"olson":44140,"Ġenchanted":44141,"Wr":44142,"graph":44143,"Ġcorro":44144,"ept":44145,"etsu":44146,"ĠQin":44147,"Û":44148,"Ġantidepressant":44149,"ĠCerberus":44150,"Ġappe":44151,"ĠDEFENSE":44152,"Ġdysph":44153,"split":44154,"zilla":44155,"attr":44156,"Clar":44157,"Äĵ":44158,"hov":44159,"IRC":44160,"hibition":44161,"'/":44162,"ĠURLs":44163,"Draft":44164,"Prep":44165,"ĠLanguages":44166,"ĠTravels":44167,"ceiver":44168,"aturally":44169,"pair":44170,"Ġ
ALWAYS":44171,"aaaa":44172,"ĠTenth":44173,"ĠNAD":44174,"Serv":44175,"ĠUID":44176,"cens":44177,"ĠLearned":44178,"Ġtraject":44179,"Ġmoaning":44180,"ĠNare":44181,"Ġingen":44182,"Ġsurn":44183,"Ġfloppy":44184,"breeding":44185,"uph":44186,"rossover":44187,"Understanding":44188,"Glass":44189,"Ġruntime":44190,"gp":44191,"Ġâľĵ":44192,"Ġcyt":44193,"bley":44194,"agall":44195,"Ġunworthy":44196,"otine":44197,"Ġchromosome":44198,"utters":44199,"Ġµ":44200,"Ġexpans":44201,"Ġdement":44202,"Ġinsurrection":44203,"Ġsurviv":44204,"genre":44205,"ospital":44206,"ĠPlato":44207,"ĠTrigger":44208,"selection":44209,"ilege":44210,"Ġsegreg":44211,"itizens":44212,"ĠRAID":44213,"Pure":44214,"hetti":44215,"ĠFailed":44216,"ĠCharacters":44217,"ĠCreep":44218,"akra":44219,"Ec":44220,"ĠAristotle":44221,"Lim":44222,"error":44223,"yrus":44224,"umably":44225,">>":44226,"Ġtsun":44227,"knowledge":44228,"Cert":44229,"bable":44230,"hesion":44231,"ĠProcedures":44232,"Ġmarkup":44233,"ideo":44234,"Ġrhet":44235,"ĠChapters":44236,"ĠChecking":44237,"mega":44238,"Ġphotons":44239,"required":44240,"Unknown":44241,"ĠDrawn":44242,"Ġvari":44243,"EEK":44244,"Ġcompuls":44245,"Ġcloning":44246,"ccoli":44247,"Ġ1070":44248,"Ġkindred":44249,"Ġdiscl":44250,"ĠCind":44251,"Collect":44252,"Ġchromosomes":44253,"phant":44254,"ĠKafka":44255,"Ġeverlasting":44256,"Ġmercenary":44257,"ĠHmm":44258,"----":44259,"riber":44260,"Ġdoubtless":44261,"Ġsusceptibility":44262,"beta":44263,"notice":44264,"Ġcrochet":44265,"Ġrespir":44266,"Ġphilosophers":44267,"ĠExtras":44268,"Ġseparat":44269,"shown":44270,"iblings":44271,"Hispanic":44272,"copy":44273,"Tang":44274,"Knight":44275,"Ġpursu":44276,"ĠAnime":44277,"Ġlipid":44278,"ggies":44279,"levels":44280,"phalt":44281,"ĠCompleted":44282,"bral":44283,"Ġcerv":44284,"ĠAfric":44285,"ĠPhar":44286,"Color":44287,"ogene":44288,"ĠCompan":44289,"memory":44290,"Dust":44291,"ĠXIV":44292,"ĠConsole":44293,"').":44294,"Ġ1888":44295,"byn":44296,"Ġpolygamy":44297,"Auth":44298,"BUT":44299,"istine":44300,"Ġsacr":44301,"Ġabsor":44302,"ijah":44303,"ĠNeural":44304,"olester":44305,"ql":44306,"Already":44307,"Creating":44308,"ĠStarg":44309,"ĠPhilos":44310,"Consider":44311,"Ġrepositories":44312,"cludes":44313,"ĠBuffer":44314,"ĠPerspect":44315,"Ġcomput":44316,"Stew":44317,"iamond":44318,"ĠJudgment":44319,"OVA":44320,"angible":44321,"Ġoxid":44322,"Ġepigen":44323,"Ġsidel":44324,"ĠEag":44325,"devices":44326,"icone":44327,"1920":44328,"atism":44329,"beard":44330,"ĠGujar":44331,"ĠPlaystation":44332,"Ġglances":44333,"ĠCOMPLE":44334,"VERTIS":44335,"ukemia":44336,"Edit":44337,"Tickets":44338,"Square":44339,"ĠSerpent":44340,"Ġtransporter":44341,"MQ":44342,"ĠMongo":44343,"1967":44344,"ibaba":44345,"Ġtimet":44346,"sylvania":44347,"Latin":44348,"osaurs":44349,"Ġhumanoid":44350,"Ġcannabinoid":44351,"Ġdisciple":44352,"Psych":44353,"Ġimpro":44354,"Ġmc":44355,"Raid":44356,"Letter":44357,"ificant":44358,"ĠPortug":44359,"ĠFreem":44360,"Ġappell":44361,"ĠMushroom":44362,"Ġclans":44363,"Ġsinful":44364,"Ġingestion":44365,"ĠDirectory":44366,"abetic":44367,"Ġantigen":44368,"Ġimagin":44369,"mitter":44370,"!!!!!":44371,"ĠDPR":44372,"leness":44373,"\":\"\",\"":44374,"ĠAUTHOR":44375,"Ġgrunt":44376,"Ġflickering":44377,"Cath":44378,"asury":44379,"Ġnozzle":44380,"Secure":44381,"Stre":44382,"ĠBIT":44383,"Ġdeviations":44384,"Professor":44385,"bilt":44386,"ĠConscious":44387,"Ġinterrupts":44388,"ĠMormons":44389,"ĠCutter":44390,"Bed":44391,"ipient":44392,"ĠGhostbusters":44393,"Cart":44394,"endas":44395,"ĠExecution":44396,"ycle":44397,"Ġwedd":44398,"Sold":44399,"Ġvanquished":44400,"Regarding"
:44401,"Depending":44402,"']":44403,"atron":44404,"oidal":44405,"Cube":44406,"Studio":44407,":/":44408,"ĠExplosion":44409,"activate":44410,"pport":44411,"fuck":44412,"Whe":44413,"Ġsmir":44414,"Ġwidgets":44415,"urses":44416,"izard":44417,")*":44418,"icho":44419,"ĠVersus":44420,"ĠIntroduced":44421,"osaurus":44422,"1977":44423,"forum":44424,"Gray":44425,"Program":44426,"righteous":44427,"endum":44428,"ĠScare":44429,"Ġresists":44430,"*)":44431,"ĠCombo":44432,"Ġsockets":44433,"Ġaston":44434,"LAB":44435,"Ġmutated":44436,"eworld":44437,"DEF":44438,"Trend":44439,"âĢĶ-":44440,"Ġpropagation":44441,"Ġemancipation":44442,"collection":44443,"ĠDifferences":44444,"Tweet":44445,"Ġmajesty":44446,")...":44447,"sylv":44448,"Ġadapters":44449,"Ġmilliseconds":44450,"Jews":44451,"ĠPatreon":44452,"phasis":44453,"ĠHTTP":44454,"onnaissance":44455,"ENDED":44456,"ĠIntro":44457,"qs":44458,"Ġsuperflu":44459,"*.":44460,"Ġminions":44461,"ĠStupid":44462,"Ġspecialization":44463,"ĠPikachu":44464,"Ġappellant":44465,"Training":44466,"circle":44467,"Interest":44468,"Ġfallacy":44469,"ĠDinosaur":44470,"ĠTHEM":44471,"Ġdirectories":44472,"Ġmasturbation":44473,"ĠStain":44474,"1978":44475,"odied":44476,"Ġexqu":44477,"ĠRats":44478,"swick":44479,"Ġemptiness":44480,"ĠXeon":44481,"Ġthereto":44482,"ĠEngels":44483,"ĠSupplement":44484,"Chan":44485,"Ġundead":44486,"ĠNoct":44487,"erest":44488,"ĠQuery":44489,"ĠSOLD":44490,"thritis":44491,"ĠEncounter":44492,"Ġvectors":44493,"Econom":44494,"Rogue":44495,"Ġgelatin":44496,"Rot":44497,"Flickr":44498,"Ġcaching":44499,"Ġloader":44500,"ĠELE":44501,"Ġcamoufl":44502,"Commission":44503,"Ġ1886":44504,"Ġcombos":44505,"ĠAwakening":44506,"Ġfeudal":44507,"Ġasses":44508,"ASY":44509,"atalie":44510,"Ġpanties":44511,"ĠMono":44512,"selves":44513,"Download":44514,"Ġvampires":44515,"------":44516,"ishop":44517,"User":44518,"Ġimperialist":44519,"ĠGOODMAN":44520,"1973":44521,"Vel":44522,"Struct":44523,"ĠUFOs":44524,"drivers":44525,"ĠOptional":44526,"uably":44527,"ĠPrinciple":44528,"verett":44529,"taining":44530,"Ġ1889":44531,"ĠCommunism":44532,"auder":44533,"Keys":44534,"lore":44535,"ĠMedieval":44536,"Hyd":44537,"weapon":44538,"Register":44539,"ĠHighlander":44540,"ĠRFC":44541,"Demon":44542,"ardless":44543,"ĠOrche":44544,"Kick":44545,"pixel":44546,"address":44547,"OUP":44548,"Brain":44549,"ĠMorph":44550,"bash":44551,"ĠANG":44552,"ĠIdle":44553,"ĠLucifer":44554,"Ġcorrelates":44555,"Ġgazed":44556,"colm":44557,"ĠKard":44558,"Solar":44559,"ĠVariable":44560,"ĠPACK":44561,"Ġfuzz":44562,"Ġanonym":44563,"ĠECO":44564,"feature":44565,"ĠEsports":44566,"ĠAnthropology":44567,"cise":44568,"manac":44569,"ĠSupports":44570,"rists":44571,"Quant":44572,"istical":44573,"çļĦ":44574,"Ġdexterity":44575,"monster":44576,"ordial":44577,"Mob":44578,"DEC":44579,"ĠConj":44580,"entric":44581,"1981":44582,"ECTION":44583,"ietal":44584,"ĠUses":44585,"ĠArmageddon":44586,"ĠCapitalism":44587,"Ub":44588,"iazep":44589,"helps":44590,"ouls":44591,"grim":44592,"ĠEthiop":44593,"tesy":44594,"Ġclipboard":44595,"Ġchimpanzees":44596,"PLIC":44597,"Sexual":44598,"wallet":44599,"ĠRect":44600,"ocytes":44601,"ĠHels":44602,"lace":44603,"Damn":44604,"Ġblasp":44605,"ildo":44606,"ĠRober":44607,"APD":44608,"ĠWCS":44609,"ippery":44610,"ellectual":44611,"Ġ$(":44612,"Ġuniverses":44613,"Ġholster":44614,"Ġshading":44615,"Ġinflic":44616,"else":44617,"ĠShiny":44618,"ĠAVG":44619,"Lower":44620,"ĠMayhem":44621,"Originally":44622,"Crypt":44623,"SHARE":44624,"ĠBeir":44625,"!:":44626,"Ġrepentance":44627,"WHAT":44628,".......":44629,"Ġauditory":44630,"aaa":44631,"ĠLoot":44632,"ciples":446
33,"Ġcontem":44634,"Ġphoton":44635,"æľ":44636,"omach":44637,"ĠWhedon":44638,"ĠValid":44639,"asonable":44640,"pha":44641,"assad":44642,"ĠPse":44643,"Heat":44644,"Ġplugins":44645,"Ġclenched":44646,"ĠAmeric":44647,"transform":44648,"ĠEnh":44649,"agnetic":44650,"usalem":44651,"sych":44652,"Wed":44653,"replace":44654,"ĠKinect":44655,"shield":44656,"Sax":44657,"ividually":44658,"Ġfunctionally":44659,"Ġ:)":44660,"typically":44661,"Opening":44662,"Fa":44663,"ĠSELECT":44664,"Ġsamurai":44665,"Ġhorde":44666,"entle":44667,"sth":44668,"Changes":44669,"Pin":44670,"ithing":44671,"illance":44672,"ĠEmblem":44673,"ĠMicha":44674,"crypt":44675,"ĠObjective":44676,"ophys":44677,"Ġavg":44678,"poon":44679,"Ġreadable":44680,"ĠRx":44681,"allel":44682,"Sit":44683,"gom":44684,"ureau":44685,"ĠDoodle":44686,"Ġdungeon":44687,"($":44688,"Nintendo":44689,"\"],\"":44690,"Notes":44691,"Grab":44692,"Prosecutors":44693,"Advanced":44694,"Ġ1862":44695,"ĠVeter":44696,"Ġjurisd":44697,"ĠLauncher":44698,"Catal":44699,"udder":44700,"Ġresidues":44701,"Ġregress":44702,"ĠConquer":44703,"osal":44704,"ĠDice":44705,"************":44706,"braska":44707,"ipolar":44708,"Ġathe":44709,"bringing":44710,"Suddenly":44711,"ĠIEEE":44712,"verbs":44713,"Ġdelet":44714,"ipeg":44715,"Previous":44716,"]\"":44717,"Ġsidebar":44718,"illac":44719,"Property":44720,"α":44721,"REP":44722,"Ġauthenticated":44723,"gypt":44724,"uilding":44725,"ĠGing":44726,"Ġwart":44727,"Birth":44728,"Ġobedient":44729,"ĠXuan":44730,"ĠTYPE":44731,"Ġinhibits":44732,"1972":44733,"humans":44734,"IENT":44735,"Ġyoutube":44736,"Shortly":44737,"ophen":44738,"ĠWinc":44739,"ĠWrit":44740,"AUD":44741,"ĠHobbit":44742,"emphasis":44743,"ĠWonders":44744,"Ġtwitch":44745,"ĠProphe":44746,"Berry":44747,"ĠGinny":44748,"ĠBurst":44749,"ĠGenerator":44750,"Ġepile":44751,"ĠBalanced":44752,"GPU":44753,"maps":44754,"Ġneurotrans":44755,"ĠIRC":44756,"Ġ\"$":44757,"Create":44758,"Particip":44759,"ĠMarxism":44760,"Ġthou":44761,"ĠMortal":44762,"Ġ�":44763,"Ġninja":44764,"inburgh":44765,"Ġappro":44766,"ĠPistol":44767,"Jar":44768,"Ġprophes":44769,"classes":44770,"Ġanarchist":44771,"Ġextant":44772,"message":44773,"itaire":44774,"Ġ1863":44775,"ĠProl":44776,"Ġpropell":44777,"Ġimpossibility":44778,"Ġpropos":44779,"itamin":44780,"Rating":44781,"olphin":44782,"Ġmitochond":44783,"versions":44784,"Liberal":44785,"ishy":44786,"Ġspherical":44787,"ĠSurvive":44788,"FREE":44789,"rawler":44790,"Metal":44791,"ĠStarship":44792,"Ġ=================================================================":44793,"ĠDharma":44794,"ĠSeller":44795,"Ġwrapper":44796,"Experience":44797,"Integ":44798,"Customer":44799,"hammad":44800,"Ġunanim":44801,"Jenn":44802,"Ġschizophren":44803,"agree":44804,"ĠEVENT":44805,"Shell":44806,"Ġfractions":44807,"1968":44808,"Ġextermination":44809,"ĠSniper":44810,"Ġpronoun":44811,"ĠHitman":44812,"xp":44813,"resource":44814,"WIND":44815,"Ġhierarchical":44816,"Ġted":44817,"Changing":44818,"Ġplaus":44819,"Transform":44820,"Ġbicy":44821,"imentary":44822,"Fuck":44823,"Mini":44824,"Ġoverc":44825,"ĠOptimus":44826,"outer":44827,"helial":44828,"akening":44829,"fx":44830,"Ġnig":44831,"Ġ+/-":44832,"ĠVICE":44833,"Ġnm":44834,"1976":44835,"ĠRitual":44836,"ĠTyrann":44837,"Ġscriptures":44838,"inical":44839,"ĠNull":44840,"ourgeois":44841,"dra":44842,"Ġpious":44843,"Ġneuron":44844,"Ġcolonists":44845,"ĠNebula":44846,"apply":44847,"Sah":44848,"Marx":44849,"Ġhypotheses":44850,"notation":44851,"acists":44852,"Math":44853,"Manager":44854,"Library":44855,"audi":44856,"Ġmp":44857,"ergic":44858,"Ġwizards":44859,"fw":44860,"DVD":44861,"ĠScala":44862,"D
ifferent":44863,"ampoo":44864,"ĠDread":44865,"abbage":44866,"Rus":44867,"ĠDumbledore":44868,"keleton":44869,"elsh":44870,"esian":44871,"ĠCorsair":44872,"Tier":44873,"ĠCelest":44874,"Ġnoun":44875,"Ġlucid":44876,"requisites":44877,"Ġgenus":44878,"Event":44879,"1974":44880,"ĠSatanic":44881,"iox":44882,"ĠHandle":44883,"ĠDestroyer":44884,"Ġinvocation":44885,"ĠXD":44886,"modified":44887,"Gam":44888,"ĠRPC":44889,"Ġsubsystem":44890,"Compared":44891,"odan":44892,"ĠPassive":44893,"ĠHelmet":44894,"nutrition":44895,"riction":44896,"HOW":44897,"Jess":44898,"Ġpiston":44899,"imately":44900,"Ġhypoc":44901,"ĠCelestial":44902,"MRI":44903,"Ġcompiler":44904,"ĠBadge":44905,"ĠRevelation":44906,"Ġintrig":44907,"Grad":44908,"ĠSPACE":44909,"Poly":44910,"ĠVul":44911,"Ġtrembling":44912,"Ġindepend":44913,"doctor":44914,"Certain":44915,"emet":44916,"Password":44917,"Ġgasped":44918,"Ġpronunciation":44919,"Fuel":44920,"ĠSPEC":44921,"assets":44922,"Extra":44923,"Ġformatting":44924,"Ġmods":44925,"\"!":44926,"akedown":44927,"Ġcircuitry":44928,"ĠTRUE":44929,"ĠVeil":44930,"Ġsighed":44931,"Charg":44932,"eals":44933,"Ġworkaround":44934,"Ġank":44935,"ĠScrolls":44936,"Ġdiffusion":44937,"Ġamps":44938,"ĠTempest":44939,"adata":44940,"Ġphenomen":44941,"Ġ???":44942,"Ġpopup":44943,"Ġinhibition":44944,"Ġaliases":44945,"erity":44946,"agraph":44947,"Jew":44948,"Ġbec":44949,"Classic":44950,"comment":44951,"usable":44952,"rodu":44953,"ĠEnlightenment":44954,"Ġinvis":44955,"Ġbiochemical":44956,"latest":44957,"ĠGMOs":44958,"ĠSocialism":44959,"Ġpollut":44960,"Ġeluc":44961,"Js":44962,"orthern":44963,"PDATED":44964,"alyses":44965,"Experts":44966,"Blog":44967,"ĠDemocr":44968,"etooth":44969,"pause":44970,"âĢ¢âĢ¢":44971,"ĠShinji":44972,"Ġdystop":44973,"Sources":44974,"ĠBrach":44975,"np":44976,"ĠXY":44977,"Ġneurot":44978,"assembly":44979,"Ġbourgeois":44980,"ĠReson":44981,"ĠIDE":44982,"Ġrecoil":44983,"raq":44984,"ĠAvenger":44985,"Paper":44986,"UTF":44987,"ĠWrest":44988,"ĠSimulation":44989,"elaide":44990,"ĠDMCA":44991,"utm":44992,"1963":44993,"Ġarcs":44994,"Ġmaximal":44995,"Ġcyl":44996,"Ġphilosoph":44997,"enium":44998,"Ġrelativity":44999,"ĠMacintosh":45000,"Ġpneum":45001,"LOC":45002,"Ġgoddamn":45003,"SHA":45004,"Ġlocalization":45005,"ĠPHI":45006,"Ġhierarch":45007,"Ġatheists":45008,"±":45009,"Luck":45010,"ĠJugg":45011,"options":45012,"alore":45013,"Edward":45014,"Monitor":45015,"Ġneoc":45016,"numbered":45017,"Arc":45018,"ĠCodes":45019,"ĠHallow":45020,"olitan":45021,"sections":45022,"ĠEzek":45023,"Ġamy":45024,"task":45025,"ĠCLS":45026,"ĠValkyrie":45027,"Ġcircumference":45028,"amac":45029,"ĠNotting":45030,"Ġproverb":45031,"Spec":45032,"Ġelemental":45033,"ĠBitcoins":45034,"Except":45035,"Release":45036,"ADVERTISEMENT":45037,"Complete":45038,"phrine":45039,"Ġspores":45040,"random":45041,"neum":45042,"trigger":45043,"ocide":45044,"Ġlongitudinal":45045,"isec":45046,"peat":45047,"Ġprecept":45048,"Wing":45049,"ĠâĹ":45050,"otropic":45051,"mouse":45052,"ĠWitcher":45053,"ĠAppearance":45054,"ROR":45055,"Ġ||":45056,"aird":45057,"Blu":45058,"Ġincomp":45059,"ĠFirefly":45060,"update":45061,"Loc":45062,"Ġnihil":45063,"hesive":45064,"Quality":45065,"youtu":45066,"Seriously":45067,"Ġannot":45068,"ĠCoins":45069,"Visit":45070,"lc":45071,"----------":45072,"Ġdiction":45073,"Ġafore":45074,"Ġimmortality":45075,"ĠForbidden":45076,"Allah":45077,"ĠPartial":45078,"ĠGears":45079,"Ġtrance":45080,"Hat":45081,"irez":45082,"ĠSATA":45083,"Ġelectrode":45084,"ĠLinear":45085,"rikes":45086,"Ġderiv":45087,"ĠXue":45088,"Fine":45089,"ĠIgnore":45090,"desc":45091,"DOM":45092,"Simple":45093,"ores
cence":45094,"Previously":45095,"Ġcircumcision":45096,"Sphere":45097,"Ġrenown":45098,"SET":45099,"ilight":45100,"ĠByzantine":45101,"EXP":45102,"Ġwhine":45103,"Missing":45104,"Lt":45105,"Guide":45106,"Ġhippocampus":45107,"Ġwip":45108,"yrights":45109,"Ġsubmer":45110,"Maker":45111,"Switch":45112,"Ġspectral":45113,"nect":45114,"Ãį":45115,"Ġreven":45116,"WER":45117,"Adding":45118,"ĠCONTROL":45119,"asper":45120,"0000000":45121,"ynt":45122,"annabin":45123,"ĠAliens":45124,"ĠPCR":45125,"asketball":45126,"ricia":45127,"ĠUnch":45128,"Tap":45129,"Ġpracticable":45130,"ĠUsage":45131,"Ġsoluble":45132,"Scroll":45133,"Random":45134,"Ġmoan":45135,"ĠPuppet":45136,"Dim":45137,"Attack":45138,"Ġspears":45139,"Ġrectangle":45140,"Ġamuse":45141,"ĠDoct":45142,"reon":45143,"ĠReset":45144,"vag":45145,"unin":45146,"ĠBris":45147,"ĠSwarm":45148,"Model":45149,"Standing":45150,"Ġdenotes":45151,"{":45152,"ĠLizard":45153,"nesty":45154,"Ġwor":45155,"Ġamplification":45156,"ĠInferno":45157,"Cover":45158,"SAM":45159,"respective":45160,"Shift":45161,"Ġlibertarians":45162,"Runner":45163,"ĠRevelations":45164,"Spr":45165,"ĠCrusader":45166,"Ġcaffe":45167,"Patch":45168,"stros":45169,"ĠImmortal":45170,"Ġinsofar":45171,"itance":45172,"ĠValhalla":45173,"Ġradial":45174,"Beast":45175,"sync":45176,"Ġ--------":45177,"ĠPathfinder":45178,"iless":45179,"operator":45180,"Choose":45181,"Ġdecode":45182,"Ġvou":45183,"ĠMutant":45184,"ĠCVE":45185,"Female":45186,"Ġoxidation":45187,"inational":45188,"dB":45189,"Scope":45190,"Wan":45191,"ĠBought":45192,"ĠDietary":45193,"rotein":45194,"Present":45195,"aukee":45196,"Ġtotem":45197,"Ġsatur":45198,"wagon":45199,"Builder":45200,"ĠBulg":45201,"Ġsects":45202,"Flo":45203,"ombat":45204,"ĠHermione":45205,"aughs":45206,"Ġhydra":45207,"paren":45208,"ë":45209,"Whereas":45210,"tsky":45211,"Ġchall":45212,"WORK":45213,"opian":45214,"rican":45215,"vati":45216,"ĠHTTPS":45217,"Ġwrink":45218,"Ġthrob":45219,"habi":45220,"Ġiodine":45221,"omorph":45222,"ĠScion":45223,"Hunt":45224,"Written":45225,"iosity":45226,"ĠBrowser":45227,"Ġsinners":45228,"culosis":45229,"Ġunconsciously":45230,"0100":45231,"Ġanarchists":45232,"Pull":45233,"FFER":45234,"Ġpandemonium":45235,"matically":45236,"Rush":45237,"Ġpurified":45238,"ĠCyan":45239,"ĠDifficulty":45240,"«":45241,"Aside":45242,"oggles":45243,"untu":45244,"iege":45245,"iberal":45246,"ĠCOUR":45247,"eteenth":45248,"weeney":45249,"biased":45250,"ĠDecay":45251,"quart":45252,"alysis":45253,"Ġstere":45254,"ellect":45255,"Ġkernels":45256,"juven":45257,"ĠJPEG":45258,"indal":45259,"topic":45260,"Ġidentifier":45261,"åı":45262,"Ġepid":45263,"1969":45264,"Ġpoisons":45265,"sym":45266,"mop":45267,"LOCK":45268,"axe":45269,"cohol":45270,"ctory":45271,"Ġadject":45272,"Skin":45273,"ĠFract":45274,"ĠSHAR":45275,"echo":45276,"thood":45277,"Ġencoding":45278,"Ġrelational":45279,"Len":45280,"Bone":45281,"agara":45282,"uggish":45283,"ĠTanks":45284,"Stats":45285,"lihood":45286,"Mult":45287,"Graph":45288,"ĠCannot":45289,"ĠSpac":45290,"handler":45291,"ĠShit":45292,"Ġmorp":45293,"controller":45294,"udeau":45295,"Screenshot":45296,"Development":45297,"Gear":45298,"Ġtong":45299,"ĠColossus":45300,"rylic":45301,"STRUCT":45302,"capitalist":45303,"Ġsupplementation":45304,"Parts":45305,"pb":45306,"oppy":45307,"pite":45308,"processor":45309,"Ġexplanatory":45310,"Environmental":45311,"Compl":45312,"Gaming":45313,"arently":45314,"Ġconcess":45315,"Ġathlet":45316,"forestation":45317,"orsi":45318,"igmat":45319,"Ġencoded":45320,"misc":45321,"Ġproofs":45322,"ĠRevision":45323,"Ġmathematic":45324,"Ġconstitu":45325,"fficiency":45326,"Ġ
lightsaber":45327,"gz":45328,"erate":45329,"ournals":45330,"Comment":45331,"Ġpercept":45332,".\"[":45333,"ĠTechniques":45334,"coins":45335,"Shape":45336,"venant":45337,"ĠPrinted":45338,"Native":45339,"ĠGors":45340,"pecting":45341,"ĠDuel":45342,"Ġadmins":45343,"Flor":45344,"ĠDeus":45345,"cham":45346,"ĠRails":45347,"ceptor":45348,"naire":45349,"ĠSquid":45350,"ĠWarranty":45351,"SPEC":45352,"ensis":45353,"FUN":45354,"stellar":45355,"Select":45356,"llular":45357,"arget":45358,"ĠUncharted":45359,"Details":45360,"rison":45361,"Ġsyntax":45362,"chanted":45363,"Ġ-----":45364,"Ġthats":45365,"Registration":45366,"ĠSaber":45367,"ethical":45368,"Ġcryptography":45369,"atown":45370,"Ġdependencies":45371,"nw":45372,"Ġvehement":45373,"Ġrationality":45374,"ĠThou":45375,"Ġ----":45376,"rador":45377,"Ġenh":45378,"ĠCrate":45379,"STATE":45380,"/(":45381,"Ġdelim":45382,"CEPT":45383,"monkey":45384,"pai":45385,"uracy":45386,"Ġmortals":45387,"Sanders":45388,"ĠSeraph":45389,"-\"":45390,"1945":45391,"endix":45392,":'":45393,"ĠLegs":45394,"Exper":45395,"ĠKrypt":45396,"clinton":45397,"Ġuphe":45398,"Vers":45399,"Similarly":45400,"ressor":45401,"leans":45402,"LOG":45403,"cific":45404,"Ġ].":45405,"-)":45406,"resist":45407,"Pred":45408,"Latest":45409,"ilyn":45410,"Ġblob":45411,"Ġdevils":45412,"ĠIllusion":45413,"erella":45414,"Ġyak":45415,"method":45416,"Ġ698":45417,"Shadow":45418,"velt":45419,"Ġsomet":45420,"xc":45421,"Ġtriangles":45422,"netic":45423,"Calling":45424,"ĠDRM":45425,"Ġtriglycer":45426,"Ġinhibited":45427,"Ġnep":45428,"Ġalgebra":45429,"ascar":45430,"laim":45431,"Ġappl":45432,"1971":45433,"Bernie":45434,"Eh":45435,"Ġundefined":45436,"âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ":45437,"Sys":45438,"ournaments":45439,"Solid":45440,"Ġhep":45441,"ĠMales":45442,"Agent":45443,"Ġpsychedel":45444,"Wik":45445,"Ġdoctrines":45446,"rection":45447,"Compare":45448,"âĺ":45449,"Ġcertific":45450,"Ġsubstr":45451,"ĠCitation":45452,"ĠAFB":45453,"ĠBecame":45454,"Ġaristocracy":45455,"aryl":45456,"Ġanatomical":45457,"ocumented":45458,"ĠAssy":45459,"ĠFORM":45460,"Traditional":45461,"azines":45462,"Content":45463,"furt":45464,"Ġscripting":45465,"Ġcloaked":45466,"Ġunint":45467,"ĠCivilization":45468,"Desktop":45469,"ĠRagnar":45470,"Ġcurses":45471,"Ġobservable":45472,"ĠSpock":45473,"ĠPyr":45474,"Ġelectrom":45475,"ĠLump":45476,"oresc":45477,"ĠAttribution":45478,"egal":45479,"achusetts":45480,"Ġmarqu":45481,"âĻ¦":45482,"Ġcursor":45483,"ascist":45484,"1966":45485,"edit":45486,"lisher":45487,"ocyte":45488,"Writer":45489,"BILITIES":45490,"ĠUpload":45491,"Ġtreacher":45492,"Ġrecomb":45493,"Ġknights":45494,"Ġimmutable":45495,"ĠPly":45496,"Ġatten":45497,"ĠPassed":45498,"Flying":45499,"icipated":45500,"querade":45501,"ĠZot":45502,"CRE":45503,"ĠCursed":45504,"ickr":45505,"ĠDroid":45506,"thereum":45507,"Ġadjective":45508,"DIT":45509,"Ġtob":45510,"Ġinit":45511,"ĠPenet":45512,"Ġignor":45513,"Ġexalted":45514,"ĠDwell":45515,"assemb":45516,"Ġsentient":45517,"Ġ``":45518,"ĠGoo":45519,"Professional":45520,"othing":45521,"rupted":45522,"olics":45523,"ĠSetup":45524,"Thu":45525,"Campaign":45526,"Secondly":45527,"clipse":45528,"hibit":45529,"amate":45530,"SUP":45531,"ĠSuppose":45532,"submit":45533,"ĠDebian":45534,"Ġantid":45535,"Ġentert":45536,"ysical":45537,"ĠGladiator":45538,"ĠSTL":45539,"ĠBugs":45540,"ĠMech":45541,"ĠCoffin":45542,"itored":45543,"ICLE":45544,"Mist":45545,"Ġinfall":45546,"votes":45547,"actly":45548,"Occ":45549,"ĠConquest":45550,"alach":45551,"Ġintertw":45552,"reverse":45553,"amiya":45554,"icularly":45555,"edom":45556,"ĠLuxem":45557,"Fra":45558,"
urrencies":45559,"Ġnobility":45560,"Tab":45561,"Beer":45562,"Ġ10000":45563,"Ġincor":45564,"Ġmelanch":45565,"Depth":45566,"Firstly":45567,"usr":45568,"ĠWiki":45569,"hhhh":45570,"ĠProxy":45571,"Ġantagonists":45572,"Ġtransistor":45573,"ĠRelic":45574,"ĠPrometheus":45575,"Ġ1280":45576,"Coun":45577,"ĠMedals":45578,"stats":45579,"Assembly":45580,"inished":45581,"cemic":45582,"Ġadventurers":45583,"Ġcd":45584,"Supporters":45585,"ĠYs":45586,"])":45587,"Ġneglig":45588,"Request":45589,"Ġwhore":45590,"Ġovercl":45591,"_-":45592,"partial":45593,"amd":45594,"Ġfructose":45595,"Ġdivid":45596,"Administ":45597,"amples":45598,"Boo":45599,"akery":45600,"owered":45601,"hester":45602,"Links":45603,"GROUND":45604,"ethy":45605,"Ġincarcer":45606,"Ġincap":45607,"Drag":45608,"ĠElastic":45609,"âĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶâĢĶ":45610,"Ultra":45611,"AAAA":45612,"Order":45613,"ĠMysteries":45614,"Ġcanonical":45615,"Ign":45616,"Ġanimate":45617,"wegian":45618,"ggle":45619,"Hash":45620,"Arg":45621,"verty":45622,"Ġanalges":45623,"ouver":45624,"ittees":45625,"ĠAsgard":45626,"______":45627,"Mix":45628,"1964":45629,"Rate":45630,"Ġarousal":45631,"pheus":45632,"undai":45633,"hetamine":45634,"ĠMysterious":45635,"Alright":45636,"ĠHerod":45637,"riott":45638,"ĠAnarchy":45639,"ĠArche":45640,"Question":45641,"Chapter":45642,"Token":45643,"ĠSphere":45644,"Ġinduces":45645,"Audio":45646,"Normal":45647,"Ġprophe":45648,"ĠValiant":45649,"Tag":45650,"Relations":45651,"Ġblinked":45652,"onyms":45653,"ĠVortex":45654,"Ġdb":45655,"emonic":45656,"Phase":45657,"Ġkingdoms":45658,"Twe":45659,"ĠLORD":45660,"plementation":45661,"ĠConstantinople":45662,"helm":45663,"ĠFlesh":45664,"Ġthumbnail":45665,"ledged":45666,"ĠPROG":45667,"Ġdisbel":45668,"ĠLikes":45669,"ĠGamer":45670,"renches":45671,"hattan":45672,"Index":45673,"pecially":45674,"ĠJiu":45675,"Ġwhats":45676,"erion":45677,"xf":45678,"ĠPerception":45679,"Alien":45680,"Capt":45681,"ãĢĤ":45682,"joining":45683,"nesium":45684,"ĠSocrates":45685,"Icon":45686,"animate":45687,"ocalypse":45688,"ĠTactics":45689,"assador":45690,"Veh":45691,"src":45692,",-":45693,"Ġvisc":45694,"ĠDiscord":45695,"initial":45696,"atana":45697,"Size":45698,"Claim":45699,"ffect":45700,"iciary":45701,"Ġturret":45702,"reset":45703,"Ï":45704,"wrap":45705,"ulnerability":45706,"ĠInsert":45707,"Ġirrad":45708,"ognitive":45709,"clips":45710,"uncle":45711,"chemy":45712,"ottesville":45713,"Write":45714,"earances":45715,"1965":45716,"MIC":45717,"Ġmanag":45718,"Ġtelesc":45719,"Termin":45720,"Guest":45721,"Ġdenote":45722,"Failure":45723,"ograp":45724,"âĢķ":45725,"Ġscrolls":45726,"ĠArmored":45727,"Ġrecomp":45728,"Ġplaceholder":45729,"ĠISBN":45730,"ĠBelief":45731,"emporary":45732,"Asset":45733,"arcer":45734,"haar":45735,"assium":45736,"%:":45737,"ernal":45738,"ĠLv":45739,"atible":45740,"Pand":45741,"oubted":45742,"Lie":45743,"bial":45744,"STEP":45745,"Ġpresets":45746,"Ġstatist":45747,"Sund":45748,"reshold":45749,"endium":45750,"\");":45751,"Software":45752,"Ġbasal":45753,"ĠYose":45754,"Ġmortg":45755,"ocry":45756,"Ġsubreddit":45757,"omorphic":45758,"ĠLoaded":45759,"berra":45760,"vg":45761,"orkshire":45762,"ĠChrys":45763,"Repeat":45764,"ĠSimulator":45765,"rx":45766,"gex":45767,"Linux":45768,"ĠInstruct":45769,"irable":45770,"Ġmosquit":45771,"ĠManga":45772,"iOS":45773,"Ġsynt":45774,"Ġclitor":45775,"Ġlobe":45776,"ĠDelete":45777,"CVE":45778,"fortunately":45779,"Enc":45780,"vertising":45781,"Ġanten":45782,"Ġfif":45783,"Study":45784,"prev":45785,"ossus":45786,"Nar":45787,"Decl":45788,"erala":45789,"ĠPrototype":45790,"UGE":45791,"1001":45792,"Ġ---------":45793,"deals":45
794,"odcast":45795,"TPS":45796,"Ġcodec":45797,"ittee":45798,"isexual":45799,"ĠBreaker":45800,"menu":45801,"ĠURI":45802,"('":45803,"ĠFiorina":45804,"ĠApostles":45805,"ĠWitches":45806,"raint":45807,"addafi":45808,"ersive":45809,"yrim":45810,"Ġmosa":45811,"Ġrog":45812,"Ear":45813,"âĺħ":45814,"Ġcaloric":45815,"matical":45816,"yrics":45817,"ĠKrugman":45818,"axter":45819,"1016":45820,"Ġsep":45821,"ĠExtend":45822,"ropolitan":45823,"thren":45824,"ologne":45825,"atomic":45826,"Naturally":45827,"Pros":45828,"gencies":45829,"akens":45830,"Male":45831,"Ġcausation":45832,"omnia":45833,"Comments":45834,"eeee":45835,"iquette":45836,"Ġcytok":45837,"ename":45838,"details":45839,"Ġdestruct":45840,"leep":45841,"ĠCavern":45842,"ĠInvention":45843,"ueless":45844,"Ġsubsection":45845,"outhern":45846,"metic":45847,"blogs":45848,"ĠPacks":45849,"ĠArduino":45850,"hhh":45851,"elligence":45852,"imity":45853,"ĠUltron":45854,"astrous":45855,"Ġbiome":45856,"ĠHover":45857,"Ġprivile":45858,"igham":45859,"apest":45860,"ĠYoshi":45861,"Artist":45862,".\",":45863,"gamer":45864,"Virgin":45865,"Tea":45866,"ĠDoomsday":45867,"ĠðŁĻĤ":45868,"terday":45869,"ĠCommando":45870,"ĠAchieve":45871,"chrom":45872,"Ġcryptographic":45873,"Ġrebell":45874,"Specifically":45875,"âĢ¦âĢ¦âĢ¦âĢ¦":45876,"ĠEternity":45877,"Ġemulation":45878,"ĠSERV":45879,"ĠMiscellaneous":45880,"ĠParticipant":45881,"duc":45882,"vp":45883,"ĠSparkle":45884,"ategories":45885,"Ġdecrypt":45886,"ĠGNOME":45887,"activation":45888,"Ġanarch":45889,"owler":45890,"adiator":45891,"itars":45892,"ĠTHEN":45893,")\",":45894,"åħ":45895,"Ġembod":45896,"vae":45897,"âĺĨ":45898,"Member":45899,"Ġrm":45900,"nyder":45901,"ĠLeviathan":45902,"Gaza":45903,"erenn":45904,"Chicken":45905,"ĠDefinitive":45906,"ĠBolshe":45907,"ĠJagu":45908,"gorith":45909,"loader":45910,"exe":45911,".........":45912,"ĠReceived":45913,"ĠProto":45914,"ĠLocked":45915,"Posts":45916,"ankind":45917,"Clock":45918,"ĠCLI":45919,"Throw":45920,"dL":45921,"epad":45922,"ĠAtmosp":45923,"Ġmk":45924,"ĠSteal":45925,"uple":45926,"reference":45927,"ĠGNU":45928,"adelphia":45929,"scripts":45930,"ilaterally":45931,"ĠMods":45932,"odus":45933,"ignty":45934,"REF":45935,"Ġhypothesized":45936,"issors":45937,"Ġanus":45938,"HUD":45939,"rices":45940,"Draw":45941,"Computer":45942,"Below":45943,"uthor":45944,"ĠTact":45945,"=$":45946,"00000000":45947,"Ġcaut":45948,"Sharp":45949,"depend":45950,"Ġtatt":45951,"Goal":45952,"Sounds":45953,"zona":45954,"anyon":45955,"ricanes":45956,"ĠUSAF":45957,"Jump":45958,"Bottom":45959,"etermination":45960,"ĠPles":45961,"Ġhypothes":45962,"Reference":45963,"Ġswall":45964,"Ġmaneu":45965,"rifice":45966,"ĠVeh":45967,"Ġtex":45968,"geoning":45969,"ĠâľĶ":45970,"Mach":45971,"eanor":45972,"%);":45973,"archives":45974,"Ġencyclopedia":45975,"ĠPreferences":45976,"damage":45977,"Done":45978,"Ġcoefficient":45979,"ĠCreatures":45980,"Ġital":45981,"ivari":45982,"Revolution":45983,"Ġnob":45984,"Diff":45985,"Ġabbre":45986,"Writ":45987,"ĠDOS":45988,"redd":45989,"Ġsplend":45990,"orest":45991,"flame":45992,"Ġdevs":45993,"Ġ==":45994,"ĠPuzzle":45995,"Ġgit":45996,"MOD":45997,"ĠArgument":45998,"ĠAbyss":45999,"Studies":46000,"ophob":46001,"uild":46002,"scill":46003,"fp":46004,"Ġplur":46005,"Delete":46006,"ĠFALSE":46007,"FIL":46008,"Ġmicrobiota":46009,"ĠIPv":46010,"Stud":46011,"ortal":46012,"ĠDivinity":46013,"ounter":46014,"ä¸":46015,"Naz":46016,"stals":46017,"ihilation":46018,"Ġpersecut":46019,"ĠPlanes":46020,"viation":46021,"Driver":46022,"ĠEEG":46023,"Unity":46024,"Premium":46025,"ĠSiren":46026,"ĠPaleo":46027,"earchers":46028,"Pract":46029,"Ö":46030,
"VII":46031,"mosp":46032,"Ġidentifiers":46033,"Near":46034,"achu":46035,"Apps":46036,"tackle":46037,"COLOR":46038,"Ġperpendicular":46039,"viks":46040,"ecided":46041,"ĠDota":46042,"icons":46043,"Ġpsi":46044,"Brave":46045,"Ġunimagin":46046,"ĠATI":46047,"OOL":46048,"Gender":46049,"ĠSwords":46050,"oples":46051,"Rank":46052,"olphins":46053,"Ġdeities":46054,"ĠXIII":46055,"м":46056,"ĠKraken":46057,"ĠLEVEL":46058,"stasy":46059,"ĠBabel":46060,"Hours":46061,"Avoid":46062,"Mech":46063,"Multi":46064,"Ġect":46065,"Occup":46066,"panic":46067,"Ġmutants":46068,"Evidence":46069,"Tips":46070,"Ġvolts":46071,"Exit":46072,"xb":46073,"planet":46074,"avez":46075,"features":46076,")]":46077,"lol":46078,"ĠNeph":46079,"ĠSanct":46080,"Ġimpover":46081,"................................":46082,"Sty":46083,"Email":46084,"Torrent":46085,"Ġgluc":46086,"ĠSins":46087,"ĠIncarn":46088,"ĠWITHOUT":46089,"ĠPanzer":46090,"ĠAssignment":46091,"versible":46092,"Strange":46093,"ITNESS":46094,"incible":46095,"ZX":46096,"ĠMySQL":46097,"Ġconson":46098,"Ġoxidative":46099,"Machine":46100,"Impro":46101,"Parent":46102,"ĠMetroid":46103,"Educ":46104,"Ġdismant":46105,"dx":46106,"ĠPersona":46107,"ĠHDL":46108,"Americ":46109,"Users":46110,"Ġeighteenth":46111,"WARNING":46112,"ĠLists":46113,"ĠCanter":46114,"ĠTrotsky":46115,"Ġhaha":46116,"]'":46117,"ĠEncyclopedia":46118,"admin":46119,"ĠACTIONS":46120,"idav":46121,"ο":46122,"ĠFTP":46123,"Ġquar":46124,"ongyang":46125,"âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦âĢ¦":46126,"Ġsynchronization":46127,"DEM":46128,"riched":46129,"Ġnegro":46130,"Bench":46131,"Ġfilament":46132,"Ġdecoding":46133,"obj":46134,"Ġjoystick":46135,"Decre":46136,"ĠBolshevik":46137,"Virtual":46138,"ĠSacrament":46139,"xd":46140,"BILL":46141,"-+-+":46142,"¶":46143,"anchester":46144,"Pokemon":46145,"Ġslic":46146,"iameter":46147,"errilla":46148,"Exactly":46149,"\"'":46150,"getic":46151,"3333":46152,"solete":46153,"Ġincorpor":46154,"Ġio":46155,"------------":46156,"Ġantiquity":46157,"ATURES":46158,"Policy":46159,"oppable":46160,"Ġ=>":46161,"ODUCT":46162,"otide":46163,"Ú":46164,"Ġnormative":46165,"Fac":46166,"Ġshaman":46167,"element":46168,"Plex":46169,"INTER":46170,"etsk":46171,"ĠGauntlet":46172,"ĠBIOS":46173,"×ķ":46174,"riet":46175,"Rew":46176,"uristic":46177,"urches":46178,"ĠChomsky":46179,"ixir":46180,"package":46181,"Owner":46182,"Ġschematic":46183,"Assistant":46184,"Ġemanc":46185,"Ġarchetype":46186,"Initial":46187,"intent":46188,"Ġfilib":46189,"ispers":46190,"Flag":46191,"Tank":46192,"Ġinsurg":46193,"Ġapproximation":46194,"Ġsemantic":46195,"Ġsubtitle":46196,"Font":46197,"Ġintimid":46198,"Ġhath":46199,"tools":46200,"gob":46201,"Process":46202,"slave":46203,"ĠJUSTICE":46204,"âĻ¥":46205,"ĠHardcore":46206,"Discover":46207,"Ġexch":46208,"ptive":46209,"units":46210,"ĠDjango":46211,"itudinal":46212,"Ġpc":46213,"akespeare":46214,"ospace":46215,"Ġhorny":46216,"auth":46217,"ĠSkyrim":46218,"ENGTH":46219,"perors":46220,"ĠVulkan":46221,"Ġchimpan":46222,"Ġremem":46223,"Ġopacity":46224,"Ġ:(":46225,"ushima":46226,"Ġawoken":46227,"Ġsacrament":46228,"Beginning":46229,"escape":46230,"Anim":46231,"Ġadvant":46232,"ĠRequires":46233,"output":46234,"Ġdroid":46235,"Yep":46236,"rieving":46237,"Ġpt":46238,"ĠShotgun":46239,"ĠOsiris":46240,"disabled":46241,"ĠRadius":46242,"Medium":46243,"ĠScient":46244,"ĠRept":46245,"ymm":46246,"Ġcp":46247,"ĠLabyrinth":46248,"poral":46249,"Ġ'(":46250,"Hack":46251,"ĠTechnique":46252,"/,":46253,"Ġambig":46254,"Basic":46255,"Ġretrie":46256,"VICE":46257,"BIP":46258,"ragon":46259,"phies":46260,"uminum":46261,"ĠFei":46262,"lesi":46263,"Ġsemantics":46264,"ĠHz"
:46265,"ĠUnderworld":46266,"Ġendot":46267,"olesterol":46268,"ourning":46269,"Ġcaches":46270,"ĠYug":46271,"Legendary":46272,"ĠDocumentation":46273,"ĠSpiral":46274,"ĠClone":46275,"bnb":46276,"ĠâĶ":46277,"ustom":46278,"Mp":46279,"gettable":46280,"agonist":46281,"Ġneuronal":46282,"culus":46283,"enum":46284,"cules":46285,"Ġmuttered":46286,"ctica":46287,"necess":46288,"ĠSubtle":46289,"Ġsolder":46290,"Environment":46291,"oneliness":46292,"orage":46293,"âĢ¦.\"":46294,"nesota":46295,"agements":46296,"Ùİ":46297,"WHERE":46298,"ĠGDDR":46299,"Scient":46300,"ĠMulcair":46301,"ĠRena":46302,"________________________________________________________________":46303,"antics":46304,"Ġtorped":46305,"Brow":46306,"ossal":46307,"Category":46308,"Regular":46309,"remote":46310,"ãģ":46311,"ĠCoil":46312,"ritch":46313,"specified":46314,"Average":46315,"Ġfingert":46316,"entity":46317,"atibility":46318,"ampunk":46319,"ĠScriptures":46320,"Ġunequ":46321,"arettes":46322,"arching":46323,"Ġastron":46324,"Ġnumeric":46325,"ĠeBook":46326,"remove":46327,"onday":46328,"Ġmetaphysical":46329,"ĠGoku":46330,"Element":46331,"ĠRuin":46332,"Norm":46333,"Ġtox":46334,"puff":46335,"Ġharmonic":46336,"ĠAgility":46337,"ĠHearthstone":46338,"Ġmana":46339,"Points":46340,"Ġconduc":46341,"ĠPersia":46342,"-----":46343,"license":46344,"Application":46345,"assert":46346,"Reader":46347,"ĠSacrifice":46348,"float":46349,"inctions":46350,"byter":46351,"Ġfundament":46352,"\"âĢ¦":46353,"Fourth":46354,"Effective":46355,"ĠMeow":46356,"ĠErrors":46357,"ĠIcar":46358,"ĠMMO":46359,"Ġapostles":46360,"Ġfaintly":46361,"component":46362,"bably":46363,"uggage":46364,"ĠMPG":46365,"krit":46366,"container":46367,"ixture":46368,"ĠPOV":46369,"izabeth":46370,"onut":46371,"isdom":46372,"trace":46373,"ĠSDL":46374,"Interestingly":46375,"ĠExplan":46376,"lesiastical":46377,"ternal":46378,"Bug":46379,"Ġmetabolites":46380,"geries":46381,"Ġsupra":46382,"ĠMakoto":46383,"orget":46384,"racuse":46385,"][":46386,"ĠPrelude":46387,"peria":46388,"tube":46389,"ĠCatalog":46390,"ĠGoblin":46391,"QUEST":46392,"ĠINCLUD":46393,"ĠVERS":46394,"erguson":46395,"Ġcommandments":46396,"ĠUDP":46397,"itle":46398,"ι":46399,"domain":46400,"roximately":46401,"ĠTLS":46402,"ongevity":46403,"Ġmodulation":46404,"Ġdidnt":46405,"ĠCalories":46406,"Applications":46407,"ormon":46408,"Ġsd":46409,"dullah":46410,"Ġcous":46411,"ĠDARK":46412,"clip":46413,"ĠPsychiat":46414,"ĠTanz":46415,"ĠCharisma":46416,"ĠMerge":46417,"ĠKDE":46418,"requires":46419,"urdue":46420,"Ġdecimal":46421,"Ġâī¥":46422,"ĠAuth":46423,"ebted":46424,"ĠTempl":46425,"ĠâĢº":46426,"Ultimate":46427,"Ġmammalian":46428,"advertising":46429,"Ġdominion":46430,"Ġacron":46431,"ĠWem":46432,"ĠHeist":46433,"oiler":46434,"FLAG":46435,"ovember":46436,"Syn":46437,"Ġgodd":46438,"ĠPyth":46439,"Ġglyc":46440,"ĠHelpful":46441,"Ġgad":46442,"chedel":46443,"Similar":46444,"Ġ¶":46445,"Ġnp":46446,"ĠREPL":46447,"Fill":46448,"ĠSunder":46449,"etsy":46450,"ĠPAX":46451,"ĠFemales":46452,"ĠKingdoms":46453,"Ġwhistlebl":46454,"Hide":46455,"serial":46456,"ĠEnemies":46457,"ĠPeb":46458,"Ġpiety":46459,"ifact":46460,"esity":46461,"bsite":46462,"esides":46463,"Ġported":46464,"Ġamygdala":46465,"ĠGerr":46466,"afety":46467,"Ġadip":46468,"(\"":46469,"Ġcf":46470,"Ġurl":46471,"unia":46472,"icro":46473,"Austral":46474,"ĠConfig":46475,"accompanied":46476,"isite":46477,"Ġtextual":46478,"\">":46479,"Ġanecd":46480,"Ġ\",":46481,"angular":46482,"ĠUnicode":46483,"Proof":46484,"Ġmultiplication":46485,"Address":46486,"Ġbytes":46487,"lems":46488,"uterte":46489,"Episode":46490,"oshop":46491,"ritical":46492,"Adjust":
46493,"argument":46494,"\\'":46495,"Rober":46496,"pection":46497,"Agg":46498,"äº":46499,"interrupted":46500,"ĠDebor":46501,"Ġlair":46502,"Various":46503,"isively":46504,"ĠStatic":46505,"ohyd":46506,"ĠEchoes":46507,"UID":46508,"raught":46509,"Bott":46510,"Ġapostle":46511,"ĠCentauri":46512,"oxicity":46513,"ibling":46514,"Ġparalle":46515,"inav":46516,"Crit":46517,"ĠTyph":46518,"Ġhig":46519,"ĠEDITION":46520,"Ġcoord":46521,"uish":46522,"sectional":46523,"inki":46524,"Title":46525,"anyahu":46526,"osterone":46527,"Ġdesper":46528,"ribly":46529,"Legend":46530,"afort":46531,"Org":46532,"Ġempir":46533,"ĠQuake":46534,"SSL":46535,"ioxide":46536,"åľ":46537,"Ġenz":46538,"urtle":46539,"BSD":46540,"Rust":46541,"ospels":46542,"Rare":46543,"Ġpartitions":46544,"Ġheresy":46545,"overy":46546,"Ġmonop":46547,"Pixel":46548,"odder":46549,"Option":46550,"withstanding":46551,"Transfer":46552,"Ġarrog":46553,"skip":46554,"ĠSSH":46555,"ĠSph":46556,"Ġcallback":46557,"PIN":46558,"Ġpdf":46559,"Ġplaint":46560,"cipled":46561,"reenshots":46562,"Ġparsing":46563,"::::::::":46564,"ioxid":46565,"Ġhereafter":46566,"ĠFunctions":46567,"ĠBulgar":46568,"Ġintu":46569,"DOC":46570,"Location":46571,"Hyper":46572,"ageddon":46573,"Evil":46574,"illions":46575,"Introduction":46576,"Physical":46577,"ĠLayout":46578,"âķ":46579,"------------------------":46580,"ĠRodham":46581,"ĠPatterns":46582,"Delivery":46583,"Ġdistur":46584,"ĠVolunte":46585,"ĠGUI":46586,"Ġclen":46587,"Ġinacc":46588,"ĠBallistic":46589,"ĠSprite":46590,"Privacy":46591,"theme":46592,"dump":46593,"ĠByte":46594,"ĠIncre":46595,"apult":46596,"ĠWrath":46597,"ensibly":46598,"NOTE":46599,"ounge":46600,"ustomed":46601,"ochond":46602,"ĠQt":46603,"Primary":46604,"Ġsidew":46605,"Root":46606,"gregation":46607,"SQL":46608,"ĠSOFTWARE":46609,"Gallery":46610,"ĠDungeon":46611,"ĠVengeance":46612,"->":46613,"steam":46614,"Ġfrivol":46615,"Ġpid":46616,"filter":46617,"Ġfacult":46618,"doms":46619,"Tool":46620,"1959":46621,"Ġprefix":46622,"Ġcomma":46623,"relative":46624,"Ġformatted":46625,"appropriately":46626,"Ġmd":46627,"xxx":46628,"ĠAuthentication":46629,"ĠWTC":46630,"Ġvulner":46631,"reditary":46632,"Steam":46633,"Tx":46634,"ĠGHC":46635,"Increased":46636,"forcement":46637,"ĠGuant":46638,"bernatorial":46639,"Entry":46640,"ĠWarp":46641,"ĠCreature":46642,"ĠAmmunition":46643,"Ġclust":46644,"ĠInher":46645,"Ġunbel":46646,"RGB":46647,"ĠMankind":46648,"ĠPlague":46649,"Ġ=================================":46650,"psc":46651,"Intern":46652,"tml":46653,"ĠCrusade":46654,"inflamm":46655,"Storage":46656,"token":46657,"inse":46658,"False":46659,"Adult":46660,"Pokémon":46661,"PLIED":46662,"Ġglac":46663,"ĠDwarf":46664,"sequence":46665,"Ġmagnification":46666,"ĠIlluminati":46667,"hedral":46668,"param":46669,"regon":46670,".\",\"":46671,"Eva":46672,"igree":46673,"Object":46674,"Ġoptimizations":46675,"uador":46676,"mmmm":46677,"ullivan":46678,"Ġ[\"":46679,"ĠDusk":46680,"Ġtrig":46681,"Ġiss":46682,"Ġhypert":46683,"Ġperspect":46684,"Ġassum":46685,":,":46686,"Ġinterpol":46687,"Asked":46688,"Boot":46689,"LIB":46690,"Loading":46691,"Ident":46692,"upuncture":46693,"ioch":46694,"Ġprefrontal":46695,"delay":46696,"ĠPoké":46697,"bestos":46698,"overe":46699,"Elf":46700,"eteria":46701,"ĠSneak":46702,"bians":46703,"ĠARTICLE":46704,"Xbox":46705,"encrypted":46706,"ync":46707,"ĠNietzsche":46708,"Nonetheless":46709,"Ġ±":46710,"ĠPrimal":46711,"ĠFlare":46712,"Ġconflic":46713,"ĠRune":46714,"Tes":46715,"cellence":46716,"Mega":46717,"ĠEntity":46718,"chrome":46719,"iatures":46720,"Ġuninstall":46721,"Winner":46722,"aimon":46723,"Ġhomebrew":46724,"Rub
y":46725,"araoh":46726,"itime":46727,"Ġpotion":46728,"ĠAllows":46729,"ogyn":46730,"osuke":46731,"Limited":46732,"Ġmacros":46733,"ERROR":46734,"gling":46735,"Ġtodd":46736,"repre":46737,"ĠSakura":46738,"erker":46739,"items":46740,"FIG":46741,"ĠUnle":46742,"Ġhardness":46743,"Split":46744,"Ġarous":46745,"ocally":46746,"Ġì":46747,"ĠEVE":46748,"pleasant":46749,"ihil":46750,"ĠRouter":46751,"ĠLucius":46752,"readable":46753,"Ġtremb":46754,"Dro":46755,"Ġblaster":46756,"Ġbourgeoisie":46757,"NUM":46758,"Alternative":46759,"flags":46760,"GAME":46761,"ebook":46762,"ĠIPM":46763,"Ġcorrel":46764,"Setting":46765,"Frame":46766,"Ġatheism":46767,"Interested":46768,"Liquid":46769,"stanbul":46770,"Lv":46771,"Ġtits":46772,"Ġdc":46773,"×Ļ×":46774,"Ġdoctr":46775,"background":46776,"tsy":46777,"ĠCtrl":46778,"ĠCompatibility":46779,"idae":46780,"example":46781,"perture":46782,"Ġguid":46783,"ĠWinged":46784,"Command":46785,"ridor":46786,"bool":46787,"comments":46788,"ĠImmunity":46789,"Nit":46790,"Statement":46791,"Ġmanif":46792,"ĠIntake":46793,"Bloom":46794,"txt":46795,"context":46796,"input":46797,"achus":46798,"proc":46799,"Ñĭ":46800,"Ġdisemb":46801,"ospons":46802,"utical":46803,"ĠRender":46804,"Ironically":46805,"ursday":46806,"ĠExile":46807,"lishes":46808,"iets":46809,"orescent":46810,"cair":46811,"ĠSubjects":46812,"ĠDungeons":46813,"Ġiii":46814,"neapolis":46815,"ĠBlaster":46816,"Ġphp":46817,"ORED":46818,"ĠSLI":46819,"Ġelig":46820,"ĠIdentified":46821,"ĠBrawl":46822,"bytes":46823,"ĠCTR":46824,"Ġsched":46825,"Assuming":46826,"Bound":46827,"ĠMathemat":46828,"razil":46829,"ĠAstral":46830,"mble":46831,"untled":46832,"Ġmech":46833,"ĠDagger":46834,"ĠUseful":46835,"nesday":46836,"tarians":46837,"AMY":46838,"Camera":46839,"node":46840,"pict":46841,"ginx":46842,"Ġyea":46843,">>>>>>>>":46844,"paragraph":46845,"ĠSupplementary":46846,"9999":46847,"ĠAlchemist":46848,"uzzle":46849,"igun":46850,"ĠCalculator":46851,"ĠApplicant":46852,"hift":46853,"ĠGPL":46854,"Ġencode":46855,"Crash":46856,"ĠNutr":46857,"kHz":46858,"TABLE":46859,"intestinal":46860,"andom":46861,"archive":46862,"Ëľ":46863,"Registered":46864,"Questions":46865,"Remote":46866,"ethyst":46867,"Ġgren":46868,"ĠTexture":46869,"Ġseiz":46870,"Anyway":46871,"ĠVariant":46872,"ê":46873,"Adapt":46874,"ittered":46875,"meta":46876,"ambers":46877,"ĠRuins":46878,"ĠChimera":46879,"password":46880,"ĠReboot":46881,"Ġcaster":46882,"Ġamplitude":46883,"Position":46884,"Ġnotation":46885,"Ġsecretion":46886,"Excellent":46887,"delete":46888,"aminer":46889,"ä»":46890,"Exec":46891,"ĠKenobi":46892,"Interview":46893,"ontent":46894,"ospel":46895,"Ġtuber":46896,"CONT":46897,"roups":46898,"Ġemulator":46899,"Ġjava":46900,"0200":46901,"Ġnested":46902,"Ġfert":46903,")).":46904,"Dex":46905,"ĠSora":46906,"Ġpotions":46907,"ĠAnon":46908,"aah":46909,"Ġdunno":46910,"Ġμ":46911,"Ġmethodological":46912,"itles":46913,"phia":46914,"Beg":46915,"Rules":46916,"ĠXML":46917,"Ġflask":46918,"ĠShogun":46919,"Ġ2048":46920,"atchewan":46921,"Ġfuckin":46922,"Built":46923,"Ġbour":46924,"Ġdisag":46925,"yss":46926,"ĠÏ":46927,"Spoiler":46928,"Wiki":46929,"Ġmorphology":46930,"Ġendors":46931,"Ġdungeons":46932,"dragon":46933,")),":46934,"Ġhous":46935,"Ġoverwhel":46936,"SAY":46937,"abwe":46938,"--------------------------------":46939,"Ġepist":46940,"Ġpalp":46941,"ĠExtensions":46942,"ĠMistress":46943,"ĠUkrain":46944,"================":46945,"edience":46946,"abama":46947,"ĠLua":46948,"ĠOffline":46949,"ĠKonami":46950,"unicip":46951,"ĠMachina":46952,"Specific":46953,"Ġpresupp":46954,"ĠGEAR":46955,"rition":46956,"rences":46957,"successfully"
:46958,"Ġ1024":46959,"Platform":46960,"}}":46961,"clude":46962,"roxy":46963,"Ġpromot":46964,"ĠAdapter":46965,"rocal":46966,"ĠMasquerade":46967,"Panel":46968,"Language":46969,"elsius":46970,"Push":46971,"abase":46972,"ĠdB":46973,"argon":46974,"ĠRemoved":46975,"amph":46976,"ĠWyr":46977,"Ġindisp":46978,"ĠOkin":46979,"aepernick":46980,"moil":46981,"Continue":46982,"00007":46983,"ĠJournals":46984,"TAG":46985,"ĠRemastered":46986,"Ġsymp":46987,"methyl":46988,"Overview":46989,"umeric":46990,"ĠCodex":46991,".$":46992,"ranged":46993,"Sym":46994,"ĠVerse":46995,"ĠEnabled":46996,"ĠFUCK":46997,"ĠHearth":46998,"Ġbrill":46999,"ĠChaser":47000,"Beh":47001,"ĠAlchemy":47002,"Oracle":47003,"roleum":47004,"ĠVoldemort":47005,"();":47006,"Ġcollaps":47007,"Visual":47008,"ĠAngular":47009,"ĠOsc":47010,"ichita":47011,"Ġcig":47012,"Ġtoolbar":47013,"ĠEnlight":47014,"ÑĮ":47015,"ε":47016,"aliation":47017,"ĠLovecraft":47018,"jri":47019,"ĠInterstellar":47020,"Ġdebugging":47021,"Ġparentheses":47022,"ĠInit":47023,"Located":47024,"Weak":47025,"ĠPvP":47026,"ĠCloak":47027,"uture":47028,"iths":47029,"asionally":47030,"FACE":47031,"Introdu":47032,"');":47033,"slot":47034,"aturday":47035,"ĠNiet":47036,"Ġpuzz":47037,"!!!!!!!!":47038,"folios":47039,"Ç":47040,"Ġverbs":47041,"ĠFrames":47042,"ĠAmbro":47043,"Ġmillisec":47044,"ĠRebell":47045,"ylum":47046,"PASS":47047,"ĠConfiguration":47048,"μ":47049,"brids":47050,"vantage":47051,"Ġ['":47052,"ĠScy":47053,"Benef":47054,"gradation":47055,"ĠOrc":47056,"Resources":47057,"Awesome":47058,"ĠMilitia":47059,"POST":47060,"Ġbinaries":47061,"Mode":47062,"Ġkb":47063,"ĠWARRANT":47064,"hemy":47065,"Desc":47066,"alion":47067,"Ġwiki":47068,"Ġcommer":47069,"Serial":47070,"ĠUncommon":47071,"ignore":47072,"Ġconstructor":47073,"ctl":47074,"Ġ):":47075,"ĠVerify":47076,"Notice":47077,"ĠRPGs":47078,"uckland":47079,"Ġincre":47080,"Pinterest":47081,"ĠDefinitions":47082,"iband":47083,"Ġtd":47084,"Ġsubscrib":47085,"Shin":47086,"ĠGadget":47087,"Document":47088,"å®":47089,"Requ":47090,"QUIRE":47091,"ĠQuadro":47092,"ĠUnix":47093,"Enlarge":47094,"thens":47095,"\"...":47096,"gebra":47097,"pload":47098,"alogue":47099,"vironments":47100,"Strength":47101,"ĠPID":47102,"ĠInvaders":47103,"HOME":47104,"Atl":47105,"ĠBlizz":47106,"ĠWidth":47107,"ĠOpenGL":47108,"zx":47109,"$,":47110,"Ġå":47111,"cig":47112,"lectic":47113,"relation":47114,"Ġfeas":47115,"undown":47116,"Said":47117,"ν":47118,"��":47119,"english":47120,"ĠTokens":47121,"ĠALEC":47122,"OOOO":47123,"isconsin":47124,"Ġconstants":47125,"ĠTemplar":47126,"Accept":47127,"Ġmascul":47128,"enegger":47129,"ampires":47130,"Rated":47131,"lua":47132,"ucl":47133,"ĠSequence":47134,"ĠNRS":47135,"STD":47136,"Cra":47137,"autions":47138,"ĠKernel":47139,"oleon":47140,"htaking":47141,"ancial":47142,"Pages":47143,"orthodox":47144,"ropy":47145,"EEE":47146,"Ġtranssexual":47147,"?????":47148,"Ġsurpr":47149,"arthy":47150,"ĠPsychic":47151,"Ġdorsal":47152,"cember":47153,"joice":47154,"/+":47155,"verend":47156,"uint":47157,"Ġderog":47158,"Subject":47159,"hemat":47160,"!]":47161,"Ġ);":47162,"Ġmeshes":47163,"Ġreperc":47164,"ĠTerran":47165,"åĪ":47166,"Load":47167,"å¹":47168,"ikarp":47169,"rompt":47170,"Ġgoblins":47171,"ĠShattered":47172,"tests":47173,"Spread":47174,"ĠNaruto":47175,"Ġpredic":47176,"Hyp":47177,"ĠArkham":47178,"ĠNASL":47179,"Material":47180,"Rule":47181,"raviolet":47182,"ĠKlingon":47183,"Memory":47184,"acers":47185,"Known":47186,"Important":47187,"Ġα":47188,"Ġtraged":47189,"Ġshalt":47190,"Ġiso":47191,"ĠJSON":47192,"Instant":47193,"Ġpg":47194,"Ġexponent":47195,"formance":47196,"bitcoin":47197
,"DOS":47198,"cheat":47199,"Ġrook":47200,"ĠBiol":47201,"noticed":47202,"Ġtwent":47203,"ĠRedux":47204,"ĠBorderlands":47205,"Supported":47206,"TRUMP":47207,"Ġturrets":47208,"include":47209,"Effect":47210,"Ġdisg":47211,"ophical":47212,"ĠFaction":47213,"wiki":47214,"Ġsrc":47215,"Laun":47216,"TIT":47217,"Ġorbs":47218,"Ġincompet":47219,"Ġdescriptor":47220,"ĠTrog":47221,"Contribut":47222,"ĠGodd":47223,"inances":47224,"Ult":47225,"lyak":47226,"âĢ¢âĢ¢âĢ¢âĢ¢":47227,"stitial":47228,"essim":47229,"Graphics":47230,"ubis":47231,"Ġegreg":47232,"DEV":47233,"Ġannotations":47234,"Yang":47235,"ĠDruid":47236,"ĠInquisition":47237,"ohydrate":47238,"Critical":47239,"æĸ":47240,"Sample":47241,"ĠPref":47242,"ĠUnleashed":47243,"ĠAccessed":47244,"Ġconceptions":47245,"Minor":47246,"pard":47247,"prus":47248,"Factory":47249,"thinkable":47250,"Ġexecutable":47251,"chapter":47252,"inyl":47253,"Display":47254,"ilater":47255,"Released":47256,"ĠDirectX":47257,"aneers":47258,"Ġ______":47259,"ĠHilbert":47260,"Options":47261,"Ġsorcery":47262,"esm":47263,"ÏĦ":47264,"Ġdescript":47265,"ĠTycoon":47266,"psons":47267,"Ġcov":47268,"Launch":47269,"ogeneity":47270,"Ġsacrific":47271,"ADRA":47272,"netflix":47273,"flix":47274,"usage":47275,"properties":47276,"attach":47277,"req":47278,"Resource":47279,"requisite":47280,"1007":47281,"ĠMIDI":47282,"ĠZoro":47283,"Tue":47284,"hower":47285,"dds":47286,"ynasty":47287,"headers":47288,"Ġdisproportion":47289,"omaly":47290,"Ġvim":47291,"inces":47292,"edient":47293,"ĠWraith":47294,"ilibrium":47295,"Hig":47296,"ĠFrie":47297,"Meat":47298,"ldom":47299,"KNOWN":47300,"orgetown":47301,"Improve":47302,"10000":47303,"Ġretarded":47304,"Disclaimer":47305,"Ġunfocused":47306,"ĠUnsure":47307,"ĠElixir":47308,"idth":47309,"atural":47310,"ĠErr":47311,"Critics":47312,"ĠBows":47313,"ifferent":47314,"proxy":47315,"Lic":47316,"aucas":47317,"rolet":47318,"ĠCoC":47319,"Ġdoesnt":47320,"phabet":47321,"Version":47322,"Ġhepat":47323,"gif":47324,"izophren":47325,"ãĥ»":47326,"ĠGutenberg":47327,"β":47328,"phans":47329,"Scene":47330,"Ġaccomp":47331,"ilings":47332,"rypted":47333,"aceae":47334,"arantine":47335,"heses":47336,"iasco":47337,"lopp":47338,"ĠGSL":47339,"disk":47340,"ãĢģ":47341,"0010":47342,"ĠOutbreak":47343,"Column":47344,"odox":47345,"atform":47346,"ĠThrust":47347,"ĠSVG":47348,"Enhanced":47349,"¯":47350,"Tools":47351,"rogens":47352,"xus":47353,"Available":47354,"zbollah":47355,"è¡":47356,"osate":47357,"usb":47358,"ordes":47359,"Matrix":47360,"ĠBlazing":47361,"ascus":47362,"ĠSovere":47363,"hement":47364,"*:":47365,"amaru":47366,"Ġparsed":47367,"Bonus":47368,"otrop":47369,"spell":47370,"ancock":47371,"ĠEnchant":47372,"vP":47373,"ĠReferred":47374,"Ġalot":47375,"ĠRuntime":47376,"ĠFn":47377,"CPU":47378,"ĠNicotine":47379,"External":47380,"ĠNightmares":47381,"Ġentropy":47382,"kB":47383,"ĠRealms":47384,"Ġ##":47385,"Ġsubmar":47386,"ĠSlime":47387,"itual":47388,"ĠBastard":47389,"Ġacknowled":47390,"Magazine":47391,"rendered":47392,"ircraft":47393,"CSS":47394,"Numbers":47395,"Pg":47396,"utenant":47397,"ĠPalest":47398,"ĠRoose":47399,"udicrous":47400,"anooga":47401,"Unt":47402,"Ġcapacitor":47403,"Ġschema":47404,"hematic":47405,"ĠPinball":47406,"endars":47407,"Ġ===":47408,"nsic":47409,"ipedia":47410,"Ġchromos":47411,"ĠmRNA":47412,"Ct":47413,"ĠPaladin":47414,"sonian":47415,"Ġæ":47416,"ajor":47417,"repeat":47418,"ortex":47419,"ĠHeroic":47420,"ĠHera":47421,"ociated":47422,"Ġdebug":47423,"osher":47424,"upiter":47425,"_.":47426,"Ġsys":47427,"ĠDownloads":47428,"','":47429,"Adventure":47430,"FORE":47431,"ocument":47432,"arning":47433,"Ġmiscon":
47434,"vidia":47435,"Cod":47436,"ibraries":47437,"buffer":47438,"cdn":47439,"ĠModes":47440,"tarian":47441,"ĠPyro":47442,"ĠFixes":47443,"ĠâĪ":47444,"ĠCf":47445,"Testing":47446,"Byte":47447,"nants":47448,"oufl":47449,"ĠCipher":47450,"Aim":47451,"ĠAfgh":47452,"ĠStarCraft":47453,"intendent":47454,"akespe":47455,"Apply":47456,">>>":47457,"Lenin":47458,"ĠShaman":47459,"%\"":47460,"ĠFrenzy":47461,"illusion":47462,"===":47463,"Website":47464,"Allow":47465,"ĠBinary":47466,"ensable":47467,"ĠEmpires":47468,"Ġpromul":47469,"ormonal":47470,"ileaks":47471,"ĠAmmo":47472,"assies":47473,"atican":47474,"avior":47475,"ĠIter":47476,"1024":47477,"uesday":47478,"ĠAppears":47479,"achine":47480,"Problem":47481,"ousy":47482,"ramid":47483,"nox":47484,"··":47485,"omething":47486,"ĠPurg":47487,"artney":47488,"Ġ0000":47489,"psey":47490,"Ġglutamate":47491,"ĠActivate":47492,"Repl":47493,"Priv":47494,"cyclop":47495,"ĠHispan":47496,"atsuki":47497,"Likewise":47498,"JOHN":47499,"POSE":47500,"pherd":47501,"schild":47502,"Ġsuffix":47503,"åIJ":47504,"Ġoptionally":47505,"ĠRecomm":47506,"ĠSpawn":47507,"ARDIS":47508,"Ġinconsist":47509,"Ġenglish":47510,"Beta":47511,"ĠContains":47512,"uddenly":47513,"Ġls":47514,"Dynamic":47515,"åĽ":47516,"Ġ{{":47517,"dq":47518,"Hmm":47519,"oliberal":47520,"ĠCarnage":47521,"ĠRebirth":47522,"incerity":47523,"Ġproletariat":47524,"ĠCrafting":47525,"Explore":47526,"Ġeld":47527,"ĠAnarch":47528,"Ġ(>":47529,"ĠClockwork":47530,"ĠProced":47531,"APTER":47532,"ĠSorcerer":47533,"âĶ":47534,"ĠSnape":47535,"elist":47536,"Balance":47537,"Tube":47538,"Ġ--------------------":47539,"Ġnostalg":47540,"ACTED":47541,"ĠVID":47542,"soever":47543,"ignt":47544,"Ġhypothal":47545,"ĠObj":47546,"igure":47547,"ĠElves":47548,"gorithm":47549,"Romney":47550,"idable":47551,"renheit":47552,"aptic":47553,"Ġnonex":47554,"Profile":47555,"Ġscient":47556,"ĠAchievements":47557,"ĠReload":47558,"Products":47559,"ampire":47560,"pread":47561,"ĠYamato":47562,"Thread":47563,"ĠFML":47564,"ĠForsaken":47565,"Statistics":47566,"Ġ([":47567,"utsu":47568,"nces":47569,"...?":47570,"upload":47571,"Typ":47572,"ĠReflex":47573,"Dial":47574,"Ġspawns":47575,"Server":47576,"Ġacquaint":47577,"iterranean":47578,"='":47579,"Device":47580,"ר":47581,"ocaly":47582,"Remove":47583,"Ġ=====":47584,"Ġabdom":47585,"ideos":47586,"Dual":47587,"Fax":47588,"Ġbesie":47589,"ĠAdin":47590,"Ġdescrib":47591,"Ġiod":47592,"Limit":47593,"aunders":47594,"ĠAssassins":47595,"xxxx":47596,"ulner":47597,"Shipping":47598,"Item":47599,"fortune":47600,"Ġcipher":47601,"mA":47602,"acerb":47603,"ebus":47604,"Ġmodifiers":47605,"Added":47606,"prisingly":47607,"Dir":47608,"ĠArchangel":47609,"umbnails":47610,"Huh":47611,"ĠWARN":47612,"Role":47613,"usional":47614,"Ġcortical":47615,"ĠSCP":47616,"ĠException":47617,"ĠWarhammer":47618,")))":47619,"](":47620,"Ġsynaptic":47621,"Ġcached":47622,"archment":47623,"Ġtarg":47624,"Filter":47625,"ĠHades":47626,"Ġprinc":47627,"halla":47628,"ptoms":47629,"Ïģ":47630,"ructose":47631,"termination":47632,"Ġcompe":47633,"define":47634,"Ġprosec":47635,"require":47636,"ĠCorpse":47637,"Abstract":47638,"********************************":47639,"Used":47640,"ĠIbid":47641,"trak":47642,"ä¸Ń":47643,"ĠGABA":47644,"åĬ":47645,"ĠHegel":47646,"Jere":47647,"odore":47648,"í":47649,"namese":47650,"Origin":47651,"ĠMastery":47652,"gerald":47653,"Charges":47654,"--------------------":47655,"Forge":47656,"comings":47657,"åį":47658,"Ġ(&":47659,"Ġgrap":47660,"Mask":47661,"ĠGundam":47662,"generic":47663,"ĠMalf":47664,"raphics":47665,"Internal":47666,"ourge":47667,"Ġirresist":47668,"sterdam":47669,"
Ġendogenous":47670,"Export":47671,"Ġë":47672,"poons":47673,"Ġabund":47674,"ĠQuantity":47675,"Issue":47676,"âĪĴ":47677,"cknow":47678,"Anonymous":47679,"ĠDRAG":47680,"Wikipedia":47681,"Ġsubdu":47682,"iverpool":47683,"apesh":47684,"Ability":47685,"ĠCentOS":47686,"iseum":47687,"lycer":47688,"Untitled":47689,"Ġlineback":47690,"Ġtomat":47691,"byte":47692,"tile":47693,"linux":47694,"Palest":47695,"canon":47696,"FAULT":47697,"ĠkHz":47698,"Ġhelic":47699,"ĠIGF":47700,"WARE":47701,"Feature":47702,"ĠGraveyard":47703,"ĠNemesis":47704,"akuya":47705,"inement":47706,"Ġwhence":47707,"ractical":47708,"Ping":47709,"tesque":47710,"scroll":47711,"espie":47712,"Ġasynchronous":47713,"ocre":47714,"Measure":47715,"morph":47716,"std":47717,"Settings":47718,"Course":47719,"Ġ],":47720,"Ïĥ":47721,"Documents":47722,"estern":47723,"Ġtf":47724,"Ġcircumcised":47725,"geant":47726,"Ġconject":47727,"ĠFolder":47728,"outube":47729,"ĠMedline":47730,"Status":47731,"ctr":47732,"anoia":47733,"ĠPowerShell":47734,"Chel":47735,"Loop":47736,"Ġresize":47737,"aphael":47738,"workshop":47739,"velength":47740,"hover":47741,"flush":47742,"Ġβ":47743,"Task":47744,"pedia":47745,"ptin":47746,"bidden":47747,"windows":47748,"ĠCaucas":47749,"aml":47750,"isoft":47751,"Ġrs":47752,"cgi":47753,"urrection":47754,"miah":47755,"ÏĤ":47756,"Ġplaythrough":47757,"Reddit":47758,"׾":47759,"Ġannotation":47760,"Ġnobles":47761,"seq":47762,"mares":47763,"Ġwik":47764,"foreseen":47765,"RPG":47766,"Ġreper":47767,"aredevil":47768,"arcity":47769,"/\"":47770,"Ġ});":47771,"Ġdiscont":47772,"ĠBinding":47773,"answered":47774,"Mesh":47775,"ĠMPEG":47776,"Ġperceptual":47777,"OTAL":47778,"ursive":47779,"ãģĦ":47780,"Ġplun":47781,"onential":47782,"ãĤ":47783,"ĠReloaded":47784,"iscopal":47785,"ĠDespair":47786,"FIX":47787,"Ġheterogeneity":47788,",[":47789,"ichick":47790,"DCS":47791,"Ġcooldown":47792,"................":47793,"Ġsomew":47794,"Battery":47795,"stract":47796,"Attempt":47797,"allery":47798,"ĠNept":47799,"Ġtac":47800,"ĠElemental":47801,"Function":47802,"Ġbindings":47803,"versive":47804,"ĠWarlock":47805,"Response":47806,"ĠNPCs":47807,"ollower":47808,"ĠReborn":47809,"Ġphenotype":47810,"uscript":47811,"Ġpecul":47812,"!/":47813,"Unique":47814,"ĠFreeBSD":47815,"ĠChero":47816,"Ġcolle":47817,"gently":47818,"Empty":47819,"rss":47820,"Ġdd":47821,"forge":47822,"ĠTraps":47823,"×Ķ":47824,"iblical":47825,"---------":47826,"uminati":47827,"login":47828,"asus":47829,"xual":47830,"ĠMiko":47831,"ĠDrac":47832,"ssh":47833,"Submit":47834,"ĠMultiplayer":47835,"leanor":47836,"Orig":47837,"anism":47838,"peror":47839,"ĠESV":47840,"Ġencour":47841,"å°":47842,"ĠPLoS":47843,"ĠCrusher":47844,"ocrates":47845,"ynchronous":47846,"§":47847,"ĠLuffy":47848,"Lastly":47849,"Ġdiffere":47850,"okane":47851,"Enh":47852,"ursor":47853,"Ġapopt":47854,"ĠTotem":47855,"ä½":47856,"Honest":47857,"xml":47858,"Created":47859,"Ġteleport":47860,"NRS":47861,"ccess":47862,"ilitary":47863,"ackets":47864,"Ġenchantment":47865,"ĠCunning":47866,"ortmund":47867,"Altern":47868,"Alternatively":47869,"ĠLuthor":47870,"Publisher":47871,"GBT":47872,"çĶ":47873,"Activity":47874,"Ġleptin":47875,"æĪ":47876,"ĠStarfleet":47877,"å¸":47878,"oooooooo":47879,"Ġlawy":47880,"Frag":47881,"ת":47882,"yright":47883,"cookie":47884,"Finish":47885,"wikipedia":47886,"ĠAbilities":47887,"interface":47888,"Ġglared":47889,"Engineers":47890,"ĠAtk":47891,"oteric":47892,"Ġbyte":47893,"ossibility":47894,"Label":47895,"ĠCSV":47896,"Ġè":47897,"ĠOblivion":47898,"android":47899,"rehensive":47900,"ĠCommands":47901,"clud":47902,"ĠTutorial":47903,"retched":47904,"irlwind":47
905,"conserv":47906,"ministic":47907,"void":47908,"ernels":47909,"alias":47910,"ĠDraco":47911,"desktop":47912,"ĠMormonism":47913,"oÄŁ":47914,"kef":47915,"Ġtimestamp":47916,"WAYS":47917,"ãģĹ":47918,"\"(":47919,"eneg":47920,"CHAT":47921,"Ġnpm":47922,"ĠGrenade":47923,"rongh":47924,"dinand":47925,"Definition":47926,"ĠInteger":47927,"Ġmodifier":47928,"Ġdex":47929,"ĠParameters":47930,"andestine":47931,"ĠSHALL":47932,"Purchase":47933,"enaries":47934,"Ġstarship":47935,"Armor":47936,"Skill":47937,"Ġlookup":47938,"verages":47939,"Minimum":47940,"ĠBleach":47941,"Ġdf":47942,"inosaur":47943,"ixel":47944,"Zip":47945,"temp":47946,"ruby":47947,"Fram":47948,"sword":47949,"Minecraft":47950,"strous":47951,"Client":47952,"ĠBarbarian":47953,"æĹ":47954,"USER":47955,"ĠMehran":47956,"axies":47957,"ermanent":47958,"ĠHeader":47959,"ablishment":47960,"hyde":47961,"Snake":47962,"ĠTelesc":47963,"Pocket":47964,"Ġ........":47965,"Destroy":47966,"Method":47967,"ĠZup":47968,"olulu":47969,"Ġunemploy":47970,"Temp":47971,"ĠExplicit":47972,"人":47973,"cache":47974,"innamon":47975,"Ġunavoid":47976,"Summary":47977,"Ġappre":47978,"Ġtaxp":47979,"XXX":47980,"ieval":47981,"ĠSummon":47982,"å¤":47983,"Lear":47984,"ibliography":47985,"CLASS":47986,"dimension":47987,"ĠHorde":47988,"Ġfilesystem":47989,"ĠQiao":47990,"obbies":47991,"DIR":47992,"Ġimpedance":47993,"éĩ":47994,"Names":47995,"ĠDrupal":47996,"Applic":47997,"imei":47998,"ynchron":47999,"Ire":48000,"ĠMinion":48001,"ĠHaste":48002,"ä¿":48003,"Ġ(=":48004,"LinkedIn":48005,"Maps":48006,"ifacts":48007,"Damage":48008,"odynam":48009,"ĠShroud":48010,"Ancient":48011,"enhagen":48012,"Tact":48013,"anship":48014,"aturdays":48015,"ãģ«":48016,"ikhail":48017,"ãģ®":48018,"framework":48019,"lication":48020,"âĢ¦]":48021,"Plug":48022,"ĠLilith":48023,"browser":48024,"offset":48025,"ĠJuda":48026,"ciating":48027,"console":48028,"Ġ=================":48029,"._":48030,"ĠPuzz":48031,"OPLE":48032,"erial":48033,"OHN":48034,"ĠGolem":48035,"ierrez":48036,"Ġ},":48037,"inition":48038,"insula":48039,"ĠEntered":48040,"greSQL":48041,"ĠFlask":48042,"ĠXCOM":48043,"fixes":48044,"ĠWeasley":48045,"arser":48046,"Ġrc":48047,"microsoft":48048,"HHHH":48049,"INFO":48050,"rehend":48051,"Ġpolymorph":48052,"Button":48053,"âī":48054,"QUI":48055,"twitch":48056,"jriwal":48057,"ĠSaiyan":48058,"Ġadherent":48059,"acters":48060,"arthed":48061,"âĢł":48062,"Ġfoss":48063,"ã":48064,"Quote":48065,"ependent":48066,"Ġhorr":48067,"UGC":48068,"Weiss":48069,"styles":48070,"advertisement":48071,"Credits":48072,"Lua":48073,"ĠUCH":48074,"Ġhorrend":48075,"Ġminion":48076,">,":48077,"ãĥ³":48078,"Ġinclud":48079,"Compar":48080,"Ġ[]":48081,"Ġ(<":48082,"Phones":48083,"paralleled":48084,"HTML":48085,"Ġ(%":48086,"raltar":48087,"Ġamd":48088,"Maximum":48089,"ĠSolitaire":48090,"SCP":48091,"ĠVaugh":48092,"ĠCLR":48093,"database":48094,"module":48095,"̶":48096,"Capture":48097,"Window":48098,"ubuntu":48099,"Includes":48100,"ĠUriel":48101,"ORPG":48102,"κ":48103,"âĪ":48104,"ä¸Ģ":48105,"Ġdexter":48106,"ĠGlac":48107,"slice":48108,"HAHAHAHA":48109,"\\\"":48110,"lations":48111,"ÙIJ":48112,"ĠAUTH":48113,"earch":48114,"ĠSocket":48115,"Character":48116,"Sort":48117,"Ġindist":48118,"/_":48119,"ĠAntar":48120,"ifix":48121,"Ġlich":48122,"variable":48123,"_(":48124,"Ġgui":48125,"Herm":48126,"elvet":48127,"è¯":48128,"Developer":48129,"Ġkcal":48130,"ciation":48131,"Transaction":48132,"Ġdocker":48133,"###":48134,"ĠVegeta":48135,"Result":48136,"ocamp":48137,"aughtered":48138,"Increase":48139,"aples":48140,"iannopoulos":48141,"zbek":48142,"estyles":48143,"emonium":48144,"è¿":48145,"Ġ
FANT":48146,"Reason":48147,"Elsewhere":48148,"\"\"":48149,"ĠArtifact":48150,"Authent":48151,"herical":48152,"Ġmembr":48153,"socket":48154,"Elsa":48155,"Condition":48156,"Ġlapt":48157,"Ġsorcerer":48158,"Layer":48159,"apters":48160,"Ġveter":48161,"Myth":48162,"ensical":48163,"ÏĢ":48164,"noxious":48165,"Ġunpre":48166,"Flags":48167,"OOOOOOOO":48168,"Ġincent":48169,"Combat":48170,"Session":48171,"Ġteleportation":48172,"éĢ":48173,"ortment":48174,"Admin":48175,"Fixed":48176,"×Ļ":48177,"Ġconfir":48178,"ãģŁ":48179,"morrow":48180,"osponsors":48181,"\\/":48182,"ictionary":48183,"Num":48184,"Ġquir":48185,"åº":48186,"à¨":48187,"Ġ<<":48188,"Attempts":48189,"ãģ§":48190,"λ":48191,"Features":48192,"XXXX":48193,"Ġinflamm":48194,"VERSION":48195,"ortality":48196,"spawn":48197,"ratulations":48198,"Ġcharism":48199,"Ġ&&":48200,"Dialogue":48201,"luster":48202,"<<":48203,"args":48204,"redients":48205,"Ġpredicate":48206,"qqa":48207,"etheus":48208,"Ġ(!":48209,"Ġshowc":48210,"cmd":48211,"bringer":48212,"Ġcoh":48213,"Input":48214,"ĠFANTASY":48215,"Ġfict":48216,"Blocks":48217,"Install":48218,"vector":48219,"umblr":48220,"agnar":48221,"Array":48222,"Ġembry":48223,"Ġtheoret":48224,"Ġhref":48225,"irrel":48226,"irements":48227,"iations":48228,"Ġ(/":48229,"Thumbnail":48230,"Ġhashes":48231,"^^":48232,"Copy":48233,"Ġeq":48234,"translation":48235,"Favorite":48236,"Fail":48237,"Ġogre":48238,"isites":48239,"Merit":48240,"ãģ¦":48241,"DATA":48242,"rarily":48243,"igmatic":48244,"Sequ":48245,"Els":48246,"ãģª":48247,"lehem":48248,"requency":48249,"aughed":48250,"Ġdistingu":48251,"Ġartific":48252,"Ġdwarves":48253,"Í":48254,"resy":48255,"~~":48256,"sofar":48257,"ideon":48258,"ozyg":48259,"EEEE":48260,"ĠMelee":48261,"大":48262,"tumblr":48263,"ssl":48264,"Wra":48265,"ONSORED":48266,"Ġvowel":48267,"},":48268,"Vari":48269,"cientious":48270,"Node":48271,"Ġsorce":48272,"========":48273,"perse":48274,"Detailed":48275,"isphere":48276,"Background":48277,"ĺħ":48278,"Redd":48279,"ìĿ":48280,"ãģ¨":48281,"ĠCTRL":48282,"Ġç":48283,"iculty":48284,"ername":48285,"Ġns":48286,"Deploy":48287,"Ġhapp":48288,"Ġ///":48289,"Begin":48290,"Ġgp":48291,"$.":48292,"Output":48293,"Suggest":48294,"×IJ":48295,"ĠToggle":48296,"Ġnutrit":48297,"Ġ\\\"":48298,"Ġpreval":48299,"Ġsubreddits":48300,"Menu":48301,"Amount":48302,"ĠWasteland":48303,"Ġsprites":48304,"Ġshader":48305,"Ġ;)":48306,"NAME":48307,"CLUD":48308,"Ġgoblin":48309,"Refer":48310,"ÙĴ":48311,"á¹":48312,"Improved":48313,"endiary":48314,"Ġassail":48315,"chieve":48316,"reply":48317,"Ġcontrad":48318,"cients":48319,"GROUP":48320,"Controller":48321,"omsky":48322,"chemist":48323,"packages":48324,"ombies":48325,"scl":48326,"Ġibn":48327,"çĽ":48328,":(":48329,"ĠMinotaur":48330,"niper":48331,"====":48332,"Ġsubsc":48333,"è¦":48334,"Ġinteger":48335,"Ġ\"-":48336,"Ġtheorem":48337,"utenberg":48338,"Trigger":48339,"github":48340,"ä¼":48341,"##":48342,"xtap":48343,"oké":48344,"ilial":48345,"idepress":48346,":\\":48347,"Param":48348,"Correction":48349,"ïve":48350,"Chest":48351,"ש":48352,"ĠÏĦ":48353,"Ġrespawn":48354,"Ġrall":48355,"Ġcreatine":48356,"umsy":48357,"ĠTemplate":48358,"foo":48359,"query":48360,"Ġmanufact":48361,"Hardware":48362,"iframe":48363,"Ġ-------":48364,"Ġrecip":48365,"ĠAttributes":48366,"Ġforeskin":48367,"ãĤĭ":48368,"ãĥĦ":48369,"uania":48370,"................................................................":48371,"Ġphylogen":48372,"eaturing":48373,"Ġsprite":48374,"Ġinvari":48375,"DonaldTrump":48376,"({":48377,"ĠMalfoy":48378,"Gamer":48379,"ĠPlugin":48380,"γ":48381,"Query":48382,"ĠPuzzles":48383,"inventory":48384,"trl":4
8385,"Insert":48386,"Ġawa":48387,"ĠWerewolf":48388,"Ġhorizont":48389,"×ŀ":48390,"Ġcunt":48391,"]]":48392,"ĠByz":48393,"Mouse":48394,"Ġ[[":48395,"ĠCthulhu":48396,"ĠDRAGON":48397,"Default":48398,"ĠPresbyter":48399,"Ġff":48400,"Ġorcs":48401,"Construct":48402,"ĠDebug":48403,"Ġ*/":48404,"×ij":48405,"Ġembr":48406,"License":48407,"css":48408,"incinn":48409,"Prosecut":48410,"Ġsugg":48411,"å¾":48412,"ĠUndead":48413,"æĿ":48414,"Ġfs":48415,"Ġthw":48416,"Vector":48417,"åĮ":48418,"settings":48419,"å¯":48420,"Ġssh":48421,"ĠConverted":48422,"ãĤĴ":48423,"risome":48424,"Ġagre":48425,"Collection":48426,"cmp":48427,"puter":48428,"alloc":48429,"Ġé":48430,"ascade":48431,"ĠSpells":48432,"Ġ:-)":48433,"Haunted":48434,"Ġadolesc":48435,"FORMATION":48436,"ĠImperium":48437,"ãĥ¼":48438,"Supplement":48439,"Render":48440,"Theme":48441,"ĠTorment":48442,"([":48443,"ëĭ":48444,"Ġhtml":48445,"Ġjuven":48446,"ĠSiber":48447,"Ġdaemon":48448,"ivariate":48449,"objects":48450,"negie":48451,"Ġindu":48452,"landish":48453,"Meta":48454,"Impl":48455,"Ġglyph":48456,"Ġ-->":48457,"Ġstreng":48458,"agascar":48459,"guyen":48460,"((":48461,")[":48462,"ĠNorn":48463,"Ġhippocamp":48464,"Ġ¯":48465,"îĢ":48466,"Connection":48467,"PATH":48468,"mbuds":48469,"ĠShards":48470,"Ġadvoc":48471,"Ġsimulac":48472,"âĸij":48473,"!?\"":48474,"ĠPotion":48475,"Ġamulet":48476,"ĠFnatic":48477,"Ġcryptoc":48478,"wav":48479,"radius":48480,"pkg":48481,"ĠMFT":48482,"æĢ":48483,"Ġtoile":48484,"Items":48485,"ifference":48486,"errors":48487,"ĠCelt":48488,"Ġunpop":48489,"ilogy":48490,"6666":48491,"hesda":48492,"Instruct":48493,"å·":48494,"Materials":48495,"ettings":48496,"Percent":48497,"Ġresistor":48498,"tymology":48499,"Ġdeprecated":48500,"Ġgrep":48501,"ĠWRITE":48502,"Ġtriv":48503,"Ġscrut":48504,"[/":48505,"anyl":48506,"skirts":48507,"MSN":48508,"ĠCodec":48509,"ecd":48510,"Anth":48511,"){":48512,"%]":48513,"veyard":48514,"aspberry":48515,"ãĢ":48516,"Reward":48517,"rha":48518,"Stretch":48519,"]-":48520,"Prev":48521,"Context":48522,"Ġlinux":48523,"HAHA":48524,"perties":48525,"ĠVIDE":48526,"Domain":48527,"Ġmurd":48528,"ĠLegions":48529,"apache":48530,"æŃ":48531,"Pause":48532,"Temperature":48533,"ufact":48534,"igslist":48535,"ĠRetrieved":48536,"èª":48537,"ãģĮ":48538,"Ingredients":48539,"ruary":48540,"dyl":48541,"Alias":48542,"ĠÎĶ":48543,"Ġinval":48544,"amsung":48545,"!--":48546,"olean":48547,"æī":48548,"ãģ¯":48549,"Ġcoefficients":48550,"ĠDHCP":48551,"âĨĴ":48552,"utonium":48553,":[":48554,"âĹ":48555,"cli":48556,"Container":48557,"å¼":48558,"nexus":48559,"SOURCE":48560,"Ò":48561,"=/":48562,"Ġmysql":48563,"ĠGained":48564,"Ġ/*":48565,"uncture":48566,"Ġstatically":48567,"âĸł":48568,"æĺ¯":48569,"æ°":48570,"estamp":48571,"Cache":48572,"ulkan":48573,"staking":48574,"apter":48575,"ãģ¾":48576,"Ġμg":48577,"Ġtremend":48578,"ĠPiercing":48579,"naissance":48580,"ĠHealer":48581,"Enabled":48582,"éģ":48583,"âĸ":48584,"ĠThumbnails":48585,"Ġhither":48586,"Format":48587,"utherland":48588,"íķ":48589,"Ġdestro":48590,"fff":48591,"execute":48592,"msg":48593,"romancer":48594,"ĠCanaver":48595,"ĠVaults":48596,"oided":48597,"iage":48598,"Ġimg":48599,"summary":48600,"]);":48601,"ĠABE":48602,"ĠGamergate":48603,"utherford":48604,"Ġoverwrite":48605,"enment":48606,"æķ":48607,"Ġsystemd":48608,"tif":48609,"]).":48610,"ãĤ¤":48611,"Widget":48612,"======":48613,"(-":48614,"Ġ\"+":48615,"ĠIncarnation":48616,"æĥ":48617,"���":48618,"GUI":48619,"èĥ":48620,"forums":48621,"Ġrunes":48622,"Ġâī¤":48623,"Ġdefic":48624,"Distance":48625,"directory":48626,"ĠHorus":48627,"iltr":48628,"ortium":48629,"Ġ./":48630,"bda":48631,"owship":48
632,"ĠâĨij":48633,"}.":48634,"åĩ":48635,"1027":48636,"Weapons":48637,"lucent":48638,"Ġauth":48639,";;":48640,"Recommended":48641,"Ġsurv":48642,"Ġvm":48643,"ĠStronghold":48644,"Ġparan":48645,"ĠTrance":48646,"æĺ":48647,"Ġsovere":48648,"Ġcorrid":48649,"ĠPwr":48650,"Ġ[/":48651,"Ġseq":48652,"Population":48653,"Ġ[];":48654,"Ġreferen":48655,"ĠInstr":48656,"ĠStamina":48657,"kernel":48658,"Python":48659,"-+":48660,"Ġallele":48661,"éĽ":48662,"isode":48663,"ä¸į":48664,"otonin":48665,"modules":48666,"Notable":48667,"Spell":48668,"\\\\":48669,"Pref":48670,"Ġdatas":48671,"setup":48672,"Ġhapl":48673,"Height":48674,"åĭ":48675,"ãģ£":48676,"]),":48677,"Handle":48678,"umenthal":48679,"Package":48680,"Ġenthus":48681,"Ġunsus":48682,"Narr":48683,"Examples":48684,"FAQ":48685,"REDACTED":48686,"Ġnotor":48687,"Enable":48688,"Pattern":48689,"aeda":48690,">.":48691,"CHECK":48692,"Ġ����":48693,"Ġ'.":48694,"Ġãĥ":48695,"append":48696,"����":48697,"gemony":48698,"terness":48699,"ĠHaku":48700,"NVIDIA":48701,"queue":48702,"Bind":48703,"Ġneigh":48704,"armor":48705,"retty":48706,"LOD":48707,"plugins":48708,"Ġ/>":48709,"TYPE":48710,"Ġ4096":48711,"-------":48712,"Preview":48713,"FML":48714,"Ġproletarian":48715,"zees":48716,"enfranch":48717,"ãģĨ":48718,"Ctrl":48719,"Module":48720,"ĠSurviv":48721,"ĠStarcraft":48722,"rored":48723,"reddit":48724,"Ġrul":48725,"Ġtx":48726,"Ġmage":48727,"Sword":48728,"Ġ~/":48729,"Effects":48730,"éļ":48731,"ä¹":48732,"Sensor":48733,"Solution":48734,"ãģĻ":48735,"Arcade":48736,"Ġpredec":48737,"Values":48738,"Length":48739,"Ġfortun":48740,"ttp":48741,"\"[":48742,"tmp":48743,"ĠBerserker":48744,"åĨ":48745,"ositories":48746,"Ġcouncill":48747,"ffff":48748,"));":48749,"Recipe":48750,"ĠASCII":48751,"âĦ¢:":48752,"ä":48753,"Ġhorm":48754,"=>":48755,"sers":48756,"ãģĭ":48757,"Recommend":48758,"['":48759,"agame":48760,"Animation":48761,"aucuses":48762,"Discussion":48763,"Ġhelicop":48764,"å¿":48765,"Float":48766,"Component":48767,"instance":48768,"Ġfoo":48769,"localhost":48770,"=-":48771,"Offset":48772,"Psy":48773,"ĠGohan":48774,"buquerque":48775,"Ġdefe":48776,"chwitz":48777,"parse":48778,"Ġdors":48779,"Ġspons":48780,"Ġasync":48781,"agonists":48782,"Ġindo":48783,".>>":48784,"ĠDisciple":48785,"Ġfilename":48786,"rency":48787,"ĠDise":48788,"Ġ\"/":48789,"template":48790,"ãĤ¹":48791,"swers":48792,"Ġ++":48793,"Ġ[(":48794,"thora":48795,"ĠDepths":48796,"livious":48797,"Ġdisadvant":48798,"foundland":48799,"Upload":48800,"Ġ§§":48801,"Ġsophistic":48802,";}":48803,"izont":48804,"\"}":48805,"estial":48806,"Ranked":48807,"ĠOccupations":48808,"LEASE":48809,"ĠOgre":48810,"folder":48811,"Plot":48812,"farious":48813,"Ġsuscept":48814,"Types":48815,"Discuss":48816,"Ġ'/":48817,"æµ":48818,"earable":48819,"æ³":48820,"Tile":48821,"iatus":48822,"åŃ":48823,"Ġreperto":48824,"Helper":48825,"Returns":48826,"ä¸Ĭ":48827,"imaru":48828,"Ġreq":48829,"Ġdissatisf":48830,"multipl":48831,"}{":48832,"-[":48833,"itial":48834,"*/":48835,"Config":48836,"Example":48837,"ĠjQuery":48838,"Mods":48839,"ĠGPIO":48840,"Ġlaun":48841,"layout":48842,"cised":48843,"Ġ......":48844,"+++":48845,"prototype":48846,"Exception":48847,"Ġsubsections":48848,"Ġresemb":48849,"Ġâĩ":48850,"ĠPubMed":48851,"username":48852,"Ġaggro":48853,"éĥ":48854,"Ġ};":48855,"ĠMages":48856,"ryu":48857,"apons":48858,"Optional":48859,"ĠAncients":48860,"ãĤĬ":48861,"Quotes":48862,"oaded":48863,"Ġsuspic":48864,"inline":48865,"omial":48866,"ĠMahjong":48867,"auntlets":48868,"Ġanarchism":48869,"Ġsubclass":48870,"ĠMLG":48871,"...]":48872,"Dialog":48873,"uphem":48874,"Ġrecursive":48875,"7601":48876,"frac":48877
,"Else":48878,"ĠSeverus":48879,"},{\"":48880,"ĠCLIENT":48881,"Ġjavascript":48882,"sama":48883,"ĠLearns":48884,"ãĤĤ":48885,"Upgrade":48886,"Listener":48887,"Ġsnipp":48888,"Ġrune":48889,"ĠTTL":48890,"ertation":48891,"olicy":48892,"=\"\"":48893,"«ĺ":48894,"Ġexpr":48895,"ovych":48896,"Ġãģ":48897,"_-_":48898,"munition":48899,"////":48900,"func":48901,">>>>":48902,"Provider":48903,"Ïī":48904,"BUG":48905,"Ġ[-":48906,"Ġarrang":48907,"merce":48908,"ãĥ":48909,"incarn":48910,"Valid":48911,"ĠAether":48912,"ãĤĵ":48913,"ĠUTF":48914,"ĠMonstrous":48915,"ãĤĮ":48916,"hedon":48917,"áµ":48918,":#":48919,"ĠFrieza":48920,"padding":48921,"Reviewer":48922,"Ġpsychiat":48923,"yrinth":48924,"ĠâĶĤ":48925,"hillary":48926,"Static":48927,"Newsletter":48928,"Avg":48929,"Ġfn":48930,"Topic":48931,"choes":48932,"Ġnewsp":48933,"á¸":48934,"Ġ[+":48935,"~~~~~~~~~~~~~~~~":48936,":]":48937,"apego":48938,"buf":48939,"Translation":48940,"ById":48941,"Ġmmol":48942,"ãĥ¼ãĥ":48943,"å½":48944,"ãĤī":48945,"Ġparser":48946,"ãĥª":48947,"`,":48948,"Lair":48949,")}":48950,"ypes":48951,"adobe":48952,"Ġancest":48953,"ernel":48954,"ĠNULL":48955,"ç«":48956,"anguages":48957,"Increases":48958,"æĦ":48959,"utorial":48960,"ithmetic":48961,"dll":48962,"ĠArcane":48963,"çī":48964,"Ġtc":48965,"urtles":48966,"èĪ":48967,"Bytes":48968,"Slot":48969,"ĠBahá":48970,"Weapon":48971,"widget":48972,"querque":48973,"Ġembodiments":48974,"å¥":48975,"WARN":48976,"swer":48977,"thumbnails":48978,"FFFF":48979,"inguishable":48980,"Ġâī":48981,"Ġ${":48982,"AAAAAAAA":48983,"Conclusion":48984,"ĻĤ":48985,"disable":48986,"Rect":48987,"Ġsubp":48988,"Ġ().":48989,"ĠDetected":48990,"èĢ":48991,"[]":48992,"Ġcoerc":48993,"ĠmM":48994,"recated":48995,"fusc":48996,"ĠSorce":48997,"çĶŁ":48998,").[":48999,"Ġ})":49000,"mobi":49001,"yip":49002,"Acknowled":49003,"ternity":49004,"iqueness":49005,"ython":49006,"><":49007,"Ġstd":49008,"Url":49009,"Ġnamespace":49010,"Ġtion":49011,"oother":49012,"Ó":49013,"Ġhemor":49014,"Ġrg":49015,"ventory":49016,"ãĤ¢":49017,"anamo":49018,"Socket":49019,"Topics":49020,"apeshifter":49021,"gnu":49022,"Ġdetrim":49023,"`.":49024,"romeda":49025,"çIJ":49026,"Ġlambda":49027,"Compan":49028,"Variable":49029,"Ġusb":49030,"ĠAdamant":49031,"ournal":49032,"Ġcovari":49033,"ãĥ©":49034,"éĸ":49035,"åİ":49036,"otaur":49037,"Ġ(),":49038,"Marginal":49039,"ãģı":49040,"Ġphysic":49041,"adeon":49042,"RESULTS":49043,"200000":49044,"ãģį":49045,"udeb":49046,"ãģĵ":49047,"COMPLE":49048,"Ġmsg":49049,"ghazi":49050,"/*":49051,"ĠDeity":49052,"Ġdisapp":49053,"Availability":49054,"Ġillum":49055,"à©":49056,"ptives":49057,",âĢĶ":49058,"chnology":49059,"Ġaccur":49060,"Ġapi":49061,"Obj":49062,"ãĤ«":49063,"ãĤ¸":49064,"ä¹ĭ":49065,"ËĪ":49066,"Ġtcp":49067,"Required":49068,".<":49069,"\".[":49070,"Ġ~/.":49071,"Ġobser":49072,"RFC":49073,"Ġintegers":49074,"åī":49075,"Installation":49076,"Ô":49077,"ó":49078,"csv":49079,"ãĥ«":49080,"ĠNoticed":49081,"âĸĵ":49082,"Tumblr":49083,"Reply":49084,"||":49085,"Ġconclud":49086,"Ġ))":49087,"ebin":49088,"sql":49089,"Closure":49090,"++++":49091,"],[":49092,"âĹı":49093,"Ġprolet":49094,"Ġ>=":49095,"estinal":49096,"Ġ[*":49097,"ĠInquisitor":49098,"Ġcmd":49099,"FINE":49100,"CRIP":49101,"Ġvertex":49102,"TeX":49103,"///":49104,"Ö¼":49105,"iscons":49106,"Ġmyster":49107,"Changed":49108,"timeout":49109,"irtual":49110,"Methods":49111,"Ġcerts":49112,"texture":49113,"Roaming":49114,"Proxy":49115,"Override":49116,"éĹ":49117,"utf":49118,"python":49119,"ĠRarity":49120,"ilitarian":49121,"çľ":49122,"().":49123,"æł":49124,"Ġbuf":49125,"åij":49126,"çķ":49127,"Ġ*.":49128,"umerable":49129,"~~~~":491
30,"å¦":49131,"Ġsimultane":49132,"Ġjson":49133,"Requires":49134,"Ġperl":49135,"Interface":49136,"rupal":49137,":":49242,"itialized":49243,"HTTP":49244,"Trivia":49245,"Sov":49246,"wrapper":49247,"={":49248,"ĠAzerb":49249,"aeper":49250,"Ġneighb":49251,"initions":49252,"Ġsts":49253,"ĠSasuke":49254,"#$":49255,"uliffe":49256,"æĸ¹":49257,"++++++++++++++++":49258,"ĠElven":49259,"ãģĤ":49260,"Ġartif":49261,"Folder":49262,"Ġà¨":49263,"åĤ":49264,"Ġphyl":49265,"uggest":49266,"blance":49267,"ãģł":49268,"Requirements":49269,"Usage":49270,"Ġinitialized":49271,"ãģ®æ":49272,"conservancy":49273,"ĠReincarn":49274,")|":49275,"Ġantioxid":49276,"ĠClicker":49277,"Ġunlaw":49278,"Ġ\\(":49279,"ãĥĪ":49280,"Ġ[*]":49281,"Characters":49282,"////////":49283,"ãĢIJ":49284,"ãĤ·":49285,"webkit":49286,"ãĢij":49287,"Ġxp":49288,"alkyrie":49289,"Console":49290,"());":49291,"ĠKorra":49292,"\"))":49293,"oooooooooooooooo":49294,"Timer":49295,"////////////////":49296,"yout":49297,"engeance":49298,"emetery":49299,"Ġmages":49300,"mods":49301,"Null":49302,"Ġphilos":49303,"ascript":49304,"Ġaddon":49305,"ĠâĸĪ":49306,"emale":49307,"----------------------------------------------------------------":49308,"Ġ\\\\":49309,"=[":49310,"ĠParables":49311,"ãĥĨ":49312,"VALUE":49313,"Ġ@@":49314,"Ġuint":49315,"${":49316,"cpp":49317,"%%":49318,"Ġ(âĪĴ":49319,"utils":49320,"prefix":49321,"å°Ĩ":49322,"ãĥŃ":49323,"Completed":49324,"Ġgoto":49325,"ãĤ¯":49326,"Winged":49327,"perty":49328,"[\"":49329,"ãĥİ":49330,"ĠScythe":49331,"Ġæľ":49332,"Ġ!=":49333,"Buffer":49334,"docker":49335,"ĠWATCHED":49336,"èĢħ":49337,"())":49338,"Ġdst":49339,"SIZE":49340,"ĠDemonic":49341,"Ġresil":49342,"ãĤ¿":49343,"Ġpione":49344,"cpu":49345,"++)":49346,"TEXT":49347,"Ġdiscrep":49348,"debian":49349,"quished":49350,"Ġacknow":49351,"Ġtrave":49352,"Ġgcc":49353,"Catalog":49354,"ctrl":49355,"ĠMoroc":49356,"Ġcpu":49357,"Ġ];":49358,"ĠSorceress":49359,"Introduced":49360,"Frames":49361,"Ġcondem":49362,"¶æ":49363,"~~~~~~~~":49364,"ĠEmacs":49365,"][/":49366,"Ġglim":49367,"Init":49368,"ĠPrimordial":49369,"ãĥĥ":49370,"Ġ+=":49371,"Ġblat":49372,"à¼":49373,"------------------------------------------------":49374,"gpu":49375,"ãĥĥãĥĪ":49376,"Ġxml":49377,"Ġboolean":49378,"References":49379,"Ġ?)":49380,"Ġsatell":49381,"Queue":49382,"Ġpestic":49383,"Ġ}}":49384,"Attribute":49385,"Ġdx":49386,"ĠDefin":49387,"Synopsis":49388,"..................":49389,"ãĥ¬":49390,"plugin":49391,"Disable":49392,"0000000000000000":49393,")\\":49394,"ĠIchigo":49395,"println":49396,"rontal":49397,"Setup":49398,"Ġ��������":49399,"å§":49400,"âĸº":49401,"ĠPengu":49402,"ailability":49403,"Duration":49404,"Timeout":49405,"ãĢĮ":49406,"Ġbehav":49407,"Reviewed":49408,"Ġtoget":49409,"\\.":49410,"lished":49411,"Ġthous":49412,"Ġperpend":49413,"ecause":49414,"Layout":49415,"è»":49416,"ĠDexterity":49417,"unsigned":49418,"+=":49419,"[[":49420,"ĠRunes":49421,"ãĤ¦":49422,"};":49423,"})":49424,"FTWARE":49425,"ength":49426,"milo":49427,"duino":49428,"天":49429,"ĠClojure":49430,"ļé":49431,"ãĥ¥":49432,"gradient":49433,"Ġ\"\"\"":49434,"âĨij":49435,"@#":49436,"JSON":49437,"Ġproport":49438,"addr":49439,"});":49440,"ãĥIJ":49441,"ä¸ī":49442,"Ġtmp":49443,"å£":49444,"../":49445,"zsche":49446,"ĠâĪ¼":49447,"Entity":49448,"æ©Ł":49449,"ĠâĶľâĶĢâĶĢ":49450,"filename":49451,"{{":49452,"@@":49453,"ĠSeym":49454,"Ġ/**":49455,"ĠSummoner":49456,"Quantity":49457,"ç·":49458,"Attach":49459,"Ġbool":49460,"Texture":49461,"Ġopio":49462,".}":49463,"ãĥĭ":49464,"integer":49465,"Ġregex":49466,"Ġnomine":49467,"ription":49468,"ãģ®ç":49469,"ãĥķ":49470,"Ġsubparagraph":49471,"GGGG":49472
,"Ġexplan":49473,"Header":49474,"Spawn":49475,"toggle":49476,"²¾":49477,"Abyss":49478,"expr":49479,"ĠZerg":49480,"ĠGrimoire":49481,"Contents":49482,"Instance":49483,"cyclopedia":49484,"ãĥĹ":49485,"ĠTakeru":49486,"=(":49487,"代":49488,"\\)":49489,"Ġrgb":49490,"htt":49491,"bryce":49492,"Ġlivest":49493,"ĠAnnotations":49494,"âĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢ":49495,"berus":49496,"ntil":49497,"Ġskelet":49498,"callback":49499,"åħī":49500,"Joined":49501,"ãĤª":49502,"Ġargs":49503,"artifacts":49504,"Ġå¤":49505,"ÃĽ":49506,"ãĥŀ":49507,"Streamer":49508,"}\"":49509,"Ġunden":49510,"ãĥģ":49511,"Īè":49512,"ãĥ£":49513,"Ġ0004":49514,"Ġ\\'":49515,"ãĤ°":49516,"ĠCONFIG":49517,"Ġ#####":49518,"``":49519,"anguage":49520,"Ġ*)":49521,"Template":49522,"MODE":49523,"Ġ00000000":49524,"'';":49525,">":49625,"Ġlvl":49626,"Footnote":49627,"Iter":49628,"####":49629,"ãĥij":49630,"ĠCarbuncle":49631,"Ġ[+]":49632,"Ġmathemat":49633,"Allows":49634,"Ġ4090":49635,"Async":49636,"ģ«":49637,"Ļ½":49638,"))))":49639,"á½":49640,"Ġcx":49641,"Ġansw":49642,"{\"":49643,"ãĥŁ":49644,"addons":49645,"Filename":49646,"Appearances":49647,"ĠãĢĮ":49648,"Ġaddr":49649,"Ġcharact":49650,"glomer":49651,"Advertisements":49652,"Ġdracon":49653,"ĠFenrir":49654,"Ġ();":49655,"ĠCitiz":49656,"acebook":49657,"Ġparams":49658,"]=":49659,"Ġsubscript":49660,"Ġentreprene":49661,"tnc":49662,"iversal":49663,"Ġmillenn":49664,"ithub":49665,"/>":49666,"Ġ\"{":49667,"Frameworks":49668,"avorite":49669,"Ġ])":49670,"Constructed":49671,"fml":49672,"ãĥį":49673,"################################":49674,"-|":49675,"¥ŀ":49676,"Ġwithd":49677,"ĠCth":49678,"AppData":49679,"Msg":49680,":{":49681,"ãĤ¨":49682,"Ġtuple":49683,"ç¥ŀ":49684,"Ġintrins":49685,"ĠCooldown":49686,"ategory":49687,"^{":49688,"ãĥĬ":49689,"''''":49690,"çĶ°":49691,"ĠDEBUG":49692,"Ġcannabin":49693,"ocobo":49694,"Invalid":49695,"ãĥĢ":49696,"Compat":49697,"Ġ({":49698,"Removed":49699,"Ġconvol":49700,"}:":49701,"interstitial":49702,"Ġ\"":49721,"initialized":49722,"Ġexting":49723,"Poké":49724,"Parameters":49725,"¶ħ":49726,"########":49727,"NULL":49728,"ãĥĩ":49729,"groupon":49730,"\\-":49731,"ãĥı":49732,"ãĤ±":49733,"Ġsubsequ":49734,"ccording":49735,"ĠMODULE":49736,"ĠProtoss":49737,"\"},{\"":49738,"Ġ..............":49739,"Integer":49740,"endif":49741,"ãĥĻ":49742,"parser":49743,"lambda":49744,"Ġcarbohyd":49745,"ĠUnloaded":49746,"_{":49747,"âĸ¬âĸ¬":49748,"Ġdebian":49749,"]}":49750,"ãĤ¶":49751,"Parameter":49752,"ãĤ£":49753,"ãĤ»":49754,"Ġ$_":49755,"İĭ":49756,"Ġiterator":49757,"ãĤ¬":49758,"WINDOWS":49759,"CONCLUS":49760,"Ġ\"\\":49761,"umbn":49762,"(&":49763,"ãĥ©ãĥ³":49764,"usercontent":49765,"ometimes":49766,"METHOD":49767,"ãĥ¢":49768,"potion":49769,"ãĥ¯":49770,"everal":49771,"Ġweap":49772,"minecraft":49773,"================================":49774,"printf":49775,"ĠShinra":49776,"Ġreluct":49777,"\\\",":49778,"Runtime":49779,"xff":49780,"ĠAbyssal":49781,"akeru":49782,"Ġ\\(\\":49783,"\"/>":49784,"efficients":49785,"Ü":49786,"avascript":49787,"Ġbehavi":49788,"++;":49789,"=#":49790,"Attributes":49791,"âĵĺ":49792,"lvl":49793,"¬¼":49794,"/**":49795,"Gameplay":49796,"ĠLeilan":49797,">)":49798,"=\"/":49799,"Ġ));":49800,"ãĥĨãĤ£":49801,"ġ":49802,".":49836,"DEBUG":49837,"âĶģ":49838,"ãĢı":49839,"WithNo":49840,"Redditor":49841,"ĠâĶľ":49842,"Ġfmt":49843,"ãĢİ":49844,"Ġmsec":49845,"ĪĴ":49846,"eatures":49847,"itially":49848,"\"\"\"":49849,"ãĥ¼ãĤ¯":49850,"Textures":49851,"\"},":49852,"\"><":49858,"||||":49859,"ß":49860,"iterator":49861,"è£ħ":49862,"Ĥª":49863,"ojure":49864,"ãħĭãħĭ":49865,"ãĥ¼ãĥ³":49866,"Ġprintln":49867,"Ġ][":49868,"âĸĪâĸĪ":49869,"âķIJ":49870,"\\\
":":49871,"senal":49872,"é¾į":49873,"é¾":49874,"Ġcryst":49875,"ãĥķãĤ¡":49876,"ĠCosponsors":49877,"ãĤ·ãĥ£":49878,"Magikarp":49879,"ĠMagicka":49880,"âĸĪâĸĪâĸĪâĸĪ":49881,",,,,,,,,":49882,"vertisement":49883,"âĶĢâĶĢâĶĢâĶĢ":49884,"ãĥķãĤ©":49885,"luaj":49886,"CLASSIFIED":49887,".''.":49888,"byss":49889,"Ġ{:":49890,"ĠNanto":49891,"Ġptr":49892,"Ġ%%":49893,"Ġteasp":49894,"[_":49895,"ãĥ¤":49896,"ħĭ":49897,"ŃĶ":49898,"Ġpci":49899,"Ġ\"<":49900,"GGGGGGGG":49901,"æĪ¦":49902,"--+":49903,"ãĤ®":49904,"Ġ())":49905,"âĸ¬":49906,"Ġsizeof":49907,"}}}":49908,";;;;;;;;":49909,">]":49910,"âĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪ":49911,"Vaults":49912,"Ġistg":49913,"Ġnewcom":49914,"=]":49915,"¿½":49916,"ĵĺ":49917,"{\\":49918,"Args":49919,"Ġexha":49920,"(\\":49921,"Ġunnecess":49922,"\"}],\"":49923,"ĠUNCLASSIFIED":49924,">(":49925,"ãĤ¢ãĥ«":49926,"æ©":49927,"70710":49928,"Ń·":49929,"ãĥ¼ãĥĨãĤ£":49930,"ĠSakuya":49931,"ãĥĥãĥī":49932,"ĠPyrrha":49933,"escription":49934,"VIDIA":49935,"================================================================":49936,"Ġlooph":49937,"=~":49938,"Ġcumbers":49939,"Ġ)]":49940,"govtrack":49941,"ĠãĤµ":49942,"Ġsubur":49943,"Þ":49944,"Ġâī¡":49945,"Interstitial":49946,"ãĥ¼ãĥĨ":49947,"Ġgobl":49948,"ãĥīãĥ©":49949,"oldown":49950,"ģĸ":49951,"Depths":49952,"Ġ());":49953,"Ġ._":49954,"20439":49955,"Ġç¥ŀ":49956,"ãģ®å®":49957,"ãĤ¼":49958,"Ġ$\\":49959,"âĹ¼":49960,"Ġencount":49961,"Ġ": 48457, "Ġstreng": 48458, "agascar": 48459, "guyen": 48460, "((": 48461, ")[": 48462, "ĠNorn": 48463, "Ġhippocamp": 48464, "Ġ¯": 48465, "îĢ": 48466, "Connection": 48467, "PATH": 48468, "mbuds": 48469, "ĠShards": 48470, "Ġadvoc": 48471, "Ġsimulac": 48472, "âĸij": 48473, "!?\"": 48474, "ĠPotion": 48475, "Ġamulet": 48476, "ĠFnatic": 48477, "Ġcryptoc": 48478, "wav": 48479, "radius": 48480, "pkg": 48481, "ĠMFT": 48482, "æĢ": 48483, "Ġtoile": 48484, "Items": 48485, "ifference": 48486, "errors": 48487, "ĠCelt": 48488, "Ġunpop": 48489, "ilogy": 48490, "6666": 48491, "hesda": 48492, "Instruct": 48493, "å·": 48494, "Materials": 48495, "ettings": 48496, "Percent": 48497, "Ġresistor": 48498, "tymology": 48499, "Ġdeprecated": 48500, "Ġgrep": 48501, "ĠWRITE": 48502, "Ġtriv": 48503, "Ġscrut": 48504, "[/": 48505, "anyl": 48506, "skirts": 48507, "MSN": 48508, "ĠCodec": 48509, "ecd": 48510, "Anth": 48511, "){": 48512, "%]": 48513, "veyard": 48514, "aspberry": 48515, "ãĢ": 48516, "Reward": 48517, "rha": 48518, "Stretch": 48519, "]-": 48520, "Prev": 48521, "Context": 48522, "Ġlinux": 48523, "HAHA": 48524, "perties": 48525, "ĠVIDE": 48526, "Domain": 48527, "Ġmurd": 48528, "ĠLegions": 48529, "apache": 48530, "æŃ": 48531, "Pause": 48532, "Temperature": 48533, "ufact": 48534, "igslist": 48535, "ĠRetrieved": 48536, "èª": 48537, "ãģĮ": 48538, "Ingredients": 48539, "ruary": 48540, "dyl": 48541, "Alias": 48542, "ĠÎĶ": 48543, "Ġinval": 48544, "amsung": 48545, "!--": 48546, "olean": 48547, "æī": 48548, "ãģ¯": 48549, "Ġcoefficients": 48550, "ĠDHCP": 48551, "âĨĴ": 48552, "utonium": 48553, ":[": 48554, "âĹ": 48555, "cli": 48556, "Container": 48557, "å¼": 48558, "nexus": 48559, "SOURCE": 48560, "Ò": 48561, "=/": 48562, "Ġmysql": 48563, "ĠGained": 48564, "Ġ/*": 48565, "uncture": 48566, "Ġstatically": 48567, "âĸł": 48568, "æĺ¯": 48569, "æ°": 48570, "estamp": 48571, "Cache": 48572, "ulkan": 48573, "staking": 48574, "apter": 48575, "ãģ¾": 48576, "Ġμg": 48577, "Ġtremend": 48578, "ĠPiercing": 48579, "naissance": 48580, "ĠHealer": 48581, "Enabled": 48582, "éģ": 48583, "âĸ": 48584, "ĠThumbnails": 48585, "Ġhither": 48586, "Format": 48587, "utherland": 48588, "íķ": 48589, "Ġdestro": 
48590, "fff": 48591, "execute": 48592, "msg": 48593, "romancer": 48594, "ĠCanaver": 48595, "ĠVaults": 48596, "oided": 48597, "iage": 48598, "Ġimg": 48599, "summary": 48600, "]);": 48601, "ĠABE": 48602, "ĠGamergate": 48603, "utherford": 48604, "Ġoverwrite": 48605, "enment": 48606, "æķ": 48607, "Ġsystemd": 48608, "tif": 48609, "]).": 48610, "ãĤ¤": 48611, "Widget": 48612, "======": 48613, "(-": 48614, "Ġ\"+": 48615, "ĠIncarnation": 48616, "æĥ": 48617, "���": 48618, "GUI": 48619, "èĥ": 48620, "forums": 48621, "Ġrunes": 48622, "Ġâī¤": 48623, "Ġdefic": 48624, "Distance": 48625, "directory": 48626, "ĠHorus": 48627, "iltr": 48628, "ortium": 48629, "Ġ./": 48630, "bda": 48631, "owship": 48632, "ĠâĨij": 48633, "}.": 48634, "åĩ": 48635, "1027": 48636, "Weapons": 48637, "lucent": 48638, "Ġauth": 48639, ";;": 48640, "Recommended": 48641, "Ġsurv": 48642, "Ġvm": 48643, "ĠStronghold": 48644, "Ġparan": 48645, "ĠTrance": 48646, "æĺ": 48647, "Ġsovere": 48648, "Ġcorrid": 48649, "ĠPwr": 48650, "Ġ[/": 48651, "Ġseq": 48652, "Population": 48653, "Ġ[];": 48654, "Ġreferen": 48655, "ĠInstr": 48656, "ĠStamina": 48657, "kernel": 48658, "Python": 48659, "-+": 48660, "Ġallele": 48661, "éĽ": 48662, "isode": 48663, "ä¸į": 48664, "otonin": 48665, "modules": 48666, "Notable": 48667, "Spell": 48668, "\\\\": 48669, "Pref": 48670, "Ġdatas": 48671, "setup": 48672, "Ġhapl": 48673, "Height": 48674, "åĭ": 48675, "ãģ£": 48676, "]),": 48677, "Handle": 48678, "umenthal": 48679, "Package": 48680, "Ġenthus": 48681, "Ġunsus": 48682, "Narr": 48683, "Examples": 48684, "FAQ": 48685, "REDACTED": 48686, "Ġnotor": 48687, "Enable": 48688, "Pattern": 48689, "aeda": 48690, ">.": 48691, "CHECK": 48692, "Ġ����": 48693, "Ġ'.": 48694, "Ġãĥ": 48695, "append": 48696, "����": 48697, "gemony": 48698, "terness": 48699, "ĠHaku": 48700, "NVIDIA": 48701, "queue": 48702, "Bind": 48703, "Ġneigh": 48704, "armor": 48705, "retty": 48706, "LOD": 48707, "plugins": 48708, "Ġ/>": 48709, "TYPE": 48710, "Ġ4096": 48711, "-------": 48712, "Preview": 48713, "FML": 48714, "Ġproletarian": 48715, "zees": 48716, "enfranch": 48717, "ãģĨ": 48718, "Ctrl": 48719, "Module": 48720, "ĠSurviv": 48721, "ĠStarcraft": 48722, "rored": 48723, "reddit": 48724, "Ġrul": 48725, "Ġtx": 48726, "Ġmage": 48727, "Sword": 48728, "Ġ~/": 48729, "Effects": 48730, "éļ": 48731, "ä¹": 48732, "Sensor": 48733, "Solution": 48734, "ãģĻ": 48735, "Arcade": 48736, "Ġpredec": 48737, "Values": 48738, "Length": 48739, "Ġfortun": 48740, "ttp": 48741, "\"[": 48742, "tmp": 48743, "ĠBerserker": 48744, "åĨ": 48745, "ositories": 48746, "Ġcouncill": 48747, "ffff": 48748, "));": 48749, "Recipe": 48750, "ĠASCII": 48751, "âĦ¢:": 48752, "ä": 48753, "Ġhorm": 48754, "=>": 48755, "sers": 48756, "ãģĭ": 48757, "Recommend": 48758, "['": 48759, "agame": 48760, "Animation": 48761, "aucuses": 48762, "Discussion": 48763, "Ġhelicop": 48764, "å¿": 48765, "Float": 48766, "Component": 48767, "instance": 48768, "Ġfoo": 48769, "localhost": 48770, "=-": 48771, "Offset": 48772, "Psy": 48773, "ĠGohan": 48774, "buquerque": 48775, "Ġdefe": 48776, "chwitz": 48777, "parse": 48778, "Ġdors": 48779, "Ġspons": 48780, "Ġasync": 48781, "agonists": 48782, "Ġindo": 48783, ".>>": 48784, "ĠDisciple": 48785, "Ġfilename": 48786, "rency": 48787, "ĠDise": 48788, "Ġ\"/": 48789, "template": 48790, "ãĤ¹": 48791, "swers": 48792, "Ġ++": 48793, "Ġ[(": 48794, "thora": 48795, "ĠDepths": 48796, "livious": 48797, "Ġdisadvant": 48798, "foundland": 48799, "Upload": 48800, "Ġ§§": 48801, "Ġsophistic": 48802, ";}": 48803, "izont": 48804, "\"}": 48805, "estial": 48806, 
"Ranked": 48807, "ĠOccupations": 48808, "LEASE": 48809, "ĠOgre": 48810, "folder": 48811, "Plot": 48812, "farious": 48813, "Ġsuscept": 48814, "Types": 48815, "Discuss": 48816, "Ġ'/": 48817, "æµ": 48818, "earable": 48819, "æ³": 48820, "Tile": 48821, "iatus": 48822, "åŃ": 48823, "Ġreperto": 48824, "Helper": 48825, "Returns": 48826, "ä¸Ĭ": 48827, "imaru": 48828, "Ġreq": 48829, "Ġdissatisf": 48830, "multipl": 48831, "}{": 48832, "-[": 48833, "itial": 48834, "*/": 48835, "Config": 48836, "Example": 48837, "ĠjQuery": 48838, "Mods": 48839, "ĠGPIO": 48840, "Ġlaun": 48841, "layout": 48842, "cised": 48843, "Ġ......": 48844, "+++": 48845, "prototype": 48846, "Exception": 48847, "Ġsubsections": 48848, "Ġresemb": 48849, "Ġâĩ": 48850, "ĠPubMed": 48851, "username": 48852, "Ġaggro": 48853, "éĥ": 48854, "Ġ};": 48855, "ĠMages": 48856, "ryu": 48857, "apons": 48858, "Optional": 48859, "ĠAncients": 48860, "ãĤĬ": 48861, "Quotes": 48862, "oaded": 48863, "Ġsuspic": 48864, "inline": 48865, "omial": 48866, "ĠMahjong": 48867, "auntlets": 48868, "Ġanarchism": 48869, "Ġsubclass": 48870, "ĠMLG": 48871, "...]": 48872, "Dialog": 48873, "uphem": 48874, "Ġrecursive": 48875, "7601": 48876, "frac": 48877, "Else": 48878, "ĠSeverus": 48879, "},{\"": 48880, "ĠCLIENT": 48881, "Ġjavascript": 48882, "sama": 48883, "ĠLearns": 48884, "ãĤĤ": 48885, "Upgrade": 48886, "Listener": 48887, "Ġsnipp": 48888, "Ġrune": 48889, "ĠTTL": 48890, "ertation": 48891, "olicy": 48892, "=\"\"": 48893, "«ĺ": 48894, "Ġexpr": 48895, "ovych": 48896, "Ġãģ": 48897, "_-_": 48898, "munition": 48899, "////": 48900, "func": 48901, ">>>>": 48902, "Provider": 48903, "Ïī": 48904, "BUG": 48905, "Ġ[-": 48906, "Ġarrang": 48907, "merce": 48908, "ãĥ": 48909, "incarn": 48910, "Valid": 48911, "ĠAether": 48912, "ãĤĵ": 48913, "ĠUTF": 48914, "ĠMonstrous": 48915, "ãĤĮ": 48916, "hedon": 48917, "áµ": 48918, ":#": 48919, "ĠFrieza": 48920, "padding": 48921, "Reviewer": 48922, "Ġpsychiat": 48923, "yrinth": 48924, "ĠâĶĤ": 48925, "hillary": 48926, "Static": 48927, "Newsletter": 48928, "Avg": 48929, "Ġfn": 48930, "Topic": 48931, "choes": 48932, "Ġnewsp": 48933, "á¸": 48934, "Ġ[+": 48935, "~~~~~~~~~~~~~~~~": 48936, ":]": 48937, "apego": 48938, "buf": 48939, "Translation": 48940, "ById": 48941, "Ġmmol": 48942, "ãĥ¼ãĥ": 48943, "å½": 48944, "ãĤī": 48945, "Ġparser": 48946, "ãĥª": 48947, "`,": 48948, "Lair": 48949, ")}": 48950, "ypes": 48951, "adobe": 48952, "Ġancest": 48953, "ernel": 48954, "ĠNULL": 48955, "ç«": 48956, "anguages": 48957, "Increases": 48958, "æĦ": 48959, "utorial": 48960, "ithmetic": 48961, "dll": 48962, "ĠArcane": 48963, "çī": 48964, "Ġtc": 48965, "urtles": 48966, "èĪ": 48967, "Bytes": 48968, "Slot": 48969, "ĠBahá": 48970, "Weapon": 48971, "widget": 48972, "querque": 48973, "Ġembodiments": 48974, "å¥": 48975, "WARN": 48976, "swer": 48977, "thumbnails": 48978, "FFFF": 48979, "inguishable": 48980, "Ġâī": 48981, "Ġ${": 48982, "AAAAAAAA": 48983, "Conclusion": 48984, "ĻĤ": 48985, "disable": 48986, "Rect": 48987, "Ġsubp": 48988, "Ġ().": 48989, "ĠDetected": 48990, "èĢ": 48991, "[]": 48992, "Ġcoerc": 48993, "ĠmM": 48994, "recated": 48995, "fusc": 48996, "ĠSorce": 48997, "çĶŁ": 48998, ").[": 48999, "Ġ})": 49000, "mobi": 49001, "yip": 49002, "Acknowled": 49003, "ternity": 49004, "iqueness": 49005, "ython": 49006, "><": 49007, "Ġstd": 49008, "Url": 49009, "Ġnamespace": 49010, "Ġtion": 49011, "oother": 49012, "Ó": 49013, "Ġhemor": 49014, "Ġrg": 49015, "ventory": 49016, "ãĤ¢": 49017, "anamo": 49018, "Socket": 49019, "Topics": 49020, "apeshifter": 49021, "gnu": 49022, "Ġdetrim": 49023, 
"`.": 49024, "romeda": 49025, "çIJ": 49026, "Ġlambda": 49027, "Compan": 49028, "Variable": 49029, "Ġusb": 49030, "ĠAdamant": 49031, "ournal": 49032, "Ġcovari": 49033, "ãĥ©": 49034, "éĸ": 49035, "åİ": 49036, "otaur": 49037, "Ġ(),": 49038, "Marginal": 49039, "ãģı": 49040, "Ġphysic": 49041, "adeon": 49042, "RESULTS": 49043, "200000": 49044, "ãģį": 49045, "udeb": 49046, "ãģĵ": 49047, "COMPLE": 49048, "Ġmsg": 49049, "ghazi": 49050, "/*": 49051, "ĠDeity": 49052, "Ġdisapp": 49053, "Availability": 49054, "Ġillum": 49055, "à©": 49056, "ptives": 49057, ",âĢĶ": 49058, "chnology": 49059, "Ġaccur": 49060, "Ġapi": 49061, "Obj": 49062, "ãĤ«": 49063, "ãĤ¸": 49064, "ä¹ĭ": 49065, "ËĪ": 49066, "Ġtcp": 49067, "Required": 49068, ".<": 49069, "\".[": 49070, "Ġ~/.": 49071, "Ġobser": 49072, "RFC": 49073, "Ġintegers": 49074, "åī": 49075, "Installation": 49076, "Ô": 49077, "ó": 49078, "csv": 49079, "ãĥ«": 49080, "ĠNoticed": 49081, "âĸĵ": 49082, "Tumblr": 49083, "Reply": 49084, "||": 49085, "Ġconclud": 49086, "Ġ))": 49087, "ebin": 49088, "sql": 49089, "Closure": 49090, "++++": 49091, "],[": 49092, "âĹı": 49093, "Ġprolet": 49094, "Ġ>=": 49095, "estinal": 49096, "Ġ[*": 49097, "ĠInquisitor": 49098, "Ġcmd": 49099, "FINE": 49100, "CRIP": 49101, "Ġvertex": 49102, "TeX": 49103, "///": 49104, "Ö¼": 49105, "iscons": 49106, "Ġmyster": 49107, "Changed": 49108, "timeout": 49109, "irtual": 49110, "Methods": 49111, "Ġcerts": 49112, "texture": 49113, "Roaming": 49114, "Proxy": 49115, "Override": 49116, "éĹ": 49117, "utf": 49118, "python": 49119, "ĠRarity": 49120, "ilitarian": 49121, "çľ": 49122, "().": 49123, "æł": 49124, "Ġbuf": 49125, "åij": 49126, "çķ": 49127, "Ġ*.": 49128, "umerable": 49129, "~~~~": 49130, "å¦": 49131, "Ġsimultane": 49132, "Ġjson": 49133, "Requires": 49134, "Ġperl": 49135, "Interface": 49136, "rupal": 49137, ":": 49242, "itialized": 49243, "HTTP": 49244, "Trivia": 49245, "Sov": 49246, "wrapper": 49247, "={": 49248, "ĠAzerb": 49249, "aeper": 49250, "Ġneighb": 49251, "initions": 49252, "Ġsts": 49253, "ĠSasuke": 49254, "#$": 49255, "uliffe": 49256, "æĸ¹": 49257, "++++++++++++++++": 49258, "ĠElven": 49259, "ãģĤ": 49260, "Ġartif": 49261, "Folder": 49262, "Ġà¨": 49263, "åĤ": 49264, "Ġphyl": 49265, "uggest": 49266, "blance": 49267, "ãģł": 49268, "Requirements": 49269, "Usage": 49270, "Ġinitialized": 49271, "ãģ®æ": 49272, "conservancy": 49273, "ĠReincarn": 49274, ")|": 49275, "Ġantioxid": 49276, "ĠClicker": 49277, "Ġunlaw": 49278, "Ġ\\(": 49279, "ãĥĪ": 49280, "Ġ[*]": 49281, "Characters": 49282, "////////": 49283, "ãĢIJ": 49284, "ãĤ·": 49285, "webkit": 49286, "ãĢij": 49287, "Ġxp": 49288, "alkyrie": 49289, "Console": 49290, "());": 49291, "ĠKorra": 49292, "\"))": 49293, "oooooooooooooooo": 49294, "Timer": 49295, "////////////////": 49296, "yout": 49297, "engeance": 49298, "emetery": 49299, "Ġmages": 49300, "mods": 49301, "Null": 49302, "Ġphilos": 49303, "ascript": 49304, "Ġaddon": 49305, "ĠâĸĪ": 49306, "emale": 49307, "----------------------------------------------------------------": 49308, "Ġ\\\\": 49309, "=[": 49310, "ĠParables": 49311, "ãĥĨ": 49312, "VALUE": 49313, "Ġ@@": 49314, "Ġuint": 49315, "${": 49316, "cpp": 49317, "%%": 49318, "Ġ(âĪĴ": 49319, "utils": 49320, "prefix": 49321, "å°Ĩ": 49322, "ãĥŃ": 49323, "Completed": 49324, "Ġgoto": 49325, "ãĤ¯": 49326, "Winged": 49327, "perty": 49328, "[\"": 49329, "ãĥİ": 49330, "ĠScythe": 49331, "Ġæľ": 49332, "Ġ!=": 49333, "Buffer": 49334, "docker": 49335, "ĠWATCHED": 49336, "èĢħ": 49337, "())": 49338, "Ġdst": 49339, "SIZE": 49340, "ĠDemonic": 49341, "Ġresil": 49342, "ãĤ¿": 
49343, "Ġpione": 49344, "cpu": 49345, "++)": 49346, "TEXT": 49347, "Ġdiscrep": 49348, "debian": 49349, "quished": 49350, "Ġacknow": 49351, "Ġtrave": 49352, "Ġgcc": 49353, "Catalog": 49354, "ctrl": 49355, "ĠMoroc": 49356, "Ġcpu": 49357, "Ġ];": 49358, "ĠSorceress": 49359, "Introduced": 49360, "Frames": 49361, "Ġcondem": 49362, "¶æ": 49363, "~~~~~~~~": 49364, "ĠEmacs": 49365, "][/": 49366, "Ġglim": 49367, "Init": 49368, "ĠPrimordial": 49369, "ãĥĥ": 49370, "Ġ+=": 49371, "Ġblat": 49372, "à¼": 49373, "------------------------------------------------": 49374, "gpu": 49375, "ãĥĥãĥĪ": 49376, "Ġxml": 49377, "Ġboolean": 49378, "References": 49379, "Ġ?)": 49380, "Ġsatell": 49381, "Queue": 49382, "Ġpestic": 49383, "Ġ}}": 49384, "Attribute": 49385, "Ġdx": 49386, "ĠDefin": 49387, "Synopsis": 49388, "..................": 49389, "ãĥ¬": 49390, "plugin": 49391, "Disable": 49392, "0000000000000000": 49393, ")\\": 49394, "ĠIchigo": 49395, "println": 49396, "rontal": 49397, "Setup": 49398, "Ġ��������": 49399, "å§": 49400, "âĸº": 49401, "ĠPengu": 49402, "ailability": 49403, "Duration": 49404, "Timeout": 49405, "ãĢĮ": 49406, "Ġbehav": 49407, "Reviewed": 49408, "Ġtoget": 49409, "\\.": 49410, "lished": 49411, "Ġthous": 49412, "Ġperpend": 49413, "ecause": 49414, "Layout": 49415, "è»": 49416, "ĠDexterity": 49417, "unsigned": 49418, "+=": 49419, "[[": 49420, "ĠRunes": 49421, "ãĤ¦": 49422, "};": 49423, "})": 49424, "FTWARE": 49425, "ength": 49426, "milo": 49427, "duino": 49428, "天": 49429, "ĠClojure": 49430, "ļé": 49431, "ãĥ¥": 49432, "gradient": 49433, "Ġ\"\"\"": 49434, "âĨij": 49435, "@#": 49436, "JSON": 49437, "Ġproport": 49438, "addr": 49439, "});": 49440, "ãĥIJ": 49441, "ä¸ī": 49442, "Ġtmp": 49443, "å£": 49444, "../": 49445, "zsche": 49446, "ĠâĪ¼": 49447, "Entity": 49448, "æ©Ł": 49449, "ĠâĶľâĶĢâĶĢ": 49450, "filename": 49451, "{{": 49452, "@@": 49453, "ĠSeym": 49454, "Ġ/**": 49455, "ĠSummoner": 49456, "Quantity": 49457, "ç·": 49458, "Attach": 49459, "Ġbool": 49460, "Texture": 49461, "Ġopio": 49462, ".}": 49463, "ãĥĭ": 49464, "integer": 49465, "Ġregex": 49466, "Ġnomine": 49467, "ription": 49468, "ãģ®ç": 49469, "ãĥķ": 49470, "Ġsubparagraph": 49471, "GGGG": 49472, "Ġexplan": 49473, "Header": 49474, "Spawn": 49475, "toggle": 49476, "²¾": 49477, "Abyss": 49478, "expr": 49479, "ĠZerg": 49480, "ĠGrimoire": 49481, "Contents": 49482, "Instance": 49483, "cyclopedia": 49484, "ãĥĹ": 49485, "ĠTakeru": 49486, "=(": 49487, "代": 49488, "\\)": 49489, "Ġrgb": 49490, "htt": 49491, "bryce": 49492, "Ġlivest": 49493, "ĠAnnotations": 49494, "âĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢâĶĢ": 49495, "berus": 49496, "ntil": 49497, "Ġskelet": 49498, "callback": 49499, "åħī": 49500, "Joined": 49501, "ãĤª": 49502, "Ġargs": 49503, "artifacts": 49504, "Ġå¤": 49505, "ÃĽ": 49506, "ãĥŀ": 49507, "Streamer": 49508, "}\"": 49509, "Ġunden": 49510, "ãĥģ": 49511, "Īè": 49512, "ãĥ£": 49513, "Ġ0004": 49514, "Ġ\\'": 49515, "ãĤ°": 49516, "ĠCONFIG": 49517, "Ġ#####": 49518, "``": 49519, "anguage": 49520, "Ġ*)": 49521, "Template": 49522, "MODE": 49523, "Ġ00000000": 49524, "'';": 49525, ">": 49625, "Ġlvl": 49626, "Footnote": 49627, "Iter": 49628, "####": 49629, "ãĥij": 49630, "ĠCarbuncle": 49631, "Ġ[+]": 49632, "Ġmathemat": 49633, "Allows": 49634, "Ġ4090": 49635, "Async": 49636, "ģ«": 49637, "Ļ½": 49638, "))))": 49639, "á½": 49640, "Ġcx": 49641, "Ġansw": 49642, "{\"": 49643, "ãĥŁ": 49644, "addons": 49645, "Filename": 49646, "Appearances": 49647, "ĠãĢĮ": 49648, "Ġaddr": 49649, "Ġcharact": 49650, "glomer": 49651, "Advertisements": 49652, "Ġdracon": 49653, "ĠFenrir": 49654, "Ġ();": 
49655, "ĠCitiz": 49656, "acebook": 49657, "Ġparams": 49658, "]=": 49659, "Ġsubscript": 49660, "Ġentreprene": 49661, "tnc": 49662, "iversal": 49663, "Ġmillenn": 49664, "ithub": 49665, "/>": 49666, "Ġ\"{": 49667, "Frameworks": 49668, "avorite": 49669, "Ġ])": 49670, "Constructed": 49671, "fml": 49672, "ãĥį": 49673, "################################": 49674, "-|": 49675, "¥ŀ": 49676, "Ġwithd": 49677, "ĠCth": 49678, "AppData": 49679, "Msg": 49680, ":{": 49681, "ãĤ¨": 49682, "Ġtuple": 49683, "ç¥ŀ": 49684, "Ġintrins": 49685, "ĠCooldown": 49686, "ategory": 49687, "^{": 49688, "ãĥĬ": 49689, "''''": 49690, "çĶ°": 49691, "ĠDEBUG": 49692, "Ġcannabin": 49693, "ocobo": 49694, "Invalid": 49695, "ãĥĢ": 49696, "Compat": 49697, "Ġ({": 49698, "Removed": 49699, "Ġconvol": 49700, "}:": 49701, "interstitial": 49702, "Ġ\"": 49721, "initialized": 49722, "Ġexting": 49723, "Poké": 49724, "Parameters": 49725, "¶ħ": 49726, "########": 49727, "NULL": 49728, "ãĥĩ": 49729, "groupon": 49730, "\\-": 49731, "ãĥı": 49732, "ãĤ±": 49733, "Ġsubsequ": 49734, "ccording": 49735, "ĠMODULE": 49736, "ĠProtoss": 49737, "\"},{\"": 49738, "Ġ..............": 49739, "Integer": 49740, "endif": 49741, "ãĥĻ": 49742, "parser": 49743, "lambda": 49744, "Ġcarbohyd": 49745, "ĠUnloaded": 49746, "_{": 49747, "âĸ¬âĸ¬": 49748, "Ġdebian": 49749, "]}": 49750, "ãĤ¶": 49751, "Parameter": 49752, "ãĤ£": 49753, "ãĤ»": 49754, "Ġ$_": 49755, "İĭ": 49756, "Ġiterator": 49757, "ãĤ¬": 49758, "WINDOWS": 49759, "CONCLUS": 49760, "Ġ\"\\": 49761, "umbn": 49762, "(&": 49763, "ãĥ©ãĥ³": 49764, "usercontent": 49765, "ometimes": 49766, "METHOD": 49767, "ãĥ¢": 49768, "potion": 49769, "ãĥ¯": 49770, "everal": 49771, "Ġweap": 49772, "minecraft": 49773, "================================": 49774, "printf": 49775, "ĠShinra": 49776, "Ġreluct": 49777, "\\\",": 49778, "Runtime": 49779, "xff": 49780, "ĠAbyssal": 49781, "akeru": 49782, "Ġ\\(\\": 49783, "\"/>": 49784, "efficients": 49785, "Ü": 49786, "avascript": 49787, "Ġbehavi": 49788, "++;": 49789, "=#": 49790, "Attributes": 49791, "âĵĺ": 49792, "lvl": 49793, "¬¼": 49794, "/**": 49795, "Gameplay": 49796, "ĠLeilan": 49797, ">)": 49798, "=\"/": 49799, "Ġ));": 49800, "ãĥĨãĤ£": 49801, "ġ": 49802, ".": 49836, "DEBUG": 49837, "âĶģ": 49838, "ãĢı": 49839, "WithNo": 49840, "Redditor": 49841, "ĠâĶľ": 49842, "Ġfmt": 49843, "ãĢİ": 49844, "Ġmsec": 49845, "ĪĴ": 49846, "eatures": 49847, "itially": 49848, "\"\"\"": 49849, "ãĥ¼ãĤ¯": 49850, "Textures": 49851, "\"},": 49852, "\"><": 49858, "||||": 49859, "ß": 49860, "iterator": 49861, "è£ħ": 49862, "Ĥª": 49863, "ojure": 49864, "ãħĭãħĭ": 49865, "ãĥ¼ãĥ³": 49866, "Ġprintln": 49867, "Ġ][": 49868, "âĸĪâĸĪ": 49869, "âķIJ": 49870, "\\\":": 49871, "senal": 49872, "é¾į": 49873, "é¾": 49874, "Ġcryst": 49875, "ãĥķãĤ¡": 49876, "ĠCosponsors": 49877, "ãĤ·ãĥ£": 49878, "Magikarp": 49879, "ĠMagicka": 49880, "âĸĪâĸĪâĸĪâĸĪ": 49881, ",,,,,,,,": 49882, "vertisement": 49883, "âĶĢâĶĢâĶĢâĶĢ": 49884, "ãĥķãĤ©": 49885, "luaj": 49886, "CLASSIFIED": 49887, ".''.": 49888, "byss": 49889, "Ġ{:": 49890, "ĠNanto": 49891, "Ġptr": 49892, "Ġ%%": 49893, "Ġteasp": 49894, "[_": 49895, "ãĥ¤": 49896, "ħĭ": 49897, "ŃĶ": 49898, "Ġpci": 49899, "Ġ\"<": 49900, "GGGGGGGG": 49901, "æĪ¦": 49902, "--+": 49903, "ãĤ®": 49904, "Ġ())": 49905, "âĸ¬": 49906, "Ġsizeof": 49907, "}}}": 49908, ";;;;;;;;": 49909, ">]": 49910, "âĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪâĸĪ": 49911, "Vaults": 49912, "Ġistg": 49913, "Ġnewcom": 49914, "=]": 49915, "¿½": 49916, "ĵĺ": 49917, "{\\": 49918, "Args": 49919, "Ġexha": 49920, "(\\": 49921, "Ġunnecess": 49922, "\"}],\"": 49923, "ĠUNCLASSIFIED": 
49924, ">(": 49925, "ãĤ¢ãĥ«": 49926, "æ©": 49927, "70710": 49928, "Ń·": 49929, "ãĥ¼ãĥĨãĤ£": 49930, "ĠSakuya": 49931, "ãĥĥãĥī": 49932, "ĠPyrrha": 49933, "escription": 49934, "VIDIA": 49935, "================================================================": 49936, "Ġlooph": 49937, "=~": 49938, "Ġcumbers": 49939, "Ġ)]": 49940, "govtrack": 49941, "ĠãĤµ": 49942, "Ġsubur": 49943, "Þ": 49944, "Ġâī¡": 49945, "Interstitial": 49946, "ãĥ¼ãĥĨ": 49947, "Ġgobl": 49948, "ãĥīãĥ©": 49949, "oldown": 49950, "ģĸ": 49951, "Depths": 49952, "Ġ());": 49953, "Ġ._": 49954, "20439": 49955, "Ġç¥ŀ": 49956, "ãģ®å®": 49957, "ãĤ¼": 49958, "Ġ$\\": 49959, "âĹ¼": 49960, "Ġencount": 49961, "Ġ