diff --git a/configs/model/audioldm.yaml b/configs/model/audioldm.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e7190c63cd8a90b629f56b0d849dc557689fa9dd --- /dev/null +++ b/configs/model/audioldm.yaml @@ -0,0 +1,24 @@ +######################## +# audioldm autoencoder # +######################## + + +audioldm_autoencoder: + type: audioldm_autoencoder + args: + embed_dim: 8 + monitor: val/rec_loss + ddconfig: + double_z: True + z_channels: 8 + resolution: 256 + downsample_time: False + in_channels: 1 + out_ch: 1 + ch: 128 + ch_mult: [1, 2, 4] + num_res_blocks: 2 + attn_resolutions: [] + dropout: 0.0 + lossconfig: + target: torch.nn.Identity \ No newline at end of file diff --git a/configs/model/clap.yaml b/configs/model/clap.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fe57e8570fdb7b8343756b7fb7fe404be27a091b --- /dev/null +++ b/configs/model/clap.yaml @@ -0,0 +1,10 @@ +###################### +# clap audio encoder # +###################### + + +clap_audio: + type: clap_audio + args: + amodel: "HTSAT-large" + joint_embed_shape: 768 \ No newline at end of file diff --git a/configs/model/clip.yaml b/configs/model/clip.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1d095cf6400b0b7929f75bb21a6065070e829548 --- /dev/null +++ b/configs/model/clip.yaml @@ -0,0 +1,22 @@ +############################## +# clip vision & text encoder # +############################## + +clip: + symbol: clip + args: {} + +clip_frozen: + super_cfg: clip + type: clip_frozen + args: {} + +clip_text: + super_cfg: clip + type: clip_text + args: {} + +clip_vision: + super_cfg: clip + type: clip_vision + args: {} \ No newline at end of file diff --git a/configs/model/codi.yaml b/configs/model/codi.yaml new file mode 100644 index 0000000000000000000000000000000000000000..88bc03ee038509193660d0f90274ccb4ce5c04d6 --- /dev/null +++ b/configs/model/codi.yaml @@ -0,0 +1,23 @@ +######## +# CoDi # +######## + +codi: + type: codi + symbol: codi + find_unused_parameters: true + args: + audioldm_cfg: MODEL(audioldm_autoencoder) + autokl_cfg: MODEL(sd_autoencoder) + optimus_cfg: MODEL(optimus_vae) + clip_cfg: MODEL(clip_frozen) + clap_cfg: MODEL(clap_audio) + unet_config: MODEL(openai_unet_codi) + beta_linear_start: 0.00085 + beta_linear_end: 0.012 + timesteps: 1000 + vision_scale_factor: 0.18215 + text_scale_factor: 4.3108 + audio_scale_factor: 0.9228 + use_ema: false + parameterization : "eps" \ No newline at end of file diff --git a/configs/model/openai_unet.yaml b/configs/model/openai_unet.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5be0d88400aface8c99cf294e229595ca574c30b --- /dev/null +++ b/configs/model/openai_unet.yaml @@ -0,0 +1,85 @@ +openai_unet_sd: + type: openai_unet + args: + image_size: null # no use + in_channels: 4 + out_channels: 4 + model_channels: 320 + attention_resolutions: [ 4, 2, 1 ] + num_res_blocks: [ 2, 2, 2, 2 ] + channel_mult: [ 1, 2, 4, 4 ] + num_heads: 8 + use_spatial_transformer: True + transformer_depth: 1 + context_dim: 768 + use_checkpoint: True + legacy: False + +openai_unet_dual_context: + super_cfg: openai_unet_sd + type: openai_unet_dual_context + +######################## +# Code cleaned version # +######################## + +openai_unet_2d_audio: + type: openai_unet_2d + args: + input_channels: 8 + model_channels: 192 + output_channels: 8 + num_noattn_blocks: [ 2, 2, 2, 2 ] + channel_mult: [ 1, 2, 4, 4 ] + with_attn: [true, true, true, false] + channel_mult_connector: [1, 2, 4] + 
num_noattn_blocks_connector: [1, 1, 1] + with_connector: [True, True, True, False] + connector_output_channel: 1280 + num_heads: 8 + context_dim: 768 + use_checkpoint: False + +openai_unet_2d: + type: openai_unet_2d + args: + input_channels: 4 + model_channels: 320 + output_channels: 4 + num_noattn_blocks: [ 2, 2, 2, 2 ] + channel_mult: [ 1, 2, 4, 4 ] + with_attn: [true, true, true, false] + channel_mult_connector: [1, 2, 4] + num_noattn_blocks_connector: [1, 1, 1] + with_connector: [True, True, True, False] + connector_output_channel: 1280 + num_heads: 8 + context_dim: 768 + use_checkpoint: True + use_video_architecture: True + +openai_unet_0dmd: + type: openai_unet_0dmd + args: + input_channels: 768 + model_channels: 320 + output_channels: 768 + num_noattn_blocks: [ 2, 2, 2, 2 ] + channel_mult: [ 1, 2, 4, 4 ] + second_dim: [ 4, 4, 4, 4 ] + with_attn: [true, true, true, false] + num_noattn_blocks_connector: [1, 1, 1] + second_dim_connector: [4, 4, 4] + with_connector: [True, True, True, False] + connector_output_channel: 1280 + num_heads: 8 + context_dim: 768 + use_checkpoint: True + +openai_unet_codi: + type: openai_unet_codi + args: + unet_image_cfg: MODEL(openai_unet_2d) + unet_text_cfg: MODEL(openai_unet_0dmd) + unet_audio_cfg: MODEL(openai_unet_2d_audio) + model_type: ['video', 'image', 'text'] \ No newline at end of file diff --git a/configs/model/optimus.yaml b/configs/model/optimus.yaml new file mode 100644 index 0000000000000000000000000000000000000000..96a8692134f2824aa7de7142618ea82775e389f3 --- /dev/null +++ b/configs/model/optimus.yaml @@ -0,0 +1,107 @@ + +optimus: + symbol: optimus + find_unused_parameters: false + args: {} + +optimus_bert_encoder: + super_cfg: optimus + type: optimus_bert_connector + # pth: pretrained/optimus_bert_encoder.pth + args: + config: + architectures: + - BertForMaskedLM + attention_probs_dropout_prob: 0.1 + finetuning_task: null + hidden_act: gelu + hidden_dropout_prob: 0.1 + hidden_size: 768 + initializer_range: 0.02 + intermediate_size: 3072 + layer_norm_eps: 1.e-12 + max_position_embeddings: 512 + num_attention_heads: 12 + num_hidden_layers: 12 + num_labels: 2 + output_attentions: false + output_hidden_states: false + pruned_heads: {} + torchscript: false + type_vocab_size: 2 + vocab_size: 28996 + latent_size: 768 + +optimus_bert_tokenizer: + super_cfg: optimus + type: optimus_bert_tokenizer + args: + do_lower_case: false + max_len: 512 + vocab_file: core/models/latent_diffusion/vae/optimus_modules/vocab/bert-base-cased-vocab.txt + +optimus_gpt2_decoder: + super_cfg: optimus + type: optimus_gpt2_connector + # pth: pretrained/optimus_gpt2_decoder.pth + args: + config: + architectures: + - GPT2LMHeadModel + attn_pdrop: 0.1 + embd_pdrop: 0.1 + finetuning_task: null + hidden_size: 768 + initializer_range: 0.02 + latent_size: 768 + layer_norm_epsilon: 1.e-05 + max_position_embeddings: 1024 + n_ctx: 1024 + n_embd: 768 + n_head: 12 + n_layer: 12 + n_positions: 1024 + num_attention_heads: 12 + num_hidden_layers: 12 + num_labels: 1 + output_attentions: false + output_hidden_states: false + pretrained_config_archive_map: + gpt2 : https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-config.json + gpt2-medium : https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-medium-config.json + gpt2-large : https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-large-config.json + pruned_heads: {} + resid_pdrop: 0.1 + summary_activation: null + summary_first_dropout: 0.1 + summary_proj_to_labels: true + summary_type: cls_index + summary_use_proj: true + 
torchscript: false + vocab_size: 50260 + +optimus_gpt2_tokenizer: + super_cfg: optimus + type: optimus_gpt2_tokenizer + args: + do_lower_case: false + max_len: 1024 + vocab_file: core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2-vocab.json + merges_file: core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2-merges.txt + +optimus_vae: + super_cfg: optimus + type: optimus_vae + pth: pretrained/optimus-vae.pth + args: + encoder: MODEL(optimus_bert_encoder) + decoder: MODEL(optimus_gpt2_decoder) + tokenizer_encoder: MODEL(optimus_bert_tokenizer) + tokenizer_decoder: MODEL(optimus_gpt2_tokenizer) + args: + latent_size: 768 + beta: 1.0 + fb_mode: 0 + length_weighted_loss: false + dim_target_kl : 3.0 + diff --git a/configs/model/prova.yaml b/configs/model/prova.yaml new file mode 100644 index 0000000000000000000000000000000000000000..03c7f07a75d95e24f80f0cb51f06ca6e672012ea --- /dev/null +++ b/configs/model/prova.yaml @@ -0,0 +1,85 @@ +openai_unet_sd: + type: openai_unet + args: + image_size: null # no use + in_channels: 4 + out_channels: 4 + model_channels: 320 + attention_resolutions: [ 4, 2, 1 ] + num_res_blocks: [ 2, 2, 2, 2 ] + channel_mult: [ 1, 2, 4, 4 ] + num_heads: 8 + use_spatial_transformer: True + transformer_depth: 1 + context_dim: 768 + use_checkpoint: True + legacy: False + +openai_unet_dual_context: + super_cfg: openai_unet_sd + type: openai_unet_dual_context + +######################## +# Code cleaned version # +######################## + +openai_unet_2d_audio: + type: openai_unet_2d + args: + input_channels: 8 + model_channels: 192 + output_channels: 8 + num_noattn_blocks: [ 2, 2, 2, 2 ] + channel_mult: [ 1, 2, 4, 4 ] + with_attn: [true, true, true, false] + channel_mult_connector: [1, 2, 4] + num_noattn_blocks_connector: [1, 1, 1] + with_connector: [True, True, True, False] + connector_output_channel: 1280 + num_heads: 8 + context_dim: 768 + use_checkpoint: False + +openai_unet_2d: + type: openai_unet_2d + args: + input_channels: 4 + model_channels: 320 + output_channels: 4 + num_noattn_blocks: [ 2, 2, 2, 2 ] + channel_mult: [ 1, 2, 4, 4 ] + with_attn: [true, true, true, false] + channel_mult_connector: [1, 2, 4] + num_noattn_blocks_connector: [1, 1, 1] + with_connector: [True, True, True, False] + connector_output_channel: 1280 + num_heads: 8 + context_dim: 768 + use_checkpoint: True + use_video_architecture: True + +openai_unet_0dmd: + type: openai_unet_0dmd + args: + input_channels: 768 + model_channels: 320 + output_channels: 768 + num_noattn_blocks: [ 2, 2, 2, 2 ] + channel_mult: [ 1, 2, 4, 4 ] + second_dim: [ 4, 4, 4, 4 ] + with_attn: [true, true, true, false] + num_noattn_blocks_connector: [1, 1, 1] + second_dim_connector: [4, 4, 4] + with_connector: [True, True, True, False] + connector_output_channel: 1280 + num_heads: 8 + context_dim: 768 + use_checkpoint: True + +prova: + type: prova + args: + unet_frontal_cfg: MODEL(openai_unet_2d) + unet_lateral_cfg: MODEL(openai_unet_2d) + unet_text_cfg: MODEL(openai_unet_0dmd) + model_type: ['text'] diff --git a/configs/model/sd.yaml b/configs/model/sd.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d5b4810101159102e35ade92fa272d29302adf5e --- /dev/null +++ b/configs/model/sd.yaml @@ -0,0 +1,20 @@ +sd_autoencoder: + type: autoencoderkl + args: + embed_dim: 4 + monitor: val/rec_loss + ddconfig: + double_z: true + z_channels: 4 + resolution: 256 + in_channels: 3 + out_ch: 3 + ch: 128 + ch_mult: [1, 2, 4, 4] + num_res_blocks: 2 + attn_resolutions: [] + dropout: 0.0 + use_video_arch: true + 
lossconfig: + target: torch.nn.Identity + pth: pretrained/kl-f8.pth \ No newline at end of file diff --git a/configs/model/thesis_model.yaml b/configs/model/thesis_model.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0dedd2466d96f54b5affc867caa05ad1e484fb5b --- /dev/null +++ b/configs/model/thesis_model.yaml @@ -0,0 +1,21 @@ +######## +# CoDi # +######## + +thesis_model: + type: thesis_model + symbol: thesis_model + find_unused_parameters: true + args: + autokl_cfg: MODEL(sd_autoencoder) + optimus_cfg: MODEL(optimus_vae) + clip_cfg: MODEL(clip_frozen) + unet_config: MODEL(prova) + beta_linear_start: 0.00085 + beta_linear_end: 0.012 + timesteps: 1000 + vision_scale_factor: 0.18215 + text_scale_factor: 4.3108 + audio_scale_factor: 0.9228 + use_ema: false + parameterization : "eps" diff --git a/core/__init__.py b/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/core/__pycache__/__init__.cpython-38.pyc b/core/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4954c349681cd99b329da8f7b8c31db25fa67e00 Binary files /dev/null and b/core/__pycache__/__init__.cpython-38.pyc differ diff --git a/core/__pycache__/cfg_helper.cpython-38.pyc b/core/__pycache__/cfg_helper.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a352f15b2f331625612050277c0d29056127620 Binary files /dev/null and b/core/__pycache__/cfg_helper.cpython-38.pyc differ diff --git a/core/__pycache__/cfg_holder.cpython-38.pyc b/core/__pycache__/cfg_holder.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6df0aea37c7a7f52bb46169af937baf71e172f7b Binary files /dev/null and b/core/__pycache__/cfg_holder.cpython-38.pyc differ diff --git a/core/__pycache__/sync.cpython-38.pyc b/core/__pycache__/sync.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48528471b831701925ca455c68edbfeb5d0e910a Binary files /dev/null and b/core/__pycache__/sync.cpython-38.pyc differ diff --git a/core/cfg_helper.py b/core/cfg_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..315e0fb0986d9decea10f8e7756e2a690eb0c198 --- /dev/null +++ b/core/cfg_helper.py @@ -0,0 +1,665 @@ +import os +import os.path as osp +import shutil +import copy +import time +import pprint +import numpy as np +import torch +import argparse +import json +import yaml +from easydict import EasyDict as edict + +from core.models import get_model + +############ +# cfg_bank # +############ + + +def cfg_solvef(cmd, root): + if not isinstance(cmd, str): + return cmd + + if cmd.find('SAME')==0: + zoom = root + p = cmd[len('SAME'):].strip('()').split('.') + p = [pi.strip() for pi in p] + for pi in p: + try: + pi = int(pi) + except: + pass + + try: + zoom = zoom[pi] + except: + return cmd + return cfg_solvef(zoom, root) + + if cmd.find('SEARCH')==0: + zoom = root + p = cmd[len('SEARCH'):].strip('()').split('.') + p = [pi.strip() for pi in p] + find = True + # Depth first search + for pi in p: + try: + pi = int(pi) + except: + pass + + try: + zoom = zoom[pi] + except: + find = False + break + + if find: + return cfg_solvef(zoom, root) + else: + if isinstance(root, dict): + for ri in root: + rv = cfg_solvef(cmd, root[ri]) + if rv != cmd: + return rv + if isinstance(root, list): + for ri in root: + rv = cfg_solvef(cmd, ri) + if rv != cmd: + return rv + return cmd + + if cmd.find('MODEL')==0: + goto = 
cmd[len('MODEL'):].strip('()')
+        return model_cfg_bank()(goto)
+
+    if cmd.find('DATASET')==0:
+        goto = cmd[len('DATASET'):].strip('()')
+        return dataset_cfg_bank()(goto)
+
+    return cmd
+
+
+def cfg_solve(cfg, cfg_root):
+    # This function resolves cfg elements so that
+    # all surrogate inputs (i.e. SAME(...)) are settled.
+    if isinstance(cfg, list):
+        for i in range(len(cfg)):
+            if isinstance(cfg[i], (list, dict)):
+                cfg[i] = cfg_solve(cfg[i], cfg_root)
+            else:
+                cfg[i] = cfg_solvef(cfg[i], cfg_root)
+    if isinstance(cfg, dict):
+        for k in cfg:
+            if isinstance(cfg[k], (list, dict)):
+                cfg[k] = cfg_solve(cfg[k], cfg_root)
+            else:
+                cfg[k] = cfg_solvef(cfg[k], cfg_root)
+    return cfg
+
+
+class model_cfg_bank(object):
+    def __init__(self):
+        self.cfg_dir = osp.join('configs', 'model')
+        self.cfg_bank = edict()
+
+    def __call__(self, name):
+        if name not in self.cfg_bank:
+            cfg_path = self.get_yaml_path(name)
+            with open(cfg_path, 'r') as f:
+                cfg_new = yaml.load(
+                    f, Loader=yaml.FullLoader)
+            cfg_new = edict(cfg_new)
+            self.cfg_bank.update(cfg_new)
+
+        cfg = self.cfg_bank[name]
+        cfg.name = name
+        if 'super_cfg' not in cfg:
+            cfg = cfg_solve(cfg, cfg)
+            self.cfg_bank[name] = cfg
+            return copy.deepcopy(cfg)
+
+        super_cfg = self.__call__(cfg.super_cfg)
+        # Unlike other fields, args is not replaced
+        # by the child cfg but updated with it.
+        if 'args' in cfg:
+            if 'args' in super_cfg:
+                super_cfg.args.update(cfg.args)
+            else:
+                super_cfg.args = cfg.args
+            cfg.pop('args')
+
+        super_cfg.update(cfg)
+        super_cfg.pop('super_cfg')
+        cfg = super_cfg
+        try:
+            delete_args = cfg.pop('delete_args')
+        except KeyError:
+            delete_args = []
+
+        for dargs in delete_args:
+            cfg.args.pop(dargs)
+
+        cfg = cfg_solve(cfg, cfg)
+        self.cfg_bank[name] = cfg
+        return copy.deepcopy(cfg)
+
+    def get_yaml_path(self, name):
+        if name.find('openai_unet')==0:
+            return osp.join(
+                self.cfg_dir, 'openai_unet.yaml')
+        elif name.find('prova')==0:
+            return osp.join(
+                self.cfg_dir, 'prova.yaml')
+        elif name.find('audioldm')==0:
+            return osp.join(
+                self.cfg_dir, 'audioldm.yaml')
+        elif name.find('clip')==0:
+            return osp.join(
+                self.cfg_dir, 'clip.yaml')
+        elif name.find('sd')==0:
+            return osp.join(
+                self.cfg_dir, 'sd.yaml')
+        elif name.find('codi')==0:
+            return osp.join(
+                self.cfg_dir, 'codi.yaml')
+        elif name.find('thesis_model')==0:
+            return osp.join(
+                self.cfg_dir, 'thesis_model.yaml')
+        elif name.find('clap')==0:
+            return osp.join(
+                self.cfg_dir, 'clap.yaml')
+        elif name.find('optimus')==0:
+            return osp.join(
+                self.cfg_dir, 'optimus.yaml')
+        else:
+            raise ValueError('Unknown model config name: {}'.format(name))
+
+
+class dataset_cfg_bank(object):
+    def __init__(self):
+        self.cfg_dir = osp.join('configs', 'dataset')
+        self.cfg_bank = edict()
+
+    def __call__(self, name):
+        if name not in self.cfg_bank:
+            cfg_path = self.get_yaml_path(name)
+            with open(cfg_path, 'r') as f:
+                cfg_new = yaml.load(
+                    f, Loader=yaml.FullLoader)
+            cfg_new = edict(cfg_new)
+            self.cfg_bank.update(cfg_new)
+
+        cfg = self.cfg_bank[name]
+        cfg.name = name
+        if cfg.get('super_cfg', None) is None:
+            cfg = cfg_solve(cfg, cfg)
+            self.cfg_bank[name] = cfg
+            return copy.deepcopy(cfg)
+
+        super_cfg = self.__call__(cfg.super_cfg)
+        super_cfg.update(cfg)
+        cfg = super_cfg
+        cfg.super_cfg = None
+        try:
+            delete = cfg.pop('delete')
+        except KeyError:
+            delete = []
+
+        for dargs in delete:
+            cfg.pop(dargs)
+
+        cfg = cfg_solve(cfg, cfg)
+        self.cfg_bank[name] = cfg
+        return copy.deepcopy(cfg)
+
+    def get_yaml_path(self, name):
+        if name.find('cityscapes')==0:
+            return osp.join(
+                self.cfg_dir, 'cityscapes.yaml')
+        elif name.find('div2k')==0:
return osp.join( + self.cfg_dir, 'div2k.yaml') + elif name.find('gandiv2k')==0: + return osp.join( + self.cfg_dir, 'gandiv2k.yaml') + elif name.find('srbenchmark')==0: + return osp.join( + self.cfg_dir, 'srbenchmark.yaml') + elif name.find('imagedir')==0: + return osp.join( + self.cfg_dir, 'imagedir.yaml') + elif name.find('places2')==0: + return osp.join( + self.cfg_dir, 'places2.yaml') + elif name.find('ffhq')==0: + return osp.join( + self.cfg_dir, 'ffhq.yaml') + elif name.find('imcpt')==0: + return osp.join( + self.cfg_dir, 'imcpt.yaml') + elif name.find('texture')==0: + return osp.join( + self.cfg_dir, 'texture.yaml') + elif name.find('openimages')==0: + return osp.join( + self.cfg_dir, 'openimages.yaml') + elif name.find('laion2b')==0: + return osp.join( + self.cfg_dir, 'laion2b.yaml') + elif name.find('laionart')==0: + return osp.join( + self.cfg_dir, 'laionart.yaml') + elif name.find('celeba')==0: + return osp.join( + self.cfg_dir, 'celeba.yaml') + elif name.find('coyo')==0: + return osp.join( + self.cfg_dir, 'coyo.yaml') + elif name.find('pafc')==0: + return osp.join( + self.cfg_dir, 'pafc.yaml') + elif name.find('coco')==0: + return osp.join( + self.cfg_dir, 'coco.yaml') + else: + raise ValueError + + +class experiment_cfg_bank(object): + def __init__(self): + self.cfg_dir = osp.join('configs', 'experiment') + self.cfg_bank = edict() + + def __call__(self, name): + if name not in self.cfg_bank: + cfg_path = self.get_yaml_path(name) + with open(cfg_path, 'r') as f: + cfg = yaml.load( + f, Loader=yaml.FullLoader) + cfg = edict(cfg) + + cfg = cfg_solve(cfg, cfg) + cfg = cfg_solve(cfg, cfg) + # twice for SEARCH + self.cfg_bank[name] = cfg + return copy.deepcopy(cfg) + + def get_yaml_path(self, name): + return osp.join( + self.cfg_dir, name+'.yaml') + + +def load_cfg_yaml(path): + if osp.isfile(path): + cfg_path = path + elif osp.isfile(osp.join('configs', 'experiment', path)): + cfg_path = osp.join('configs', 'experiment', path) + elif osp.isfile(osp.join('configs', 'experiment', path+'.yaml')): + cfg_path = osp.join('configs', 'experiment', path+'.yaml') + else: + assert False, 'No such config!' + + with open(cfg_path, 'r') as f: + cfg = yaml.load(f, Loader=yaml.FullLoader) + cfg = edict(cfg) + cfg = cfg_solve(cfg, cfg) + cfg = cfg_solve(cfg, cfg) + return cfg + +############## +# cfg_helper # +############## + + +def get_experiment_id(ref=None): + if ref is None: + time.sleep(0.5) + return int(time.time()*100) + else: + try: + return int(ref) + except: + pass + + _, ref = osp.split(ref) + ref = ref.split('_')[0] + try: + return int(ref) + except: + assert False, 'Invalid experiment ID!' 
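+
+# A minimal usage sketch of the cfg banks above (illustrative only; it assumes
+# the configs/model/*.yaml files from this patch are present on disk):
+#
+#   from core.cfg_helper import model_cfg_bank
+#   cfg = model_cfg_bank()('codi')
+#   # surrogate macros such as MODEL(sd_autoencoder) inside codi.yaml are
+#   # expanded recursively by cfg_solve/cfg_solvef, so nested configs arrive
+#   # fully resolved:
+#   print(cfg.args.unet_config.type)   # -> 'openai_unet_codi'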
+
+
+def record_resume_cfg(path):
+    cnt = 0
+    while True:
+        if osp.exists(path+'.{:04d}'.format(cnt)):
+            cnt += 1
+            continue
+        shutil.copyfile(path, path+'.{:04d}'.format(cnt))
+        break
+
+
+def get_command_line_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--debug', action='store_true', default=False)
+    parser.add_argument('--config', type=str)
+    parser.add_argument('--gpu', nargs='+', type=int)
+
+    parser.add_argument('--node_rank', type=int, default=0)
+    parser.add_argument('--nodes', type=int, default=1)
+    parser.add_argument('--addr', type=str, default='127.0.0.1')
+    parser.add_argument('--port', type=int, default=11233)
+
+    parser.add_argument('--signature', nargs='+', type=str)
+    parser.add_argument('--seed', type=int)
+
+    parser.add_argument('--eval', type=str)
+    parser.add_argument('--eval_subdir', type=str)
+    parser.add_argument('--pretrained', type=str)
+
+    parser.add_argument('--resume_dir', type=str)
+    parser.add_argument('--resume_step', type=int)
+    parser.add_argument('--resume_weight', type=str)
+
+    args = parser.parse_args()
+
+    # Special handling for resume
+    if args.resume_dir is not None:
+        cfg = edict()
+        cfg.env = edict()
+        cfg.env.debug = args.debug
+        cfg.env.resume = edict()
+        cfg.env.resume.dir = args.resume_dir
+        cfg.env.resume.step = args.resume_step
+        cfg.env.resume.weight = args.resume_weight
+        return cfg
+
+    cfg = load_cfg_yaml(args.config)
+    cfg.env.debug = args.debug
+    cfg.env.gpu_device = [0] if args.gpu is None else list(args.gpu)
+    cfg.env.master_addr = args.addr
+    cfg.env.master_port = args.port
+    cfg.env.dist_url = 'tcp://{}:{}'.format(args.addr, args.port)
+    cfg.env.node_rank = args.node_rank
+    cfg.env.nodes = args.nodes
+
+    istrain = args.eval is None
+    isdebug = cfg.env.debug
+
+    if istrain:
+        if isdebug:
+            cfg.env.experiment_id = 999999999999
+            cfg.train.signature = ['debug']
+        else:
+            cfg.env.experiment_id = get_experiment_id()
+            if args.signature is not None:
+                cfg.train.signature = args.signature
+    else:
+        if 'train' in cfg:
+            cfg.pop('train')
+        cfg.env.experiment_id = get_experiment_id(args.eval)
+        if args.signature is not None:
+            cfg.eval.signature = args.signature
+
+        if isdebug and (args.eval is None):
+            cfg.env.experiment_id = 999999999999
+            cfg.eval.signature = ['debug']
+
+    if args.eval_subdir is not None:
+        if isdebug:
+            cfg.eval.eval_subdir = 'debug'
+        else:
+            cfg.eval.eval_subdir = args.eval_subdir
+    if args.pretrained is not None:
+        cfg.eval.pretrained = args.pretrained
+        # this overrides the pretrained weights set in cfg.model
+    if args.seed is not None:
+        cfg.env.rnd_seed = args.seed
+    return cfg
+
+
+def cfg_initiates(cfg):
+    cfge = cfg.env
+    isdebug = cfge.debug
+    isresume = 'resume' in cfge
+    istrain = 'train' in cfg
+    haseval = 'eval' in cfg
+    cfgt = cfg.train if istrain else None
+    cfgv = cfg.eval if haseval else None
+
+    ###############################
+    # get some environment params #
+    ###############################
+
+    cfge.computer = os.uname()
+    cfge.torch_version = str(torch.__version__)
+
+    ##########
+    # resume #
+    ##########
+
+    if isresume:
+        resume_cfg_path = osp.join(cfge.resume.dir, 'config.yaml')
+        record_resume_cfg(resume_cfg_path)
+        with open(resume_cfg_path, 'r') as f:
+            cfg_resume = yaml.load(f, Loader=yaml.FullLoader)
+        cfg_resume = edict(cfg_resume)
+        cfg_resume.env.update(cfge)
+        cfg = cfg_resume
+        cfge = cfg.env
+        log_file = cfg.train.log_file
+
+        print('')
+        print('##########')
+        print('# resume #')
+        print('##########')
+        print('')
+        with open(log_file, 'a') as
f: + print('', file=f) + print('##########', file=f) + print('# resume #', file=f) + print('##########', file=f) + print('', file=f) + + pprint.pprint(cfg) + with open(log_file, 'a') as f: + pprint.pprint(cfg, f) + + #################### + # node distributed # + #################### + + if cfg.env.master_addr!='127.0.0.1': + os.environ['MASTER_ADDR'] = cfge.master_addr + os.environ['MASTER_PORT'] = '{}'.format(cfge.master_port) + if cfg.env.dist_backend=='nccl': + os.environ['NCCL_SOCKET_FAMILY'] = 'AF_INET' + if cfg.env.dist_backend=='gloo': + os.environ['GLOO_SOCKET_FAMILY'] = 'AF_INET' + + ####################### + # cuda visible device # + ####################### + + os.environ["CUDA_VISIBLE_DEVICES"] = ','.join( + [str(gid) for gid in cfge.gpu_device]) + + ##################### + # return resume cfg # + ##################### + + if isresume: + return cfg + + ############################################# + # some misc setting that not need in resume # + ############################################# + + cfgm = cfg.model + cfge.gpu_count = len(cfge.gpu_device) + + ########################################## + # align batch size and num worker config # + ########################################## + + gpu_n = cfge.gpu_count * cfge.nodes + + def align_batch_size(bs, bs_per_gpu): + assert (bs is not None) or (bs_per_gpu is not None) + bs = bs_per_gpu * gpu_n if bs is None else bs + bs_per_gpu = bs // gpu_n if bs_per_gpu is None else bs_per_gpu + assert (bs == bs_per_gpu * gpu_n) + return bs, bs_per_gpu + + if istrain: + cfgt.batch_size, cfgt.batch_size_per_gpu = \ + align_batch_size(cfgt.batch_size, cfgt.batch_size_per_gpu) + cfgt.dataset_num_workers, cfgt.dataset_num_workers_per_gpu = \ + align_batch_size(cfgt.dataset_num_workers, cfgt.dataset_num_workers_per_gpu) + if haseval: + cfgv.batch_size, cfgv.batch_size_per_gpu = \ + align_batch_size(cfgv.batch_size, cfgv.batch_size_per_gpu) + cfgv.dataset_num_workers, cfgv.dataset_num_workers_per_gpu = \ + align_batch_size(cfgv.dataset_num_workers, cfgv.dataset_num_workers_per_gpu) + + ################## + # create log dir # + ################## + + if istrain: + if not isdebug: + sig = cfgt.get('signature', []) + version = get_model().get_version(cfgm.type) + sig = sig + ['v{}'.format(version), 's{}'.format(cfge.rnd_seed)] + else: + sig = ['debug'] + + log_dir = [ + cfge.log_root_dir, + '{}_{}'.format(cfgm.symbol, cfgt.dataset.symbol), + '_'.join([str(cfge.experiment_id)] + sig) + ] + log_dir = osp.join(*log_dir) + log_file = osp.join(log_dir, 'train.log') + if not osp.exists(log_file): + os.makedirs(osp.dirname(log_file)) + cfgt.log_dir = log_dir + cfgt.log_file = log_file + + if haseval: + cfgv.log_dir = log_dir + cfgv.log_file = log_file + else: + model_symbol = cfgm.symbol + if cfgv.get('dataset', None) is None: + dataset_symbol = 'nodataset' + else: + dataset_symbol = cfgv.dataset.symbol + + log_dir = osp.join(cfge.log_root_dir, '{}_{}'.format(model_symbol, dataset_symbol)) + exp_dir = search_experiment_folder(log_dir, cfge.experiment_id) + if exp_dir is None: + if not isdebug: + sig = cfgv.get('signature', []) + ['evalonly'] + else: + sig = ['debug'] + exp_dir = '_'.join([str(cfge.experiment_id)] + sig) + + eval_subdir = cfgv.get('eval_subdir', None) + # override subdir in debug mode (if eval_subdir is set) + eval_subdir = 'debug' if (eval_subdir is not None) and isdebug else eval_subdir + + if eval_subdir is not None: + log_dir = osp.join(log_dir, exp_dir, eval_subdir) + else: + log_dir = osp.join(log_dir, exp_dir) + + disable_log_override 
= cfgv.get('disable_log_override', False)
+        if osp.isdir(log_dir):
+            if disable_log_override:
+                assert False, 'Overriding an existing log_dir is disabled at [{}]'.format(log_dir)
+        else:
+            os.makedirs(log_dir)
+
+        log_file = osp.join(log_dir, 'eval.log')
+        cfgv.log_dir = log_dir
+        cfgv.log_file = log_file
+
+    ######################
+    # print and save cfg #
+    ######################
+
+    pprint.pprint(cfg)
+    with open(log_file, 'w') as f:
+        pprint.pprint(cfg, f)
+    with open(osp.join(log_dir, 'config.yaml'), 'w') as f:
+        yaml.dump(edict_2_dict(cfg), f)
+
+    #############
+    # save code #
+    #############
+
+    save_code = False
+    if istrain:
+        save_code = cfgt.get('save_code', False)
+    elif haseval:
+        save_code = cfgv.get('save_code', False)
+
+    if save_code:
+        codedir = osp.join(log_dir, 'code')
+        if osp.exists(codedir):
+            shutil.rmtree(codedir)
+        for d in ['configs', 'lib']:
+            fromcodedir = d
+            tocodedir = osp.join(codedir, d)
+            shutil.copytree(
+                fromcodedir, tocodedir,
+                ignore=shutil.ignore_patterns(
+                    '*__pycache__*', '*build*'))
+        for codei in os.listdir('.'):
+            # osp.splitext keeps the dot, so compare against '.py'
+            if osp.splitext(codei)[1] == '.py':
+                shutil.copy(codei, codedir)
+
+    #######################
+    # set matplotlib mode #
+    #######################
+
+    if 'matplotlib_mode' in cfge:
+        try:
+            import matplotlib
+            matplotlib.use(cfge.matplotlib_mode)
+        except Exception:
+            print('Warning: matplotlib mode [{}] failed to be set!'.format(cfge.matplotlib_mode))
+
+    return cfg
+
+
+def edict_2_dict(x):
+    if isinstance(x, dict):
+        xnew = {}
+        for k in x:
+            xnew[k] = edict_2_dict(x[k])
+        return xnew
+    elif isinstance(x, list):
+        xnew = []
+        for i in range(len(x)):
+            xnew.append(edict_2_dict(x[i]))
+        return xnew
+    else:
+        return x
+
+
+def search_experiment_folder(root, exid):
+    target = None
+    for fi in os.listdir(root):
+        if not osp.isdir(osp.join(root, fi)):
+            continue
+        if int(fi.split('_')[0]) == exid:
+            if target is not None:
+                return None  # duplicated
+            elif target is None:
+                target = fi
+    return target
diff --git a/core/cfg_holder.py b/core/cfg_holder.py
new file mode 100644
index 0000000000000000000000000000000000000000..18d16b0eeab83e3284ac2345135e018bcfebd9cc
--- /dev/null
+++ b/core/cfg_holder.py
@@ -0,0 +1,33 @@
+import copy
+
+
+def singleton(class_):
+    instances = {}
+
+    def getinstance(*args, **kwargs):
+        if class_ not in instances:
+            instances[class_] = class_(*args, **kwargs)
+        return instances[class_]
+    return getinstance
+
+##############
+# cfg_holder #
+##############
+
+
+@singleton
+class cfg_unique_holder(object):
+    def __init__(self):
+        self.cfg = None
+        # this is used to track the main code files.
+        self.code = set()
+
+    def save_cfg(self, cfg):
+        self.cfg = copy.deepcopy(cfg)
+
+    def add_code(self, code):
+        """
+        A new main code is reached and
+        its name is added.
+        """
+        self.code.add(code)
diff --git a/core/common/__pycache__/utils.cpython-38.pyc b/core/common/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2425bbde7306394096729b0e86841766e8ff5207
Binary files /dev/null and b/core/common/__pycache__/utils.cpython-38.pyc differ
diff --git a/core/common/registry.py b/core/common/registry.py
new file mode 100644
index 0000000000000000000000000000000000000000..94a01421177ae0d9b33c7bef682459987cd58d04
--- /dev/null
+++ b/core/common/registry.py
@@ -0,0 +1,86 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
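+#
+# A usage sketch of setup_registry below (illustrative; 'encoder' and
+# MyEncoder are hypothetical names, not part of this patch). The call
+# returns a builder, a registration decorator, the registry dict, and
+# the dataclass registry:
+#
+#   build_encoder, register_encoder, ENCODER_REGISTRY, _ = setup_registry('--encoder')
+#
+#   @register_encoder('my_encoder')
+#   class MyEncoder:
+#       def __init__(self, cfg):
+#           ...
+#
+#   encoder = build_encoder('my_encoder')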
+ +from argparse import Namespace +from typing import Union + +from hydra.core.config_store import ConfigStore +from omegaconf import DictConfig + +REGISTRIES = {} + + +def setup_registry(registry_name: str, + base_class=None, + default=None, + required=False): + assert registry_name.startswith('--') + registry_name = registry_name[2:].replace('-', '_') + + REGISTRY = {} + REGISTRY_CLASS_NAMES = set() + DATACLASS_REGISTRY = {} + + # maintain a registry of all registries + if registry_name in REGISTRIES: + return # registry already exists + REGISTRIES[registry_name] = { + 'registry': REGISTRY, + 'default': default, + 'dataclass_registry': DATACLASS_REGISTRY, + } + + def build_x(cfg: Union[DictConfig, str, Namespace], *extra_args, + **extra_kwargs): + + assert isinstance(cfg, str) + choice = cfg + if choice in DATACLASS_REGISTRY: + cfg = DATACLASS_REGISTRY[choice]() + + if choice is None: + if required: + raise ValueError('{} is required!'.format(registry_name)) + return None + + cls = REGISTRY[choice] + if hasattr(cls, 'build_' + registry_name): + builder = getattr(cls, 'build_' + registry_name) + else: + builder = cls + return builder(cfg, *extra_args, **extra_kwargs) + + def register_x(name, dataclass=None): + def register_x_cls(cls): + if name in REGISTRY: + raise ValueError('Cannot register duplicate {} ({})'.format( + registry_name, name)) + if cls.__name__ in REGISTRY_CLASS_NAMES: + raise ValueError( + 'Cannot register {} with duplicate class name ({})'.format( + registry_name, cls.__name__)) + if base_class is not None and not issubclass(cls, base_class): + raise ValueError('{} must extend {}'.format( + cls.__name__, base_class.__name__)) + + cls.__dataclass = dataclass + if cls.__dataclass is not None: + DATACLASS_REGISTRY[name] = cls.__dataclass + + cs = ConfigStore.instance() + node = dataclass() + node._name = name + cs.store(name=name, + group=registry_name, + node=node, + provider='layoutlmft') + + REGISTRY[name] = cls + + return cls + + return register_x_cls + + return build_x, register_x, REGISTRY, DATACLASS_REGISTRY diff --git a/core/common/utils.py b/core/common/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..31cc40d95adf81a5b2c4c8fdf98f8e523b485a34 --- /dev/null +++ b/core/common/utils.py @@ -0,0 +1,412 @@ +import random +import torch +from collections import OrderedDict + +import numpy as np +from PIL import Image +import torchvision.transforms as T +from torchvision.transforms import Compose, Resize, CenterCrop, ToTensor +from torchvision import transforms as tvtrans + +from decord import VideoReader, cpu, gpu + + +############### +# text helper # +############### + + +def remove_duplicate_word(tx): + def combine_words(input, length): + combined_inputs = [] + if len(splitted_input) > 1: + for i in range(len(input) - 1): + combined_inputs.append(input[i] + " " + last_word_of(splitted_input[i + 1], + length)) # add the last word of the right-neighbour (overlapping) sequence (before it has expanded), which is the next word in the original sentence + return combined_inputs, length + 1 + + def remove_duplicates(input, length): + bool_broke = False #this means we didn't find any duplicates here + for i in range(len(input) - length): + if input[i] == input[i + length]: #found a duplicate piece of sentence! 
+                for j in range(0, length):  # remove the overlapping sequences in reverse order
+                    del input[i + length - j]
+                bool_broke = True
+                break  # break the for loop, since the loop length no longer matches the length of splitted_input after removing elements
+        if bool_broke:
+            return remove_duplicates(input, length)  # if we found a duplicate, look for another duplicate of the same length
+        return input
+
+    def last_word_of(input, length):
+        splitted = input.split(" ")
+        if len(splitted) == 0:
+            return input
+        else:
+            return splitted[length - 1]
+
+    def split_and_puncsplit(text):
+        tx = text.split(" ")
+        txnew = []
+        for txi in tx:
+            txqueue = []
+            while True:
+                if txi[0] in '([{':
+                    txqueue.extend([txi[:1], ''])
+                    txi = txi[1:]
+                    if len(txi) == 0:
+                        break
+                else:
+                    break
+            txnew += txqueue
+            txstack = []
+            if len(txi) == 0:
+                continue
+            while True:
+                if txi[-1] in '?!.,:;}])':
+                    txstack = ['', txi[-1:]] + txstack
+                    txi = txi[:-1]
+                    if len(txi) == 0:
+                        break
+                else:
+                    break
+            if len(txi) != 0:
+                txnew += [txi]
+            txnew += txstack
+        return txnew
+
+    if tx == '':
+        return tx
+
+    splitted_input = split_and_puncsplit(tx)
+    word_length = 1
+    intermediate_output = False
+    while len(splitted_input) > 1:
+        splitted_input = remove_duplicates(splitted_input, word_length)
+        if len(splitted_input) > 1:
+            splitted_input, word_length = combine_words(splitted_input, word_length)
+        if intermediate_output:
+            print(splitted_input)
+            print(word_length)
+    output = splitted_input[0]
+    output = output.replace(' ', '')
+    return output
+
+
+#################
+# vision helper #
+#################
+
+
+def regularize_image(x, image_size=512):
+    BICUBIC = T.InterpolationMode.BICUBIC
+    if isinstance(x, str):
+        x = Image.open(x)
+        size = min(x.size)
+    elif isinstance(x, Image.Image):
+        x = x.convert('RGB')
+        size = min(x.size)
+    elif isinstance(x, np.ndarray):
+        x = Image.fromarray(x).convert('RGB')
+        size = min(x.size)
+    elif isinstance(x, torch.Tensor):
+        # normalize to [0, 1]
+        x = x/255.0
+        size = min(x.size()[1:])
+    else:
+        assert False, 'Unknown image type'
+
+    """transforms = T.Compose([
+        T.RandomCrop(size),
+        T.Resize(
+            (image_size, image_size),
+            interpolation=BICUBIC,
+        ),
+        T.RandomHorizontalFlip(),
+        T.ToTensor(),
+    ])
+    x = transforms(x)
+
+    assert (x.shape[1] == image_size) & (x.shape[2] == image_size), \
+        'Wrong image size'
+    """
+    x = x * 2 - 1
+    return x
+
+
+def center_crop(img, new_width=None, new_height=None):
+    width = img.shape[2]
+    height = img.shape[1]
+
+    if new_width is None:
+        new_width = min(width, height)
+
+    if new_height is None:
+        new_height = min(width, height)
+
+    left = int(np.ceil((width - new_width) / 2))
+    right = width - int(np.floor((width - new_width) / 2))
+
+    top = int(np.ceil((height - new_height) / 2))
+    bottom = height - int(np.floor((height - new_height) / 2))
+    if len(img.shape) == 3:
+        center_cropped_img = img[:, top:bottom, left:right]
+    else:
+        center_cropped_img = img[:, top:bottom, left:right, ...]
+ + return center_cropped_img + + +def _transform(n_px): + return Compose([ + Resize([n_px, n_px], interpolation=T.InterpolationMode.BICUBIC), ]) + + +def regularize_video(video, image_size=256): + min_shape = min(video.shape[1:3]) + video = center_crop(video, min_shape, min_shape) + video = torch.from_numpy(video).permute(0, 3, 1, 2) + video = _transform(image_size)(video) + video = video / 255.0 * 2.0 - 1.0 + return video.permute(1, 0, 2, 3) + + +def time_to_indices(video_reader, time): + times = video_reader.get_frame_timestamp(range(len(video_reader))).mean(-1) + indices = np.searchsorted(times, time) + # Use `np.bitwise_or` so it works both with scalars and numpy arrays. + return np.where(np.bitwise_or(indices == 0, times[indices] - time <= time - times[indices - 1]), indices, + indices - 1) + + +def load_video(video_path, sample_duration=8.0, num_frames=8): + sample_duration = 4.0 + num_frames = 4 + + vr = VideoReader(video_path, ctx=cpu(0)) + framerate = vr.get_avg_fps() + video_frame_len = len(vr) + video_len = video_frame_len / framerate + sample_duration = min(sample_duration, video_len) + + if video_len > sample_duration: + s = random.random() * (video_len - sample_duration) + t = s + sample_duration + start, end = time_to_indices(vr, [s, t]) + end = min(video_frame_len - 1, end) + start = min(start, end - 1) + downsamlp_indices = np.linspace(start, end, num_frames, endpoint=True).astype(int).tolist() + else: + downsamlp_indices = np.linspace(0, video_frame_len - 1, num_frames, endpoint=True).astype(int).tolist() + + video = vr.get_batch(downsamlp_indices).asnumpy() + return video + + +############### +# some helper # +############### + +def atomic_save(cfg, net, opt, step, path): + if isinstance(net, (torch.nn.DataParallel, + torch.nn.parallel.DistributedDataParallel)): + netm = net.module + else: + netm = net + sd = netm.state_dict() + slimmed_sd = [(ki, vi) for ki, vi in sd.items() + if ki.find('first_stage_model') != 0 and ki.find('cond_stage_model') != 0] + + checkpoint = { + "config": cfg, + "state_dict": OrderedDict(slimmed_sd), + "step": step} + if opt is not None: + checkpoint['optimizer_states'] = opt.state_dict() + import io + import fsspec + bytesbuffer = io.BytesIO() + torch.save(checkpoint, bytesbuffer) + with fsspec.open(path, "wb") as f: + f.write(bytesbuffer.getvalue()) + + +def load_state_dict(net, cfg): + pretrained_pth_full = cfg.get('pretrained_pth_full', None) + pretrained_ckpt_full = cfg.get('pretrained_ckpt_full', None) + pretrained_pth = cfg.get('pretrained_pth', None) + pretrained_ckpt = cfg.get('pretrained_ckpt', None) + pretrained_pth_dm = cfg.get('pretrained_pth_dm', None) + pretrained_pth_ema = cfg.get('pretrained_pth_ema', None) + strict_sd = cfg.get('strict_sd', False) + errmsg = "Overlapped model state_dict! This is undesired behavior!" 
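+    # Exactly one of the loading modes below may be active at a time (the
+    # asserts enforce this): *_full loads the entire network; plain
+    # pretrained_pth/ckpt reloads everything except the first/cond stage
+    # weights, which are re-attached from the current model; *_pth_dm targets
+    # only the diffusion UNet; *_pth_ema targets the EMA copy of the UNet.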
+ + if pretrained_pth_full is not None or pretrained_ckpt_full is not None: + assert (pretrained_pth is None) and \ + (pretrained_ckpt is None) and \ + (pretrained_pth_dm is None) and \ + (pretrained_pth_ema is None), errmsg + if pretrained_pth_full is not None: + target_file = pretrained_pth_full + sd = torch.load(target_file, map_location='cpu') + assert pretrained_ckpt is None, errmsg + else: + target_file = pretrained_ckpt_full + sd = torch.load(target_file, map_location='cpu')['state_dict'] + print('Load full model from [{}] strict [{}].'.format( + target_file, strict_sd)) + net.load_state_dict(sd, strict=strict_sd) + + if pretrained_pth is not None or pretrained_ckpt is not None: + assert (pretrained_ckpt_full is None) and \ + (pretrained_pth_full is None) and \ + (pretrained_pth_dm is None) and \ + (pretrained_pth_ema is None), errmsg + if pretrained_pth is not None: + target_file = pretrained_pth + sd = torch.load(target_file, map_location='cpu') + assert pretrained_ckpt is None, errmsg + else: + target_file = pretrained_ckpt + sd = torch.load(target_file, map_location='cpu')['state_dict'] + print('Load model from [{}] strict [{}].'.format( + target_file, strict_sd)) + sd_extra = [(ki, vi) for ki, vi in net.state_dict().items() \ + if ki.find('first_stage_model') == 0 or ki.find('cond_stage_model') == 0] + sd.update(OrderedDict(sd_extra)) + net.load_state_dict(sd, strict=strict_sd) + + if pretrained_pth_dm is not None: + assert (pretrained_ckpt_full is None) and \ + (pretrained_pth_full is None) and \ + (pretrained_pth is None) and \ + (pretrained_ckpt is None), errmsg + print('Load diffusion model from [{}] strict [{}].'.format( + pretrained_pth_dm, strict_sd)) + sd = torch.load(pretrained_pth_dm, map_location='cpu') + net.model.diffusion_model.load_state_dict(sd, strict=strict_sd) + + if pretrained_pth_ema is not None: + assert (pretrained_ckpt_full is None) and \ + (pretrained_pth_full is None) and \ + (pretrained_pth is None) and \ + (pretrained_ckpt is None), errmsg + print('Load unet ema model from [{}] strict [{}].'.format( + pretrained_pth_ema, strict_sd)) + sd = torch.load(pretrained_pth_ema, map_location='cpu') + net.model_ema.load_state_dict(sd, strict=strict_sd) + + +def auto_merge_imlist(imlist, max=64): + imlist = imlist[0:max] + h, w = imlist[0].shape[0:2] + num_images = len(imlist) + num_row = int(np.sqrt(num_images)) + num_col = num_images // num_row + 1 if num_images % num_row != 0 else num_images // num_row + canvas = np.zeros([num_row * h, num_col * w, 3], dtype=np.uint8) + for idx, im in enumerate(imlist): + hi = (idx // num_col) * h + wi = (idx % num_col) * w + canvas[hi:hi + h, wi:wi + w, :] = im + return canvas + + +def latent2im(net, latent): + single_input = len(latent.shape) == 3 + if single_input: + latent = latent[None] + im = net.decode_image(latent.to(net.device)) + im = torch.clamp((im + 1.0) / 2.0, min=0.0, max=1.0) + im = [tvtrans.ToPILImage()(i) for i in im] + if single_input: + im = im[0] + return im + + +def im2latent(net, im): + single_input = not isinstance(im, list) + if single_input: + im = [im] + im = torch.stack([tvtrans.ToTensor()(i) for i in im], dim=0) + im = (im * 2 - 1).to(net.device) + z = net.encode_image(im) + if single_input: + z = z[0] + return z + + +class color_adjust(object): + def __init__(self, ref_from, ref_to): + x0, m0, std0 = self.get_data_and_stat(ref_from) + x1, m1, std1 = self.get_data_and_stat(ref_to) + self.ref_from_stat = (m0, std0) + self.ref_to_stat = (m1, std1) + self.ref_from = self.preprocess(x0).reshape(-1, 
3)
+        self.ref_to = x1.reshape(-1, 3)
+
+    def get_data_and_stat(self, x):
+        if isinstance(x, str):
+            x = np.array(Image.open(x))
+        elif isinstance(x, Image.Image):
+            x = np.array(x)
+        elif isinstance(x, torch.Tensor):
+            x = torch.clamp(x, min=0.0, max=1.0)
+            x = np.array(tvtrans.ToPILImage()(x))
+        elif isinstance(x, np.ndarray):
+            pass
+        else:
+            raise ValueError('Unknown image type')
+        x = x.astype(float)
+        m = np.reshape(x, (-1, 3)).mean(0)
+        s = np.reshape(x, (-1, 3)).std(0)
+        return x, m, s
+
+    def preprocess(self, x):
+        m0, s0 = self.ref_from_stat
+        m1, s1 = self.ref_to_stat
+        y = ((x - m0) / s0) * s1 + m1
+        return y
+
+    def __call__(self, xin, keep=0, simple=False):
+        xin, _, _ = self.get_data_and_stat(xin)
+        x = self.preprocess(xin)
+        if simple:
+            y = (x * (1 - keep) + xin * keep)
+            y = np.clip(y, 0, 255).astype(np.uint8)
+            return y
+
+        h, w = x.shape[:2]
+        x = x.reshape(-1, 3)
+        y = []
+        for chi in range(3):
+            yi = self.pdf_transfer_1d(self.ref_from[:, chi], self.ref_to[:, chi], x[:, chi])
+            y.append(yi)
+
+        y = np.stack(y, axis=1)
+        y = y.reshape(h, w, 3)
+        y = (y.astype(float) * (1 - keep) + xin.astype(float) * keep)
+        y = np.clip(y, 0, 255).astype(np.uint8)
+        return y
+
+    def pdf_transfer_1d(self, arr_fo, arr_to, arr_in, n=600):
+        arr = np.concatenate((arr_fo, arr_to))
+        min_v = arr.min() - 1e-6
+        max_v = arr.max() + 1e-6
+        min_vto = arr_to.min() - 1e-6
+        max_vto = arr_to.max() + 1e-6
+        xs = np.array(
+            [min_v + (max_v - min_v) * i / n for i in range(n + 1)])
+        hist_fo, _ = np.histogram(arr_fo, xs)
+        hist_to, _ = np.histogram(arr_to, xs)
+        xs = xs[:-1]
+        # compute probability distribution
+        cum_fo = np.cumsum(hist_fo)
+        cum_to = np.cumsum(hist_to)
+        d_fo = cum_fo / cum_fo[-1]
+        d_to = cum_to / cum_to[-1]
+        # transfer
+        t_d = np.interp(d_fo, d_to, xs)
+        t_d[d_fo <= d_to[0]] = min_vto
+        t_d[d_fo >= d_to[-1]] = max_vto
+        arr_out = np.interp(arr_in, xs, t_d)
+        return arr_out
diff --git a/core/models/__init__.py b/core/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c0a57a9e5d66ee79319d7390dedf650ffb05caf
--- /dev/null
+++ b/core/models/__init__.py
@@ -0,0 +1,4 @@
+from .common.get_model import get_model
+from .common.get_optimizer import get_optimizer
+from .common.get_scheduler import get_scheduler
+from .common.utils import get_unit
diff --git a/core/models/__pycache__/__init__.cpython-38.pyc b/core/models/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a573f9bfa4224ca8c1096e270171b3dfb18adf1a
Binary files /dev/null and b/core/models/__pycache__/__init__.cpython-38.pyc differ
diff --git a/core/models/__pycache__/codi.cpython-38.pyc b/core/models/__pycache__/codi.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a20c4504ae183edaf77644fc93801f1180fd3d73
Binary files /dev/null and b/core/models/__pycache__/codi.cpython-38.pyc differ
diff --git a/core/models/__pycache__/codi_2.cpython-38.pyc b/core/models/__pycache__/codi_2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..20e9fb262e3f96ad1d6d87c846fb4483c6bc6032
Binary files /dev/null and b/core/models/__pycache__/codi_2.cpython-38.pyc differ
diff --git a/core/models/__pycache__/dani_model.cpython-38.pyc b/core/models/__pycache__/dani_model.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1fb71654dca8747edee16fea4e5c11b939aef340
Binary files /dev/null and b/core/models/__pycache__/dani_model.cpython-38.pyc differ
diff --git
a/core/models/__pycache__/ema.cpython-38.pyc b/core/models/__pycache__/ema.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84880cceb2846e714dd480a8c13a44d7e3dc50bc Binary files /dev/null and b/core/models/__pycache__/ema.cpython-38.pyc differ diff --git a/core/models/__pycache__/model_module_infer.cpython-38.pyc b/core/models/__pycache__/model_module_infer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f99560bfe2e651637e4932c2314d33e8b0fc9797 Binary files /dev/null and b/core/models/__pycache__/model_module_infer.cpython-38.pyc differ diff --git a/core/models/__pycache__/sd.cpython-38.pyc b/core/models/__pycache__/sd.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aed18f436ec8af530fdd194245ec4acb80baf364 Binary files /dev/null and b/core/models/__pycache__/sd.cpython-38.pyc differ diff --git a/core/models/codi.py b/core/models/codi.py new file mode 100644 index 0000000000000000000000000000000000000000..9a91b4de008271e11884b7df906e95f24bd0f6e8 --- /dev/null +++ b/core/models/codi.py @@ -0,0 +1,227 @@ +from typing import Dict, List +import os + +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +import numpy.random as npr +import copy +from functools import partial +from contextlib import contextmanager + +from .common.get_model import get_model, register +from .sd import DDPM + +version = '0' +symbol = 'codi' + + +@register('codi', version) +class CoDi(DDPM): + def __init__(self, + audioldm_cfg=None, + autokl_cfg=None, + optimus_cfg=None, + clip_cfg=None, + clap_cfg=None, + vision_scale_factor=0.1812, + text_scale_factor=4.3108, + audio_scale_factor=0.9228, + scale_by_std=False, + *args, + **kwargs): + super().__init__(*args, **kwargs) + + if audioldm_cfg is not None: + self.audioldm = get_model()(audioldm_cfg) + + if autokl_cfg is not None: + self.autokl = get_model()(autokl_cfg) + + if optimus_cfg is not None: + self.optimus = get_model()(optimus_cfg) + + if clip_cfg is not None: + self.clip = get_model()(clip_cfg) + + if clap_cfg is not None: + self.clap = get_model()(clap_cfg) + + if not scale_by_std: + self.vision_scale_factor = vision_scale_factor + self.text_scale_factor = text_scale_factor + self.audio_scale_factor = audio_scale_factor + else: + self.register_buffer("text_scale_factor", torch.tensor(text_scale_factor)) + self.register_buffer("audio_scale_factor", torch.tensor(audio_scale_factor)) + self.register_buffer('vision_scale_factor', torch.tensor(vision_scale_factor)) + + @property + def device(self): + return next(self.parameters()).device + + @torch.no_grad() + def autokl_encode(self, image): + encoder_posterior = self.autokl.encode(image) + z = encoder_posterior.sample().to(image.dtype) + return self.vision_scale_factor * z + + @torch.no_grad() + def autokl_decode(self, z): + z = 1. 
/ self.vision_scale_factor * z
+        return self.autokl.decode(z)
+
+    @torch.no_grad()
+    def optimus_encode(self, text):
+        if isinstance(text, List):
+            tokenizer = self.optimus.tokenizer_encoder
+            token = [tokenizer.tokenize(sentence.lower()) for sentence in text]
+            token_id = []
+            for tokeni in token:
+                token_sentence = [tokenizer._convert_token_to_id(i) for i in tokeni]
+                token_sentence = tokenizer.add_special_tokens_single_sentence(token_sentence)
+                token_id.append(torch.LongTensor(token_sentence))
+            # use the public pad_sequence API instead of the private torch._C._nn binding
+            token_id = torch.nn.utils.rnn.pad_sequence(token_id, batch_first=True, padding_value=0.0)[:, :512]
+        else:
+            token_id = text
+        z = self.optimus.encoder(token_id, attention_mask=(token_id > 0))[1]
+        z_mu, z_logvar = self.optimus.encoder.linear(z).chunk(2, -1)
+        return z_mu.squeeze(1) * self.text_scale_factor
+
+    @torch.no_grad()
+    def optimus_decode(self, z, temperature=1.0, max_length=30):
+        z = 1.0 / self.text_scale_factor * z
+        return self.optimus.decode(z, temperature, max_length=max_length)
+
+    @torch.no_grad()
+    def audioldm_encode(self, audio, time=2.0):
+        encoder_posterior = self.audioldm.encode(audio, time=time)
+        z = encoder_posterior.sample().to(audio.dtype)
+        return z * self.audio_scale_factor
+
+    @torch.no_grad()
+    def audioldm_decode(self, z):
+        if torch.max(torch.abs(z)) > 1e2:
+            z = torch.clip(z, min=-10, max=10)
+        z = 1.0 / self.audio_scale_factor * z
+        return self.audioldm.decode(z)
+
+    @torch.no_grad()
+    def mel_spectrogram_to_waveform(self, mel):
+        # Mel: [bs, 1, t-steps, fbins]
+        if len(mel.size()) == 4:
+            mel = mel.squeeze(1)
+        mel = mel.permute(0, 2, 1)
+        waveform = self.audioldm.vocoder(mel)
+        waveform = waveform.cpu().detach().numpy()
+        return waveform
+
+    @torch.no_grad()
+    def clip_encode_text(self, text, encode_type='encode_text'):
+        swap_type = self.clip.encode_type
+        self.clip.encode_type = encode_type
+        embedding = self.clip(text, encode_type)
+        self.clip.encode_type = swap_type
+        return embedding
+
+    @torch.no_grad()
+    def clip_encode_vision(self, vision, encode_type='encode_vision'):
+        swap_type = self.clip.encode_type
+        self.clip.encode_type = encode_type
+        embedding = self.clip(vision, encode_type)
+        self.clip.encode_type = swap_type
+        return embedding
+
+    @torch.no_grad()
+    def clap_encode_audio(self, audio):
+        embedding = self.clap(audio)
+        return embedding
+
+    def forward(self, x=None, c=None, noise=None, xtype='image', ctype='prompt', u=None, return_algined_latents=False):
+        if isinstance(x, list):
+            t = torch.randint(0, self.num_timesteps, (x[0].shape[0],), device=x[0].device).long()
+        else:
+            t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=x.device).long()
+        return self.p_losses(x, c, t, noise, xtype, ctype, u, return_algined_latents)
+
+    def apply_model(self, x_noisy, t, cond, xtype='image', ctype='text', u=None, return_algined_latents=False):
+        return self.model.diffusion_model(x_noisy, t, cond, xtype, ctype, u, return_algined_latents)
+
+    def get_pixel_loss(self, pred, target, mean=True):
+        if self.loss_type == 'l1':
+            loss = (target - pred).abs()
+            if mean:
+                loss = loss.mean()
+        elif self.loss_type == 'l2':
+            if mean:
+                loss = torch.nn.functional.mse_loss(target, pred)
+            else:
+                loss = torch.nn.functional.mse_loss(target, pred, reduction='none')
+        else:
+            raise NotImplementedError(f"unknown loss type '{self.loss_type}'")
+        loss = torch.nan_to_num(loss, nan=0.0, posinf=0.0, neginf=0.0)
+        return loss
+
+    def get_text_loss(self, pred, target):
+        if self.loss_type == 'l1':
+            loss = (target - pred).abs()
+        elif self.loss_type == 'l2':
+            loss =
torch.nn.functional.mse_loss(target, pred, reduction='none') + loss = torch.nan_to_num(loss, nan=0.0, posinf=0.0, neginf=0.0) + return loss + + def p_losses(self, x_start, cond, t, noise=None, xtype='image', ctype='prompt', u=None, return_algined_latents=False): + if isinstance(x_start, list): + noise = [torch.randn_like(x_start_i) for x_start_i in x_start] if noise is None else noise + x_noisy = [self.q_sample(x_start=x_start_i, t=t, noise=noise_i) for x_start_i, noise_i in zip(x_start, noise)] + model_output = self.apply_model(x_noisy, t, cond, xtype, ctype, u, return_algined_latents) + if return_algined_latents: + return model_output + + loss_dict = {} + + if self.parameterization == "x0": + target = x_start + elif self.parameterization == "eps": + target = noise + else: + raise NotImplementedError() + + loss = 0.0 + for model_output_i, target_i, xtype_i in zip(model_output, target, xtype): + if xtype_i == 'image': + loss_simple = self.get_pixel_loss(model_output_i, target_i, mean=False).mean([1, 2, 3]) + elif xtype_i == 'video': + loss_simple = self.get_pixel_loss(model_output_i, target_i, mean=False).mean([1, 2, 3, 4]) + elif xtype_i == 'text': + loss_simple = self.get_text_loss(model_output_i, target_i).mean([1]) + elif xtype_i == 'audio': + loss_simple = self.get_pixel_loss(model_output_i, target_i, mean=False).mean([1, 2, 3]) + loss += loss_simple.mean() + return loss / len(xtype) + + else: + noise = torch.randn_like(x_start) if noise is None else noise + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + model_output = self.apply_model(x_noisy, t, cond, xtype, ctype) + + loss_dict = {} + + if self.parameterization == "x0": + target = x_start + elif self.parameterization == "eps": + target = noise + else: + raise NotImplementedError() + + if xtype == 'image': + loss_simple = self.get_pixel_loss(model_output, target, mean=False).mean([1, 2, 3]) + elif xtype == 'video': + loss_simple = self.get_pixel_loss(model_output, target, mean=False).mean([1, 2, 3, 4]) + elif xtype == 'text': + loss_simple = self.get_text_loss(model_output, target).mean([1]) + elif xtype == 'audio': + loss_simple = self.get_pixel_loss(model_output, target, mean=False).mean([1, 2, 3]) + loss = loss_simple.mean() + return loss diff --git a/core/models/codi_2.py b/core/models/codi_2.py new file mode 100644 index 0000000000000000000000000000000000000000..f81ef6df70a7677899b1ab982e94133f70c051aa --- /dev/null +++ b/core/models/codi_2.py @@ -0,0 +1,221 @@ +from typing import Dict, List +import os + +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +import numpy.random as npr +import copy +from functools import partial +from contextlib import contextmanager + +from .common.get_model import get_model, register +from .sd import DDPM + +version = '0' +symbol = 'thesis_model' + + +@register('thesis_model', version) +class CoDi(DDPM): + def __init__(self, + autokl_cfg=None, + optimus_cfg=None, + clip_cfg=None, + vision_scale_factor=0.1812, + text_scale_factor=4.3108, + audio_scale_factor=0.9228, + scale_by_std=False, + *args, + **kwargs): + super().__init__(*args, **kwargs) + + if autokl_cfg is not None: + self.autokl = get_model()(autokl_cfg) + + if optimus_cfg is not None: + self.optimus = get_model()(optimus_cfg) + + if clip_cfg is not None: + self.clip = get_model()(clip_cfg) + + if not scale_by_std: + self.vision_scale_factor = vision_scale_factor + self.text_scale_factor = text_scale_factor + self.audio_scale_factor = audio_scale_factor + else: + 
self.register_buffer("text_scale_factor", torch.tensor(text_scale_factor)) + self.register_buffer("audio_scale_factor", torch.tensor(audio_scale_factor)) + self.register_buffer('vision_scale_factor', torch.tensor(vision_scale_factor)) + + @property + def device(self): + return next(self.parameters()).device + + @torch.no_grad() + def autokl_encode(self, image): + encoder_posterior = self.autokl.encode(image) + z = encoder_posterior.sample().to(image.dtype) + return self.vision_scale_factor * z + + @torch.no_grad() + def autokl_decode(self, z): + z = 1. / self.vision_scale_factor * z + return self.autokl.decode(z) + + @torch.no_grad() + def optimus_encode(self, text): + if isinstance(text, List): + tokenizer = self.optimus.tokenizer_encoder + token = [tokenizer.tokenize(sentence.lower()) for sentence in text] + token_id = [] + for tokeni in token: + token_sentence = [tokenizer._convert_token_to_id(i) for i in tokeni] + token_sentence = tokenizer.add_special_tokens_single_sentence(token_sentence) + token_id.append(torch.LongTensor(token_sentence)) + token_id = torch._C._nn.pad_sequence(token_id, batch_first=True, padding_value=0.0)[:, :512] + else: + token_id = text + z = self.optimus.encoder(token_id, attention_mask=(token_id > 0))[1] + z_mu, z_logvar = self.optimus.encoder.linear(z).chunk(2, -1) + return z_mu.squeeze(1) * self.text_scale_factor + + @torch.no_grad() + def optimus_decode(self, z, temperature=1.0): + z = 1.0 / self.text_scale_factor * z + return self.optimus.decode(z, temperature) + + @torch.no_grad() + def clip_encode_text(self, text, encode_type='encode_text'): + swap_type = self.clip.encode_type + self.clip.encode_type = encode_type + embedding = self.clip(text, encode_type) + self.clip.encode_type = swap_type + return embedding + + @torch.no_grad() + def clip_encode_vision(self, vision, encode_type='encode_vision'): + swap_type = self.clip.encode_type + self.clip.encode_type = encode_type + embedding = self.clip(vision, encode_type) + self.clip.encode_type = swap_type + return embedding + + @torch.no_grad() + def clap_encode_audio(self, audio): + embedding = self.clap(audio) + return embedding + + def forward(self, x=None, c=None, noise=None, xtype='frontal', ctype='text', u=None, return_algined_latents=False, env_enc=False): + if isinstance(x, list): + t = torch.randint(0, self.num_timesteps, (x[0].shape[0],), device=x[0].device).long() + else: + t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=x.device).long() + return self.p_losses(x, c, t, noise, xtype, ctype, u, return_algined_latents, env_enc) + + def apply_model(self, x_noisy, t, cond, xtype='frontal', ctype='text', u=None, return_algined_latents=False, env_enc=False): + return self.model.diffusion_model(x_noisy, t, cond, xtype, ctype, u, return_algined_latents, env_enc=env_enc) + + def get_pixel_loss(self, pred, target, mean=True): + if self.loss_type == 'l1': + loss = (target - pred).abs() + if mean: + loss = loss.mean() + elif self.loss_type == 'l2': + if mean: + loss = torch.nn.functional.mse_loss(target, pred) + else: + loss = torch.nn.functional.mse_loss(target, pred, reduction='none') + else: + raise NotImplementedError("unknown loss type '{loss_type}'") + loss = torch.nan_to_num(loss, nan=0.0, posinf=0.0, neginf=-0.0) + return loss + + def get_text_loss(self, pred, target): + if self.loss_type == 'l1': + loss = (target - pred).abs() + elif self.loss_type == 'l2': + loss = torch.nn.functional.mse_loss(target, pred, reduction='none') + loss = torch.nan_to_num(loss, nan=0.0, posinf=0.0, 
neginf=0.0)
+        return loss
+
+    def p_losses(self, x_start, cond, t, noise=None, xtype='frontal', ctype='text', u=None,
+                 return_algined_latents=False, env_enc=False):
+        if isinstance(x_start, list):
+            noise = [torch.randn_like(x_start_i) for x_start_i in x_start] if noise is None else noise
+            x_noisy = [self.q_sample(x_start=x_start_i, t=t, noise=noise_i) for x_start_i, noise_i in
+                       zip(x_start, noise)]
+            h_con = None  # only set when the environmental encoders are active
+            if not env_enc:
+                model_output = self.apply_model(x_noisy, t, cond, xtype, ctype, u, return_algined_latents, env_enc)
+            else:
+                model_output, h_con = self.apply_model(x_noisy, t, cond, xtype, ctype, u, return_algined_latents, env_enc)
+            if return_algined_latents:
+                return model_output
+
+            loss_dict = {}
+
+            if self.parameterization == "x0":
+                target = x_start
+            elif self.parameterization == "eps":
+                target = noise
+            else:
+                raise NotImplementedError()
+
+            loss = 0.0
+            for model_output_i, target_i, xtype_i in zip(model_output, target, xtype):
+                if xtype_i == 'frontal':
+                    loss_simple = self.get_pixel_loss(model_output_i, target_i, mean=False).mean([1, 2, 3])
+                elif xtype_i == 'text':
+                    loss_simple = self.get_text_loss(model_output_i, target_i).mean([1])
+                elif xtype_i == 'lateral':
+                    loss_simple = self.get_pixel_loss(model_output_i, target_i, mean=False).mean([1, 2, 3])
+                loss += loss_simple.mean()
+
+            # If the model also returned h_con, we have the latent representations of the
+            # two modalities extracted by the environmental encoders. Since these are two
+            # tensors of shape batch_size x 1 x 1280, we can use them to compute an extra
+            # contrastive loss term (symmetric cross-entropy, as in CLIP).
+            if h_con is not None:
+                z_a, z_b = h_con
+
+                z_a = z_a / z_a.norm(dim=-1, keepdim=True)
+                z_b = z_b / z_b.norm(dim=-1, keepdim=True)
+
+                logits_a = z_a.squeeze() @ z_b.squeeze().t()
+                # the second logit matrix is the transpose of the first, not a copy of it
+                logits_b = logits_a.t()
+
+                labels = torch.arange(len(z_a)).to(z_a.device)
+
+                loss_a = F.cross_entropy(logits_a, labels)
+                loss_b = F.cross_entropy(logits_b, labels)
+
+                loss_con = (loss_a + loss_b) / 2
+                loss += loss_con
+            return loss / len(xtype)
+
+        else:
+            noise = torch.randn_like(x_start) if noise is None else noise
+            x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)
+            model_output = self.apply_model(x_noisy, t, cond, xtype, ctype)
+
+            loss_dict = {}
+
+            if self.parameterization == "x0":
+                target = x_start
+            elif self.parameterization == "eps":
+                target = noise
+            else:
+                raise NotImplementedError()
+
+            if xtype == 'frontal':
+                loss_simple = self.get_pixel_loss(model_output, target, mean=False).mean([1, 2, 3])
+            elif xtype == 'text':
+                loss_simple = self.get_text_loss(model_output, target).mean([1])
+            elif xtype == 'lateral':
+                loss_simple = self.get_pixel_loss(model_output, target, mean=False).mean([1, 2, 3])
+            loss = loss_simple.mean()
+            return loss
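Editor's note: the contrastive branch above is the standard CLIP-style symmetric objective. A minimal self-contained sketch of the same computation (the [bs, 1, 1280] environmental-encoder latents are assumed squeezed to [bs, 1280]; names are illustrative):

    import torch
    import torch.nn.functional as F

    def clip_style_contrastive_loss(z_a, z_b):
        # unit-normalize each modality's latent
        z_a = z_a / z_a.norm(dim=-1, keepdim=True)
        z_b = z_b / z_b.norm(dim=-1, keepdim=True)
        logits_a = z_a @ z_b.t()          # [bs, bs] cosine similarities
        logits_b = logits_a.t()           # transpose view, not a recomputation
        labels = torch.arange(z_a.size(0), device=z_a.device)  # matches on the diagonal
        return (F.cross_entropy(logits_a, labels) + F.cross_entropy(logits_b, labels)) / 2

diff --git a/core/models/common/__pycache__/get_model.cpython-38.pyc b/core/models/common/__pycache__/get_model.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..95fa24e299c9960ef068c818731514bd00a3a157
Binary files /dev/null and b/core/models/common/__pycache__/get_model.cpython-38.pyc differ
diff --git a/core/models/common/__pycache__/get_optimizer.cpython-38.pyc b/core/models/common/__pycache__/get_optimizer.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..245b6154f44997d185704c608406ed0a1ecd36d5
Binary files /dev/null and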
b/core/models/common/__pycache__/get_optimizer.cpython-38.pyc differ diff --git a/core/models/common/__pycache__/get_scheduler.cpython-38.pyc b/core/models/common/__pycache__/get_scheduler.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0628287f2db98d72942a52770267740d025b977 Binary files /dev/null and b/core/models/common/__pycache__/get_scheduler.cpython-38.pyc differ diff --git a/core/models/common/__pycache__/utils.cpython-38.pyc b/core/models/common/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1bbd3d038bd45650ab7778ff83a27e211d8f7564 Binary files /dev/null and b/core/models/common/__pycache__/utils.cpython-38.pyc differ diff --git a/core/models/common/get_model.py b/core/models/common/get_model.py new file mode 100644 index 0000000000000000000000000000000000000000..481cee6ea7a14fa8f00f09437fcd19d6337aa4e6 --- /dev/null +++ b/core/models/common/get_model.py @@ -0,0 +1,88 @@ +from email.policy import strict +import torch +import torchvision.models +import os.path as osp +import copy +from .utils import \ + get_total_param, get_total_param_sum, \ + get_unit + + +def singleton(class_): + instances = {} + + def getinstance(*args, **kwargs): + if class_ not in instances: + instances[class_] = class_(*args, **kwargs) + return instances[class_] + return getinstance + + +def preprocess_model_args(args): + # If args has layer_units, get the corresponding + # units. + # If args get backbone, get the backbone model. + args = copy.deepcopy(args) + if 'layer_units' in args: + layer_units = [ + get_unit()(i) for i in args.layer_units + ] + args.layer_units = layer_units + if 'backbone' in args: + args.backbone = get_model()(args.backbone) + return args + +@singleton +class get_model(object): + def __init__(self): + self.model = {} + self.version = {} + + def register(self, model, name, version='x'): + self.model[name] = model + self.version[name] = version + + def __call__(self, cfg, verbose=True): + """ + Construct model based on the config. + """ + t = cfg.type + + # the register is in each file + if t.find('audioldm')==0: + from ..latent_diffusion.vae import audioldm + elif t.find('autoencoderkl')==0: + from ..latent_diffusion.vae import autokl + elif t.find('optimus')==0: + from ..latent_diffusion.vae import optimus + + elif t.find('clip')==0: + from ..encoders import clip + elif t.find('clap')==0: + from ..encoders import clap + + elif t.find('sd')==0: + from .. import sd + elif t.find('codi')==0: + from .. import codi + elif t.find('thesis_model')==0: + from .. 
import codi_2 + elif t.find('openai_unet')==0: + from ..latent_diffusion import diffusion_unet + elif t.find('prova')==0: + from ..latent_diffusion import diffusion_unet + + args = preprocess_model_args(cfg.args) + net = self.model[t](**args) + + return net + + def get_version(self, name): + return self.version[name] + + +def register(name, version='x'): + def wrapper(class_): + get_model().register(class_, name, version) + return class_ + return wrapper diff --git a/core/models/common/get_optimizer.py b/core/models/common/get_optimizer.py new file mode 100644 index 0000000000000000000000000000000000000000..10ac7f779a65fe12ac878dd660a151e9e2e3d468 --- /dev/null +++ b/core/models/common/get_optimizer.py @@ -0,0 +1,50 @@ +import torch +import torch.optim as optim +import numpy as np +import itertools + + +def singleton(class_): + instances = {} + + def getinstance(*args, **kwargs): + if class_ not in instances: + instances[class_] = class_(*args, **kwargs) + return instances[class_] + return getinstance + + +class get_optimizer(object): + def __init__(self): + self.optimizer = {} + self.register(optim.SGD, 'sgd') + self.register(optim.Adam, 'adam') + self.register(optim.AdamW, 'adamw') + + def register(self, optim, name): + self.optimizer[name] = optim + + def __call__(self, net, cfg): + if cfg is None: + return None + t = cfg.type + if isinstance(net, (torch.nn.DataParallel, + torch.nn.parallel.DistributedDataParallel)): + netm = net.module + else: + netm = net + pg = getattr(netm, 'parameter_group', None) + + if pg is not None: + params = [] + for group_name, module_or_para in pg.items(): + if not isinstance(module_or_para, list): + module_or_para = [module_or_para] + + grouped_params = [mi.parameters() if isinstance(mi, torch.nn.Module) else [mi] for mi in module_or_para] + grouped_params = itertools.chain(*grouped_params) + pg_dict = {'params': grouped_params, 'name': group_name} + params.append(pg_dict) + else: + params = net.parameters() + return self.optimizer[t](params, lr=0, **cfg.args) diff --git a/core/models/common/get_scheduler.py b/core/models/common/get_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..67c0a5c0a0d3107a61b5c87eb0f67a2136c12bb0 --- /dev/null +++ b/core/models/common/get_scheduler.py @@ -0,0 +1,273 @@ +import torch +import torch.optim as optim +import numpy as np +import copy +from ... 
import sync
+from ...cfg_holder import cfg_unique_holder as cfguh
+
+
+def singleton(class_):
+    instances = {}
+
+    def getinstance(*args, **kwargs):
+        if class_ not in instances:
+            instances[class_] = class_(*args, **kwargs)
+        return instances[class_]
+    return getinstance
+
+
+@singleton
+class get_scheduler(object):
+    def __init__(self):
+        self.lr_scheduler = {}
+
+    def register(self, lrsf, name):
+        self.lr_scheduler[name] = lrsf
+
+    def __call__(self, cfg):
+        if cfg is None:
+            return None
+        if isinstance(cfg, list):
+            schedulers = []
+            for ci in cfg:
+                t = ci.type
+                schedulers.append(
+                    self.lr_scheduler[t](**ci.args))
+            if len(schedulers) == 0:
+                raise ValueError
+            else:
+                return compose_scheduler(schedulers)
+        t = cfg.type
+        return self.lr_scheduler[t](**cfg.args)
+
+
+def register(name):
+    def wrapper(class_):
+        get_scheduler().register(class_, name)
+        return class_
+    return wrapper
+
+
+class template_scheduler(object):
+    def __init__(self, step):
+        self.step = step
+
+    def __getitem__(self, idx):
+        raise ValueError
+
+    def set_lr(self, optim, new_lr, pg_lrscale=None):
+        """
+        Set each parameter group in optim to new_lr.
+        new_lr is looked up by step index; pg_lrscale tells how to scale
+        each parameter group.
+        """
+        # new_lr = self.__getitem__(idx)
+        pg_lrscale = copy.deepcopy(pg_lrscale)
+        for pg in optim.param_groups:
+            if pg_lrscale is None:
+                pg['lr'] = new_lr
+            else:
+                pg['lr'] = new_lr * pg_lrscale.pop(pg['name'])
+        assert (pg_lrscale is None) or (len(pg_lrscale) == 0), \
+            "pg_lrscale doesn't match pg"
+
+@register('constant')
+class constant_scheduler(template_scheduler):
+    def __init__(self, lr, step):
+        super().__init__(step)
+        self.lr = lr
+
+    def __getitem__(self, idx):
+        if idx >= self.step:
+            raise ValueError
+        return self.lr
+
+
+@register('poly')
+class poly_scheduler(template_scheduler):
+    def __init__(self, start_lr, end_lr, power, step):
+        super().__init__(step)
+        self.start_lr = start_lr
+        self.end_lr = end_lr
+        self.power = power
+
+    def __getitem__(self, idx):
+        if idx >= self.step:
+            raise ValueError
+        a, b = self.start_lr, self.end_lr
+        p, n = self.power, self.step
+        return b + (a-b)*((1-idx/n)**p)
+
+
+@register('linear')
+class linear_scheduler(template_scheduler):
+    def __init__(self, start_lr, end_lr, step):
+        super().__init__(step)
+        self.start_lr = start_lr
+        self.end_lr = end_lr
+
+    def __getitem__(self, idx):
+        if idx >= self.step:
+            raise ValueError
+        a, b, n = self.start_lr, self.end_lr, self.step
+        return b + (a-b)*(1-idx/n)
+
+
+@register('multistage')
+class multistage_scheduler(template_scheduler):
+    # renamed from a second `constant_scheduler`, which shadowed the class
+    # registered under 'constant' above
+    def __init__(self, start_lr, milestones, gamma, step):
+        super().__init__(step)
+        self.start_lr = start_lr
+        m = [0] + milestones + [step]
+        lr_iter = start_lr
+        self.lr = []
+        for ms, me in zip(m[0:-1], m[1:]):
+            for _ in range(ms, me):
+                self.lr.append(lr_iter)
+            lr_iter *= gamma
+
+    def __getitem__(self, idx):
+        if idx >= self.step:
+            raise ValueError
+        return self.lr[idx]
+
+
+class compose_scheduler(template_scheduler):
+    def __init__(self, schedulers):
+        self.schedulers = schedulers
+        self.step = [si.step for si in schedulers]
+        self.step_milestone = []
+        acc = 0
+        for i in self.step:
+            acc += i
+            self.step_milestone.append(acc)
+        self.step = sum(self.step)
+
+    def __getitem__(self, idx):
+        if idx >= self.step:
+            raise ValueError
+        ms = self.step_milestone
+        # walk the stage boundaries without shadowing idx, and index the
+        # matching stage with a stage-local step
+        for i, (mi, mj) in enumerate(zip([0] + ms[:-1], ms)):
+            if mi <= idx < mj:
+                return self.schedulers[i][idx - mi]
+        raise ValueError
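+
+# Editor's sketch of how the stages compose (assumes direct class use; the
+# config-driven code path goes through get_scheduler() instead):
+if __name__ == '__main__':
+    warmup = constant_scheduler(lr=1e-4, step=100)
+    rest = constant_scheduler(lr=1e-5, step=900)
+    sched = compose_scheduler([warmup, rest])
+    assert sched.step == 1000
+    assert sched[50] == 1e-4    # first stage
+    assert sched[150] == 1e-5   # second stage, stage-local index 50
+
+
+####################
+# lambda scheduler #
+####################
+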
+ +class LambdaWarmUpCosineScheduler(template_scheduler): + """ + note: use with a base_lr of 1.0 + """ + def __init__(self, + base_lr, + warm_up_steps, + lr_min, lr_max, lr_start, max_decay_steps, verbosity_interval=0): + cfgt = cfguh().cfg.train + bs = cfgt.batch_size + if 'gradacc_every' not in cfgt: + print('Warning, gradacc_every is not found in xml, use 1 as default.') + acc = cfgt.get('gradacc_every', 1) + self.lr_multi = base_lr * bs * acc + self.lr_warm_up_steps = warm_up_steps + self.lr_start = lr_start + self.lr_min = lr_min + self.lr_max = lr_max + self.lr_max_decay_steps = max_decay_steps + self.last_lr = 0. + self.verbosity_interval = verbosity_interval + + def schedule(self, n): + if self.verbosity_interval > 0: + if n % self.verbosity_interval == 0: + print(f"current step: {n}, recent lr-multiplier: {self.last_lr}") + if n < self.lr_warm_up_steps: + lr = (self.lr_max - self.lr_start) / self.lr_warm_up_steps * n + self.lr_start + self.last_lr = lr + return lr + else: + t = (n - self.lr_warm_up_steps) / (self.lr_max_decay_steps - self.lr_warm_up_steps) + t = min(t, 1.0) + lr = self.lr_min + 0.5 * (self.lr_max - self.lr_min) * ( + 1 + np.cos(t * np.pi)) + self.last_lr = lr + return lr + + def __getitem__(self, idx): + return self.schedule(idx) * self.lr_multi + + +class LambdaWarmUpCosineScheduler2(template_scheduler): + """ + supports repeated iterations, configurable via lists + note: use with a base_lr of 1.0. + """ + def __init__(self, + base_lr, + warm_up_steps, + f_min, f_max, f_start, cycle_lengths, verbosity_interval=0): + cfgt = cfguh().cfg.train + # bs = cfgt.batch_size + # if 'gradacc_every' not in cfgt: + # print('Warning, gradacc_every is not found in xml, use 1 as default.') + # acc = cfgt.get('gradacc_every', 1) + # self.lr_multi = base_lr * bs * acc + self.lr_multi = base_lr + assert len(warm_up_steps) == len(f_min) == len(f_max) == len(f_start) == len(cycle_lengths) + self.lr_warm_up_steps = warm_up_steps + self.f_start = f_start + self.f_min = f_min + self.f_max = f_max + self.cycle_lengths = cycle_lengths + self.cum_cycles = np.cumsum([0] + list(self.cycle_lengths)) + self.last_f = 0. 
+ self.verbosity_interval = verbosity_interval + + def find_in_interval(self, n): + interval = 0 + for cl in self.cum_cycles[1:]: + if n <= cl: + return interval + interval += 1 + + def schedule(self, n): + cycle = self.find_in_interval(n) + n = n - self.cum_cycles[cycle] + if self.verbosity_interval > 0: + if n % self.verbosity_interval == 0: print(f"current step: {n}, recent lr-multiplier: {self.last_f}, " + f"current cycle {cycle}") + if n < self.lr_warm_up_steps[cycle]: + f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[cycle] * n + self.f_start[cycle] + self.last_f = f + return f + else: + t = (n - self.lr_warm_up_steps[cycle]) / (self.cycle_lengths[cycle] - self.lr_warm_up_steps[cycle]) + t = min(t, 1.0) + f = self.f_min[cycle] + 0.5 * (self.f_max[cycle] - self.f_min[cycle]) * ( + 1 + np.cos(t * np.pi)) + self.last_f = f + return f + + def __getitem__(self, idx): + return self.schedule(idx) * self.lr_multi + + +@register('stable_diffusion_linear') +class LambdaLinearScheduler(LambdaWarmUpCosineScheduler2): + def schedule(self, n): + cycle = self.find_in_interval(n) + n = n - self.cum_cycles[cycle] + if self.verbosity_interval > 0: + if n % self.verbosity_interval == 0: + print(f"current step: {n}, recent lr-multiplier: {self.last_f}, " + f"current cycle {cycle}") + if n < self.lr_warm_up_steps[cycle]: + f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[cycle] * n + self.f_start[cycle] + self.last_f = f + return f + else: + f = self.f_min[cycle] + (self.f_max[cycle] - self.f_min[cycle]) * (self.cycle_lengths[cycle] - n) / (self.cycle_lengths[cycle]) + self.last_f = f + return f \ No newline at end of file diff --git a/core/models/common/utils.py b/core/models/common/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b84e8b6ae51f7be515c0bee98ce469dade1cdb1b --- /dev/null +++ b/core/models/common/utils.py @@ -0,0 +1,310 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +import functools +import itertools + + +######## +# unit # +######## + + +def singleton(class_): + instances = {} + + def getinstance(*args, **kwargs): + if class_ not in instances: + instances[class_] = class_(*args, **kwargs) + return instances[class_] + + return getinstance + + +def str2value(v): + v = v.strip() + try: + return int(v) + except: + pass + try: + return float(v) + except: + pass + if v in ('True', 'true'): + return True + elif v in ('False', 'false'): + return False + else: + return v + + +@singleton +class get_unit(object): + def __init__(self): + self.unit = {} + self.register('none', None) + + # general convolution + self.register('conv', nn.Conv2d) + self.register('bn', nn.BatchNorm2d) + self.register('relu', nn.ReLU) + self.register('relu6', nn.ReLU6) + self.register('lrelu', nn.LeakyReLU) + self.register('dropout', nn.Dropout) + self.register('dropout2d', nn.Dropout2d) + self.register('sine', Sine) + self.register('relusine', ReLUSine) + + def register(self, + name, + unitf, ): + + self.unit[name] = unitf + + def __call__(self, name): + if name is None: + return None + i = name.find('(') + i = len(name) if i == -1 else i + t = name[:i] + f = self.unit[t] + args = name[i:].strip('()') + if len(args) == 0: + args = {} + return f + else: + args = args.split('=') + args = [[','.join(i.split(',')[:-1]), i.split(',')[-1]] for i in args] + args = list(itertools.chain.from_iterable(args)) + args = [i.strip() for i in args if len(i) > 0] + kwargs = {} + for k, v in zip(args[::2], args[1::2]): + if 
v[0] == '(' and v[-1] == ')':
+                    kwargs[k] = tuple([str2value(i) for i in v.strip('()').split(',')])
+                elif v[0] == '[' and v[-1] == ']':
+                    kwargs[k] = [str2value(i) for i in v.strip('[]').split(',')]
+                else:
+                    kwargs[k] = str2value(v)
+            return functools.partial(f, **kwargs)
+
+
+def register(name):
+    def wrapper(class_):
+        get_unit().register(name, class_)
+        return class_
+
+    return wrapper
+
+
+class Sine(object):
+    def __init__(self, freq, gain=1):
+        self.freq = freq
+        self.gain = gain
+        self.repr = 'sine(freq={}, gain={})'.format(freq, gain)
+
+    def __call__(self, x, gain=1):
+        act_gain = self.gain * gain
+        return torch.sin(self.freq * x) * act_gain
+
+    def __repr__(self, ):
+        return self.repr
+
+
+class ReLUSine(nn.Module):
+    def __init__(self):  # was `__init`, which never ran as a constructor
+        super().__init__()
+
+    def forward(self, input):
+        a = torch.sin(30 * input)
+        b = nn.ReLU(inplace=False)(input)
+        return a + b
+
+
+@register('lrelu_agc')
+class lrelu_agc(object):
+    """
+    The lrelu layer with alpha, gain and clamp
+    """
+
+    def __init__(self, alpha=0.1, gain=1, clamp=None):
+        # super().__init__()
+        self.alpha = alpha
+        if gain == 'sqrt_2':
+            self.gain = np.sqrt(2)
+        else:
+            self.gain = gain
+        self.clamp = clamp
+        self.repr = 'lrelu_agc(alpha={}, gain={}, clamp={})'.format(
+            alpha, gain, clamp)
+
+    # def forward(self, x, gain=1):
+    def __call__(self, x, gain=1):
+        x = F.leaky_relu(x, negative_slope=self.alpha, inplace=True)
+        act_gain = self.gain * gain
+        act_clamp = self.clamp * gain if self.clamp is not None else None
+        if act_gain != 1:
+            x = x * act_gain
+        if act_clamp is not None:
+            x = x.clamp(-act_clamp, act_clamp)
+        return x
+
+    def __repr__(self, ):
+        return self.repr
+
+
+####################
+# spatial encoding #
+####################
+
+
+@register('se')
+class SpatialEncoding(nn.Module):
+    def __init__(self,
+                 in_dim,
+                 out_dim,
+                 sigma=6,
+                 cat_input=True,
+                 require_grad=False, ):
+
+        super().__init__()
+        assert out_dim % (2 * in_dim) == 0, "dimension must be divisible"
+
+        n = out_dim // 2 // in_dim
+        m = 2 ** np.linspace(0, sigma, n)
+        m = np.stack([m] + [np.zeros_like(m)] * (in_dim - 1), axis=-1)
+        m = np.concatenate([np.roll(m, i, axis=-1) for i in range(in_dim)], axis=0)
+        self.emb = torch.FloatTensor(m)
+        if require_grad:
+            self.emb = nn.Parameter(self.emb, requires_grad=True)
+        self.in_dim = in_dim
+        self.out_dim = out_dim
+        self.sigma = sigma
+        self.cat_input = cat_input
+        self.require_grad = require_grad
+
+    def forward(self, x, format='[n x c]'):
+        """
+        Args:
+            x: [n x m1] input coordinates (m1 is usually 2)
+            format: '[n x c]' or '[bs x c x 2D]'
+        Returns:
+            z: [n x m2] encoded features (m2 is the output dimension)
+        """
+        if format == '[bs x c x 2D]':
+            xshape = x.shape
+            x = x.permute(0, 2, 3, 1).contiguous()
+            x = x.view(-1, x.size(-1))
+        elif format == '[n x c]':
+            pass
+        else:
+            raise ValueError
+
+        if not self.require_grad:
+            self.emb = self.emb.to(x.device)
+        y = torch.mm(x, self.emb.T)
+        if self.cat_input:
+            z = torch.cat([x, torch.sin(y), torch.cos(y)], dim=-1)
+        else:
+            z = torch.cat([torch.sin(y), torch.cos(y)], dim=-1)
+
+        if format == '[bs x c x 2D]':
+            z = z.view(xshape[0], xshape[2], xshape[3], -1)
+            z = z.permute(0, 3, 1, 2).contiguous()
+        return z
+
+    def extra_repr(self):
+        outstr = 'SpatialEncoding (in={}, out={}, sigma={}, cat_input={}, require_grad={})'.format(
+            self.in_dim, self.out_dim, self.sigma, self.cat_input, self.require_grad)
+        return outstr
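+
+# Editor's sketch of the spec-string parser and SpatialEncoding together
+# (shapes inferred from the constructor above; with cat_input=True the output
+# width is out_dim + in_dim):
+if __name__ == '__main__':
+    se = get_unit()('se(in_dim=2, out_dim=128)')()  # partial -> module
+    xy = torch.rand(1024, 2)                        # n x 2 coordinates
+    z = se(xy)                                      # [1024, 130]: [x, sin(xW), cos(xW)]
+
+
+@register('rffe')
+class RFFEncoding(SpatialEncoding):
+    """
+    Random Fourier Features
+    """
+
+    def __init__(self,
+                 in_dim,
+                 out_dim,
+                 sigma=6,
+                 cat_input=True,
+                 require_grad=False, 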
): + super().__init__(in_dim, out_dim, sigma, cat_input, require_grad) + n = out_dim // 2 + m = np.random.normal(0, sigma, size=(n, in_dim)) + self.emb = torch.FloatTensor(m) + if require_grad: + self.emb = nn.Parameter(self.emb, requires_grad=True) + + def extra_repr(self): + outstr = 'RFFEncoding (in={}, out={}, sigma={}, cat_input={}, require_grad={})'.format( + self.in_dim, self.out_dim, self.sigma, self.cat_input, self.require_grad) + return outstr + + +########## +# helper # +########## + + +def freeze(net): + for m in net.modules(): + if isinstance(m, ( + nn.BatchNorm2d, + nn.SyncBatchNorm,)): + # inplace_abn not supported + m.eval() + for pi in net.parameters(): + pi.requires_grad = False + return net + + +def common_init(m): + if isinstance(m, ( + nn.Conv2d, + nn.ConvTranspose2d,)): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + if m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, ( + nn.BatchNorm2d, + nn.SyncBatchNorm,)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + else: + pass + + +def init_module(module): + """ + Args: + module: [nn.module] list or nn.module + a list of module to be initialized. + """ + if isinstance(module, (list, tuple)): + module = list(module) + else: + module = [module] + + for mi in module: + for mii in mi.modules(): + common_init(mii) + + +def get_total_param(net): + if getattr(net, 'parameters', None) is None: + return 0 + return sum(p.numel() for p in net.parameters()) + + +def get_total_param_sum(net): + if getattr(net, 'parameters', None) is None: + return 0 + with torch.no_grad(): + s = sum(p.cpu().detach().numpy().sum().item() for p in net.parameters()) + return s diff --git a/core/models/dani_model.py b/core/models/dani_model.py new file mode 100644 index 0000000000000000000000000000000000000000..edb71feabc36e77f3226e0420d61cd9c16fd3ad0 --- /dev/null +++ b/core/models/dani_model.py @@ -0,0 +1,170 @@ +import os +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.transforms as tvtrans + +from einops import rearrange + +import pytorch_lightning as pl + +from . 
import get_model +from ..cfg_helper import model_cfg_bank +from ..common.utils import regularize_image, regularize_video, remove_duplicate_word + +import warnings + +warnings.filterwarnings("ignore") + + +class dani_model(pl.LightningModule): + def __init__(self, model='thesis_model', load_weights=True, data_dir='pretrained', pth=["CoDi_encoders.pth"], fp16=False): + super().__init__() + # import torch + # device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu") + cfgm = model_cfg_bank()(model) + net = get_model()(cfgm) + if load_weights: + for path in pth: + net.load_state_dict(torch.load(os.path.join(data_dir, path), map_location='cpu'), strict=False) + print('Load pretrained weight from {}'.format(pth)) + + self.net = net + + from core.models.ddim.ddim_vd import DDIMSampler_VD + self.sampler = DDIMSampler_VD(net) + + def decode(self, z, xtype): + device = z.device + net = self.net + z = z.to(device) + if xtype == 'image': + x = net.autokl_decode(z) + x = torch.clamp((x + 1.0) / 2.0, min=0.0, max=1.0) + return x + + elif xtype == 'video': + num_frames = z.shape[2] + z = rearrange(z, 'b c f h w -> (b f) c h w') + x = net.autokl_decode(z) + x = rearrange(x, '(b f) c h w -> b f c h w', f=num_frames) + + x = torch.clamp((x + 1.0) / 2.0, min=0.0, max=1.0) + video_list = [] + for video in x: + video_list.append([tvtrans.ToPILImage()(xi) for xi in video]) + return video_list + + elif xtype == 'text': + prompt_temperature = 1.0 + prompt_merge_same_adj_word = True + x = net.optimus_decode(z, temperature=prompt_temperature) + """ + if prompt_merge_same_adj_word: + xnew = [] + for xi in x: + xi_split = xi.split() + xinew = [] + for idxi, wi in enumerate(xi_split): + if idxi!=0 and wi==xi_split[idxi-1]: + continue + xinew.append(wi) + xnew.append(remove_duplicate_word(' '.join(xinew))) + x = xnew + """ + return x + + elif xtype == 'audio': + x = net.audioldm_decode(z) + x = net.mel_spectrogram_to_waveform(x) + return x + + def forward(self, xtype=[], condition=[], condition_types=[], n_samples=1, + mix_weight={'video': 1, 'audio': 1, 'text': 1, 'image': 1}, image_size=256, ddim_steps=50, scale=7.5, + num_frames=8): + # import torch + # device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu") + device = self.device + net = self.net + sampler = self.sampler + ddim_eta = 0.0 + + conditioning = [] + assert len(set(condition_types)) == len(condition_types), "we don't support condition with same modalities yet." 
+        assert len(condition) == len(condition_types)
+
+        for i, condition_type in enumerate(condition_types):
+            if condition_type == 'image':
+                ctemp1 = regularize_image(condition[i]).squeeze().to(device)
+                ctemp1 = ctemp1[None].repeat(n_samples, 1, 1, 1)
+                cim = net.clip_encode_vision(ctemp1).to(device)
+                uim = None
+                if scale != 1.0:
+                    dummy = torch.zeros_like(ctemp1).to(device)
+                    uim = net.clip_encode_vision(dummy).to(device)
+                conditioning.append(torch.cat([uim, cim]))
+
+            elif condition_type == 'video':
+                ctemp1 = regularize_video(condition[i]).to(device)
+                ctemp1 = ctemp1[None].repeat(n_samples, 1, 1, 1, 1)
+                cim = net.clip_encode_vision(ctemp1).to(device)
+                uim = None
+                if scale != 1.0:
+                    dummy = torch.zeros_like(ctemp1).to(device)
+                    uim = net.clip_encode_vision(dummy).to(device)
+                conditioning.append(torch.cat([uim, cim]))
+
+            elif condition_type == 'audio':
+                ctemp = condition[i][None].repeat(n_samples, 1, 1)
+                cad = net.clap_encode_audio(ctemp)
+                uad = None
+                if scale != 1.0:
+                    dummy = torch.zeros_like(ctemp)
+                    uad = net.clap_encode_audio(dummy)
+                conditioning.append(torch.cat([uad, cad]))
+
+            elif condition_type == 'text':
+                ctx = net.clip_encode_text(n_samples * [condition[i]]).to(device)
+                utx = None
+                if scale != 1.0:
+                    utx = net.clip_encode_text(n_samples * [""]).to(device)
+                conditioning.append(torch.cat([utx, ctx]))
+
+        shapes = []
+        for xtype_i in xtype:
+            if xtype_i == 'image':
+                h, w = [image_size, image_size]
+                shape = [n_samples, 4, h // 8, w // 8]
+            elif xtype_i == 'video':
+                h, w = [image_size, image_size]
+                shape = [n_samples, 4, num_frames, h // 8, w // 8]
+            elif xtype_i == 'text':
+                n = 768
+                shape = [n_samples, n]
+            elif xtype_i == 'audio':
+                h, w = [256, 16]
+                shape = [n_samples, 8, h, w]
+            else:
+                raise ValueError(f"unsupported output modality '{xtype_i}'")
+            shapes.append(shape)
+
+        z, _ = sampler.sample(
+            steps=ddim_steps,
+            shape=shapes,
+            condition=conditioning,
+            unconditional_guidance_scale=scale,
+            xtype=xtype,
+            condition_types=condition_types,
+            eta=ddim_eta,
+            verbose=False,
+            mix_weight=mix_weight)
+
+        out_all = []
+        for i, xtype_i in enumerate(xtype):
+            z[i] = z[i].to(device)
+            x_i = self.decode(z[i], xtype_i)
+            out_all.append(x_i)
+        return out_all
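Editor's note: for orientation, a hypothetical end-to-end call of the wrapper above; the checkpoint name, prompt, and output shape are illustrative assumptions, not taken from the source:

    # text-conditioned image sampling via dani_model (sketch)
    model = dani_model(model='thesis_model', data_dir='pretrained',
                       pth=['CoDi_encoders.pth'])
    outs = model(xtype=['image'],
                 condition=['a frontal chest x-ray'],
                 condition_types=['text'],
                 n_samples=1, image_size=256, ddim_steps=50, scale=7.5)
    images = outs[0]  # decoded batch, clamped to [0, 1]

diff --git a/core/models/ddim/__pycache__/ddim.cpython-38.pyc b/core/models/ddim/__pycache__/ddim.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..95b9724582c574078441d49682a6d69c455718bd
Binary files /dev/null and b/core/models/ddim/__pycache__/ddim.cpython-38.pyc differ
diff --git a/core/models/ddim/__pycache__/ddim_vd.cpython-38.pyc b/core/models/ddim/__pycache__/ddim_vd.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b909844df277911df78bec4726af1bd0d254f4cf
Binary files /dev/null and b/core/models/ddim/__pycache__/ddim_vd.cpython-38.pyc differ
diff --git a/core/models/ddim/__pycache__/diffusion_utils.cpython-38.pyc b/core/models/ddim/__pycache__/diffusion_utils.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..79bfef9490d05dee64b7bafbec316a44d718577c
Binary files /dev/null and b/core/models/ddim/__pycache__/diffusion_utils.cpython-38.pyc differ
diff --git a/core/models/ddim/ddim.py b/core/models/ddim/ddim.py
new file mode 100644
index 0000000000000000000000000000000000000000..0beba123b041382c417b5ac6224bb0a0025b2e63
--- /dev/null
+++ b/core/models/ddim/ddim.py
@@ -0,0 +1,224 @@
+"""SAMPLING ONLY."""
+
+import torch
+import numpy as np
+from tqdm import tqdm
+from functools import partial
+
+from 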
.diffusion_utils import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like + + +class DDIMSampler(object): + def __init__(self, model, schedule="linear", **kwargs): + super().__init__() + self.model = model + self.ddpm_num_timesteps = model.num_timesteps + self.schedule = schedule + + def register_buffer(self, name, attr): + # import torch + # device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu") + device = self.model.device + + if type(attr) == torch.Tensor: + if attr.device != device: + attr = attr.to(device) + setattr(self, name, attr) + + def make_schedule(self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0., verbose=True): + self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, + num_ddim_timesteps=ddim_num_steps, + num_ddpm_timesteps=self.ddpm_num_timesteps, + verbose=verbose) + alphas_cumprod = self.model.alphas_cumprod + assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep' + to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device) + + self.register_buffer('betas', to_torch(self.model.betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu()))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu()))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu()))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu()))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1))) + + # ddim sampling parameters + ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters( + alphacums=alphas_cumprod.cpu(), + ddim_timesteps=self.ddim_timesteps, + eta=ddim_eta,verbose=verbose) + + self.register_buffer('ddim_sigmas', ddim_sigmas) + self.register_buffer('ddim_alphas', ddim_alphas) + self.register_buffer('ddim_alphas_prev', ddim_alphas_prev) + self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. - ddim_alphas)) + sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt( + (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * ( + 1 - self.alphas_cumprod / self.alphas_cumprod_prev)) + self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps) + + @torch.no_grad() + def sample(self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0., + mask=None, + x0=None, + temperature=1., + noise_dropout=0., + score_corrector=None, + corrector_kwargs=None, + verbose=True, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1., + unconditional_conditioning=None, + video_frame_share_noise=False, + # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ... 
+ **kwargs + ): + # device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu") + device = self.model.device + + if conditioning is not None: + if isinstance(conditioning, dict): + cbs = conditioning[list(conditioning.keys())[0]].shape[0] + if cbs != batch_size: + print(f"Warning: Got {cbs} conditionings but batch-size is {batch_size}") + else: + if conditioning.shape[0] != batch_size: + print(f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}") + + self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose) + # sampling + C, H, W = shape + size = (batch_size, C, H, W) + print(f'Data shape for DDIM sampling is {size}, eta {eta}') + + samples, intermediates = self.ddim_sampling(conditioning, size, + callback=callback, + img_callback=img_callback, + quantize_denoised=quantize_x0, + mask=mask, x0=x0, + ddim_use_original_steps=False, + noise_dropout=noise_dropout, + temperature=temperature, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + x_T=x_T, + log_every_t=log_every_t, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + ) + return samples, intermediates + + @torch.no_grad() + def ddim_sampling(self, + cond, shape, + x_T=None, + ddim_use_original_steps=False, + callback=None, + timesteps=None, + quantize_denoised=False, + mask=None, x0=None, + img_callback=None, log_every_t=100, + temperature=1., + noise_dropout=0., + score_corrector=None, + corrector_kwargs=None, + unconditional_guidance_scale=1., + unconditional_conditioning=None,): + device = self.model.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + + if timesteps is None: + timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps + elif timesteps is not None and not ddim_use_original_steps: + subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1 + timesteps = self.ddim_timesteps[:subset_end] + + intermediates = {'x_inter': [img], 'pred_x0': [img]} + time_range = reversed(range(0,timesteps)) if ddim_use_original_steps else np.flip(timesteps) + total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0] + print(f"Running DDIM Sampling with {total_steps} timesteps") + + iterator = tqdm(time_range, desc='DDIM Sampler', total=total_steps) + + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((b,), step, device=device, dtype=torch.long) + + if mask is not None: + assert x0 is not None + img_orig = self.model.q_sample(x0, ts) # TODO: deterministic forward pass? + img = img_orig * mask + (1. 
- mask) * img + + outs = self.p_sample_ddim(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps, + quantize_denoised=quantize_denoised, temperature=temperature, + noise_dropout=noise_dropout, score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning) + img, pred_x0 = outs + if callback: callback(i) + if img_callback: img_callback(pred_x0, i) + + if index % log_every_t == 0 or index == total_steps - 1: + intermediates['x_inter'].append(img) + intermediates['pred_x0'].append(pred_x0) + + return img, intermediates + + @torch.no_grad() + def p_sample_ddim(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False, + temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None, + unconditional_guidance_scale=1., unconditional_conditioning=None): + b, *_, device = *x.shape, x.device + + if unconditional_conditioning is None or unconditional_guidance_scale == 1.: + e_t = self.model.apply_model(x, t, c) + else: + x_in = torch.cat([x] * 2) + t_in = torch.cat([t] * 2) + c_in = torch.cat([unconditional_conditioning, c]) + e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2) + e_t = e_t_uncond + unconditional_guidance_scale * (e_t - e_t_uncond) + + if score_corrector is not None: + assert self.model.parameterization == "eps" + e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs) + + alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas + alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev + sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas + sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas + # select parameters corresponding to the currently considered timestep + a_t = torch.full((b, 1, 1, 1), alphas[index], device=device) + a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device) + sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device) + sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device) + + # current prediction for x_0 + pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt() + if quantize_denoised: + pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0) + # direction pointing to x_t + dir_xt = (1. 
- a_prev - sigma_t**2).sqrt() * e_t + noise = sigma_t * noise_like(x, repeat_noise) * temperature + if noise_dropout > 0.: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise + return x_prev, pred_x0 diff --git a/core/models/ddim/ddim_vd.py b/core/models/ddim/ddim_vd.py new file mode 100644 index 0000000000000000000000000000000000000000..154d35a0011ff92dac46fb2a2356ac15b7f0ab86 --- /dev/null +++ b/core/models/ddim/ddim_vd.py @@ -0,0 +1,175 @@ +""" +https://github.com/SHI-Labs/Versatile-Diffusion +""" + +import torch +import numpy as np +from tqdm import tqdm +from functools import partial + +from .diffusion_utils import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like + +from .ddim import DDIMSampler + + +class DDIMSampler_VD(DDIMSampler): + @torch.no_grad() + def sample(self, + steps, + shape, + xt=None, + condition=None, + unconditional_guidance_scale=1., + xtype='image', + condition_types=['text'], + eta=0., + temperature=1., + mix_weight=None, + noise_dropout=0., + verbose=True, + log_every_t=100, ): + + self.make_schedule(ddim_num_steps=steps, ddim_eta=eta, verbose=verbose) + print(f'Data shape for DDIM sampling is {shape}, eta {eta}') + samples, intermediates = self.ddim_sampling( + shape, + xt=xt, + condition=condition, + unconditional_guidance_scale=unconditional_guidance_scale, + xtype=xtype, + condition_types=condition_types, + ddim_use_original_steps=False, + noise_dropout=noise_dropout, + temperature=temperature, + log_every_t=log_every_t, + mix_weight=mix_weight, ) + return samples, intermediates + + @torch.no_grad() + def ddim_sampling(self, + shape, + xt=None, + condition=None, + unconditional_guidance_scale=1., + xtype=['image'], + condition_types=['text'], + ddim_use_original_steps=False, + timesteps=None, + noise_dropout=0., + temperature=1., + mix_weight=None, + log_every_t=100, ): + + device = self.model.device + dtype = condition[0][0].dtype + + if isinstance(shape[0], list): + bs = shape[0][0] + else: + bs = shape[0] + if xt is None: + if isinstance(shape[0], list): + xt = [torch.randn(shape_i, device=device, dtype=dtype) for shape_i in shape] + else: + xt = torch.randn(shape, device=device, dtype=dtype) + + if timesteps is None: + timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps + elif timesteps is not None and not ddim_use_original_steps: + subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1 + timesteps = self.ddim_timesteps[:subset_end] + + intermediates = {'pred_xt': [], 'pred_x0': []} + time_range = reversed(range(0, timesteps)) if ddim_use_original_steps else np.flip(timesteps) + total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0] + # print(f"Running DDIM Sampling with {total_steps} timesteps") + + pred_xt = xt + iterator = tqdm(time_range, desc='DDIM Sampler', total=total_steps) + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((bs,), step, device=device, dtype=torch.long) + + outs = self.p_sample_ddim( + pred_xt, + condition, + ts, index, + unconditional_guidance_scale=unconditional_guidance_scale, + xtype=xtype, + condition_types=condition_types, + use_original_steps=ddim_use_original_steps, + noise_dropout=noise_dropout, + temperature=temperature, + mix_weight=mix_weight, ) + pred_xt, pred_x0 = outs + + if index % log_every_t == 0 or index == total_steps - 1: + intermediates['pred_xt'].append(pred_xt) + 
intermediates['pred_x0'].append(pred_x0) + + return pred_xt, intermediates + + @torch.no_grad() + def p_sample_ddim(self, x, + condition, + t, index, + unconditional_guidance_scale=1., + xtype=['image'], + condition_types=['text'], + repeat_noise=False, + use_original_steps=False, + noise_dropout=0., + temperature=1., + mix_weight=None, ): + + b, *_, device = *x[0].shape, x[0].device + + x_in = [] + for x_i in x: + x_in.append(torch.cat([x_i] * 2)) + t_in = torch.cat([t] * 2) + + out = self.model.model.diffusion_model( + x_in, t_in, condition, xtype=xtype, condition_types=condition_types, mix_weight=mix_weight) + e_t = [] + for out_i in out: + e_t_uncond_i, e_t_i = out_i.chunk(2) + e_t_i = e_t_uncond_i + unconditional_guidance_scale * (e_t_i - e_t_uncond_i) + e_t_i = e_t_i.to(device) + e_t.append(e_t_i) + + alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas + alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev + sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas + sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas + # select parameters corresponding to the currently considered timestep + + x_prev = [] + pred_x0 = [] + device = x[0].device + dtype = x[0].dtype + for i, xtype_i in enumerate(xtype): + if xtype_i in ['image', 'frontal', 'lateral']: + extended_shape = (b, 1, 1, 1) + elif xtype_i == 'video': + extended_shape = (b, 1, 1, 1, 1) + elif xtype_i == 'text': + extended_shape = (b, 1) + elif xtype_i == 'audio': + extended_shape = (b, 1, 1, 1) + + a_t = torch.full(extended_shape, alphas[index], device=device, dtype=dtype) + a_prev = torch.full(extended_shape, alphas_prev[index], device=device, dtype=dtype) + sigma_t = torch.full(extended_shape, sigmas[index], device=device, dtype=dtype) + sqrt_one_minus_at = torch.full(extended_shape, sqrt_one_minus_alphas[index], device=device, dtype=dtype) + + # current prediction for x_0 + pred_x0_i = (x[i] - sqrt_one_minus_at * e_t[i]) / a_t.sqrt() + dir_xt = (1. 
- a_prev - sigma_t ** 2).sqrt() * e_t[i] + noise = sigma_t * noise_like(x[i], repeat_noise) * temperature + if noise_dropout > 0.: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + x_prev_i = a_prev.sqrt() * pred_x0_i + dir_xt + noise + x_prev.append(x_prev_i) + pred_x0.append(pred_x0_i) + return x_prev, pred_x0 diff --git a/core/models/ddim/diffusion_utils.py b/core/models/ddim/diffusion_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..deead230fcae37f1025911346e0f30f2f971012e --- /dev/null +++ b/core/models/ddim/diffusion_utils.py @@ -0,0 +1,273 @@ +import os +import math +import torch +import torch.nn as nn +import numpy as np +from einops import repeat + + +def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if schedule == "linear": + betas = ( + torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2 + ) + + elif schedule == "cosine": + timesteps = ( + torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s + ) + alphas = timesteps / (1 + cosine_s) * np.pi / 2 + alphas = torch.cos(alphas).pow(2) + alphas = alphas / alphas[0] + betas = 1 - alphas[1:] / alphas[:-1] + betas = np.clip(betas, a_min=0, a_max=0.999) + + elif schedule == "sqrt_linear": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) + elif schedule == "sqrt": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5 + else: + raise ValueError(f"schedule '{schedule}' unknown.") + return betas.numpy() + + +def make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True): + if ddim_discr_method == 'uniform': + c = num_ddpm_timesteps // num_ddim_timesteps + ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c))) + elif ddim_discr_method == 'quad': + ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int) + else: + raise NotImplementedError(f'There is no ddim discretization method called "{ddim_discr_method}"') + + # assert ddim_timesteps.shape[0] == num_ddim_timesteps + # add one to get the final alpha values right (the ones from first scale to data during sampling) + if num_ddpm_timesteps != 1000: + steps_out = ddim_timesteps + 1 + else: + steps_out = ddim_timesteps + if verbose: + print(f'Selected timesteps for ddim sampler: {steps_out}') + return steps_out + + +def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True): + # select alphas for computing the variance schedule + alphas = alphacums[ddim_timesteps] + alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist()) + + # according the the formula provided in https://arxiv.org/abs/2010.02502 + sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev)) + if verbose: + print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}') + print(f'For the chosen value of eta, which is {eta}, ' + f'this results in the following sigma_t schedule for ddim sampler {sigmas}') + return sigmas, alphas, alphas_prev + + +def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): + """ + Create a beta schedule that discretizes the given alpha_t_bar function, + which defines the cumulative product of (1-beta) over time from t = [0,1]. + :param num_diffusion_timesteps: the number of betas to produce. 
+ :param alpha_bar: a lambda that takes an argument t from 0 to 1 and + produces the cumulative product of (1-beta) up to that + part of the diffusion process. + :param max_beta: the maximum beta to use; use values lower than 1 to + prevent singularities. + """ + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return np.array(betas) + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with torch.enable_grad(): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + else: + embedding = repeat(timesteps, 'b -> b d', d=dim) + return embedding + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. 
+ """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def normalization(channels): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. + """ + return GroupNorm32(32, channels) + + +# PyTorch 1.7 has SiLU, but we support PyTorch 1.5. +class SiLU(nn.Module): + def forward(self, x): + return x * torch.sigmoid(x) + + +class GroupNorm32(nn.GroupNorm): + def forward(self, x): + # return super().forward(x.float()).type(x.dtype) + return super().forward(x) + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. + """ + return nn.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +class HybridConditioner(nn.Module): + + def __init__(self, c_concat_config, c_crossattn_config): + super().__init__() + self.concat_conditioner = instantiate_from_config(c_concat_config) + self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) + + def forward(self, c_concat, c_crossattn): + c_concat = self.concat_conditioner(c_concat) + c_crossattn = self.crossattn_conditioner(c_crossattn) + return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]} + + +def noise_like(x, repeat=False): + noise = torch.randn_like(x) + if repeat: + bs = x.shape[0] + noise = noise[0:1].repeat(bs, *((1,) * (len(x.shape) - 1))) + return noise + +########################## +# inherit from ldm.utils # +########################## + + +def count_params(model, verbose=False): + total_params = sum(p.numel() for p in model.parameters()) + if verbose: + print(f"{model.__class__.__name__} has {total_params*1.e-6:.2f} M params.") + return total_params diff --git a/core/models/ema.py b/core/models/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..9560e16158a52290f1f854973c7c9f828de30e91 --- /dev/null +++ b/core/models/ema.py @@ -0,0 +1,76 @@ +import torch +from torch import nn + + +class LitEma(nn.Module): + def __init__(self, model, decay=0.9999, use_num_updates=True): + super().__init__() + if decay < 0.0 or decay > 1.0: + raise ValueError('Decay must be between 0 and 1') + + self.m_name2s_name = {} + self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32)) + self.register_buffer('num_updates', torch.tensor(0, dtype=torch.int) if use_num_updates + else torch.tensor(-1, dtype=torch.int)) + + for name, p in model.named_parameters(): + if p.requires_grad: + # remove as '.'-character is not allowed in buffers + s_name = name.replace('.', '') + self.m_name2s_name.update({name: s_name}) + self.register_buffer(s_name, p.clone().detach().data) + + self.collected_params = [] + + def forward(self, model): + decay = self.decay + + if self.num_updates >= 0: + self.num_updates += 1 + decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates)) + + one_minus_decay = 1.0 - decay + + with torch.no_grad(): + m_param = dict(model.named_parameters()) + shadow_params = 
dict(self.named_buffers()) + + for key in m_param: + if m_param[key].requires_grad: + sname = self.m_name2s_name[key] + shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) + shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key])) + else: + assert not key in self.m_name2s_name + + def copy_to(self, model): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + for key in m_param: + if m_param[key].requires_grad: + m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) + else: + assert not key in self.m_name2s_name + + def store(self, parameters): + """ + Save the current parameters for restoring later. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + temporarily stored. + """ + self.collected_params = [param.clone() for param in parameters] + + def restore(self, parameters): + """ + Restore the parameters stored with the `store` method. + Useful to validate the model with EMA parameters without affecting the + original optimization process. Store the parameters before the + `copy_to` method. After validation (or model saving), use this to + restore the former parameters. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + updated with the stored parameters. + """ + for c_param, param in zip(self.collected_params, parameters): + param.data.copy_(c_param.data) diff --git a/core/models/encoders/__pycache__/clap.cpython-311.pyc b/core/models/encoders/__pycache__/clap.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46a95e5d8c6721cfdf1c004dc6f943fb5c4aa574 Binary files /dev/null and b/core/models/encoders/__pycache__/clap.cpython-311.pyc differ diff --git a/core/models/encoders/__pycache__/clap.cpython-38.pyc b/core/models/encoders/__pycache__/clap.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..985b83ff0a9546c5d936fb036b7c471a56507eda Binary files /dev/null and b/core/models/encoders/__pycache__/clap.cpython-38.pyc differ diff --git a/core/models/encoders/__pycache__/clip.cpython-311.pyc b/core/models/encoders/__pycache__/clip.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2065f3f536d97e3bb3a6eb7183697e5574473fdd Binary files /dev/null and b/core/models/encoders/__pycache__/clip.cpython-311.pyc differ diff --git a/core/models/encoders/__pycache__/clip.cpython-38.pyc b/core/models/encoders/__pycache__/clip.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e5b5dd9dece03718f8e6200147d4902e2d8cd47c Binary files /dev/null and b/core/models/encoders/__pycache__/clip.cpython-38.pyc differ diff --git a/core/models/encoders/clap.py b/core/models/encoders/clap.py new file mode 100644 index 0000000000000000000000000000000000000000..55c7b421cf2c38d5f111cf18ad7348db6a35a365 --- /dev/null +++ b/core/models/encoders/clap.py @@ -0,0 +1,134 @@ +import torch +import torch.nn as nn +import torchaudio + +from .clap_modules.open_clip import create_model +from .clap_modules.training.data import get_audio_features + +from ..common.get_model import register + + +@register('clap_audio') +class CLAPAudioEmbeddingClassifierFreev2(nn.Module): + """Uses the CLAP audio encoder""" + def __init__( + self, + pretrained_path="", + key="waveform", + sampling_rate=16000, + embed_mode="audio", + unconditional_prob=0.1, + random_mute=False, + max_random_mute_portion=0.5, + training_mode=True, + joint_embed_shape=768, + embed_shape=512, + 
num_layers=12,
+        depths=[2, 2, 6, 2],
+        amodel="HTSAT-large",
+    ):
+        super().__init__()
+
+        self.key = key
+        self.amodel = amodel  # or 'PANN-14'
+        self.tmodel = "roberta"  # the best text encoder in our training
+        self.enable_fusion = False  # False if you do not want to use the fusion model
+        self.fusion_type = "aff_2d"
+        self.pretrained = pretrained_path
+        self.embed_mode = embed_mode
+        self.embed_mode_orig = embed_mode
+        self.sampling_rate = sampling_rate
+        self.unconditional_prob = unconditional_prob
+        self.random_mute = random_mute
+        self.joint_embed_shape = joint_embed_shape
+        self.max_random_mute_portion = max_random_mute_portion
+        self.training_mode = training_mode
+        self.model, self.model_cfg = create_model(
+            self.amodel,
+            self.tmodel,
+            self.pretrained,
+            precision="fp32",
+            device="cpu",
+            enable_fusion=self.enable_fusion,
+            fusion_type=self.fusion_type,
+            joint_embed_shape=self.joint_embed_shape,
+        )
+
+    def get_dtype(self):
+        return next(self.model.parameters()).dtype
+
+    def get_unconditional_condition(self, batchsize):
+        # NOTE: relies on a text tokenizer being attached to this module from
+        # outside; `self.tokenizer` is never defined in this file.
+        self.unconditional_token = self.model.get_text_embedding(
+            self.tokenizer(["", ""])
+        )[0:1]
+        return torch.cat([self.unconditional_token.unsqueeze(0)] * batchsize, dim=0)
+
+    def batch_to_list(self, batch):
+        return [batch[i] for i in range(batch.size(0))]
+
+    def make_decision(self, probability):
+        return float(torch.rand(1)) < probability
+
+    def random_uniform(self, start, end):
+        val = torch.rand(1).item()
+        return start + (end - start) * val
+
+    def _random_mute(self, waveform):
+        # waveform: [bs, t-steps]
+        t_steps = waveform.size(-1)
+        for i in range(waveform.size(0)):
+            mute_size = int(
+                self.random_uniform(0, end=int(t_steps * self.max_random_mute_portion))
+            )
+            mute_start = int(self.random_uniform(0, t_steps - mute_size))
+            waveform[i, mute_start : mute_start + mute_size] = 0
+        return waveform
+
+    def cos_similarity(self, waveform, text):
+        import torch.nn.functional as F  # not imported at module level in this file
+
+        # waveform: [bs, t_steps]
+        with torch.no_grad():
+            self.embed_mode = "audio"
+            audio_emb = self(waveform.cuda())
+            self.embed_mode = "text"
+            text_emb = self(text)
+            similarity = F.cosine_similarity(audio_emb, text_emb, dim=2)
+            return similarity.squeeze()
+
+    def forward(self, batch, key=None):
+        # NOTE: only the audio embedding path is implemented below; calling
+        # this with embed_mode != "audio" would leave `embed` undefined.
+        # the 'fusion' truncate mode can be changed to 'rand_trunc' if run in unfusion mode
+        if self.embed_mode == "audio":
+            audio_dict_list = []
+            assert (
+                self.sampling_rate == 16000
+            ), "We only support a 16000 Hz sampling rate"
+            # batch: [bs, 1, t-samples]
+            batch = torchaudio.functional.resample(
+                batch, orig_freq=self.sampling_rate, new_freq=48000
+            )
+
+            for waveform in self.batch_to_list(batch):
+                audio_dict = {}
+                audio_dict = get_audio_features(
+                    audio_dict,
+                    waveform.squeeze(),
+                    480000,
+                    data_truncating="fusion",
+                    data_filling="repeatpad",
+                    audio_cfg=self.model_cfg["audio_cfg"],
+                    dtype=self.get_dtype(),
+                )
+                audio_dict_list.append(audio_dict)
+            # [bs, 768]
+            embed = self.model.get_audio_embedding(audio_dict_list)
+
+        embed = embed.unsqueeze(1)
+
+        # [bs, 1, 768]
+        return embed
diff --git a/core/models/encoders/clap_modules/__init__.py b/core/models/encoders/clap_modules/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/core/models/encoders/clap_modules/__pycache__/__init__.cpython-311.pyc b/core/models/encoders/clap_modules/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index
0000000000000000000000000000000000000000..5e8f52dcc979bb3cda6a2ab4ad52bf1e6eae4773 Binary files /dev/null and b/core/models/encoders/clap_modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/__pycache__/__init__.cpython-38.pyc b/core/models/encoders/clap_modules/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c2554b2dae5b731a413d2603f879678e92a8521e Binary files /dev/null and b/core/models/encoders/clap_modules/__pycache__/__init__.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__init__.py b/core/models/encoders/clap_modules/open_clip/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e9f728f2f273be5d5fdbec6c6cc41d737176a8c0 --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/__init__.py @@ -0,0 +1,25 @@ +from .factory import ( + list_models, + create_model, + create_model_and_transforms, + add_model_config, +) +from .loss import ClipLoss, gather_features, LPLoss, lp_gather_features, LPMetrics +from .model import ( + CLAP, + CLAPTextCfg, + CLAPVisionCfg, + CLAPAudioCfp, + convert_weights_to_fp16, + trace_model, +) +from .openai import load_openai_model, list_openai_models +from .pretrained import ( + list_pretrained, + list_pretrained_tag_models, + list_pretrained_model_tags, + get_pretrained_url, + download_pretrained, +) +from .tokenizer import SimpleTokenizer, tokenize +from .transform import image_transform diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/__init__.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c8a928371857e02c8188ae8f77260c8beb4dea8 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/__init__.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/__init__.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cb8909f2a4f2894d2f7dd48127992b9b3cf5bdb1 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/__init__.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/factory.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/factory.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54f82ccdf06391c408cf3918a620a35b43572fc9 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/factory.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/factory.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/factory.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0424b4cbaa0feed8cb14f29ae2b6ecc06f3d7b4b Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/factory.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/feature_fusion.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/feature_fusion.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c6579fac5b266e296b03f81b3f49abedbe3420b Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/feature_fusion.cpython-311.pyc differ diff --git 
a/core/models/encoders/clap_modules/open_clip/__pycache__/feature_fusion.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/feature_fusion.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ffc67040b45ebdb2e5c5a0dc2e1978cee59ac47f Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/feature_fusion.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/htsat.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/htsat.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eeb4b410589af64dffa159a1e8ce326f60d43075 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/htsat.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/htsat.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/htsat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eefe33b7a7d140f8af7088b345b1a6accbab6f41 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/htsat.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/loss.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/loss.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8fd7f85a5141cbc2c637aa64b52c23ece96a8cdf Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/loss.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/loss.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/loss.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93056190e789566d035f81b1f5d1d0e286ceab16 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/loss.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/model.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/model.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93dfc0c10c584ba0044f646d8f89a399a77d30eb Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/model.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/model.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/model.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..88b5d714916ef75dcdb6d120b5e623500ccc3e27 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/model.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/openai.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/openai.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6988f3aba76f94d1ab0bef9b5949ad67948689bf Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/openai.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/openai.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/openai.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c1b02996372286a7d062779a19e30ee1e72ac6a Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/openai.cpython-38.pyc 
differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/pann_model.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/pann_model.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..05d1f6498e8e3440e725457aed9a3896f09381a5 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/pann_model.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/pann_model.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/pann_model.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2ebb8af1b0c91dfaaf80fc8dab8f396d77c7ec28 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/pann_model.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/pretrained.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/pretrained.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..839ba4e7ab71478e4ed7202442c9792e509bf955 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/pretrained.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/pretrained.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/pretrained.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eaa6b328ea4407db716107c6d736817e3dcac812 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/pretrained.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/timm_model.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/timm_model.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..275b21e8287da491ba5d8bd95b8cd8b4cadfc31f Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/timm_model.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/timm_model.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/timm_model.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c027f6b9dcf104cebf86849afba0a44af483ec50 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/timm_model.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/tokenizer.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/tokenizer.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2558ea7dd3d724d4c0c16db810ff0187e66a8883 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/tokenizer.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/tokenizer.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/tokenizer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8036f602d334ce9e558a0607b76c22bd70ffe34f Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/tokenizer.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/transform.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/transform.cpython-311.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..3ac53cbab4a2396ffdc71af1db8d72b9ea56e0b8 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/transform.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/transform.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/transform.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2609a38885cd92dda205a56bc1d25ff5aa7a5819 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/transform.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/utils.cpython-311.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/utils.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36b2e7e266d4b9cbb23e358d72bf90215db0482d Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/utils.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/__pycache__/utils.cpython-38.pyc b/core/models/encoders/clap_modules/open_clip/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38ac1687551f68ea7608a212a0bb5513f0533ee3 Binary files /dev/null and b/core/models/encoders/clap_modules/open_clip/__pycache__/utils.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/open_clip/bert.py b/core/models/encoders/clap_modules/open_clip/bert.py new file mode 100644 index 0000000000000000000000000000000000000000..a83d96d2a77ed05198efc05837522bc88d2499cc --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/bert.py @@ -0,0 +1,40 @@ +from transformers import BertTokenizer, BertModel + +tokenizer = BertTokenizer.from_pretrained("bert-base-uncased") +model = BertModel.from_pretrained("bert-base-uncased") +text = "Replace me by any text you'd like." + + +def bert_embeddings(text): + # text = "Replace me by any text you'd like." + encoded_input = tokenizer(text, return_tensors="pt") + output = model(**encoded_input) + return output + + +from transformers import RobertaTokenizer, RobertaModel + +tokenizer = RobertaTokenizer.from_pretrained("roberta-base") +model = RobertaModel.from_pretrained("roberta-base") +text = "Replace me by any text you'd like." + + +def Roberta_embeddings(text): + # text = "Replace me by any text you'd like." + encoded_input = tokenizer(text, return_tensors="pt") + output = model(**encoded_input) + return output + + +from transformers import BartTokenizer, BartModel + +tokenizer = BartTokenizer.from_pretrained("facebook/bart-base") +model = BartModel.from_pretrained("facebook/bart-base") +text = "Replace me by any text you'd like." + + +def bart_embeddings(text): + # text = "Replace me by any text you'd like." 
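+    # NOTE: `tokenizer` and `model` here refer to the module-level BART
+    # instances bound above; each of the three from_pretrained() calls in
+    # this file downloads weights at import time.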
+    encoded_input = tokenizer(text, return_tensors="pt")
+    output = model(**encoded_input)
+    return output
diff --git a/core/models/encoders/clap_modules/open_clip/bpe_simple_vocab_16e6.txt.gz b/core/models/encoders/clap_modules/open_clip/bpe_simple_vocab_16e6.txt.gz
new file mode 100644
index 0000000000000000000000000000000000000000..36a15856e00a06a9fbed8cdd34d2393fea4a3113
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/bpe_simple_vocab_16e6.txt.gz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:924691ac288e54409236115652ad4aa250f48203de50a9e4722a6ecd48d6804a
+size 1356917
diff --git a/core/models/encoders/clap_modules/open_clip/factory.py b/core/models/encoders/clap_modules/open_clip/factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..3815d1c16a2f371fd081fb5bb9037577b0490559
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/factory.py
@@ -0,0 +1,273 @@
+import json
+import logging
+import os
+import pathlib
+import re
+from copy import deepcopy
+from pathlib import Path
+
+import torch
+
+from .model import CLAP, convert_weights_to_fp16
+from .openai import load_openai_model
+from .pretrained import get_pretrained_url, download_pretrained
+from .transform import image_transform
+
+_MODEL_CONFIG_PATHS = [Path(__file__).parent / "model_configs/"]
+_MODEL_CONFIGS = {}  # dictionary of (model_name: config) model architecture configs
+
+
+def _natural_key(string_):
+    return [int(s) if s.isdigit() else s for s in re.split(r"(\d+)", string_.lower())]
+
+
+def _rescan_model_configs():
+    global _MODEL_CONFIGS
+
+    config_ext = (".json",)
+    config_files = []
+    for config_path in _MODEL_CONFIG_PATHS:
+        if config_path.is_file() and config_path.suffix in config_ext:
+            config_files.append(config_path)
+        elif config_path.is_dir():
+            for ext in config_ext:
+                config_files.extend(config_path.glob(f"*{ext}"))
+
+    for cf in config_files:
+        if os.path.basename(cf)[0] == ".":
+            continue  # Ignore hidden files
+
+        with open(cf, "r") as f:
+            model_cfg = json.load(f)
+            if all(a in model_cfg for a in ("embed_dim", "audio_cfg", "text_cfg")):
+                _MODEL_CONFIGS[cf.stem] = model_cfg
+
+    _MODEL_CONFIGS = {
+        k: v
+        for k, v in sorted(_MODEL_CONFIGS.items(), key=lambda x: _natural_key(x[0]))
+    }
+
+
+_rescan_model_configs()  # initial population of the model config registry
+
+
+def load_state_dict(checkpoint_path: str, map_location="cpu", skip_params=True):
+    checkpoint = torch.load(checkpoint_path, map_location=map_location)
+    if isinstance(checkpoint, dict) and "state_dict" in checkpoint:
+        state_dict = checkpoint["state_dict"]
+    else:
+        state_dict = checkpoint
+    if skip_params:
+        # strip a leading "module." prefix left over from DataParallel training
+        if next(iter(state_dict.items()))[0].startswith("module"):
+            state_dict = {k[7:]: v for k, v in state_dict.items()}
+    # for k in state_dict:
+    #     if k.startswith('transformer'):
+    #         v = state_dict.pop(k)
+    #         state_dict['text_branch.'
+ k[12:]] = v + return state_dict + + +def create_model( + amodel_name: str, + tmodel_name: str, + pretrained: str = "", + precision: str = "fp32", + device: torch.device = torch.device("cpu"), + jit: bool = False, + force_quick_gelu: bool = False, + openai_model_cache_dir: str = os.path.expanduser("~/.cache/clip"), + skip_params=True, + pretrained_audio: str = "", + pretrained_text: str = "", + enable_fusion: bool = False, + fusion_type: str = "None", + joint_embed_shape: int = 512 +): + amodel_name = amodel_name.replace( + "/", "-" + ) # for callers using old naming with / in ViT names + pretrained_orig = pretrained + pretrained = pretrained.lower() + if pretrained == "openai": + if amodel_name in _MODEL_CONFIGS: + logging.info(f"Loading {amodel_name} model config.") + model_cfg = deepcopy(_MODEL_CONFIGS[amodel_name]) + else: + logging.error( + f"Model config for {amodel_name} not found; available models {list_models()}." + ) + raise RuntimeError(f"Model config for {amodel_name} not found.") + + logging.info(f"Loading pretrained ViT-B-16 text encoder from OpenAI.") + # Hard Code in model name + model_cfg["text_cfg"]["model_type"] = tmodel_name + model = load_openai_model( + "ViT-B-16", + model_cfg, + device=device, + jit=jit, + cache_dir=openai_model_cache_dir, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + # See https://discuss.pytorch.org/t/valueerror-attemting-to-unscale-fp16-gradients/81372 + if precision == "amp" or precision == "fp32": + model = model.float() + else: + if amodel_name in _MODEL_CONFIGS: + logging.info(f"Loading {amodel_name} model config.") + model_cfg = deepcopy(_MODEL_CONFIGS[amodel_name]) + else: + logging.error( + f"Model config for {amodel_name} not found; available models {list_models()}." + ) + raise RuntimeError(f"Model config for {amodel_name} not found.") + + if force_quick_gelu: + # override for use of QuickGELU on non-OpenAI transformer models + model_cfg["quick_gelu"] = True + + model_cfg["text_cfg"]["model_type"] = tmodel_name + model_cfg["enable_fusion"] = enable_fusion + model_cfg["fusion_type"] = fusion_type + model_cfg["joint_embed_shape"] = joint_embed_shape + + model = CLAP(**model_cfg) + + if pretrained: + checkpoint_path = "" + url = get_pretrained_url(amodel_name, pretrained) + if url: + checkpoint_path = download_pretrained(url, root=openai_model_cache_dir) + elif os.path.exists(pretrained_orig): + checkpoint_path = pretrained_orig + if checkpoint_path: + logging.info( + f"Loading pretrained {amodel_name}-{tmodel_name} weights ({pretrained})." + ) + ckpt = load_state_dict(checkpoint_path, skip_params=True) + model.load_state_dict(ckpt) + param_names = [n for n, p in model.named_parameters()] + # for n in param_names: + # print(n, "\t", "Loaded" if n in ckpt else "Unloaded") + else: + logging.warning( + f"Pretrained weights ({pretrained}) not found for model {amodel_name}." + ) + raise RuntimeError( + f"Pretrained weights ({pretrained}) not found for model {amodel_name}." + ) + + if pretrained_audio: + if amodel_name.startswith("PANN"): + if "Cnn14_mAP" in pretrained_audio: # official checkpoint + audio_ckpt = torch.load(pretrained_audio, map_location="cpu") + audio_ckpt = audio_ckpt["model"] + keys = list(audio_ckpt.keys()) + for key in keys: + if ( + "spectrogram_extractor" not in key + and "logmel_extractor" not in key + ): + v = audio_ckpt.pop(key) + audio_ckpt["audio_branch." 
+ key] = v
+            elif os.path.basename(pretrained_audio).startswith(
+                "PANN"
+            ):  # checkpoint trained via HTSAT codebase
+                audio_ckpt = torch.load(pretrained_audio, map_location="cpu")
+                audio_ckpt = audio_ckpt["state_dict"]
+                keys = list(audio_ckpt.keys())
+                for key in keys:
+                    if key.startswith("sed_model"):
+                        v = audio_ckpt.pop(key)
+                        audio_ckpt["audio_branch." + key[10:]] = v
+            elif os.path.basename(pretrained_audio).startswith(
+                "finetuned"
+            ):  # checkpoint trained via linear probe codebase
+                audio_ckpt = torch.load(pretrained_audio, map_location="cpu")
+            else:
+                raise ValueError("Unknown audio checkpoint")
+        elif amodel_name.startswith("HTSAT"):
+            if "HTSAT_AudioSet_Saved" in pretrained_audio:  # official checkpoint
+                audio_ckpt = torch.load(pretrained_audio, map_location="cpu")
+                audio_ckpt = audio_ckpt["state_dict"]
+                keys = list(audio_ckpt.keys())
+                for key in keys:
+                    if key.startswith("sed_model") and (
+                        "spectrogram_extractor" not in key
+                        and "logmel_extractor" not in key
+                    ):
+                        v = audio_ckpt.pop(key)
+                        audio_ckpt["audio_branch." + key[10:]] = v
+            elif os.path.basename(pretrained_audio).startswith(
+                "HTSAT"
+            ):  # checkpoint trained via HTSAT codebase
+                audio_ckpt = torch.load(pretrained_audio, map_location="cpu")
+                audio_ckpt = audio_ckpt["state_dict"]
+                keys = list(audio_ckpt.keys())
+                for key in keys:
+                    if key.startswith("sed_model"):
+                        v = audio_ckpt.pop(key)
+                        audio_ckpt["audio_branch." + key[10:]] = v
+            elif os.path.basename(pretrained_audio).startswith(
+                "finetuned"
+            ):  # checkpoint trained via linear probe codebase
+                audio_ckpt = torch.load(pretrained_audio, map_location="cpu")
+            else:
+                raise ValueError("Unknown audio checkpoint")
+        else:
+            raise ValueError(
+                "this audio encoder pretrained checkpoint is not supported"
+            )
+
+        model.load_state_dict(audio_ckpt, strict=False)
+        logging.info(
+            f"Loading pretrained {amodel_name} weights ({pretrained_audio})."
+        )
+        param_names = [n for n, p in model.named_parameters()]
+        for n in param_names:
+            print(n, "\t", "Loaded" if n in audio_ckpt else "Unloaded")
+
+    model.to(device=device)
+    if precision == "fp16":
+        assert device.type != "cpu"
+        convert_weights_to_fp16(model)
+
+    if jit:
+        model = torch.jit.script(model)
+
+    return model, model_cfg
+
+
+def create_model_and_transforms(
+    model_name: str,
+    pretrained: str = "",
+    precision: str = "fp32",
+    device: torch.device = torch.device("cpu"),
+    jit: bool = False,
+    force_quick_gelu: bool = False,
+    # pretrained_image: bool = False,
+):
+    # create_model returns a (model, model_cfg) tuple; unpack it so the
+    # transforms below receive the model itself
+    model, _ = create_model(
+        model_name,
+        pretrained,
+        precision,
+        device,
+        jit,
+        force_quick_gelu=force_quick_gelu,
+        # pretrained_image=pretrained_image
+    )
+    preprocess_train = image_transform(model.visual.image_size, is_train=True)
+    preprocess_val = image_transform(model.visual.image_size, is_train=False)
+    return model, preprocess_train, preprocess_val
+
+
+def list_models():
+    """enumerate available model architectures based on config files"""
+    return list(_MODEL_CONFIGS.keys())
+
+
+def add_model_config(path):
+    """add model config path or file and update registry"""
+    if not isinstance(path, Path):
+        path = Path(path)
+    _MODEL_CONFIG_PATHS.append(path)
+    _rescan_model_configs()
diff --git a/core/models/encoders/clap_modules/open_clip/feature_fusion.py b/core/models/encoders/clap_modules/open_clip/feature_fusion.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbe4e170e05894c12ebdc36ba1dc1de65e441b89
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/feature_fusion.py
@@ -0,0 +1,192 @@
+"""
+Feature Fusion for Variable-Length Data Processing
+AFF/iAFF are adapted from https://github.com/YimianDai/open-aff/blob/master/aff_pytorch/aff_net/fusion.py
+Based on the paper: Yimian Dai et al., Attentional Feature Fusion, IEEE Winter Conference on Applications of Computer Vision, WACV 2021
+"""
+
+import torch
+import torch.nn as nn
+
+
+class DAF(nn.Module):
+    """
+    Direct addition (DirectAddFuse)
+    """
+
+    def __init__(self):
+        super(DAF, self).__init__()
+
+    def forward(self, x, residual):
+        return x + residual
+
+
+class iAFF(nn.Module):
+    """
+    Multi-feature fusion: iterative attentional feature fusion (iAFF)
+    """
+
+    def __init__(self, channels=64, r=4, type="2D"):
+        super(iAFF, self).__init__()
+        inter_channels = int(channels // r)
+
+        if type == "1D":
+            # local attention
+            self.local_att = nn.Sequential(
+                nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(channels),
+            )
+
+            # global attention
+            self.global_att = nn.Sequential(
+                nn.AdaptiveAvgPool1d(1),
+                nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(channels),
+            )
+
+            # second local attention
+            self.local_att2 = nn.Sequential(
+                nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(channels),
+            )
+            # second global attention
+            self.global_att2 = nn.Sequential(
+                nn.AdaptiveAvgPool1d(1),
+                nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(channels),
+            )
+        elif type == "2D":
+            # local attention
+            self.local_att = nn.Sequential(
+                nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(channels),
+            )
+
+            # global attention
+            self.global_att = nn.Sequential(
+                nn.AdaptiveAvgPool2d(1),
+                nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(channels),
+            )
+
+            # second local attention
+            self.local_att2 = nn.Sequential(
+                nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(channels),
+            )
+            # second global attention
+            self.global_att2 = nn.Sequential(
+                nn.AdaptiveAvgPool2d(1),
+                nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(channels),
+            )
+        else:
+            raise ValueError("the type is not supported")
+
+        self.sigmoid = nn.Sigmoid()
+
+    def forward(self, x, residual):
+        flag = False
+        xa = x + residual
+        if xa.size(0) == 1:
+            # BatchNorm cannot be trained on a single-sample batch, so
+            # duplicate the sample here and undo the duplication below
+            xa = torch.cat([xa, xa], dim=0)
+            flag = True
+        xl = self.local_att(xa)
+        xg = self.global_att(xa)
+        xlg = xl + xg
+        wei = self.sigmoid(xlg)
+        xi = x * wei + residual * (1 - wei)
+
+        xl2 = self.local_att2(xi)
+        # the second stage should use the second global branch; the original
+        # code called self.global_att here, leaving global_att2 unused
+        xg2 = self.global_att2(xi)
+        xlg2 = xl2 + xg2
+        wei2 = self.sigmoid(xlg2)
+        xo = x * wei2 + residual * (1 - wei2)
+        if flag:
+            xo = xo[0].unsqueeze(0)
+        return xo
+
+
+class AFF(nn.Module):
+    """
+    Multi-feature fusion: attentional feature fusion (AFF)
+    """
+
+    def __init__(self, channels=64, r=4, type="2D"):
+        super(AFF, self).__init__()
+        inter_channels = int(channels // r)
+
+        if type == "1D":
+            self.local_att = nn.Sequential(
+                nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(channels),
+            )
+            self.global_att = nn.Sequential(
+                nn.AdaptiveAvgPool1d(1),
+                nn.Conv1d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv1d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm1d(channels),
+            )
+        elif type == "2D":
+            self.local_att = nn.Sequential(
+                nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(channels),
+            )
+            self.global_att = nn.Sequential(
+                nn.AdaptiveAvgPool2d(1),
+                nn.Conv2d(channels, inter_channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(inter_channels),
+                nn.ReLU(inplace=True),
+                nn.Conv2d(inter_channels, channels, kernel_size=1, stride=1, padding=0),
+                nn.BatchNorm2d(channels),
+            )
+        else:
+            raise ValueError("the type is not supported")
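+        # The sigmoid below turns the summed local + global attention logits
+        # into per-element gates; forward() then blends the two inputs as
+        # xo = 2 * x * wei + 2 * residual * (1 - wei).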
+
+        self.sigmoid = nn.Sigmoid()
+
+    def forward(self, x, residual):
+        flag = False
+        xa = x + residual
+        if xa.size(0) == 1:
+            # BatchNorm cannot be trained on a single-sample batch, so
+            # duplicate the sample here and undo the duplication below
+            xa = torch.cat([xa, xa], dim=0)
+            flag = True
+        xl = self.local_att(xa)
+        xg = self.global_att(xa)
+        xlg = xl + xg
+        wei = self.sigmoid(xlg)
+        xo = 2 * x * wei + 2 * residual * (1 - wei)
+        if flag:
+            xo = xo[0].unsqueeze(0)
+        return xo
diff --git a/core/models/encoders/clap_modules/open_clip/htsat.py b/core/models/encoders/clap_modules/open_clip/htsat.py
new file mode 100644
index 0000000000000000000000000000000000000000..bbc71ef2666511f0d44c9c02db875cdddbad783f
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/htsat.py
@@ -0,0 +1,1276 @@
+# Ke Chen
+# knutchen@ucsd.edu
+# HTS-AT: A HIERARCHICAL TOKEN-SEMANTIC AUDIO TRANSFORMER FOR SOUND CLASSIFICATION AND DETECTION
+# Some of the layers below are based on and adapted from
+# https://github.com/microsoft/Swin-Transformer
+# Swin Transformer for Computer Vision: https://arxiv.org/pdf/2103.14030.pdf
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from itertools import repeat
+import collections.abc
+import math
+import warnings
+
+from torch.nn.init import _calculate_fan_in_and_fan_out
+import torch.utils.checkpoint as checkpoint
+
+import random
+
+from torchlibrosa.stft import Spectrogram, LogmelFilterBank
+from torchlibrosa.augmentation import SpecAugmentation
+
+from .utils import do_mixup, interpolate
+
+from .feature_fusion import iAFF, AFF, DAF
+
+# from PyTorch internals
+def _ntuple(n):
+    def parse(x):
+        if isinstance(x, collections.abc.Iterable):
+            return x
+        return tuple(repeat(x, n))
+
+    return parse
+
+
+to_1tuple = _ntuple(1)
+to_2tuple = _ntuple(2)
+to_3tuple = _ntuple(3)
+to_4tuple = _ntuple(4)
+to_ntuple = _ntuple
+
+
+def drop_path(x, drop_prob: float = 0.0, training: bool = False):
+    """Drop paths (Stochastic Depth) per sample (when applied in the main path of residual blocks).
+    This is the same as the DropConnect impl created for EfficientNet-style networks; the original
+    name is misleading, as 'Drop Connect' is a different form of dropout from a separate paper
+    (see discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956). We opt
+    for 'drop path' for the layer and argument names rather than mixing DropConnect as a layer name
+    with 'survival rate' as the argument.
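+    Surviving samples are rescaled by 1 / keep_prob, so the output matches
+    the input in expectation.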
+ """ + if drop_prob == 0.0 or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * ( + x.ndim - 1 + ) # work with diff dim tensors, not just 2D ConvNets + random_tensor = keep_prob + torch.rand(shape, dtype=x.dtype, device=x.device) + random_tensor.floor_() # binarize + output = x.div(keep_prob) * random_tensor + return output + + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).""" + + def __init__(self, drop_prob=None): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training) + + +class PatchEmbed(nn.Module): + """2D Image to Patch Embedding""" + + def __init__( + self, + img_size=224, + patch_size=16, + in_chans=3, + embed_dim=768, + norm_layer=None, + flatten=True, + patch_stride=16, + enable_fusion=False, + fusion_type="None", + ): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + patch_stride = to_2tuple(patch_stride) + self.img_size = img_size + self.patch_size = patch_size + self.patch_stride = patch_stride + self.grid_size = ( + img_size[0] // patch_stride[0], + img_size[1] // patch_stride[1], + ) + self.num_patches = self.grid_size[0] * self.grid_size[1] + self.flatten = flatten + self.in_chans = in_chans + self.embed_dim = embed_dim + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + padding = ( + (patch_size[0] - patch_stride[0]) // 2, + (patch_size[1] - patch_stride[1]) // 2, + ) + + if (self.enable_fusion) and (self.fusion_type == "channel_map"): + self.proj = nn.Conv2d( + in_chans * 4, + embed_dim, + kernel_size=patch_size, + stride=patch_stride, + padding=padding, + ) + else: + self.proj = nn.Conv2d( + in_chans, + embed_dim, + kernel_size=patch_size, + stride=patch_stride, + padding=padding, + ) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + if (self.enable_fusion) and ( + self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d"] + ): + self.mel_conv2d = nn.Conv2d( + in_chans, + embed_dim, + kernel_size=(patch_size[0], patch_size[1] * 3), + stride=(patch_stride[0], patch_stride[1] * 3), + padding=padding, + ) + if self.fusion_type == "daf_2d": + self.fusion_model = DAF() + elif self.fusion_type == "aff_2d": + self.fusion_model = AFF(channels=embed_dim, type="2D") + elif self.fusion_type == "iaff_2d": + self.fusion_model = iAFF(channels=embed_dim, type="2D") + + def forward(self, x, longer_idx=None): + if (self.enable_fusion) and ( + self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d"] + ): + global_x = x[:, 0:1, :, :] + + # global processing + B, C, H, W = global_x.shape + assert ( + H == self.img_size[0] and W == self.img_size[1] + ), f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})." 
+ global_x = self.proj(global_x) + TW = global_x.size(-1) + if len(longer_idx) > 0: + # local processing + local_x = x[longer_idx, 1:, :, :].contiguous() + B, C, H, W = local_x.shape + local_x = local_x.view(B * C, 1, H, W) + local_x = self.mel_conv2d(local_x) + local_x = local_x.view( + B, C, local_x.size(1), local_x.size(2), local_x.size(3) + ) + local_x = local_x.permute((0, 2, 3, 1, 4)).contiguous().flatten(3) + TB, TC, TH, _ = local_x.size() + if local_x.size(-1) < TW: + local_x = torch.cat( + [ + local_x, + torch.zeros( + (TB, TC, TH, TW - local_x.size(-1)), + device=global_x.device, + ), + ], + dim=-1, + ) + else: + local_x = local_x[:, :, :, :TW] + + global_x[longer_idx] = self.fusion_model(global_x[longer_idx], local_x) + x = global_x + else: + B, C, H, W = x.shape + assert ( + H == self.img_size[0] and W == self.img_size[1] + ), f"Input image size ({H}*{W}) doesn't match model ({self.img_size[0]}*{self.img_size[1]})." + x = self.proj(x) + + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x + + +class Mlp(nn.Module): + """MLP as used in Vision Transformer, MLP-Mixer and related networks""" + + def __init__( + self, + in_features, + hidden_features=None, + out_features=None, + act_layer=nn.GELU, + drop=0.0, + ): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +def _no_grad_trunc_normal_(tensor, mean, std, a, b): + # Cut & paste from PyTorch official master until it's in a few official releases - RW + # Method based on https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf + def norm_cdf(x): + # Computes standard normal cumulative distribution function + return (1.0 + math.erf(x / math.sqrt(2.0))) / 2.0 + + if (mean < a - 2 * std) or (mean > b + 2 * std): + warnings.warn( + "mean is more than 2 std from [a, b] in nn.init.trunc_normal_. " + "The distribution of values may be incorrect.", + stacklevel=2, + ) + + with torch.no_grad(): + # Values are generated by using a truncated uniform distribution and + # then using the inverse CDF for the normal distribution. + # Get upper and lower cdf values + l = norm_cdf((a - mean) / std) + u = norm_cdf((b - mean) / std) + + # Uniformly fill tensor with values from [l, u], then translate to + # [2l-1, 2u-1]. + tensor.uniform_(2 * l - 1, 2 * u - 1) + + # Use inverse cdf transform for normal distribution to get truncated + # standard normal + tensor.erfinv_() + + # Transform to proper mean, std + tensor.mul_(std * math.sqrt(2.0)) + tensor.add_(mean) + + # Clamp to ensure it's in the proper range + tensor.clamp_(min=a, max=b) + return tensor + + +def trunc_normal_(tensor, mean=0.0, std=1.0, a=-2.0, b=2.0): + # type: (Tensor, float, float, float, float) -> Tensor + r"""Fills the input Tensor with values drawn from a truncated + normal distribution. The values are effectively drawn from the + normal distribution :math:`\mathcal{N}(\text{mean}, \text{std}^2)` + with values outside :math:`[a, b]` redrawn until they are within + the bounds. The method used for generating the random values works + best when :math:`a \leq \text{mean} \leq b`. 
+ Args: + tensor: an n-dimensional `torch.Tensor` + mean: the mean of the normal distribution + std: the standard deviation of the normal distribution + a: the minimum cutoff value + b: the maximum cutoff value + Examples: + >>> w = torch.empty(3, 5) + >>> nn.init.trunc_normal_(w) + """ + return _no_grad_trunc_normal_(tensor, mean, std, a, b) + + +def variance_scaling_(tensor, scale=1.0, mode="fan_in", distribution="normal"): + fan_in, fan_out = _calculate_fan_in_and_fan_out(tensor) + if mode == "fan_in": + denom = fan_in + elif mode == "fan_out": + denom = fan_out + elif mode == "fan_avg": + denom = (fan_in + fan_out) / 2 + + variance = scale / denom + + if distribution == "truncated_normal": + # constant is stddev of standard normal truncated to (-2, 2) + trunc_normal_(tensor, std=math.sqrt(variance) / 0.87962566103423978) + elif distribution == "normal": + tensor.normal_(std=math.sqrt(variance)) + elif distribution == "uniform": + bound = math.sqrt(3 * variance) + tensor.uniform_(-bound, bound) + else: + raise ValueError(f"invalid distribution {distribution}") + + +def lecun_normal_(tensor): + variance_scaling_(tensor, mode="fan_in", distribution="truncated_normal") + + +def window_partition(x, window_size): + """ + Args: + x: (B, H, W, C) + window_size (int): window size + Returns: + windows: (num_windows*B, window_size, window_size, C) + """ + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) + windows = ( + x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + ) + return windows + + +def window_reverse(windows, window_size, H, W): + """ + Args: + windows: (num_windows*B, window_size, window_size, C) + window_size (int): Window size + H (int): Height of image + W (int): Width of image + Returns: + x: (B, H, W, C) + """ + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view( + B, H // window_size, W // window_size, window_size, window_size, -1 + ) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + +class WindowAttention(nn.Module): + r"""Window based multi-head self attention (W-MSA) module with relative position bias. + It supports both of shifted and non-shifted window. + Args: + dim (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set + attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0 + proj_drop (float, optional): Dropout ratio of output. 
Default: 0.0 + """ + + def __init__( + self, + dim, + window_size, + num_heads, + qkv_bias=True, + qk_scale=None, + attn_drop=0.0, + proj_drop=0.0, + ): + + super().__init__() + self.dim = dim + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim**-0.5 + + # define a parameter table of relative position bias + self.relative_position_bias_table = nn.Parameter( + torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads) + ) # 2*Wh-1 * 2*Ww-1, nH + + # get pair-wise relative position index for each token inside the window + coords_h = torch.arange(self.window_size[0]) + coords_w = torch.arange(self.window_size[1]) + coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww + coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww + relative_coords = ( + coords_flatten[:, :, None] - coords_flatten[:, None, :] + ) # 2, Wh*Ww, Wh*Ww + relative_coords = relative_coords.permute( + 1, 2, 0 + ).contiguous() # Wh*Ww, Wh*Ww, 2 + relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0 + relative_coords[:, :, 1] += self.window_size[1] - 1 + relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1 + relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww + self.register_buffer("relative_position_index", relative_position_index) + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + trunc_normal_(self.relative_position_bias_table, std=0.02) + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, mask=None): + """ + Args: + x: input features with shape of (num_windows*B, N, C) + mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None + """ + B_, N, C = x.shape + qkv = ( + self.qkv(x) + .reshape(B_, N, 3, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = ( + qkv[0], + qkv[1], + qkv[2], + ) # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + attn = q @ k.transpose(-2, -1) + + relative_position_bias = self.relative_position_bias_table[ + self.relative_position_index.view(-1) + ].view( + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1, + ) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1 + ).contiguous() # nH, Wh*Ww, Wh*Ww + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze( + 1 + ).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x, attn + + def extra_repr(self): + return f"dim={self.dim}, window_size={self.window_size}, num_heads={self.num_heads}" + + +# We use the model based on Swintransformer Block, therefore we can use the swin-transformer pretrained model +class SwinTransformerBlock(nn.Module): + r"""Swin Transformer Block. + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resulotion. + num_heads (int): Number of attention heads. + window_size (int): Window size. + shift_size (int): Shift size for SW-MSA. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. 
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float, optional): Stochastic depth rate. Default: 0.0 + act_layer (nn.Module, optional): Activation layer. Default: nn.GELU + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + """ + + def __init__( + self, + dim, + input_resolution, + num_heads, + window_size=7, + shift_size=0, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm, + norm_before_mlp="ln", + ): + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.num_heads = num_heads + self.window_size = window_size + self.shift_size = shift_size + self.mlp_ratio = mlp_ratio + self.norm_before_mlp = norm_before_mlp + if min(self.input_resolution) <= self.window_size: + # if window size is larger than input resolution, we don't partition windows + self.shift_size = 0 + self.window_size = min(self.input_resolution) + assert ( + 0 <= self.shift_size < self.window_size + ), "shift_size must in 0-window_size" + + self.norm1 = norm_layer(dim) + self.attn = WindowAttention( + dim, + window_size=to_2tuple(self.window_size), + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop, + ) + + self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + if self.norm_before_mlp == "ln": + self.norm2 = nn.LayerNorm(dim) + elif self.norm_before_mlp == "bn": + self.norm2 = lambda x: nn.BatchNorm1d(dim)(x.transpose(1, 2)).transpose( + 1, 2 + ) + else: + raise NotImplementedError + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + ) + + if self.shift_size > 0: + # calculate attention mask for SW-MSA + H, W = self.input_resolution + img_mask = torch.zeros((1, H, W, 1)) # 1 H W 1 + h_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + w_slices = ( + slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None), + ) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + mask_windows = window_partition( + img_mask, self.window_size + ) # nW, window_size, window_size, 1 + mask_windows = mask_windows.view(-1, self.window_size * self.window_size) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = attn_mask.masked_fill( + attn_mask != 0, float(-100.0) + ).masked_fill(attn_mask == 0, float(0.0)) + else: + attn_mask = None + + self.register_buffer("attn_mask", attn_mask) + + def forward(self, x): + # pdb.set_trace() + H, W = self.input_resolution + # print("H: ", H) + # print("W: ", W) + # pdb.set_trace() + B, L, C = x.shape + # assert L == H * W, "input feature has wrong size" + + shortcut = x + x = self.norm1(x) + x = x.view(B, H, W, C) + + # cyclic shift + if self.shift_size > 0: + shifted_x = torch.roll( + x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2) + ) + else: + shifted_x = x + + # partition windows + x_windows = window_partition( + shifted_x, self.window_size + ) # nW*B, window_size, window_size, C + x_windows = x_windows.view( + -1, 
self.window_size * self.window_size, C + ) # nW*B, window_size*window_size, C + + # W-MSA/SW-MSA + attn_windows, attn = self.attn( + x_windows, mask=self.attn_mask + ) # nW*B, window_size*window_size, C + + # merge windows + attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C) + shifted_x = window_reverse(attn_windows, self.window_size, H, W) # B H' W' C + + # reverse cyclic shift + if self.shift_size > 0: + x = torch.roll( + shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2) + ) + else: + x = shifted_x + x = x.view(B, H * W, C) + + # FFN + x = shortcut + self.drop_path(x) + x = x + self.drop_path(self.mlp(self.norm2(x))) + + return x, attn + + def extra_repr(self): + return ( + f"dim={self.dim}, input_resolution={self.input_resolution}, num_heads={self.num_heads}, " + f"window_size={self.window_size}, shift_size={self.shift_size}, mlp_ratio={self.mlp_ratio}" + ) + + +class PatchMerging(nn.Module): + r"""Patch Merging Layer. + Args: + input_resolution (tuple[int]): Resolution of input feature. + dim (int): Number of input channels. + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + """ + + def __init__(self, input_resolution, dim, norm_layer=nn.LayerNorm): + super().__init__() + self.input_resolution = input_resolution + self.dim = dim + self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False) + self.norm = norm_layer(4 * dim) + + def forward(self, x): + """ + x: B, H*W, C + """ + H, W = self.input_resolution + B, L, C = x.shape + assert L == H * W, "input feature has wrong size" + assert H % 2 == 0 and W % 2 == 0, f"x size ({H}*{W}) are not even." + + x = x.view(B, H, W, C) + + x0 = x[:, 0::2, 0::2, :] # B H/2 W/2 C + x1 = x[:, 1::2, 0::2, :] # B H/2 W/2 C + x2 = x[:, 0::2, 1::2, :] # B H/2 W/2 C + x3 = x[:, 1::2, 1::2, :] # B H/2 W/2 C + x = torch.cat([x0, x1, x2, x3], -1) # B H/2 W/2 4*C + x = x.view(B, -1, 4 * C) # B H/2*W/2 4*C + + x = self.norm(x) + x = self.reduction(x) + + return x + + def extra_repr(self): + return f"input_resolution={self.input_resolution}, dim={self.dim}" + + +class BasicLayer(nn.Module): + """A basic Swin Transformer layer for one stage. + Args: + dim (int): Number of input channels. + input_resolution (tuple[int]): Input resolution. + depth (int): Number of blocks. + num_heads (int): Number of attention heads. + window_size (int): Local window size. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. 
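+    Note:
+        forward() returns an (x, attn) tuple; in eval mode attn is the mean
+        attention map over all blocks in the stage.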
+ """ + + def __init__( + self, + dim, + input_resolution, + depth, + num_heads, + window_size, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop=0.0, + attn_drop=0.0, + drop_path=0.0, + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False, + norm_before_mlp="ln", + ): + + super().__init__() + self.dim = dim + self.input_resolution = input_resolution + self.depth = depth + self.use_checkpoint = use_checkpoint + + # build blocks + self.blocks = nn.ModuleList( + [ + SwinTransformerBlock( + dim=dim, + input_resolution=input_resolution, + num_heads=num_heads, + window_size=window_size, + shift_size=0 if (i % 2 == 0) else window_size // 2, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path[i] + if isinstance(drop_path, list) + else drop_path, + norm_layer=norm_layer, + norm_before_mlp=norm_before_mlp, + ) + for i in range(depth) + ] + ) + + # patch merging layer + if downsample is not None: + self.downsample = downsample( + input_resolution, dim=dim, norm_layer=norm_layer + ) + else: + self.downsample = None + + def forward(self, x): + attns = [] + for blk in self.blocks: + if self.use_checkpoint: + x = checkpoint.checkpoint(blk, x) + else: + x, attn = blk(x) + if not self.training: + attns.append(attn.unsqueeze(0)) + if self.downsample is not None: + x = self.downsample(x) + if not self.training: + attn = torch.cat(attns, dim=0) + attn = torch.mean(attn, dim=0) + return x, attn + + def extra_repr(self): + return f"dim={self.dim}, input_resolution={self.input_resolution}, depth={self.depth}" + + +# The Core of HTSAT +class HTSAT_Swin_Transformer(nn.Module): + r"""HTSAT based on the Swin Transformer + Args: + spec_size (int | tuple(int)): Input Spectrogram size. Default 256 + patch_size (int | tuple(int)): Patch size. Default: 4 + path_stride (iot | tuple(int)): Patch Stride for Frequency and Time Axis. Default: 4 + in_chans (int): Number of input image channels. Default: 1 (mono) + num_classes (int): Number of classes for classification head. Default: 527 + embed_dim (int): Patch embedding dimension. Default: 96 + depths (tuple(int)): Depth of each HTSAT-Swin Transformer layer. + num_heads (tuple(int)): Number of attention heads in different layers. + window_size (int): Window size. Default: 8 + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4 + qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float): Override default qk scale of head_dim ** -0.5 if set. Default: None + drop_rate (float): Dropout rate. Default: 0 + attn_drop_rate (float): Attention dropout rate. Default: 0 + drop_path_rate (float): Stochastic depth rate. Default: 0.1 + norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm. + ape (bool): If True, add absolute position embedding to the patch embedding. Default: False + patch_norm (bool): If True, add normalization after patch embedding. Default: True + use_checkpoint (bool): Whether to use checkpointing to save memory. 
Default: False + config (module): The configuration Module from config.py + """ + + def __init__( + self, + spec_size=256, + patch_size=4, + patch_stride=(4, 4), + in_chans=1, + num_classes=527, + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[4, 8, 16, 32], + window_size=8, + mlp_ratio=4.0, + qkv_bias=True, + qk_scale=None, + drop_rate=0.0, + attn_drop_rate=0.0, + drop_path_rate=0.1, + norm_layer=nn.LayerNorm, + ape=False, + patch_norm=True, + use_checkpoint=False, + norm_before_mlp="ln", + config=None, + enable_fusion=False, + fusion_type="None", + **kwargs, + ): + super(HTSAT_Swin_Transformer, self).__init__() + + self.config = config + self.spec_size = spec_size + self.patch_stride = patch_stride + self.patch_size = patch_size + self.window_size = window_size + self.embed_dim = embed_dim + self.depths = depths + self.ape = ape + self.in_chans = in_chans + self.num_classes = num_classes + self.num_heads = num_heads + self.num_layers = len(self.depths) + self.num_features = int(self.embed_dim * 2 ** (self.num_layers - 1)) + + self.drop_rate = drop_rate + self.attn_drop_rate = attn_drop_rate + self.drop_path_rate = drop_path_rate + + self.qkv_bias = qkv_bias + self.qk_scale = None + + self.patch_norm = patch_norm + self.norm_layer = norm_layer if self.patch_norm else None + self.norm_before_mlp = norm_before_mlp + self.mlp_ratio = mlp_ratio + + self.use_checkpoint = use_checkpoint + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + # process mel-spec ; used only once + self.freq_ratio = self.spec_size // self.config.mel_bins + self.interpolate_ratio = 32 # Downsampled ratio + # Spec augmenter + self.spec_augmenter = SpecAugmentation( + time_drop_width=64, + time_stripes_num=2, + freq_drop_width=8, + freq_stripes_num=2, + ) # 2 2 + self.bn0 = nn.BatchNorm2d(self.config.mel_bins) + + # split spctrogram into non-overlapping patches + self.patch_embed = PatchEmbed( + img_size=self.spec_size, + patch_size=self.patch_size, + in_chans=self.in_chans, + embed_dim=self.embed_dim, + norm_layer=self.norm_layer, + patch_stride=patch_stride, + enable_fusion=self.enable_fusion, + fusion_type=self.fusion_type, + ) + + num_patches = self.patch_embed.num_patches + patches_resolution = self.patch_embed.grid_size + self.patches_resolution = patches_resolution + + # absolute position embedding + if self.ape: + self.absolute_pos_embed = nn.Parameter( + torch.zeros(1, num_patches, self.embed_dim) + ) + trunc_normal_(self.absolute_pos_embed, std=0.02) + + self.pos_drop = nn.Dropout(p=self.drop_rate) + + # stochastic depth + dpr = [ + x.item() for x in torch.linspace(0, self.drop_path_rate, sum(self.depths)) + ] # stochastic depth decay rule + + # build layers + self.layers = nn.ModuleList() + for i_layer in range(self.num_layers): + layer = BasicLayer( + dim=int(self.embed_dim * 2**i_layer), + input_resolution=( + patches_resolution[0] // (2**i_layer), + patches_resolution[1] // (2**i_layer), + ), + depth=self.depths[i_layer], + num_heads=self.num_heads[i_layer], + window_size=self.window_size, + mlp_ratio=self.mlp_ratio, + qkv_bias=self.qkv_bias, + qk_scale=self.qk_scale, + drop=self.drop_rate, + attn_drop=self.attn_drop_rate, + drop_path=dpr[ + sum(self.depths[:i_layer]) : sum(self.depths[: i_layer + 1]) + ], + norm_layer=self.norm_layer, + downsample=PatchMerging if (i_layer < self.num_layers - 1) else None, + use_checkpoint=use_checkpoint, + norm_before_mlp=self.norm_before_mlp, + ) + self.layers.append(layer) + + self.norm = self.norm_layer(self.num_features) + self.avgpool = 
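
With the defaults above and `mel_bins=64` in the config, the derived sizes work out as follows (a back-of-the-envelope sketch, assuming those values):

spec_size, mel_bins, embed_dim = 256, 64, 96
freq_ratio = spec_size // mel_bins                    # 4 frequency strips folded into time
patch_stride = (4, 4)
patches_resolution = (spec_size // patch_stride[0],
                      spec_size // patch_stride[1])   # (64, 64) patch tokens
num_layers = 4
num_features = int(embed_dim * 2 ** (num_layers - 1))  # 768 channels after the last stage
print(freq_ratio, patches_resolution, num_features)
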
nn.AdaptiveAvgPool1d(1) + self.maxpool = nn.AdaptiveMaxPool1d(1) + + SF = ( + self.spec_size + // (2 ** (len(self.depths) - 1)) + // self.patch_stride[0] + // self.freq_ratio + ) + self.tscam_conv = nn.Conv2d( + in_channels=self.num_features, + out_channels=self.num_classes, + kernel_size=(SF, 3), + padding=(0, 1), + ) + self.head = nn.Linear(num_classes, num_classes) + + if (self.enable_fusion) and ( + self.fusion_type in ["daf_1d", "aff_1d", "iaff_1d"] + ): + self.mel_conv1d = nn.Sequential( + nn.Conv1d(64, 64, kernel_size=5, stride=3, padding=2), + nn.BatchNorm1d(64), + ) + if self.fusion_type == "daf_1d": + self.fusion_model = DAF() + elif self.fusion_type == "aff_1d": + self.fusion_model = AFF(channels=64, type="1D") + elif self.fusion_type == "iaff_1d": + self.fusion_model = iAFF(channels=64, type="1D") + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {"absolute_pos_embed"} + + @torch.jit.ignore + def no_weight_decay_keywords(self): + return {"relative_position_bias_table"} + + def forward_features(self, x, longer_idx=None): + # A deprecated optimization for using a hierarchical output from different blocks + + frames_num = x.shape[2] + x = self.patch_embed(x, longer_idx=longer_idx) + if self.ape: + x = x + self.absolute_pos_embed + x = self.pos_drop(x) + for i, layer in enumerate(self.layers): + x, attn = layer(x) + # for x + x = self.norm(x) + B, N, C = x.shape + SF = frames_num // (2 ** (len(self.depths) - 1)) // self.patch_stride[0] + ST = frames_num // (2 ** (len(self.depths) - 1)) // self.patch_stride[1] + x = x.permute(0, 2, 1).contiguous().reshape(B, C, SF, ST) + B, C, F, T = x.shape + # group 2D CNN + c_freq_bin = F // self.freq_ratio + x = x.reshape(B, C, F // c_freq_bin, c_freq_bin, T) + x = x.permute(0, 1, 3, 2, 4).contiguous().reshape(B, C, c_freq_bin, -1) + # get latent_output + fine_grained_latent_output = torch.mean(x, dim=2) + fine_grained_latent_output = interpolate( + fine_grained_latent_output.permute(0, 2, 1).contiguous(), + 8 * self.patch_stride[1], + ) + + latent_output = self.avgpool(torch.flatten(x, 2)) + latent_output = torch.flatten(latent_output, 1) + + # display the attention map, if needed + + x = self.tscam_conv(x) + x = torch.flatten(x, 2) # B, C, T + + fpx = interpolate( + torch.sigmoid(x).permute(0, 2, 1).contiguous(), 8 * self.patch_stride[1] + ) + + x = self.avgpool(x) + x = torch.flatten(x, 1) + + output_dict = { + "framewise_output": fpx, # already sigmoided + "clipwise_output": torch.sigmoid(x), + "fine_grained_embedding": fine_grained_latent_output, + "embedding": latent_output, + } + + return output_dict + + def crop_wav(self, x, crop_size, spe_pos=None): + time_steps = x.shape[2] + tx = torch.zeros(x.shape[0], x.shape[1], crop_size, x.shape[3]).to(x.device) + for i in range(len(x)): + if spe_pos is None: + crop_pos = random.randint(0, time_steps - crop_size - 1) + else: + crop_pos = spe_pos + tx[i][0] = x[i, 0, crop_pos : crop_pos + crop_size, :] + return tx + + # Reshape the wavform to a img size, if you want to use the pretrained swin transformer model + def reshape_wav2img(self, x): + B, C, T, F = x.shape + target_T = int(self.spec_size * self.freq_ratio) + target_F = self.spec_size // self.freq_ratio + assert ( + 
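
The token-to-grid arithmetic in `forward_features` can be checked by hand for the default 256x256 folded input (a sketch under those assumptions):

frames_num, num_layers, patch_stride, freq_ratio = 256, 4, (4, 4), 4
SF = frames_num // (2 ** (num_layers - 1)) // patch_stride[0]  # 8 tokens on the folded-freq axis
ST = frames_num // (2 ** (num_layers - 1)) // patch_stride[1]  # 8 tokens along time
c_freq_bin = SF // freq_ratio                                  # 2 true frequency rows per strip
# (B, C, 8, 8) -> (B, C, 4, 2, 8) -> (B, C, 2, 32): the strips unfold back into the time axis
print(SF, ST, c_freq_bin, (SF // c_freq_bin) * ST)
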
T <= target_T and F <= target_F + ), "the wav size should less than or equal to the swin input size" + # to avoid bicubic zero error + if T < target_T: + x = nn.functional.interpolate( + x, (target_T, x.shape[3]), mode="bicubic", align_corners=True + ) + if F < target_F: + x = nn.functional.interpolate( + x, (x.shape[2], target_F), mode="bicubic", align_corners=True + ) + x = x.permute(0, 1, 3, 2).contiguous() + x = x.reshape( + x.shape[0], + x.shape[1], + x.shape[2], + self.freq_ratio, + x.shape[3] // self.freq_ratio, + ) + # print(x.shape) + x = x.permute(0, 1, 3, 2, 4).contiguous() + x = x.reshape(x.shape[0], x.shape[1], x.shape[2] * x.shape[3], x.shape[4]) + return x + + # Repeat the wavform to a img size, if you want to use the pretrained swin transformer model + def repeat_wat2img(self, x, cur_pos): + B, C, T, F = x.shape + target_T = int(self.spec_size * self.freq_ratio) + target_F = self.spec_size // self.freq_ratio + assert ( + T <= target_T and F <= target_F + ), "the wav size should less than or equal to the swin input size" + # to avoid bicubic zero error + if T < target_T: + x = nn.functional.interpolate( + x, (target_T, x.shape[3]), mode="bicubic", align_corners=True + ) + if F < target_F: + x = nn.functional.interpolate( + x, (x.shape[2], target_F), mode="bicubic", align_corners=True + ) + x = x.permute(0, 1, 3, 2).contiguous() # B C F T + x = x[:, :, :, cur_pos : cur_pos + self.spec_size] + x = x.repeat(repeats=(1, 1, 4, 1)) + return x + + def init_spec(self, example): + window = "hann" + center = True + pad_mode = "reflect" + ref = 1.0 + amin = 1e-10 + top_db = None + interpolate_ratio = 32 # Downsampled ratio + # Spectrogram extractor + self.spectrogram_extractor = Spectrogram( + n_fft=self.config.window_size, + hop_length=self.config.hop_size, + win_length=self.config.window_size, + window=window, + center=center, + pad_mode=pad_mode, + freeze_parameters=True, + ).to(example.device).float() + # Logmel feature extractor + self.logmel_extractor = LogmelFilterBank( + sr=self.config.sample_rate, + n_fft=self.config.window_size, + n_mels=self.config.mel_bins, + fmin=self.config.fmin, + fmax=self.config.fmax, + ref=ref, + amin=amin, + top_db=top_db, + freeze_parameters=True, + ).to(example.device).float() + + def forward( + self, x: torch.Tensor, mixup_lambda=None, infer_mode=False, device=None + ): # out_feat_keys: List[str] = None): + + if self.enable_fusion and x["longer"].sum() == 0: + # if no audio is longer than 10s, then randomly select one audio to be longer + x["longer"][torch.randint(0, x["longer"].shape[0], (1,))] = True + + if not self.enable_fusion: + x = x["waveform"].to(device=device, non_blocking=True) + if not hasattr(self, 'spectrogram_extractor'): + self.init_spec(x) + temp_dtype = x.dtype + x = self.spectrogram_extractor(x.float()) # (batch_size, 1, time_steps, freq_bins) + x = self.logmel_extractor(x).to(temp_dtype) # (batch_size, 1, time_steps, mel_bins) + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + if self.training: + x = self.spec_augmenter(x) + + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + + dtype = x.dtype + x = self.reshape_wav2img(x) + output_dict = self.forward_features(x) + else: + longer_list = x["longer"].to(device=device, non_blocking=True) + x = x["mel_fusion"].to(device=device, non_blocking=True) + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + longer_list_idx = torch.where(longer_list)[0] + if self.fusion_type in ["daf_1d", "aff_1d", "iaff_1d"]: + new_x = x[:, 0:1, 
:, :].clone().contiguous()
+                if len(longer_list_idx) > 0:
+                    # local processing
+                    fusion_x_local = x[longer_list_idx, 1:, :, :].clone().contiguous()
+                    FB, FC, FT, FF = fusion_x_local.size()
+                    fusion_x_local = fusion_x_local.view(FB * FC, FT, FF)
+                    fusion_x_local = torch.permute(
+                        fusion_x_local, (0, 2, 1)
+                    ).contiguous()
+                    fusion_x_local = self.mel_conv1d(fusion_x_local)
+                    fusion_x_local = fusion_x_local.view(
+                        FB, FC, FF, fusion_x_local.size(-1)
+                    )
+                    fusion_x_local = (
+                        torch.permute(fusion_x_local, (0, 2, 1, 3))
+                        .contiguous()
+                        .flatten(2)
+                    )
+                    if fusion_x_local.size(-1) < FT:
+                        fusion_x_local = torch.cat(
+                            [
+                                fusion_x_local,
+                                torch.zeros(
+                                    (FB, FF, FT - fusion_x_local.size(-1)),
+                                    device=device,
+                                ),
+                            ],
+                            dim=-1,
+                        )
+                    else:
+                        fusion_x_local = fusion_x_local[:, :, :FT]
+                    # 1D fusion
+                    new_x = new_x.squeeze(1).permute((0, 2, 1)).contiguous()
+                    new_x[longer_list_idx] = self.fusion_model(
+                        new_x[longer_list_idx], fusion_x_local
+                    )
+                    x = new_x.permute((0, 2, 1)).contiguous()[:, None, :, :]
+                else:
+                    x = new_x
+
+            elif self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d", "channel_map"]:
+                x = x  # no change
+
+            if self.training:
+                x = self.spec_augmenter(x)
+            if self.training and mixup_lambda is not None:
+                x = do_mixup(x, mixup_lambda)
+
+            x = self.reshape_wav2img(x)
+            output_dict = self.forward_features(x, longer_idx=longer_list_idx)
+
+        # The dataloader already crops/pads the input, so only input_T <= fixed_T is handled here.
+
+        return output_dict
+
+
+def create_htsat_model(audio_cfg, enable_fusion=False, fusion_type="None", embed_shape=512, depth=[2, 2, 6, 2]):
+    try:
+        assert audio_cfg.model_name in [
+            "tiny",
+            "base",
+            "large",
+        ], "model name for HTS-AT is wrong!"
+        if audio_cfg.model_name == "tiny":
+            model = HTSAT_Swin_Transformer(
+                spec_size=256,
+                patch_size=4,
+                patch_stride=(4, 4),
+                num_classes=audio_cfg.class_num,
+                embed_dim=96,
+                depths=[2, 2, 6, 2],
+                num_heads=[4, 8, 16, 32],
+                window_size=8,
+                config=audio_cfg,
+                enable_fusion=enable_fusion,
+                fusion_type=fusion_type,
+            )
+        elif audio_cfg.model_name == "base":
+            model = HTSAT_Swin_Transformer(
+                spec_size=256,
+                patch_size=4,
+                patch_stride=(4, 4),
+                num_classes=audio_cfg.class_num,
+                embed_dim=128,
+                depths=[2, 2, 12, 2],
+                num_heads=[4, 8, 16, 32],
+                window_size=8,
+                config=audio_cfg,
+                enable_fusion=enable_fusion,
+                fusion_type=fusion_type,
+            )
+        elif audio_cfg.model_name == "large":
+            model = HTSAT_Swin_Transformer(
+                spec_size=256,
+                patch_size=4,
+                patch_stride=(4, 4),
+                num_classes=audio_cfg.class_num,
+                embed_dim=256,
+                depths=[2, 2, 12, 2],
+                num_heads=[4, 8, 16, 32],
+                window_size=8,
+                config=audio_cfg,
+                enable_fusion=enable_fusion,
+                fusion_type=fusion_type,
+            )
+
+        return model
+    except Exception as e:
+        # chain the original error instead of swallowing it with a bare except
+        raise RuntimeError(
+            f"Import Model for {audio_cfg.model_name} not found, or the audio cfg parameters are not enough."
+        ) from e
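
To see what `reshape_wav2img` does for the default geometry, here is a standalone trace of its reshapes, assuming a 1024-frame, 64-bin mel input:

import torch

B, C, T, F = 1, 1, 1024, 64           # (batch, 1, frames, mel_bins)
freq_ratio = 4                         # spec_size 256 // mel_bins 64
x = torch.randn(B, C, T, F)
x = x.permute(0, 1, 3, 2)              # (1, 1, 64, 1024): frequency first
x = x.reshape(B, C, F, freq_ratio, T // freq_ratio)   # cut time into 4 strips of 256
x = x.permute(0, 1, 3, 2, 4)           # (1, 1, 4, 64, 256)
x = x.reshape(B, C, freq_ratio * F, T // freq_ratio)
print(x.shape)                         # torch.Size([1, 1, 256, 256]): a Swin-sized "image"
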
diff --git a/core/models/encoders/clap_modules/open_clip/linear_probe.py b/core/models/encoders/clap_modules/open_clip/linear_probe.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d7e23b6b67a53e16d050d675a99d01d7d04d581
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/linear_probe.py
@@ -0,0 +1,66 @@
+import numpy as np
+import torch.nn.functional as F
+from torch import nn
+from .model import MLPLayers
+
+
+class LinearProbe(nn.Module):
+    def __init__(self, model, mlp, freeze, in_ch, out_ch, act=None):
+        """
+        Args:
+            model: nn.Module
+            mlp: bool, if True, then use the MLP layer as the linear probe module
+            freeze: bool, if True, then freeze all the CLAP model's layers when training the linear probe
+            in_ch: int, the output channel from CLAP model
+            out_ch: int, the output channel from linear probe (class_num)
+            act: torch.nn.functional, the activation function before the loss function
+        """
+        super().__init__()
+        in_ch = 512  # NOTE: hardcoded here, so the in_ch argument above is effectively ignored
+        self.clap_model = model
+        self.clap_model.text_branch = None  # to save memory
+        self.freeze = freeze
+        if mlp:
+            self.lp_layer = MLPLayers(units=[in_ch, in_ch * 2, out_ch])
+        else:
+            self.lp_layer = nn.Linear(in_ch, out_ch)
+
+        if self.freeze:
+            for param in self.clap_model.parameters():
+                param.requires_grad = False
+
+        if act is None or act == "None":
+            self.act = None
+        elif act == "relu":
+            self.act = nn.ReLU()
+        elif act == "elu":
+            self.act = nn.ELU()
+        elif act == "prelu":
+            self.act = nn.PReLU(num_parameters=in_ch)
+        elif act == "softmax":
+            self.act = nn.Softmax(dim=-1)
+        elif act == "sigmoid":
+            self.act = nn.Sigmoid()
+        else:
+            raise ValueError(f"unsupported activation: {act}")
+
+    def forward(self, x, mix_lambda=None, device=None):
+        """
+        Args:
+            x: waveform, torch.tensor [batch, t_samples] / batch of mel_spec and longer list
+            mix_lambda: torch.tensor [batch], the mixup lambda
+        Returns:
+            class_prob: torch.tensor [batch, class_num]
+
+        """
+        # keep frozen BatchNorm layers in eval mode so their running stats are not updated
+        if self.freeze:
+            self.clap_model.eval()
+
+        x = self.clap_model.audio_projection(
+            self.clap_model.audio_branch(x, mixup_lambda=mix_lambda, device=device)[
+                "embedding"
+            ]
+        )
+        out = self.lp_layer(x)
+        if self.act is not None:
+            out = self.act(out)
+        return out
diff --git a/core/models/encoders/clap_modules/open_clip/loss.py b/core/models/encoders/clap_modules/open_clip/loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc66298a14997da4aa2efc71e37c0a6bcda53fd1
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/loss.py
@@ -0,0 +1,398 @@
+import torch
+import torch.distributed.nn
+from torch import distributed as dist, nn as nn
+from torch.nn import functional as F
+import numpy as np
+from sklearn.metrics import average_precision_score, roc_auc_score, accuracy_score
+
+try:
+    import horovod.torch as hvd
+except ImportError:
+    hvd = None
+
+
+def gather_features(
+    audio_features,
+    text_features,
+    audio_features_mlp=None,
+    text_features_mlp=None,
+    local_loss=False,
+    gather_with_grad=False,
+    rank=0,
+    world_size=1,
+    use_horovod=False,
+    mlp_loss=False,
+):
+    if use_horovod:
+        assert hvd is not None, "Please install horovod"
+        if gather_with_grad:
+            all_audio_features = hvd.allgather(audio_features)
+            all_text_features = hvd.allgather(text_features)
+            if mlp_loss:
+                all_audio_features_mlp = hvd.allgather(audio_features_mlp)
+                all_text_features_mlp = hvd.allgather(text_features_mlp)
+        else:
+            with torch.no_grad():
+                all_audio_features = hvd.allgather(audio_features)
+                all_text_features = hvd.allgather(text_features)
+                if 
mlp_loss: + all_audio_features_mlp = hvd.allgather(audio_features_mlp) + all_text_features_mlp = hvd.allgather(text_features_mlp) + if not local_loss: + # ensure grads for local rank when all_* features don't have a gradient + gathered_audio_features = list( + all_audio_features.chunk(world_size, dim=0) + ) + gathered_text_features = list( + all_text_features.chunk(world_size, dim=0) + ) + gathered_audio_features[rank] = audio_features + gathered_text_features[rank] = text_features + all_audio_features = torch.cat(gathered_audio_features, dim=0) + all_text_features = torch.cat(gathered_text_features, dim=0) + if mlp_loss: + gathered_audio_features_mlp = list( + all_audio_features_mlp.chunk(world_size, dim=0) + ) + gathered_text_features_mlp = list( + all_text_features_mlp.chunk(world_size, dim=0) + ) + gathered_audio_features_mlp[rank] = audio_features_mlp + gathered_text_features_mlp[rank] = text_features_mlp + all_audio_features_mlp = torch.cat( + gathered_audio_features_mlp, dim=0 + ) + all_text_features_mlp = torch.cat(gathered_text_features_mlp, dim=0) + else: + # We gather tensors from all gpus + if gather_with_grad: + all_audio_features = torch.cat( + torch.distributed.nn.all_gather(audio_features), dim=0 + ) + all_text_features = torch.cat( + torch.distributed.nn.all_gather(text_features), dim=0 + ) + if mlp_loss: + all_audio_features_mlp = torch.cat( + torch.distributed.nn.all_gather(audio_features_mlp), dim=0 + ) + all_text_features_mlp = torch.cat( + torch.distributed.nn.all_gather(text_features_mlp), dim=0 + ) + else: + gathered_audio_features = [ + torch.zeros_like(audio_features) for _ in range(world_size) + ] + gathered_text_features = [ + torch.zeros_like(text_features) for _ in range(world_size) + ] + dist.all_gather(gathered_audio_features, audio_features) + dist.all_gather(gathered_text_features, text_features) + if mlp_loss: + gathered_audio_features_mlp = [ + torch.zeros_like(audio_features_mlp) for _ in range(world_size) + ] + gathered_text_features_mlp = [ + torch.zeros_like(text_features_mlp) for _ in range(world_size) + ] + dist.all_gather(gathered_audio_features_mlp, audio_features_mlp) + dist.all_gather(gathered_text_features_mlp, text_features_mlp) + if not local_loss: + # ensure grads for local rank when all_* features don't have a gradient + gathered_audio_features[rank] = audio_features + gathered_text_features[rank] = text_features + if mlp_loss: + gathered_audio_features_mlp[rank] = audio_features_mlp + gathered_text_features_mlp[rank] = text_features_mlp + + all_audio_features = torch.cat(gathered_audio_features, dim=0) + all_text_features = torch.cat(gathered_text_features, dim=0) + if mlp_loss: + all_audio_features_mlp = torch.cat(gathered_audio_features_mlp, dim=0) + all_text_features_mlp = torch.cat(gathered_text_features_mlp, dim=0) + if mlp_loss: + return ( + all_audio_features, + all_text_features, + all_audio_features_mlp, + all_text_features_mlp, + ) + else: + return all_audio_features, all_text_features + + +class ClipLoss(nn.Module): + def __init__( + self, + local_loss=False, + gather_with_grad=False, + cache_labels=False, + rank=0, + world_size=1, + use_horovod=False, + mlp_loss=False, + weight_loss_kappa=0, + ): + super().__init__() + self.local_loss = local_loss + self.gather_with_grad = gather_with_grad + self.cache_labels = cache_labels + self.rank = rank + self.world_size = world_size + self.use_horovod = use_horovod + self.mlp_loss = mlp_loss + self.weighted_loss = bool(weight_loss_kappa != 0) + self.weight_loss_kappa = weight_loss_kappa 
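
The forward pass that follows implements this objective in full generality; in the single-process, unweighted case it reduces to the standard symmetric InfoNCE, e.g. (a toy sketch, not this class's API):

import torch
import torch.nn.functional as F

a = F.normalize(torch.randn(4, 512), dim=-1)   # audio embeddings
t = F.normalize(torch.randn(4, 512), dim=-1)   # text embeddings
logit_scale = torch.tensor(100.0)              # stands in for logit_scale_a after exp()
logits = logit_scale * a @ t.T
labels = torch.arange(4)                        # matched pairs sit on the diagonal
loss = (F.cross_entropy(logits, labels) + F.cross_entropy(logits.T, labels)) / 2
print(loss.item())
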
+ # cache state + self.prev_num_logits = 0 + self.labels = {} + + def forward( + self, + audio_features, + text_features, + logit_scale_a, + logit_scale_t=None, + audio_features_mlp=None, + text_features_mlp=None, + ): + device = audio_features.device + if self.mlp_loss: + if self.world_size > 1: + ( + all_audio_features, + all_text_features, + all_audio_features_mlp, + all_text_features_mlp, + ) = gather_features( + audio_features=audio_features, + text_features=text_features, + audio_features_mlp=audio_features_mlp, + text_features_mlp=text_features_mlp, + local_loss=self.local_loss, + gather_with_grad=self.gather_with_grad, + rank=self.rank, + world_size=self.world_size, + use_horovod=self.use_horovod, + mlp_loss=self.mlp_loss, + ) + if self.local_loss: + a_logits_per_audio = ( + logit_scale_a * audio_features @ all_text_features_mlp.T + ) + a_logits_per_text = ( + logit_scale_a * text_features_mlp @ all_audio_features.T + ) + t_logits_per_audio = ( + logit_scale_t * audio_features_mlp @ all_text_features.T + ) + t_logits_per_text = ( + logit_scale_t * text_features @ all_audio_features_mlp.T + ) + else: + a_logits_per_audio = ( + logit_scale_a * all_audio_features @ all_text_features_mlp.T + ) + a_logits_per_text = a_logits_per_audio.T + t_logits_per_audio = ( + logit_scale_t * all_audio_features_mlp @ all_text_features.T + ) + t_logits_per_text = t_logits_per_audio.T + else: + a_logits_per_audio = ( + logit_scale_a * audio_features @ text_features_mlp.T + ) + a_logits_per_text = logit_scale_a * text_features_mlp @ audio_features.T + t_logits_per_audio = ( + logit_scale_t * audio_features_mlp @ text_features.T + ) + t_logits_per_text = logit_scale_t * text_features @ audio_features_mlp.T + + # calculated ground-truth and cache if enabled + num_logits = a_logits_per_audio.shape[0] + if self.prev_num_logits != num_logits or device not in self.labels: + labels = torch.arange(num_logits, device=device, dtype=torch.long) + if self.world_size > 1 and self.local_loss: + labels = labels + num_logits * self.rank + if self.cache_labels: + self.labels[device] = labels + self.prev_num_logits = num_logits + else: + labels = self.labels[device] + + if not self.weighted_loss: + total_loss = ( + F.cross_entropy(a_logits_per_audio, labels) + + F.cross_entropy(a_logits_per_text, labels) + + F.cross_entropy(t_logits_per_audio, labels) + + F.cross_entropy(t_logits_per_text, labels) + ) / 4 + else: + audio_weight = (audio_features @ audio_features.T).detach() + audio_weight = ( + torch.exp( + torch.sum(audio_weight, axis=1) + / (self.weight_loss_kappa * len(audio_weight)) + ) + ).detach() + text_weight = (text_features @ text_features.T).detach() + text_weight = ( + torch.exp( + torch.sum(text_weight, axis=1) + / (self.weight_loss_kappa * len(text_features)) + ) + ).detach() + total_loss = ( + F.cross_entropy(a_logits_per_audio, labels, weight=audio_weight) + + F.cross_entropy(a_logits_per_text, labels, weight=audio_weight) + + F.cross_entropy(t_logits_per_audio, labels, weight=text_weight) + + F.cross_entropy(t_logits_per_text, labels, weight=text_weight) + ) / 4 + else: + if self.world_size > 1: + all_audio_features, all_text_features = gather_features( + audio_features=audio_features, + text_features=text_features, + local_loss=self.local_loss, + gather_with_grad=self.gather_with_grad, + rank=self.rank, + world_size=self.world_size, + use_horovod=self.use_horovod, + mlp_loss=self.mlp_loss, + ) + + if self.local_loss: + logits_per_audio = ( + logit_scale_a * audio_features @ all_text_features.T + ) + 
logits_per_text = (
+                    logit_scale_a * text_features @ all_audio_features.T
+                )
+            else:
+                logits_per_audio = (
+                    logit_scale_a * all_audio_features @ all_text_features.T
+                )
+                logits_per_text = logits_per_audio.T
+        else:
+            logits_per_audio = logit_scale_a * audio_features @ text_features.T
+            logits_per_text = logit_scale_a * text_features @ audio_features.T
+
+        # calculate the ground-truth labels and cache them if enabled
+        num_logits = logits_per_audio.shape[0]
+        if self.prev_num_logits != num_logits or device not in self.labels:
+            labels = torch.arange(num_logits, device=device, dtype=torch.long)
+            if self.world_size > 1 and self.local_loss:
+                labels = labels + num_logits * self.rank
+            if self.cache_labels:
+                self.labels[device] = labels
+                self.prev_num_logits = num_logits
+        else:
+            labels = self.labels[device]
+        if not self.weighted_loss:
+            total_loss = (
+                F.cross_entropy(logits_per_audio, labels)
+                + F.cross_entropy(logits_per_text, labels)
+            ) / 2
+        else:
+            audio_weight = (all_audio_features @ all_audio_features.T).detach()
+            audio_weight = (
+                torch.exp(
+                    torch.sum(audio_weight, axis=1)
+                    / (self.weight_loss_kappa * len(all_audio_features))
+                )
+            ).detach()
+            text_weight = (all_text_features @ all_text_features.T).detach()
+            text_weight = (
+                torch.exp(
+                    torch.sum(text_weight, axis=1)
+                    / (self.weight_loss_kappa * len(all_text_features))
+                )
+            ).detach()
+            total_loss = (
+                F.cross_entropy(logits_per_audio, labels, weight=text_weight)
+                + F.cross_entropy(logits_per_text, labels, weight=audio_weight)
+            ) / 2
+        return total_loss
+
+
+def lp_gather_features(pred, target, world_size=1, use_horovod=False):
+    if use_horovod:
+        assert hvd is not None, "Please install horovod"
+        with torch.no_grad():
+            all_preds = hvd.allgather(pred)
+            all_targets = hvd.allgather(target)
+    else:
+        gathered_preds = [torch.zeros_like(pred) for _ in range(world_size)]
+        gathered_targets = [torch.zeros_like(target) for _ in range(world_size)]
+
+        dist.all_gather(gathered_preds, pred)
+        dist.all_gather(gathered_targets, target)
+        all_preds = torch.cat(gathered_preds, dim=0)
+        all_targets = torch.cat(gathered_targets, dim=0)
+
+    return all_preds, all_targets
+
+
+def get_map(pred, target):
+    pred = torch.sigmoid(pred).numpy()
+    target = target.numpy()
+    return np.mean(average_precision_score(target, pred, average=None))
+
+
+def get_acc(pred, target):
+    pred = torch.argmax(pred, 1).numpy()
+    target = torch.argmax(target, 1).numpy()
+    return accuracy_score(target, pred)
+
+
+def get_mauc(pred, target):
+    pred = torch.sigmoid(pred).numpy()
+    target = target.numpy()
+    return np.mean(roc_auc_score(target, pred, average=None))
+
+
+class LPMetrics(object):
+    def __init__(self, metric_names=["map", "acc", "mauc"]):
+        self.metrics = []
+        for name in metric_names:
+            self.metrics.append(self.get_metric(name))
+        self.metric_names = metric_names
+
+    def get_metric(self, name):
+        if name == "map":
+            return get_map
+        elif name == "acc":
+            return get_acc
+        elif name == "mauc":
+            return get_mauc
+        else:
+            raise ValueError("the metric should be at least one of [map, acc, mauc]")
+
+    # note: the misspelled method name is kept so existing callers keep working
+    def evaluate_mertics(self, pred, target):
+        metric_dict = {}
+        for i in range(len(self.metric_names)):
+            metric_dict[self.metric_names[i]] = self.metrics[i](pred, target)
+        return metric_dict
+
+
+def calc_celoss(pred, target):
+    target = torch.argmax(target, 1).long()
+    return nn.CrossEntropyLoss()(pred, target)
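
A tiny sanity check for the helpers above (illustrative values):

import torch

pred = torch.tensor([[2.0, -1.0], [0.5, 1.5]])   # logits
target = torch.tensor([[1.0, 0.0], [0.0, 1.0]])  # one-hot targets
print(get_acc(pred, target))                      # 1.0: argmax agrees on both rows
print(calc_celoss(pred, target).item())           # cross-entropy against the argmax labels
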
class LPLoss(nn.Module):
+    def __init__(self, loss_name):
+        super().__init__()
+        if loss_name == "bce":
+            self.loss_func = nn.BCEWithLogitsLoss()
+        elif loss_name == "ce":
+            self.loss_func = calc_celoss
+        elif loss_name == "mse":
+            self.loss_func = nn.MSELoss()
+        else:
+            raise ValueError("the loss func should be at least one of [bce, ce, mse]")
+
+    def forward(self, pred, target):
+        loss = self.loss_func(pred, target)
+        return loss
diff --git a/core/models/encoders/clap_modules/open_clip/model.py b/core/models/encoders/clap_modules/open_clip/model.py
new file mode 100644
index 0000000000000000000000000000000000000000..116ef417683c22e0f0ad109169ae80be59b23673
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/model.py
@@ -0,0 +1,799 @@
+""" CLAP Model
+
+Adapted from CLIP: https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI.
+Adapted to the Audio Task.
+"""
+
+from collections import OrderedDict
+from dataclasses import dataclass
+from typing import Tuple, Union, Callable, Optional
+
+import numpy as np
+import torch
+import torch.nn.functional as F
+from torch import nn
+
+from .timm_model import TimmModel
+import logging
+from .utils import freeze_batch_norm_2d
+
+from .pann_model import create_pann_model
+from .htsat import create_htsat_model
+from transformers import BertModel, RobertaModel, BartModel
+from transformers.tokenization_utils_base import BatchEncoding
+
+
+class MLPLayers(nn.Module):
+    def __init__(self, units=[512, 512, 512], nonlin=nn.ReLU(), dropout=0.1):
+        super(MLPLayers, self).__init__()
+        self.nonlin = nonlin
+        self.dropout = dropout
+
+        sequence = []
+        for u0, u1 in zip(units[:-1], units[1:]):
+            sequence.append(nn.Linear(u0, u1))
+            sequence.append(self.nonlin)
+            sequence.append(nn.Dropout(self.dropout))
+        sequence = sequence[:-2]  # drop the trailing nonlinearity and dropout
+
+        self.sequential = nn.Sequential(*sequence)
+
+    def forward(self, X):
+        X = self.sequential(X)
+        return X
+
+
+class Bottleneck(nn.Module):
+    expansion = 4
+
+    def __init__(self, inplanes, planes, stride=1):
+        super().__init__()
+
+        # all conv layers have stride 1.
an avgpool is performed after the second convolution when stride > 1 + self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + + self.conv2 = nn.Conv2d(planes, planes, 3, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + + self.avgpool = nn.AvgPool2d(stride) if stride > 1 else nn.Identity() + + self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + + self.relu = nn.ReLU(inplace=True) + self.downsample = None + self.stride = stride + + if stride > 1 or inplanes != planes * Bottleneck.expansion: + # downsampling layer is prepended with an avgpool, and the subsequent convolution has stride 1 + self.downsample = nn.Sequential( + OrderedDict( + [ + ("-1", nn.AvgPool2d(stride)), + ( + "0", + nn.Conv2d( + inplanes, + planes * self.expansion, + 1, + stride=1, + bias=False, + ), + ), + ("1", nn.BatchNorm2d(planes * self.expansion)), + ] + ) + ) + + def forward(self, x: torch.Tensor): + identity = x + + out = self.relu(self.bn1(self.conv1(x))) + out = self.relu(self.bn2(self.conv2(out))) + out = self.avgpool(out) + out = self.bn3(self.conv3(out)) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + return out + + +class AttentionPool2d(nn.Module): + def __init__( + self, spacial_dim: int, embed_dim: int, num_heads: int, output_dim: int = None + ): + super().__init__() + self.positional_embedding = nn.Parameter( + torch.randn(spacial_dim**2 + 1, embed_dim) / embed_dim**0.5 + ) + self.k_proj = nn.Linear(embed_dim, embed_dim) + self.q_proj = nn.Linear(embed_dim, embed_dim) + self.v_proj = nn.Linear(embed_dim, embed_dim) + self.c_proj = nn.Linear(embed_dim, output_dim or embed_dim) + self.num_heads = num_heads + + def forward(self, x): + x = x.reshape(x.shape[0], x.shape[1], x.shape[2] * x.shape[3]).permute( + 2, 0, 1 + ) # NCHW -> (HW)NC + x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (HW+1)NC + x = x + self.positional_embedding[:, None, :].to(x.dtype) # (HW+1)NC + x, _ = F.multi_head_attention_forward( + query=x, + key=x, + value=x, + embed_dim_to_check=x.shape[-1], + num_heads=self.num_heads, + q_proj_weight=self.q_proj.weight, + k_proj_weight=self.k_proj.weight, + v_proj_weight=self.v_proj.weight, + in_proj_weight=None, + in_proj_bias=torch.cat( + [self.q_proj.bias, self.k_proj.bias, self.v_proj.bias] + ), + bias_k=None, + bias_v=None, + add_zero_attn=False, + dropout_p=0, + out_proj_weight=self.c_proj.weight, + out_proj_bias=self.c_proj.bias, + use_separate_proj_weight=True, + training=self.training, + need_weights=False, + ) + + return x[0] + + +class ModifiedResNet(nn.Module): + """ + A ResNet class that is similar to torchvision's but contains the following changes: + - There are now 3 "stem" convolutions as opposed to 1, with an average pool instead of a max pool. 
+ - Performs anti-aliasing strided convolutions, where an avgpool is prepended to convolutions with stride > 1 + - The final pooling layer is a QKV attention instead of an average pool + """ + + def __init__(self, layers, output_dim, heads, image_size=224, width=64): + super().__init__() + self.output_dim = output_dim + self.image_size = image_size + + # the 3-layer stem + self.conv1 = nn.Conv2d( + 3, width // 2, kernel_size=3, stride=2, padding=1, bias=False + ) + self.bn1 = nn.BatchNorm2d(width // 2) + self.conv2 = nn.Conv2d( + width // 2, width // 2, kernel_size=3, padding=1, bias=False + ) + self.bn2 = nn.BatchNorm2d(width // 2) + self.conv3 = nn.Conv2d(width // 2, width, kernel_size=3, padding=1, bias=False) + self.bn3 = nn.BatchNorm2d(width) + self.avgpool = nn.AvgPool2d(2) + self.relu = nn.ReLU(inplace=True) + + # residual layers + self._inplanes = width # this is a *mutable* variable used during construction + self.layer1 = self._make_layer(width, layers[0]) + self.layer2 = self._make_layer(width * 2, layers[1], stride=2) + self.layer3 = self._make_layer(width * 4, layers[2], stride=2) + self.layer4 = self._make_layer(width * 8, layers[3], stride=2) + + embed_dim = width * 32 # the ResNet feature dimension + self.attnpool = AttentionPool2d(image_size // 32, embed_dim, heads, output_dim) + + self.init_parameters() + + def _make_layer(self, planes, blocks, stride=1): + layers = [Bottleneck(self._inplanes, planes, stride)] + + self._inplanes = planes * Bottleneck.expansion + for _ in range(1, blocks): + layers.append(Bottleneck(self._inplanes, planes)) + + return nn.Sequential(*layers) + + def init_parameters(self): + if self.attnpool is not None: + std = self.attnpool.c_proj.in_features**-0.5 + nn.init.normal_(self.attnpool.q_proj.weight, std=std) + nn.init.normal_(self.attnpool.k_proj.weight, std=std) + nn.init.normal_(self.attnpool.v_proj.weight, std=std) + nn.init.normal_(self.attnpool.c_proj.weight, std=std) + + for resnet_block in [self.layer1, self.layer2, self.layer3, self.layer4]: + for name, param in resnet_block.named_parameters(): + if name.endswith("bn3.weight"): + nn.init.zeros_(param) + + def lock(self, unlocked_groups=0, freeze_bn_stats=False): + assert ( + unlocked_groups == 0 + ), "partial locking not currently supported for this model" + for param in self.parameters(): + param.requires_grad = False + if freeze_bn_stats: + freeze_batch_norm_2d(self) + + def stem(self, x): + for conv, bn in [ + (self.conv1, self.bn1), + (self.conv2, self.bn2), + (self.conv3, self.bn3), + ]: + x = self.relu(bn(conv(x))) + x = self.avgpool(x) + return x + + def forward(self, x): + x = self.stem(x) + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.attnpool(x) + + return x + + +class LayerNorm(nn.LayerNorm): + """Subclass torch's LayerNorm to handle fp16.""" + + def forward(self, x: torch.Tensor): + orig_type = x.dtype + x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps) + return x.to(orig_type) + + +class QuickGELU(nn.Module): + # NOTE This is slower than nn.GELU or nn.SiLU and uses more GPU memory + def forward(self, x: torch.Tensor): + return x * torch.sigmoid(1.702 * x) + + +class ResidualAttentionBlock(nn.Module): + def __init__(self, d_model: int, n_head: int, act_layer: Callable = nn.GELU): + super().__init__() + + self.attn = nn.MultiheadAttention(d_model, n_head) + self.ln_1 = LayerNorm(d_model) + self.mlp = nn.Sequential( + OrderedDict( + [ + ("c_fc", nn.Linear(d_model, d_model * 4)), + ("gelu", act_layer()), 
+ ("c_proj", nn.Linear(d_model * 4, d_model)), + ] + ) + ) + self.ln_2 = LayerNorm(d_model) + + def attention(self, x: torch.Tensor, attn_mask: Optional[torch.Tensor] = None): + return self.attn(x, x, x, need_weights=False, attn_mask=attn_mask)[0] + + def forward(self, x: torch.Tensor, attn_mask: Optional[torch.Tensor] = None): + x = x + self.attention(self.ln_1(x), attn_mask=attn_mask) + x = x + self.mlp(self.ln_2(x)) + return x + + +class Transformer(nn.Module): + def __init__( + self, width: int, layers: int, heads: int, act_layer: Callable = nn.GELU + ): + super().__init__() + self.width = width + self.layers = layers + self.resblocks = nn.ModuleList( + [ + ResidualAttentionBlock(width, heads, act_layer=act_layer) + for _ in range(layers) + ] + ) + + def forward(self, x: torch.Tensor, attn_mask: Optional[torch.Tensor] = None): + for r in self.resblocks: + x = r(x, attn_mask=attn_mask) + return x + + +class VisualTransformer(nn.Module): + def __init__( + self, + image_size: int, + patch_size: int, + width: int, + layers: int, + heads: int, + output_dim: int, + act_layer: Callable = nn.GELU, + ): + super().__init__() + self.image_size = image_size + self.output_dim = output_dim + self.conv1 = nn.Conv2d( + in_channels=3, + out_channels=width, + kernel_size=patch_size, + stride=patch_size, + bias=False, + ) + + scale = width**-0.5 + self.class_embedding = nn.Parameter(scale * torch.randn(width)) + self.positional_embedding = nn.Parameter( + scale * torch.randn((image_size // patch_size) ** 2 + 1, width) + ) + self.ln_pre = LayerNorm(width) + + self.text_branch = Transformer(width, layers, heads, act_layer=act_layer) + + self.ln_post = LayerNorm(width) + self.proj = nn.Parameter(scale * torch.randn(width, output_dim)) + + def lock(self, unlocked_groups=0, freeze_bn_stats=False): + assert ( + unlocked_groups == 0 + ), "partial locking not currently supported for this model" + for param in self.parameters(): + param.requires_grad = False + + def forward(self, x: torch.Tensor): + x = self.conv1(x) # shape = [*, width, grid, grid] + x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2] + x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width] + x = torch.cat( + [ + self.class_embedding.to(x.dtype) + + torch.zeros( + x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device + ), + x, + ], + dim=1, + ) # shape = [*, grid ** 2 + 1, width] + x = x + self.positional_embedding.to(x.dtype) + x = self.ln_pre(x) + + x = x.permute(1, 0, 2) # NLD -> LND + x = self.text_branch(x) + x = x.permute(1, 0, 2) # LND -> NLD + + x = self.ln_post(x[:, 0, :]) + + if self.proj is not None: + x = x @ self.proj + + return x + + +@dataclass +class CLAPVisionCfg: + layers: Union[Tuple[int, int, int, int], int] = 12 + width: int = 768 + patch_size: int = 16 + image_size: Union[Tuple[int, int], int] = 224 + timm_model_name: str = ( + None # a valid model name overrides layers, width, patch_size + ) + timm_model_pretrained: bool = ( + False # use (imagenet) pretrained weights for named model + ) + timm_pool: str = ( + "avg" # feature pooling for timm model ('abs_attn', 'rot_attn', 'avg', '') + ) + timm_proj: str = ( + "linear" # linear projection for timm model output ('linear', 'mlp', '') + ) + + +# Audio Config Class +@dataclass +class CLAPAudioCfp: + model_type: str = "PANN" + model_name: str = "Cnn14" + sample_rate: int = 48000 + # Param + audio_length: int = 1024 + window_size: int = 1024 + hop_size: int = 1024 + fmin: int = 50 + fmax: int = 14000 + class_num: int = 527 + mel_bins: int = 64 + 
clip_samples: int = 480000 + + +@dataclass +class CLAPTextCfg: + context_length: int + vocab_size: int + width: int + heads: int + layers: int + model_type: str + + +class CLAP(nn.Module): + def __init__( + self, + embed_dim: int, + audio_cfg: CLAPAudioCfp, + text_cfg: CLAPTextCfg, + quick_gelu: bool = False, + enable_fusion: bool = False, + fusion_type: str = "None", + joint_embed_shape: int = 512, + embed_shape: int = 768, + depth: list = [2, 2, 6, 2], + mlp_act: str = "relu", + ): + super().__init__() + if isinstance(audio_cfg, dict): + audio_cfg = CLAPAudioCfp(**audio_cfg) + + self.audio_cfg = audio_cfg + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + self.joint_embed_shape = joint_embed_shape + self.depth = depth + self.mlp_act = mlp_act + + # OpenAI models are pretrained w/ QuickGELU but native nn.GELU is both faster and more + # memory efficient in recent PyTorch releases (>= 1.10). + # NOTE: timm models always use native GELU regardless of quick_gelu flag. + act_layer = QuickGELU if quick_gelu else nn.GELU + + if mlp_act == "relu": + mlp_act_layer = nn.ReLU() + elif mlp_act == "gelu": + mlp_act_layer = nn.GELU() + else: + raise NotImplementedError + + # audio branch + if audio_cfg.model_type == "PANN": + self.audio_branch = create_pann_model(audio_cfg, enable_fusion, fusion_type, embed_shape, depth) + elif audio_cfg.model_type == "HTSAT": + self.audio_branch = create_htsat_model( + audio_cfg, enable_fusion, fusion_type, embed_shape, depth + ) + else: + logging.error(f"Model config for {audio_cfg.model_type} not found") + raise RuntimeError(f"Model config for {audio_cfg.model_type} not found.") + + # audio branch parameters + self.audio_transform = MLPLayers( + units=[ + self.joint_embed_shape, + self.joint_embed_shape, + self.joint_embed_shape, + ], + dropout=0.1, + ) + + # ============================================================================================================ + self.audio_projection = nn.Sequential( + nn.Linear(embed_dim, self.joint_embed_shape), + mlp_act_layer, + nn.Linear(self.joint_embed_shape, self.joint_embed_shape), + ) + + def encode_audio(self, audio, device): + return self.audio_branch( + audio, mixup_lambda=None, device=device + ) # mix lambda needs to add + + + def encode_text(self, text, device): + if self.text_branch_type == "transformer": + text = text.to(device=device, non_blocking=True) + x = self.token_embedding(text) # [batch_size, n_ctx, d_model] + + x = x + self.positional_embedding + x = x.permute(1, 0, 2) # NLD -> LND + x = self.text_branch(x, attn_mask=self.attn_mask) + x = x.permute(1, 0, 2) # LND -> NLD + x = self.ln_final(x) + + # x.shape = [batch_size, n_ctx, transformer.width] + # take features from the eot embedding (eot_token is the highest number in each sequence) + x = self.text_projection(x[torch.arange(x.shape[0]), text.argmax(dim=-1)]) + elif self.text_branch_type == "bert": + # text = self.list_of_dict_of_tensor2dict_of_tensor(text, device) + # text = BatchEncoding(text) + x = self.text_branch( + input_ids=text["input_ids"].to(device=device, non_blocking=True), + attention_mask=text["attention_mask"].to( + device=device, non_blocking=True + ), + token_type_ids=text["token_type_ids"].to( + device=device, non_blocking=True + ), + )["pooler_output"] + x = self.text_projection(x) + elif self.text_branch_type == "roberta": + x = self.text_branch( + input_ids=text["input_ids"].to(device=device, non_blocking=True), + attention_mask=text["attention_mask"].to( + device=device, non_blocking=True + ), + 
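
Once trained, the embedding getters further below (`get_audio_embedding` / `get_text_embedding`) return L2-normalized joint embeddings, so retrieval is a plain scaled dot product. A hedged sketch with stand-in tensors in place of real model outputs:

import torch
import torch.nn.functional as F

audio_embeds = F.normalize(torch.randn(8, 512), dim=-1)   # stand-ins for model outputs
text_embeds = F.normalize(torch.randn(5, 512), dim=-1)
similarity = 100.0 * audio_embeds @ text_embeds.T          # (8, 5) audio-to-text scores
best_text = similarity.argmax(dim=-1)                      # top text match per audio clip
print(best_text)
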
)["pooler_output"] + x = self.text_projection(x) + elif self.text_branch_type == "bart": + x = torch.mean( + self.text_branch( + input_ids=text["input_ids"].to(device=device, non_blocking=True), + attention_mask=text["attention_mask"].to( + device=device, non_blocking=True + ), + )["encoder_last_hidden_state"], + axis=1, + ) + x = self.text_projection(x) + else: + logging.error(f"Model type {self.text_branch_type} not found") + raise RuntimeError(f"Model type {self.text_branch_type} not found.") + return x + + def forward(self, audio, text, device=None): + """Forward audio and text into the CLAP + + Parameters + ---------- + audio: torch.Tensor (batch_size, audio_length) + the time-domain audio input / the batch of mel_spec and longer list. + text: torch.Tensor () // need to add + the text token input + """ + if device is None: + if audio is not None: + device = audio.device + elif text is not None: + device = text.device + if audio is None and text is None: + # a hack to get the logit scale + return self.logit_scale_a.exp(), self.logit_scale_t.exp() + elif audio is None: + return self.encode_text(text, device=device) + elif text is None: + return self.audio_projection( + self.encode_audio(audio, device=device)["embedding"] + ) + audio_features = self.audio_projection( + self.encode_audio(audio, device=device)["embedding"] + ) + audio_features = F.normalize(audio_features, dim=-1) + + text_features = self.encode_text(text, device=device) + # print("text_features", text_features) + # print("text_features.shape", text_features.shape) + # print("text_features.type", type(text_features)) + text_features = F.normalize(text_features, dim=-1) + + audio_features_mlp = self.audio_transform(audio_features) + text_features_mlp = self.text_transform(text_features) + # Four outputs: audio features (basic & MLP), text features (basic & MLP) + return ( + audio_features, + text_features, + audio_features_mlp, + text_features_mlp, + self.logit_scale_a.exp(), + self.logit_scale_t.exp(), + ) + + def get_logit_scale(self): + return self.logit_scale_a.exp(), self.logit_scale_t.exp() + + def get_text_embedding(self, data): + """Get the text embedding from the model + + Parameters + ---------- + data: torch.Tensor + a tensor of text embedding + + Returns + ---------- + text_embed: torch.Tensor + a tensor of text_embeds (N, D) + + """ + device = next(self.parameters()).device + for k in data: + data[k] = data[k].to(device) + if len(data[k].size()) < 2: + data[k] = data[k].unsqueeze(0) + text_embeds = self.encode_text(data, device=device) + text_embeds = F.normalize(text_embeds, dim=-1) + + return text_embeds + + def get_audio_embedding(self, data): + """Get the audio embedding from the model + + Parameters + ---------- + data: a list of dict + the audio input dict list from 'get_audio_feature' method + + Returns + ---------- + audio_embed: torch.Tensor + a tensor of audio_embeds (N, D) + + """ + device = next(self.parameters()).device + input_dict = {} + keys = data[0].keys() + for k in keys: + input_dict[k] = torch.cat([d[k].unsqueeze(0) for d in data], dim=0).to( + device + ) + + audio_embeds = self.audio_projection( + self.encode_audio(input_dict, device=device)["embedding"] + ) + audio_embeds = F.normalize(audio_embeds, dim=-1) + + return audio_embeds + + def audio_infer(self, audio, hopsize=None, device=None): + """Forward one audio and produce the audio embedding + + Parameters + ---------- + audio: (audio_length) + the time-domain audio input, notice that it must be only one input + hopsize: int + the 
overlap hopsize as the sliding window + + Returns + ---------- + output_dict: { + key: [n, (embedding_shape)] if "HTS-AT" + or + key: [(embedding_shape)] if "PANN" + } + the list of key values of the audio branch + + """ + + assert not self.training, "the inference mode must be run at eval stage" + output_dict = {} + # PANN + if self.audio_cfg.model_type == "PANN": + audio_input = audio.unsqueeze(dim=0) + output_dict[key] = self.encode_audio(audio_input, device=device)[ + key + ].squeeze(dim=0) + elif self.audio_cfg.model_type == "HTSAT": + # repeat + audio_len = len(audio) + k = self.audio_cfg.clip_samples // audio_len + if k > 1: + audio = audio.repeat(k) + audio_len = len(audio) + + if hopsize is None: + hopsize = min(hopsize, audio_len) + + if audio_len > self.audio_cfg.clip_samples: + audio_input = [ + audio[pos : pos + self.audio_cfg.clip_samples].clone() + for pos in range( + 0, audio_len - self.audio_cfg.clip_samples, hopsize + ) + ] + audio_input.append(audio[-self.audio_cfg.clip_samples :].clone()) + audio_input = torch.stack(audio_input) + output_dict[key] = self.encode_audio(audio_input, device=device)[key] + else: + audio_input = audio.unsqueeze(dim=0) + output_dict[key] = self.encode_audio(audio_input, device=device)[ + key + ].squeeze(dim=0) + + return output_dict + + +def convert_weights_to_fp16(model: nn.Module): + """Convert applicable model parameters to fp16""" + + def _convert_weights_to_fp16(l): + if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)): + l.weight.data = l.weight.data.half() + if l.bias is not None: + l.bias.data = l.bias.data.half() + + if isinstance(l, nn.MultiheadAttention): + for attr in [ + *[f"{s}_proj_weight" for s in ["in", "q", "k", "v"]], + "in_proj_bias", + "bias_k", + "bias_v", + ]: + tensor = getattr(l, attr) + if tensor is not None: + tensor.data = tensor.data.half() + + for name in ["text_projection", "proj"]: + if hasattr(l, name): + attr = getattr(l, name) + if attr is not None: + attr.data = attr.data.half() + + model.apply(_convert_weights_to_fp16) + + +# Ignore the state dict of the vision part +def build_model_from_openai_state_dict( + state_dict: dict, model_cfg, enable_fusion: bool = False, fusion_type: str = "None" +): + + embed_dim = model_cfg["embed_dim"] + audio_cfg = model_cfg["audio_cfg"] + text_cfg = model_cfg["text_cfg"] + context_length = state_dict["positional_embedding"].shape[0] + vocab_size = state_dict["token_embedding.weight"].shape[0] + transformer_width = state_dict["ln_final.weight"].shape[0] + transformer_heads = transformer_width // 64 + transformer_layers = len( + set( + k.split(".")[2] + for k in state_dict + if k.startswith(f"transformer.resblocks") + ) + ) + + audio_cfg = CLAPAudioCfp(**audio_cfg) + text_cfg = CLAPTextCfg(**text_cfg) + + model = CLAP( + embed_dim, + audio_cfg=audio_cfg, + text_cfg=text_cfg, + quick_gelu=True, # OpenAI models were trained with QuickGELU + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + state_dict["logit_scale_a"] = state_dict["logit_scale"] + state_dict["logit_scale_t"] = state_dict["logit_scale"] + pop_keys = list(state_dict.keys())[::] + # pop the visual branch saved weights + for key in pop_keys: + if key.startswith("visual."): + state_dict.pop(key, None) + + for key in ["logit_scale", "input_resolution", "context_length", "vocab_size"]: + state_dict.pop(key, None) + + # not use fp16 + # convert_weights_to_fp16(model) + model.load_state_dict(state_dict, strict=False) + return model.eval() + + +def trace_model(model, batch_size=256, device=torch.device("cpu")): 
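
The layer count in `build_model_from_openai_state_dict` above is inferred purely from state-dict key names; in isolation the idiom looks like this (toy keys):

keys = [
    "transformer.resblocks.0.attn.in_proj_weight",
    "transformer.resblocks.0.mlp.c_fc.weight",
    "transformer.resblocks.1.attn.in_proj_weight",
    "transformer.resblocks.11.mlp.c_proj.weight",
]
layers = len(set(k.split(".")[2] for k in keys if k.startswith("transformer.resblocks")))
print(layers)  # 3 distinct block indices (0, 1, 11) in this toy list
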
+ model.eval() + audio_length = model.audio_cfg.audio_length + example_audio = torch.ones((batch_size, audio_length), device=device) + example_text = torch.zeros( + (batch_size, model.context_length), dtype=torch.int, device=device + ) + model = torch.jit.trace_module( + model, + inputs=dict( + forward=(example_audio, example_text), + encode_text=(example_text,), + encode_image=(example_audio,), + ), + ) + model.audio_cfg.audio_length = audio_length # Question: what does this do? + return model diff --git a/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-base.json b/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-base.json new file mode 100644 index 0000000000000000000000000000000000000000..6cef625a89daf4431f1c9f72e10bc9640eef2ba8 --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-base.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 1024, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "HTSAT", + "model_name": "base" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-large.json b/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-large.json new file mode 100644 index 0000000000000000000000000000000000000000..699cdb1b16855582606551e4196b24aba2ffd871 --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-large.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 2048, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "HTSAT", + "model_name": "large" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-tiny-win-1536.json b/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-tiny-win-1536.json new file mode 100644 index 0000000000000000000000000000000000000000..73e42990fe8361a0df502e7f93d29f19f58c9ecb --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-tiny-win-1536.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 768, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1536, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 527, + "model_type": "HTSAT", + "model_name": "tiny" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-tiny.json b/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-tiny.json new file mode 100644 index 0000000000000000000000000000000000000000..a6e7821163d9afa81c27345a1e472475b92af169 --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/model_configs/HTSAT-tiny.json @@ -0,0 +1,23 @@ +{ + "embed_dim": 768, + "audio_cfg": { + "audio_length": 1024, + "clip_samples": 480000, + "mel_bins": 64, + "sample_rate": 48000, + "window_size": 1024, + "hop_size": 480, + "fmin": 50, + "fmax": 14000, + "class_num": 
527, + "model_type": "HTSAT", + "model_name": "tiny" + }, + "text_cfg": { + "context_length": 77, + "vocab_size": 49408, + "width": 512, + "heads": 8, + "layers": 12 + } +} \ No newline at end of file diff --git a/core/models/encoders/clap_modules/open_clip/openai.py b/core/models/encoders/clap_modules/open_clip/openai.py new file mode 100644 index 0000000000000000000000000000000000000000..3f4eb8b55fe960e1792b3da804b60b3d8f70fe26 --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/openai.py @@ -0,0 +1,156 @@ +""" OpenAI pretrained model functions + +Adapted from https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI. +""" + +import os +import warnings +from typing import Union, List + +import torch + +from .model import build_model_from_openai_state_dict +from .pretrained import ( + get_pretrained_url, + list_pretrained_tag_models, + download_pretrained, +) + +__all__ = ["list_openai_models", "load_openai_model"] + + +def list_openai_models() -> List[str]: + """Returns the names of available CLIP models""" + return list_pretrained_tag_models("openai") + + +def load_openai_model( + name: str, + model_cfg, + device: Union[str, torch.device] = "cuda" if torch.cuda.is_available() else "cpu", + jit=True, + cache_dir=os.path.expanduser("~/.cache/clip"), + enable_fusion: bool = False, + fusion_type: str = "None", +): + """Load a CLIP model, preserve its text pretrained part, and set in the CLAP model + + Parameters + ---------- + name : str + A model name listed by `clip.available_models()`, or the path to a model checkpoint containing the state_dict + device : Union[str, torch.device] + The device to put the loaded model + jit : bool + Whether to load the optimized JIT model (default) or more hackable non-JIT model. + + Returns + ------- + model : torch.nn.Module + The CLAP model + preprocess : Callable[[PIL.Image], torch.Tensor] + A torchvision transform that converts a PIL image into a tensor that the returned model can take as its input + """ + if get_pretrained_url(name, "openai"): + model_path = download_pretrained( + get_pretrained_url(name, "openai"), root=cache_dir + ) + elif os.path.isfile(name): + model_path = name + else: + raise RuntimeError( + f"Model {name} not found; available models = {list_openai_models()}" + ) + + try: + # loading JIT archive + model = torch.jit.load(model_path, map_location=device if jit else "cpu").eval() + state_dict = None + except RuntimeError: + # loading saved state dict + if jit: + warnings.warn( + f"File {model_path} is not a JIT archive. 
Loading as a state dict instead" + ) + jit = False + state_dict = torch.load(model_path, map_location="cpu") + + if not jit: + try: + model = build_model_from_openai_state_dict( + state_dict or model.state_dict(), model_cfg, enable_fusion, fusion_type + ).to(device) + except KeyError: + sd = {k[7:]: v for k, v in state_dict["state_dict"].items()} + model = build_model_from_openai_state_dict( + sd, model_cfg, enable_fusion, fusion_type + ).to(device) + + if str(device) == "cpu": + model.float() + return model + + # patch the device names + device_holder = torch.jit.trace( + lambda: torch.ones([]).to(torch.device(device)), example_inputs=[] + ) + device_node = [ + n + for n in device_holder.graph.findAllNodes("prim::Constant") + if "Device" in repr(n) + ][-1] + + def patch_device(module): + try: + graphs = [module.graph] if hasattr(module, "graph") else [] + except RuntimeError: + graphs = [] + + if hasattr(module, "forward1"): + graphs.append(module.forward1.graph) + + for graph in graphs: + for node in graph.findAllNodes("prim::Constant"): + if "value" in node.attributeNames() and str(node["value"]).startswith( + "cuda" + ): + node.copyAttributes(device_node) + + model.apply(patch_device) + patch_device(model.encode_audio) + patch_device(model.encode_text) + + # patch dtype to float32 on CPU + if str(device) == "cpu": + float_holder = torch.jit.trace( + lambda: torch.ones([]).float(), example_inputs=[] + ) + float_input = list(float_holder.graph.findNode("aten::to").inputs())[1] + float_node = float_input.node() + + def patch_float(module): + try: + graphs = [module.graph] if hasattr(module, "graph") else [] + except RuntimeError: + graphs = [] + + if hasattr(module, "forward1"): + graphs.append(module.forward1.graph) + + for graph in graphs: + for node in graph.findAllNodes("aten::to"): + inputs = list(node.inputs()) + for i in [ + 1, + 2, + ]: # dtype can be the second or third argument to aten::to() + if inputs[i].node()["value"] == 5: + inputs[i].node().copyAttributes(float_node) + + model.apply(patch_float) + patch_float(model.encode_audio) + patch_float(model.encode_text) + model.float() + + model.audio_branch.audio_length = model.audio_cfg.audio_length + return model diff --git a/core/models/encoders/clap_modules/open_clip/pann_model.py b/core/models/encoders/clap_modules/open_clip/pann_model.py new file mode 100644 index 0000000000000000000000000000000000000000..0d9a8eb0bf897ad6ec04923361b01e5de433b2ef --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/pann_model.py @@ -0,0 +1,704 @@ +# PANNs: Large-Scale Pretrained Audio Neural Networks for Audio Pattern Recognition +# Reference from https://github.com/qiuqiangkong/audioset_tagging_cnn +# Some layers are re-designed for CLAP +import os + +os.environ["NUMBA_CACHE_DIR"] = "/tmp/" + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torchlibrosa.stft import Spectrogram, LogmelFilterBank +from torchlibrosa.augmentation import SpecAugmentation + +from .utils import do_mixup, interpolate, pad_framewise_output +from .feature_fusion import iAFF, AFF, DAF + + +def init_layer(layer): + """Initialize a Linear or Convolutional layer.""" + nn.init.xavier_uniform_(layer.weight) + + if hasattr(layer, "bias"): + if layer.bias is not None: + layer.bias.data.fill_(0.0) + + +def init_bn(bn): + """Initialize a Batchnorm layer.""" + bn.bias.data.fill_(0.0) + bn.weight.data.fill_(1.0) + + +class ConvBlock(nn.Module): + def __init__(self, in_channels, out_channels): + + super(ConvBlock, self).__init__() + + 
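        # Two stacked 3x3 conv + BN + ReLU stages at constant resolution;
        # downsampling happens only via the pool_size/pool_type arguments in forward().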
self.conv1 = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=(3, 3), + stride=(1, 1), + padding=(1, 1), + bias=False, + ) + + self.conv2 = nn.Conv2d( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=(3, 3), + stride=(1, 1), + padding=(1, 1), + bias=False, + ) + + self.bn1 = nn.BatchNorm2d(out_channels) + self.bn2 = nn.BatchNorm2d(out_channels) + + self.init_weight() + + def init_weight(self): + init_layer(self.conv1) + init_layer(self.conv2) + init_bn(self.bn1) + init_bn(self.bn2) + + def forward(self, input, pool_size=(2, 2), pool_type="avg"): + + x = input + x = F.relu_(self.bn1(self.conv1(x))) + x = F.relu_(self.bn2(self.conv2(x))) + if pool_type == "max": + x = F.max_pool2d(x, kernel_size=pool_size) + elif pool_type == "avg": + x = F.avg_pool2d(x, kernel_size=pool_size) + elif pool_type == "avg+max": + x1 = F.avg_pool2d(x, kernel_size=pool_size) + x2 = F.max_pool2d(x, kernel_size=pool_size) + x = x1 + x2 + else: + raise Exception("Incorrect argument!") + + return x + + +class ConvBlock5x5(nn.Module): + def __init__(self, in_channels, out_channels): + + super(ConvBlock5x5, self).__init__() + + self.conv1 = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=(5, 5), + stride=(1, 1), + padding=(2, 2), + bias=False, + ) + + self.bn1 = nn.BatchNorm2d(out_channels) + + self.init_weight() + + def init_weight(self): + init_layer(self.conv1) + init_bn(self.bn1) + + def forward(self, input, pool_size=(2, 2), pool_type="avg"): + + x = input + x = F.relu_(self.bn1(self.conv1(x))) + if pool_type == "max": + x = F.max_pool2d(x, kernel_size=pool_size) + elif pool_type == "avg": + x = F.avg_pool2d(x, kernel_size=pool_size) + elif pool_type == "avg+max": + x1 = F.avg_pool2d(x, kernel_size=pool_size) + x2 = F.max_pool2d(x, kernel_size=pool_size) + x = x1 + x2 + else: + raise Exception("Incorrect argument!") + + return x + + +class AttBlock(nn.Module): + def __init__(self, n_in, n_out, activation="linear", temperature=1.0): + super(AttBlock, self).__init__() + + self.activation = activation + self.temperature = temperature + self.att = nn.Conv1d( + in_channels=n_in, + out_channels=n_out, + kernel_size=1, + stride=1, + padding=0, + bias=True, + ) + self.cla = nn.Conv1d( + in_channels=n_in, + out_channels=n_out, + kernel_size=1, + stride=1, + padding=0, + bias=True, + ) + + self.bn_att = nn.BatchNorm1d(n_out) + self.init_weights() + + def init_weights(self): + init_layer(self.att) + init_layer(self.cla) + init_bn(self.bn_att) + + def forward(self, x): + # x: (n_samples, n_in, n_time) + norm_att = torch.softmax(torch.clamp(self.att(x), -10, 10), dim=-1) + cla = self.nonlinear_transform(self.cla(x)) + x = torch.sum(norm_att * cla, dim=2) + return x, norm_att, cla + + def nonlinear_transform(self, x): + if self.activation == "linear": + return x + elif self.activation == "sigmoid": + return torch.sigmoid(x) + + +class Cnn14(nn.Module): + def __init__( + self, + sample_rate, + window_size, + hop_size, + mel_bins, + fmin, + fmax, + classes_num, + enable_fusion=False, + fusion_type="None", + ): + + super(Cnn14, self).__init__() + + window = "hann" + center = True + pad_mode = "reflect" + ref = 1.0 + amin = 1e-10 + top_db = None + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + # Spectrogram extractor + self.spectrogram_extractor = Spectrogram( + n_fft=window_size, + hop_length=hop_size, + win_length=window_size, + window=window, + center=center, + pad_mode=pad_mode, + freeze_parameters=True, + ) + + # 
Logmel feature extractor + self.logmel_extractor = LogmelFilterBank( + sr=sample_rate, + n_fft=window_size, + n_mels=mel_bins, + fmin=fmin, + fmax=fmax, + ref=ref, + amin=amin, + top_db=top_db, + freeze_parameters=True, + ) + + # Spec augmenter + self.spec_augmenter = SpecAugmentation( + time_drop_width=64, + time_stripes_num=2, + freq_drop_width=8, + freq_stripes_num=2, + ) + + self.bn0 = nn.BatchNorm2d(64) + + if (self.enable_fusion) and (self.fusion_type == "channel_map"): + self.conv_block1 = ConvBlock(in_channels=4, out_channels=64) + else: + self.conv_block1 = ConvBlock(in_channels=1, out_channels=64) + self.conv_block2 = ConvBlock(in_channels=64, out_channels=128) + self.conv_block3 = ConvBlock(in_channels=128, out_channels=256) + self.conv_block4 = ConvBlock(in_channels=256, out_channels=512) + self.conv_block5 = ConvBlock(in_channels=512, out_channels=1024) + self.conv_block6 = ConvBlock(in_channels=1024, out_channels=2048) + + self.fc1 = nn.Linear(2048, 2048, bias=True) + self.fc_audioset = nn.Linear(2048, classes_num, bias=True) + + if (self.enable_fusion) and ( + self.fusion_type in ["daf_1d", "aff_1d", "iaff_1d"] + ): + self.mel_conv1d = nn.Sequential( + nn.Conv1d(64, 64, kernel_size=5, stride=3, padding=2), + nn.BatchNorm1d(64), # No Relu + ) + if self.fusion_type == "daf_1d": + self.fusion_model = DAF() + elif self.fusion_type == "aff_1d": + self.fusion_model = AFF(channels=64, type="1D") + elif self.fusion_type == "iaff_1d": + self.fusion_model = iAFF(channels=64, type="1D") + + if (self.enable_fusion) and ( + self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d"] + ): + self.mel_conv2d = nn.Sequential( + nn.Conv2d(1, 64, kernel_size=(5, 5), stride=(6, 2), padding=(2, 2)), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + ) + + if self.fusion_type == "daf_2d": + self.fusion_model = DAF() + elif self.fusion_type == "aff_2d": + self.fusion_model = AFF(channels=64, type="2D") + elif self.fusion_type == "iaff_2d": + self.fusion_model = iAFF(channels=64, type="2D") + self.init_weight() + + def init_weight(self): + init_bn(self.bn0) + init_layer(self.fc1) + init_layer(self.fc_audioset) + + def forward(self, input, mixup_lambda=None, device=None): + """ + Input: (batch_size, data_length)""" + + if self.enable_fusion and input["longer"].sum() == 0: + # if no audio is longer than 10s, then randomly select one audio to be longer + input["longer"][torch.randint(0, input["longer"].shape[0], (1,))] = True + + if not self.enable_fusion: + x = self.spectrogram_extractor( + input["waveform"].to(device=device, non_blocking=True) + ) # (batch_size, 1, time_steps, freq_bins) + x = self.logmel_extractor(x) # (batch_size, 1, time_steps, mel_bins) + + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + else: + longer_list = input["longer"].to(device=device, non_blocking=True) + x = input["mel_fusion"].to(device=device, non_blocking=True) + longer_list_idx = torch.where(longer_list)[0] + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + if self.fusion_type in ["daf_1d", "aff_1d", "iaff_1d"]: + new_x = x[:, 0:1, :, :].clone().contiguous() + # local processing + if len(longer_list_idx) > 0: + fusion_x_local = x[longer_list_idx, 1:, :, :].clone().contiguous() + FB, FC, FT, FF = fusion_x_local.size() + fusion_x_local = fusion_x_local.view(FB * FC, FT, FF) + fusion_x_local = torch.permute( + fusion_x_local, (0, 2, 1) + ).contiguous() + fusion_x_local = self.mel_conv1d(fusion_x_local) + fusion_x_local = fusion_x_local.view( + FB, FC, FF, fusion_x_local.size(-1) + ) + 
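                # Shape bookkeeping for the 1D fusion path: the three extra mel chunks
                # (FB, FC=3, FT, FF=64) went through the strided Conv1d as
                # (FB*FC, FF, FT) -> (FB*FC, FF, ~FT/3) and were reshaped back to
                # (FB, FC, FF, T'); the permute/flatten below stitches the FC chunks
                # together along time, then pads/crops to the global FT frame count.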
fusion_x_local = ( + torch.permute(fusion_x_local, (0, 2, 1, 3)) + .contiguous() + .flatten(2) + ) + if fusion_x_local.size(-1) < FT: + fusion_x_local = torch.cat( + [ + fusion_x_local, + torch.zeros( + (FB, FF, FT - fusion_x_local.size(-1)), + device=device, + ), + ], + dim=-1, + ) + else: + fusion_x_local = fusion_x_local[:, :, :FT] + # 1D fusion + new_x = new_x.squeeze(1).permute((0, 2, 1)).contiguous() + new_x[longer_list_idx] = self.fusion_model( + new_x[longer_list_idx], fusion_x_local + ) + x = new_x.permute((0, 2, 1)).contiguous()[:, None, :, :] + else: + x = new_x + elif self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d", "channel_map"]: + x = x # no change + + if self.training: + x = self.spec_augmenter(x) + # Mixup on spectrogram + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + if (self.enable_fusion) and ( + self.fusion_type in ["daf_2d", "aff_2d", "iaff_2d"] + ): + global_x = x[:, 0:1, :, :] + + # global processing + B, C, H, W = global_x.shape + global_x = self.conv_block1(global_x, pool_size=(2, 2), pool_type="avg") + if len(longer_list_idx) > 0: + local_x = x[longer_list_idx, 1:, :, :].contiguous() + TH = global_x.size(-2) + # local processing + B, C, H, W = local_x.shape + local_x = local_x.view(B * C, 1, H, W) + local_x = self.mel_conv2d(local_x) + local_x = local_x.view( + B, C, local_x.size(1), local_x.size(2), local_x.size(3) + ) + local_x = local_x.permute((0, 2, 1, 3, 4)).contiguous().flatten(2, 3) + TB, TC, _, TW = local_x.size() + if local_x.size(-2) < TH: + local_x = torch.cat( + [ + local_x, + torch.zeros( + (TB, TC, TH - local_x.size(-2), TW), + device=global_x.device, + ), + ], + dim=-2, + ) + else: + local_x = local_x[:, :, :TH, :] + + global_x[longer_list_idx] = self.fusion_model( + global_x[longer_list_idx], local_x + ) + x = global_x + else: + x = self.conv_block1(x, pool_size=(2, 2), pool_type="avg") + + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block2(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block3(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block4(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block5(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block6(x, pool_size=(1, 1), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = torch.mean(x, dim=3) + + latent_x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x = latent_x1 + latent_x2 + latent_x = latent_x.transpose(1, 2) + latent_x = F.relu_(self.fc1(latent_x)) + latent_output = interpolate(latent_x, 32) + + (x1, _) = torch.max(x, dim=2) + x2 = torch.mean(x, dim=2) + x = x1 + x2 + x = F.dropout(x, p=0.5, training=self.training) + x = F.relu_(self.fc1(x)) + embedding = F.dropout(x, p=0.5, training=self.training) + clipwise_output = torch.sigmoid(self.fc_audioset(x)) + + output_dict = { + "clipwise_output": clipwise_output, + "embedding": embedding, + "fine_grained_embedding": latent_output, + } + return output_dict + + +class Cnn6(nn.Module): + def __init__( + self, + sample_rate, + window_size, + hop_size, + mel_bins, + fmin, + fmax, + classes_num, + enable_fusion=False, + fusion_type="None", + ): + + super(Cnn6, self).__init__() + + window = "hann" + center = True + pad_mode = "reflect" + ref = 1.0 + amin = 1e-10 + 
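        # Log-mel scaling constants: `ref` is the reference power for the dB conversion
        # and `amin` floors the spectrogram before the log to avoid log(0);
        # `top_db=None` (next line) keeps the full dynamic range unclipped.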
top_db = None + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + # Spectrogram extractor + self.spectrogram_extractor = Spectrogram( + n_fft=window_size, + hop_length=hop_size, + win_length=window_size, + window=window, + center=center, + pad_mode=pad_mode, + freeze_parameters=True, + ) + + # Logmel feature extractor + self.logmel_extractor = LogmelFilterBank( + sr=sample_rate, + n_fft=window_size, + n_mels=mel_bins, + fmin=fmin, + fmax=fmax, + ref=ref, + amin=amin, + top_db=top_db, + freeze_parameters=True, + ) + + # Spec augmenter + self.spec_augmenter = SpecAugmentation( + time_drop_width=64, + time_stripes_num=2, + freq_drop_width=8, + freq_stripes_num=2, + ) + + self.bn0 = nn.BatchNorm2d(64) + + self.conv_block1 = ConvBlock5x5(in_channels=1, out_channels=64) + self.conv_block2 = ConvBlock5x5(in_channels=64, out_channels=128) + self.conv_block3 = ConvBlock5x5(in_channels=128, out_channels=256) + self.conv_block4 = ConvBlock5x5(in_channels=256, out_channels=512) + + self.fc1 = nn.Linear(512, 512, bias=True) + self.fc_audioset = nn.Linear(512, classes_num, bias=True) + + self.init_weight() + + def init_weight(self): + init_bn(self.bn0) + init_layer(self.fc1) + init_layer(self.fc_audioset) + + def forward(self, input, mixup_lambda=None, device=None): + """ + Input: (batch_size, data_length)""" + + x = self.spectrogram_extractor(input) # (batch_size, 1, time_steps, freq_bins) + x = self.logmel_extractor(x) # (batch_size, 1, time_steps, mel_bins) + + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + + if self.training: + x = self.spec_augmenter(x) + + # Mixup on spectrogram + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + + x = self.conv_block1(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block2(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block3(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block4(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = torch.mean(x, dim=3) + + latent_x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x = latent_x1 + latent_x2 + latent_x = latent_x.transpose(1, 2) + latent_x = F.relu_(self.fc1(latent_x)) + latent_output = interpolate(latent_x, 16) + + (x1, _) = torch.max(x, dim=2) + x2 = torch.mean(x, dim=2) + x = x1 + x2 + x = F.dropout(x, p=0.5, training=self.training) + x = F.relu_(self.fc1(x)) + embedding = F.dropout(x, p=0.5, training=self.training) + clipwise_output = torch.sigmoid(self.fc_audioset(x)) + + output_dict = { + "clipwise_output": clipwise_output, + "embedding": embedding, + "fine_grained_embedding": latent_output, + } + + return output_dict + + +class Cnn10(nn.Module): + def __init__( + self, + sample_rate, + window_size, + hop_size, + mel_bins, + fmin, + fmax, + classes_num, + enable_fusion=False, + fusion_type="None", + ): + + super(Cnn10, self).__init__() + + window = "hann" + center = True + pad_mode = "reflect" + ref = 1.0 + amin = 1e-10 + top_db = None + + self.enable_fusion = enable_fusion + self.fusion_type = fusion_type + + # Spectrogram extractor + self.spectrogram_extractor = Spectrogram( + n_fft=window_size, + hop_length=hop_size, + win_length=window_size, + window=window, + center=center, + pad_mode=pad_mode, + freeze_parameters=True, + ) + + # Logmel feature 
extractor + self.logmel_extractor = LogmelFilterBank( + sr=sample_rate, + n_fft=window_size, + n_mels=mel_bins, + fmin=fmin, + fmax=fmax, + ref=ref, + amin=amin, + top_db=top_db, + freeze_parameters=True, + ) + + # Spec augmenter + self.spec_augmenter = SpecAugmentation( + time_drop_width=64, + time_stripes_num=2, + freq_drop_width=8, + freq_stripes_num=2, + ) + + self.bn0 = nn.BatchNorm2d(64) + + self.conv_block1 = ConvBlock(in_channels=1, out_channels=64) + self.conv_block2 = ConvBlock(in_channels=64, out_channels=128) + self.conv_block3 = ConvBlock(in_channels=128, out_channels=256) + self.conv_block4 = ConvBlock(in_channels=256, out_channels=512) + self.conv_block5 = ConvBlock(in_channels=512, out_channels=1024) + + self.fc1 = nn.Linear(1024, 1024, bias=True) + self.fc_audioset = nn.Linear(1024, classes_num, bias=True) + + self.init_weight() + + def init_weight(self): + init_bn(self.bn0) + init_layer(self.fc1) + init_layer(self.fc_audioset) + + def forward(self, input, mixup_lambda=None, device=None): + """ + Input: (batch_size, data_length)""" + + x = self.spectrogram_extractor(input) # (batch_size, 1, time_steps, freq_bins) + x = self.logmel_extractor(x) # (batch_size, 1, time_steps, mel_bins) + + x = x.transpose(1, 3) + x = self.bn0(x) + x = x.transpose(1, 3) + + if self.training: + x = self.spec_augmenter(x) + + # Mixup on spectrogram + if self.training and mixup_lambda is not None: + x = do_mixup(x, mixup_lambda) + + x = self.conv_block1(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block2(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block3(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block4(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = self.conv_block5(x, pool_size=(2, 2), pool_type="avg") + x = F.dropout(x, p=0.2, training=self.training) + x = torch.mean(x, dim=3) + + latent_x1 = F.max_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x2 = F.avg_pool1d(x, kernel_size=3, stride=1, padding=1) + latent_x = latent_x1 + latent_x2 + latent_x = latent_x.transpose(1, 2) + latent_x = F.relu_(self.fc1(latent_x)) + latent_output = interpolate(latent_x, 32) + + (x1, _) = torch.max(x, dim=2) + x2 = torch.mean(x, dim=2) + x = x1 + x2 + x = F.dropout(x, p=0.5, training=self.training) + x = F.relu_(self.fc1(x)) + embedding = F.dropout(x, p=0.5, training=self.training) + clipwise_output = torch.sigmoid(self.fc_audioset(x)) + + output_dict = { + "clipwise_output": clipwise_output, + "embedding": embedding, + "fine_grained_embedding": latent_output, + } + + return output_dict + + +def create_pann_model(audio_cfg, enable_fusion=False, fusion_type="None"): + try: + ModelProto = eval(audio_cfg.model_name) + model = ModelProto( + sample_rate=audio_cfg.sample_rate, + window_size=audio_cfg.window_size, + hop_size=audio_cfg.hop_size, + mel_bins=audio_cfg.mel_bins, + fmin=audio_cfg.fmin, + fmax=audio_cfg.fmax, + classes_num=audio_cfg.class_num, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + return model + except: + raise RuntimeError( + f"Import Model for {audio_cfg.model_name} not found, or the audio cfg parameters are not enough." 
+ ) diff --git a/core/models/encoders/clap_modules/open_clip/pretrained.py b/core/models/encoders/clap_modules/open_clip/pretrained.py new file mode 100644 index 0000000000000000000000000000000000000000..e211d8b5b59320a599e62605f1dee6199f317253 --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/pretrained.py @@ -0,0 +1,167 @@ +import hashlib +import os +import urllib +import warnings + +from tqdm import tqdm + +_RN50 = dict( + openai="https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt", + yfcc15m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-yfcc15m-455df137.pt", + cc12m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-cc12m-f000538c.pt", +) + +_RN50_quickgelu = dict( + openai="https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt", + yfcc15m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-yfcc15m-455df137.pt", + cc12m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn50-quickgelu-cc12m-f000538c.pt", +) + +_RN101 = dict( + openai="https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt", + yfcc15m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn101-quickgelu-yfcc15m-3e04b30e.pt", +) + +_RN101_quickgelu = dict( + openai="https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt", + yfcc15m="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/rn101-quickgelu-yfcc15m-3e04b30e.pt", +) + +_RN50x4 = dict( + openai="https://openaipublic.azureedge.net/clip/models/7e526bd135e493cef0776de27d5f42653e6b4c8bf9e0f653bb11773263205fdd/RN50x4.pt", +) + +_RN50x16 = dict( + openai="https://openaipublic.azureedge.net/clip/models/52378b407f34354e150460fe41077663dd5b39c54cd0bfd2b27167a4a06ec9aa/RN50x16.pt", +) + +_RN50x64 = dict( + openai="https://openaipublic.azureedge.net/clip/models/be1cfb55d75a9666199fb2206c106743da0f6468c9d327f3e0d0a543a9919d9c/RN50x64.pt", +) + +_VITB32 = dict( + openai="https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt", + laion400m_e31="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e31-d867053b.pt", + laion400m_e32="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e32-46683a32.pt", + laion400m_avg="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_avg-8a00ab3c.pt", +) + +_VITB32_quickgelu = dict( + openai="https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt", + laion400m_e31="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e31-d867053b.pt", + laion400m_e32="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_e32-46683a32.pt", + laion400m_avg="https://github.com/mlfoundations/open_clip/releases/download/v0.2-weights/vit_b_32-quickgelu-laion400m_avg-8a00ab3c.pt", +) + +_VITB16 = dict( + 
openai="https://openaipublic.azureedge.net/clip/models/5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f/ViT-B-16.pt", +) + +_VITL14 = dict( + openai="https://openaipublic.azureedge.net/clip/models/b8cca3fd41ae0c99ba7e8951adf17d267cdb84cd88be6f7c2e0eca1737a03836/ViT-L-14.pt", +) + +_PRETRAINED = { + "RN50": _RN50, + "RN50-quickgelu": _RN50_quickgelu, + "RN101": _RN101, + "RN101-quickgelu": _RN101_quickgelu, + "RN50x4": _RN50x4, + "RN50x16": _RN50x16, + "ViT-B-32": _VITB32, + "ViT-B-32-quickgelu": _VITB32_quickgelu, + "ViT-B-16": _VITB16, + "ViT-L-14": _VITL14, +} + + +def list_pretrained(as_str: bool = False): + """returns list of pretrained models + Returns a tuple (model_name, pretrain_tag) by default or 'name:tag' if as_str == True + """ + return [ + ":".join([k, t]) if as_str else (k, t) + for k in _PRETRAINED.keys() + for t in _PRETRAINED[k].keys() + ] + + +def list_pretrained_tag_models(tag: str): + """return all models having the specified pretrain tag""" + models = [] + for k in _PRETRAINED.keys(): + if tag in _PRETRAINED[k]: + models.append(k) + return models + + +def list_pretrained_model_tags(model: str): + """return all pretrain tags for the specified model architecture""" + tags = [] + if model in _PRETRAINED: + tags.extend(_PRETRAINED[model].keys()) + return tags + + +def get_pretrained_url(model: str, tag: str): + if model not in _PRETRAINED: + return "" + model_pretrained = _PRETRAINED[model] + if tag not in model_pretrained: + return "" + return model_pretrained[tag] + + +def download_pretrained(url: str, root: str = os.path.expanduser("~/.cache/clip")): + os.makedirs(root, exist_ok=True) + filename = os.path.basename(url) + + if "openaipublic" in url: + expected_sha256 = url.split("/")[-2] + else: + expected_sha256 = "" + + download_target = os.path.join(root, filename) + + if os.path.exists(download_target) and not os.path.isfile(download_target): + raise RuntimeError(f"{download_target} exists and is not a regular file") + + if os.path.isfile(download_target): + if expected_sha256: + if ( + hashlib.sha256(open(download_target, "rb").read()).hexdigest() + == expected_sha256 + ): + return download_target + else: + warnings.warn( + f"{download_target} exists, but the SHA256 checksum does not match; re-downloading the file" + ) + else: + return download_target + + with urllib.request.urlopen(url) as source, open(download_target, "wb") as output: + with tqdm( + total=int(source.info().get("Content-Length")), + ncols=80, + unit="iB", + unit_scale=True, + ) as loop: + while True: + buffer = source.read(8192) + if not buffer: + break + + output.write(buffer) + loop.update(len(buffer)) + + if ( + expected_sha256 + and hashlib.sha256(open(download_target, "rb").read()).hexdigest() + != expected_sha256 + ): + raise RuntimeError( + f"Model has been downloaded but the SHA256 checksum does not not match" + ) + + return download_target diff --git a/core/models/encoders/clap_modules/open_clip/timm_model.py b/core/models/encoders/clap_modules/open_clip/timm_model.py new file mode 100644 index 0000000000000000000000000000000000000000..c9d1ab4666b5bab5038d44b90c9ddca5087de460 --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/timm_model.py @@ -0,0 +1,112 @@ +""" timm model adapter + +Wraps timm (https://github.com/rwightman/pytorch-image-models) models for use as a vision tower in CLIP model. 
+""" +from collections import OrderedDict + +import torch.nn as nn + +try: + import timm + from timm.models.layers import Mlp, to_2tuple + from timm.models.layers.attention_pool2d import RotAttentionPool2d + from timm.models.layers.attention_pool2d import ( + AttentionPool2d as AbsAttentionPool2d, + ) +except ImportError as e: + timm = None + +from .utils import freeze_batch_norm_2d + + +class TimmModel(nn.Module): + """timm model adapter + # FIXME this adapter is a work in progress, may change in ways that break weight compat + """ + + def __init__( + self, + model_name, + embed_dim, + image_size=224, + pool="avg", + proj="linear", + drop=0.0, + pretrained=False, + ): + super().__init__() + if timm is None: + raise RuntimeError("Please `pip install timm` to use timm models.") + + self.image_size = to_2tuple(image_size) + self.trunk = timm.create_model(model_name, pretrained=pretrained) + feat_size = self.trunk.default_cfg.get("pool_size", None) + feature_ndim = 1 if not feat_size else 2 + if pool in ("abs_attn", "rot_attn"): + assert feature_ndim == 2 + # if attn pooling used, remove both classifier and default pool + self.trunk.reset_classifier(0, global_pool="") + else: + # reset global pool if pool config set, otherwise leave as network default + reset_kwargs = dict(global_pool=pool) if pool else {} + self.trunk.reset_classifier(0, **reset_kwargs) + prev_chs = self.trunk.num_features + + head_layers = OrderedDict() + if pool == "abs_attn": + head_layers["pool"] = AbsAttentionPool2d( + prev_chs, feat_size=feat_size, out_features=embed_dim + ) + prev_chs = embed_dim + elif pool == "rot_attn": + head_layers["pool"] = RotAttentionPool2d(prev_chs, out_features=embed_dim) + prev_chs = embed_dim + else: + assert proj, "projection layer needed if non-attention pooling is used." 
+ + # NOTE attention pool ends with a projection layer, so proj should usually be set to '' if such pooling is used + if proj == "linear": + head_layers["drop"] = nn.Dropout(drop) + head_layers["proj"] = nn.Linear(prev_chs, embed_dim) + elif proj == "mlp": + head_layers["mlp"] = Mlp(prev_chs, 2 * embed_dim, embed_dim, drop=drop) + + self.head = nn.Sequential(head_layers) + + def lock(self, unlocked_groups=0, freeze_bn_stats=False): + """lock modules + Args: + unlocked_groups (int): leave last n layer groups unlocked (default: 0) + """ + if not unlocked_groups: + # lock full model + for param in self.trunk.parameters(): + param.requires_grad = False + if freeze_bn_stats: + freeze_batch_norm_2d(self.trunk) + else: + # NOTE: partial freeze requires latest timm (master) branch and is subject to change + try: + # FIXME import here until API stable and in an official release + from timm.models.helpers import group_parameters, group_modules + except ImportError: + raise RuntimeError( + "Please install latest timm `pip install git+https://github.com/rwightman/pytorch-image-models`" + ) + matcher = self.trunk.group_matcher() + gparams = group_parameters(self.trunk, matcher) + max_layer_id = max(gparams.keys()) + max_layer_id = max_layer_id - unlocked_groups + for group_idx in range(max_layer_id + 1): + group = gparams[group_idx] + for param in group: + self.trunk.get_parameter(param).requires_grad = False + if freeze_bn_stats: + gmodules = group_modules(self.trunk, matcher, reverse=True) + gmodules = {k for k, v in gmodules.items() if v <= max_layer_id} + freeze_batch_norm_2d(self.trunk, gmodules) + + def forward(self, x): + x = self.trunk(x) + x = self.head(x) + return x diff --git a/core/models/encoders/clap_modules/open_clip/tokenizer.py b/core/models/encoders/clap_modules/open_clip/tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..ee4d28450ec5dd12a79daf38cf3088e9e73c2cd5 --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/tokenizer.py @@ -0,0 +1,197 @@ +""" CLIP tokenizer + +Copied from https://github.com/openai/CLIP. Originally MIT License, Copyright (c) 2021 OpenAI. +""" +import gzip +import html +import os +from functools import lru_cache +from typing import Union, List + +import ftfy +import regex as re +import torch + + +@lru_cache() +def default_bpe(): + return os.path.join( + os.path.dirname(os.path.abspath(__file__)), "bpe_simple_vocab_16e6.txt.gz" + ) + + +@lru_cache() +def bytes_to_unicode(): + """ + Returns list of utf-8 byte and a corresponding list of unicode strings. + The reversible bpe codes work on unicode strings. + This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. + When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. + This is a signficant percentage of your normal, say, 32K bpe vocab. + To avoid that, we want lookup tables between utf-8 bytes and unicode strings. + And avoids mapping to whitespace/control characters the bpe code barfs on. + """ + bs = ( + list(range(ord("!"), ord("~") + 1)) + + list(range(ord("¡"), ord("¬") + 1)) + + list(range(ord("®"), ord("ÿ") + 1)) + ) + cs = bs[:] + n = 0 + for b in range(2**8): + if b not in bs: + bs.append(b) + cs.append(2**8 + n) + n += 1 + cs = [chr(n) for n in cs] + return dict(zip(bs, cs)) + + +def get_pairs(word): + """Return set of symbol pairs in a word. + Word is represented as tuple of symbols (symbols being variable-length strings). 
+ """ + pairs = set() + prev_char = word[0] + for char in word[1:]: + pairs.add((prev_char, char)) + prev_char = char + return pairs + + +def basic_clean(text): + text = ftfy.fix_text(text) + text = html.unescape(html.unescape(text)) + return text.strip() + + +def whitespace_clean(text): + text = re.sub(r"\s+", " ", text) + text = text.strip() + return text + + +class SimpleTokenizer(object): + def __init__(self, bpe_path: str = default_bpe(), special_tokens=None): + self.byte_encoder = bytes_to_unicode() + self.byte_decoder = {v: k for k, v in self.byte_encoder.items()} + merges = gzip.open(bpe_path).read().decode("utf-8").split("\n") + merges = merges[1 : 49152 - 256 - 2 + 1] + merges = [tuple(merge.split()) for merge in merges] + vocab = list(bytes_to_unicode().values()) + vocab = vocab + [v + "" for v in vocab] + for merge in merges: + vocab.append("".join(merge)) + if not special_tokens: + special_tokens = ["", ""] + else: + special_tokens = ["", ""] + special_tokens + vocab.extend(special_tokens) + self.encoder = dict(zip(vocab, range(len(vocab)))) + self.decoder = {v: k for k, v in self.encoder.items()} + self.bpe_ranks = dict(zip(merges, range(len(merges)))) + self.cache = {t: t for t in special_tokens} + special = "|".join(special_tokens) + self.pat = re.compile( + special + r"""|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+""", + re.IGNORECASE, + ) + + self.vocab_size = len(self.encoder) + self.all_special_ids = [self.encoder[t] for t in special_tokens] + + def bpe(self, token): + if token in self.cache: + return self.cache[token] + word = tuple(token[:-1]) + (token[-1] + "",) + pairs = get_pairs(word) + + if not pairs: + return token + "" + + while True: + bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(pair, float("inf"))) + if bigram not in self.bpe_ranks: + break + first, second = bigram + new_word = [] + i = 0 + while i < len(word): + try: + j = word.index(first, i) + new_word.extend(word[i:j]) + i = j + except: + new_word.extend(word[i:]) + break + + if word[i] == first and i < len(word) - 1 and word[i + 1] == second: + new_word.append(first + second) + i += 2 + else: + new_word.append(word[i]) + i += 1 + new_word = tuple(new_word) + word = new_word + if len(word) == 1: + break + else: + pairs = get_pairs(word) + word = " ".join(word) + self.cache[token] = word + return word + + def encode(self, text): + bpe_tokens = [] + text = whitespace_clean(basic_clean(text)).lower() + for token in re.findall(self.pat, text): + token = "".join(self.byte_encoder[b] for b in token.encode("utf-8")) + bpe_tokens.extend( + self.encoder[bpe_token] for bpe_token in self.bpe(token).split(" ") + ) + return bpe_tokens + + def decode(self, tokens): + text = "".join([self.decoder[token] for token in tokens]) + text = ( + bytearray([self.byte_decoder[c] for c in text]) + .decode("utf-8", errors="replace") + .replace("", " ") + ) + return text + + +_tokenizer = SimpleTokenizer() + + +def tokenize( + texts: Union[str, List[str]], context_length: int = 77 +) -> torch.LongTensor: + """ + Returns the tokenized representation of given input string(s) + + Parameters + ---------- + texts : Union[str, List[str]] + An input string or a list of input strings to tokenize + context_length : int + The context length to use; all CLIP models use 77 as the context length + + Returns + ------- + A two-dimensional tensor containing the resulting tokens, shape = [number of input strings, context_length] + """ + if isinstance(texts, str): + texts = [texts] + + sot_token = _tokenizer.encoder[""] + 
def tokenize(
    texts: Union[str, List[str]], context_length: int = 77
) -> torch.LongTensor:
    """
    Returns the tokenized representation of given input string(s)

    Parameters
    ----------
    texts : Union[str, List[str]]
        An input string or a list of input strings to tokenize
    context_length : int
        The context length to use; all CLIP models use 77 as the context length

    Returns
    -------
    A two-dimensional tensor containing the resulting tokens, shape = [number of input strings, context_length]
    """
    if isinstance(texts, str):
        texts = [texts]

    sot_token = _tokenizer.encoder["<start_of_text>"]
    eot_token = _tokenizer.encoder["<end_of_text>"]
    all_tokens = [[sot_token] + _tokenizer.encode(text) + [eot_token] for text in texts]
    result = torch.zeros(len(all_tokens), context_length, dtype=torch.long)

    for i, tokens in enumerate(all_tokens):
        if len(tokens) > context_length:
            tokens = tokens[:context_length]  # Truncate
        result[i, : len(tokens)] = torch.tensor(tokens)

    return result
diff --git a/core/models/encoders/clap_modules/open_clip/transform.py b/core/models/encoders/clap_modules/open_clip/transform.py
new file mode 100644
index 0000000000000000000000000000000000000000..77aaa722c4a5544ac50de6df35d3e922f63b111d
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/transform.py
@@ -0,0 +1,45 @@
from torchvision.transforms import (
    Normalize,
    Compose,
    RandomResizedCrop,
    InterpolationMode,
    ToTensor,
    Resize,
    CenterCrop,
)


def _convert_to_rgb(image):
    return image.convert("RGB")


def image_transform(
    image_size: int,
    is_train: bool,
    mean=(0.48145466, 0.4578275, 0.40821073),
    std=(0.26862954, 0.26130258, 0.27577711),
):
    normalize = Normalize(mean=mean, std=std)
    if is_train:
        return Compose(
            [
                RandomResizedCrop(
                    image_size,
                    scale=(0.9, 1.0),
                    interpolation=InterpolationMode.BICUBIC,
                ),
                _convert_to_rgb,
                ToTensor(),
                normalize,
            ]
        )
    else:
        return Compose(
            [
                Resize(image_size, interpolation=InterpolationMode.BICUBIC),
                CenterCrop(image_size),
                _convert_to_rgb,
                ToTensor(),
                normalize,
            ]
        )
diff --git a/core/models/encoders/clap_modules/open_clip/utils.py b/core/models/encoders/clap_modules/open_clip/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..34ecbced4cb7e6b6f92154a666e2c7efc7c922c6
--- /dev/null
+++ b/core/models/encoders/clap_modules/open_clip/utils.py
@@ -0,0 +1,362 @@
import numpy as np
import torch
from torch import nn as nn
from torchvision.ops.misc import FrozenBatchNorm2d
import logging

# import h5py
from tqdm import tqdm
import random
import json
import os
import pathlib

# TODO: (yusong) this is not a good place to store this information and it does not scale; needs to be fixed later.
dataset_split = {
    "audiocaps": ["train", "valid", "test"],
    "audioset": ["balanced_train", "unbalanced_train", "eval"],
    "BBCSoundEffects": ["train", "test"],
    "Clotho": ["train", "test", "valid"],
    "free_to_use_sounds": ["train", "test"],
    "paramount_motion": ["train", "test"],
    "sonniss_game_effects": ["train", "test"],
    "wesoundeffects": ["train", "test"],
    "MACS": ["train", "test"],
    "freesound": ["train", "test"],
    "FSD50K": ["train", "test", "valid"],
    "fsd50k_class_label": ["train", "test", "valid"],
    "esc50": ["train", "test"],
    "audiostock": ["train", "test"],
    "freesound_no_overlap_noesc50": ["train", "test"],
    "epidemic_sound_effects": ["train", "test"],
    "VGGSound": ["train", "test"],
    "urbansound8k_class_label": ["train", "test"],
    "audioset_t5": ["balanced_train", "unbalanced_train", "eval"],
    "epidemic_sound_effects_t5": ["train", "test"],
    "WavText5K": ["train", "test"],
    "esc50_no_overlap": ["train", "test"],
    "usd8k_no_overlap": ["train", "test"],
    "fsd50k_200_class_label": ["train", "test", "valid"],
}


def freeze_batch_norm_2d(module, module_match={}, name=""):
    """
    Converts all `BatchNorm2d` and `SyncBatchNorm` layers of the provided module into `FrozenBatchNorm2d`. If `module` is
    itself an instance of either `BatchNorm2d` or `SyncBatchNorm`, it is converted into `FrozenBatchNorm2d` and returned.
    Otherwise, the module is walked recursively and submodules are converted in place.

    Args:
        module (torch.nn.Module): Any PyTorch module.
        module_match (dict): Dictionary of full module names to freeze (all if empty)
        name (str): Full module name (prefix)

    Returns:
        torch.nn.Module: Resulting module

    Inspired by https://github.com/pytorch/pytorch/blob/a5895f85be0f10212791145bfedc0261d364f103/torch/nn/modules/batchnorm.py#L762
    """
    res = module
    is_match = True
    if module_match:
        is_match = name in module_match
    if is_match and isinstance(
        module, (nn.modules.batchnorm.BatchNorm2d, nn.modules.batchnorm.SyncBatchNorm)
    ):
        res = FrozenBatchNorm2d(module.num_features)
        res.num_features = module.num_features
        res.affine = module.affine
        if module.affine:
            res.weight.data = module.weight.data.clone().detach()
            res.bias.data = module.bias.data.clone().detach()
        res.running_mean.data = module.running_mean.data
        res.running_var.data = module.running_var.data
        res.eps = module.eps
    else:
        for child_name, child in module.named_children():
            full_child_name = ".".join([name, child_name]) if name else child_name
            new_child = freeze_batch_norm_2d(child, module_match, full_child_name)
            if new_child is not child:
                res.add_module(child_name, new_child)
    return res


def exist(dataset_name, dataset_type):
    """
    Check whether a split exists for the given dataset.
    """
    return dataset_type in dataset_split[dataset_name]


def get_tar_path_from_dataset_name(
    dataset_names, dataset_types, islocal, dataset_path, proportion=1, full_dataset=None
):
    """
    Get tar paths from dataset names and types.
    """
    output = []
    for n in dataset_names:
        if full_dataset is not None and n in full_dataset:
            current_dataset_types = dataset_split[n]
        else:
            current_dataset_types = dataset_types
        for s in current_dataset_types:
            tmp = []
            if islocal:
                sizefilepath_ = f"{dataset_path}/{n}/{s}/sizes.json"
                if not os.path.exists(sizefilepath_):
                    sizefilepath_ = f"./json_files/{n}/{s}/sizes.json"
            else:
                sizefilepath_ = f"./json_files/{n}/{s}/sizes.json"
            if not os.path.exists(sizefilepath_):
                continue
            sizes = json.load(open(sizefilepath_, "r"))
            for k in sizes.keys():
                if islocal:
                    tmp.append(f"{dataset_path}/{n}/{s}/{k}")
                else:
                    tmp.append(
                        f"pipe:aws s3 --cli-connect-timeout 0 cp s3://s-laion-audio/webdataset_tar/{n}/{s}/{k} -"
                    )
            if proportion != 1:
                tmp = random.sample(tmp, int(proportion * len(tmp)))
            output.append(tmp)
    return sum(output, [])


def get_tar_path_from_txts(txt_path, islocal, proportion=1):
    """
    Get tar paths from a txt file (or a list of txt files).
    """
    if isinstance(txt_path, (list, tuple)):
        return sum(
            [
                get_tar_path_from_txts(
                    txt_path[i], islocal=islocal, proportion=proportion
                )
                for i in range(len(txt_path))
            ],
            [],
        )
    if isinstance(txt_path, str):
        with open(txt_path) as f:
            lines = f.readlines()
        if islocal:
            lines = [
                lines[i]
                .split("\n")[0]
                .replace("pipe:aws s3 cp s3://s-laion-audio/", "/mnt/audio_clip/")
                for i in range(len(lines))
            ]
        else:
            lines = [
                lines[i].split("\n")[0].replace(".tar", ".tar -")
                for i in range(len(lines))
            ]
        if proportion != 1:
            print("Sampling tars with proportion of {}".format(proportion))
            lines = random.sample(lines, int(proportion * len(lines)))
        return lines


def get_mix_lambda(mixup_alpha, batch_size):
    mixup_lambdas = [
        np.random.beta(mixup_alpha, mixup_alpha, 1)[0] for _ in range(batch_size)
    ]
    return np.array(mixup_lambdas).astype(np.float32)
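`get_mix_lambda` above and `do_mixup` just below pair up as batch-level mixup: per-sample Beta coefficients blend each sample with the batch in reverse order. A minimal sketch with assumed toy shapes:

```python
import numpy as np
import torch

x = torch.arange(4.0).view(4, 1).repeat(1, 3)   # toy batch, shape (4, 3)
lam = torch.from_numpy(get_mix_lambda(0.5, 4))  # four Beta(0.5, 0.5) draws, shape (4,)

mixed = do_mixup(x, lam)
# row i == lam[i] * x[i] + (1 - lam[i]) * x[3 - i], because the batch is flipped
```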
def do_mixup(x, mixup_lambda):
    """
    Args:
        x: (batch_size, ...)
        mixup_lambda: (batch_size,)
    Returns:
        out: (batch_size, ...)
    """
    out = (
        x.transpose(0, -1) * mixup_lambda
        + torch.flip(x, dims=[0]).transpose(0, -1) * (1 - mixup_lambda)
    ).transpose(0, -1)
    return out


def interpolate(x, ratio):
    """Interpolate data in the time domain. This is used to compensate for the
    resolution reduction caused by downsampling in a CNN.

    Args:
        x: (batch_size, time_steps, classes_num)
        ratio: int, ratio to interpolate
    Returns:
        upsampled: (batch_size, time_steps * ratio, classes_num)
    """
    (batch_size, time_steps, classes_num) = x.shape
    upsampled = x[:, :, None, :].repeat(1, 1, ratio, 1)
    upsampled = upsampled.reshape(batch_size, time_steps * ratio, classes_num)
    return upsampled


def pad_framewise_output(framewise_output, frames_num):
    """Pad framewise_output to the same length as input frames. The pad value
    is the same as the value of the last frame.
    Args:
        framewise_output: (batch_size, frames_num, classes_num)
        frames_num: int, number of frames to pad to
    Outputs:
        output: (batch_size, frames_num, classes_num)
    """
    # repeat the last frame to fill the missing tail
    pad = framewise_output[:, -1:, :].repeat(
        1, frames_num - framewise_output.shape[1], 1
    )
    output = torch.cat((framewise_output, pad), dim=1)  # (batch_size, frames_num, classes_num)
    return output


# def process_ipc(index_path, classes_num, filename):
#     # load data
#     logging.info("Load Data...............")
#     ipc = [[] for _ in range(classes_num)]
#     with h5py.File(index_path, "r") as f:
#         for i in tqdm(range(len(f["target"]))):
#             t_class = np.where(f["target"][i])[0]
#             for t in t_class:
#                 ipc[t].append(i)
#     print(ipc)
#     np.save(filename, ipc)
#     logging.info("Load Data Succeed...............")


def save_to_dict(s, o_=None):
    # avoid a shared mutable default dict across calls
    if o_ is None:
        o_ = {}
    sp = s.split(": ")
    o_.update({sp[0]: float(sp[1])})
    return o_


def get_data_from_log(txt_path):
    """
    Build train/val dictionaries from an out.txt log file.
    """
    with open(txt_path) as f:
        lines = f.readlines()
    val_data = {}
    train_data = {}
    train_losses = []
    train_losses_epoch = []
    for i in range(len(lines)):
        if "| INFO |" in lines[i]:
            if "Eval Epoch" in lines[i]:
                if "val_loss" in lines[i]:
                    line = lines[i].split("Eval Epoch: ")[-1]
                    num_epoch = int(line.split(" ")[0].split(" ")[0])
                    d = {
                        line.split(" ")[0]
                        .split(" ")[1]
                        .replace(":", ""): float(line.split(" ")[0].split(" ")[-1])
                    }
                    # use a separate index so the outer loop variable is not clobbered
                    for j in range(1, len(line.split(" "))):
                        d = save_to_dict(line.split(" ")[j], d)
                    val_data[num_epoch] = d
            elif "Train Epoch" in lines[i]:
                num_epoch = int(lines[i].split("Train Epoch: ")[1][0])
                loss = float(lines[i].split("Loss: ")[-1].split(" (")[0])
                train_losses.append(loss)
                train_losses_epoch.append(num_epoch)
    for i in range(len(train_losses)):
        train_data[i] = {
            "num_epoch": train_losses_epoch[i],
            "train_loss": train_losses[i],
        }
    return train_data, val_data


def save_p(obj, filename):
    import pickle

    try:
        from deepdiff import DeepDiff
    except ImportError:
        os.system("pip install deepdiff")
        from deepdiff import DeepDiff
    with open(filename, "wb") as file:
        pickle.dump(obj, file, protocol=pickle.HIGHEST_PROTOCOL)  # highest protocol
    with open(filename, "rb") as
file: + z = pickle.load(file) + return z + + +def save_json(data, name="data.json"): + import json + + with open(name, "w") as fp: + json.dump(data, fp) + return + + +def load_json(name): + import json + + with open(name, "r") as fp: + data = json.load(fp) + return data + + +from multiprocessing import Process, Manager +from multiprocessing import Process, Value, Array +from ctypes import c_wchar + + +def load_class_label(path): + # https://stackoverflow.com/questions/48004243/how-to-share-large-read-only-dictionary-list-across-processes-in-multiprocessing + # https://stackoverflow.com/questions/45693949/storing-strings-in-a-multiprocessing-sharedctypes-array + out = None + if path is not None: + if pathlib.Path(path).suffix in [".pkl", ".pickle"]: + out = load_p(path) + elif pathlib.Path(path).suffix in [".json", ".txt"]: + out = load_json(path) + elif pathlib.Path(path).suffix in [".npy", ".npz"]: + out = np.load(path) + elif pathlib.Path(path).suffix in [".csv"]: + import pandas as pd + + out = pd.read_csv(path) + return out + # if out is None: + # return None + # else: + # key = Array(c_wchar, '\n'.join(list(out.keys())), lock=False) + # val = Array('i', out.values(), lock=False) + # return (key, val) + + +from torch import optim + + +def get_optimizer(params, lr, betas, eps, momentum, optimizer_name): + if optimizer_name.lower() == "adamw": + optimizer = optim.AdamW(params, lr=lr, betas=betas, eps=eps) + elif optimizer_name.lower() == "sgd": + optimizer = optim.SGD(params, lr=lr, momentum=momentum) + elif optimizer_name.lower() == "adam": + optimizer = optim.Adam(params, lr=lr, betas=betas, eps=eps) + else: + raise ValueError("optimizer name is not correct") + return optimizer diff --git a/core/models/encoders/clap_modules/open_clip/version.py b/core/models/encoders/clap_modules/open_clip/version.py new file mode 100644 index 0000000000000000000000000000000000000000..3ced3581bb601ae91b1e1da4b8f4f520855a065e --- /dev/null +++ b/core/models/encoders/clap_modules/open_clip/version.py @@ -0,0 +1 @@ +__version__ = "0.2.1" diff --git a/core/models/encoders/clap_modules/training/__init__.py b/core/models/encoders/clap_modules/training/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/core/models/encoders/clap_modules/training/__pycache__/__init__.cpython-311.pyc b/core/models/encoders/clap_modules/training/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b1bc69ac289d14b53ffd4a8fcc3ee581a1379017 Binary files /dev/null and b/core/models/encoders/clap_modules/training/__pycache__/__init__.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/training/__pycache__/__init__.cpython-38.pyc b/core/models/encoders/clap_modules/training/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d1c505cd656ce476dc3b7028d7b9d83def19daca Binary files /dev/null and b/core/models/encoders/clap_modules/training/__pycache__/__init__.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/training/__pycache__/data.cpython-311.pyc b/core/models/encoders/clap_modules/training/__pycache__/data.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..18c9bf737ef6c127df44cc01ab1fd29eb0e73e63 Binary files /dev/null and b/core/models/encoders/clap_modules/training/__pycache__/data.cpython-311.pyc differ diff --git 
a/core/models/encoders/clap_modules/training/__pycache__/data.cpython-38.pyc b/core/models/encoders/clap_modules/training/__pycache__/data.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..127e34f4aed420c185037070c05edf5d919c9f8c Binary files /dev/null and b/core/models/encoders/clap_modules/training/__pycache__/data.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/training/__pycache__/params.cpython-311.pyc b/core/models/encoders/clap_modules/training/__pycache__/params.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d94e7623885ddf394da6d24d7aac9e7d55e05b9a Binary files /dev/null and b/core/models/encoders/clap_modules/training/__pycache__/params.cpython-311.pyc differ diff --git a/core/models/encoders/clap_modules/training/__pycache__/params.cpython-38.pyc b/core/models/encoders/clap_modules/training/__pycache__/params.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..27b910b2df36a75b0a8e272578d2dfae161956d7 Binary files /dev/null and b/core/models/encoders/clap_modules/training/__pycache__/params.cpython-38.pyc differ diff --git a/core/models/encoders/clap_modules/training/audioset_textmap.npy b/core/models/encoders/clap_modules/training/audioset_textmap.npy new file mode 100644 index 0000000000000000000000000000000000000000..3da4c92d3819aaec11e5f576464a9973a6df811b --- /dev/null +++ b/core/models/encoders/clap_modules/training/audioset_textmap.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bada103070d92f9eadd33e1b4f45ec8583f59080ef218c966b43294bd4c86d5b +size 84448 diff --git a/core/models/encoders/clap_modules/training/data.py b/core/models/encoders/clap_modules/training/data.py new file mode 100644 index 0000000000000000000000000000000000000000..5d9f7a51bebfcdea5561cbf3c856047ccad079e6 --- /dev/null +++ b/core/models/encoders/clap_modules/training/data.py @@ -0,0 +1,813 @@ +import ast +import json +import logging +import math +import os +import random + +# import h5py +from dataclasses import dataclass +from .params import parse_args + +# import braceexpand +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.datasets as datasets +import torchvision.transforms + +# import webdataset as wds +from PIL import Image +from torch.utils.data import Dataset, DataLoader, SubsetRandomSampler +from torch.utils.data.distributed import DistributedSampler +from functools import partial +import soundfile as sf +import io +from pathlib import Path + +# import wget + +from ..open_clip.utils import ( + get_tar_path_from_dataset_name, + dataset_split, +) +from ..open_clip.utils import load_p, load_class_label +import copy + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + +try: + import torchaudio +except ImportError: + torchaudio = None + +from ..open_clip import tokenize + + +def tokenizer(text): + return tokenize(text).squeeze(0) + + +from transformers import RobertaTokenizer + +tokenize = None +# RobertaTokenizer.from_pretrained("roberta-base") + + +def tokenizer(text): + result = tokenize( + text, + padding="max_length", + truncation=True, + max_length=77, + return_tensors="pt", + ) + return {k: v.squeeze(0) for k, v in result.items()} + + +# initizlied the audioset map +_AUDIOSET_MAP_PATH = os.path.join(Path(__file__).parent, "audioset_textmap.npy") +_AUDIOSET_MAP = np.load(_AUDIOSET_MAP_PATH, allow_pickle=True) + + +def int16_to_float32(x): + return (x / 
32767.0).astype(np.float32)
+
+
+def float32_to_int16(x):
+    x = np.clip(x, a_min=-1.0, a_max=1.0)
+    return (x * 32767.0).astype(np.int16)
+
+
+@dataclass
+class DataInfo:
+    dataloader: DataLoader
+    sampler: DistributedSampler
+
+
+def preprocess_txt(text):
+    return tokenize([str(text)])[0]
+
+
+def get_dataset_size(shards, sizefilepath_=None, is_local=True):
+    if isinstance(shards, list):
+        size_list = []
+        for s in shards:
+            size_list.append(
+                get_dataset_size(s, sizefilepath_=sizefilepath_, is_local=is_local)[0]
+            )
+    else:
+        if not is_local:
+            for n in dataset_split.keys():
+                if n in shards.split("/"):
+                    break
+            for s in dataset_split[n]:
+                if s in shards.split("/"):
+                    break
+            sizefilepath_ = f"./json_files/{n}/{s}/sizes.json"
+        shards_list = list(braceexpand.braceexpand(shards))
+        dir_path = os.path.dirname(shards)
+        if sizefilepath_ is not None:
+            sizes = json.load(open(sizefilepath_, "r"))
+            total_size = sum(
+                [
+                    int(sizes[os.path.basename(shard.replace(".tar -", ".tar"))])
+                    for shard in shards_list
+                ]
+            )
+        else:
+            sizes_filename = os.path.join(dir_path, "sizes.json")
+            len_filename = os.path.join(dir_path, "__len__")
+            if os.path.exists(sizes_filename):
+                sizes = json.load(open(sizes_filename, "r"))
+                total_size = sum(
+                    [int(sizes[os.path.basename(shard)]) for shard in shards_list]
+                )
+            elif os.path.exists(len_filename):
+                # FIXME this used to be eval(open(...)) but that seemed rather unsafe
+                total_size = ast.literal_eval(open(len_filename, "r").read())
+            else:
+                raise Exception(
+                    "Cannot find sizes file for dataset. Please specify the path to the file."
+                )
+            # total_size = None  # num samples undefined
+            # some common dataset sizes (at time of authors last download)
+            # cc3m-train: 2905954
+            # cc12m: 10968539
+            # LAION-400m: 407332084
+        num_shards = len(shards_list)
+    if isinstance(shards, list):
+        return sum(size_list), len(shards)
+    else:
+        return total_size, num_shards
+
+
+def get_imagenet(args, preprocess_fns, split):
+    assert split in ["train", "val", "v2"]
+    is_train = split == "train"
+    preprocess_train, preprocess_val = preprocess_fns
+
+    if split == "v2":
+        from imagenetv2_pytorch import ImageNetV2Dataset
+
+        dataset = ImageNetV2Dataset(location=args.imagenet_v2, transform=preprocess_val)
+    else:
+        if is_train:
+            data_path = args.imagenet_train
+            preprocess_fn = preprocess_train
+        else:
+            data_path = args.imagenet_val
+            preprocess_fn = preprocess_val
+        assert data_path
+
+        dataset = datasets.ImageFolder(data_path, transform=preprocess_fn)
+
+    if is_train:
+        idxs = np.zeros(len(dataset.targets))
+        target_array = np.array(dataset.targets)
+        k = 50
+        for c in range(1000):
+            m = target_array == c
+            n = len(idxs[m])
+            arr = np.zeros(n)
+            arr[:k] = 1
+            np.random.shuffle(arr)
+            idxs[m] = arr
+
+        idxs = idxs.astype("int")
+        sampler = SubsetRandomSampler(np.where(idxs)[0])
+    else:
+        sampler = None
+
+    dataloader = torch.utils.data.DataLoader(
+        dataset,
+        batch_size=args.batch_size,
+        num_workers=args.workers,
+        sampler=sampler,
+    )
+
+    return DataInfo(dataloader, sampler)
+
+
+def count_samples(dataloader):
+    os.environ["WDS_EPOCH"] = "0"
+    n_elements, n_batches = 0, 0
+    for images, texts in dataloader:
+        n_batches += 1
+        n_elements += len(images)
+        assert len(images) == len(texts)
+    return n_elements, n_batches
+
+
+def filter_no_caption(sample):
+    return "txt" in sample
+
+
+def log_and_continue(exn):
+    """Call in an exception handler to ignore any exception, issue a warning, and continue."""
+    logging.warning(f"Handling webdataset error ({repr(exn)}). 
Ignoring.") + return True + + +_SHARD_SHUFFLE_SIZE = 2000 +_SHARD_SHUFFLE_INITIAL = 500 +_SAMPLE_SHUFFLE_SIZE = 5000 +_SAMPLE_SHUFFLE_INITIAL = 1000 + + +def sample_prop(sizefile, inputs, proportion, is_local=True): + """ + Sample a proportion of the data. + """ + file_path_dict = { + os.path.split(inputs[i])[1]: os.path.split(inputs[i])[0] + for i in range(len(inputs)) + } + sampled_filepath_dict = {} + sampled_size_dict = {} + if not is_local: + if os.path.exists("sizes.json"): + os.remove("sizes.json") + wget.download(sizefile, "sizes.json") + sizefile = "sizes.json" + with open(sizefile, "r", encoding="UTF-8") as f: + load_dict = json.load(f) + L = int(len(file_path_dict) * proportion) + subkeys = random.sample(file_path_dict.keys(), L) + for k in subkeys: + sampled_size_dict[k] = load_dict[k] + sampled_filepath_dict[k] = file_path_dict[k] + return ( + sum(sampled_size_dict.values()), + L, + [os.path.join(v, k) for k, v in sampled_filepath_dict.items()], + sampled_size_dict, + ) + + +def get_mel(audio_data, audio_cfg): + # mel shape: (n_mels, T) + mel = torchaudio.transforms.MelSpectrogram( + sample_rate=audio_cfg["sample_rate"], + n_fft=audio_cfg["window_size"], + win_length=audio_cfg["window_size"], + hop_length=audio_cfg["hop_size"], + center=True, + pad_mode="reflect", + power=2.0, + norm=None, + onesided=True, + n_mels=64, + f_min=audio_cfg["fmin"], + f_max=audio_cfg["fmax"], + ).to(audio_data.device) + mel = mel(audio_data) + # Align to librosa: + # librosa_melspec = librosa.feature.melspectrogram( + # waveform, + # sr=audio_cfg['sample_rate'], + # n_fft=audio_cfg['window_size'], + # hop_length=audio_cfg['hop_size'], + # win_length=audio_cfg['window_size'], + # center=True, + # pad_mode="reflect", + # power=2.0, + # n_mels=64, + # norm=None, + # htk=True, + # f_min=audio_cfg['fmin'], + # f_max=audio_cfg['fmax'] + # ) + # we use log mel spectrogram as input + mel = torchaudio.transforms.AmplitudeToDB(top_db=None)(mel) + return mel.T # (T, n_mels) + + +def get_audio_features( + sample, audio_data, max_len, data_truncating, data_filling, audio_cfg, dtype +): + """ + Calculate and add audio features to sample. + Sample: a dict containing all the data of current sample. + audio_data: a tensor of shape (T) containing audio data. + max_len: the maximum length of audio data. + data_truncating: the method of truncating data. + data_filling: the method of filling data. + audio_cfg: a dict containing audio configuration. Comes from model_cfg['audio_cfg']. + """ + with torch.no_grad(): + if len(audio_data) > max_len: + if data_truncating == "rand_trunc": + longer = torch.tensor([True]) + elif data_truncating == "fusion": + # fusion + mel = get_mel(audio_data, audio_cfg) + # split to three parts + chunk_frames = ( + max_len // audio_cfg["hop_size"] + 1 + ) # the +1 related to how the spectrogram is computed + total_frames = mel.shape[0] + if chunk_frames == total_frames: + # there is a corner case where the audio length is + # larger than max_len but smaller than max_len+hop_size. + # In this case, we just use the whole audio. 
+ mel_fusion = torch.stack([mel, mel, mel, mel], dim=0) + sample["mel_fusion"] = mel_fusion.to(dtype) + longer = torch.tensor([False]) + else: + ranges = np.array_split( + list(range(0, total_frames - chunk_frames + 1)), 3 + ) + # print('total_frames-chunk_frames:', total_frames-chunk_frames, + # 'len(audio_data):', len(audio_data), + # 'chunk_frames:', chunk_frames, + # 'total_frames:', total_frames) + if len(ranges[1]) == 0: + # if the audio is too short, we just use the first chunk + ranges[1] = [0] + if len(ranges[2]) == 0: + # if the audio is too short, we just use the first chunk + ranges[2] = [0] + # randomly choose index for each part + idx_front = np.random.choice(ranges[0]) + idx_middle = np.random.choice(ranges[1]) + idx_back = np.random.choice(ranges[2]) + # select mel + mel_chunk_front = mel[idx_front : idx_front + chunk_frames, :] + mel_chunk_middle = mel[idx_middle : idx_middle + chunk_frames, :] + mel_chunk_back = mel[idx_back : idx_back + chunk_frames, :] + + # shrink the mel + mel_shrink = torchvision.transforms.Resize(size=[chunk_frames, 64])( + mel[None] + )[0] + # logging.info(f"mel_shrink.shape: {mel_shrink.shape}") + + # stack + mel_fusion = torch.stack( + [mel_chunk_front, mel_chunk_middle, mel_chunk_back, mel_shrink], + dim=0, + ) + sample["mel_fusion"] = mel_fusion.to(dtype) + longer = torch.tensor([True]) + else: + raise NotImplementedError( + f"data_truncating {data_truncating} not implemented" + ) + # random crop to max_len (for compatibility) + overflow = len(audio_data) - max_len + idx = np.random.randint(0, overflow + 1) + audio_data = audio_data[idx : idx + max_len] + + else: # padding if too short + if len(audio_data) < max_len: # do nothing if equal + if data_filling == "repeatpad": + n_repeat = int(max_len / len(audio_data)) + audio_data = audio_data.repeat(n_repeat) + # audio_data = audio_data.unsqueeze(0).unsqueeze(0).unsqueeze(0) + # audio_data = F.interpolate(audio_data,size=max_len,mode="bicubic")[0,0,0] + audio_data = F.pad( + audio_data, + (0, max_len - len(audio_data)), + mode="constant", + value=0, + ) + elif data_filling == "pad": + audio_data = F.pad( + audio_data, + (0, max_len - len(audio_data)), + mode="constant", + value=0, + ) + elif data_filling == "repeat": + n_repeat = int(max_len / len(audio_data)) + audio_data = audio_data.repeat(n_repeat + 1)[:max_len] + else: + raise NotImplementedError( + f"data_filling {data_filling} not implemented" + ) + if data_truncating == "fusion": + mel = get_mel(audio_data, audio_cfg) + mel_fusion = torch.stack([mel, mel, mel, mel], dim=0) + sample["mel_fusion"] = mel_fusion.to(dtype) + longer = torch.tensor([False]) + + sample["longer"] = longer + sample["waveform"] = audio_data.to(dtype) + + return sample + + +def preprocess( + sample, + audio_ext, + text_ext, + max_len, + audio_cfg, + class_index_dict=None, + data_filling="pad", + data_truncating="rand_trunc", + text_augment_selection=None, +): + """ + Preprocess a single sample for wdsdataloader. 
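+    Returns the same dict with waveform/mel features, tokenized text, and
+    bookkeeping fields (audio_name, text_name, audio_orig_sr) filled in.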
+ """ + audio_data, orig_sr = sf.read(io.BytesIO(sample[audio_ext])) + audio_data = int16_to_float32(float32_to_int16(audio_data)) + audio_data = torch.tensor(audio_data).float() + + # TODO: (yusong) to be include in the future + # # if torchaudio not installed, use soundfile to load audio + # if torchaudio is None: + # audio_data, orig_sr = sf.read(io.BytesIO(sample[audio_ext])) + # audio_data = torch.tensor(audio_data).float() + # else: + # # https://github.com/webdataset/webdataset/blob/main/webdataset/autodecode.py + # with tempfile.TemporaryDirectory() as dirname: + # os.makedirs(dirname, exist_ok=True) + # fname = os.path.join(dirname, f"file.flac") + # with open(fname, "wb") as stream: + # stream.write(sample[audio_ext]) + # audio_data, orig_sr = torchaudio.load(fname) + # audio_data = audio_data[0, :].float() + + sample = get_audio_features( + sample, audio_data, max_len, data_truncating, data_filling, audio_cfg + ) + del sample[audio_ext] + + try: + json_dict_raw = json.loads(sample[text_ext].decode("utf-8")) + except: + print("sample[__url__]:", sample["__url__"]) + + # For selecting augmented text from dataset + if text_augment_selection is None or text_augment_selection == "none": + texts = json_dict_raw["text"] + elif text_augment_selection == "all": + if "text_augment_all" in json_dict_raw.keys(): + texts = json_dict_raw["text_augment_all"] + else: + texts = json_dict_raw["text"] + elif text_augment_selection == "augment_only": + if "text_augment_all" in json_dict_raw.keys(): + if json_dict_raw["text_augment_t5"] is None: + texts = json_dict_raw["text"] + else: + texts = json_dict_raw["text_augment_t5"] + else: + texts = json_dict_raw["text"] + else: + raise NotImplementedError( + f"text_augment_selection {text_augment_selection} not implemented" + ) + sample["full_text"] = texts + + if isinstance(texts, list) and isinstance(texts[0], str) and len(texts) > 1: + texts = random.choice(texts) + sample["raw_text"] = texts + sample["text"] = tokenizer(texts) # text shape: [num_token] + if class_index_dict is not None: + # https://stackoverflow.com/questions/48004243/how-to-share-large-read-only-dictionary-list-across-processes-in-multiprocessing + # https://stackoverflow.com/questions/45693949/storing-strings-in-a-multiprocessing-sharedctypes-array + # key, val = class_index_dict + # key = key[:].split('\n') + # _dict = {k: v for k, v in zip(key, val)} + sample["class_label"] = np.zeros(len(class_index_dict.keys())) + for x in json_dict_raw["tag"]: + sample["class_label"][class_index_dict[x]] = 1 + sample["class_label"] = torch.tensor(sample["class_label"]).float() + del sample[text_ext] + sample["audio_name"] = sample["__key__"].split("/")[-1] + "." + audio_ext + sample["text_name"] = sample["__key__"].split("/")[-1] + "." + text_ext + sample["audio_orig_sr"] = orig_sr + return sample + + +def collate_fn(batch): + """ + Collate function for wdsdataloader. + batch: a list of dict, each dict is a sample + """ + # concatenate values in each dictionary. if it is a tensor, concatenate. if it is a list, extend. 
+    batch_dict = {}
+    for k in batch[0].keys():
+        if isinstance(batch[0][k], dict):  # deal with bert tokenizer output
+            batch_dict[k] = {}
+            for kk in batch[0][k].keys():
+                tmp = []
+                for i in range(len(batch)):
+                    tmp.append(batch[i][k][kk])
+                batch_dict[k][kk] = torch.vstack(tmp)
+        elif isinstance(batch[0][k], torch.Tensor):
+            batch_dict[k] = torch.stack([sample[k] for sample in batch])
+        elif isinstance(batch[0][k], np.ndarray):
+            batch_dict[k] = torch.tensor(np.stack([sample[k] for sample in batch]))
+        else:
+            batch_dict[k] = [sample[k] for sample in batch]
+    return batch_dict
+
+
+def get_wds_dataset(
+    args,
+    model_cfg,
+    is_train,
+    audio_ext="flac",
+    text_ext="json",
+    max_len=480000,
+    proportion=1.0,
+    sizefilepath_=None,
+    is_local=None,
+):
+    """
+    Get a dataset for wdsdataloader.
+    """
+    if is_local is None and args.remotedata is not None:
+        is_local = not args.remotedata
+
+    input_shards = args.train_data if is_train else args.val_data
+    assert input_shards is not None
+
+    if sizefilepath_ is not None:
+        sizefilepath = sizefilepath_
+    else:
+        sizefilepath = os.path.join(os.path.dirname(input_shards[0]), "sizes.json")
+
+    if proportion != 1.0:
+        num_samples, num_shards, input_shards, _ = sample_prop(
+            sizefilepath, input_shards, proportion, is_local=is_local
+        )
+    else:
+        num_samples, num_shards = get_dataset_size(
+            input_shards, sizefilepath_=sizefilepath_, is_local=is_local
+        )
+
+    if not num_samples:
+        if is_train:
+            num_samples = args.train_num_samples
+            if not num_samples:
+                raise RuntimeError(
+                    "Currently, number of dataset samples must be specified for training dataset. "
+                    "Please specify via `--train-num-samples` if no dataset length info present."
+                )
+        else:
+            num_samples = (
+                args.val_num_samples or 0
+            )  # eval will just exhaust the iterator if not specified
+
+    pipeline = [wds.SimpleShardList(input_shards)]
+    # at this point we have an iterator over all the shards
+    # TODO: (yusong): add an if statement for distributed runs; when not distributed, we don't need split_by_node
+    if is_train or args.parallel_eval:
+        pipeline.extend(
+            [
+                wds.detshuffle(
+                    bufsize=_SHARD_SHUFFLE_SIZE,
+                    initial=_SHARD_SHUFFLE_INITIAL,
+                    seed=args.seed,
+                ),
+                wds.split_by_node,
+                wds.split_by_worker,
+                # at this point, we have an iterator over the shards assigned to each worker at each node
+                wds.tarfile_to_samples(handler=log_and_continue),
+                wds.shuffle(
+                    bufsize=_SAMPLE_SHUFFLE_SIZE,
+                    initial=_SAMPLE_SHUFFLE_INITIAL,
+                    rng=random.Random(args.seed),
+                ),
+                # wds.repeatedly,  # FIXME determine if this is beneficial
+            ]
+        )
+    else:
+        pipeline.extend(
+            [
+                wds.split_by_worker,
+                # at this point, we have an iterator over the shards assigned to each worker
+                wds.tarfile_to_samples(handler=log_and_continue),
+            ]
+        )
+    pipeline.append(
+        wds.map(
+            partial(
+                preprocess,
+                audio_ext=audio_ext,
+                text_ext=text_ext,
+                max_len=max_len,
+                audio_cfg=model_cfg["audio_cfg"],
+                class_index_dict=copy.deepcopy(args.class_index_dict),
+                data_filling=args.data_filling,
+                data_truncating=args.data_truncating,
+                text_augment_selection=args.text_augment_selection,
+            )
+        ),
+    )
+
+    pipeline.append(
+        wds.batched(
+            args.batch_size,
+            partial=not (is_train or args.parallel_eval),
+            collation_fn=collate_fn,
+        )
+    )
+
+    dataset = wds.DataPipeline(*pipeline)
+    if is_train or args.parallel_eval:
+        # (yusong): Parallel evaluation is currently not precise, as we repeat
+        # the last few samples.
+        # (yusong): See comments below.
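+        # Example with illustrative numbers: 10,000 samples, global batch 64
+        # (16 per rank x 4 ranks), 4 workers: ceil(10000/64) = 157 batches,
+        # then ceil(157/4) * 4 = 160 batches, so 160 * 64 = 10240 samples.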
+ # roll over and repeat a few samples to get same number of full batches on each node + global_batch_size = args.batch_size * args.world_size + num_batches = math.ceil(num_samples / global_batch_size) + num_workers = max(1, args.workers) + num_worker_batches = math.ceil( + num_batches / num_workers + ) # per dataloader worker + num_batches = num_worker_batches * num_workers + num_samples = num_batches * global_batch_size + dataset = dataset.with_epoch( + num_worker_batches + ) # each worker is iterating over this + else: + # last batches are partial, eval is done on single (master) node + num_batches = math.ceil(num_samples / args.batch_size) + + kwargs = {} + if args.horovod: # multi-node training on summit + kwargs["multiprocessing_context"] = "forkserver" + + dataloader = wds.WebLoader( + dataset, batch_size=None, shuffle=False, num_workers=args.workers, **kwargs + ) + + # FIXME not clear which approach is better, with_epoch before vs after dataloader? + # hoping to resolve via https://github.com/webdataset/webdataset/issues/169 + # if is_train: + # # roll over and repeat a few samples to get same number of full batches on each node + # global_batch_size = args.batch_size * args.world_size + # num_batches = math.ceil(num_samples / global_batch_size) + # num_workers = max(1, args.workers) + # num_batches = math.ceil(num_batches / num_workers) * num_workers + # num_samples = num_batches * global_batch_size + # dataloader = dataloader.with_epoch(num_batches) + # else: + # # last batches are partial, eval is done on single (master) node + # num_batches = math.ceil(num_samples / args.batch_size) + + # add meta-data to dataloader instance for convenience + dataloader.num_batches = num_batches + dataloader.num_samples = num_samples + + return DataInfo(dataloader, None) + + +def wds_batch_list2dict( + batch, + keys=[ + "__url__", + "__key__", + "waveform", + "text", + "raw_text", + "audio_name", + "text_name", + "audio_orig_sr", + ], +): + """ + Return a dictionary of the batch, with keys as the names of the fields. 
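+    E.g. zipping a 3-element batch with keys ["waveform", "text", "raw_text"]
+    yields {"waveform": batch[0], "text": batch[1], "raw_text": batch[2]}.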
+ """ + assert len(keys) == len( + batch + ), "batch must have same number of keys as keys argument" + return {keys[i]: batch[i] for i in range(len(batch))} + + +def get_csv_dataset(args, preprocess_fn, is_train): + input_filename = args.train_data if is_train else args.val_data + assert input_filename + dataset = CsvDataset( + input_filename, + preprocess_fn, + img_key=args.csv_img_key, + caption_key=args.csv_caption_key, + sep=args.csv_separator, + ) + num_samples = len(dataset) + sampler = DistributedSampler(dataset) if args.distributed and is_train else None + shuffle = is_train and sampler is None + + dataloader = DataLoader( + dataset, + batch_size=args.batch_size, + shuffle=shuffle, + num_workers=args.workers, + pin_memory=True, + sampler=sampler, + drop_last=is_train, + ) + dataloader.num_samples = num_samples + dataloader.num_batches = len(dataloader) + + return DataInfo(dataloader, sampler) + + +def get_toy_dataset(args, model_cfg, is_train): + index_path = args.train_data if is_train else args.val_data + ipc_path = args.train_ipc if is_train else args.val_ipc + assert index_path and ipc_path + eval_mode = not is_train + dataset = ToyDataset(index_path, ipc_path, model_cfg, eval_mode=eval_mode) + + num_samples = len(dataset) + sampler = ( + DistributedSampler(dataset, shuffle=False) + if args.distributed and is_train + else None + ) + + dataloader = DataLoader( + dataset, + batch_size=args.batch_size, + shuffle=False, + num_workers=args.workers, + sampler=sampler, + drop_last=is_train, + ) + dataloader.num_samples = num_samples + dataloader.num_batches = len(dataloader) + + return DataInfo(dataloader, sampler) + + +def get_dataset_fn(data_path, dataset_type): + if dataset_type == "webdataset": + return get_wds_dataset + elif dataset_type == "csv": + return get_csv_dataset + elif dataset_type == "auto": + ext = data_path.split(".")[-1] + if ext in ["csv", "tsv"]: + return get_csv_dataset + elif ext in ["tar"]: + return get_wds_dataset + else: + raise ValueError( + f"Tried to figure out dataset type, but failed for extension {ext}." 
+ ) + elif dataset_type == "toy": + return get_toy_dataset + else: + raise ValueError(f"Unsupported dataset type: {dataset_type}") + + +def get_data(args, model_cfg): + data = {} + + args.class_index_dict = load_class_label(args.class_label_path) + + if args.datasetinfos is None: + args.datasetinfos = ["train", "unbalanced_train", "balanced_train"] + if args.dataset_type == "webdataset": + args.train_data = get_tar_path_from_dataset_name( + args.datasetnames, + args.datasetinfos, + islocal=not args.remotedata, + proportion=args.dataset_proportion, + dataset_path=args.datasetpath, + full_dataset=args.full_train_dataset, + ) + + if args.full_train_dataset is None: + args.full_train_dataset = [] + if args.exclude_eval_dataset is None: + args.exclude_eval_dataset = [] + excluded_eval_datasets = args.full_train_dataset + args.exclude_eval_dataset + + val_dataset_names = ( + [n for n in args.datasetnames if n not in excluded_eval_datasets] + if excluded_eval_datasets + else args.datasetnames + ) + args.val_dataset_names = val_dataset_names + args.val_data = get_tar_path_from_dataset_name( + val_dataset_names, + ["valid", "test", "eval"], + islocal=not args.remotedata, + proportion=1, + dataset_path=args.datasetpath, + full_dataset=None, + ) + + if args.train_data: + data["train"] = get_dataset_fn(args.train_data, args.dataset_type)( + args, model_cfg, is_train=True + ) + + if args.val_data: + data["val"] = get_dataset_fn(args.val_data, args.dataset_type)( + args, model_cfg, is_train=False + ) + + return data diff --git a/core/models/encoders/clap_modules/training/distributed.py b/core/models/encoders/clap_modules/training/distributed.py new file mode 100644 index 0000000000000000000000000000000000000000..2fa61f76c5cc3ab9f6a9643042afa8e1f2e1cb7f --- /dev/null +++ b/core/models/encoders/clap_modules/training/distributed.py @@ -0,0 +1,150 @@ +import os + +import torch +import socket + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + + +def is_global_master(args): + return args.rank == 0 + + +def is_local_master(args): + return args.local_rank == 0 + + +def is_master(args, local=False): + return is_local_master(args) if local else is_global_master(args) + + +def is_using_horovod(): + # NOTE w/ horovod run, OMPI vars should be set, but w/ SLURM PMI vars will be set + # Differentiating between horovod and DDP use via SLURM may not be possible, so horovod arg still required... 
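+    # OMPI_COMM_WORLD_* variables are set by Open MPI launchers, while
+    # PMI_RANK/PMI_SIZE come from SLURM's PMI plugin.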
+ ompi_vars = ["OMPI_COMM_WORLD_RANK", "OMPI_COMM_WORLD_SIZE"] + pmi_vars = ["PMI_RANK", "PMI_SIZE"] + if all([var in os.environ for var in ompi_vars]) or all( + [var in os.environ for var in pmi_vars] + ): + return True + else: + return False + + +def is_using_distributed(): + if "WORLD_SIZE" in os.environ: + return int(os.environ["WORLD_SIZE"]) > 1 + if "SLURM_NTASKS" in os.environ: + return int(os.environ["SLURM_NTASKS"]) > 1 + return False + + +def world_info_from_env(): + local_rank = 0 + for v in ( + "SLURM_LOCALID", + "MPI_LOCALRANKID", + "OMPI_COMM_WORLD_LOCAL_RANK", + "LOCAL_RANK", + ): + if v in os.environ: + local_rank = int(os.environ[v]) + break + global_rank = 0 + for v in ("SLURM_PROCID", "PMI_RANK", "OMPI_COMM_WORLD_RANK", "RANK"): + if v in os.environ: + global_rank = int(os.environ[v]) + break + world_size = 1 + for v in ("SLURM_NTASKS", "PMI_SIZE", "OMPI_COMM_WORLD_SIZE", "WORLD_SIZE"): + if v in os.environ: + world_size = int(os.environ[v]) + break + + return local_rank, global_rank, world_size + + +def init_distributed_device(args): + # Distributed training = training on more than one GPU. + # Works in both single and multi-node scenarios. + args.distributed = False + args.world_size = 1 + args.rank = 0 # global rank + args.local_rank = 0 + if args.horovod: + assert hvd is not None, "Horovod is not installed" + hvd.init() + world_size = int(os.environ["OMPI_COMM_WORLD_SIZE"]) + world_rank = int(os.environ["OMPI_COMM_WORLD_RANK"]) + local_rank = int(os.environ["OMPI_COMM_WORLD_LOCAL_RANK"]) + args.local_rank = local_rank + args.rank = world_rank + args.world_size = world_size + # args.local_rank = int(hvd.local_rank()) + # args.rank = hvd.rank() + # args.world_size = hvd.size() + args.distributed = True + os.environ["LOCAL_RANK"] = str(args.local_rank) + os.environ["RANK"] = str(args.rank) + os.environ["WORLD_SIZE"] = str(args.world_size) + print( + f"Distributed training: local_rank={args.local_rank}, " + f"rank={args.rank}, world_size={args.world_size}, " + f"hostname={socket.gethostname()}, pid={os.getpid()}" + ) + elif is_using_distributed(): + if "SLURM_PROCID" in os.environ: + # DDP via SLURM + args.local_rank, args.rank, args.world_size = world_info_from_env() + # SLURM var -> torch.distributed vars in case needed + os.environ["LOCAL_RANK"] = str(args.local_rank) + os.environ["RANK"] = str(args.rank) + os.environ["WORLD_SIZE"] = str(args.world_size) + torch.distributed.init_process_group( + backend=args.dist_backend, + init_method=args.dist_url, + world_size=args.world_size, + rank=args.rank, + ) + elif "OMPI_COMM_WORLD_SIZE" in os.environ: # using Summit cluster + world_size = int(os.environ["OMPI_COMM_WORLD_SIZE"]) + world_rank = int(os.environ["OMPI_COMM_WORLD_RANK"]) + local_rank = int(os.environ["OMPI_COMM_WORLD_LOCAL_RANK"]) + args.local_rank = local_rank + args.rank = world_rank + args.world_size = world_size + torch.distributed.init_process_group( + backend=args.dist_backend, + init_method=args.dist_url, + world_size=args.world_size, + rank=args.rank, + ) + else: + # DDP via torchrun, torch.distributed.launch + args.local_rank, _, _ = world_info_from_env() + torch.distributed.init_process_group( + backend=args.dist_backend, init_method=args.dist_url + ) + args.world_size = torch.distributed.get_world_size() + args.rank = torch.distributed.get_rank() + args.distributed = True + print( + f"Distributed training: local_rank={args.local_rank}, " + f"rank={args.rank}, world_size={args.world_size}, " + f"hostname={socket.gethostname()}, pid={os.getpid()}" + ) + 
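+    # Bind each rank to the GPU matching its local rank; single-process runs
+    # default to cuda:0, and CPU is the fallback when CUDA is unavailable.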
+ if torch.cuda.is_available(): + if args.distributed and not args.no_set_device_rank: + device = "cuda:%d" % args.local_rank + else: + device = "cuda:0" + torch.cuda.set_device(device) + else: + device = "cpu" + args.device = device + device = torch.device(device) + return device diff --git a/core/models/encoders/clap_modules/training/imagenet_zeroshot_data.py b/core/models/encoders/clap_modules/training/imagenet_zeroshot_data.py new file mode 100644 index 0000000000000000000000000000000000000000..d32e55328d6799ccb8d61625f43abb80a33d6c17 --- /dev/null +++ b/core/models/encoders/clap_modules/training/imagenet_zeroshot_data.py @@ -0,0 +1,1088 @@ +# NOTE: This script is currently not supported for CLAP. + +imagenet_classnames = [ + "tench", + "goldfish", + "great white shark", + "tiger shark", + "hammerhead shark", + "electric ray", + "stingray", + "rooster", + "hen", + "ostrich", + "brambling", + "goldfinch", + "house finch", + "junco", + "indigo bunting", + "American robin", + "bulbul", + "jay", + "magpie", + "chickadee", + "American dipper", + "kite (bird of prey)", + "bald eagle", + "vulture", + "great grey owl", + "fire salamander", + "smooth newt", + "newt", + "spotted salamander", + "axolotl", + "American bullfrog", + "tree frog", + "tailed frog", + "loggerhead sea turtle", + "leatherback sea turtle", + "mud turtle", + "terrapin", + "box turtle", + "banded gecko", + "green iguana", + "Carolina anole", + "desert grassland whiptail lizard", + "agama", + "frilled-necked lizard", + "alligator lizard", + "Gila monster", + "European green lizard", + "chameleon", + "Komodo dragon", + "Nile crocodile", + "American alligator", + "triceratops", + "worm snake", + "ring-necked snake", + "eastern hog-nosed snake", + "smooth green snake", + "kingsnake", + "garter snake", + "water snake", + "vine snake", + "night snake", + "boa constrictor", + "African rock python", + "Indian cobra", + "green mamba", + "sea snake", + "Saharan horned viper", + "eastern diamondback rattlesnake", + "sidewinder rattlesnake", + "trilobite", + "harvestman", + "scorpion", + "yellow garden spider", + "barn spider", + "European garden spider", + "southern black widow", + "tarantula", + "wolf spider", + "tick", + "centipede", + "black grouse", + "ptarmigan", + "ruffed grouse", + "prairie grouse", + "peafowl", + "quail", + "partridge", + "african grey parrot", + "macaw", + "sulphur-crested cockatoo", + "lorikeet", + "coucal", + "bee eater", + "hornbill", + "hummingbird", + "jacamar", + "toucan", + "duck", + "red-breasted merganser", + "goose", + "black swan", + "tusker", + "echidna", + "platypus", + "wallaby", + "koala", + "wombat", + "jellyfish", + "sea anemone", + "brain coral", + "flatworm", + "nematode", + "conch", + "snail", + "slug", + "sea slug", + "chiton", + "chambered nautilus", + "Dungeness crab", + "rock crab", + "fiddler crab", + "red king crab", + "American lobster", + "spiny lobster", + "crayfish", + "hermit crab", + "isopod", + "white stork", + "black stork", + "spoonbill", + "flamingo", + "little blue heron", + "great egret", + "bittern bird", + "crane bird", + "limpkin", + "common gallinule", + "American coot", + "bustard", + "ruddy turnstone", + "dunlin", + "common redshank", + "dowitcher", + "oystercatcher", + "pelican", + "king penguin", + "albatross", + "grey whale", + "killer whale", + "dugong", + "sea lion", + "Chihuahua", + "Japanese Chin", + "Maltese", + "Pekingese", + "Shih Tzu", + "King Charles Spaniel", + "Papillon", + "toy terrier", + "Rhodesian Ridgeback", + "Afghan Hound", + "Basset Hound", + 
"Beagle", + "Bloodhound", + "Bluetick Coonhound", + "Black and Tan Coonhound", + "Treeing Walker Coonhound", + "English foxhound", + "Redbone Coonhound", + "borzoi", + "Irish Wolfhound", + "Italian Greyhound", + "Whippet", + "Ibizan Hound", + "Norwegian Elkhound", + "Otterhound", + "Saluki", + "Scottish Deerhound", + "Weimaraner", + "Staffordshire Bull Terrier", + "American Staffordshire Terrier", + "Bedlington Terrier", + "Border Terrier", + "Kerry Blue Terrier", + "Irish Terrier", + "Norfolk Terrier", + "Norwich Terrier", + "Yorkshire Terrier", + "Wire Fox Terrier", + "Lakeland Terrier", + "Sealyham Terrier", + "Airedale Terrier", + "Cairn Terrier", + "Australian Terrier", + "Dandie Dinmont Terrier", + "Boston Terrier", + "Miniature Schnauzer", + "Giant Schnauzer", + "Standard Schnauzer", + "Scottish Terrier", + "Tibetan Terrier", + "Australian Silky Terrier", + "Soft-coated Wheaten Terrier", + "West Highland White Terrier", + "Lhasa Apso", + "Flat-Coated Retriever", + "Curly-coated Retriever", + "Golden Retriever", + "Labrador Retriever", + "Chesapeake Bay Retriever", + "German Shorthaired Pointer", + "Vizsla", + "English Setter", + "Irish Setter", + "Gordon Setter", + "Brittany dog", + "Clumber Spaniel", + "English Springer Spaniel", + "Welsh Springer Spaniel", + "Cocker Spaniel", + "Sussex Spaniel", + "Irish Water Spaniel", + "Kuvasz", + "Schipperke", + "Groenendael dog", + "Malinois", + "Briard", + "Australian Kelpie", + "Komondor", + "Old English Sheepdog", + "Shetland Sheepdog", + "collie", + "Border Collie", + "Bouvier des Flandres dog", + "Rottweiler", + "German Shepherd Dog", + "Dobermann", + "Miniature Pinscher", + "Greater Swiss Mountain Dog", + "Bernese Mountain Dog", + "Appenzeller Sennenhund", + "Entlebucher Sennenhund", + "Boxer", + "Bullmastiff", + "Tibetan Mastiff", + "French Bulldog", + "Great Dane", + "St. 
Bernard", + "husky", + "Alaskan Malamute", + "Siberian Husky", + "Dalmatian", + "Affenpinscher", + "Basenji", + "pug", + "Leonberger", + "Newfoundland dog", + "Great Pyrenees dog", + "Samoyed", + "Pomeranian", + "Chow Chow", + "Keeshond", + "brussels griffon", + "Pembroke Welsh Corgi", + "Cardigan Welsh Corgi", + "Toy Poodle", + "Miniature Poodle", + "Standard Poodle", + "Mexican hairless dog (xoloitzcuintli)", + "grey wolf", + "Alaskan tundra wolf", + "red wolf or maned wolf", + "coyote", + "dingo", + "dhole", + "African wild dog", + "hyena", + "red fox", + "kit fox", + "Arctic fox", + "grey fox", + "tabby cat", + "tiger cat", + "Persian cat", + "Siamese cat", + "Egyptian Mau", + "cougar", + "lynx", + "leopard", + "snow leopard", + "jaguar", + "lion", + "tiger", + "cheetah", + "brown bear", + "American black bear", + "polar bear", + "sloth bear", + "mongoose", + "meerkat", + "tiger beetle", + "ladybug", + "ground beetle", + "longhorn beetle", + "leaf beetle", + "dung beetle", + "rhinoceros beetle", + "weevil", + "fly", + "bee", + "ant", + "grasshopper", + "cricket insect", + "stick insect", + "cockroach", + "praying mantis", + "cicada", + "leafhopper", + "lacewing", + "dragonfly", + "damselfly", + "red admiral butterfly", + "ringlet butterfly", + "monarch butterfly", + "small white butterfly", + "sulphur butterfly", + "gossamer-winged butterfly", + "starfish", + "sea urchin", + "sea cucumber", + "cottontail rabbit", + "hare", + "Angora rabbit", + "hamster", + "porcupine", + "fox squirrel", + "marmot", + "beaver", + "guinea pig", + "common sorrel horse", + "zebra", + "pig", + "wild boar", + "warthog", + "hippopotamus", + "ox", + "water buffalo", + "bison", + "ram (adult male sheep)", + "bighorn sheep", + "Alpine ibex", + "hartebeest", + "impala (antelope)", + "gazelle", + "arabian camel", + "llama", + "weasel", + "mink", + "European polecat", + "black-footed ferret", + "otter", + "skunk", + "badger", + "armadillo", + "three-toed sloth", + "orangutan", + "gorilla", + "chimpanzee", + "gibbon", + "siamang", + "guenon", + "patas monkey", + "baboon", + "macaque", + "langur", + "black-and-white colobus", + "proboscis monkey", + "marmoset", + "white-headed capuchin", + "howler monkey", + "titi monkey", + "Geoffroy's spider monkey", + "common squirrel monkey", + "ring-tailed lemur", + "indri", + "Asian elephant", + "African bush elephant", + "red panda", + "giant panda", + "snoek fish", + "eel", + "silver salmon", + "rock beauty fish", + "clownfish", + "sturgeon", + "gar fish", + "lionfish", + "pufferfish", + "abacus", + "abaya", + "academic gown", + "accordion", + "acoustic guitar", + "aircraft carrier", + "airliner", + "airship", + "altar", + "ambulance", + "amphibious vehicle", + "analog clock", + "apiary", + "apron", + "trash can", + "assault rifle", + "backpack", + "bakery", + "balance beam", + "balloon", + "ballpoint pen", + "Band-Aid", + "banjo", + "baluster / handrail", + "barbell", + "barber chair", + "barbershop", + "barn", + "barometer", + "barrel", + "wheelbarrow", + "baseball", + "basketball", + "bassinet", + "bassoon", + "swimming cap", + "bath towel", + "bathtub", + "station wagon", + "lighthouse", + "beaker", + "military hat (bearskin or shako)", + "beer bottle", + "beer glass", + "bell tower", + "baby bib", + "tandem bicycle", + "bikini", + "ring binder", + "binoculars", + "birdhouse", + "boathouse", + "bobsleigh", + "bolo tie", + "poke bonnet", + "bookcase", + "bookstore", + "bottle cap", + "hunting bow", + "bow tie", + "brass memorial plaque", + "bra", + "breakwater", + 
"breastplate", + "broom", + "bucket", + "buckle", + "bulletproof vest", + "high-speed train", + "butcher shop", + "taxicab", + "cauldron", + "candle", + "cannon", + "canoe", + "can opener", + "cardigan", + "car mirror", + "carousel", + "tool kit", + "cardboard box / carton", + "car wheel", + "automated teller machine", + "cassette", + "cassette player", + "castle", + "catamaran", + "CD player", + "cello", + "mobile phone", + "chain", + "chain-link fence", + "chain mail", + "chainsaw", + "storage chest", + "chiffonier", + "bell or wind chime", + "china cabinet", + "Christmas stocking", + "church", + "movie theater", + "cleaver", + "cliff dwelling", + "cloak", + "clogs", + "cocktail shaker", + "coffee mug", + "coffeemaker", + "spiral or coil", + "combination lock", + "computer keyboard", + "candy store", + "container ship", + "convertible", + "corkscrew", + "cornet", + "cowboy boot", + "cowboy hat", + "cradle", + "construction crane", + "crash helmet", + "crate", + "infant bed", + "Crock Pot", + "croquet ball", + "crutch", + "cuirass", + "dam", + "desk", + "desktop computer", + "rotary dial telephone", + "diaper", + "digital clock", + "digital watch", + "dining table", + "dishcloth", + "dishwasher", + "disc brake", + "dock", + "dog sled", + "dome", + "doormat", + "drilling rig", + "drum", + "drumstick", + "dumbbell", + "Dutch oven", + "electric fan", + "electric guitar", + "electric locomotive", + "entertainment center", + "envelope", + "espresso machine", + "face powder", + "feather boa", + "filing cabinet", + "fireboat", + "fire truck", + "fire screen", + "flagpole", + "flute", + "folding chair", + "football helmet", + "forklift", + "fountain", + "fountain pen", + "four-poster bed", + "freight car", + "French horn", + "frying pan", + "fur coat", + "garbage truck", + "gas mask or respirator", + "gas pump", + "goblet", + "go-kart", + "golf ball", + "golf cart", + "gondola", + "gong", + "gown", + "grand piano", + "greenhouse", + "radiator grille", + "grocery store", + "guillotine", + "hair clip", + "hair spray", + "half-track", + "hammer", + "hamper", + "hair dryer", + "hand-held computer", + "handkerchief", + "hard disk drive", + "harmonica", + "harp", + "combine harvester", + "hatchet", + "holster", + "home theater", + "honeycomb", + "hook", + "hoop skirt", + "gymnastic horizontal bar", + "horse-drawn vehicle", + "hourglass", + "iPod", + "clothes iron", + "carved pumpkin", + "jeans", + "jeep", + "T-shirt", + "jigsaw puzzle", + "rickshaw", + "joystick", + "kimono", + "knee pad", + "knot", + "lab coat", + "ladle", + "lampshade", + "laptop computer", + "lawn mower", + "lens cap", + "letter opener", + "library", + "lifeboat", + "lighter", + "limousine", + "ocean liner", + "lipstick", + "slip-on shoe", + "lotion", + "music speaker", + "loupe magnifying glass", + "sawmill", + "magnetic compass", + "messenger bag", + "mailbox", + "tights", + "one-piece bathing suit", + "manhole cover", + "maraca", + "marimba", + "mask", + "matchstick", + "maypole", + "maze", + "measuring cup", + "medicine cabinet", + "megalith", + "microphone", + "microwave oven", + "military uniform", + "milk can", + "minibus", + "miniskirt", + "minivan", + "missile", + "mitten", + "mixing bowl", + "mobile home", + "ford model t", + "modem", + "monastery", + "monitor", + "moped", + "mortar and pestle", + "graduation cap", + "mosque", + "mosquito net", + "vespa", + "mountain bike", + "tent", + "computer mouse", + "mousetrap", + "moving van", + "muzzle", + "metal nail", + "neck brace", + "necklace", + "baby pacifier", + "notebook 
computer", + "obelisk", + "oboe", + "ocarina", + "odometer", + "oil filter", + "pipe organ", + "oscilloscope", + "overskirt", + "bullock cart", + "oxygen mask", + "product packet / packaging", + "paddle", + "paddle wheel", + "padlock", + "paintbrush", + "pajamas", + "palace", + "pan flute", + "paper towel", + "parachute", + "parallel bars", + "park bench", + "parking meter", + "railroad car", + "patio", + "payphone", + "pedestal", + "pencil case", + "pencil sharpener", + "perfume", + "Petri dish", + "photocopier", + "plectrum", + "Pickelhaube", + "picket fence", + "pickup truck", + "pier", + "piggy bank", + "pill bottle", + "pillow", + "ping-pong ball", + "pinwheel", + "pirate ship", + "drink pitcher", + "block plane", + "planetarium", + "plastic bag", + "plate rack", + "farm plow", + "plunger", + "Polaroid camera", + "pole", + "police van", + "poncho", + "pool table", + "soda bottle", + "plant pot", + "potter's wheel", + "power drill", + "prayer rug", + "printer", + "prison", + "missile", + "projector", + "hockey puck", + "punching bag", + "purse", + "quill", + "quilt", + "race car", + "racket", + "radiator", + "radio", + "radio telescope", + "rain barrel", + "recreational vehicle", + "fishing casting reel", + "reflex camera", + "refrigerator", + "remote control", + "restaurant", + "revolver", + "rifle", + "rocking chair", + "rotisserie", + "eraser", + "rugby ball", + "ruler measuring stick", + "sneaker", + "safe", + "safety pin", + "salt shaker", + "sandal", + "sarong", + "saxophone", + "scabbard", + "weighing scale", + "school bus", + "schooner", + "scoreboard", + "CRT monitor", + "screw", + "screwdriver", + "seat belt", + "sewing machine", + "shield", + "shoe store", + "shoji screen / room divider", + "shopping basket", + "shopping cart", + "shovel", + "shower cap", + "shower curtain", + "ski", + "balaclava ski mask", + "sleeping bag", + "slide rule", + "sliding door", + "slot machine", + "snorkel", + "snowmobile", + "snowplow", + "soap dispenser", + "soccer ball", + "sock", + "solar thermal collector", + "sombrero", + "soup bowl", + "keyboard space bar", + "space heater", + "space shuttle", + "spatula", + "motorboat", + "spider web", + "spindle", + "sports car", + "spotlight", + "stage", + "steam locomotive", + "through arch bridge", + "steel drum", + "stethoscope", + "scarf", + "stone wall", + "stopwatch", + "stove", + "strainer", + "tram", + "stretcher", + "couch", + "stupa", + "submarine", + "suit", + "sundial", + "sunglasses", + "sunglasses", + "sunscreen", + "suspension bridge", + "mop", + "sweatshirt", + "swim trunks / shorts", + "swing", + "electrical switch", + "syringe", + "table lamp", + "tank", + "tape player", + "teapot", + "teddy bear", + "television", + "tennis ball", + "thatched roof", + "front curtain", + "thimble", + "threshing machine", + "throne", + "tile roof", + "toaster", + "tobacco shop", + "toilet seat", + "torch", + "totem pole", + "tow truck", + "toy store", + "tractor", + "semi-trailer truck", + "tray", + "trench coat", + "tricycle", + "trimaran", + "tripod", + "triumphal arch", + "trolleybus", + "trombone", + "hot tub", + "turnstile", + "typewriter keyboard", + "umbrella", + "unicycle", + "upright piano", + "vacuum cleaner", + "vase", + "vaulted or arched ceiling", + "velvet fabric", + "vending machine", + "vestment", + "viaduct", + "violin", + "volleyball", + "waffle iron", + "wall clock", + "wallet", + "wardrobe", + "military aircraft", + "sink", + "washing machine", + "water bottle", + "water jug", + "water tower", + "whiskey jug", + "whistle", + "hair 
wig", + "window screen", + "window shade", + "Windsor tie", + "wine bottle", + "airplane wing", + "wok", + "wooden spoon", + "wool", + "split-rail fence", + "shipwreck", + "sailboat", + "yurt", + "website", + "comic book", + "crossword", + "traffic or street sign", + "traffic light", + "dust jacket", + "menu", + "plate", + "guacamole", + "consomme", + "hot pot", + "trifle", + "ice cream", + "popsicle", + "baguette", + "bagel", + "pretzel", + "cheeseburger", + "hot dog", + "mashed potatoes", + "cabbage", + "broccoli", + "cauliflower", + "zucchini", + "spaghetti squash", + "acorn squash", + "butternut squash", + "cucumber", + "artichoke", + "bell pepper", + "cardoon", + "mushroom", + "Granny Smith apple", + "strawberry", + "orange", + "lemon", + "fig", + "pineapple", + "banana", + "jackfruit", + "cherimoya (custard apple)", + "pomegranate", + "hay", + "carbonara", + "chocolate syrup", + "dough", + "meatloaf", + "pizza", + "pot pie", + "burrito", + "red wine", + "espresso", + "tea cup", + "eggnog", + "mountain", + "bubble", + "cliff", + "coral reef", + "geyser", + "lakeshore", + "promontory", + "sandbar", + "beach", + "valley", + "volcano", + "baseball player", + "bridegroom", + "scuba diver", + "rapeseed", + "daisy", + "yellow lady's slipper", + "corn", + "acorn", + "rose hip", + "horse chestnut seed", + "coral fungus", + "agaric", + "gyromitra", + "stinkhorn mushroom", + "earth star fungus", + "hen of the woods mushroom", + "bolete", + "corn cob", + "toilet paper", +] + + +openai_imagenet_template = [ + lambda c: f"a bad photo of a {c}.", + lambda c: f"a photo of many {c}.", + lambda c: f"a sculpture of a {c}.", + lambda c: f"a photo of the hard to see {c}.", + lambda c: f"a low resolution photo of the {c}.", + lambda c: f"a rendering of a {c}.", + lambda c: f"graffiti of a {c}.", + lambda c: f"a bad photo of the {c}.", + lambda c: f"a cropped photo of the {c}.", + lambda c: f"a tattoo of a {c}.", + lambda c: f"the embroidered {c}.", + lambda c: f"a photo of a hard to see {c}.", + lambda c: f"a bright photo of a {c}.", + lambda c: f"a photo of a clean {c}.", + lambda c: f"a photo of a dirty {c}.", + lambda c: f"a dark photo of the {c}.", + lambda c: f"a drawing of a {c}.", + lambda c: f"a photo of my {c}.", + lambda c: f"the plastic {c}.", + lambda c: f"a photo of the cool {c}.", + lambda c: f"a close-up photo of a {c}.", + lambda c: f"a black and white photo of the {c}.", + lambda c: f"a painting of the {c}.", + lambda c: f"a painting of a {c}.", + lambda c: f"a pixelated photo of the {c}.", + lambda c: f"a sculpture of the {c}.", + lambda c: f"a bright photo of the {c}.", + lambda c: f"a cropped photo of a {c}.", + lambda c: f"a plastic {c}.", + lambda c: f"a photo of the dirty {c}.", + lambda c: f"a jpeg corrupted photo of a {c}.", + lambda c: f"a blurry photo of the {c}.", + lambda c: f"a photo of the {c}.", + lambda c: f"a good photo of the {c}.", + lambda c: f"a rendering of the {c}.", + lambda c: f"a {c} in a video game.", + lambda c: f"a photo of one {c}.", + lambda c: f"a doodle of a {c}.", + lambda c: f"a close-up photo of the {c}.", + lambda c: f"a photo of a {c}.", + lambda c: f"the origami {c}.", + lambda c: f"the {c} in a video game.", + lambda c: f"a sketch of a {c}.", + lambda c: f"a doodle of the {c}.", + lambda c: f"a origami {c}.", + lambda c: f"a low resolution photo of a {c}.", + lambda c: f"the toy {c}.", + lambda c: f"a rendition of the {c}.", + lambda c: f"a photo of the clean {c}.", + lambda c: f"a photo of a large {c}.", + lambda c: f"a rendition of a {c}.", + 
lambda c: f"a photo of a nice {c}.", + lambda c: f"a photo of a weird {c}.", + lambda c: f"a blurry photo of a {c}.", + lambda c: f"a cartoon {c}.", + lambda c: f"art of a {c}.", + lambda c: f"a sketch of the {c}.", + lambda c: f"a embroidered {c}.", + lambda c: f"a pixelated photo of a {c}.", + lambda c: f"itap of the {c}.", + lambda c: f"a jpeg corrupted photo of the {c}.", + lambda c: f"a good photo of a {c}.", + lambda c: f"a plushie {c}.", + lambda c: f"a photo of the nice {c}.", + lambda c: f"a photo of the small {c}.", + lambda c: f"a photo of the weird {c}.", + lambda c: f"the cartoon {c}.", + lambda c: f"art of the {c}.", + lambda c: f"a drawing of the {c}.", + lambda c: f"a photo of the large {c}.", + lambda c: f"a black and white photo of a {c}.", + lambda c: f"the plushie {c}.", + lambda c: f"a dark photo of a {c}.", + lambda c: f"itap of a {c}.", + lambda c: f"graffiti of the {c}.", + lambda c: f"a toy {c}.", + lambda c: f"itap of my {c}.", + lambda c: f"a photo of a cool {c}.", + lambda c: f"a photo of a small {c}.", + lambda c: f"a tattoo of the {c}.", +] diff --git a/core/models/encoders/clap_modules/training/infer_demo.py b/core/models/encoders/clap_modules/training/infer_demo.py new file mode 100644 index 0000000000000000000000000000000000000000..7d1f4784898dbfeb69affefb6f624711adc8cb42 --- /dev/null +++ b/core/models/encoders/clap_modules/training/infer_demo.py @@ -0,0 +1,105 @@ +import sys + +import os +import torch +import librosa +from open_clip import create_model +from training.data import get_audio_features +from training.data import int16_to_float32, float32_to_int16 +from transformers import RobertaTokenizer + +tokenize = RobertaTokenizer.from_pretrained("roberta-base") + + +def tokenizer(text): + result = tokenize( + text, + padding="max_length", + truncation=True, + max_length=77, + return_tensors="pt", + ) + return {k: v.squeeze(0) for k, v in result.items()} + + +PRETRAINED_PATH = "/mnt/fast/nobackup/users/hl01486/projects/contrastive_pretraining/CLAP/assets/checkpoints/epoch_top_0_audioset_no_fusion.pt" +WAVE_48k_PATH = "/mnt/fast/nobackup/users/hl01486/projects/contrastive_pretraining/CLAP/assets/audio/machine.wav" + + +def infer_text(): + device = "cuda:0" if torch.cuda.is_available() else "cpu" + precision = "fp32" + amodel = "HTSAT-tiny" # or 'PANN-14' + tmodel = "roberta" # the best text encoder in our training + enable_fusion = False # False if you do not want to use the fusion model + fusion_type = "aff_2d" + pretrained = PRETRAINED_PATH + + model, model_cfg = create_model( + amodel, + tmodel, + pretrained, + precision=precision, + device=device, + enable_fusion=enable_fusion, + fusion_type=fusion_type, + ) + # load the text, can be a list (i.e. 
batch size)
+    text_data = ["I love the contrastive learning", "I love the pretrain model"]
+    # tokenize for roberta; to tokenize for another text encoder, please refer to data.py#L43-90
+    text_data = tokenizer(text_data)
+
+    text_embed = model.get_text_embedding(text_data)
+    print(text_embed.size())
+
+
+def infer_audio():
+
+    device = "cuda:0" if torch.cuda.is_available() else "cpu"
+    precision = "fp32"
+    amodel = "HTSAT-tiny"  # or 'PANN-14'
+    tmodel = "roberta"  # the best text encoder in our training
+    enable_fusion = False  # False if you do not want to use the fusion model
+    fusion_type = "aff_2d"
+    pretrained = PRETRAINED_PATH
+
+    model, model_cfg = create_model(
+        amodel,
+        tmodel,
+        pretrained,
+        precision=precision,
+        device=device,
+        enable_fusion=enable_fusion,
+        fusion_type=fusion_type,
+    )
+
+    # load the waveform of the shape (T,), should resample to 48000
+    audio_waveform, sr = librosa.load(WAVE_48k_PATH, sr=48000)
+    # quantize
+    audio_waveform = int16_to_float32(float32_to_int16(audio_waveform))
+    audio_waveform = torch.from_numpy(audio_waveform).float()
+    audio_dict = {}
+
+    # the 'fusion' truncate mode can be changed to 'rand_trunc' when running without fusion
+    audio_dict = get_audio_features(
+        audio_dict,
+        audio_waveform,
+        480000,
+        data_truncating="fusion",
+        data_filling="repeatpad",
+        audio_cfg=model_cfg["audio_cfg"],
+        dtype=torch.float32,  # assumed default; this repo's get_audio_features requires an explicit dtype
+    )
+    # a list can be sent to the model to process many audio tracks at once (i.e. batch size)
+    audio_embed = model.get_audio_embedding([audio_dict])
+    print(audio_embed.size())
+
+
+if __name__ == "__main__":
+    infer_text()
+    infer_audio()
diff --git a/core/models/encoders/clap_modules/training/logger.py b/core/models/encoders/clap_modules/training/logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac4634970fae6aacde2b7b808355dbd50c90ce73
--- /dev/null
+++ b/core/models/encoders/clap_modules/training/logger.py
@@ -0,0 +1,30 @@
+import logging
+
+
+def setup_logging(log_file, level, include_host=False):
+    if include_host:
+        import socket
+
+        hostname = socket.gethostname()
+        formatter = logging.Formatter(
+            f"%(asctime)s | {hostname} | %(levelname)s | %(message)s",
+            datefmt="%Y-%m-%d,%H:%M:%S",
+        )
+    else:
+        formatter = logging.Formatter(
+            "%(asctime)s | %(levelname)s | %(message)s", datefmt="%Y-%m-%d,%H:%M:%S"
+        )
+
+    logging.root.setLevel(level)
+    loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
+    for logger in loggers:
+        logger.setLevel(level)
+
+    stream_handler = logging.StreamHandler()
+    stream_handler.setFormatter(formatter)
+    logging.root.addHandler(stream_handler)
+
+    if log_file:
+        file_handler = logging.FileHandler(filename=log_file)
+        file_handler.setFormatter(formatter)
+        logging.root.addHandler(file_handler)
diff --git a/core/models/encoders/clap_modules/training/lp_main.py b/core/models/encoders/clap_modules/training/lp_main.py
new file mode 100644
index 0000000000000000000000000000000000000000..c2d4e8c85aaa3c8e4221963ef56a815cc14f354f
--- /dev/null
+++ b/core/models/encoders/clap_modules/training/lp_main.py
@@ -0,0 +1,670 @@
+import logging
+import os
+import random
+from datetime import datetime
+import bisect
+import copy
+import numpy as np
+import torch
+import torch.backends.cudnn as cudnn
+from torch import optim
+from torch.cuda.amp import GradScaler
+import faulthandler
+import pathlib
+import argparse
+import 
time + +try: + import wandb +except ImportError: + wandb = None + +try: + import torch.utils.tensorboard as tensorboard +except ImportError: + tensorboard = None + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + +from open_clip import create_model_and_transforms, trace_model, create_model +from training.data import get_data +from training.params import parse_args +from training.distributed import is_master, init_distributed_device, world_info_from_env +from training.logger import setup_logging +from training.scheduler import cosine_lr +from training.lp_train import train_one_epoch, evaluate +from open_clip.utils import get_tar_path_from_dataset_name, dataset_split, get_optimizer +from open_clip.utils import load_p, load_class_label +from open_clip.linear_probe import LinearProbe + + +def maintain_ckpts(args, startidx, all_idx_len): + for i in reversed(range(startidx, all_idx_len)): + if os.path.exists(os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt")): + os.rename( + os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt"), + os.path.join(args.checkpoint_path, f"epoch_top_{i+1}.pt"), + ) + if os.path.exists( + os.path.join(args.checkpoint_path, f"epoch_top_{all_idx_len}.pt") + ): + os.remove(os.path.join(args.checkpoint_path, f"epoch_top_{all_idx_len}.pt")) + return + + +def update_top_k_performance( + new_metrics_inputs, current_top_k_ckpt_metrics, args, ckpt, bignumbetter=True +): + """ + Record the top-k performance of the current epoch. + current_top_k_metrics is a dictionary of the form: {1: top_1_ckpt_measure, 2: top_2_ckpt_measure, ...} + """ + if isinstance(new_metrics_inputs, (list, tuple)): + new_metrics_inputs = np.mean(new_metrics_inputs) + return update_top_k_performance( + new_metrics_inputs, + current_top_k_ckpt_metrics, + args=args, + ckpt=ckpt, + bignumbetter=bignumbetter, + ) + elif isinstance(new_metrics_inputs, dict): + new_metrics_inputs = np.mean(list(new_metrics_inputs.values())) + return update_top_k_performance( + new_metrics_inputs, + current_top_k_ckpt_metrics, + args=args, + ckpt=ckpt, + bignumbetter=bignumbetter, + ) + elif isinstance(new_metrics_inputs, (float, int)): + update_flag = {k: False for k in current_top_k_ckpt_metrics.keys()} + sorted_keys = sorted(current_top_k_ckpt_metrics.keys()) + sorted_values = sorted( + current_top_k_ckpt_metrics.values(), reverse=bignumbetter + ) + sorted_values_ = copy.deepcopy(sorted_values) + sorted_values.append(new_metrics_inputs) + sorted_values = sorted(sorted_values, reverse=bignumbetter) + sorted_values = sorted_values[:-1] + + if sorted_values == sorted_values_: + return current_top_k_ckpt_metrics, new_metrics_inputs + else: + for i in range(len(sorted_keys)): + if current_top_k_ckpt_metrics[sorted_keys[i]] != sorted_values[i]: + current_top_k_ckpt_metrics[sorted_keys[i]] = sorted_values[i] + update_flag[sorted_keys[i]] = True + for i in range(len(update_flag)): + if update_flag[i]: + maintain_ckpts(args, i, len(sorted_keys)) + torch.save( + ckpt, + os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt"), + ) + break + return current_top_k_ckpt_metrics, new_metrics_inputs + + +# def updateifNone(a, b): +# a = b if None else a +# return a + + +def is_pretrained_params(n): + return ( + n.startswith("clap_model.transformer") + or n in ["clap_model.positional_embedding", "clap_model.text_projection"] + or n.startswith("clap_model.token_embedding") + or n.startswith("clap_model.ln_final") + or n.startswith("clap_model.logit_scale_t") + ) + + +def random_seed(seed=42, rank=0): + 
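"""Offset the seed by rank so each distributed worker draws a distinct, reproducible stream."""
+    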
torch.manual_seed(seed + rank) + np.random.seed(seed + rank) + random.seed(seed + rank) + + +def config_lp_optimizer(model, data, args): + # set wd-related params to 0 if use adam optimizer + if args.optimizer == "adam": + args.wd = 0 + args.wd_pretrained = 0 + args.wd_new = 0 + + in_clap = lambda n, p: n.startswith("clap_model") + + named_parameters = list(model.named_parameters()) + + optimizer = {} + scheduler = {} + + # freeze text encoder + text_freeze_parameters = [ + p + for n, p in named_parameters + if n.startswith("clap_model.transformer") + or n in ["clap_model.positional_embedding", "clap_model.text_projection"] + or n.startswith("clap_model.token_embedding") + or n.startswith("clap_model.ln_final") + ] + + if args.freeze_text: + logging.info("Freeze Text!!!!") + for k in text_freeze_parameters: + k.requires_grad = False + + if not args.lp_freeze: + exclude = ( + lambda n, p: p.ndim < 2 + or "bn" in n + or "ln" in n + or "bias" in n + or "logit_scale" in n + ) + include = lambda n, p: not exclude(n, p) + + # (yusong): we do not split the learning rate anymore + # p for n, p in named_parameters if in_clap(n,p) and exclude(n, p) and p.requires_grad + gain_or_bias_params = [ + p for n, p in named_parameters if exclude(n, p) and p.requires_grad + ] + # rest_params = [p for n, p in named_parameters if in_clap(n,p) and include(n, p) and p.requires_grad] + rest_params = [ + p for n, p in named_parameters if include(n, p) and p.requires_grad + ] + + if args.train_data is None: + optimizer = None + scheduler = None + else: + total_steps = data["train"].dataloader.num_batches * args.epochs + + if args.split_opt: + for x in ["lr", "beta1", "beta2", "eps", "wd"]: + for y in ["_new", "_pretrained"]: + if getattr(args, x + y) is None: + setattr(args, x + y, getattr(args, x)) + + gain_or_bias_pretrained_params = [ + p + for n, p in named_parameters + if (exclude(n, p) and p.requires_grad) and is_pretrained_params(n) + ] + rest_pretrained_params = [ + p + for n, p in named_parameters + if (include(n, p) and p.requires_grad) and is_pretrained_params(n) + ] + gain_or_bias_new_params = [ + p + for n, p in named_parameters + if (exclude(n, p) and p.requires_grad) + and (not is_pretrained_params(n)) + ] + rest_new_params = [ + p + for n, p in named_parameters + if (include(n, p) and p.requires_grad) + and (not is_pretrained_params(n)) + ] + + pretrained_params_optimizer = get_optimizer( + [ + {"params": gain_or_bias_pretrained_params, "weight_decay": 0.0}, + { + "params": rest_pretrained_params, + "weight_decay": args.wd_pretrained, + }, + ], + lr=args.lr_pretrained, + betas=(args.beta1_pretrained, args.beta2_pretrained), + eps=args.eps_pretrained, + momentum=args.momentum_pretrained, + optimizer_name=args.optimizer, + ) + pretrained_params_scheduler = cosine_lr( + pretrained_params_optimizer, + args.lr_pretrained, + args.warmup, + total_steps, + ) + + new_params_optimizer = get_optimizer( + [ + {"params": gain_or_bias_new_params, "weight_decay": 0.0}, + {"params": rest_new_params, "weight_decay": args.wd_new}, + ], + lr=args.lr_new, + betas=(args.beta1_new, args.beta2_new), + eps=args.eps_new, + momentum=args.momentum_new, + optimizer_name=args.optimizer, + ) + new_params_scheduler = cosine_lr( + new_params_optimizer, args.lr_new, args.warmup, total_steps + ) + + optimizer["text"] = pretrained_params_optimizer + optimizer["audio"] = new_params_optimizer + scheduler["text"] = pretrained_params_scheduler + scheduler["audio"] = new_params_scheduler + + if args.horovod: + pretrained_params_optimizer = 
hvd.DistributedOptimizer( + pretrained_params_optimizer, + named_parameters=model.named_parameters(), + ) + new_params_optimizer = hvd.DistributedOptimizer( + new_params_optimizer, named_parameters=model.named_parameters() + ) + hvd.broadcast_parameters(model.state_dict(), root_rank=0) + hvd.broadcast_optimizer_state( + pretrained_params_optimizer, root_rank=0 + ) + hvd.broadcast_optimizer_state(new_params_optimizer, root_rank=0) + else: + + optimizer["clap"] = get_optimizer( + [ + {"params": gain_or_bias_params, "weight_decay": 0.0}, + {"params": rest_params, "weight_decay": args.wd}, + ], + lr=args.lr, + betas=(args.beta1, args.beta2), + eps=args.eps, + momentum=args.momentum, + optimizer_name=args.optimizer, + ) + scheduler["clap"] = cosine_lr( + optimizer["clap"], args.lr, args.warmup, total_steps + ) + + if args.horovod: + optimizer["clap"] = hvd.DistributedOptimizer( + optimizer["clap"], named_parameters=model.named_parameters() + ) + hvd.broadcast_parameters(model.state_dict(), root_rank=0) + hvd.broadcast_optimizer_state(optimizer["clap"], root_rank=0) + + # linear probe optimizer + else: + lp_params = [ + p for n, p in named_parameters if (not in_clap(n, p)) and p.requires_grad + ] + lp_optim = get_optimizer( + lp_params, + lr=args.lp_lr, + betas=(args.beta1, args.beta2), + eps=args.eps, + momentum=0.9, + optimizer_name=args.optimizer, + ) + optimizer["lp"] = lp_optim + + return optimizer, scheduler, text_freeze_parameters + + + def main(): + args = parse_args() + + time.sleep(args.sleep) + + # sanitize model name for filesystem / uri use, easier if we don't use / in name as a rule? + args.amodel = args.amodel.replace("/", "-") + # download sizes.json file + + # (yusong): the below two lines are for debug + # print("setting up faulthandler") + # faulthandler.register(10) + + random.seed(args.seed) + torch.manual_seed(args.seed) + torch.cuda.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + np.random.seed(args.seed) + args.class_index_dict = load_class_label(args.class_label_path) + + # get the name of the experiments + if args.name is None: + args.name = "-".join( + [ + datetime.now().strftime("%Y_%m_%d-%H_%M_%S"), + "linear_probe", + f"model_{args.amodel}", + f"lr_{args.lr}", + f"b_{args.batch_size}", + f"j_{args.workers}", + f"p_{args.precision}", + ] + ) + + # discover initial world args early so we can log properly + args.distributed = False + args.local_rank, args.rank, args.world_size = world_info_from_env() + + if args.remotedata and is_master(args): + for dataset_name in args.datasetnames: + for split in dataset_split[dataset_name]: + if not os.path.exists(f"./json_files/{dataset_name}/{split}"): + os.makedirs(f"./json_files/{dataset_name}/{split}") + os.system( + f"aws s3 cp s3://s-laion-audio/webdataset_tar/{dataset_name}/{split}/sizes.json ./json_files/{dataset_name}/{split}/sizes.json" + ) + + args.log_path = None + if is_master(args, local=args.log_local): + log_base_path = os.path.join(args.logs, args.name) + os.makedirs(log_base_path, exist_ok=True) + log_filename = f"out-{args.rank}" if args.log_local else "out.log" + args.log_path = os.path.join(log_base_path, log_filename) + + # avoid log dir in same name: + postfix = 0 + while os.path.exists(args.log_path): + postfix += 1 + log_base_path_new = log_base_path + "-" + str(postfix) + os.makedirs(log_base_path_new, exist_ok=True) + log_filename = f"out-{args.rank}" if args.log_local else "out.log" + args.log_path = os.path.join(log_base_path_new, log_filename) + # print( + # "Error. 
Experiment already exists. Use --name {} to specify a new experiment." + # ) + # return -1 + + # Set logger + args.log_level = logging.DEBUG if args.debug else logging.INFO + setup_logging(args.log_path, args.log_level) + + # fully initialize distributed device environment + device = init_distributed_device(args) + + args.wandb = "wandb" in args.report_to or "all" in args.report_to + args.tensorboard = "tensorboard" in args.report_to or "all" in args.report_to + if is_master(args): + args.tensorboard_path = ( + os.path.join(args.logs, args.name, "tensorboard") + if args.tensorboard + else "" + ) + args.checkpoint_path = os.path.join(args.logs, args.name, "checkpoints") + for dirname in [args.tensorboard_path, args.checkpoint_path]: + if dirname: + os.makedirs(dirname, exist_ok=True) + else: + args.tensorboard_path = "" + args.checkpoint_path = "" + + if args.copy_codebase: + copy_codebase(args) + + assert args.precision in ["amp", "fp16", "fp32"] + if args.precision == "fp16": + logging.warning( + "It is recommended to use AMP mixed-precision instead of FP16. " + "FP16 support needs further verification and tuning, especially for train." + ) + + if args.horovod: + logging.info( + f"Running in horovod mode with multiple processes / nodes. Device: {args.device}." + f"Process (global: {args.rank}, local {args.local_rank}), total {args.world_size}." + ) + elif args.distributed: + logging.info( + f"Running in distributed mode with multiple processes. Device: {args.device}." + f"Process (global: {args.rank}, local {args.local_rank}), total {args.world_size}." + ) + else: + logging.info(f"Running with a single process. Device {args.device}.") + + logging.info(f"openai cache dir: {os.path.expanduser(args.openai_model_cache_dir)}") + + # Create CLAP model + clap_model, clap_model_cfg = create_model( + args.amodel, + args.tmodel, + args.pretrained, + precision=args.precision, + device=device, + jit=args.torchscript, + force_quick_gelu=args.force_quick_gelu, + openai_model_cache_dir=os.path.expanduser(args.openai_model_cache_dir), + skip_params=False, + pretrained_audio=args.pretrained_audio, + pretrained_text=args.pretrained_text, + enable_fusion=args.enable_fusion, + fusion_type=args.fusion_type, + ) + + args.lp_out_ch = len(list(args.class_index_dict.keys())) + # Linear Probe + logging.info(f"linear probe using mlp: {args.lp_mlp}") + logging.info(f"linear probe using freeze: {args.lp_freeze}") + logging.info(f"linear probe act layer: {args.lp_act}") + logging.info(f"linear probe out ch: {args.lp_out_ch}") + logging.info(f"linear probe learning rate (if applicable): {args.lp_lr}") + logging.info(f"linear probe loss func: {args.lp_loss}") + logging.info(f"linear probe lp_metrics: {args.lp_metrics}") + + model = LinearProbe( + clap_model, + mlp=args.lp_mlp, + freeze=args.lp_freeze, + in_ch=512, + out_ch=args.lp_out_ch, + act=args.lp_act, + ) # in_ch is fixed (i.e., 512) + model = model.to(device) + + if args.horovod: + with torch.no_grad(): + for param in model.parameters(): + param.set_(param.contiguous()) + + if args.trace: + model = trace_model(model, batch_size=args.batch_size, device=device) + + if is_master(args): + logging.info("Linear Probe CLAP Model:") + logging.info(f"{str(clap_model)}") + logging.info("Params:") + params_file = os.path.join(args.logs, args.name, "params.txt") + with open(params_file, "w") as f: + for name in sorted(vars(args)): + val = getattr(args, name) + logging.info(f" {name}: {val}") + f.write(f"{name}: {val}\n") + + if args.distributed and not args.horovod: + if 
args.use_bn_sync: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) + ddp_args = {} + if args.ddp_static_graph: + # this doesn't exist in older PyTorch, arg only added if enabled + ddp_args["static_graph"] = True + model = torch.nn.parallel.DistributedDataParallel( + model, device_ids=[device], find_unused_parameters=True, **ddp_args + ) + + data = get_data(args, clap_model_cfg) + assert len(data), "At least one train or eval dataset must be specified." + if args.trace: + assert "train" not in data, "Cannot train with traced model" + + optimizer, scheduler, text_freeze_parameters = config_lp_optimizer( + model, data, args + ) + + scaler = GradScaler() if args.precision == "amp" else None + + # optionally resume from a checkpoint + start_epoch = 0 + if args.resume is not None: + if os.path.isfile(args.resume): + checkpoint = torch.load(args.resume, map_location=device) + if "epoch" in checkpoint: + # resuming a train checkpoint w/ epoch and optimizer state + start_epoch = checkpoint["epoch"] + sd = checkpoint["state_dict"] + if not args.distributed and next(iter(sd.items()))[0].startswith( + "module" + ): + sd = {k[len("module.") :]: v for k, v in sd.items()} + model.load_state_dict(sd) + if args.split_opt: + if optimizer is not None: + for k, o_ in optimizer.items(): + o_.load_state_dict(checkpoint[k + "_" + "optimizer"]) + if optimizer is not None: + optimizer.load_state_dict(checkpoint["optimizer"]) + if scaler is not None and "scaler" in checkpoint: + scaler.load_state_dict(checkpoint["scaler"]) + logging.info( + f"=> resuming checkpoint '{args.resume}' (epoch {start_epoch})" + ) + else: + # loading a bare (model only) checkpoint for fine-tune or evaluation + model.load_state_dict(checkpoint) + logging.info( + f"=> loaded checkpoint '{args.resume}' (epoch {start_epoch})" + ) + if args.freeze_text: + print("Freeze Text!!!!") + for k in text_freeze_parameters: + k.requires_grad = False + else: + logging.info("=> no checkpoint found at '{}'".format(args.resume)) + + cudnn.benchmark = True + cudnn.deterministic = False + + # determine if this worker should save logs and checkpoints. only do so if it is rank == 0 + args.save_logs = args.logs and args.logs.lower() != "none" and is_master(args) + writer = None + if args.save_logs and args.tensorboard: + assert tensorboard is not None, "Please install tensorboard." + writer = tensorboard.SummaryWriter(args.tensorboard_path) + + if args.wandb and is_master(args): + assert wandb is not None, "Please install wandb." + logging.debug("Starting wandb.") + args.train_sz = data["train"].dataloader.num_samples + if args.val_data is not None: + args.val_sz = data["val"].dataloader.num_samples + # you will have to configure this for your project! 
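+ # e.g. a resumable run could also pass id=args.wandb_id and resume="allow"; + # the --wandb-id flag parsed in params.py appears otherwise unused here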
+ wandb.init( + project="clap", + notes=args.wandb_notes, + name=args.wandb_notes, + tags=[], + config=vars(args), + ) + if args.debug: + wandb.watch(model, log="all") + wandb.save(params_file) + logging.debug("Finished loading wandb.") + + if "train" not in data: + evaluate(model, data, start_epoch, args, writer) + return + elif start_epoch == 0 and "val" in data and not args.no_eval: + evaluate(model, data, 0, args, writer) + if args.save_top_performance: + current_top_k_ckpt_metrics = { + i: 0 for i in range(args.save_top_performance) + } # initialize the top-k metric for ckpts to 0 + + for epoch in range(start_epoch, args.epochs): + # freeze the text params from epoch args.freeze_text_after onward (-1, the default, disables this) + if epoch == args.freeze_text_after: + print("Text pretrained parameters are frozen from this epoch on.") + for k in text_freeze_parameters: + k.requires_grad = False + if is_master(args): + logging.info(f"Start epoch {epoch}") + + train_one_epoch(model, data, epoch, optimizer, scaler, scheduler, args, writer) + completed_epoch = epoch + 1 + + if ( + any(v in data for v in ("val", "imagenet-val", "imagenet-v2")) + and not args.no_eval + ): + metrics = evaluate(model, data, completed_epoch, args, writer) + if args.save_top_performance: + top_k_dataset = args.top_k_checkpoint_select_dataset + top_k_metric = args.top_k_checkpoint_select_metric + filtered_metrics = [ + v + for k, v in metrics.items() + if top_k_metric in k and top_k_dataset in k + ] # check all R@10 metrics (all datasets) and use them to update the ckpt + # Saving checkpoints. + if args.save_logs: + opt_dict = { + k + "_" + "optimizer": v.state_dict() for k, v in optimizer.items() + } + checkpoint_dict = { + "epoch": completed_epoch, + "name": args.name, + "state_dict": model.state_dict(), + } + checkpoint_dict.update(opt_dict) + if scaler is not None: + checkpoint_dict["scaler"] = scaler.state_dict() + + if completed_epoch == args.epochs or ( + args.save_frequency > 0 and (completed_epoch % args.save_frequency) == 0 + ): + torch.save( + checkpoint_dict, + os.path.join(args.checkpoint_path, f"epoch_{completed_epoch}.pt"), + ) + if args.save_most_recent: + torch.save( + checkpoint_dict, + os.path.join(args.checkpoint_path, "epoch_latest.pt"), + ) + if args.save_top_performance and not args.no_eval: + update_top_k_performance( + filtered_metrics, + current_top_k_ckpt_metrics, + args, + checkpoint_dict, + bignumbetter=True, + ) + + if args.wandb and is_master(args): + wandb.finish() + + + def copy_codebase(args): + from shutil import copytree, ignore_patterns + + new_code_path = os.path.join(args.logs, args.name, "code") + if os.path.exists(new_code_path): + print( + f"Error. Experiment already exists at {new_code_path}. Use --name to specify a new experiment." 
+ ) + return -1 + print(f"Copying codebase to {new_code_path}") + current_code_path = os.path.realpath(__file__) + for _ in range(3): + current_code_path = os.path.dirname(current_code_path) + copytree( + current_code_path, new_code_path, ignore=ignore_patterns("log", "logs", "wandb") + ) + print("Done copying code.") + return 1 + + +if __name__ == "__main__": + main() diff --git a/core/models/encoders/clap_modules/training/lp_train.py b/core/models/encoders/clap_modules/training/lp_train.py new file mode 100644 index 0000000000000000000000000000000000000000..24a19bacd0a4b789415cfccbce1f8bc99bc493ed --- /dev/null +++ b/core/models/encoders/clap_modules/training/lp_train.py @@ -0,0 +1,301 @@ +import json +import logging +import math +import os +import time +from contextlib import suppress + +import numpy as np +import torch +import torch.nn.functional as F + +try: + import wandb +except ImportError: + wandb = None + +from open_clip import LPLoss, LPMetrics, lp_gather_features +from open_clip.utils import do_mixup, get_mix_lambda +from .distributed import is_master +from .zero_shot import zero_shot_eval + + +class AverageMeter(object): + """Computes and stores the average and current value""" + + def __init__(self): + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + +def unwrap_model(model): + if hasattr(model, "module"): + return model.module + else: + return model + + +def train_one_epoch( + model, + data, + epoch, + optimizer, + scaler, + scheduler, + args, + tb_writer=None, + extra_suffix="", +): + device = torch.device(args.device) + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + model.train() + loss = LPLoss(args.lp_loss) + + dataloader, sampler = data["train"].dataloader, data["train"].sampler + if args.distributed and sampler is not None: + sampler.set_epoch(epoch) + num_batches_per_epoch = dataloader.num_batches + sample_digits = math.ceil(math.log(dataloader.num_samples + 1, 10)) + + # for toy dataset + if args.dataset_type == "toy": + dataloader.dataset.generate_queue() + + loss_m = AverageMeter() + batch_time_m = AverageMeter() + data_time_m = AverageMeter() + end = time.time() + + for i, batch in enumerate(dataloader): + step = num_batches_per_epoch * epoch + i + + if isinstance(scheduler, dict): + for s in scheduler.values(): + s(step) + else: + scheduler(step) + + audio = batch # contains mel_spec, wavform, and longer list + class_label = batch["class_label"] + # audio = audio.to(device=device, non_blocking=True) + class_label = class_label.to(device=device, non_blocking=True) + + if args.mixup: + # https://github.com/RetroCirce/HTS-Audio-Transformer/blob/main/utils.py#L146 + mix_lambda = torch.from_numpy( + get_mix_lambda(0.5, len(audio["waveform"])) + ).to(device) + class_label = do_mixup(class_label, mix_lambda) + else: + mix_lambda = None + + data_time_m.update(time.time() - end) + if isinstance(optimizer, dict): + for o_ in optimizer.values(): + o_.zero_grad() + else: + optimizer.zero_grad() + + with autocast(): + pred = model(audio, mix_lambda=mix_lambda, device=device) + total_loss = loss(pred, class_label) + + if isinstance(optimizer, dict): + if scaler is not None: + scaler.scale(total_loss).backward() + for o_ in optimizer.values(): + if args.horovod: + o_.synchronize() + scaler.unscale_(o_) + with o_.skip_synchronize(): + scaler.step(o_) + else: + scaler.step(o_) 
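+ # GradScaler keeps a single scale across all optimizers: step() each one above, + # then call update() exactly once per iteration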
+ scaler.update() + else: + total_loss.backward() + for o_ in optimizer.values(): + o_.step() + else: + if scaler is not None: + scaler.scale(total_loss).backward() + if args.horovod: + optimizer.synchronize() + scaler.unscale_(optimizer) + with optimizer.skip_synchronize(): + scaler.step(optimizer) + else: + scaler.step(optimizer) + scaler.update() + else: + total_loss.backward() + optimizer.step() + + # Note: we clamp to 4.6052 = ln(100), as in the original paper. + with torch.no_grad(): + unwrap_model(model).clap_model.logit_scale_a.clamp_(0, math.log(100)) + unwrap_model(model).clap_model.logit_scale_t.clamp_(0, math.log(100)) + + batch_time_m.update(time.time() - end) + end = time.time() + batch_count = i + 1 + + if is_master(args) and (i % 100 == 0 or batch_count == num_batches_per_epoch): + if isinstance(audio, dict): + batch_size = len(audio["waveform"]) + else: + batch_size = len(audio) + num_samples = batch_count * batch_size * args.world_size + samples_per_epoch = dataloader.num_samples + percent_complete = 100.0 * batch_count / num_batches_per_epoch + + # NOTE loss is coarsely sampled, just master node and per log update + loss_m.update(total_loss.item(), batch_size) + if isinstance(optimizer, dict): + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {[o_.param_groups[0]['lr'] for o_ in optimizer.values()]}" + ) + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "lr": [o_.param_groups[0]["lr"] for o_ in optimizer.values()], + } + else: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {optimizer.param_groups[0]['lr']:5f} " + ) + + # Save train loss / etc. Using non avg meter values as loggers have their own smoothing + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "lr": optimizer.param_groups[0]["lr"], + } + for name, val in log_data.items(): + name = f"train{extra_suffix}/{name}" + if tb_writer is not None: + tb_writer.add_scalar(name, val, step) + if args.wandb: + assert wandb is not None, "Please install wandb." 
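+ # tensorboard and wandb log under the same train{extra_suffix}/<name> key, + # so curves from both backends line up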
+ wandb.log({name: val, "step": step}) + + # resetting batch / data time meters per log window + batch_time_m.reset() + data_time_m.reset() + # end for + + +def evaluate(model, data, epoch, args, tb_writer=None, extra_suffix=""): + metrics = {} + if not args.parallel_eval: + if not is_master(args): + return metrics + device = torch.device(args.device) + model.eval() + + # CHANGE + # zero_shot_metrics = zero_shot_eval(model, data, epoch, args) + # metrics.update(zero_shot_metrics) + if is_master(args): + print("Evaluating...") + metric_names = args.lp_metrics.split(",") + eval_tool = LPMetrics(metric_names=metric_names) + + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + if "val" in data and ( + args.val_frequency + and ((epoch % args.val_frequency) == 0 or epoch == args.epochs) + ): + if args.parallel_eval: + dataloader, sampler = data["val"].dataloader, data["val"].sampler + if args.distributed and sampler is not None: + sampler.set_epoch(epoch) + samples_per_val = dataloader.num_samples + else: + dataloader = data["val"].dataloader + num_samples = 0 + samples_per_val = dataloader.num_samples + + eval_info = {"pred": [], "target": []} + with torch.no_grad(): + for i, batch in enumerate(dataloader): + audio = batch # contains mel_spec, wavform, and longer list + class_label = batch["class_label"] + + # audio = audio.to(device=device, non_blocking=True) + class_label = class_label.to(device=device, non_blocking=True) + + with autocast(): + pred = model(audio, device=device) + if args.parallel_eval: + pred, class_label = lp_gather_features( + pred, class_label, args.world_size, args.horovod + ) + eval_info["pred"].append(pred) + eval_info["target"].append(class_label) + + num_samples += class_label.shape[0] + + if (i % 100) == 0: # and i != 0: + logging.info( + f"Eval Epoch: {epoch} [{num_samples} / {samples_per_val}]" + ) + + if is_master(args): + eval_info["pred"] = torch.cat(eval_info["pred"], 0).cpu() + eval_info["target"] = torch.cat(eval_info["target"], 0).cpu() + metric_dict = eval_tool.evaluate_mertics( + eval_info["pred"], eval_info["target"] + ) + metrics.update(metric_dict) + if "epoch" not in metrics.keys(): + metrics.update({"epoch": epoch}) + + if is_master(args): + if not metrics: + return metrics + + logging.info( + f"Eval Epoch: {epoch} " + + "\n".join( + ["\t".join([f"{m}: {round(metrics[m], 4):.4f}"]) for m in metrics] + ) + ) + if args.save_logs: + for name, val in metrics.items(): + if tb_writer is not None: + tb_writer.add_scalar(f"val{extra_suffix}/{name}", val, epoch) + + with open(os.path.join(args.checkpoint_path, "results.jsonl"), "a+") as f: + f.write(json.dumps(metrics)) + f.write("\n") + + if args.wandb: + assert wandb is not None, "Please install wandb." 
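+ # mirror each metric to wandb under val{extra_suffix}/<name>, tagged with the epoch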
+ for name, val in metrics.items(): + wandb.log({f"val{extra_suffix}/{name}": val, "epoch": epoch}) + + return metrics + else: + return metrics diff --git a/core/models/encoders/clap_modules/training/main.py b/core/models/encoders/clap_modules/training/main.py new file mode 100644 index 0000000000000000000000000000000000000000..3b563a5d001be7adfbe779dee7ad8ac49aadc50d --- /dev/null +++ b/core/models/encoders/clap_modules/training/main.py @@ -0,0 +1,596 @@ +from inspect import getargs +import logging +import os +import random +from datetime import datetime +import bisect +import copy +import numpy as np +import torch +import torch.backends.cudnn as cudnn +from torch import optim +from torch.cuda.amp import GradScaler +import faulthandler +import pathlib + +try: + import wandb +except ImportError: + wandb = None + +try: + import torch.utils.tensorboard as tensorboard +except ImportError: + tensorboard = None + +try: + import horovod.torch as hvd +except ImportError: + hvd = None + +from open_clip import create_model_and_transforms, trace_model, create_model +from training.data import get_data +from training.distributed import is_master, init_distributed_device, world_info_from_env +from training.logger import setup_logging +from training.params import parse_args +from training.scheduler import cosine_lr +from training.train import train_one_epoch, evaluate +from open_clip.utils import dataset_split, get_optimizer + + +def maintain_ckpts(args, startidx, all_idx_len): + for i in reversed(range(startidx, all_idx_len)): + if os.path.exists(os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt")): + os.rename( + os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt"), + os.path.join(args.checkpoint_path, f"epoch_top_{i+1}.pt"), + ) + if os.path.exists( + os.path.join(args.checkpoint_path, f"epoch_top_{all_idx_len}.pt") + ): + os.remove(os.path.join(args.checkpoint_path, f"epoch_top_{all_idx_len}.pt")) + return + + +def update_top_k_performance( + new_metrics_inputs, current_top_k_ckpt_metrics, args, ckpt, bignumbetter=True +): + """ + Record the top-k performance of the current epoch. 
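+ Checkpoints live on disk as epoch_top_{i}.pt, ordered best-first when + bignumbetter=True. For example, with current metrics {0: 0.9, 1: 0.7} and a + new score of 0.8, maintain_ckpts rotates the old epoch_top_1.pt out and the + new checkpoint is saved as epoch_top_1.pt.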
+ current_top_k_metrics is a dictionary of the form: {1: top_1_ckpt_measure, 2: top_2_ckpt_measure, ...} + """ + if isinstance(new_metrics_inputs, (list, tuple)): + new_metrics_inputs = np.mean(new_metrics_inputs) + return update_top_k_performance( + new_metrics_inputs, + current_top_k_ckpt_metrics, + args=args, + ckpt=ckpt, + bignumbetter=bignumbetter, + ) + elif isinstance(new_metrics_inputs, dict): + new_metrics_inputs = np.mean(list(new_metrics_inputs.values())) + return update_top_k_performance( + new_metrics_inputs, + current_top_k_ckpt_metrics, + args=args, + ckpt=ckpt, + bignumbetter=bignumbetter, + ) + elif isinstance(new_metrics_inputs, (float, int)): + update_flag = {k: False for k in current_top_k_ckpt_metrics.keys()} + sorted_keys = sorted(current_top_k_ckpt_metrics.keys()) + sorted_values = sorted( + current_top_k_ckpt_metrics.values(), reverse=bignumbetter + ) + sorted_values_ = copy.deepcopy(sorted_values) + sorted_values.append(new_metrics_inputs) + sorted_values = sorted(sorted_values, reverse=bignumbetter) + sorted_values = sorted_values[:-1] + + if sorted_values == sorted_values_: + return current_top_k_ckpt_metrics, new_metrics_inputs + else: + for i in range(len(sorted_keys)): + if current_top_k_ckpt_metrics[sorted_keys[i]] != sorted_values[i]: + current_top_k_ckpt_metrics[sorted_keys[i]] = sorted_values[i] + update_flag[sorted_keys[i]] = True + for i in range(len(update_flag)): + if update_flag[i]: + maintain_ckpts(args, i, len(sorted_keys)) + torch.save( + ckpt, + os.path.join(args.checkpoint_path, f"epoch_top_{i}.pt"), + ) + break + return current_top_k_ckpt_metrics, new_metrics_inputs + + +# def updateifNone(a, b): +# a = b if None else a +# return a + + +def is_pretrained_params(n): + return ( + n.startswith("transformer") + or n in ["positional_embedding", "text_projection"] + or n.startswith("token_embedding") + or n.startswith("ln_final") + or n.startswith("logit_scale_t") + ) + + +def random_seed(seed=42, rank=0): + torch.manual_seed(seed + rank) + np.random.seed(seed + rank) + random.seed(seed + rank) + + +def main(): + args = parse_args() + # sanitize model name for filesystem / uri use, easier if we don't use / in name as a rule? + args.amodel = args.amodel.replace("/", "-") + # download sizes.json file + + # (yusong): the below two lines are for debug + # print("setting up faulthandler") + # faulthandler.register(10) + + random.seed(args.seed) + torch.manual_seed(args.seed) + torch.cuda.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + np.random.seed(args.seed) + if args.tmodel == "bert" or args.tmodel == "roberta" or args.tmodel == "bart": + assert ( + args.pretrained == "" or args.pretrained is None + ), "bert/roberta/bart text encoder does not support pretrained models." 
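+ # presumably because these text towers load their own HuggingFace weights, + # so a CLIP-style --pretrained checkpoint cannot be applied on top of them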
+ + # get the name of the experiments + if args.name is None: + args.name = "-".join( + [ + datetime.now().strftime("%Y_%m_%d-%H_%M_%S"), + f"model_{args.amodel}", + f"lr_{args.lr}", + f"b_{args.batch_size}", + f"j_{args.workers}", + f"p_{args.precision}", + ] + ) + + # discover initial world args early so we can log properly + args.distributed = False + args.local_rank, args.rank, args.world_size = world_info_from_env() + + if args.remotedata and is_master(args): + for dataset_name in args.datasetnames: + for split in dataset_split[dataset_name]: + if not os.path.exists(f"./json_files/{dataset_name}/{split}"): + os.makedirs(f"./json_files/{dataset_name}/{split}") + os.system( + f"aws s3 cp s3://s-laion-audio/webdataset_tar/{dataset_name}/{split}/sizes.json ./json_files/{dataset_name}/{split}/sizes.json" + ) + + args.log_path = None + if is_master(args, local=args.log_local): + log_base_path = os.path.join(args.logs, args.name) + os.makedirs(log_base_path, exist_ok=True) + log_filename = f"out-{args.rank}" if args.log_local else "out.log" + args.log_path = os.path.join(log_base_path, log_filename) + if os.path.exists(args.log_path): + print( + "Error. Experiment already exists. Use --name to specify a new experiment." + ) + return -1 + + # Set logger + args.log_level = logging.DEBUG if args.debug else logging.INFO + setup_logging(args.log_path, args.log_level) + + # fully initialize distributed device environment + device = init_distributed_device(args) + + args.wandb = "wandb" in args.report_to or "all" in args.report_to + args.tensorboard = "tensorboard" in args.report_to or "all" in args.report_to + if is_master(args): + args.tensorboard_path = ( + os.path.join(args.logs, args.name, "tensorboard") + if args.tensorboard + else "" + ) + args.checkpoint_path = os.path.join(args.logs, args.name, "checkpoints") + for dirname in [args.tensorboard_path, args.checkpoint_path]: + if dirname: + os.makedirs(dirname, exist_ok=True) + else: + args.tensorboard_path = "" + args.checkpoint_path = "" + + if args.copy_codebase: + copy_codebase(args) + + assert args.precision in ["amp", "fp16", "fp32"] + if args.precision == "fp16": + logging.warning( + "It is recommended to use AMP mixed-precision instead of FP16. " + "FP16 support needs further verification and tuning, especially for train." + ) + + if args.horovod: + logging.info( + f"Running in horovod mode with multiple processes / nodes. Device: {args.device}." + f"Process (global: {args.rank}, local {args.local_rank}), total {args.world_size}." + ) + elif args.distributed: + logging.info( + f"Running in distributed mode with multiple processes. Device: {args.device}." + f"Process (global: {args.rank}, local {args.local_rank}), total {args.world_size}." + ) + else: + logging.info(f"Running with a single process. 
Device {args.device}.") + + logging.info(f"openai cache dir: {os.path.expanduser(args.openai_model_cache_dir)}") + + model, model_cfg = create_model( + args.amodel, + args.tmodel, + args.pretrained, + precision=args.precision, + device=device, + jit=args.torchscript, + force_quick_gelu=args.force_quick_gelu, + openai_model_cache_dir=os.path.expanduser(args.openai_model_cache_dir), + skip_params=True, + pretrained_audio=args.pretrained_audio, + pretrained_text=args.pretrained_text, + enable_fusion=args.enable_fusion, + fusion_type=args.fusion_type, + ) + + if args.horovod: + with torch.no_grad(): + for param in model.parameters(): + param.set_(param.contiguous()) + + if args.trace: + model = trace_model(model, batch_size=args.batch_size, device=device) + + if is_master(args): + logging.info("Model:") + logging.info(f"{str(model)}") + logging.info("Params:") + params_file = os.path.join(args.logs, args.name, "params.txt") + with open(params_file, "w") as f: + for name in sorted(vars(args)): + val = getattr(args, name) + logging.info(f" {name}: {val}") + f.write(f"{name}: {val}\n") + + if args.distributed and not args.horovod: + if args.use_bn_sync: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) + ddp_args = {} + if args.ddp_static_graph: + # this doesn't exist in older PyTorch, arg only added if enabled + ddp_args["static_graph"] = True + model = torch.nn.parallel.DistributedDataParallel( + model, device_ids=[device], find_unused_parameters=True, **ddp_args + ) + + data = get_data(args, model_cfg) + assert len(data), "At least one train or eval dataset must be specified." + if args.trace: + assert "train" not in data, "Cannot train with traced model" + + exclude = ( + lambda n, p: p.ndim < 2 + or "bn" in n + or "ln" in n + or "bias" in n + or "logit_scale" in n + ) + include = lambda n, p: not exclude(n, p) + + named_parameters = list(model.named_parameters()) + + # freeze text encoder + text_freeze_parameters = [p for n, p in named_parameters if "text_branch" in n] + + if args.freeze_text: + print("Freeze Text!!!!") + for k in text_freeze_parameters: + k.requires_grad = False + + gain_or_bias_params = [ + p for n, p in named_parameters if exclude(n, p) and p.requires_grad + ] + rest_params = [p for n, p in named_parameters if include(n, p) and p.requires_grad] + + # set wd-related params to 0 if use adam optimizer + if args.optimizer == "adam": + args.wd = 0 + args.wd_pretrained = 0 + args.wd_new = 0 + + if args.train_data is None: + optimizer = None + scheduler = None + else: + total_steps = data["train"].dataloader.num_batches * args.epochs + + if args.split_opt: + for x in ["lr", "beta1", "beta2", "eps", "wd"]: + for y in ["_new", "_pretrained"]: + if getattr(args, x + y) is None: + setattr(args, x + y, getattr(args, x)) + + gain_or_bias_pretrained_params = [ + p + for n, p in named_parameters + if (exclude(n, p) and p.requires_grad) and is_pretrained_params(n) + ] + rest_pretrained_params = [ + p + for n, p in named_parameters + if (include(n, p) and p.requires_grad) and is_pretrained_params(n) + ] + gain_or_bias_new_params = [ + p + for n, p in named_parameters + if (exclude(n, p) and p.requires_grad) and (not is_pretrained_params(n)) + ] + rest_new_params = [ + p + for n, p in named_parameters + if (include(n, p) and p.requires_grad) and (not is_pretrained_params(n)) + ] + pretrained_params_optimizer = get_optimizer( + [ + {"params": gain_or_bias_pretrained_params, "weight_decay": 0.0}, + { + "params": rest_pretrained_params, + "weight_decay": args.wd_pretrained, + }, 
+ ], + lr=args.lr_pretrained, + betas=(args.beta1_pretrained, args.beta2_pretrained), + eps=args.eps_pretrained, + momentum=args.momentum_pretrained, + optimizer_name=args.optimizer, + ) + pretrained_params_scheduler = cosine_lr( + pretrained_params_optimizer, + args.lr_pretrained, + args.warmup, + total_steps, + ) + new_params_optimizer = get_optimizer( + [ + {"params": gain_or_bias_new_params, "weight_decay": 0.0}, + {"params": rest_new_params, "weight_decay": args.wd_new}, + ], + lr=args.lr_new, + betas=(args.beta1_new, args.beta2_new), + eps=args.eps_new, + momentum=args.momentum_new, + optimizer_name=args.optimizer, + ) + + new_params_scheduler = cosine_lr( + new_params_optimizer, args.lr_new, args.warmup, total_steps + ) + + optimizer = { + "pretrained": pretrained_params_optimizer, + "new": new_params_optimizer, + } + scheduler = { + "pretrained": pretrained_params_scheduler, + "new": new_params_scheduler, + } + + if args.horovod: + pretrained_params_optimizer = hvd.DistributedOptimizer( + pretrained_params_optimizer, + named_parameters=model.named_parameters(), + ) + new_params_optimizer = hvd.DistributedOptimizer( + new_params_optimizer, named_parameters=model.named_parameters() + ) + hvd.broadcast_parameters(model.state_dict(), root_rank=0) + hvd.broadcast_optimizer_state(pretrained_params_optimizer, root_rank=0) + hvd.broadcast_optimizer_state(new_params_optimizer, root_rank=0) + else: + optimizer = get_optimizer( + [ + {"params": gain_or_bias_params, "weight_decay": 0.0}, + {"params": rest_params, "weight_decay": args.wd}, + ], + lr=args.lr, + betas=(args.beta1, args.beta2), + eps=args.eps, + momentum=args.momentum, + optimizer_name=args.optimizer, + ) + + scheduler = cosine_lr(optimizer, args.lr, args.warmup, total_steps) + + if args.horovod: + optimizer = hvd.DistributedOptimizer( + optimizer, named_parameters=model.named_parameters() + ) + hvd.broadcast_parameters(model.state_dict(), root_rank=0) + hvd.broadcast_optimizer_state(optimizer, root_rank=0) + + scaler = GradScaler() if args.precision == "amp" else None + + # optionally resume from a checkpoint + start_epoch = 0 + if args.resume is not None: + if os.path.isfile(args.resume): + checkpoint = torch.load(args.resume, map_location=device) + if "epoch" in checkpoint: + # resuming a train checkpoint w/ epoch and optimizer state + start_epoch = checkpoint["epoch"] + sd = checkpoint["state_dict"] + if not args.distributed and next(iter(sd.items()))[0].startswith( + "module" + ): + sd = {k[len("module.") :]: v for k, v in sd.items()} + model.load_state_dict(sd) + if args.split_opt: + if optimizer is not None: + for k, o_ in optimizer.items(): + o_.load_state_dict(checkpoint[k + "_" + "optimizer"]) + if optimizer is not None: + optimizer.load_state_dict(checkpoint["optimizer"]) + if scaler is not None and "scaler" in checkpoint: + scaler.load_state_dict(checkpoint["scaler"]) + logging.info( + f"=> resuming checkpoint '{args.resume}' (epoch {start_epoch})" + ) + else: + # loading a bare (model only) checkpoint for fine-tune or evaluation + model.load_state_dict(checkpoint) + logging.info( + f"=> loaded checkpoint '{args.resume}' (epoch {start_epoch})" + ) + if args.freeze_text: + print("Freeze Text!!!!") + for k in text_freeze_parameters: + k.requires_grad = False + else: + logging.info("=> no checkpoint found at '{}'".format(args.resume)) + + cudnn.benchmark = True + cudnn.deterministic = False + + # determine if this worker should save logs and checkpoints. 
only do so if it is rank == 0 + args.save_logs = args.logs and args.logs.lower() != "none" and is_master(args) + writer = None + if args.save_logs and args.tensorboard: + assert tensorboard is not None, "Please install tensorboard." + writer = tensorboard.SummaryWriter(args.tensorboard_path) + + if args.wandb and is_master(args): + assert wandb is not None, "Please install wandb." + logging.debug("Starting wandb.") + args.train_sz = data["train"].dataloader.num_samples + if args.val_data is not None: + args.val_sz = data["val"].dataloader.num_samples + # you will have to configure this for your project! + wandb.init( + project="clap", + notes=args.wandb_notes, + name=args.wandb_notes, + tags=[], + config=vars(args), + ) + if args.debug: + wandb.watch(model, log="all") + wandb.save(params_file) + logging.debug("Finished loading wandb.") + + if "train" not in data: + evaluate(model, data, start_epoch, args, writer) + return + elif start_epoch == 0 and "val" in data and not args.no_eval: + evaluate(model, data, 0, args, writer) + # print(f'rank {args.rank}, Start First Evaluation')# (yusong): for debug + if args.save_top_performance: + current_top_k_ckpt_metrics = { + i: 0 for i in range(args.save_top_performance) + } # initialize the top-k metric for ckpts to 0 + + # print(f'rank {args.rank}, Start Training') # (yusong): for debug + for epoch in range(start_epoch, args.epochs): + # freeze the text params from epoch args.freeze_text_after onward (-1, the default, disables this) + if epoch == args.freeze_text_after: + print("Text pretrained parameters are frozen from this epoch on.") + for k in text_freeze_parameters: + k.requires_grad = False + if is_master(args): + logging.info(f"Start epoch {epoch}") + + train_one_epoch(model, data, epoch, optimizer, scaler, scheduler, args, writer) + completed_epoch = epoch + 1 + + if ( + any(v in data for v in ("val", "imagenet-val", "imagenet-v2")) + and not args.no_eval + ): + metrics = evaluate(model, data, completed_epoch, args, writer) + if args.save_top_performance: + top_k_dataset = args.top_k_checkpoint_select_dataset + top_k_metric = args.top_k_checkpoint_select_metric + filtered_metrics = [ + v + for k, v in metrics.items() + if top_k_metric in k and top_k_dataset in k + ] # check all R@10 metrics (all datasets) and use them to update the ckpt + # Saving checkpoints. + if args.save_logs: + if args.split_opt: + opt_dict = { + k + "_" + "optimizer": v.state_dict() for k, v in optimizer.items() + } + else: + opt_dict = {"optimizer": optimizer.state_dict()} + checkpoint_dict = { + "epoch": completed_epoch, + "name": args.name, + "state_dict": model.state_dict(), + } + checkpoint_dict.update(opt_dict) + if scaler is not None: + checkpoint_dict["scaler"] = scaler.state_dict() + + if completed_epoch == args.epochs or ( + args.save_frequency > 0 and (completed_epoch % args.save_frequency) == 0 + ): + torch.save( + checkpoint_dict, + os.path.join(args.checkpoint_path, f"epoch_{completed_epoch}.pt"), + ) + if args.save_most_recent: + torch.save( + checkpoint_dict, + os.path.join(args.checkpoint_path, "epoch_latest.pt"), + ) + if args.save_top_performance and not args.no_eval: + update_top_k_performance( + filtered_metrics, + current_top_k_ckpt_metrics, + args, + checkpoint_dict, + bignumbetter=True, + ) + + if args.wandb and is_master(args): + wandb.finish() + + + def copy_codebase(args): + from shutil import copytree, ignore_patterns + + new_code_path = os.path.join(args.logs, args.name, "code") + if os.path.exists(new_code_path): + print( + f"Error. 
Experiment already exists at {new_code_path}. Use --name to specify a new experiment." + ) + return -1 + print(f"Copying codebase to {new_code_path}") + current_code_path = os.path.realpath(__file__) + for _ in range(3): + current_code_path = os.path.dirname(current_code_path) + copytree( + current_code_path, new_code_path, ignore=ignore_patterns("log", "logs", "wandb") + ) + print("Done copying code.") + return 1 + + + if __name__ == "__main__": + main() diff --git a/core/models/encoders/clap_modules/training/params.py b/core/models/encoders/clap_modules/training/params.py new file mode 100644 index 0000000000000000000000000000000000000000..0cc1a0e2d982e900988cf5a4b24b2e59b093537b --- /dev/null +++ b/core/models/encoders/clap_modules/training/params.py @@ -0,0 +1,563 @@ +import argparse + + +def get_default_params(model_name): + # Params from paper (https://arxiv.org/pdf/2103.00020.pdf) + model_name = model_name.lower() + if "vit" in model_name: + return {"lr": 5.0e-4, "beta1": 0.9, "beta2": 0.98, "eps": 1.0e-6} + else: + return {"lr": 5.0e-4, "beta1": 0.9, "beta2": 0.999, "eps": 1.0e-8} + + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--train-data", + type=str, + default=None, + help="Path to h5 file with training data", + ) + parser.add_argument( + "--val-data", + type=str, + default=None, + help="Path to h5 file with validation data", + ) + parser.add_argument( + "--freeze-text", + default=False, + action="store_true", + help="if you need to freeze the text encoder, make this True", + ) + parser.add_argument( + "--freeze-text-after", + type=int, + default=-1, + help="if you need to freeze the text encoder from epoch x onward, set this param to x. Set -1 to disable it", + ) + parser.add_argument( + "--train-ipc", + type=str, + default=None, + help="Path to npy file of the number of instances per class in training data", + ) + parser.add_argument( + "--val-ipc", + type=str, + default=None, + help="Path to npy file of the number of instances per class in validation data", + ) + parser.add_argument( + "--train-num-samples", + type=int, + default=None, + help="Number of samples in dataset. Required for webdataset if not available in info file.", + ) + parser.add_argument( + "--val-num-samples", + type=int, + default=None, + help="Number of samples in dataset. Useful for webdataset if not available in info file.", + ) + parser.add_argument( + "--dataset-type", + choices=["webdataset", "csv", "auto", "toy"], + default="auto", + help="Which type of dataset to process.", + ) + parser.add_argument( + "--csv-separator", + type=str, + default="\t", + help="For csv-like datasets, which separator to use.", + ) + parser.add_argument( + "--csv-img-key", + type=str, + default="filepath", + help="For csv-like datasets, the name of the key for the image paths.", + ) + parser.add_argument( + "--csv-caption-key", + type=str, + default="title", + help="For csv-like datasets, the name of the key for the captions.", + ) + parser.add_argument( + "--imagenet-val", + type=str, + default=None, + help="Path to imagenet val set for conducting zero shot evaluation.", + ) + parser.add_argument( + "--imagenet-v2", + type=str, + default=None, + help="Path to imagenet v2 for conducting zero shot evaluation.", + ) + parser.add_argument( + "--datasetnames", + nargs="+", + default=None, + help="If loading webdataset, specify the dataset names to load. 
Can be some of these: Clotho, audioset, audiocaps, BBCSoundEffects", + ) + parser.add_argument( + "--full-train-dataset", + nargs="+", + default=None, + help="Which datasets will be trained with all of their subsets (train+test).", + ) + parser.add_argument( + "--exclude-eval-dataset", + nargs="+", + default=None, + help="Which datasets will be excluded from evaluation", + ) + parser.add_argument( + "--datasetinfos", + nargs="+", + default=None, + help="If loading webdataset, specify the dataset types to load. Can be some of these: train, test, valid, unbalanced_train, balanced_train, eval", + ) + parser.add_argument( + "--dataset-proportion", + type=float, + default=1.0, + help="What proportion of the dataset to train on.", + ) + parser.add_argument( + "--remotedata", + default=False, + action="store_true", + help="if the dataset is remote, set this flag", + ) + parser.add_argument( + "--class-label-path", + type=str, + default=None, + help="The path of the class label pickle or csv.", + ) + parser.add_argument( + "--datasetpath", + type=str, + default="/mnt/audio_clip/webdataset_tar", + help="The path to the dataset", + ) + parser.add_argument( + "--logs", + type=str, + default="./logs/", + help="Where to store tensorboard logs. Use None to avoid storing logs.", + ) + parser.add_argument( + "--log-local", + action="store_true", + default=False, + help="log files on local master, otherwise global master only.", + ) + parser.add_argument( + "--name", + type=str, + default=None, + help="Optional identifier for the experiment when storing logs. Otherwise use current time.", + ) + parser.add_argument( + "--workers", type=int, default=1, help="Number of workers per GPU." + ) + parser.add_argument( + "--batch-size", type=int, default=64, help="Batch size per GPU." + ) + parser.add_argument( + "--epochs", type=int, default=32, help="Number of epochs to train for." + ) + parser.add_argument("--lr", type=float, default=None, help="Learning rate.") + parser.add_argument("--beta1", type=float, default=None, help="Adam beta 1.") + parser.add_argument("--beta2", type=float, default=None, help="Adam beta 2.") + parser.add_argument("--eps", type=float, default=None, help="Adam epsilon.") + parser.add_argument("--momentum", type=float, default=None, help="SGD momentum.") + parser.add_argument("--wd", type=float, default=0.2, help="Weight decay.") + + parser.add_argument( + "--split-opt", + action="store_true", + default=False, + help="Use this flag to split the optimizer into separate pretrained and new parameter groups, each with its own lr/betas/eps/wd.", + ) + parser.add_argument( + "--lr-pretrained", type=float, default=None, help="Learning rate for text." + ) + parser.add_argument( + "--beta1-pretrained", type=float, default=None, help="Adam beta 1 for text." + ) + parser.add_argument( + "--beta2-pretrained", type=float, default=None, help="Adam beta 2 for text." + ) + parser.add_argument( + "--eps-pretrained", type=float, default=None, help="Adam epsilon for text." + ) + parser.add_argument( + "--wd-pretrained", type=float, default=0.2, help="Weight decay for text." + ) + parser.add_argument( + "--momentum-pretrained", type=float, default=0.9, help="Momentum for text." + ) + parser.add_argument( + "--lr-new", type=float, default=None, help="Learning rate for audio." + ) + parser.add_argument( + "--beta1-new", type=float, default=None, help="Adam beta 1 for audio." + ) + parser.add_argument( + "--beta2-new", type=float, default=None, help="Adam beta 2 for audio." + ) + parser.add_argument( + "--eps-new", type=float, default=None, help="Adam epsilon for audio." 
+ ) + parser.add_argument( + "--wd-new", type=float, default=0.2, help="Weight decay for audio." + ) + parser.add_argument( + "--momentum-new", type=float, default=0.9, help="Momentum for audio." + ) + parser.add_argument( + "--warmup", type=int, default=10000, help="Number of steps to warm up for." + ) + parser.add_argument( + "--use-bn-sync", + default=False, + action="store_true", + help="Whether to use batch norm sync.", + ) + parser.add_argument( + "--skip-scheduler", + action="store_true", + default=False, + help="Use this flag to skip the learning rate decay.", + ) + parser.add_argument( + "--save-frequency", type=int, default=1, help="How often to save checkpoints." + ) + parser.add_argument( + "--save-top-performance", + type=int, + default=0, + help="Save the top-x best-performing checkpoints if the value is > 0", + ) + parser.add_argument( + "--save-most-recent", + action="store_true", + default=False, + help="Always save the most recent model trained to epoch_latest.pt.", + ) + parser.add_argument( + "--zeroshot-frequency", type=int, default=2, help="How often to run zero-shot evaluation." + ) + parser.add_argument( + "--val-frequency", + type=int, + default=1, + help="How often to run evaluation with val data.", + ) + parser.add_argument( + "--resume", + default=None, + type=str, + help="path to latest checkpoint (default: none)", + ) + parser.add_argument( + "--precision", + choices=["amp", "fp16", "fp32"], + default="amp", + help="Floating point precision.", + ) + parser.add_argument( + "--amodel", + type=str, + default="RN50", + help="Name of the audio backbone to use.", + ) + parser.add_argument( + "--tmodel", + type=str, + default="transformer", + help="Name of the text backbone to use. Can be [transformer, bert, roberta, bart]", + ) + parser.add_argument( + "--pretrained-audio", + default="", + type=str, + help="Use pretrained audio model weights for the audio encoder of CLAP", + ) + parser.add_argument( + "--pretrained-text", + default="", + type=str, + help="Use pretrained text model weights for the text encoder of CLAP", + ) + parser.add_argument( + "--pretrained", + default="", + type=str, + help="Use pretrained CLIP model weights with the specified tag or file path.", + ) + parser.add_argument( + "--pretrained-image", + default=False, + action="store_true", + help="Load imagenet pretrained weights for image tower backbone if available.", + ) + parser.add_argument( + "--lock-image", + default=False, + action="store_true", + help="Lock full image tower by disabling gradients.", + ) + parser.add_argument( + "--lock-image-unlocked-groups", + type=int, + default=0, + help="Leave last n image tower layer groups unlocked.", + ) + parser.add_argument( + "--lock-image-freeze-bn-stats", + default=False, + action="store_true", + help="Freeze BatchNorm running stats in image tower for any locked layers.", + ) + parser.add_argument( + "--local-loss", + default=False, + action="store_true", + help="calculate loss w/ local features @ global (instead of realizing full global @ global matrix)", + ) + parser.add_argument( + "--gather-with-grad", + default=False, + action="store_true", + help="enable full distributed gradient for feature gather", + ) + parser.add_argument( + "--force-quick-gelu", + default=False, + action="store_true", + help="Force use of QuickGELU activation for non-OpenAI transformer models.", + ) + parser.add_argument( + "--torchscript", + default=False, + action="store_true", + help="torch.jit.script the model, also uses jit version of OpenAI models if pretrained=='openai'", + ) + 
parser.add_argument( + "--trace", + default=False, + action="store_true", + help="torch.jit.trace the model for inference / eval only", + ) + # arguments for distributed training + parser.add_argument( + "--dist-url", + default="env://", + type=str, + help="url used to set up distributed training", + ) + parser.add_argument( + "--dist-backend", default="nccl", type=str, help="distributed backend" + ) + parser.add_argument( + "--report-to", + default="", + type=str, + help="Options are ['wandb', 'tensorboard', 'wandb,tensorboard']", + ) + parser.add_argument( + "--wandb-notes", default="", type=str, help="Notes if logging with wandb" + ) + parser.add_argument( + "--C", type=float, default=3.16, help="inverse regularizer for logistic reg." + ) + parser.add_argument( + "--debug", + default=False, + action="store_true", + help="If true, more information is logged.", + ) + parser.add_argument( + "--copy-codebase", + default=False, + action="store_true", + help="If true, we copy the entire codebase to the log directory, and execute from there.", + ) + parser.add_argument( + "--horovod", + default=False, + action="store_true", + help="Use horovod for distributed training.", + ) + parser.add_argument( + "--ddp-static-graph", + default=False, + action="store_true", + help="Enable static graph optimization for DDP in PyTorch >= 1.11.", + ) + parser.add_argument( + "--no-set-device-rank", + default=False, + action="store_true", + help="Don't set device index from local rank (when CUDA_VISIBLE_DEVICES restricted to one per proc).", + ) + parser.add_argument("--seed", type=int, default=4242, help="Default random seed.") + + parser.add_argument( + "--top-k-checkpoint-select-dataset", + type=str, + default="all", + help="The dataset used for selecting the top-k checkpoints.", + ) + + # @R10, @R@5, @R1, mAP@10 + parser.add_argument( + "--top-k-checkpoint-select-metric", + type=str, + default="_R@10", + help="The metric used for selecting the top-k checkpoints.", + ) + parser.add_argument( + "--openai-model-cache-dir", + type=str, + default="~/.cache/clip", + help="Directory to download OpenAI models.", + ) + parser.add_argument( + "--optimizer", + type=str, + default="adamw", + help="can be AdamW or SGD", + ) + parser.add_argument( + "--parallel-eval", + default=False, + action="store_true", + help="Eval in parallel (multi-GPU, multi-node).", + ) + + parser.add_argument( + "--no-eval", + default=False, + action="store_true", + help="Training without evaluation.", + ) + + parser.add_argument( + "--lp-mlp", + default=False, + action="store_true", + help="Linear Probe using MLP layer or not.", + ) + + parser.add_argument( + "--lp-freeze", + default=False, + action="store_true", + help="Linear Probe using frozen CLAP or not", + ) + + parser.add_argument( + "--lp-act", + default="None", + type=str, + help="Options are ['relu','elu','prelu','softmax','sigmoid']", + ) + + parser.add_argument( + "--lp-loss", type=str, default="bce", help="Loss func of Linear Probe." + ) + + parser.add_argument( + "--lp-metrics", + type=str, + default="map,mauc,acc", + help="Metrics of Linear Probe.", + ) + + parser.add_argument( + "--lp-lr", type=float, default=1e-4, help="learning rate of linear probe" + ) + parser.add_argument( + "--kappa", + type=float, + default=0, + help="the kappa in the weighted contrastive loss; the default of 0 turns the weighted contrastive loss off", + ) + + parser.add_argument( + "--data-filling", + type=str, + default="pad", + help="type of data filling when the audio length is shorter than the max length." 
+ "Can be one of the following: repeat, repeatpad, pad", + ) + parser.add_argument( + "--data-truncating", + type=str, + default="rand_trunc", + help="type of data truncation when the audio length is longer than the max length." + "Can be one of the following: rand_trunc, fusion", + ) + + parser.add_argument( + "--clap-mlploss", + default=False, + action="store_true", + help="Use the MLP loss for the CLAP model or not", + ) + + parser.add_argument( + "--wandb-id", + type=str, + default=None, + help="the id of the wandb experiment to restore.", + ) + + parser.add_argument( + "--sleep", type=float, default=0, help="sleep n seconds before starting training" + ) + + # variable length processing + parser.add_argument( + "--enable-fusion", + default=False, + action="store_true", + help="Enable feature fusion for variable-length data", + ) + + parser.add_argument( + "--fusion-type", + type=str, + default="None", + help="Type is among ['channel_map', 'daf_1d','aff_1d','iaff_1d','daf_2d','aff_2d','iaff_2d']", + ) + + parser.add_argument( + "--mixup", + default=False, + action="store_true", + help="Enable mixup in finetuning training.", + ) + parser.add_argument( + "--text-augment-selection", + type=str, + default=None, + help="For selecting levels of augmented text. Type is among ['all', 'augment_only', 'none']", + ) + + args = parser.parse_args() + + # If some params are not passed, we use the default values based on model name. + default_params = get_default_params(args.amodel) + for name, val in default_params.items(): + if getattr(args, name) is None: + setattr(args, name, val) + + return args diff --git a/core/models/encoders/clap_modules/training/scheduler.py b/core/models/encoders/clap_modules/training/scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..7151ffbab25a113673b7627027b443b27f22cb0f --- /dev/null +++ b/core/models/encoders/clap_modules/training/scheduler.py @@ -0,0 +1,24 @@ +import numpy as np + + +def assign_learning_rate(optimizer, new_lr): + for param_group in optimizer.param_groups: + param_group["lr"] = new_lr + + +def _warmup_lr(base_lr, warmup_length, step): + return base_lr * (step + 1) / warmup_length + + +def cosine_lr(optimizer, base_lr, warmup_length, steps): + def _lr_adjuster(step): + if step < warmup_length: + lr = _warmup_lr(base_lr, warmup_length, step) + else: + e = step - warmup_length + es = steps - warmup_length + lr = 0.5 * (1 + np.cos(np.pi * e / es)) * base_lr + assign_learning_rate(optimizer, lr) + return lr + + return _lr_adjuster diff --git a/core/models/encoders/clap_modules/training/train.py b/core/models/encoders/clap_modules/training/train.py new file mode 100644 index 0000000000000000000000000000000000000000..f5759c4679d2ee9c0748444adf66b8453cf09728 --- /dev/null +++ b/core/models/encoders/clap_modules/training/train.py @@ -0,0 +1,838 @@ +import json +import logging +import math +import os +import time +from contextlib import suppress + +import numpy as np +import torch +import torch.nn.functional as F + +try: + import wandb +except ImportError: + wandb = None + +from open_clip import ClipLoss, gather_features +from .distributed import is_master +from .zero_shot import zero_shot_eval + + +class AverageMeter(object): + """Computes and stores the average and current value""" + + def __init__(self): + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + +def 
unwrap_model(model): + if hasattr(model, "module"): + return model.module + else: + return model + + +def train_one_epoch( + model, data, epoch, optimizer, scaler, scheduler, args, tb_writer=None +): + device = torch.device(args.device) + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + model.train() + loss = ClipLoss( + local_loss=args.local_loss, + gather_with_grad=args.gather_with_grad, + cache_labels=True, + rank=args.rank, + world_size=args.world_size, + use_horovod=args.horovod, + mlp_loss=args.clap_mlploss, + weight_loss_kappa=args.kappa, + ) + + dataloader, sampler = data["train"].dataloader, data["train"].sampler + if args.distributed and sampler is not None: + sampler.set_epoch(epoch) + num_batches_per_epoch = dataloader.num_batches + sample_digits = math.ceil(math.log(dataloader.num_samples + 1, 10)) + + # for toy dataset + if args.dataset_type == "toy": + dataloader.dataset.generate_queue() + + loss_m = AverageMeter() + batch_time_m = AverageMeter() + data_time_m = AverageMeter() + end = time.time() + + for i, batch in enumerate(dataloader): + # logging.info(f"batch {i} of {num_batches_per_epoch}") + step = num_batches_per_epoch * epoch + i + if isinstance(scheduler, dict): + for s in scheduler.values(): + s(step) + else: + scheduler(step) + audios = batch # contains mel_spec, waveform, and longer list + texts = batch["text"] + # audios = audios.to(device=device, non_blocking=True) + # texts = texts.to(device=device, non_blocking=True) + + data_time_m.update(time.time() - end) + if isinstance(optimizer, dict): + for o_ in optimizer.values(): + o_.zero_grad() + else: + optimizer.zero_grad() + + with autocast(): + ( + audio_features, + text_features, + audio_features_mlp, + text_features_mlp, + logit_scale_a, + logit_scale_t, + ) = model(audios, texts, device) + + if args.clap_mlploss: + total_loss = loss( + audio_features=audio_features, + text_features=text_features, + logit_scale_a=logit_scale_a, + logit_scale_t=logit_scale_t, + audio_features_mlp=audio_features_mlp, + text_features_mlp=text_features_mlp, + ) + else: + total_loss = loss( + audio_features=audio_features, + text_features=text_features, + logit_scale_a=logit_scale_a, + ) + if isinstance(optimizer, dict): + if scaler is not None: + scaler.scale(total_loss).backward() + for o_ in optimizer.values(): + if args.horovod: + o_.synchronize() + scaler.unscale_(o_) + with o_.skip_synchronize(): + scaler.step(o_) + else: + scaler.step(o_) + scaler.update() + else: + total_loss.backward() + for o_ in optimizer.values(): + o_.step() + else: + if scaler is not None: + scaler.scale(total_loss).backward() + if args.horovod: + optimizer.synchronize() + scaler.unscale_(optimizer) + with optimizer.skip_synchronize(): + scaler.step(optimizer) + else: + scaler.step(optimizer) + scaler.update() + else: + total_loss.backward() + optimizer.step() + + # Note: we clamp to 4.6052 = ln(100), as in the original paper.
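+ # The clamp below acts in place on the raw parameter, outside autograd: the stored + # value is the log of the logit scale, so restricting it to [0, ln(100)] bounds the + # effective multiplier on the similarity logits to [1, 100].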
+ with torch.no_grad(): + unwrap_model(model).logit_scale_a.clamp_(0, math.log(100)) + if args.clap_mlploss: + unwrap_model(model).logit_scale_t.clamp_(0, math.log(100)) + + batch_time_m.update(time.time() - end) + end = time.time() + batch_count = i + 1 + if is_master(args) and (i % 100 == 0 or batch_count == num_batches_per_epoch): + if isinstance(audios, dict): + batch_size = len(audios["waveform"]) + else: + batch_size = len(audios) + num_samples = batch_count * batch_size * args.world_size + samples_per_epoch = dataloader.num_samples + percent_complete = 100.0 * batch_count / num_batches_per_epoch + + # NOTE: the loss is coarsely sampled: master node only, once per log update + loss_m.update(total_loss.item(), batch_size) + logit_scale_scalar_a = logit_scale_a.item() + logit_scale_scalar_t = logit_scale_t.item() + if isinstance(optimizer, dict): + if args.clap_mlploss: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {[o_.param_groups[0]['lr'] for o_ in optimizer.values()]} " + f"Logit Scale Audio: {logit_scale_scalar_a:.3f} " + f"Logit Scale Text: {logit_scale_scalar_t:.3f}" + ) + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "scale_audio": logit_scale_scalar_a, + "scale_text": logit_scale_scalar_t, + "lr": [o_.param_groups[0]["lr"] for o_ in optimizer.values()], + } + else: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {[o_.param_groups[0]['lr'] for o_ in optimizer.values()]} " + f"Logit Scale Audio: {logit_scale_scalar_a:.3f}" + ) + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "scale_audio": logit_scale_scalar_a, + "lr": [o_.param_groups[0]["lr"] for o_ in optimizer.values()], + } + + else: + if args.clap_mlploss: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {optimizer.param_groups[0]['lr']:5f} " + f"Logit Scale Audio: {logit_scale_scalar_a:.3f} " + f"Logit Scale Text: {logit_scale_scalar_t:.3f}" + ) + + # Save train loss / etc. Using non avg meter values as loggers have their own smoothing + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "scale_audio": logit_scale_scalar_a, + "scale_text": logit_scale_scalar_t, + "lr": optimizer.param_groups[0]["lr"], + } + else: + logging.info( + f"Train Epoch: {epoch} [{num_samples:>{sample_digits}}/{samples_per_epoch} ({percent_complete:.0f}%)] " + f"Loss: {loss_m.val:#.5g} ({loss_m.avg:#.4g}) " + f"Data (t): {data_time_m.avg:.3f} " + f"Batch (t): {batch_time_m.avg:.3f} " + f"LR: {optimizer.param_groups[0]['lr']:5f} " + f"Logit Scale Audio: {logit_scale_scalar_a:.3f}" + ) + + # Save train loss / etc.
Using non avg meter values as loggers have their own smoothing + log_data = { + "loss": loss_m.val, + "data_time": data_time_m.val, + "batch_time": batch_time_m.val, + "scale_audio": logit_scale_scalar_a, + "lr": optimizer.param_groups[0]["lr"], + } + for name, val in log_data.items(): + name = "train/" + name + if tb_writer is not None: + tb_writer.add_scalar(name, val, step) + if args.wandb: + assert wandb is not None, "Please install wandb." + wandb.log({name: val, "step": step}) + + # resetting batch / data time meters per log window + batch_time_m.reset() + data_time_m.reset() + # end for + + +def evaluate(model, data, epoch, args, tb_writer=None): + metrics = {} + if not args.parallel_eval: + if not is_master(args): + return metrics + device = torch.device(args.device) + model.eval() + + # CHANGE + # zero_shot_metrics = zero_shot_eval(model, data, epoch, args) + # metrics.update(zero_shot_metrics) + if is_master(args): + print("Evaluating...") + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + if args.val_dataset_names == ["Clotho", "audiocaps"]: + # if only clotho and audiocaps are used, then we will use a different evaluation function. + # This is because in the Clotho and audiocaps valid and test sets, there are 5 texts for 1 audio. + if args.parallel_eval: + # (yusong): just a hack here. Don't use parallel eval when evaluating only clotho and audiocaps. + raise NotImplementedError( + "Parallel evaluation not supported when evaluating only Clotho and audiocaps." + ) + val_metrics_per_dataset = evaluate_clotho_audiocaps( + model, data, epoch, args, autocast, device, tb_writer + ) + for m in val_metrics_per_dataset.values(): + metrics.update(m) + if "epoch" not in metrics.keys(): + metrics.update({"epoch": epoch}) + metrics = select_top_metric_clotho_audiocaps( + metrics, val_metrics_per_dataset, args + ) + elif "val" in data and ( + args.val_frequency + and ((epoch % args.val_frequency) == 0 or epoch == args.epochs) + ): + dataloader = data["val"].dataloader + num_samples = 0 + samples_per_val = dataloader.num_samples + + # FIXME this does not scale past small eval datasets + # all_audio_features @ all_text_features will blow up memory and compute very quickly + eval_info = {} + if args.clap_mlploss: + eval_info["all"] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + "all_audio_features_mlp": [], + "all_text_features_mlp": [], + } # cumulative_loss = 0.0 + else: + eval_info["all"] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + } # cumulative_loss = 0.0 + # all_audio_features, all_text_features, all_audio_features_mlp, all_text_features_mlp = [], [], [], [] + with torch.no_grad(): + for i, batch in enumerate(dataloader): + audios = batch # contains mel_spec, waveform, and longer list + texts = batch["text"] + # audios = audios.to(device=device, non_blocking=True) + + all_names = list( + set(["-".join(b.split("/")[-3:-1]) for b in batch["__url__"]]) + ) + for name in all_names: + if name not in eval_info.keys(): + if args.clap_mlploss: + eval_info[name] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + "all_audio_features_mlp": [], + "all_text_features_mlp": [], + } + else: + eval_info[name] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + } + with autocast(): + ( + audio_features, + text_features, + audio_features_mlp, + text_features_mlp, +
logit_scale_a, + logit_scale_t, + ) = model(audios, texts, device) + + if args.parallel_eval: + # multi-GPU eval + if args.clap_mlploss: + ( + audio_features, + text_features, + audio_features_mlp, + text_features_mlp, + ) = gather_features( + audio_features=audio_features, + text_features=text_features, + audio_features_mlp=audio_features_mlp, + text_features_mlp=text_features_mlp, + local_loss=False, + gather_with_grad=False, + rank=args.rank, + world_size=args.world_size, + use_horovod=args.horovod, + mlp_loss=args.clap_mlploss, + ) + else: + (audio_features, text_features,) = gather_features( + audio_features=audio_features, + text_features=text_features, + local_loss=False, + gather_with_grad=False, + rank=args.rank, + world_size=args.world_size, + use_horovod=args.horovod, + mlp_loss=args.clap_mlploss, + ) + + if is_master(args): + num_samples += audio_features.shape[0] + for n in [*all_names, "all"]: + if n == "all": + eval_info[n]["all_audio_features"].append( + audio_features.cpu() + ) + eval_info[n]["all_text_features"].append( + text_features.cpu() + ) + if args.clap_mlploss: + eval_info[n]["all_audio_features_mlp"].append( + audio_features_mlp.cpu() + ) + eval_info[n]["all_text_features_mlp"].append( + text_features_mlp.cpu() + ) + else: + idx = np.where( + np.array( + [ + "-".join(b.split("/")[-3:-1]) + for b in batch["__url__"] + ] + ) + == n + )[0] + eval_info[n]["all_audio_features"].append( + audio_features.cpu().index_select( + 0, torch.tensor(idx).long() + ) + ) + eval_info[n]["all_text_features"].append( + text_features.cpu().index_select( + 0, torch.tensor(idx).long() + ) + ) + if args.clap_mlploss: + eval_info[n]["all_audio_features_mlp"].append( + audio_features_mlp.cpu().index_select( + 0, torch.tensor(idx).long() + ) + ) + eval_info[n]["all_text_features_mlp"].append( + text_features_mlp.cpu().index_select( + 0, torch.tensor(idx).long() + ) + ) + # print(f'eval step {i}') # (yusong): for debug + + # cumulative_loss += total_loss * batch_size + # num_samples += batch_size + if is_master(args) and (i % 100) == 0: # and i != 0: + logging.info( + f"Eval Epoch: {epoch} [{num_samples} / {samples_per_val}]" + ) + if is_master(args): + val_metrics_per_dataset = {} + for n in eval_info.keys(): + if args.clap_mlploss: + metrics_single_dataset = get_metrics( + audio_features=torch.cat( + eval_info[n]["all_audio_features"] + ), + text_features=torch.cat(eval_info[n]["all_text_features"]), + logit_scale_a=logit_scale_a.cpu(), + audio_features_mlp=torch.cat( + eval_info[n]["all_audio_features_mlp"] + ), + text_features_mlp=torch.cat( + eval_info[n]["all_text_features_mlp"] + ), + logit_scale_t=logit_scale_t.cpu(), + mlp_loss=args.clap_mlploss, + ) + else: + metrics_single_dataset = get_metrics( + audio_features=torch.cat( + eval_info[n]["all_audio_features"] + ), + text_features=torch.cat(eval_info[n]["all_text_features"]), + logit_scale_a=logit_scale_a.cpu(), + mlp_loss=args.clap_mlploss, + ) + val_metrics_per_dataset[n] = { + n + "/" + k: v for k, v in metrics_single_dataset.items() + } + metrics.update(val_metrics_per_dataset[n]) + if "epoch" not in metrics.keys(): + metrics.update({"epoch": epoch}) + if is_master(args): + if not metrics: + return metrics + + logging.info( + f"Eval Epoch: {epoch} " + + "\n".join( + [ + "\t".join([f"{k}: {round(v, 4):.4f}" for k, v in m.items()]) + for m in val_metrics_per_dataset.values() + ] + ) + ) + + if args.save_logs: + for name, val in metrics.items(): + if tb_writer is not None: + tb_writer.add_scalar(f"val/{name}", val, epoch) + + 
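+ # Metrics are appended as one JSON object per line (JSON Lines), so results.jsonl + # accumulates the evaluation history across epochs and across restarts.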
with open(os.path.join(args.checkpoint_path, "results.jsonl"), "a+") as f: + f.write(json.dumps(metrics)) + f.write("\n") + + if args.wandb: + assert wandb is not None, "Please install wandb." + for name, val in metrics.items(): + wandb.log({f"val/{name}": val, "epoch": epoch}) + + return metrics + else: + return metrics + + +def get_metrics( + audio_features, + text_features, + logit_scale_a, + audio_features_mlp=None, + text_features_mlp=None, + logit_scale_t=None, + mlp_loss=False, +): + metrics = {} + if mlp_loss: + # Set up the audio-to-text and text-to-audio similarity matrices + a_logits_per_audio = ( + (logit_scale_a * audio_features @ text_features_mlp.t()).detach().cpu() + ) + a_logits_per_text = a_logits_per_audio.t().detach().cpu() + t_logits_per_audio = ( + (logit_scale_t * audio_features_mlp @ text_features.t()).detach().cpu() + ) + t_logits_per_text = t_logits_per_audio.t().detach().cpu() + + labels = torch.arange(audio_features.shape[0]).long() + # Change the loss from two terms into four terms with 2x2 combined CE loss + total_loss = ( + F.cross_entropy(a_logits_per_audio, labels) + + F.cross_entropy(a_logits_per_text, labels) + + F.cross_entropy(t_logits_per_audio, labels) + + F.cross_entropy(t_logits_per_text, labels) + ) / 4 + + metrics["cumulative_loss"] = total_loss.item() + metrics["num_samples"] = audio_features.shape[0] + + logits = { + "audio_to_text": (a_logits_per_audio + t_logits_per_audio) / 2, + "text_to_audio": (a_logits_per_text + t_logits_per_text) / 2, + } + ground_truth = torch.arange(len(text_features)).view(-1, 1) + + else: + # print("text_features", text_features) + # print("text_features.shape", text_features.shape) + logits_per_audio = ( + (logit_scale_a * audio_features @ text_features.t()).detach().cpu() + ) + logits_per_text = logits_per_audio.t().detach().cpu() + + labels = torch.arange(audio_features.shape[0]).long() + # Symmetric two-term CE loss over the audio-to-text and text-to-audio logits + total_loss = ( + F.cross_entropy(logits_per_audio, labels) + + F.cross_entropy(logits_per_text, labels) + ) / 2 + + metrics["cumulative_loss"] = total_loss.item() + metrics["num_samples"] = audio_features.shape[0] + + logits = {"audio_to_text": logits_per_audio, "text_to_audio": logits_per_text} + + ground_truth = torch.arange(len(text_features)).view(-1, 1) + + for name, logit in logits.items(): + ranking = torch.argsort(logit, descending=True) + preds = torch.where(ranking == ground_truth)[ + 1 + ] # (yusong) this line is slow because it uses single thread + preds = preds.detach().cpu().numpy() + metrics[f"{name}_mean_rank"] = preds.mean() + 1 + metrics[f"{name}_median_rank"] = np.floor(np.median(preds)) + 1 + for k in [1, 5, 10]: + metrics[f"{name}_R@{k}"] = np.mean(preds < k) + # map@10 + metrics[f"{name}_mAP@10"] = np.mean(np.where(preds < 10, 1 / (preds + 1), 0.0)) + + return metrics + + +def evaluate_clotho_audiocaps( + model, data, epoch, args, autocast, device, tb_writer=None +): + """ + Adapted from https://github.com/XinhaoMei/audio-text_retrieval/blob/main/tools/utils.py. + 1. for text-to-audio retrieval, run retrieval 5 times (once per caption) and average the results + 2. for R@1, R@5, R@10 in audio-to-text retrieval, take the best rank among the 5 texts + 3. for mAP@10 in audio-to-text retrieval: + 3.1: sort the ranks of the 5 texts + 3.2: exclude ranks >= 10 (0-indexed) + 3.3: compute the mAP over the remaining ranks: np.mean(np.arange(1, len(ranks)+1) / ranks). + (3.3) That is, take the ranks of the 5 texts that are < 10 and assign ascending numbers as ground truth.
+ (3.3) E.g.: the ground truth of the first such rank should be 1, of the second 2, etc. + """ + # TODO: (yusong) only supports single-GPU evaluation and the non-mlp case for now. + dataloader = data["val"].dataloader + with torch.no_grad(): + eval_info = {} + for i, batch in enumerate(dataloader): + audios = batch # contains mel_spec, waveform, and longer list + + # each item in the list has 5 texts + if args.tmodel == "transformer": + from open_clip import tokenize + + texts = [tokenize(t) for t in batch["full_text"]] + texts = torch.cat(texts) + else: + from .data import tokenizer + + texts = [ + tokenizer(t) for t in batch["full_text"] + ] # 5 texts for each audio + texts = { + k: torch.cat([t[k] for t in texts]) for k in texts[0].keys() + } # 5 x batch + + # audios = audios.to(device=device, non_blocking=True) + + all_names = list( + set(["-".join(b.split("/")[-3:-1]) for b in batch["__url__"]]) + ) + for name in all_names: + if name not in eval_info.keys(): + # we will not use mlp outputs even if args.clap_mlploss=True + eval_info[name] = { + "cumulative_loss": 0.0, + "num_samples": 0, + "all_audio_features": [], + "all_text_features": [], + } + with autocast(): + audio_features = model(audios, None, device) + text_features = model(None, texts, device) + audio_features = F.normalize(audio_features, dim=-1) + text_features = F.normalize(text_features, dim=-1) + + all_names = list( + set(["-".join(b.split("/")[-3:-1]) for b in batch["__url__"]]) + ) + for n in all_names: + idx = np.where( + np.array( + ["-".join(b.split("/")[-3:-1]) for b in batch["__url__"]] + ) + == n + )[0] + eval_info[n]["all_audio_features"].append( + audio_features.cpu().index_select(0, torch.tensor(idx).long()) + ) + # (yusong) please double-check. This is for selecting 5 text features at once. + # because idx is a list of indices in size of num_samples, + # and text_features is a tensor of size (5*num_samples, dim) + # so we need to select 5 consecutive indices at once for a single index in idx. + eval_info[n]["all_text_features"].append( + text_features.cpu() + .reshape([-1, 5, text_features.shape[1]]) + .index_select(0, torch.tensor(idx).long()) + .reshape([-1, text_features.shape[1]]) + ) + + val_metrics_all = {} + + for n in eval_info.keys(): + logit_scale_a, logit_scale_t = model(None, None, device) + logit_scale_a = logit_scale_a.cpu() + + audio_features = torch.cat(eval_info[n]["all_audio_features"], dim=0) + text_features = torch.cat(eval_info[n]["all_text_features"], dim=0) + + logits_per_audio = ( + (logit_scale_a * audio_features @ text_features.t()).detach().cpu() + ) + logits_per_text = logits_per_audio.t().detach().cpu() + + # logits_per_audio shape: [num_samples, num_samples*5] + # logits_per_text shape: [num_samples*5, num_samples] + + logging.info( + f"dataset {n}, logits_per_audio shape: {logits_per_audio.shape}, " + f"logits_per_text shape: {logits_per_text.shape}" + ) + + metrics = {} + num_samples = audio_features.shape[0] + metrics["num_samples"] = num_samples + + # (yusong) the following code is very important, please double-check: + # logits_per_audio.reshape(num_samples, num_samples, 5)[:, :, d] + # logits_per_text.reshape(num_samples, 5, num_samples)[:, d, :] + # Those two are retrieving one of the 5 texts for each audio.
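+ # Concretely, captions are stored 5-per-audio in consecutive rows, so + # logits_per_audio[i, j * 5 + d] is the similarity between audio i and the d-th + # caption of audio j. Slicing out caption d therefore yields a square + # [num_samples, num_samples] matrix whose diagonal holds the matched pairs, which + # is exactly the layout the arange labels below assume (and likewise for + # logits_per_text).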
+ labels = torch.arange(audio_features.shape[0]).long() + audio_to_text_loss = [ + F.cross_entropy( + logits_per_audio.reshape(num_samples, num_samples, 5)[:, :, d], + labels, + ) + for d in range(5) + ] + text_to_audio_loss = [ + F.cross_entropy( + logits_per_text.reshape(num_samples, 5, num_samples)[:, d, :], + labels, + ) + for d in range(5) + ] + total_loss = (np.mean(audio_to_text_loss) + np.mean(text_to_audio_loss)) / 2 + + metrics["cumulative_loss"] = total_loss.item() + + # text to audio: do 5 times + pred_text = [] + for d in range(5): + logit = logits_per_text.reshape(num_samples, 5, num_samples)[:, d, :] + ground_truth = torch.arange(len(logit)).view(-1, 1) + ranking = torch.argsort( + logit, descending=True + ) # [num_samples, num_samples] + preds = torch.where(ranking == ground_truth)[1] + pred_text.append(preds.detach().cpu().numpy()) + pred_text_concat = np.concatenate(pred_text, axis=0) # [5*num_samples] + metrics["text_to_audio_mean_rank"] = pred_text_concat.mean() + 1 + metrics["text_to_audio_median_rank"] = ( + np.floor(np.median(pred_text_concat)) + 1 + ) + for k in [1, 5, 10]: + metrics[f"text_to_audio_R@{k}"] = np.mean(pred_text_concat < k) + # map@10 + metrics["text_to_audio_mAP@10"] = np.mean( + np.where(pred_text_concat < 10, 1 / (pred_text_concat + 1), 0.0) + ) + + # audio to text: take the best result + # for audio-to-text mAP@10, sort the ranks and assign ascending ground-truth positions. + # see https://github.com/XinhaoMei/audio-text_retrieval/blob/main/tools/utils.py#L103 + # map@10 + map_all = [] + pred_audio_all = [] + for d in range(num_samples): + # logits_per_audio: [num_samples, num_samples*5] + logit_single = logits_per_audio[d, :] # [5*num_samples] + # Ground-truth index: [d*5, d*5+1, d*5+2, d*5+3, d*5+4] + ranking = torch.argsort( + logit_single, descending=True + ) # [5*num_samples] + # ranking: the index of first match, second match, ... + ground_truth = torch.arange(d * 5, d * 5 + 5)[None] + all_pred = torch.where( + torch.stack([ranking] * 5) == ground_truth.view(-1, 1) + )[1] + min_pred = torch.min(all_pred) + pred_audio_all.append(min_pred.detach().cpu().numpy()) + all_pred_filter = all_pred[all_pred < 10].detach().cpu().numpy() + # /5 because there are 5 texts per audio; texts with rank >= 10 contribute 0. + map_single = ( + np.sum( + (np.arange(1, len(all_pred_filter) + 1) / (all_pred_filter + 1)) + ) + / 5 + ) + map_all.append(map_single) + metrics["audio_to_text_mAP@10"] = np.mean(map_all) + for k in [1, 5, 10]: + metrics[f"audio_to_text_R@{k}"] = np.mean(np.array(pred_audio_all) < k) + + val_metrics_all[n] = {n + "/" + k: v for k, v in metrics.items()} + return val_metrics_all + + +def calculate_selection_performance_clotho_audiocaps(val_metrics_per_dataset): + """ + Calculate performance for Clotho+AudioCaps for model selection.
+ """ + selection_performance_all = [] + for n in val_metrics_per_dataset.keys(): + selection_performance = ( + val_metrics_per_dataset[n][f"{n}/audio_to_text_mAP@10"] + + val_metrics_per_dataset[n][f"{n}/text_to_audio_mAP@10"] + ) / 2 + selection_performance_all.append(selection_performance) + return np.mean(selection_performance_all) + + +def select_top_metric_clotho_audiocaps(metrics, val_metrics_per_dataset, args): + # val_metrics_per_dataset: dict, key: dataset name, value: dict, key: metric name, value: metric value + # metrics: dict, key: metric name, value: metric value + # Hack: use args to save the top performance + if not hasattr(args, "top_selection_performance"): + selection_performance = calculate_selection_performance_clotho_audiocaps( + val_metrics_per_dataset + ) + # TODO: write the if and else together + metric_update = {} + for n in val_metrics_per_dataset.keys(): + for k in val_metrics_per_dataset[n].keys(): + metric_update[ + k.split("/")[0] + "-top" + "/" + k.split("/")[1] + ] = val_metrics_per_dataset[n][k] + metric_update["top_selection_performance"] = selection_performance + metric_update["top-selection-epoch"] = metrics["epoch"] + metrics.update(metric_update) + args.top_metric = metric_update + args.top_selection_performance = selection_performance + else: + selection_performance_new = calculate_selection_performance_clotho_audiocaps( + val_metrics_per_dataset + ) + selection_performance_old = args.top_selection_performance + if selection_performance_new > selection_performance_old: + metric_update = {} + for n in val_metrics_per_dataset.keys(): + for k in val_metrics_per_dataset[n].keys(): + metric_update[ + k.split("/")[0] + "-top" + "/" + k.split("/")[1] + ] = val_metrics_per_dataset[n][k] + metric_update["top_selection_performance"] = selection_performance_new + metric_update["top-selection-epoch"] = metrics["epoch"] + metrics.update(metric_update) + args.top_metric = metric_update + args.top_selection_performance = selection_performance_new + else: + metrics.update(args.top_metric) + return metrics diff --git a/core/models/encoders/clap_modules/training/zero_shot.py b/core/models/encoders/clap_modules/training/zero_shot.py new file mode 100644 index 0000000000000000000000000000000000000000..28b8fccc1af17fc69002857a7f529ac041c374f2 --- /dev/null +++ b/core/models/encoders/clap_modules/training/zero_shot.py @@ -0,0 +1,95 @@ +# NOTE: This script is currently not supported for CLAP. 
+import logging +from contextlib import suppress + +import torch +import torch.nn.functional as F +from tqdm import tqdm + +from open_clip import tokenize +from .imagenet_zeroshot_data import imagenet_classnames, openai_imagenet_template + + +def zero_shot_classifier(model, classnames, templates, args): + with torch.no_grad(): + zeroshot_weights = [] + for classname in tqdm(classnames): + texts = [template(classname) for template in templates] # format with class + texts = tokenize(texts).to(args.device) # tokenize + if args.distributed and not args.horovod: + class_embeddings = model.module.encode_text(texts) + else: + class_embeddings = model.encode_text(texts) + class_embedding = F.normalize(class_embeddings, dim=-1).mean(dim=0) + class_embedding /= class_embedding.norm() + zeroshot_weights.append(class_embedding) + zeroshot_weights = torch.stack(zeroshot_weights, dim=1).to(args.device) + return zeroshot_weights + + +def accuracy(output, target, topk=(1,)): + pred = output.topk(max(topk), 1, True, True)[1].t() + correct = pred.eq(target.view(1, -1).expand_as(pred)) + return [ + float(correct[:k].reshape(-1).float().sum(0, keepdim=True).cpu().numpy()) + for k in topk + ] + + +def run(model, classifier, dataloader, args): + autocast = torch.cuda.amp.autocast if args.precision == "amp" else suppress + with torch.no_grad(): + top1, top5, n = 0.0, 0.0, 0.0 + for images, target in tqdm(dataloader, unit_scale=args.batch_size): + images = images.to(args.device) + target = target.to(args.device) + + with autocast(): + # predict + if args.distributed and not args.horovod: + image_features = model.module.encode_image(images) + else: + image_features = model.encode_image(images) + image_features = F.normalize(image_features, dim=-1) + logits = 100.0 * image_features @ classifier + + # measure accuracy + acc1, acc5 = accuracy(logits, target, topk=(1, 5)) + top1 += acc1 + top5 += acc5 + n += images.size(0) + + top1 = top1 / n + top5 = top5 / n + return top1, top5 + + +def zero_shot_eval(model, data, epoch, args): + if "imagenet-val" not in data and "imagenet-v2" not in data: + return {} + if args.zeroshot_frequency == 0: + return {} + if (epoch % args.zeroshot_frequency) != 0 and epoch != args.epochs: + return {} + + logging.info("Starting zero-shot imagenet.") + + logging.info("Building zero-shot classifier") + classifier = zero_shot_classifier( + model, imagenet_classnames, openai_imagenet_template, args + ) + + logging.info("Using classifier") + results = {} + if "imagenet-val" in data: + top1, top5 = run(model, classifier, data["imagenet-val"].dataloader, args) + results["imagenet-zeroshot-val-top1"] = top1 + results["imagenet-zeroshot-val-top5"] = top5 + if "imagenet-v2" in data: + top1, top5 = run(model, classifier, data["imagenet-v2"].dataloader, args) + results["imagenetv2-zeroshot-val-top1"] = top1 + results["imagenetv2-zeroshot-val-top5"] = top5 + + logging.info("Finished zero-shot imagenet.") + + return results diff --git a/core/models/encoders/clip.py b/core/models/encoders/clip.py new file mode 100644 index 0000000000000000000000000000000000000000..864b910c160a25600ccc0bf6f48ca319319bc260 --- /dev/null +++ b/core/models/encoders/clip.py @@ -0,0 +1,152 @@ +from typing import List +import os + +import torch +import torch.nn as nn +import numpy as np +from functools import partial +from core.models.common.get_model import register +from einops import rearrange + +from transformers import CLIPTokenizer, CLIPTextModel +from .clip_modules import CLIPProcessor, CLIPModel, CLIPTokenizer, 
CLIPConfig + +version = '0' +symbol = 'clip' + + +class AbstractEncoder(nn.Module): + def __init__(self): + super().__init__() + + def encode(self, *args, **kwargs): + raise NotImplementedError + + +@register('clip_text_frozen', version) +class FrozenCLIPTextEmbedder(AbstractEncoder): + """Uses the CLIP transformer encoder for text (from huggingface)""" + + def __init__(self, version="openai/clip-vit-large-patch14", device="cuda", max_length=77): # clip-vit-base-patch32 + super().__init__() + self.tokenizer = CLIPTokenizer.from_pretrained(version) + self.transformer = CLIPTextModel.from_pretrained(version) + self.device = device + self.max_length = max_length + + def forward(self, text): + batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True, + return_overflowing_tokens=False, padding="max_length", return_tensors="pt") + tokens = batch_encoding["input_ids"].to(self.device) + outputs = self.transformer(input_ids=tokens) + z = outputs.last_hidden_state + return z + + def encode(self, text): + return self(text) + + +@register('clip_frozen', version) +class FrozenCLIP(AbstractEncoder): + def __init__(self, + version="openai/clip-vit-large-patch14", + max_length=77, + encode_type='encode_text', + fp16=False, + data_dir='.'): + super().__init__() + self.tokenizer = CLIPTokenizer.from_pretrained(version) + self.processor = CLIPProcessor.from_pretrained(version) + config = CLIPConfig.from_pretrained(version) + self.model = CLIPModel(config, add_temporal_attention=True) + self.max_length = max_length + self.encode_type = encode_type + self.fp16 = fp16 + + @property + def dtype(self): + return torch.float32 + + @property + def device(self): + return self.model.text_projection.weight.device + + def get_device(self): + # A trick to get device + return self.model.text_projection.weight.device + + def encode_text_pooled(self, text): + batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True, + return_overflowing_tokens=False, padding="max_length", return_tensors="pt") + tokens = batch_encoding["input_ids"].to(self.get_device()) + outputs = self.model.get_text_features(input_ids=tokens) + return outputs + + def encode_vision_pooled(self, images): + inputs = self.processor(images=images, return_tensors="pt") + pixels = inputs['pixel_values'].half() if self.fp16 else inputs['pixel_values'] + pixels = pixels.to(self.get_device()) + return self.model.get_image_features(pixel_values=pixels) + + def encode_text_noproj(self, text): + batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True, + return_overflowing_tokens=False, padding="max_length", return_tensors="pt") + tokens = batch_encoding["input_ids"].to(self.get_device()) + if self.dtype == torch.half: + tokens = tokens.short() + outputs = self.model.text_model(input_ids=tokens) + return outputs.last_hidden_state + + def encode_vision_noproj(self, vision_inputs): + # vision_inputs = ((vision_inputs + 1) / 2).to('cpu').numpy() + vision_inputs = vision_inputs.to('cpu').numpy() + + if vision_inputs.ndim == 5: + num_frames = vision_inputs.shape[2] + vision_inputs = rearrange(vision_inputs, 'b c f h w -> (b f) h w c') + else: + num_frames = 1 + vision_inputs = rearrange(vision_inputs, 'b c h w -> b h w c') + + vision_inputs = [vi for vi in vision_inputs] + inputs = self.processor(images=vision_inputs, return_tensors="pt") + + pixels = inputs['pixel_values'].to(self.dtype).to(self.device) + + if num_frames > 1: + pixels = 
rearrange(pixels, '(b f) h w c -> b f h w c', f=num_frames) + outputs = self.model.vision_model(pixel_values=pixels) + return outputs + + def encode_text(self, text): + if isinstance(text, List): + batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True, + return_overflowing_tokens=False, padding="max_length", return_tensors="pt") + tokens = batch_encoding["input_ids"].to(self.get_device()) + else: + tokens = text + outputs = self.model.text_model(input_ids=tokens) + z_pooled = outputs.pooler_output + z_pooled = self.model.text_projection(z_pooled) + z_pooled = z_pooled / torch.norm(z_pooled, dim=-1, keepdim=True) + return z_pooled.unsqueeze(1) + + def encode_vision(self, images): + z = self.encode_vision_noproj(images) + z_pooled = z.pooler_output + z_pooled = self.model.visual_projection(z_pooled) + z_pooled = z_pooled / torch.norm(z_pooled, dim=-1, keepdim=True) + return z_pooled.unsqueeze(1) + + def encode(self, *args, **kwargs): + return getattr(self, self.encode_type)(*args, **kwargs) + + def forward(self, input, encode_type): + if encode_type == 'encode_text': + return self.encode_text(input) + elif encode_type == 'encode_vision': + # If the input has a single channel (a grayscale image), replicate it across 3 channels + if input.shape[1] == 1: + input = torch.cat([input, input, input], dim=1) + return self.encode_vision(input) + diff --git a/core/models/encoders/clip_modules/__init__.py b/core/models/encoders/clip_modules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1931d894f148d227831fc60e0dc22c704faaa0df --- /dev/null +++ b/core/models/encoders/clip_modules/__init__.py @@ -0,0 +1,164 @@ +# flake8: noqa +# There's no way to ignore "F401 '...' imported but unused" warnings in this +# module, but to preserve other warnings. So, don't check this module at all. + +# Copyright 2021 The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
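+ +# This __init__ mirrors the transformers CLIP package layout: submodules are listed in +# _import_structure and materialized on first attribute access through _LazyModule, so +# the torch, TensorFlow and Flax backends are only imported when actually used.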
+from typing import TYPE_CHECKING + +from transformers.utils import ( + OptionalDependencyNotAvailable, + _LazyModule, + is_flax_available, + is_tf_available, + is_tokenizers_available, + is_torch_available, + is_vision_available, +) + +_import_structure = { + "configuration_clip": ["CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP", "CLIPConfig", "CLIPTextConfig", "CLIPVisionConfig"], + "tokenization_clip": ["CLIPTokenizer"], +} + +try: + if not is_tokenizers_available(): + raise OptionalDependencyNotAvailable() +except OptionalDependencyNotAvailable: + pass +else: + _import_structure["tokenization_clip_fast"] = ["CLIPTokenizerFast"] + +try: + if not is_vision_available(): + raise OptionalDependencyNotAvailable() +except OptionalDependencyNotAvailable: + pass +else: + _import_structure["feature_extraction_clip"] = ["CLIPFeatureExtractor"] + _import_structure["processing_clip"] = ["CLIPProcessor"] + +try: + if not is_torch_available(): + raise OptionalDependencyNotAvailable() +except OptionalDependencyNotAvailable: + pass +else: + _import_structure["modeling_clip"] = [ + "CLIP_PRETRAINED_MODEL_ARCHIVE_LIST", + "CLIPModel", + "CLIPPreTrainedModel", + "CLIPTextModel", + "CLIPVisionModel", + ] + +try: + if not is_tf_available(): + raise OptionalDependencyNotAvailable() +except OptionalDependencyNotAvailable: + pass +else: + _import_structure["modeling_tf_clip"] = [ + "TF_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST", + "TFCLIPModel", + "TFCLIPPreTrainedModel", + "TFCLIPTextModel", + "TFCLIPVisionModel", + ] + +try: + if not is_flax_available(): + raise OptionalDependencyNotAvailable() +except OptionalDependencyNotAvailable: + pass +else: + _import_structure["modeling_flax_clip"] = [ + "FlaxCLIPModel", + "FlaxCLIPPreTrainedModel", + "FlaxCLIPTextModel", + "FlaxCLIPTextPreTrainedModel", + "FlaxCLIPVisionModel", + "FlaxCLIPVisionPreTrainedModel", + ] + + +if TYPE_CHECKING: + from .configuration_clip import CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP, CLIPConfig, CLIPTextConfig, CLIPVisionConfig + from .tokenization_clip import CLIPTokenizer + + try: + if not is_tokenizers_available(): + raise OptionalDependencyNotAvailable() + except OptionalDependencyNotAvailable: + pass + else: + from .tokenization_clip_fast import CLIPTokenizerFast + + try: + if not is_vision_available(): + raise OptionalDependencyNotAvailable() + except OptionalDependencyNotAvailable: + pass + else: + from .feature_extraction_clip import CLIPFeatureExtractor + from .processing_clip import CLIPProcessor + + try: + if not is_torch_available(): + raise OptionalDependencyNotAvailable() + except OptionalDependencyNotAvailable: + pass + else: + from .modeling_clip import ( + CLIP_PRETRAINED_MODEL_ARCHIVE_LIST, + CLIPModel, + CLIPPreTrainedModel, + CLIPTextModel, + CLIPVisionModel, + ) + + try: + if not is_tf_available(): + raise OptionalDependencyNotAvailable() + except OptionalDependencyNotAvailable: + pass + else: + from .modeling_tf_clip import ( + TF_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST, + TFCLIPModel, + TFCLIPPreTrainedModel, + TFCLIPTextModel, + TFCLIPVisionModel, + ) + + try: + if not is_flax_available(): + raise OptionalDependencyNotAvailable() + except OptionalDependencyNotAvailable: + pass + else: + from .modeling_flax_clip import ( + FlaxCLIPModel, + FlaxCLIPPreTrainedModel, + FlaxCLIPTextModel, + FlaxCLIPTextPreTrainedModel, + FlaxCLIPVisionModel, + FlaxCLIPVisionPreTrainedModel, + ) + + +else: + import sys + + sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__) diff --git 
a/core/models/encoders/clip_modules/__pycache__/__init__.cpython-311.pyc b/core/models/encoders/clip_modules/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25eb508f250e5cf3db3f3a8851e259ef426cf565 Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/__init__.cpython-38.pyc b/core/models/encoders/clip_modules/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb2c0e11d568ca845d54116787f4273dd9214b6f Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/__init__.cpython-38.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/configuration_clip.cpython-311.pyc b/core/models/encoders/clip_modules/__pycache__/configuration_clip.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b2e4433fdac406c9b62d7b3c768c538513de62ff Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/configuration_clip.cpython-311.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/configuration_clip.cpython-38.pyc b/core/models/encoders/clip_modules/__pycache__/configuration_clip.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1403731dbf390f5b0e5cf2ec940a260e4981124c Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/configuration_clip.cpython-38.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/modeling_clip.cpython-311.pyc b/core/models/encoders/clip_modules/__pycache__/modeling_clip.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5432f3de3ca222e6f1eeda32e955d3c45f41e4cf Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/modeling_clip.cpython-311.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/modeling_clip.cpython-38.pyc b/core/models/encoders/clip_modules/__pycache__/modeling_clip.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2728c6fc3c1f9b2242dd9b1e94b2d38a5bea8ba5 Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/modeling_clip.cpython-38.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/modules_video.cpython-311.pyc b/core/models/encoders/clip_modules/__pycache__/modules_video.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2ae4c9189417700f7decf07c530c0e86ccc53073 Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/modules_video.cpython-311.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/modules_video.cpython-38.pyc b/core/models/encoders/clip_modules/__pycache__/modules_video.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2cdbb75cc14f21ac0e005259b75bc228c28d6c9c Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/modules_video.cpython-38.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/processing_clip.cpython-311.pyc b/core/models/encoders/clip_modules/__pycache__/processing_clip.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fcade1e8ba4f92c1e6349f2b4d4deab1f8fdaed2 Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/processing_clip.cpython-311.pyc differ diff --git 
a/core/models/encoders/clip_modules/__pycache__/processing_clip.cpython-38.pyc b/core/models/encoders/clip_modules/__pycache__/processing_clip.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..585ad9027181fd2209b704d3573a524a36431a82 Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/processing_clip.cpython-38.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/tokenization_clip.cpython-311.pyc b/core/models/encoders/clip_modules/__pycache__/tokenization_clip.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..57c5f5aae683416ce3c0dfaf5f36029925e3b511 Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/tokenization_clip.cpython-311.pyc differ diff --git a/core/models/encoders/clip_modules/__pycache__/tokenization_clip.cpython-38.pyc b/core/models/encoders/clip_modules/__pycache__/tokenization_clip.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8a04072d857f317165c852441d04aa599ade7ade Binary files /dev/null and b/core/models/encoders/clip_modules/__pycache__/tokenization_clip.cpython-38.pyc differ diff --git a/core/models/encoders/clip_modules/configuration_clip.py b/core/models/encoders/clip_modules/configuration_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..a59a0a13fe02a7747aee2d50b04e9c2a603934d4 --- /dev/null +++ b/core/models/encoders/clip_modules/configuration_clip.py @@ -0,0 +1,317 @@ +# coding=utf-8 +# Copyright 2021 The HuggingFace Inc. team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" CLIP model configuration""" + +import copy +import os +from typing import Union + +from transformers.configuration_utils import PretrainedConfig +from transformers.utils import logging + + +logger = logging.get_logger(__name__) + +CLIP_PRETRAINED_CONFIG_ARCHIVE_MAP = { + "openai/clip-vit-base-patch32": "https://huggingface.co/openai/clip-vit-base-patch32/resolve/main/config.json", + # See all CLIP models at https://huggingface.co/models?filter=clip +} + + +class CLIPTextConfig(PretrainedConfig): + r""" + This is the configuration class to store the configuration of a [`CLIPModel`]. It is used to instantiate an CLIP + model according to the specified arguments, defining the model architecture. Instantiating a configuration with the + defaults will yield a similar configuration to that of the CLIP + [openai/clip-vit-base-patch32](https://huggingface.co/openai/clip-vit-base-patch32) architecture. + + Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the + documentation from [`PretrainedConfig`] for more information. + + + Args: + vocab_size (`int`, *optional*, defaults to 49408): + Vocabulary size of the CLIP text model. Defines the number of different tokens that can be represented by + the `inputs_ids` passed when calling [`CLIPModel`]. 
+ hidden_size (`int`, *optional*, defaults to 512): + Dimensionality of the encoder layers and the pooler layer. + intermediate_size (`int`, *optional*, defaults to 2048): + Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder. + num_hidden_layers (`int`, *optional*, defaults to 12): + Number of hidden layers in the Transformer encoder. + num_attention_heads (`int`, *optional*, defaults to 8): + Number of attention heads for each attention layer in the Transformer encoder. + max_position_embeddings (`int`, *optional*, defaults to 77): + The maximum sequence length that this model might ever be used with. Typically set this to something large + just in case (e.g., 512 or 1024 or 2048). + hidden_act (`str` or `function`, *optional*, defaults to `"quick_gelu"`): + The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`, + `"relu"`, `"selu"`, `"gelu_new"` and `"quick_gelu"` are supported. + layer_norm_eps (`float`, *optional*, defaults to 1e-5): + The epsilon used by the layer normalization layers. + attention_dropout (`float`, *optional*, defaults to 0.0): + The dropout ratio for the attention probabilities. + dropout (`float`, *optional*, defaults to 0.0): + The dropout probability for all fully connected layers in the embeddings, encoder, and pooler. + initializer_range (`float`, *optional*, defaults to 0.02): + The standard deviation of the truncated_normal_initializer for initializing all weight matrices. + initializer_factor (`float`, *optional*, defaults to 1): + A factor for initializing all weight matrices (should be kept to 1, used internally for initialization + testing). + + Example: + + ```python + >>> from transformers import CLIPTextModel, CLIPTextConfig + + >>> # Initializing a CLIPTextConfig with openai/clip-vit-base-patch32 style configuration + >>> configuration = CLIPTextConfig() + + >>> # Initializing a CLIPTextModel from the openai/clip-vit-base-patch32 style configuration + >>> model = CLIPTextModel(configuration) + + >>> # Accessing the model configuration + >>> configuration = model.config + ```""" + model_type = "clip_text_model" + + def __init__( + self, + vocab_size=49408, + hidden_size=512, + intermediate_size=2048, + num_hidden_layers=12, + num_attention_heads=8, + max_position_embeddings=77, + hidden_act="quick_gelu", + layer_norm_eps=0.00001, + dropout=0.0, + attention_dropout=0.0, + initializer_range=0.02, + initializer_factor=1.0, + pad_token_id=1, + bos_token_id=0, + eos_token_id=2, + **kwargs + ): + super().__init__(pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs) + + self.vocab_size = vocab_size + self.hidden_size = hidden_size + self.intermediate_size = intermediate_size + self.dropout = dropout + self.num_hidden_layers = num_hidden_layers + self.num_attention_heads = num_attention_heads + self.max_position_embeddings = max_position_embeddings + self.layer_norm_eps = layer_norm_eps + self.hidden_act = hidden_act + self.initializer_range = initializer_range + self.initializer_factor = initializer_factor + self.attention_dropout = attention_dropout + + @classmethod + def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig": + + config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs) + + # get the text config dict if we are loading from CLIPConfig + if config_dict.get("model_type") == "clip": + config_dict = config_dict["text_config"] + + if 
"model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type: + logger.warning( + f"You are using a model of type {config_dict['model_type']} to instantiate a model of type " + f"{cls.model_type}. This is not supported for all configurations of models and can yield errors." + ) + + return cls.from_dict(config_dict, **kwargs) + + +class CLIPVisionConfig(PretrainedConfig): + r""" + This is the configuration class to store the configuration of a [`CLIPModel`]. It is used to instantiate an CLIP + model according to the specified arguments, defining the model architecture. Instantiating a configuration with the + defaults will yield a similar configuration to that of the CLIP + [openai/clip-vit-base-patch32](https://huggingface.co/openai/clip-vit-base-patch32) architecture. + + Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the + documentation from [`PretrainedConfig`] for more information. + + + Args: + hidden_size (`int`, *optional*, defaults to 768): + Dimensionality of the encoder layers and the pooler layer. + intermediate_size (`int`, *optional*, defaults to 3072): + Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder. + num_hidden_layers (`int`, *optional*, defaults to 12): + Number of hidden layers in the Transformer encoder. + num_attention_heads (`int`, *optional*, defaults to 12): + Number of attention heads for each attention layer in the Transformer encoder. + image_size (`int`, *optional*, defaults to 224): + The size (resolution) of each image. + patch_size (`int`, *optional*, defaults to 32): + The size (resolution) of each patch. + hidden_act (`str` or `function`, *optional*, defaults to `"quick_gelu"`): + The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`, + `"relu"`, `"selu"` and `"gelu_new"` ``"quick_gelu"` are supported. layer_norm_eps (`float`, *optional*, + defaults to 1e-5): The epsilon used by the layer normalization layers. + dropout (`float`, *optional*, defaults to 0.0): + The dropout probabilitiy for all fully connected layers in the embeddings, encoder, and pooler. + attention_dropout (`float`, *optional*, defaults to 0.0): + The dropout ratio for the attention probabilities. + initializer_range (`float`, *optional*, defaults to 0.02): + The standard deviation of the truncated_normal_initializer for initializing all weight matrices. + initializer_factor (`float``, *optional*, defaults to 1): + A factor for initializing all weight matrices (should be kept to 1, used internally for initialization + testing). 
+ + Example: + + ```python + >>> from transformers import CLIPVisionModel, CLIPVisionConfig + + >>> # Initializing a CLIPVisionConfig with openai/clip-vit-base-patch32 style configuration + >>> configuration = CLIPVisionConfig() + + >>> # Initializing a CLIPVisionModel from the openai/clip-vit-base-patch32 style configuration + >>> model = CLIPVisionModel(configuration) + + >>> # Accessing the model configuration + >>> configuration = model.config + ```""" + + model_type = "clip_vision_model" + + def __init__( + self, + hidden_size=768, + intermediate_size=3072, + num_hidden_layers=12, + num_attention_heads=12, + image_size=224, + patch_size=32, + hidden_act="quick_gelu", + layer_norm_eps=0.00001, + dropout=0.0, + attention_dropout=0.0, + initializer_range=0.02, + initializer_factor=1.0, + **kwargs + ): + super().__init__(**kwargs) + + self.hidden_size = hidden_size + self.intermediate_size = intermediate_size + self.dropout = dropout + self.num_hidden_layers = num_hidden_layers + self.num_attention_heads = num_attention_heads + self.patch_size = patch_size + self.image_size = image_size + self.initializer_range = initializer_range + self.initializer_factor = initializer_factor + self.attention_dropout = attention_dropout + self.layer_norm_eps = layer_norm_eps + self.hidden_act = hidden_act + + @classmethod + def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig": + + config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs) + + # get the vision config dict if we are loading from CLIPConfig + if config_dict.get("model_type") == "clip": + config_dict = config_dict["vision_config"] + + if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type: + logger.warning( + f"You are using a model of type {config_dict['model_type']} to instantiate a model of type " + f"{cls.model_type}. This is not supported for all configurations of models and can yield errors." + ) + + return cls.from_dict(config_dict, **kwargs) + + +class CLIPConfig(PretrainedConfig): + r""" + [`CLIPConfig`] is the configuration class to store the configuration of a [`CLIPModel`]. It is used to instantiate + a CLIP model according to the specified arguments, defining the text model and vision model configs. + + Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the + documentation from [`PretrainedConfig`] for more information. + + Args: + text_config_dict (`dict`, *optional*): + Dictionary of configuration options used to initialize [`CLIPTextConfig`]. + vision_config_dict (`dict`, *optional*): + Dictionary of configuration options used to initialize [`CLIPVisionConfig`]. + projection_dim (`int`, *optional*, defaults to 512): + Dimensionality of the text and vision projection layers. + logit_scale_init_value (`float`, *optional*, defaults to 2.6592): + The initial value of the *logit_scale* parameter. Default is used as per the original CLIP implementation. + kwargs (*optional*): + Dictionary of keyword arguments. + """ + + model_type = "clip" + is_composition = True + + def __init__( + self, + text_config_dict=None, + vision_config_dict=None, + projection_dim=512, + logit_scale_init_value=2.6592, + **kwargs + ): + super().__init__(text_config_dict=text_config_dict, vision_config_dict=vision_config_dict, **kwargs) + + if text_config_dict is None: + text_config_dict = {} + logger.info("text_config_dict is None. 
Initializing the CLIPTextConfig with default values.") + + if vision_config_dict is None: + vision_config_dict = {} + logger.info("vision_config_dict is None. initializing the CLIPVisionConfig with default values.") + + self.text_config = CLIPTextConfig(**text_config_dict) + self.vision_config = CLIPVisionConfig(**vision_config_dict) + + self.projection_dim = projection_dim + self.logit_scale_init_value = logit_scale_init_value + self.initializer_factor = 1.0 + + @classmethod + def from_text_vision_configs(cls, text_config: CLIPTextConfig, vision_config: CLIPVisionConfig, **kwargs): + r""" + Instantiate a [`CLIPConfig`] (or a derived class) from clip text model configuration and clip vision model + configuration. + + Returns: + [`CLIPConfig`]: An instance of a configuration object + """ + + return cls(text_config_dict=text_config.to_dict(), vision_config_dict=vision_config.to_dict(), **kwargs) + + def to_dict(self): + """ + Serializes this instance to a Python dictionary. Override the default [`~PretrainedConfig.to_dict`]. + + Returns: + `Dict[str, any]`: Dictionary of all the attributes that make up this configuration instance, + """ + output = copy.deepcopy(self.__dict__) + output["text_config"] = self.text_config.to_dict() + output["vision_config"] = self.vision_config.to_dict() + output["model_type"] = self.__class__.model_type + return output diff --git a/core/models/encoders/clip_modules/convert_clip_original_pytorch_to_hf.py b/core/models/encoders/clip_modules/convert_clip_original_pytorch_to_hf.py new file mode 100644 index 0000000000000000000000000000000000000000..58886aa88a3440b691235b66be98f75e99b313e0 --- /dev/null +++ b/core/models/encoders/clip_modules/convert_clip_original_pytorch_to_hf.py @@ -0,0 +1,148 @@ +# coding=utf-8 +# Copyright 2021 The HuggingFace Inc. team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
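+ +# Standalone conversion CLI: it loads an original OpenAI CLIP checkpoint via clip.load, +# copies the weights into a transformers CLIPModel, verifies that both models produce +# matching logits with torch.allclose, and saves the result with save_pretrained.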
+
+import argparse
+
+import torch
+
+from clip import load
+from transformers import CLIPConfig, CLIPModel
+
+
+def copy_attn_layer(hf_attn_layer, pt_attn_layer):
+    q_proj, k_proj, v_proj = pt_attn_layer.in_proj_weight.chunk(3, dim=0)
+    q_proj_bias, k_proj_bias, v_proj_bias = pt_attn_layer.in_proj_bias.chunk(3, dim=0)
+
+    out_proj_weights = pt_attn_layer.out_proj.weight
+    out_proj_bias = pt_attn_layer.out_proj.bias
+
+    hf_attn_layer.q_proj.weight.data = q_proj
+    hf_attn_layer.q_proj.bias.data = q_proj_bias
+
+    hf_attn_layer.k_proj.weight.data = k_proj
+    hf_attn_layer.k_proj.bias.data = k_proj_bias
+
+    hf_attn_layer.v_proj.weight.data = v_proj
+    hf_attn_layer.v_proj.bias.data = v_proj_bias
+
+    hf_attn_layer.out_proj.weight = out_proj_weights
+    hf_attn_layer.out_proj.bias = out_proj_bias
+
+
+def copy_mlp(hf_mlp, pt_mlp):
+    copy_linear(hf_mlp.fc1, pt_mlp.c_fc)
+    copy_linear(hf_mlp.fc2, pt_mlp.c_proj)
+
+
+def copy_linear(hf_linear, pt_linear):
+    hf_linear.weight = pt_linear.weight
+    hf_linear.bias = pt_linear.bias
+
+
+def copy_layer(hf_layer, pt_layer):
+    # copy layer norms
+    copy_linear(hf_layer.layer_norm1, pt_layer.ln_1)
+    copy_linear(hf_layer.layer_norm2, pt_layer.ln_2)
+
+    # copy MLP
+    copy_mlp(hf_layer.mlp, pt_layer.mlp)
+
+    # copy attn
+    copy_attn_layer(hf_layer.self_attn, pt_layer.attn)
+
+
+def copy_layers(hf_layers, pt_layers):
+    for hf_layer, pt_layer in zip(hf_layers, pt_layers):
+        copy_layer(hf_layer, pt_layer)
+
+
+def copy_encoder(hf_encoder, pt_model):
+    # copy embeds
+    hf_encoder.embeddings.token_embedding.weight = pt_model.token_embedding.weight
+    hf_encoder.embeddings.position_embedding.weight.data = pt_model.positional_embedding
+
+    # copy layer norm
+    copy_linear(hf_encoder.final_layer_norm, pt_model.ln_final)
+
+    # copy hidden layers
+    copy_layers(hf_encoder.encoder.layers, pt_model.transformer.resblocks)
+
+
+def copy_text_model_and_projection(hf_model, pt_model):
+    # copy projection
+    hf_model.text_projection.weight.data = pt_model.text_projection.data.T
+
+    # copy text encoder
+    copy_encoder(hf_model.text_model, pt_model)
+
+
+def copy_vision_model_and_projection(hf_model, pt_model):
+    # copy projection
+    hf_model.visual_projection.weight.data = pt_model.visual.proj.data.T
+
+    # copy layer norms
+    copy_linear(hf_model.vision_model.pre_layrnorm, pt_model.visual.ln_pre)
+    copy_linear(hf_model.vision_model.post_layernorm, pt_model.visual.ln_post)
+
+    # copy embeds
+    hf_model.vision_model.embeddings.patch_embedding.weight.data = pt_model.visual.conv1.weight.data
+    hf_model.vision_model.embeddings.class_embedding = pt_model.visual.class_embedding
+    hf_model.vision_model.embeddings.position_embedding.weight.data = pt_model.visual.positional_embedding.data
+
+    # copy encoder
+    copy_layers(hf_model.vision_model.encoder.layers, pt_model.visual.transformer.resblocks)
+
+
+@torch.no_grad()
+def convert_clip_checkpoint(checkpoint_path, pytorch_dump_folder_path, config_path=None):
+    """
+    Copy/paste/tweak model's weights to transformers design.
+    """
+    if config_path is not None:
+        config = CLIPConfig.from_pretrained(config_path)
+    else:
+        config = CLIPConfig(projection_dim=512, text_config={}, vision_config={})
+
+    hf_model = CLIPModel(config).eval()
+
+    pt_model, _ = load(checkpoint_path, device="cpu", jit=False)
+    pt_model = pt_model.eval()
+
+    copy_text_model_and_projection(hf_model, pt_model)
+    copy_vision_model_and_projection(hf_model, pt_model)
+    hf_model.logit_scale = pt_model.logit_scale
+
+    input_ids = torch.arange(0, 77).unsqueeze(0)
+    pixel_values = torch.randn(1, 3, 224, 224)
+
+    hf_logits_per_image, hf_logits_per_text = hf_model(
+        input_ids=input_ids, pixel_values=pixel_values, return_dict=True
+    )[1:3]
+    pt_logits_per_image, pt_logits_per_text = pt_model(pixel_values, input_ids)
+
+    assert torch.allclose(hf_logits_per_image, pt_logits_per_image, atol=1e-3)
+    assert torch.allclose(hf_logits_per_text, pt_logits_per_text, atol=1e-3)
+
+    hf_model.save_pretrained(pytorch_dump_folder_path)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--pytorch_dump_folder_path", default=None, type=str, help="Path to the output PyTorch model.")
+    parser.add_argument("--checkpoint_path", default=None, type=str, help="Path to the original OpenAI CLIP checkpoint")
+    parser.add_argument("--config_path", default=None, type=str, help="Path to hf config.json of model to convert")
+    args = parser.parse_args()
+
+    convert_clip_checkpoint(args.checkpoint_path, args.pytorch_dump_folder_path, args.config_path)
diff --git a/core/models/encoders/clip_modules/feature_extraction_clip.py b/core/models/encoders/clip_modules/feature_extraction_clip.py
new file mode 100644
index 0000000000000000000000000000000000000000..07b7c41828d8e71473783cd1a663b0e4f7259ce4
--- /dev/null
+++ b/core/models/encoders/clip_modules/feature_extraction_clip.py
@@ -0,0 +1,165 @@
+# coding=utf-8
+# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Feature extractor class for CLIP."""
+
+from typing import List, Optional, Union
+
+import numpy as np
+from PIL import Image
+
+from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin
+from ...image_utils import ImageFeatureExtractionMixin, is_torch_tensor
+from ...utils import TensorType, logging
+
+
+logger = logging.get_logger(__name__)
+
+
+class CLIPFeatureExtractor(FeatureExtractionMixin, ImageFeatureExtractionMixin):
+    r"""
+    Constructs a CLIP feature extractor.
+
+    This feature extractor inherits from [`FeatureExtractionMixin`] which contains most of the main methods. Users
+    should refer to this superclass for more information regarding those methods.
+
+    Args:
+        do_resize (`bool`, *optional*, defaults to `True`):
+            Whether to resize the input to a certain `size`.
+        size (`int`, *optional*, defaults to 224):
+            Resize the input to the given size. Only has an effect if `do_resize` is set to `True`.
+        resample (`int`, *optional*, defaults to `PIL.Image.BICUBIC`):
+            An optional resampling filter.
+            This can be one of `PIL.Image.NEAREST`, `PIL.Image.BOX`,
+            `PIL.Image.BILINEAR`, `PIL.Image.HAMMING`, `PIL.Image.BICUBIC` or `PIL.Image.LANCZOS`. Only has an effect
+            if `do_resize` is set to `True`.
+        do_center_crop (`bool`, *optional*, defaults to `True`):
+            Whether to crop the input at the center. If the input size is smaller than `crop_size` along any edge, the
+            image is padded with 0's and then center cropped.
+        crop_size (`int`, *optional*, defaults to 224):
+            Desired output size when applying center-cropping. Only has an effect if `do_center_crop` is set to `True`.
+        do_normalize (`bool`, *optional*, defaults to `False`):
+            Whether or not to normalize the input with `image_mean` and `image_std`.
+        image_mean (`List[float]`, *optional*, defaults to `[0.48145466, 0.4578275, 0.40821073]`):
+            The sequence of means for each channel, to be used when normalizing images.
+        image_std (`List[float]`, *optional*, defaults to `[0.26862954, 0.26130258, 0.27577711]`):
+            The sequence of standard deviations for each channel, to be used when normalizing images.
+        do_convert_rgb (`bool`, *optional*, defaults to `False`):
+            Whether or not to convert `PIL.Image.Image` into `RGB` format.
+    """
+
+    model_input_names = ["pixel_values"]
+
+    def __init__(
+        self,
+        do_resize=True,
+        size=224,
+        resample=Image.BICUBIC,
+        do_center_crop=True,
+        crop_size=224,
+        do_normalize=False,
+        image_mean=None,
+        image_std=None,
+        do_convert_rgb=False,
+        **kwargs
+    ):
+        super().__init__(**kwargs)
+        self.do_resize = do_resize
+        self.size = size
+        self.resample = resample
+        self.do_center_crop = do_center_crop
+        self.crop_size = crop_size
+        self.do_normalize = do_normalize
+        self.image_mean = image_mean if image_mean is not None else [0.48145466, 0.4578275, 0.40821073]
+        self.image_std = image_std if image_std is not None else [0.26862954, 0.26130258, 0.27577711]
+        self.do_convert_rgb = do_convert_rgb
+
+    def __call__(
+        self,
+        images: Union[
+            Image.Image, np.ndarray, "torch.Tensor", List[Image.Image], List[np.ndarray], List["torch.Tensor"]  # noqa
+        ],
+        return_tensors: Optional[Union[str, TensorType]] = None,
+        **kwargs
+    ) -> BatchFeature:
+        """
+        Main method to prepare one or several image(s) for the model.
+
+        NumPy arrays and PyTorch tensors are converted to PIL images when resizing, so the most efficient is to pass
+        PIL images.
+
+        Args:
+            images (`PIL.Image.Image`, `np.ndarray`, `torch.Tensor`, `List[PIL.Image.Image]`, `List[np.ndarray]`, `List[torch.Tensor]`):
+                The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch
+                tensor. In case of a NumPy array/PyTorch tensor, each image should be of shape (C, H, W), where C is a
+                number of channels, H and W are image height and width.
+            return_tensors (`str` or [`~utils.TensorType`], *optional*):
+                If set, will return tensors of a particular framework. Acceptable values are:
+
+                - `'tf'`: Return TensorFlow `tf.constant` objects.
+                - `'pt'`: Return PyTorch `torch.Tensor` objects.
+                - `'np'`: Return NumPy `np.ndarray` objects.
+                - `'jax'`: Return JAX `jnp.ndarray` objects.
+
+        Returns:
+            [`BatchFeature`]: A [`BatchFeature`] with the following fields:
+
+            - **pixel_values** -- Pixel values to be fed to a model.
+ """ + # Input type checking for clearer error + valid_images = False + + # Check that images has a valid type + if isinstance(images, (Image.Image, np.ndarray)) or is_torch_tensor(images): + valid_images = True + elif isinstance(images, (list, tuple)): + if len(images) == 0 or isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]): + valid_images = True + + if not valid_images: + raise ValueError( + "Images must of type `PIL.Image.Image`, `np.ndarray` or `torch.Tensor` (single example), " + "`List[PIL.Image.Image]`, `List[np.ndarray]` or `List[torch.Tensor]` (batch of examples)." + ) + + is_batched = bool( + isinstance(images, (list, tuple)) + and (isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0])) + ) + + if not is_batched: + images = [images] + + # transformations (convert rgb + resizing + center cropping + normalization) + if self.do_convert_rgb: + images = [self.convert_rgb(image) for image in images] + if self.do_resize and self.size is not None and self.resample is not None: + images = [ + self.resize(image=image, size=self.size, resample=self.resample, default_to_square=False) + for image in images + ] + if self.do_center_crop and self.crop_size is not None: + images = [self.center_crop(image, self.crop_size) for image in images] + if self.do_normalize: + images = [self.normalize(image=image, mean=self.image_mean, std=self.image_std) for image in images] + + # return as BatchFeature + data = {"pixel_values": images} + encoded_inputs = BatchFeature(data=data, tensor_type=return_tensors) + + return encoded_inputs diff --git a/core/models/encoders/clip_modules/modeling_clip.py b/core/models/encoders/clip_modules/modeling_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..ad4cde5498aa0c4a54fecc5626e8b174e450a548 --- /dev/null +++ b/core/models/encoders/clip_modules/modeling_clip.py @@ -0,0 +1,1088 @@ +# coding=utf-8 +# Copyright 2021 The OpenAI Team Authors and The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" PyTorch CLIP model.""" + +from einops import rearrange +from dataclasses import dataclass +from typing import Any, Optional, Tuple, Union + +import torch +import torch.utils.checkpoint +from torch import nn + +from transformers.activations import ACT2FN +from transformers.modeling_outputs import BaseModelOutput, BaseModelOutputWithPooling +from transformers.modeling_utils import PreTrainedModel +from transformers.utils import ( + ModelOutput, + add_start_docstrings, + add_start_docstrings_to_model_forward, + logging, + replace_return_docstrings, +) +from .configuration_clip import CLIPConfig, CLIPTextConfig, CLIPVisionConfig +from .modules_video import SpatioTemporalAttention, ContinuousPositionBias, FeedForward + +logger = logging.get_logger(__name__) + +_CHECKPOINT_FOR_DOC = "openai/clip-vit-base-patch32" + +CLIP_PRETRAINED_MODEL_ARCHIVE_LIST = [ + "openai/clip-vit-base-patch32", + # See all CLIP models at https://huggingface.co/models?filter=clip +] + + +# Copied from transformers.models.bart.modeling_bart._expand_mask +def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None): + """ + Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. + """ + bsz, src_len = mask.size() + tgt_len = tgt_len if tgt_len is not None else src_len + + expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype) + + inverted_mask = 1.0 - expanded_mask + + return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min) + + +# contrastive loss function, adapted from +# https://sachinruk.github.io/blog/pytorch/pytorch%20lightning/loss%20function/gpu/2021/03/07/CLIP.html +def contrastive_loss(logits: torch.Tensor) -> torch.Tensor: + return nn.functional.cross_entropy(logits, torch.arange(len(logits), device=logits.device)) + + +def clip_loss(similarity: torch.Tensor) -> torch.Tensor: + caption_loss = contrastive_loss(similarity) + image_loss = contrastive_loss(similarity.T) + return (caption_loss + image_loss) / 2.0 + + +@dataclass +class CLIPOutput(ModelOutput): + """ + Args: + loss (`torch.Tensor` of shape `(1,)`, *optional*, returned when `return_loss` is `True`): + Contrastive loss for image-text similarity. + logits_per_image:(`torch.Tensor` of shape `(image_batch_size, text_batch_size)`): + The scaled dot product scores between `image_embeds` and `text_embeds`. This represents the image-text + similarity scores. + logits_per_text:(`torch.Tensor` of shape `(text_batch_size, image_batch_size)`): + The scaled dot product scores between `text_embeds` and `image_embeds`. This represents the text-image + similarity scores. + text_embeds(`torch.Tensor` of shape `(batch_size, output_dim`): + The text embeddings obtained by applying the projection layer to the pooled output of [`CLIPTextModel`]. + image_embeds(`torch.Tensor` of shape `(batch_size, output_dim`): + The image embeddings obtained by applying the projection layer to the pooled output of [`CLIPVisionModel`]. + text_model_output(`BaseModelOutputWithPooling`): + The output of the [`CLIPTextModel`]. + vision_model_output(`BaseModelOutputWithPooling`): + The output of the [`CLIPVisionModel`]. 
+ """ + + loss: Optional[torch.Tensor] = None + logits_per_image: torch.Tensor = None + logits_per_text: torch.Tensor = None + text_embeds: torch.Tensor = None + image_embeds: torch.Tensor = None + text_model_output: BaseModelOutputWithPooling = None + vision_model_output: BaseModelOutputWithPooling = None + + def to_tuple(self) -> Tuple[Any]: + return tuple( + self[k] if k not in ["text_model_output", "vision_model_output"] else getattr(self, k).to_tuple() + for k in self.keys() + ) + + +class CLIPVisionEmbeddings(nn.Module): + def __init__(self, config: CLIPVisionConfig): + super().__init__() + self.config = config + self.embed_dim = config.hidden_size + self.image_size = config.image_size + self.patch_size = config.patch_size + + self.class_embedding = nn.Parameter(torch.randn(self.embed_dim)) + + self.patch_embedding = nn.Conv2d( + in_channels=3, out_channels=self.embed_dim, kernel_size=self.patch_size, stride=self.patch_size, bias=False + ) + + self.num_patches = (self.image_size // self.patch_size) ** 2 + self.num_positions = self.num_patches + 1 + self.position_embedding = nn.Embedding(self.num_positions, self.embed_dim) + self.register_buffer("position_ids", torch.arange(self.num_positions).expand((1, -1))) + + def forward(self, pixel_values: torch.Tensor) -> torch.Tensor: + batch_size = pixel_values.shape[0] + patch_embeds = self.patch_embedding(pixel_values) # shape = [*, width, grid, grid] + patch_embeds = patch_embeds.flatten(2).transpose(1, 2) + + class_embeds = self.class_embedding.expand(batch_size, 1, -1) + embeddings = torch.cat([class_embeds, patch_embeds], dim=1) + embeddings = embeddings + self.position_embedding(self.position_ids) + return embeddings + + +class CLIPTextEmbeddings(nn.Module): + def __init__(self, config: CLIPTextConfig): + super().__init__() + embed_dim = config.hidden_size + + self.token_embedding = nn.Embedding(config.vocab_size, embed_dim) + self.position_embedding = nn.Embedding(config.max_position_embeddings, embed_dim) + + # position_ids (1, len position emb) is contiguous in memory and exported when serialized + self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1))) + + def forward( + self, + input_ids: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + inputs_embeds: Optional[torch.Tensor] = None, + ) -> torch.Tensor: + seq_length = input_ids.shape[-1] if input_ids is not None else inputs_embeds.shape[-2] + if position_ids is None: + position_ids = self.position_ids[:, :seq_length] + + if inputs_embeds is None: + inputs_embeds = self.token_embedding(input_ids) + + position_embeddings = self.position_embedding(position_ids) + embeddings = inputs_embeds + position_embeddings + + return embeddings + + +class CLIPAttention(nn.Module): + """Multi-headed attention from 'Attention Is All You Need' paper""" + + def __init__(self, config): + super().__init__() + self.config = config + self.embed_dim = config.hidden_size + self.num_heads = config.num_attention_heads + self.head_dim = self.embed_dim // self.num_heads + if self.head_dim * self.num_heads != self.embed_dim: + raise ValueError( + f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:" + f" {self.num_heads})." 
+            )
+        self.scale = self.head_dim**-0.5
+        self.dropout = config.attention_dropout
+
+        self.k_proj = nn.Linear(self.embed_dim, self.embed_dim)
+        self.v_proj = nn.Linear(self.embed_dim, self.embed_dim)
+        self.q_proj = nn.Linear(self.embed_dim, self.embed_dim)
+        self.out_proj = nn.Linear(self.embed_dim, self.embed_dim)
+
+    def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
+        return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()
+
+    def forward(
+        self,
+        hidden_states: torch.Tensor,
+        attention_mask: Optional[torch.Tensor] = None,
+        causal_attention_mask: Optional[torch.Tensor] = None,
+        output_attentions: Optional[bool] = False,
+    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
+        """Input shape: Batch x Time x Channel"""
+
+        bsz, tgt_len, embed_dim = hidden_states.size()
+
+        # get query proj
+        query_states = self.q_proj(hidden_states) * self.scale
+        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
+        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
+
+        proj_shape = (bsz * self.num_heads, -1, self.head_dim)
+        query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)
+        key_states = key_states.view(*proj_shape)
+        value_states = value_states.view(*proj_shape)
+
+        src_len = key_states.size(1)
+        attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))
+
+        if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len):
+            raise ValueError(
+                f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is"
+                f" {attn_weights.size()}"
+            )
+
+        # apply the causal_attention_mask first
+        if causal_attention_mask is not None:
+            if causal_attention_mask.size() != (bsz, 1, tgt_len, src_len):
+                raise ValueError(
+                    f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is"
+                    f" {causal_attention_mask.size()}"
+                )
+            attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + causal_attention_mask
+            attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
+
+        if attention_mask is not None:
+            if attention_mask.size() != (bsz, 1, tgt_len, src_len):
+                raise ValueError(
+                    f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}"
+                )
+            attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask
+            attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
+
+        attn_weights = nn.functional.softmax(attn_weights, dim=-1)
+
+        if output_attentions:
+            # this operation is a bit awkward, but it's required to
+            # make sure that attn_weights keeps its gradient.
+            # In order to do so, attn_weights have to be reshaped
+            # twice and have to be reused in the following
+            attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
+            attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)
+        else:
+            attn_weights_reshaped = None
+
+        attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training)
+
+        attn_output = torch.bmm(attn_probs, value_states)
+
+        if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim):
+            raise ValueError(
+                f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is"
+                f" {attn_output.size()}"
+            )
+
+        attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)
+        attn_output = attn_output.transpose(1, 2)
+        attn_output = attn_output.reshape(bsz, tgt_len, embed_dim)
+
+        attn_output = self.out_proj(attn_output)
+
+        return attn_output, attn_weights_reshaped
+
+
+class CLIPMLP(nn.Module):
+    def __init__(self, config):
+        super().__init__()
+        self.config = config
+        self.activation_fn = ACT2FN[config.hidden_act]
+        self.fc1 = nn.Linear(config.hidden_size, config.intermediate_size)
+        self.fc2 = nn.Linear(config.intermediate_size, config.hidden_size)
+
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        hidden_states = self.fc1(hidden_states)
+        hidden_states = self.activation_fn(hidden_states)
+        hidden_states = self.fc2(hidden_states)
+        return hidden_states
+
+
+class CLIPEncoderLayer(nn.Module):
+    def __init__(self, config: CLIPConfig, add_temporal_attention: bool = False):
+        super().__init__()
+        self.embed_dim = config.hidden_size
+        self.num_heads = config.num_attention_heads
+        self.self_attn = CLIPAttention(config)
+        self.layer_norm1 = nn.LayerNorm(self.embed_dim)
+        self.mlp = CLIPMLP(config)
+        self.layer_norm2 = nn.LayerNorm(self.embed_dim)
+
+        if add_temporal_attention:
+            self.temporal_self_attn = SpatioTemporalAttention(
+                dim=self.embed_dim,
+                dim_head=self.embed_dim // self.num_heads,
+                heads=self.num_heads,
+            )
+
+    def forward(
+        self,
+        hidden_states: torch.Tensor,
+        attention_mask: torch.Tensor,
+        causal_attention_mask: torch.Tensor,
+        output_attentions: Optional[bool] = False,
+    ) -> Tuple[torch.Tensor]:
+        """
+        Args:
+            hidden_states (`torch.Tensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
+            attention_mask (`torch.Tensor`): attention mask of size
+                `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
+            causal_attention_mask (`torch.Tensor`): causal attention mask of size
+                `(batch, 1, tgt_len, src_len)`.
+            output_attentions (`bool`, *optional*):
+                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
+                returned tensors for more detail.
+ """ + is_video = hidden_states.ndim == 4 + if is_video: + b, f, l, c = hidden_states.shape + hidden_states = rearrange(hidden_states, 'b f l c -> (b f) l c') + residual = hidden_states + + hidden_states = self.layer_norm1(hidden_states) + hidden_states, _ = self.self_attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + causal_attention_mask=causal_attention_mask, + output_attentions=output_attentions, + ) + hidden_states = residual + hidden_states + if is_video: + hidden_states = rearrange(hidden_states, '(b f) l c -> (b l) f c', f=f) + hidden_states = self.temporal_self_attn(hidden_states) + hidden_states = rearrange(hidden_states, '(b l) f c -> b f l c', b=b) + residual = hidden_states + hidden_states = self.layer_norm2(hidden_states) + hidden_states = self.mlp(hidden_states) + hidden_states = residual + hidden_states + + outputs = (hidden_states,) + + if output_attentions: + outputs += (attn_weights,) + + return outputs + + +class CLIPPreTrainedModel(PreTrainedModel): + """ + An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained + models. + """ + + config_class = CLIPConfig + base_model_prefix = "clip" + supports_gradient_checkpointing = True + _keys_to_ignore_on_load_missing = [r"position_ids"] + + def _init_weights(self, module): + """Initialize the weights""" + factor = self.config.initializer_factor + if isinstance(module, CLIPTextEmbeddings): + module.token_embedding.weight.data.normal_(mean=0.0, std=factor * 0.02) + module.position_embedding.weight.data.normal_(mean=0.0, std=factor * 0.02) + elif isinstance(module, CLIPVisionEmbeddings): + factor = self.config.initializer_factor + nn.init.normal_(module.class_embedding, mean=0.0, std=module.embed_dim**-0.5 * factor) + nn.init.normal_(module.patch_embedding.weight, std=module.config.initializer_range * factor) + nn.init.normal_(module.position_embedding.weight, std=module.config.initializer_range * factor) + elif isinstance(module, CLIPAttention): + factor = self.config.initializer_factor + in_proj_std = (module.embed_dim**-0.5) * ((2 * module.config.num_hidden_layers) ** -0.5) * factor + out_proj_std = (module.embed_dim**-0.5) * factor + nn.init.normal_(module.q_proj.weight, std=in_proj_std) + nn.init.normal_(module.k_proj.weight, std=in_proj_std) + nn.init.normal_(module.v_proj.weight, std=in_proj_std) + nn.init.normal_(module.out_proj.weight, std=out_proj_std) + elif isinstance(module, CLIPMLP): + factor = self.config.initializer_factor + in_proj_std = ( + (module.config.hidden_size**-0.5) * ((2 * module.config.num_hidden_layers) ** -0.5) * factor + ) + fc_std = (2 * module.config.hidden_size) ** -0.5 * factor + nn.init.normal_(module.fc1.weight, std=fc_std) + nn.init.normal_(module.fc2.weight, std=in_proj_std) + elif isinstance(module, CLIPModel): + nn.init.normal_( + module.text_projection.weight, + std=module.text_embed_dim**-0.5 * self.config.initializer_factor, + ) + nn.init.normal_( + module.visual_projection.weight, + std=module.vision_embed_dim**-0.5 * self.config.initializer_factor, + ) + + if isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, CLIPEncoder): + module.gradient_checkpointing = value + + +CLIP_START_DOCSTRING = r""" + This model is a PyTorch 
[torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it + as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and + behavior. + + Parameters: + config ([`CLIPConfig`]): Model configuration class with all the parameters of the model. + Initializing with a config file does not load the weights associated with the model, only the + configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights. +""" + +CLIP_TEXT_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`CLIPTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.max_position_embeddings - 1]`. + + [What are position IDs?](../glossary#position-ids) + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + +CLIP_VISION_INPUTS_DOCSTRING = r""" + Args: + pixel_values (`torch.Tensor` of shape `(batch_size, num_channels, height, width)`): + Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained using + [`CLIPFeatureExtractor`]. See [`CLIPFeatureExtractor.__call__`] for details. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + +CLIP_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`CLIPTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. 
+ + [What are attention masks?](../glossary#attention-mask) + position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.max_position_embeddings - 1]`. + + [What are position IDs?](../glossary#position-ids) + pixel_values (`torch.Tensor` of shape `(batch_size, num_channels, height, width)`): + Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained using + [`CLIPFeatureExtractor`]. See [`CLIPFeatureExtractor.__call__`] for details. + return_loss (`bool`, *optional*): + Whether or not to return the contrastive loss. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + + +class CLIPEncoder(nn.Module): + """ + Transformer encoder consisting of `config.num_hidden_layers` self attention layers. Each layer is a + [`CLIPEncoderLayer`]. + + Args: + config: CLIPConfig + """ + + def __init__(self, config: CLIPConfig, add_temporal_attention: bool = False): + super().__init__() + self.config = config + self.layers = nn.ModuleList([CLIPEncoderLayer(config, add_temporal_attention=add_temporal_attention) for _ in range(config.num_hidden_layers)]) + self.gradient_checkpointing = False + + def forward( + self, + inputs_embeds, + attention_mask: Optional[torch.Tensor] = None, + causal_attention_mask: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutput]: + r""" + Args: + inputs_embeds (`torch.Tensor` of shape `(batch_size, sequence_length, hidden_size)`): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. + This is useful if you want more control over how to convert `input_ids` indices into associated vectors + than the model's internal embedding lookup matrix. + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + causal_attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Causal mask for the text model. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors + for more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
+ """ + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + encoder_states = () if output_hidden_states else None + all_attentions = () if output_attentions else None + + hidden_states = inputs_embeds + for idx, encoder_layer in enumerate(self.layers): + if output_hidden_states: + encoder_states = encoder_states + (hidden_states,) + if self.gradient_checkpointing and self.training: + + def create_custom_forward(module): + def custom_forward(*inputs): + return module(*inputs, output_attentions) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(encoder_layer), + hidden_states, + attention_mask, + causal_attention_mask, + ) + else: + layer_outputs = encoder_layer( + hidden_states, + attention_mask, + causal_attention_mask, + output_attentions=output_attentions, + ) + + hidden_states = layer_outputs[0] + + if output_attentions: + all_attentions = all_attentions + (layer_outputs[1],) + if output_hidden_states: + encoder_states = encoder_states + (hidden_states,) + + if not return_dict: + return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None) + return BaseModelOutput( + last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions + ) + + +class CLIPTextTransformer(nn.Module): + def __init__(self, config: CLIPTextConfig): + super().__init__() + self.config = config + embed_dim = config.hidden_size + self.embeddings = CLIPTextEmbeddings(config) + self.encoder = CLIPEncoder(config) + self.final_layer_norm = nn.LayerNorm(embed_dim) + + @add_start_docstrings_to_model_forward(CLIP_TEXT_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=CLIPTextConfig) + def forward( + self, + input_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutputWithPooling]: + r""" + Returns: + + """ + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if input_ids is None: + raise ValueError("You have to specify either input_ids") + + input_shape = input_ids.size() + input_ids = input_ids.view(-1, input_shape[-1]) + + hidden_states = self.embeddings(input_ids=input_ids, position_ids=position_ids) + + bsz, seq_len = input_shape + # CLIP's text model uses causal mask, prepare it here. 
+ # https://github.com/openai/CLIP/blob/cfcffb90e69f37bf2ff1e988237a0fbe41f33c04/clip/model.py#L324 + causal_attention_mask = self._build_causal_attention_mask(bsz, seq_len).to(hidden_states.device) + causal_attention_mask = causal_attention_mask.to(hidden_states.dtype) + # expand attention_mask + if attention_mask is not None: + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + attention_mask = _expand_mask(attention_mask, hidden_states.dtype) + + encoder_outputs = self.encoder( + inputs_embeds=hidden_states, + attention_mask=attention_mask, + causal_attention_mask=causal_attention_mask, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + last_hidden_state = encoder_outputs[0] + last_hidden_state = self.final_layer_norm(last_hidden_state) + + # text_embeds.shape = [batch_size, sequence_length, transformer.width] + # take features from the eot embedding (eot_token is the highest number in each sequence) + pooled_output = last_hidden_state[torch.arange(last_hidden_state.shape[0]), input_ids.argmax(dim=-1)] + + if not return_dict: + return (last_hidden_state, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPooling( + last_hidden_state=last_hidden_state, + pooler_output=pooled_output, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + ) + + def _build_causal_attention_mask(self, bsz, seq_len): + # lazily create causal attention mask, with full attention between the vision tokens + # pytorch uses additive attention mask; fill with -inf + mask = torch.empty(bsz, seq_len, seq_len) + mask.fill_(torch.tensor(float("-inf"))) + mask.triu_(1) # zero out the lower diagonal + mask = mask.unsqueeze(1) # expand mask + return mask + + +class CLIPTextModel(CLIPPreTrainedModel): + config_class = CLIPTextConfig + + def __init__(self, config: CLIPTextConfig): + super().__init__(config) + self.text_model = CLIPTextTransformer(config) + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self) -> nn.Module: + return self.text_model.embeddings.token_embedding + + def set_input_embeddings(self, value): + self.text_model.embeddings.token_embedding = value + + @add_start_docstrings_to_model_forward(CLIP_TEXT_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=CLIPTextConfig) + def forward( + self, + input_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutputWithPooling]: + r""" + Returns: + + Examples: + + ```python + >>> from transformers import CLIPTokenizer, CLIPTextModel + + >>> model = CLIPTextModel.from_pretrained("openai/clip-vit-base-patch32") + >>> tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32") + + >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="pt") + + >>> outputs = model(**inputs) + >>> last_hidden_state = outputs.last_hidden_state + >>> pooled_output = outputs.pooler_output # pooled (EOS token) states + ```""" + return self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + +class 
CLIPVisionTransformer(nn.Module): + def __init__(self, config: CLIPVisionConfig, add_temporal_attention: bool): + super().__init__() + self.config = config + embed_dim = config.hidden_size + + self.embeddings = CLIPVisionEmbeddings(config) + self.pre_layrnorm = nn.LayerNorm(embed_dim) + self.encoder = CLIPEncoder(config, add_temporal_attention=add_temporal_attention) + self.post_layernorm = nn.LayerNorm(embed_dim) + + @add_start_docstrings_to_model_forward(CLIP_VISION_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=CLIPVisionConfig) + def forward( + self, + pixel_values: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutputWithPooling]: + r""" + Returns: + + """ + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + if pixel_values is None: + raise ValueError("You have to specify pixel_values") + + is_video = pixel_values.ndim == 5 + if is_video: + b, f, h, w, c = pixel_values.shape + pixel_values = rearrange(pixel_values, 'b f h w c -> (b f) h w c') + hidden_states = self.embeddings(pixel_values) + hidden_states = self.pre_layrnorm(hidden_states) + if is_video: + hidden_states = rearrange(hidden_states, '(b f) l c -> b f l c', f=f) + encoder_outputs = self.encoder( + inputs_embeds=hidden_states, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + last_hidden_state = encoder_outputs[0] + if is_video: + last_hidden_state = last_hidden_state.mean(1) + pooled_output = last_hidden_state[:, 0, :] + pooled_output = self.post_layernorm(pooled_output) + + if not return_dict: + return (last_hidden_state, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPooling( + last_hidden_state=last_hidden_state, + pooler_output=pooled_output, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + ) + + +class CLIPVisionModel(CLIPPreTrainedModel): + config_class = CLIPVisionConfig + main_input_name = "pixel_values" + + def __init__(self, config: CLIPVisionConfig, add_temporal_attention: bool = False): + super().__init__(config) + self.vision_model = CLIPVisionTransformer(config, add_temporal_attention) + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self) -> nn.Module: + return self.vision_model.embeddings.patch_embedding + + @add_start_docstrings_to_model_forward(CLIP_VISION_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=CLIPVisionConfig) + def forward( + self, + pixel_values: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutputWithPooling]: + r""" + Returns: + + Examples: + + ```python + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, CLIPVisionModel + + >>> model = CLIPVisionModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = 
"http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor(images=image, return_tensors="pt") + + >>> outputs = model(**inputs) + >>> last_hidden_state = outputs.last_hidden_state + >>> pooled_output = outputs.pooler_output # pooled CLS states + ```""" + return self.vision_model( + pixel_values=pixel_values, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + +@add_start_docstrings(CLIP_START_DOCSTRING) +class CLIPModel(CLIPPreTrainedModel): + config_class = CLIPConfig + + def __init__(self, config: CLIPConfig, add_temporal_attention: bool = False): + super().__init__(config) + + if not isinstance(config.text_config, CLIPTextConfig): + raise ValueError( + "config.text_config is expected to be of type CLIPTextConfig but is of type" + f" {type(config.text_config)}." + ) + + if not isinstance(config.vision_config, CLIPVisionConfig): + raise ValueError( + "config.vision_config is expected to be of type CLIPVisionConfig but is of type" + f" {type(config.vision_config)}." + ) + + text_config = config.text_config + vision_config = config.vision_config + + self.projection_dim = config.projection_dim + self.text_embed_dim = text_config.hidden_size + self.vision_embed_dim = vision_config.hidden_size + + self.text_model = CLIPTextTransformer(text_config) + self.vision_model = CLIPVisionTransformer(vision_config, add_temporal_attention=add_temporal_attention) + + self.visual_projection = nn.Linear(self.vision_embed_dim, self.projection_dim, bias=False) + self.text_projection = nn.Linear(self.text_embed_dim, self.projection_dim, bias=False) + self.logit_scale = nn.Parameter(torch.ones([]) * self.config.logit_scale_init_value) + + # Initialize weights and apply final processing + self.post_init() + + @add_start_docstrings_to_model_forward(CLIP_TEXT_INPUTS_DOCSTRING) + def get_text_features( + self, + input_ids: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> torch.Tensor: + r""" + Returns: + text_features (`torch.Tensor` of shape `(batch_size, output_dim`): The text embeddings obtained by + applying the projection layer to the pooled output of [`CLIPTextModel`]. + + Examples: + + ```python + >>> from transformers import CLIPTokenizer, CLIPModel + + >>> model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32") + + >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="pt") + >>> text_features = model.get_text_features(**inputs) + ```""" + # Use CLIP model's config for some fields (if specified) instead of those of vision & text components. 
+ output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + text_outputs = self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + pooled_output = text_outputs[1] + text_features = self.text_projection(pooled_output) + + return text_features + + @add_start_docstrings_to_model_forward(CLIP_VISION_INPUTS_DOCSTRING) + def get_image_features( + self, + pixel_values: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> torch.Tensor: + r""" + Returns: + image_features (`torch.Tensor` of shape `(batch_size, output_dim`): The image embeddings obtained by + applying the projection layer to the pooled output of [`CLIPVisionModel`]. + + Examples: + + ```python + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, CLIPModel + + >>> model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor(images=image, return_tensors="pt") + + >>> image_features = model.get_image_features(**inputs) + ```""" + # Use CLIP model's config for some fields (if specified) instead of those of vision & text components. + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + vision_outputs = self.vision_model( + pixel_values=pixel_values, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + pooled_output = vision_outputs[1] # pooled_output + image_features = self.visual_projection(pooled_output) + + return image_features + + @add_start_docstrings_to_model_forward(CLIP_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=CLIPOutput, config_class=CLIPConfig) + def forward( + self, + input_ids: Optional[torch.LongTensor] = None, + pixel_values: Optional[torch.Tensor] = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + return_loss: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, CLIPOutput]: + r""" + Returns: + + Examples: + + ```python + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, CLIPModel + + >>> model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor( + ... 
text=["a photo of a cat", "a photo of a dog"], images=image, return_tensors="pt", padding=True + ... ) + + >>> outputs = model(**inputs) + >>> logits_per_image = outputs.logits_per_image # this is the image-text similarity score + >>> probs = logits_per_image.softmax(dim=1) # we can take the softmax to get the label probabilities + ```""" + # Use CLIP model's config for some fields (if specified) instead of those of vision & text components. + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + vision_outputs = self.vision_model( + pixel_values=pixel_values, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + text_outputs = self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + image_embeds = vision_outputs[1] + image_embeds = self.visual_projection(image_embeds) + + text_embeds = text_outputs[1] + text_embeds = self.text_projection(text_embeds) + + # normalized features + image_embeds = image_embeds / image_embeds.norm(p=2, dim=-1, keepdim=True) + text_embeds = text_embeds / text_embeds.norm(p=2, dim=-1, keepdim=True) + + # cosine similarity as logits + logit_scale = self.logit_scale.exp() + logits_per_text = torch.matmul(text_embeds, image_embeds.t()) * logit_scale + logits_per_image = logits_per_text.T + + loss = None + if return_loss: + loss = clip_loss(logits_per_text) + + if not return_dict: + output = (logits_per_image, logits_per_text, text_embeds, image_embeds, text_outputs, vision_outputs) + return ((loss,) + output) if loss is not None else output + + return CLIPOutput( + loss=loss, + logits_per_image=logits_per_image, + logits_per_text=logits_per_text, + text_embeds=text_embeds, + image_embeds=image_embeds, + text_model_output=text_outputs, + vision_model_output=vision_outputs, + ) diff --git a/core/models/encoders/clip_modules/modeling_flax_clip.py b/core/models/encoders/clip_modules/modeling_flax_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..aa8ef87d5bf10bd8bfca7011f68629742ca24d01 --- /dev/null +++ b/core/models/encoders/clip_modules/modeling_flax_clip.py @@ -0,0 +1,1180 @@ +# coding=utf-8 +# Copyright 2021 The OpenAI Team Authors, The Google Flax Team Authors and The HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from typing import Any, Optional, Tuple, Union
+
+import flax
+import flax.linen as nn
+import jax
+import jax.numpy as jnp
+from flax.core.frozen_dict import FrozenDict, freeze, unfreeze
+from flax.linen import combine_masks, make_causal_mask
+from flax.linen.attention import dot_product_attention_weights
+from flax.traverse_util import flatten_dict, unflatten_dict
+from jax import lax
+
+from ...modeling_flax_outputs import FlaxBaseModelOutput, FlaxBaseModelOutputWithPooling
+from ...modeling_flax_utils import (
+    ACT2FN,
+    FlaxPreTrainedModel,
+    append_replace_return_docstrings,
+    overwrite_call_docstring,
+)
+from ...utils import ModelOutput, add_start_docstrings, logging
+from .configuration_clip import CLIPConfig, CLIPTextConfig, CLIPVisionConfig
+
+
+logger = logging.get_logger(__name__)
+
+CLIP_START_DOCSTRING = r"""
+
+    This model inherits from [`FlaxPreTrainedModel`]. Check the superclass documentation for the generic methods the
+    library implements for all its models (such as downloading, saving and converting weights from PyTorch models).
+
+    This model is also a Flax Linen [flax.linen.Module](https://flax.readthedocs.io/en/latest/flax.linen.html#module)
+    subclass. Use it as a regular Flax linen Module and refer to the Flax documentation for all matter related to
+    general usage and behavior.
+
+    Finally, this model supports inherent JAX features such as:
+
+    - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit)
+    - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation)
+    - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap)
+    - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap)
+
+    Parameters:
+        config ([`CLIPConfig`]): Model configuration class with all the parameters of the model.
+            Initializing with a config file does not load the weights associated with the model, only the
+            configuration. Check out the [`~FlaxPreTrainedModel.from_pretrained`] method to load the model weights.
+        dtype (`jax.numpy.dtype`, *optional*, defaults to `jax.numpy.float32`):
+            The data type of the computation. Can be one of `jax.numpy.float32`, `jax.numpy.float16` (on GPUs) and
+            `jax.numpy.bfloat16` (on TPUs).
+
+            This can be used to enable mixed-precision training or half-precision inference on GPUs or TPUs. If
+            specified all the computation will be performed with the given `dtype`.
+
+            **Note that this only specifies the dtype of the computation and does not influence the dtype of model
+            parameters.**
+
+            If you wish to change the dtype of the model parameters, see [`~FlaxPreTrainedModel.to_fp16`] and
+            [`~FlaxPreTrainedModel.to_bf16`].
+"""
+
+CLIP_TEXT_INPUTS_DOCSTRING = r"""
+    Args:
+        input_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`):
+            Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
+            it.
+
+            Indices can be obtained using [`CLIPTokenizer`]. See [`PreTrainedTokenizer.encode`] and
+            [`PreTrainedTokenizer.__call__`] for details.
+
+            [What are input IDs?](../glossary#input-ids)
+        attention_mask (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*):
+            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
+
+            - 1 for tokens that are **not masked**,
+            - 0 for tokens that are **masked**.
+ + [What are attention masks?](../glossary#attention-mask) + position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.max_position_embeddings - 1]`. + + [What are position IDs?](../glossary#position-ids) + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + +CLIP_VISION_INPUTS_DOCSTRING = r""" + Args: + pixel_values (`numpy.ndarray` of shape `(batch_size, num_channels, height, width)`): + Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained using + [`CLIPFeatureExtractor`]. See [`CLIPFeatureExtractor.__call__`] for details. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + +CLIP_INPUTS_DOCSTRING = r""" + Args: + input_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`CLIPTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + position_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`, *optional*): + Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0, + config.max_position_embeddings - 1]`. + + [What are position IDs?](../glossary#position-ids) + pixel_values (`numpy.ndarray` of shape `(batch_size, num_channels, height, width)`): + Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained using + [`CLIPFeatureExtractor`]. See [`CLIPFeatureExtractor.__call__`] for details. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
+""" + + +@flax.struct.dataclass +class FlaxCLIPOutput(ModelOutput): + """ + Args: + logits_per_image:(`jnp.ndarray` of shape `(image_batch_size, text_batch_size)`): + The scaled dot product scores between `image_embeds` and `text_embeds`. This represents the image-text + similarity scores. + logits_per_text:(`jnp.ndarray` of shape `(text_batch_size, image_batch_size)`): + The scaled dot product scores between `text_embeds` and `image_embeds`. This represents the text-image + similarity scores. + text_embeds(`jnp.ndarray` of shape `(batch_size, output_dim`): + The text embeddings obtained by applying the projection layer to the pooled output of + [`FlaxCLIPTextModel`]. + image_embeds(`jnp.ndarray` of shape `(batch_size, output_dim`): + The image embeddings obtained by applying the projection layer to the pooled output of + [`FlaxCLIPVisionModel`]. + text_model_output(`FlaxBaseModelOutputWithPooling`): + The output of the [`FlaxCLIPTextModel`]. + vision_model_output(`FlaxBaseModelOutputWithPooling`): + The output of the [`FlaxCLIPVisionModel`]. + """ + + logits_per_image: jnp.ndarray = None + logits_per_text: jnp.ndarray = None + text_embeds: jnp.ndarray = None + image_embeds: jnp.ndarray = None + text_model_output: FlaxBaseModelOutputWithPooling = None + vision_model_output: FlaxBaseModelOutputWithPooling = None + + def to_tuple(self) -> Tuple[Any]: + return tuple( + self[k] if k not in ["text_model_output", "vision_model_output"] else getattr(self, k).to_tuple() + for k in self.keys() + ) + + +class FlaxCLIPVisionEmbeddings(nn.Module): + config: CLIPVisionConfig + dtype: jnp.dtype = jnp.float32 + + def setup(self): + embed_dim = self.config.hidden_size + image_size = self.config.image_size + patch_size = self.config.patch_size + + self.class_embedding = self.param("class_embedding", jax.nn.initializers.normal(stddev=0.02), (embed_dim,)) + + self.patch_embedding = nn.Conv( + embed_dim, + kernel_size=(patch_size, patch_size), + strides=(patch_size, patch_size), + padding="VALID", + use_bias=False, + dtype=self.dtype, + kernel_init=jax.nn.initializers.normal(), + ) + + self.num_patches = (image_size // patch_size) ** 2 + num_positions = self.num_patches + 1 + self.position_embedding = nn.Embed(num_positions, embed_dim, embedding_init=jax.nn.initializers.normal()) + self.position_ids = jnp.expand_dims(jnp.arange(0, num_positions, dtype="i4"), axis=0) + + def __call__(self, pixel_values): + patch_embeds = self.patch_embedding(pixel_values) + batch_size, height, width, channels = patch_embeds.shape + patch_embeds = jnp.reshape(patch_embeds, (batch_size, height * width, channels)) + + class_embeds = jnp.expand_dims(self.class_embedding, axis=(0, 1)) + class_embeds = jnp.tile(class_embeds, (batch_size, 1, 1)) + embeddings = jnp.concatenate([class_embeds, patch_embeds], axis=1) + embeddings = embeddings + self.position_embedding(self.position_ids) + return embeddings + + +class FlaxCLIPTextEmbeddings(nn.Module): + config: CLIPTextConfig + dtype: jnp.dtype = jnp.float32 + + def setup(self): + embed_dim = self.config.hidden_size + + self.token_embedding = nn.Embed(self.config.vocab_size, embed_dim, embedding_init=jax.nn.initializers.normal()) + self.position_embedding = nn.Embed( + self.config.max_position_embeddings, embed_dim, embedding_init=jax.nn.initializers.normal() + ) + self.position_ids = jnp.expand_dims( + jnp.arange(0, self.config.max_position_embeddings, dtype="i4"), axis=(0, 1) + ) + + def __call__(self, input_ids, position_ids): + input_embeds = 
self.token_embedding(input_ids.astype("i4")) + position_embeds = self.position_embedding(position_ids.astype("i4")) + + embeddings = input_embeds + position_embeds + return embeddings + + +class FlaxCLIPAttention(nn.Module): + config: Union[CLIPTextConfig, CLIPVisionConfig] + dtype: jnp.dtype = jnp.float32 + + def setup(self): + self.embed_dim = self.config.hidden_size + self.num_heads = self.config.num_attention_heads + self.head_dim = self.embed_dim // self.num_heads + if self.head_dim * self.num_heads != self.embed_dim: + raise ValueError( + f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:" + f" {self.num_heads})." + ) + self.scale = self.head_dim**-0.5 + self.dropout = self.config.attention_dropout + + self.k_proj = nn.Dense(self.embed_dim, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(0.01)) + self.v_proj = nn.Dense(self.embed_dim, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(0.01)) + self.q_proj = nn.Dense(self.embed_dim, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(0.01)) + self.out_proj = nn.Dense(self.embed_dim, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(0.01)) + + self.causal = isinstance(self.config, CLIPTextConfig) + if self.causal: + self.causal_mask = make_causal_mask(jnp.ones((1, self.config.max_position_embeddings), dtype="i4")) + + def _split_heads(self, hidden_states): + return hidden_states.reshape(hidden_states.shape[:2] + (self.num_heads, self.head_dim)) + + def _merge_heads(self, hidden_states): + return hidden_states.reshape(hidden_states.shape[:2] + (self.embed_dim,)) + + def __call__( + self, + hidden_states, + attention_mask=None, + deterministic: bool = True, + output_attentions: bool = False, + ): + query = self.q_proj(hidden_states) + key = self.k_proj(hidden_states) + value = self.v_proj(hidden_states) + + query = self._split_heads(query) + key = self._split_heads(key) + value = self._split_heads(value) + + causal_attention_mask = None + if self.causal: + query_length, key_length = query.shape[1], key.shape[1] + causal_attention_mask = self.causal_mask[:, :, key_length - query_length : key_length, :key_length] + + if attention_mask is not None and causal_attention_mask is not None: + attention_mask = jnp.expand_dims(attention_mask, axis=(-3, -2)) + attention_mask = combine_masks(attention_mask, causal_attention_mask, dtype="i4") + elif causal_attention_mask is not None: + attention_mask = causal_attention_mask + elif attention_mask is not None: + attention_mask = jnp.expand_dims(attention_mask, axis=(-3, -2)) + + if attention_mask is not None: + attention_bias = lax.select( + attention_mask > 0, + jnp.full(attention_mask.shape, 0.0).astype(self.dtype), + jnp.full(attention_mask.shape, -1e4).astype(self.dtype), + ) + else: + attention_bias = None + + dropout_rng = None + if not deterministic and self.dropout > 0.0: + dropout_rng = self.make_rng("dropout") + + attn_weights = dot_product_attention_weights( + query, + key, + bias=attention_bias, + dropout_rng=dropout_rng, + dropout_rate=self.dropout, + deterministic=deterministic, + dtype=self.dtype, + precision=None, + ) + + attn_output = jnp.einsum("...hqk,...khd->...qhd", attn_weights, value) + attn_output = self._merge_heads(attn_output) + attn_output = self.out_proj(attn_output) + + outputs = (attn_output, attn_weights) if output_attentions else (attn_output,) + return outputs + + +class FlaxCLIPMLP(nn.Module): + config: Union[CLIPTextConfig, CLIPVisionConfig] + dtype: jnp.dtype = jnp.float32 + + def setup(self): + 
self.activation_fn = ACT2FN[self.config.hidden_act] + self.fc1 = nn.Dense( + self.config.intermediate_size, + dtype=self.dtype, + kernel_init=jax.nn.initializers.normal(0.01), + ) + self.fc2 = nn.Dense(self.config.hidden_size, dtype=self.dtype, kernel_init=jax.nn.initializers.normal(0.01)) + + def __call__(self, hidden_states): + hidden_states = self.fc1(hidden_states) + hidden_states = self.activation_fn(hidden_states) + hidden_states = self.fc2(hidden_states) + return hidden_states + + +class FlaxCLIPEncoderLayer(nn.Module): + config: Union[CLIPTextConfig, CLIPVisionConfig] + dtype: jnp.dtype = jnp.float32 + + def setup(self): + self.self_attn = FlaxCLIPAttention(self.config, dtype=self.dtype) + self.layer_norm1 = nn.LayerNorm(epsilon=self.config.layer_norm_eps, dtype=self.dtype) + self.mlp = FlaxCLIPMLP(self.config, dtype=self.dtype) + self.layer_norm2 = nn.LayerNorm(epsilon=self.config.layer_norm_eps, dtype=self.dtype) + + def __call__( + self, + hidden_states, + attention_mask, + deterministic: bool = True, + output_attentions: bool = False, + ): + residual = hidden_states + + hidden_states = self.layer_norm1(hidden_states) + attn_outputs = self.self_attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + deterministic=deterministic, + output_attentions=output_attentions, + ) + hidden_states = attn_outputs[0] + hidden_states = residual + hidden_states + + residual = hidden_states + hidden_states = self.layer_norm2(hidden_states) + hidden_states = self.mlp(hidden_states) + hidden_states = residual + hidden_states + + outputs = (hidden_states,) + + if output_attentions: + outputs += attn_outputs[1:] + + return outputs + + +class FlaxCLIPLayerCollection(nn.Module): + config: Union[CLIPTextConfig, CLIPVisionConfig] + dtype: jnp.dtype = jnp.float32 + + def setup(self): + self.layers = [ + FlaxCLIPEncoderLayer(self.config, name=str(i), dtype=self.dtype) + for i in range(self.config.num_hidden_layers) + ] + + def __call__( + self, + hidden_states, + attention_mask=None, + deterministic: bool = True, + output_attentions: bool = False, + output_hidden_states: bool = False, + return_dict: bool = True, + ): + all_attentions = () if output_attentions else None + all_hidden_states = () if output_hidden_states else None + + for layer in self.layers: + if output_hidden_states: + all_hidden_states += (hidden_states,) + + layer_outputs = layer( + hidden_states, attention_mask, deterministic=deterministic, output_attentions=output_attentions + ) + hidden_states = layer_outputs[0] + + if output_attentions: + all_attentions += (layer_outputs[1],) + + if output_hidden_states: + all_hidden_states += (hidden_states,) + + outputs = (hidden_states,) + + if not return_dict: + return tuple(v for v in outputs if v is not None) + + return FlaxBaseModelOutput( + last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_attentions + ) + + +class FlaxCLIPEncoder(nn.Module): + config: Union[CLIPTextConfig, CLIPVisionConfig] + dtype: jnp.dtype = jnp.float32 + + def setup(self): + self.layers = FlaxCLIPLayerCollection(self.config, dtype=self.dtype) + + def __call__( + self, + inputs_embeds, + attention_mask=None, + deterministic: bool = True, + output_attentions: bool = False, + output_hidden_states: bool = False, + return_dict: bool = True, + ): + return self.layers( + hidden_states=inputs_embeds, + attention_mask=attention_mask, + deterministic=deterministic, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + 
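`FlaxCLIPAttention` above first ANDs the causal mask with any padding mask, then converts the boolean result into the additive bias fed to `dot_product_attention_weights`. A condensed sketch of that mask handling (the sequence length and padding pattern are assumptions for illustration):

```python
# Hedged sketch of the mask handling in FlaxCLIPAttention.__call__ above.
import jax.numpy as jnp
from jax import lax
from flax.linen import combine_masks, make_causal_mask

seq_len = 4
causal_mask = make_causal_mask(jnp.ones((1, seq_len), dtype="i4"))  # (1, 1, 4, 4)

attention_mask = jnp.array([[1, 1, 1, 0]])                       # last position is padding
attention_mask = jnp.expand_dims(attention_mask, axis=(-3, -2))  # (1, 1, 1, 4)
mask = combine_masks(attention_mask, causal_mask, dtype="i4")    # logical AND

# Allowed positions receive bias 0.0; masked positions receive -1e4,
# which drives their softmax weights to (effectively) zero.
attention_bias = lax.select(
    mask > 0,
    jnp.full(mask.shape, 0.0),
    jnp.full(mask.shape, -1e4),
)
```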
+class FlaxCLIPTextTransformer(nn.Module): + config: CLIPTextConfig + dtype: jnp.dtype = jnp.float32 + + def setup(self): + self.embeddings = FlaxCLIPTextEmbeddings(self.config, dtype=self.dtype) + self.encoder = FlaxCLIPEncoder(self.config, dtype=self.dtype) + self.final_layer_norm = nn.LayerNorm(epsilon=self.config.layer_norm_eps, dtype=self.dtype) + + def __call__( + self, + input_ids, + attention_mask, + position_ids, + deterministic: bool = True, + output_attentions: bool = False, + output_hidden_states: bool = False, + return_dict: bool = True, + ): + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + hidden_states = self.embeddings(input_ids=input_ids, position_ids=position_ids) + + encoder_outputs = self.encoder( + inputs_embeds=hidden_states, + attention_mask=attention_mask, + deterministic=deterministic, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + last_hidden_state = encoder_outputs[0] + last_hidden_state = self.final_layer_norm(last_hidden_state) + + # text_embeds.shape = [batch_size, sequence_length, transformer.width] + # take features from the EOS embedding (eos_token_id is the highest number in each sequence) + pooled_output = last_hidden_state[jnp.arange(last_hidden_state.shape[0]), input_ids.argmax(axis=-1)] + + if not return_dict: + return (last_hidden_state, pooled_output) + encoder_outputs[1:] + + return FlaxBaseModelOutputWithPooling( + last_hidden_state=last_hidden_state, + pooler_output=pooled_output, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + ) + + +class FlaxCLIPVisionTransformer(nn.Module): + config: CLIPVisionConfig + dtype: jnp.dtype = jnp.float32 + + def setup(self): + self.embeddings = FlaxCLIPVisionEmbeddings(self.config, dtype=self.dtype) + self.pre_layrnorm = nn.LayerNorm(epsilon=self.config.layer_norm_eps, dtype=self.dtype) + self.encoder = FlaxCLIPEncoder(self.config, dtype=self.dtype) + self.post_layernorm = nn.LayerNorm(epsilon=self.config.layer_norm_eps, dtype=self.dtype) + + def __call__( + self, + pixel_values=None, + deterministic: bool = True, + output_attentions=None, + output_hidden_states=None, + return_dict: bool = True, + ): + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + hidden_states = self.embeddings(pixel_values) + hidden_states = self.pre_layrnorm(hidden_states) + + encoder_outputs = self.encoder( + inputs_embeds=hidden_states, + deterministic=deterministic, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + last_hidden_state = encoder_outputs[0] + pooled_output = last_hidden_state[:, 0, :] + pooled_output = self.post_layernorm(pooled_output) + + if not return_dict: + return (last_hidden_state, pooled_output) + encoder_outputs[1:] + + return FlaxBaseModelOutputWithPooling( + last_hidden_state=last_hidden_state, + pooler_output=pooled_output, + hidden_states=encoder_outputs.hidden_states, + 
attentions=encoder_outputs.attentions, + ) + + +class FlaxCLIPTextPreTrainedModel(FlaxPreTrainedModel): + config_class = CLIPTextConfig + module_class: nn.Module = None + + def __init__( + self, + config: CLIPTextConfig, + input_shape=(1, 1), + seed: int = 0, + dtype: jnp.dtype = jnp.float32, + _do_init: bool = True, + **kwargs + ): + module = self.module_class(config=config, dtype=dtype, **kwargs) + super().__init__(config, module, input_shape=input_shape, seed=seed, dtype=dtype, _do_init=_do_init) + + def init_weights(self, rng: jax.random.PRNGKey, input_shape: Tuple, params: FrozenDict = None) -> FrozenDict: + # init input tensor + input_ids = jnp.zeros(input_shape, dtype="i4") + position_ids = jnp.broadcast_to(jnp.arange(jnp.atleast_2d(input_ids).shape[-1]), input_shape) + attention_mask = jnp.ones_like(input_ids) + + params_rng, dropout_rng = jax.random.split(rng) + rngs = {"params": params_rng, "dropout": dropout_rng} + + random_params = self.module.init(rngs, input_ids, attention_mask, position_ids)["params"] + + if params is not None: + random_params = flatten_dict(unfreeze(random_params)) + params = flatten_dict(unfreeze(params)) + for missing_key in self._missing_keys: + params[missing_key] = random_params[missing_key] + self._missing_keys = set() + return freeze(unflatten_dict(params)) + else: + return random_params + + def __call__( + self, + input_ids, + attention_mask=None, + position_ids=None, + params: dict = None, + dropout_rng: jax.random.PRNGKey = None, + train: bool = False, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ): + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.return_dict + + if position_ids is None: + position_ids = jnp.broadcast_to(jnp.arange(jnp.atleast_2d(input_ids).shape[-1]), input_ids.shape) + + if attention_mask is None: + attention_mask = jnp.ones_like(input_ids) + + # Handle any PRNG if needed + rngs = {} + if dropout_rng is not None: + rngs["dropout"] = dropout_rng + + return self.module.apply( + {"params": params or self.params}, + jnp.array(input_ids, dtype="i4"), + jnp.array(attention_mask, dtype="i4"), + jnp.array(position_ids, dtype="i4"), + not train, + output_attentions, + output_hidden_states, + return_dict, + rngs=rngs, + ) + + +class FlaxCLIPVisionPreTrainedModel(FlaxPreTrainedModel): + config_class = CLIPVisionConfig + main_input_name = "pixel_values" + module_class: nn.Module = None + + def __init__( + self, + config: CLIPVisionConfig, + input_shape: Optional[Tuple] = None, + seed: int = 0, + dtype: jnp.dtype = jnp.float32, + _do_init: bool = True, + **kwargs + ): + if input_shape is None: + input_shape = (1, config.image_size, config.image_size, 3) + module = self.module_class(config=config, dtype=dtype, **kwargs) + super().__init__(config, module, input_shape=input_shape, seed=seed, dtype=dtype, _do_init=_do_init) + + def init_weights(self, rng: jax.random.PRNGKey, input_shape: Tuple, params: FrozenDict = None) -> FrozenDict: + # init input tensor + pixel_values = jax.random.normal(rng, input_shape) + + params_rng, dropout_rng = jax.random.split(rng) + rngs = {"params": params_rng, "dropout": dropout_rng} + + random_params = self.module.init(rngs, pixel_values)["params"] + + if params is not None: 
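+            # Clarifying note: keep the user-supplied params and backfill only the keys
+            # recorded as missing with values from the freshly initialized random_params.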
+ random_params = flatten_dict(unfreeze(random_params)) + params = flatten_dict(unfreeze(params)) + for missing_key in self._missing_keys: + params[missing_key] = random_params[missing_key] + self._missing_keys = set() + return freeze(unflatten_dict(params)) + else: + return random_params + + def __call__( + self, + pixel_values, + params: dict = None, + dropout_rng: jax.random.PRNGKey = None, + train: bool = False, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ): + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.return_dict + + pixel_values = jnp.transpose(pixel_values, (0, 2, 3, 1)) + + # Handle any PRNG if needed + rngs = {} + if dropout_rng is not None: + rngs["dropout"] = dropout_rng + + return self.module.apply( + {"params": params or self.params}, + jnp.array(pixel_values, dtype=jnp.float32), + not train, + output_attentions, + output_hidden_states, + return_dict, + rngs=rngs, + ) + + +class FlaxCLIPPreTrainedModel(FlaxPreTrainedModel): + config_class = CLIPConfig + module_class: nn.Module = None + + def __init__( + self, + config: CLIPConfig, + input_shape: Optional[Tuple] = None, + seed: int = 0, + dtype: jnp.dtype = jnp.float32, + _do_init: bool = True, + **kwargs + ): + if input_shape is None: + input_shape = ((1, 1), (1, config.vision_config.image_size, config.vision_config.image_size, 3)) + module = self.module_class(config=config, dtype=dtype, **kwargs) + super().__init__(config, module, input_shape=input_shape, seed=seed, dtype=dtype, _do_init=_do_init) + + def init_weights(self, rng: jax.random.PRNGKey, input_shape: Tuple, params: FrozenDict = None) -> FrozenDict: + # init input tensor + input_ids = jnp.zeros(input_shape[0], dtype="i4") + position_ids = jnp.broadcast_to(jnp.arange(jnp.atleast_2d(input_ids).shape[-1]), input_shape[0]) + attention_mask = jnp.ones_like(input_ids) + + pixel_values = jax.random.normal(rng, input_shape[1]) + + params_rng, dropout_rng = jax.random.split(rng) + rngs = {"params": params_rng, "dropout": dropout_rng} + + random_params = self.module.init(rngs, input_ids, pixel_values, attention_mask, position_ids)["params"] + + if params is not None: + random_params = flatten_dict(unfreeze(random_params)) + params = flatten_dict(unfreeze(params)) + for missing_key in self._missing_keys: + params[missing_key] = random_params[missing_key] + self._missing_keys = set() + return freeze(unflatten_dict(params)) + else: + return random_params + + def __call__( + self, + input_ids, + pixel_values, + attention_mask=None, + position_ids=None, + params: dict = None, + dropout_rng: jax.random.PRNGKey = None, + train: bool = False, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ): + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.return_dict + + if position_ids is None: + position_ids = jnp.broadcast_to(jnp.arange(jnp.atleast_2d(input_ids).shape[-1]), input_ids.shape) + + if attention_mask is 
None: + attention_mask = jnp.ones_like(input_ids) + + pixel_values = jnp.transpose(pixel_values, (0, 2, 3, 1)) + + # Handle any PRNG if needed + rngs = {} + if dropout_rng is not None: + rngs["dropout"] = dropout_rng + + return self.module.apply( + {"params": params or self.params}, + jnp.array(input_ids, dtype="i4"), + jnp.array(pixel_values, dtype=jnp.float32), + jnp.array(attention_mask, dtype="i4"), + jnp.array(position_ids, dtype="i4"), + not train, + output_attentions, + output_hidden_states, + return_dict, + rngs=rngs, + ) + + def get_text_features( + self, + input_ids, + attention_mask=None, + position_ids=None, + params: dict = None, + dropout_rng: jax.random.PRNGKey = None, + train=False, + ): + r""" + Args: + input_ids (`numpy.ndarray` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you + provide it. + + Indices can be obtained using [`CLIPTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + + Returns: + text_features (`jnp.ndarray` of shape `(batch_size, output_dim`): The text embeddings obtained by applying + the projection layer to the pooled output of [`FlaxCLIPTextModel`]. + + Examples: + + ```python + >>> from transformers import CLIPTokenizer, FlaxCLIPModel + + >>> model = FlaxCLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32") + + >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="np") + >>> text_features = model.get_text_features(**inputs) + ```""" + if position_ids is None: + position_ids = jnp.broadcast_to(jnp.arange(jnp.atleast_2d(input_ids).shape[-1]), input_ids.shape) + + if attention_mask is None: + attention_mask = jnp.ones_like(input_ids) + + # Handle any PRNG if needed + rngs = {} + if dropout_rng is not None: + rngs["dropout"] = dropout_rng + + def _get_features(module, input_ids, attention_mask, position_ids, deterministic): + text_outputs = module.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + deterministic=deterministic, + ) + pooled_output = text_outputs[1] + text_features = module.text_projection(pooled_output) + return text_features + + return self.module.apply( + {"params": params or self.params}, + jnp.array(input_ids, dtype="i4"), + jnp.array(attention_mask, dtype="i4"), + jnp.array(position_ids, dtype="i4"), + not train, + method=_get_features, + rngs=rngs, + ) + + def get_image_features( + self, pixel_values, params: dict = None, dropout_rng: jax.random.PRNGKey = None, train=False + ): + r""" + Args: + pixel_values (`numpy.ndarray` of shape `(batch_size, num_channels, height, width)`): + Pixel values. Padding will be ignored by default should you provide it. Pixel values can be obtained + using [`CLIPFeatureExtractor`]. See [`CLIPFeatureExtractor.__call__`] for details. 
+ + Returns: + image_features (`jnp.ndarray` of shape `(batch_size, output_dim`): The image embeddings obtained by + applying the projection layer to the pooled output of [`FlaxCLIPVisionModel`] + + Examples: + + ```python + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, FlaxCLIPModel + + >>> model = FlaxCLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor(images=image, return_tensors="np") + + >>> image_features = model.get_image_features(**inputs) + ```""" + pixel_values = jnp.transpose(pixel_values, (0, 2, 3, 1)) + + # Handle any PRNG if needed + rngs = {} + if dropout_rng is not None: + rngs["dropout"] = dropout_rng + + def _get_features(module, pixel_values, deterministic): + vision_outputs = module.vision_model(pixel_values=pixel_values, deterministic=deterministic) + pooled_output = vision_outputs[1] # pooled_output + image_features = module.visual_projection(pooled_output) + return image_features + + return self.module.apply( + {"params": params or self.params}, + jnp.array(pixel_values, dtype=jnp.float32), + not train, + method=_get_features, + rngs=rngs, + ) + + +class FlaxCLIPTextModule(nn.Module): + config: CLIPTextConfig + dtype: jnp.dtype = jnp.float32 + + def setup(self): + self.text_model = FlaxCLIPTextTransformer(self.config, dtype=self.dtype) + + def __call__( + self, + input_ids, + attention_mask, + position_ids, + deterministic: bool = True, + output_attentions: bool = False, + output_hidden_states: bool = False, + return_dict: bool = True, + ): + return self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + deterministic=deterministic, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + +class FlaxCLIPTextModel(FlaxCLIPTextPreTrainedModel): + module_class = FlaxCLIPTextModule + + +FLAX_CLIP_TEXT_MODEL_DOCSTRING = """ + Returns: + + Example: + + ```python + >>> from transformers import CLIPTokenizer, FlaxCLIPTextModel + + >>> model = FlaxCLIPTextModel.from_pretrained("openai/clip-vit-base-patch32") + >>> tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32") + + >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="np") + + >>> outputs = model(**inputs) + >>> last_hidden_state = outputs.last_hidden_state + >>> pooler_output = outputs.pooler_output # pooled (EOS token) states + ``` +""" + +overwrite_call_docstring(FlaxCLIPTextModel, CLIP_TEXT_INPUTS_DOCSTRING + FLAX_CLIP_TEXT_MODEL_DOCSTRING) +append_replace_return_docstrings( + FlaxCLIPTextModel, output_type=FlaxBaseModelOutputWithPooling, config_class=CLIPTextConfig +) + + +class FlaxCLIPVisionModule(nn.Module): + config: CLIPVisionConfig + dtype: jnp.dtype = jnp.float32 + + def setup(self): + self.vision_model = FlaxCLIPVisionTransformer(self.config, dtype=self.dtype) + + def __call__( + self, + pixel_values, + deterministic: bool = True, + output_attentions: bool = False, + output_hidden_states: bool = False, + return_dict: bool = True, + ): + return self.vision_model( + pixel_values=pixel_values, + deterministic=deterministic, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + +class 
FlaxCLIPVisionModel(FlaxCLIPVisionPreTrainedModel): + module_class = FlaxCLIPVisionModule + + +FLAX_CLIP_VISION_MODEL_DOCSTRING = """ + Returns: + + Example: + + ```python + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, FlaxCLIPVisionModel + + >>> model = FlaxCLIPVisionModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor(images=image, return_tensors="np") + + >>> outputs = model(**inputs) + >>> last_hidden_state = outputs.last_hidden_state + >>> pooler_output = outputs.pooler_output # pooled CLS states + ``` +""" + +overwrite_call_docstring(FlaxCLIPVisionModel, CLIP_VISION_INPUTS_DOCSTRING + FLAX_CLIP_VISION_MODEL_DOCSTRING) +append_replace_return_docstrings( + FlaxCLIPVisionModel, output_type=FlaxBaseModelOutputWithPooling, config_class=CLIPVisionConfig +) + + +class FlaxCLIPModule(nn.Module): + config: CLIPConfig + dtype: jnp.dtype = jnp.float32 + + def setup(self): + text_config = self.config.text_config + vision_config = self.config.vision_config + + self.projection_dim = self.config.projection_dim + self.text_embed_dim = text_config.hidden_size + self.vision_embed_dim = vision_config.hidden_size + + self.text_model = FlaxCLIPTextTransformer(text_config, dtype=self.dtype) + self.vision_model = FlaxCLIPVisionTransformer(vision_config, dtype=self.dtype) + + self.visual_projection = nn.Dense( + self.projection_dim, + dtype=self.dtype, + kernel_init=jax.nn.initializers.normal(0.02), + use_bias=False, + ) + self.text_projection = nn.Dense( + self.projection_dim, + dtype=self.dtype, + kernel_init=jax.nn.initializers.normal(0.02), + use_bias=False, + ) + + self.logit_scale = self.param( + "logit_scale", lambda _, shape: jnp.ones(shape) * self.config.logit_scale_init_value, [] + ) + + def __call__( + self, + input_ids=None, + pixel_values=None, + attention_mask=None, + position_ids=None, + deterministic: bool = True, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + ): + return_dict = return_dict if return_dict is not None else self.config.return_dict + + vision_outputs = self.vision_model( + pixel_values=pixel_values, + deterministic=deterministic, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + text_outputs = self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + deterministic=deterministic, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + image_embeds = vision_outputs[1] + image_embeds = self.visual_projection(image_embeds) + + text_embeds = text_outputs[1] + text_embeds = self.text_projection(text_embeds) + + # normalized features + image_embeds = image_embeds / jnp.linalg.norm(image_embeds, axis=-1, keepdims=True) + text_embeds = text_embeds / jnp.linalg.norm(text_embeds, axis=-1, keepdims=True) + + # cosine similarity as logits + logit_scale = jnp.exp(self.logit_scale) + logits_per_text = jnp.matmul(text_embeds, image_embeds.T) * logit_scale + logits_per_image = logits_per_text.T + + if not return_dict: + return (logits_per_image, logits_per_text, text_embeds, image_embeds, text_outputs, vision_outputs) + + return FlaxCLIPOutput( + logits_per_image=logits_per_image, + 
logits_per_text=logits_per_text, + text_embeds=text_embeds, + image_embeds=image_embeds, + text_model_output=text_outputs, + vision_model_output=vision_outputs, + ) + + +@add_start_docstrings(CLIP_START_DOCSTRING) +class FlaxCLIPModel(FlaxCLIPPreTrainedModel): + module_class = FlaxCLIPModule + + +FLAX_CLIP_MODEL_DOCSTRING = """ + Returns: + + Example: + + ```python + >>> import jax + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, FlaxCLIPModel + + >>> model = FlaxCLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor( + ... text=["a photo of a cat", "a photo of a dog"], images=image, return_tensors="np", padding=True + ... ) + + >>> outputs = model(**inputs) + >>> logits_per_image = outputs.logits_per_image # this is the image-text similarity score + >>> probs = jax.nn.softmax(logits_per_image, axis=1) # we can take the softmax to get the label probabilities + ``` +""" + +overwrite_call_docstring(FlaxCLIPModel, CLIP_INPUTS_DOCSTRING + FLAX_CLIP_MODEL_DOCSTRING) +append_replace_return_docstrings(FlaxCLIPModel, output_type=FlaxCLIPOutput, config_class=CLIPConfig) diff --git a/core/models/encoders/clip_modules/modeling_tf_clip.py b/core/models/encoders/clip_modules/modeling_tf_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..6ba83f04b8436b86221dd3eeeeff722177f3c363 --- /dev/null +++ b/core/models/encoders/clip_modules/modeling_tf_clip.py @@ -0,0 +1,1399 @@ +# coding=utf-8 +# Copyright 2021 The OpenAI Team Authors and The HuggingFace Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
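`clip_loss` below averages a caption-direction and an image-direction cross-entropy over the similarity matrix; because the *i*-th text matches the *i*-th image, the target class for row *i* is simply *i* (the diagonal). A minimal sketch with assumed logits:

```python
# Hedged sketch of the symmetric contrastive loss defined below;
# the similarity values are illustrative assumptions.
import tensorflow as tf

similarity = tf.constant([[5.0, 0.1], [0.2, 4.0]])  # (text_batch, image_batch)
labels = tf.range(tf.shape(similarity)[0])          # matching pairs sit on the diagonal

caption_loss = tf.reduce_mean(
    tf.keras.losses.sparse_categorical_crossentropy(labels, similarity, from_logits=True)
)
image_loss = tf.reduce_mean(
    tf.keras.losses.sparse_categorical_crossentropy(labels, tf.transpose(similarity), from_logits=True)
)
loss = (caption_loss + image_loss) / 2.0
```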
+""" TF 2.0 CLIP model.""" + + +import math +from dataclasses import dataclass +from typing import Any, Dict, Optional, Tuple, Union + +import numpy as np +import tensorflow as tf + +from ...activations_tf import get_tf_activation +from ...modeling_tf_outputs import TFBaseModelOutput, TFBaseModelOutputWithPooling + +# Public API +from ...modeling_tf_utils import ( + DUMMY_INPUTS, + TFModelInputType, + TFPreTrainedModel, + get_initializer, + keras_serializable, + unpack_inputs, +) +from ...tf_utils import shape_list, stable_softmax +from ...utils import ( + ModelOutput, + add_start_docstrings, + add_start_docstrings_to_model_forward, + logging, + replace_return_docstrings, +) +from .configuration_clip import CLIPConfig, CLIPTextConfig, CLIPVisionConfig + + +logger = logging.get_logger(__name__) + +_CHECKPOINT_FOR_DOC = "openai/clip-vit-base-patch32" + +TF_CLIP_PRETRAINED_MODEL_ARCHIVE_LIST = [ + "openai/clip-vit-base-patch32", + # See all CLIP models at https://huggingface.co/models?filter=clip +] + + +LARGE_NEGATIVE = -1e8 + + +# Copied from transformers.models.bart.modeling_tf_bart._expand_mask +def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0): + """ + Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. + """ + src_len = shape_list(mask)[1] + tgt_len = tgt_len if tgt_len is not None else src_len + one_cst = tf.constant(1.0) + mask = tf.cast(mask, dtype=one_cst.dtype) + expanded_mask = tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1)) + + return (one_cst - expanded_mask) * LARGE_NEGATIVE + + +# contrastive loss function, adapted from +# https://sachinruk.github.io/blog/pytorch/pytorch%20lightning/loss%20function/gpu/2021/03/07/CLIP.html +def contrastive_loss(logits: tf.Tensor) -> tf.Tensor: + return tf.math.reduce_mean( + tf.keras.metrics.sparse_categorical_crossentropy( + y_true=tf.range(shape_list(logits)[0]), y_pred=logits, from_logits=True + ) + ) + + +def clip_loss(similarity: tf.Tensor) -> tf.Tensor: + caption_loss = contrastive_loss(similarity) + image_loss = contrastive_loss(tf.transpose(similarity)) + return (caption_loss + image_loss) / 2.0 + + +@dataclass +class TFCLIPOutput(ModelOutput): + """ + Args: + loss (`tf.Tensor` of shape `(1,)`, *optional*, returned when `return_loss` is `True`): + Contrastive loss for image-text similarity. + logits_per_image:(`tf.Tensor` of shape `(image_batch_size, text_batch_size)`): + The scaled dot product scores between `image_embeds` and `text_embeds`. This represents the image-text + similarity scores. + logits_per_text:(`tf.Tensor` of shape `(text_batch_size, image_batch_size)`): + The scaled dot product scores between `text_embeds` and `image_embeds`. This represents the text-image + similarity scores. + text_embeds(`tf.Tensor` of shape `(batch_size, output_dim`): + The text embeddings obtained by applying the projection layer to the pooled output of [`TFCLIPTextModel`]. + image_embeds(`tf.Tensor` of shape `(batch_size, output_dim`): + The image embeddings obtained by applying the projection layer to the pooled output of + [`TFCLIPVisionModel`]. + text_model_output([`~modeling_tf_utils.TFBaseModelOutputWithPooling`]): + The output of the [`TFCLIPTextModel`]. + vision_model_output([`~modeling_tf_utils.TFBaseModelOutputWithPooling`]): + The output of the [`TFCLIPVisionModel`]. 
+ """ + + loss: Optional[tf.Tensor] = None + logits_per_image: tf.Tensor = None + logits_per_text: tf.Tensor = None + text_embeds: tf.Tensor = None + image_embeds: tf.Tensor = None + text_model_output: TFBaseModelOutputWithPooling = None + vision_model_output: TFBaseModelOutputWithPooling = None + + def to_tuple(self) -> Tuple[Any]: + return tuple( + self[k] if k not in ["text_model_output", "vision_model_output"] else getattr(self, k).to_tuple() + for k in self.keys() + ) + + +class TFCLIPVisionEmbeddings(tf.keras.layers.Layer): + def __init__(self, config: CLIPVisionConfig, **kwargs): + super().__init__(**kwargs) + + self.embed_dim = config.hidden_size + self.image_size = config.image_size + self.patch_size = config.patch_size + + self.num_patches = (self.image_size // self.patch_size) ** 2 + self.num_positions = self.num_patches + 1 + + self.config = config + + self.patch_embedding = tf.keras.layers.Conv2D( + filters=self.embed_dim, + kernel_size=self.patch_size, + strides=self.patch_size, + padding="valid", + data_format="channels_last", + use_bias=False, + kernel_initializer=get_initializer(self.config.initializer_range * self.config.initializer_factor), + name="patch_embedding", + ) + + def build(self, input_shape: tf.TensorShape): + + factor = self.config.initializer_factor + + self.class_embedding = self.add_weight( + shape=(self.embed_dim,), + initializer=get_initializer(self.embed_dim**-0.5 * factor), + trainable=True, + name="class_embedding", + ) + + with tf.name_scope("position_embedding"): + self.position_embedding = self.add_weight( + shape=(self.num_positions, self.embed_dim), + initializer=get_initializer(self.config.initializer_range * factor), + trainable=True, + name="embeddings", + ) + + super().build(input_shape) + + def call(self, pixel_values: tf.Tensor) -> tf.Tensor: + """`pixel_values` is expected to be of NCHW format.""" + + batch_size, num_channels, height, width = shape_list(pixel_values) + + # When running on CPU, `tf.nn.conv2d` doesn't support `NCHW` format. + # So change the input format from `NCHW` to `NHWC`. + # shape = (batch_size, in_height, in_width, in_channels=num_channels) + pixel_values = tf.transpose(pixel_values, perm=(0, 2, 3, 1)) + + patch_embeds = self.patch_embedding(pixel_values) + + # Change the 2D spatial dimensions to a single temporal dimension. 
+ # shape = (batch_size, num_patches, out_channels=embed_dim) + patch_embeds = tf.reshape(tensor=patch_embeds, shape=(batch_size, self.num_patches, -1)) + + # add the [CLS] token to the embedded patch tokens + class_embeds = tf.broadcast_to(self.class_embedding, shape=(batch_size, 1, self.embed_dim)) + embeddings = tf.concat((class_embeds, patch_embeds), axis=1) + + embeddings = embeddings + self.position_embedding + + return embeddings + + +class TFCLIPTextEmbeddings(tf.keras.layers.Layer): + def __init__(self, config: CLIPTextConfig, **kwargs): + super().__init__(**kwargs) + + self.embed_dim = config.hidden_size + self.vocab_size = config.vocab_size + + self.config = config + + def build(self, input_shape: tf.TensorShape): + + with tf.name_scope("token_embedding"): + self.weight = self.add_weight( + shape=(self.vocab_size, self.embed_dim), + initializer=get_initializer(self.config.initializer_factor * self.config.initializer_range), + trainable=True, + name="weight", + ) + + with tf.name_scope("position_embedding"): + self.position_embedding = self.add_weight( + shape=(self.config.max_position_embeddings, self.embed_dim), + initializer=get_initializer(self.config.initializer_factor * self.config.initializer_range), + trainable=True, + name="embeddings", + ) + + super().build(input_shape) + + def call( + self, + input_ids: tf.Tensor = None, + position_ids: tf.Tensor = None, + inputs_embeds: tf.Tensor = None, + ) -> tf.Tensor: + """ + Applies embedding based on inputs tensor. + + Returns: + final_embeddings (`tf.Tensor`): output embedding tensor. + """ + if input_ids is None and inputs_embeds is None: + raise ValueError("You have to specify either input_ids or inputs_embeds") + + if inputs_embeds is None: + inputs_embeds = tf.gather(params=self.weight, indices=input_ids) + + input_shape = shape_list(inputs_embeds)[:-1] + + if position_ids is None: + position_ids = tf.expand_dims(tf.range(start=0, limit=input_shape[-1]), axis=0) + + position_embeds = tf.gather(params=self.position_embedding, indices=position_ids) + position_embeds = tf.tile(input=position_embeds, multiples=(input_shape[0], 1, 1)) + final_embeddings = inputs_embeds + position_embeds + + return final_embeddings + + +class TFCLIPAttention(tf.keras.layers.Layer): + """Multi-headed attention from 'Attention Is All You Need' paper""" + + def __init__(self, config: CLIPConfig, **kwargs): + super().__init__(**kwargs) + + self.embed_dim = config.hidden_size + self.num_attention_heads = config.num_attention_heads + self.attention_head_size = self.embed_dim // self.num_attention_heads + if self.attention_head_size * self.num_attention_heads != self.embed_dim: + raise ValueError( + f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:" + f" {self.num_attention_heads})." 
+ ) + + factor = config.initializer_factor + in_proj_std = (self.embed_dim**-0.5) * ((2 * config.num_hidden_layers) ** -0.5) * factor + out_proj_std = (self.embed_dim**-0.5) * factor + + self.sqrt_att_head_size = math.sqrt(self.attention_head_size) + + self.q_proj = tf.keras.layers.Dense( + units=self.embed_dim, kernel_initializer=get_initializer(in_proj_std), name="q_proj" + ) + self.k_proj = tf.keras.layers.Dense( + units=self.embed_dim, kernel_initializer=get_initializer(in_proj_std), name="k_proj" + ) + self.v_proj = tf.keras.layers.Dense( + units=self.embed_dim, kernel_initializer=get_initializer(in_proj_std), name="v_proj" + ) + + self.dropout = tf.keras.layers.Dropout(rate=config.attention_dropout) + + self.out_proj = tf.keras.layers.Dense( + units=self.embed_dim, kernel_initializer=get_initializer(out_proj_std), name="out_proj" + ) + + # copied from transformers.models.bert.modeling_tf_bert.TFBertSelfAttention.transpose_for_scores + def transpose_for_scores(self, tensor: tf.Tensor, batch_size: int) -> tf.Tensor: + # Reshape from [batch_size, seq_length, all_head_size] to [batch_size, seq_length, num_attention_heads, attention_head_size] + tensor = tf.reshape(tensor=tensor, shape=(batch_size, -1, self.num_attention_heads, self.attention_head_size)) + + # Transpose the tensor from [batch_size, seq_length, num_attention_heads, attention_head_size] to [batch_size, num_attention_heads, seq_length, attention_head_size] + return tf.transpose(tensor, perm=[0, 2, 1, 3]) + + def call( + self, + hidden_states: tf.Tensor, + attention_mask: tf.Tensor, + causal_attention_mask: tf.Tensor, + output_attentions: bool, + training: bool = False, + ) -> Tuple[tf.Tensor]: + """Input shape: Batch x Time x Channel""" + + batch_size = shape_list(hidden_states)[0] + mixed_query_layer = self.q_proj(inputs=hidden_states) + mixed_key_layer = self.k_proj(inputs=hidden_states) + mixed_value_layer = self.v_proj(inputs=hidden_states) + query_layer = self.transpose_for_scores(mixed_query_layer, batch_size) + key_layer = self.transpose_for_scores(mixed_key_layer, batch_size) + value_layer = self.transpose_for_scores(mixed_value_layer, batch_size) + + # Take the dot product between "query" and "key" to get the raw attention scores. + # (batch size, num_heads, seq_len_q, seq_len_k) + attention_scores = tf.matmul(query_layer, key_layer, transpose_b=True) + dk = tf.cast(self.sqrt_att_head_size, dtype=attention_scores.dtype) + attention_scores = tf.divide(attention_scores, dk) + + # apply the causal_attention_mask first + if causal_attention_mask is not None: + # Apply the causal attention mask (precomputed for all layers in TFCLIPModel call() function) + attention_scores = tf.add(attention_scores, causal_attention_mask) + + if attention_mask is not None: + # Apply the attention mask (precomputed for all layers in TFCLIPModel call() function) + attention_scores = tf.add(attention_scores, attention_mask) + + # Normalize the attention scores to probabilities. + _attention_probs = stable_softmax(logits=attention_scores, axis=-1) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. 
+ attention_probs = self.dropout(inputs=_attention_probs, training=training) + + attention_output = tf.matmul(attention_probs, value_layer) + attention_output = tf.transpose(attention_output, perm=[0, 2, 1, 3]) + + # (batch_size, seq_len_q, embed_dim) + attention_output = tf.reshape(tensor=attention_output, shape=(batch_size, -1, self.embed_dim)) + + attention_output = self.out_proj(attention_output, training=training) + # In TFBert, attention weights are returned after dropout. + # However, in CLIP, they are returned before dropout. + outputs = (attention_output, _attention_probs) if output_attentions else (attention_output,) + + return outputs + + +class TFCLIPMLP(tf.keras.layers.Layer): + def __init__(self, config: CLIPConfig, **kwargs): + super().__init__(**kwargs) + + self.activation_fn = get_tf_activation(config.hidden_act) + + factor = config.initializer_factor + in_proj_std = (config.hidden_size**-0.5) * ((2 * config.num_hidden_layers) ** -0.5) * factor + fc_std = (2 * config.hidden_size) ** -0.5 * factor + + self.fc1 = tf.keras.layers.Dense( + units=config.intermediate_size, kernel_initializer=get_initializer(fc_std), name="fc1" + ) + self.fc2 = tf.keras.layers.Dense( + units=config.hidden_size, kernel_initializer=get_initializer(in_proj_std), name="fc2" + ) + + def call(self, hidden_states: tf.Tensor) -> tf.Tensor: + + hidden_states = self.fc1(inputs=hidden_states) + hidden_states = self.activation_fn(hidden_states) + hidden_states = self.fc2(inputs=hidden_states) + return hidden_states + + +class TFCLIPEncoderLayer(tf.keras.layers.Layer): + def __init__(self, config: CLIPConfig, **kwargs): + super().__init__(**kwargs) + + self.embed_dim = config.hidden_size + self.self_attn = TFCLIPAttention(config, name="self_attn") + self.layer_norm1 = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="layer_norm1") + self.mlp = TFCLIPMLP(config, name="mlp") + self.layer_norm2 = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="layer_norm2") + + def call( + self, + hidden_states: tf.Tensor, + attention_mask: tf.Tensor, + causal_attention_mask: tf.Tensor, + output_attentions: bool, + training: bool = False, + ) -> Tuple[tf.Tensor]: + """ + Args: + hidden_states (`tf.Tensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`tf.Tensor`): attention mask of size + `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. + causal_attention_mask (`tf.Tensor`): causal attention mask of size + `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. + output_attentions (`bool`): + Whether or not to return the attentions tensors of all attention layers. See `outputs` under returned + tensors for more detail. 
+ """ + residual = hidden_states + + hidden_states = self.layer_norm1(inputs=hidden_states) + attention_outputs = self.self_attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + causal_attention_mask=causal_attention_mask, + output_attentions=output_attentions, + training=training, + ) + hidden_states = attention_outputs[0] + hidden_states = residual + hidden_states + + residual = hidden_states + hidden_states = self.layer_norm2(inputs=hidden_states) + hidden_states = self.mlp(hidden_states=hidden_states) + hidden_states = residual + hidden_states + + outputs = (hidden_states,) + attention_outputs[1:] # add attentions if we output them + + return outputs + + +class TFCLIPEncoder(tf.keras.layers.Layer): + """ + Transformer encoder consisting of `config.num_hidden_layers` self attention layers. Each layer is a + [`TFCLIPEncoderLayer`]. + + Args: + config: CLIPConfig + """ + + def __init__(self, config: CLIPConfig, **kwargs): + super().__init__(**kwargs) + + self.layers = [TFCLIPEncoderLayer(config, name=f"layers_._{i}") for i in range(config.num_hidden_layers)] + + def call( + self, + hidden_states: tf.Tensor, + attention_mask: tf.Tensor, + causal_attention_mask: tf.Tensor, + output_attentions: bool, + output_hidden_states: bool, + return_dict: bool, + training: bool = False, + ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: + all_hidden_states = () if output_hidden_states else None + all_attentions = () if output_attentions else None + + for i, layer_module in enumerate(self.layers): + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + layer_outputs = layer_module( + hidden_states=hidden_states, + attention_mask=attention_mask, + causal_attention_mask=causal_attention_mask, + output_attentions=output_attentions, + training=training, + ) + hidden_states = layer_outputs[0] + + if output_attentions: + all_attentions = all_attentions + (layer_outputs[1],) + + # Add last layer + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + if not return_dict: + return tuple(v for v in [hidden_states, all_hidden_states, all_attentions] if v is not None) + + return TFBaseModelOutput( + last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_attentions + ) + + +class TFCLIPTextTransformer(tf.keras.layers.Layer): + def __init__(self, config: CLIPTextConfig, **kwargs): + super().__init__(**kwargs) + + self.embeddings = TFCLIPTextEmbeddings(config, name="embeddings") + self.encoder = TFCLIPEncoder(config, name="encoder") + self.final_layer_norm = tf.keras.layers.LayerNormalization( + epsilon=config.layer_norm_eps, name="final_layer_norm" + ) + + def call( + self, + input_ids: TFModelInputType, + attention_mask: tf.Tensor, + position_ids: tf.Tensor, + output_attentions: bool, + output_hidden_states: bool, + return_dict: bool, + training: bool = False, + ) -> Union[TFBaseModelOutputWithPooling, Tuple[tf.Tensor]]: + input_shape = shape_list(input_ids) + + embedding_output = self.embeddings(input_ids=input_ids, position_ids=position_ids) + + batch_size, seq_length = input_shape + # CLIP's text model uses causal mask, prepare it here. 
+ # https://github.com/openai/CLIP/blob/cfcffb90e69f37bf2ff1e988237a0fbe41f33c04/clip/model.py#L324 + causal_attention_mask = self._build_causal_attention_mask(batch_size, seq_length, dtype=embedding_output.dtype) + + # check attention mask and invert + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + attention_mask = _expand_mask(attention_mask) + + encoder_outputs = self.encoder( + hidden_states=embedding_output, + attention_mask=attention_mask, + causal_attention_mask=causal_attention_mask, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + sequence_output = encoder_outputs[0] + sequence_output = self.final_layer_norm(inputs=sequence_output) + + # text_embeds.shape = [batch_size, n_ctx, transformer.width] + # take features from the eot embedding (eot_token is the highest number in each sequence) + pooled_output = tf.gather_nd( + params=sequence_output, + indices=tf.stack( + values=(tf.range(input_shape[0], dtype=tf.int64), tf.math.argmax(input_ids, axis=-1)), axis=1 + ), + ) + + if not return_dict: + return (sequence_output, pooled_output) + encoder_outputs[1:] + + return TFBaseModelOutputWithPooling( + last_hidden_state=sequence_output, + pooler_output=pooled_output, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + ) + + def _build_causal_attention_mask(self, batch_size, seq_length, dtype=tf.float32): + # It is possible with an unspecified sequence length for seq_length to be + # a runtime value, which is unsupported by tf.constant. Per the TensorFlow + # docs, tf.fill can handle runtime dynamic shapes: + # https://www.tensorflow.org/api_docs/python/tf/fill + diag = tf.cast(tf.fill((seq_length,), 0.0), dtype) + + # set an additive 2D attention mask with all places being masked + to_mask = tf.cast(tf.fill((seq_length, seq_length), -10000.0), dtype) + + # set diagonal & lower triangular parts to 0 (i.e. 
the places not to be masked) + # TIP: think the 2D matrix as the space of (query_seq, key_seq) + to_mask = tf.linalg.band_part(to_mask, 0, -1) + # to_mask = tf.linalg.band_part(to_mask, -1, 0) + to_mask = tf.linalg.set_diag(to_mask, diagonal=diag) + + return tf.broadcast_to(input=to_mask, shape=(batch_size, 1, seq_length, seq_length)) + + +@keras_serializable +class TFCLIPTextMainLayer(tf.keras.layers.Layer): + config_class = CLIPTextConfig + + def __init__(self, config: CLIPTextConfig, **kwargs): + super().__init__(**kwargs) + self.config = config + self.text_model = TFCLIPTextTransformer(config, name="text_model") + + def get_input_embeddings(self) -> tf.keras.layers.Layer: + return self.text_model.embeddings + + def set_input_embeddings(self, value: tf.Variable): + self.text_model.embeddings.weight = value + self.text_model.embeddings.vocab_size = shape_list(value)[0] + + @unpack_inputs + def call( + self, + input_ids: Optional[TFModelInputType] = None, + attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, + position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: bool = False, + ) -> Union[TFBaseModelOutputWithPooling, Tuple[tf.Tensor]]: + if input_ids is None: + raise ValueError("You have to specify input_ids") + + input_shape = shape_list(input_ids) + + if attention_mask is None: + attention_mask = tf.fill(dims=input_shape, value=1) + + text_model_outputs = self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + return text_model_outputs + + +class TFCLIPVisionTransformer(tf.keras.layers.Layer): + def __init__(self, config: CLIPVisionConfig, **kwargs): + super().__init__(**kwargs) + + self.embeddings = TFCLIPVisionEmbeddings(config, name="embeddings") + self.pre_layernorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="pre_layrnorm") + self.encoder = TFCLIPEncoder(config, name="encoder") + self.post_layernorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="post_layernorm") + + def call( + self, + pixel_values: TFModelInputType, + output_attentions: bool, + output_hidden_states: bool, + return_dict: bool, + training: bool = False, + ) -> Union[TFBaseModelOutputWithPooling, Tuple[tf.Tensor]]: + + embedding_output = self.embeddings(pixel_values=pixel_values) + embedding_output = self.pre_layernorm(inputs=embedding_output) + + encoder_outputs = self.encoder( + hidden_states=embedding_output, + attention_mask=None, + causal_attention_mask=None, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + sequence_output = encoder_outputs[0] + pooled_output = sequence_output[:, 0, :] + pooled_output = self.post_layernorm(inputs=pooled_output) + + if not return_dict: + return (sequence_output, pooled_output) + encoder_outputs[1:] + + return TFBaseModelOutputWithPooling( + last_hidden_state=sequence_output, + pooler_output=pooled_output, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + ) + + +@keras_serializable +class TFCLIPVisionMainLayer(tf.keras.layers.Layer): + config_class = CLIPVisionConfig + + def __init__(self, config: CLIPVisionConfig, **kwargs): + super().__init__(**kwargs) + 
self.config = config + self.vision_model = TFCLIPVisionTransformer(config, name="vision_model") + + def get_input_embeddings(self) -> tf.keras.layers.Layer: + return self.vision_model.embeddings + + @unpack_inputs + def call( + self, + pixel_values: Optional[TFModelInputType] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: bool = False, + ) -> Union[TFBaseModelOutputWithPooling, Tuple[tf.Tensor]]: + + if pixel_values is None: + raise ValueError("You have to specify pixel_values") + + vision_model_outputs = self.vision_model( + pixel_values=pixel_values, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + return vision_model_outputs + + +@keras_serializable +class TFCLIPMainLayer(tf.keras.layers.Layer): + config_class = CLIPConfig + + def __init__(self, config: CLIPConfig, **kwargs): + super().__init__(**kwargs) + + if not isinstance(config.text_config, CLIPTextConfig): + raise ValueError( + "config.text_config is expected to be of type CLIPTextConfig but is of type" + f" {type(config.text_config)}." + ) + + if not isinstance(config.vision_config, CLIPVisionConfig): + raise ValueError( + "config.vision_config is expected to be of type CLIPVisionConfig but is of type" + f" {type(config.vision_config)}." + ) + + self.config = config + + text_config = config.text_config + vision_config = config.vision_config + + self.projection_dim = config.projection_dim + + self.text_model = TFCLIPTextTransformer(text_config, name="text_model") + self.vision_model = TFCLIPVisionTransformer(vision_config, name="vision_model") + + self.visual_projection = tf.keras.layers.Dense( + units=self.projection_dim, + kernel_initializer=get_initializer(vision_config.hidden_size**-0.5 * self.config.initializer_factor), + use_bias=False, + name="visual_projection", + ) + + self.text_projection = tf.keras.layers.Dense( + units=self.projection_dim, + kernel_initializer=get_initializer(text_config.hidden_size**-0.5 * self.config.initializer_factor), + use_bias=False, + name="text_projection", + ) + + def build(self, input_shape: tf.TensorShape): + + self.logit_scale = self.add_weight( + shape=(1,), + initializer=tf.keras.initializers.Constant(self.config.logit_scale_init_value), + trainable=True, + name="logit_scale", + ) + + super().build(input_shape) + + @unpack_inputs + def get_text_features( + self, + input_ids: Optional[TFModelInputType] = None, + attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, + position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: bool = False, + ) -> tf.Tensor: + + if input_ids is None: + raise ValueError("You have to specify either input_ids") + + input_shape = shape_list(input_ids) + + if attention_mask is None: + attention_mask = tf.fill(dims=input_shape, value=1) + + text_outputs = self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + pooled_output = text_outputs[1] + text_features = self.text_projection(inputs=pooled_output) + + return text_features + + @unpack_inputs + def get_image_features( + self, + pixel_values: Optional[TFModelInputType] = None, + 
output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: bool = False, + ) -> tf.Tensor: + if pixel_values is None: + raise ValueError("You have to specify pixel_values") + + vision_outputs = self.vision_model( + pixel_values=pixel_values, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + pooled_output = vision_outputs[1] # pooled_output + image_features = self.visual_projection(inputs=pooled_output) + + return image_features + + @unpack_inputs + def call( + self, + input_ids: Optional[TFModelInputType] = None, + pixel_values: Optional[TFModelInputType] = None, + attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, + position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, + return_loss: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: bool = False, + ) -> Union[TFCLIPOutput, Tuple[tf.Tensor]]: + + if input_ids is None: + raise ValueError("You have to specify either input_ids") + if pixel_values is None: + raise ValueError("You have to specify pixel_values") + + input_shape = shape_list(input_ids) + + if attention_mask is None: + attention_mask = tf.fill(dims=input_shape, value=1) + + vision_outputs = self.vision_model( + pixel_values=pixel_values, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + text_outputs = self.text_model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + image_embeds = vision_outputs[1] + image_embeds = self.visual_projection(inputs=image_embeds) + + text_embeds = text_outputs[1] + text_embeds = self.text_projection(inputs=text_embeds) + + # normalized features + image_embeds = image_embeds / tf.norm(tensor=image_embeds, ord="euclidean", axis=-1, keepdims=True) + text_embeds = text_embeds / tf.norm(tensor=text_embeds, ord="euclidean", axis=-1, keepdims=True) + + # cosine similarity as logits + logit_scale = tf.math.exp(self.logit_scale) + logits_per_text = tf.matmul(text_embeds, image_embeds, transpose_b=True) * logit_scale + logits_per_image = tf.transpose(logits_per_text) + + loss = None + if return_loss: + loss = clip_loss(logits_per_text) + + if not return_dict: + output = (logits_per_image, logits_per_text, text_embeds, image_embeds, text_outputs, vision_outputs) + return (loss,) + output if loss is not None else output + + return TFCLIPOutput( + loss=loss, + logits_per_image=logits_per_image, + logits_per_text=logits_per_text, + text_embeds=text_embeds, + image_embeds=image_embeds, + text_model_output=text_outputs, + vision_model_output=vision_outputs, + ) + + +class TFCLIPPreTrainedModel(TFPreTrainedModel): + """ + An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained + models. + """ + + config_class = CLIPConfig + base_model_prefix = "clip" + + +CLIP_START_DOCSTRING = r""" + + This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) 
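+
+    The joint `call` boils down to the following computation (a minimal sketch in which random
+    features stand in for the two encoder outputs; `2.6592` is the default `logit_scale_init_value`):
+
+    ```python
+    >>> import tensorflow as tf
+
+    >>> text_embeds = tf.math.l2_normalize(tf.random.normal((2, 512)), axis=-1)
+    >>> image_embeds = tf.math.l2_normalize(tf.random.normal((2, 512)), axis=-1)
+    >>> logit_scale = tf.math.exp(tf.constant(2.6592))  # exp of the learned temperature
+    >>> logits_per_text = tf.matmul(text_embeds, image_embeds, transpose_b=True) * logit_scale
+    >>> logits_per_image = tf.transpose(logits_per_text)
+    ```
+
+    With `return_loss=True`, the model additionally returns the symmetric cross-entropy loss
+    computed from `logits_per_text` and its transpose.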
+
+    This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it
+    as a regular TF 2.0 Keras Model and refer to the TF 2.0 documentation for all matters related to general usage and
+    behavior.
+
+    <Tip>
+
+    TF 2.0 models accept two formats as inputs:
+
+    - having all inputs as keyword arguments (like PyTorch models), or
+    - having all inputs as a list, tuple or dict in the first positional arguments.
+
+    This second option is useful when using the [`tf.keras.Model.fit`] method which currently requires having all the
+    tensors in the first argument of the model call function: `model(inputs)`.
+
+    If you choose this second option, there are three possibilities you can use to gather all the input Tensors in the
+    first positional argument:
+
+    - a single Tensor with `input_ids` only and nothing else: `model(input_ids)`
+    - a list of varying length with one or several input Tensors IN THE ORDER given in the docstring:
+      `model([input_ids, attention_mask])` or `model([input_ids, attention_mask, token_type_ids])`
+    - a dictionary with one or several input Tensors associated to the input names given in the docstring:
+      `model({"input_ids": input_ids, "token_type_ids": token_type_ids})`
+
+    </Tip>
+
+    Args:
+        config ([`CLIPConfig`]): Model configuration class with all the parameters of the model.
+            Initializing with a config file does not load the weights associated with the model, only the
+            configuration. Check out the [`~TFPreTrainedModel.from_pretrained`] method to load the model weights.
+"""
+
+CLIP_TEXT_INPUTS_DOCSTRING = r"""
+    Args:
+        input_ids (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]`, `Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each example must have the shape `({0})`):
+            Indices of input sequence tokens in the vocabulary.
+
+            Indices can be obtained using [`CLIPTokenizer`]. See [`PreTrainedTokenizer.__call__`] and
+            [`PreTrainedTokenizer.encode`] for details.
+
+            [What are input IDs?](../glossary#input-ids)
+        attention_mask (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*):
+            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
+
+            - 1 for tokens that are **not masked**,
+            - 0 for tokens that are **masked**.
+
+            [What are attention masks?](../glossary#attention-mask)
+        position_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*):
+            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
+            config.max_position_embeddings - 1]`.
+
+            [What are position IDs?](../glossary#position-ids)
+        output_attentions (`bool`, *optional*):
+            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
+            tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the
+            config will be used instead.
+        output_hidden_states (`bool`, *optional*):
+            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
+            more detail. This argument can be used only in eager mode, in graph mode the value in the config will be
+            used instead.
+        return_dict (`bool`, *optional*):
+            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used in
+            eager mode, in graph mode the value will always be set to True.
+        training (`bool`, *optional*, defaults to `False`):
+            Whether or not to use the model in training mode (some modules like dropout modules have different
+            behaviors between training and evaluation).
+"""
+
+CLIP_VISION_INPUTS_DOCSTRING = r"""
+    Args:
+        pixel_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]`, `Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each example must have the shape `(batch_size, num_channels, height, width)`):
+            Pixel values. Pixel values can be obtained using [`CLIPFeatureExtractor`]. See
+            [`CLIPFeatureExtractor.__call__`] for details.
+        output_attentions (`bool`, *optional*):
+            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
+            tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the
+            config will be used instead.
+        output_hidden_states (`bool`, *optional*):
+            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
+            more detail. This argument can be used only in eager mode, in graph mode the value in the config will be
+            used instead.
+        return_dict (`bool`, *optional*):
+            Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used in
+            eager mode, in graph mode the value will always be set to True.
+        training (`bool`, *optional*, defaults to `False`):
+            Whether or not to use the model in training mode (some modules like dropout modules have different
+            behaviors between training and evaluation).
+"""
+
+CLIP_INPUTS_DOCSTRING = r"""
+    Args:
+        input_ids (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]`, `Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each example must have the shape `({0})`):
+            Indices of input sequence tokens in the vocabulary.
+
+            Indices can be obtained using [`CLIPTokenizer`]. See [`PreTrainedTokenizer.__call__`] and
+            [`PreTrainedTokenizer.encode`] for details.
+
+            [What are input IDs?](../glossary#input-ids)
+        pixel_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]`, `Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each example must have the shape `(batch_size, num_channels, height, width)`):
+            Pixel values. Pixel values can be obtained using [`CLIPFeatureExtractor`]. See
+            [`CLIPFeatureExtractor.__call__`] for details.
+        attention_mask (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*):
+            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
+
+            - 1 for tokens that are **not masked**,
+            - 0 for tokens that are **masked**.
+
+            [What are attention masks?](../glossary#attention-mask)
+        position_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*):
+            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
+            config.max_position_embeddings - 1]`.
+
+            [What are position IDs?](../glossary#position-ids)
+        return_loss (`bool`, *optional*):
+            Whether or not to return the contrastive loss.
+        output_attentions (`bool`, *optional*):
+            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
+            tensors for more detail. This argument can be used only in eager mode, in graph mode the value in the
+            config will be used instead.
+        output_hidden_states (`bool`, *optional*):
+            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
+            more detail. This argument can be used only in eager mode, in graph mode the value in the config will be
+            used instead.
+ return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. This argument can be used in + eager mode, in graph mode the value will always be set to True. + training (`bool`, *optional*, defaults to `False``): + Whether or not to use the model in training mode (some modules like dropout modules have different + behaviors between training and evaluation). +""" + + +class TFCLIPTextModel(TFCLIPPreTrainedModel): + config_class = CLIPTextConfig + + def __init__(self, config: CLIPTextConfig, *inputs, **kwargs): + super().__init__(config, *inputs, **kwargs) + + self.clip = TFCLIPTextMainLayer(config, name="clip") + + @unpack_inputs + @add_start_docstrings_to_model_forward(CLIP_TEXT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) + @replace_return_docstrings(output_type=TFBaseModelOutputWithPooling, config_class=CLIPTextConfig) + def call( + self, + input_ids: Optional[TFModelInputType] = None, + attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, + position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: Optional[bool] = False, + ) -> Union[TFBaseModelOutputWithPooling, Tuple[tf.Tensor]]: + r""" + Returns: + + Examples: + + ```python + >>> from transformers import CLIPTokenizer, TFCLIPTextModel + + >>> model = TFCLIPTextModel.from_pretrained("openai/clip-vit-base-patch32") + >>> tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32") + + >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="tf") + + >>> outputs = model(**inputs) + >>> last_hidden_state = outputs.last_hidden_state + >>> pooled_output = outputs.pooler_output # pooled (EOS token) states + ```""" + + outputs = self.clip( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + return outputs + + @tf.function( + input_signature=[ + { + "input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"), + "attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"), + } + ] + ) + def serving(self, inputs: Dict[str, tf.Tensor]) -> TFBaseModelOutputWithPooling: + output = self.call(inputs) + return self.serving_output(output) + + def serving_output(self, output: TFBaseModelOutputWithPooling) -> TFBaseModelOutputWithPooling: + hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None + attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None + + return TFBaseModelOutputWithPooling( + last_hidden_state=output.last_hidden_state, + pooler_output=output.pooler_output, + hidden_states=hs, + attentions=attns, + ) + + +class TFCLIPVisionModel(TFCLIPPreTrainedModel): + config_class = CLIPVisionConfig + main_input_name = "pixel_values" + + def __init__(self, config: CLIPVisionConfig, *inputs, **kwargs): + super().__init__(config, *inputs, **kwargs) + + self.clip = TFCLIPVisionMainLayer(config, name="clip") + + @property + def dummy_inputs(self) -> Dict[str, tf.Tensor]: + """ + Dummy inputs to build the network. + + Returns: + `Dict[str, tf.Tensor]`: The dummy inputs. 
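+
+        With the default ViT-B/32 vision config (`image_size=224`) and the library's
+        three-sequence `DUMMY_INPUTS` constant, the dummy batch has shape `(3, 3, 224, 224)`.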
+ """ + VISION_DUMMY_INPUTS = tf.random.uniform( + shape=(len(DUMMY_INPUTS), 3, self.config.image_size, self.config.image_size), dtype=tf.float32 + ) + return {"pixel_values": VISION_DUMMY_INPUTS} + + @tf.function( + input_signature=[ + { + "pixel_values": tf.TensorSpec((None, None, None, None), tf.float32, name="pixel_values"), + } + ] + ) + def serving(self, inputs: Dict[str, tf.Tensor]) -> TFBaseModelOutputWithPooling: + """ + Method used for serving the model. + + Args: + inputs (`Dict[str, tf.Tensor]`): + The input of the saved model as a dictionary of tensors. + """ + output = self.call(inputs) + + return self.serving_output(output) + + @unpack_inputs + @add_start_docstrings_to_model_forward(CLIP_VISION_INPUTS_DOCSTRING) + @replace_return_docstrings(output_type=TFBaseModelOutputWithPooling, config_class=CLIPVisionConfig) + def call( + self, + pixel_values: Optional[TFModelInputType] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: Optional[bool] = False, + ) -> Union[TFBaseModelOutputWithPooling, Tuple[tf.Tensor]]: + r""" + Returns: + + Examples: + + ```python + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, TFCLIPVisionModel + + >>> model = TFCLIPVisionModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor(images=image, return_tensors="tf") + + >>> outputs = model(**inputs) + >>> last_hidden_state = outputs.last_hidden_state + >>> pooled_output = outputs.pooler_output # pooled CLS states + ```""" + + outputs = self.clip( + pixel_values=pixel_values, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + training=training, + ) + + return outputs + + def serving_output(self, output: TFBaseModelOutputWithPooling) -> TFBaseModelOutputWithPooling: + hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None + attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None + + return TFBaseModelOutputWithPooling( + last_hidden_state=output.last_hidden_state, + pooler_output=output.pooler_output, + hidden_states=hs, + attentions=attns, + ) + + +@add_start_docstrings(CLIP_START_DOCSTRING) +class TFCLIPModel(TFCLIPPreTrainedModel): + config_class = CLIPConfig + + def __init__(self, config: CLIPConfig, *inputs, **kwargs): + super().__init__(config, *inputs, **kwargs) + + self.clip = TFCLIPMainLayer(config, name="clip") + + @property + def dummy_inputs(self) -> Dict[str, tf.Tensor]: + """ + Dummy inputs to build the network. + + Returns: + `Dict[str, tf.Tensor]`: The dummy inputs. 
+ """ + VISION_DUMMY_INPUTS = tf.random.uniform( + shape=(len(DUMMY_INPUTS), 3, self.config.vision_config.image_size, self.config.vision_config.image_size), + dtype=tf.float32, + ) + return { + "input_ids": tf.constant(DUMMY_INPUTS, dtype=tf.int32), + "pixel_values": VISION_DUMMY_INPUTS, + } + + @tf.function( + input_signature=[ + { + "input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"), + "pixel_values": tf.TensorSpec((None, None, None, None), tf.float32, name="pixel_values"), + "attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"), + } + ] + ) + def serving(self, inputs: Dict[str, tf.Tensor]) -> TFCLIPOutput: + """ + Method used for serving the model. + + Args: + inputs (`Dict[str, tf.Tensor]`): + The input of the saved model as a dictionary of tensors. + """ + output = self.call(inputs) + + return self.serving_output(output) + + @unpack_inputs + @add_start_docstrings_to_model_forward(CLIP_TEXT_INPUTS_DOCSTRING.format("batch_size, sequence_length")) + def get_text_features( + self, + input_ids: Optional[TFModelInputType] = None, + attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, + position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: bool = False, + ) -> tf.Tensor: + r""" + Returns: + text_features (`tf.Tensor` of shape `(batch_size, output_dim`): The text embeddings obtained by applying + the projection layer to the pooled output of [`TFCLIPTextModel`]. + + Examples: + + ```python + >>> from transformers import CLIPTokenizer, TFCLIPModel + + >>> model = TFCLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32") + + >>> inputs = tokenizer(["a photo of a cat", "a photo of a dog"], padding=True, return_tensors="tf") + >>> text_features = model.get_text_features(**inputs) + ```""" + + text_features = self.clip.get_text_features( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + return text_features + + @unpack_inputs + @add_start_docstrings_to_model_forward(CLIP_VISION_INPUTS_DOCSTRING) + def get_image_features( + self, + pixel_values: Optional[TFModelInputType] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: bool = False, + ) -> tf.Tensor: + r""" + Returns: + image_features (`tf.Tensor` of shape `(batch_size, output_dim`): The image embeddings obtained by applying + the projection layer to the pooled output of [`TFCLIPVisionModel`]. 
+ + Examples: + + ```python + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, TFCLIPModel + + >>> model = TFCLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor(images=image, return_tensors="tf") + + >>> image_features = model.get_image_features(**inputs) + ```""" + + image_features = self.clip.get_image_features( + pixel_values=pixel_values, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + return image_features + + @unpack_inputs + @add_start_docstrings_to_model_forward(CLIP_INPUTS_DOCSTRING.format("batch_size, sequence_length")) + @replace_return_docstrings(output_type=TFCLIPOutput, config_class=CLIPConfig) + def call( + self, + input_ids: Optional[TFModelInputType] = None, + pixel_values: Optional[TFModelInputType] = None, + attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None, + position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None, + return_loss: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + training: bool = False, + ) -> Union[TFCLIPOutput, Tuple[tf.Tensor]]: + r""" + Returns: + + Examples: + + ```python + >>> import tensorflow as tf + >>> from PIL import Image + >>> import requests + >>> from transformers import CLIPProcessor, TFCLIPModel + + >>> model = TFCLIPModel.from_pretrained("openai/clip-vit-base-patch32") + >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32") + + >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg" + >>> image = Image.open(requests.get(url, stream=True).raw) + + >>> inputs = processor( + ... text=["a photo of a cat", "a photo of a dog"], images=image, return_tensors="tf", padding=True + ... ) + + >>> outputs = model(**inputs) + >>> logits_per_image = outputs.logits_per_image # this is the image-text similarity score + >>> probs = tf.nn.softmax(logits_per_image, axis=1) # we can take the softmax to get the label probabilities + ```""" + + outputs = self.clip( + input_ids=input_ids, + pixel_values=pixel_values, + attention_mask=attention_mask, + position_ids=position_ids, + return_loss=return_loss, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + return outputs + + def serving_output(self, output: TFCLIPOutput) -> TFCLIPOutput: + # TODO: As is this currently fails with saved_model=True, because + # TensorFlow cannot trace through nested dataclasses. 
Reference:
+        # https://github.com/huggingface/transformers/pull/16886
+        return output
diff --git a/core/models/encoders/clip_modules/modules_video.py b/core/models/encoders/clip_modules/modules_video.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c4afaecd28258d62f20b59f6af0197ff0278edb
--- /dev/null
+++ b/core/models/encoders/clip_modules/modules_video.py
@@ -0,0 +1,250 @@
+"""
+Duplicated from https://github.com/lucidrains/make-a-video-pytorch
+"""
+
+import math
+import functools
+from operator import mul
+
+import torch
+from torch import nn, einsum
+import torch.nn.functional as F
+
+from einops import rearrange, repeat, pack, unpack
+from einops.layers.torch import Rearrange
+
+# helper functions
+
+def exists(val):
+    return val is not None
+
+def default(val, d):
+    return val if exists(val) else d
+
+def mul_reduce(tup):
+    return functools.reduce(mul, tup)
+
+def divisible_by(numer, denom):
+    return (numer % denom) == 0
+
+mlist = nn.ModuleList
+
+# for time conditioning
+
+class SinusoidalPosEmb(nn.Module):
+    def __init__(self, dim, theta = 10000):
+        super().__init__()
+        self.theta = theta
+        self.dim = dim
+
+    def forward(self, x):
+        dtype, device = x.dtype, x.device
+        assert dtype == torch.float, 'input to sinusoidal pos emb must be a float type'
+
+        half_dim = self.dim // 2
+        emb = math.log(self.theta) / (half_dim - 1)
+        emb = torch.exp(torch.arange(half_dim, device = device, dtype = dtype) * -emb)
+        emb = rearrange(x, 'i -> i 1') * rearrange(emb, 'j -> 1 j')
+        return torch.cat((emb.sin(), emb.cos()), dim = -1).type(dtype)
+
+
+class ChanLayerNorm(nn.Module):
+    def __init__(self, dim):
+        super().__init__()
+        self.g = nn.Parameter(torch.ones(1, dim, 1))
+
+    def forward(self, x):
+        eps = 1e-5 if x.dtype == torch.float32 else 1e-3
+        var = torch.var(x, dim = 1, unbiased = False, keepdim = True)
+        mean = torch.mean(x, dim = 1, keepdim = True)
+        x = (x - mean) * var.clamp(min = eps).rsqrt()
+        dtype = self.g.dtype
+        return x.to(dtype) * self.g
+
+
+class LayerNorm(nn.Module):
+    def __init__(self, dim):
+        super().__init__()
+        self.g = nn.Parameter(torch.ones(dim))
+
+    def forward(self, x):
+        eps = 1e-5 if x.dtype == torch.float32 else 1e-3
+        var = torch.var(x, dim = 1, unbiased = False, keepdim = True)
+        mean = torch.mean(x, dim = 1, keepdim = True)
+        return (x - mean) * var.clamp(min = eps).rsqrt() * self.g
+
+
+# feedforward
+class GEGLU(nn.Module):
+    def forward(self, x):
+        x = x.float()
+        # gated GELU: half the channels gate the other half
+        x, gate = x.chunk(2, dim = 1)
+        x = x * F.gelu(gate)
+        return x
+
+class FeedForward(nn.Module):
+    def __init__(self, dim, mult = 4):
+        super().__init__()
+
+        inner_dim = int(dim * mult * 2 / 3)
+        self.proj_in = nn.Sequential(
+            nn.Conv1d(dim, inner_dim * 2, 1, bias = False),
+            GEGLU()
+        )
+
+        self.proj_out = nn.Sequential(
+            ChanLayerNorm(inner_dim),
+            nn.Conv1d(inner_dim, dim, 1, bias = False)
+        )
+
+        # zero-init so the block starts as an identity around the residual
+        nn.init.zeros_(self.proj_out[1].weight)
+
+    def forward(self, x):
+        x = self.proj_in(x)
+        x = self.proj_out(x)
+        return x
+
+
+# best relative positional encoding
+
+class ContinuousPositionBias(nn.Module):
+    """ from https://arxiv.org/abs/2111.09883 """
+
+    def __init__(
+        self,
+        *,
+        dim,
+        heads,
+        num_dims = 1,
+        layers = 2,
+        log_dist = True,
+        cache_rel_pos = False
+    ):
+        super().__init__()
+        self.num_dims = num_dims
+        self.log_dist = log_dist
+
+        self.net = nn.ModuleList([])
+        self.net.append(nn.Sequential(nn.Linear(self.num_dims, dim), nn.SiLU()))
+
+        for _ in range(layers - 1):
+            self.net.append(nn.Sequential(nn.Linear(dim, dim),
nn.SiLU())) + + self.net.append(nn.Linear(dim, heads)) + + self.cache_rel_pos = cache_rel_pos + self.register_buffer('rel_pos', None, persistent = False) + + @property + def device(self): + return next(self.parameters()).device + + @property + def dtype(self): + return next(self.parameters()).dtype + + def forward(self, *dimensions): + device = self.device + + if not exists(self.rel_pos) or not self.cache_rel_pos: + positions = [torch.arange(d, device = device) for d in dimensions] + grid = torch.stack(torch.meshgrid(*positions, indexing = 'ij')) + grid = rearrange(grid, 'c ... -> (...) c') + rel_pos = rearrange(grid, 'i c -> i 1 c') - rearrange(grid, 'j c -> 1 j c') + + if self.log_dist: + rel_pos = torch.sign(rel_pos) * torch.log(rel_pos.abs() + 1) + + self.register_buffer('rel_pos', rel_pos, persistent = False) + + rel_pos = self.rel_pos.to(self.dtype) + + for layer in self.net: + rel_pos = layer(rel_pos) + + return rearrange(rel_pos, 'i j h -> h i j') + +# helper classes + +class Attention(nn.Module): + def __init__( + self, + dim, + dim_head = 64, + heads = 8 + ): + super().__init__() + self.heads = heads + self.scale = dim_head ** -0.5 + inner_dim = dim_head * heads + self.norm = LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias = False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias = False) + self.to_out = nn.Linear(inner_dim, dim, bias = False) + + nn.init.zeros_(self.to_out.weight.data) # identity with skip connection + + self.pos_embeds = nn.Parameter(torch.randn([1, 30, dim])) + self.frame_rate_embeds = nn.Parameter(torch.randn([1, 30, dim])) + + def forward( + self, + x, + context = None, + rel_pos_bias = None, + framerate = None, + ): + if framerate is not None: + x = x + self.pos_embeds[:, :x.shape[1]].repeat(x.shape[0], 1, 1) + + if context is None: + context = x + + x = self.norm(x) + context = self.norm(context) + + q, k, v = self.to_q(x), *self.to_kv(context).chunk(2, dim = -1) + + q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = self.heads), (q, k, v)) + + q = q * self.scale + sim = einsum('b h i d, b h j d -> b h i j', q, k) + + if exists(rel_pos_bias): + sim = sim + rel_pos_bias + + attn = sim.softmax(dim = -1) + + out = einsum('b h i j, b h j d -> b h i d', attn, v) + + out = rearrange(out, 'b h n d -> b n (h d)') + return torch.nan_to_num(self.to_out(out)) + + +class SpatioTemporalAttention(nn.Module): + def __init__( + self, + dim, + *, + dim_head = 64, + heads = 8 + ): + super().__init__() + self.temporal_attn = Attention(dim = dim, dim_head = dim_head, heads = heads) + self.temporal_rel_pos_bias = ContinuousPositionBias(dim = dim // 2, heads = heads, num_dims = 1) + + self.ff = FeedForward(dim = dim, mult = 4) + + def forward( + self, + x, + ): + b = x.shape[0] + time_rel_pos_bias = self.temporal_rel_pos_bias(x.shape[1]) + x = self.temporal_attn(x, rel_pos_bias = time_rel_pos_bias) + x + x = self.ff(x.transpose(1, 2)).transpose(1, 2) + x + return x \ No newline at end of file diff --git a/core/models/encoders/clip_modules/processing_clip.py b/core/models/encoders/clip_modules/processing_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..5956ce53d342d245c6bcd91eddba9133f56d3f59 --- /dev/null +++ b/core/models/encoders/clip_modules/processing_clip.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# Copyright 2021 The HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Image/Text processor class for CLIP
+"""
+from transformers.processing_utils import ProcessorMixin
+from transformers.tokenization_utils_base import BatchEncoding
+
+
+class CLIPProcessor(ProcessorMixin):
+    r"""
+    Constructs a CLIP processor which wraps a CLIP feature extractor and a CLIP tokenizer into a single processor.
+
+    [`CLIPProcessor`] offers all the functionalities of [`CLIPFeatureExtractor`] and [`CLIPTokenizerFast`]. See the
+    [`~CLIPProcessor.__call__`] and [`~CLIPProcessor.decode`] for more information.
+
+    Args:
+        feature_extractor ([`CLIPFeatureExtractor`]):
+            The feature extractor is a required input.
+        tokenizer ([`CLIPTokenizerFast`]):
+            The tokenizer is a required input.
+    """
+    feature_extractor_class = "CLIPFeatureExtractor"
+    tokenizer_class = ("CLIPTokenizer", "CLIPTokenizerFast")
+
+    def __init__(self, feature_extractor, tokenizer):
+        super().__init__(feature_extractor, tokenizer)
+        self.current_processor = self.feature_extractor
+
+    def __call__(self, text=None, images=None, return_tensors=None, **kwargs):
+        """
+        Main method to prepare one or several sequence(s) and image(s) for the model. This method forwards the `text`
+        and `kwargs` arguments to CLIPTokenizerFast's [`~CLIPTokenizerFast.__call__`] if `text` is not `None` to encode
+        the text. To prepare the image(s), this method forwards the `images` and `kwargs` arguments to
+        CLIPFeatureExtractor's [`~CLIPFeatureExtractor.__call__`] if `images` is not `None`. Please refer to the
+        docstring of the above two methods for more information.
+
+        Args:
+            text (`str`, `List[str]`, `List[List[str]]`):
+                The sequence or batch of sequences to be encoded. Each sequence can be a string or a list of strings
+                (pretokenized string). If the sequences are provided as list of strings (pretokenized), you must set
+                `is_split_into_words=True` (to lift the ambiguity with a batch of sequences).
+            images (`PIL.Image.Image`, `np.ndarray`, `torch.Tensor`, `List[PIL.Image.Image]`, `List[np.ndarray]`, `List[torch.Tensor]`):
+                The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch
+                tensor. In case of a NumPy array/PyTorch tensor, each image should be of shape (C, H, W), where C is a
+                number of channels, H and W are image height and width.
+            return_tensors (`str` or [`~utils.TensorType`], *optional*):
+                If set, will return tensors of a particular framework. Acceptable values are:
+
+                - `'tf'`: Return TensorFlow `tf.constant` objects.
+                - `'pt'`: Return PyTorch `torch.Tensor` objects.
+                - `'np'`: Return NumPy `np.ndarray` objects.
+                - `'jax'`: Return JAX `jnp.ndarray` objects.
+
+        Returns:
+            [`BatchEncoding`]: A [`BatchEncoding`] with the following fields:
+
+            - **input_ids** -- List of token ids to be fed to a model. Returned when `text` is not `None`.
+            - **attention_mask** -- List of indices specifying which tokens should be attended to by the model (when
+              `return_attention_mask=True` or if *"attention_mask"* is in `self.model_input_names` and if `text` is not
+              `None`).
+            - **pixel_values** -- Pixel values to be fed to a model. Returned when `images` is not `None`.
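+
+        Example (a minimal sketch; it assumes network access to download the public
+        `openai/clip-vit-base-patch32` checkpoint):
+
+        ```python
+        >>> from PIL import Image
+        >>> import requests
+
+        >>> processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
+        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
+        >>> image = Image.open(requests.get(url, stream=True).raw)
+
+        >>> batch = processor(text=["a photo of a cat"], images=image, return_tensors="pt", padding=True)
+        >>> sorted(batch.keys())
+        ['attention_mask', 'input_ids', 'pixel_values']
+        ```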
+ """ + + if text is None and images is None: + raise ValueError("You have to specify either text or images. Both cannot be none.") + + if text is not None: + encoding = self.tokenizer(text, return_tensors=return_tensors, **kwargs) + + if images is not None: + image_features = self.feature_extractor(images, return_tensors=return_tensors, **kwargs) + + if text is not None and images is not None: + encoding["pixel_values"] = image_features.pixel_values + return encoding + elif text is not None: + return encoding + else: + return BatchEncoding(data=dict(**image_features), tensor_type=return_tensors) + + def batch_decode(self, *args, **kwargs): + """ + This method forwards all its arguments to CLIPTokenizerFast's [`~PreTrainedTokenizer.batch_decode`]. Please + refer to the docstring of this method for more information. + """ + return self.tokenizer.batch_decode(*args, **kwargs) + + def decode(self, *args, **kwargs): + """ + This method forwards all its arguments to CLIPTokenizerFast's [`~PreTrainedTokenizer.decode`]. Please refer to + the docstring of this method for more information. + """ + return self.tokenizer.decode(*args, **kwargs) diff --git a/core/models/encoders/clip_modules/tokenization_clip.py b/core/models/encoders/clip_modules/tokenization_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..d4513ce8be79e3007cdba8fd66f33df72179934d --- /dev/null +++ b/core/models/encoders/clip_modules/tokenization_clip.py @@ -0,0 +1,363 @@ +# coding=utf-8 +# Copyright 2021 The Open AI Team Authors and The HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tokenization classes for CLIP.""" + +import json +import os +from functools import lru_cache +from typing import List, Optional, Tuple + +import regex as re +from transformers.models.bert.tokenization_bert import BasicTokenizer + +from transformers.tokenization_utils import AddedToken, PreTrainedTokenizer +from transformers.utils import logging + + +logger = logging.get_logger(__name__) + +VOCAB_FILES_NAMES = { + "vocab_file": "vocab.json", + "merges_file": "merges.txt", +} + +PRETRAINED_VOCAB_FILES_MAP = { + "vocab_file": { + "openai/clip-vit-base-patch32": "https://huggingface.co/openai/clip-vit-base-patch32/resolve/main/vocab.json", + }, + "merges_file": { + "openai/clip-vit-base-patch32": "https://huggingface.co/openai/clip-vit-base-patch32/resolve/main/merges.txt", + }, +} + +PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { + "openai/clip-vit-base-patch32": 77, +} + + +PRETRAINED_INIT_CONFIGURATION = { + "openai/clip-vit-base-patch32": {}, +} + + +@lru_cache() +def bytes_to_unicode(): + """ + Returns list of utf-8 byte and a mapping to unicode strings. We specifically avoids mapping to whitespace/control + characters the bpe code barfs on. + + The reversible bpe codes work on unicode strings. This means you need a large # of unicode characters in your vocab + if you want to avoid UNKs. When you're at something like a 10B token dataset you end up needing around 5K for + decent coverage. 
+    lookup tables between utf-8 bytes and unicode strings.
+    """
+    bs = (
+        list(range(ord("!"), ord("~") + 1)) + list(range(ord("¡"), ord("¬") + 1)) + list(range(ord("®"), ord("ÿ") + 1))
+    )
+    cs = bs[:]
+    n = 0
+    for b in range(2**8):
+        if b not in bs:
+            bs.append(b)
+            cs.append(2**8 + n)
+            n += 1
+    cs = [chr(n) for n in cs]
+    return dict(zip(bs, cs))
+
+
+def get_pairs(word):
+    """
+    Return set of symbol pairs in a word.
+
+    Word is represented as tuple of symbols (symbols being variable-length strings).
+    """
+    pairs = set()
+    prev_char = word[0]
+    for char in word[1:]:
+        pairs.add((prev_char, char))
+        prev_char = char
+    return pairs
+
+
+def whitespace_clean(text):
+    text = re.sub(r"\s+", " ", text)
+    text = text.strip()
+    return text
+
+
+class CLIPTokenizer(PreTrainedTokenizer):
+    """
+    Construct a CLIP tokenizer. Based on byte-level Byte-Pair-Encoding.
+
+    This tokenizer inherits from [`PreTrainedTokenizer`] which contains most of the main methods. Users should refer to
+    this superclass for more information regarding those methods.
+
+    Args:
+        vocab_file (`str`):
+            Path to the vocabulary file.
+        merges_file (`str`):
+            Path to the merges file.
+        errors (`str`, *optional*, defaults to `"replace"`):
+            Paradigm to follow when decoding bytes to UTF-8. See
+            [bytes.decode](https://docs.python.org/3/library/stdtypes.html#bytes.decode) for more information.
+        unk_token (`str`, *optional*, defaults to `<|endoftext|>`):
+            The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
+            token instead.
+        bos_token (`str`, *optional*, defaults to `<|startoftext|>`):
+            The beginning of sequence token.
+        eos_token (`str`, *optional*, defaults to `<|endoftext|>`):
+            The end of sequence token.
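+
+    Example (a minimal sketch; it assumes the public `openai/clip-vit-base-patch32`
+    vocabulary files can be downloaded):
+
+    ```python
+    >>> tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32")
+    >>> ids = tokenizer("a photo of a cat")["input_ids"]
+    >>> (ids[0], ids[-1]) == (tokenizer.bos_token_id, tokenizer.eos_token_id)
+    True
+    ```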
+ """ + + vocab_files_names = VOCAB_FILES_NAMES + pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP + max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES + model_input_names = ["input_ids", "attention_mask"] + + def __init__( + self, + vocab_file, + merges_file, + errors="replace", + unk_token="<|endoftext|>", + bos_token="<|startoftext|>", + eos_token="<|endoftext|>", + pad_token="<|endoftext|>", # hack to enable padding + **kwargs + ): + bos_token = AddedToken(bos_token, lstrip=False, rstrip=False) if isinstance(bos_token, str) else bos_token + eos_token = AddedToken(eos_token, lstrip=False, rstrip=False) if isinstance(eos_token, str) else eos_token + unk_token = AddedToken(unk_token, lstrip=False, rstrip=False) if isinstance(unk_token, str) else unk_token + + super().__init__( + errors=errors, + unk_token=unk_token, + bos_token=bos_token, + eos_token=eos_token, + pad_token=pad_token, + **kwargs, + ) + + try: + import ftfy + + self.fix_text = ftfy.fix_text + except ImportError: + logger.warning("ftfy or spacy is not installed using BERT BasicTokenizer instead of ftfy.") + self.nlp = BasicTokenizer(do_lower_case=True) + self.fix_text = None + + with open(vocab_file, encoding="utf-8") as vocab_handle: + self.encoder = json.load(vocab_handle) + self.decoder = {v: k for k, v in self.encoder.items()} + self.errors = errors # how to handle errors in decoding + self.byte_encoder = bytes_to_unicode() + self.byte_decoder = {v: k for k, v in self.byte_encoder.items()} + with open(merges_file, encoding="utf-8") as merges_handle: + bpe_merges = merges_handle.read().strip().split("\n")[1 : 49152 - 256 - 2 + 1] + bpe_merges = [tuple(merge.split()) for merge in bpe_merges] + self.bpe_ranks = dict(zip(bpe_merges, range(len(bpe_merges)))) + self.cache = {"<|startoftext|>": "<|startoftext|>", "<|endoftext|>": "<|endoftext|>"} + + self.pat = re.compile( + r"""<\|startoftext\|>|<\|endoftext\|>|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+""", + re.IGNORECASE, + ) + + @property + def vocab_size(self): + return len(self.encoder) + + def get_vocab(self): + return dict(self.encoder, **self.added_tokens_encoder) + + def build_inputs_with_special_tokens( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None + ) -> List[int]: + """ + Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and + adding special tokens. A CLIP sequence has the following format: + + - single sequence: `<|startoftext|> X <|endoftext|>` + + Pairs of sequences are not the expected use case, but they will be handled without a separator. + + Args: + token_ids_0 (`List[int]`): + List of IDs to which the special tokens will be added. + token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + + Returns: + `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens. + """ + bos_token = [self.bos_token_id] + eos_token = [self.eos_token_id] + + if token_ids_1 is None: + return bos_token + token_ids_0 + eos_token + return bos_token + token_ids_0 + eos_token + eos_token + token_ids_1 + eos_token + + def get_special_tokens_mask( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None, already_has_special_tokens: bool = False + ) -> List[int]: + """ + Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding + special tokens using the tokenizer `prepare_for_model` method. 
+ + Args: + token_ids_0 (`List[int]`): + List of IDs. + token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + already_has_special_tokens (`bool`, *optional*, defaults to `False`): + Whether or not the token list is already formatted with special tokens for the model. + + Returns: + `List[int]`: A list of integers in the range [0, 1]: 1 for a special token, 0 for a sequence token. + """ + + if already_has_special_tokens: + return super().get_special_tokens_mask( + token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True + ) + + if token_ids_1 is None: + return [1] + ([0] * len(token_ids_0)) + [1] + return [1] + ([0] * len(token_ids_0)) + [1] + [1] + ([0] * len(token_ids_1)) + [1] + + def create_token_type_ids_from_sequences( + self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None + ) -> List[int]: + """ + Create a mask from the two sequences passed. CLIP does not make use of token type ids, therefore a list of + zeros is returned. + + Args: + token_ids_0 (`List[int]`): + List of IDs. + token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + + Returns: + `List[int]`: List of zeros. + """ + bos_token = [self.bos_token_id] + eos_token = [self.eos_token_id] + + if token_ids_1 is None: + return len(bos_token + token_ids_0 + eos_token) * [0] + return len(bos_token + token_ids_0 + eos_token + eos_token + token_ids_1 + eos_token) * [0] + + def bpe(self, token): + if token in self.cache: + return self.cache[token] + word = tuple(token[:-1]) + (token[-1] + "",) + pairs = get_pairs(word) + + if not pairs: + return token + "" + + while True: + bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(pair, float("inf"))) + if bigram not in self.bpe_ranks: + break + first, second = bigram + new_word = [] + i = 0 + while i < len(word): + try: + j = word.index(first, i) + except ValueError: + new_word.extend(word[i:]) + break + else: + new_word.extend(word[i:j]) + i = j + + if word[i] == first and i < len(word) - 1 and word[i + 1] == second: + new_word.append(first + second) + i += 2 + else: + new_word.append(word[i]) + i += 1 + new_word = tuple(new_word) + word = new_word + if len(word) == 1: + break + else: + pairs = get_pairs(word) + word = " ".join(word) + self.cache[token] = word + return word + + def _tokenize(self, text): + """Tokenize a string.""" + bpe_tokens = [] + if self.fix_text is None: + text = " ".join(self.nlp.tokenize(text)) + else: + text = whitespace_clean(self.fix_text(text)).lower() + + for token in re.findall(self.pat, text): + token = "".join( + self.byte_encoder[b] for b in token.encode("utf-8") + ) # Maps all our bytes to unicode strings, avoiding control tokens of the BPE (spaces in our case) + bpe_tokens.extend(bpe_token for bpe_token in self.bpe(token).split(" ")) + return bpe_tokens + + def _convert_token_to_id(self, token): + """Converts a token (str) in an id using the vocab.""" + return self.encoder.get(token, self.encoder.get(self.unk_token)) + + def _convert_id_to_token(self, index): + """Converts an index (integer) in a token (str) using the vocab.""" + return self.decoder.get(index) + + def convert_tokens_to_string(self, tokens): + """Converts a sequence of tokens (string) in a single string.""" + text = "".join(tokens) + byte_array = bytearray([self.byte_decoder[c] for c in text]) + text = byte_array.decode("utf-8", errors=self.errors).replace("", " ").strip() + return text + + def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = 
None) -> Tuple[str]:
+        if not os.path.isdir(save_directory):
+            logger.error("Vocabulary path ({}) should be a directory".format(save_directory))
+            return
+        vocab_file = os.path.join(
+            save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["vocab_file"]
+        )
+        merge_file = os.path.join(
+            save_directory, (filename_prefix + "-" if filename_prefix else "") + VOCAB_FILES_NAMES["merges_file"]
+        )
+
+        with open(vocab_file, "w", encoding="utf-8") as f:
+            f.write(json.dumps(self.encoder, indent=2, sort_keys=True, ensure_ascii=False) + "\n")
+
+        index = 0
+        with open(merge_file, "w", encoding="utf-8") as writer:
+            writer.write("#version: 0.2\n")
+            for bpe_tokens, token_index in sorted(self.bpe_ranks.items(), key=lambda kv: kv[1]):
+                if index != token_index:
+                    logger.warning(
+                        "Saving vocabulary to {}: BPE merge indices are not consecutive."
+                        " Please check that the tokenizer is not corrupted!".format(merge_file)
+                    )
+                    index = token_index
+                writer.write(" ".join(bpe_tokens) + "\n")
+                index += 1
+
+        return vocab_file, merge_file
diff --git a/core/models/encoders/clip_modules/tokenization_clip_fast.py b/core/models/encoders/clip_modules/tokenization_clip_fast.py
new file mode 100644
index 0000000000000000000000000000000000000000..5fe6d3d445bb09fc14418b01421050be62d8637c
--- /dev/null
+++ b/core/models/encoders/clip_modules/tokenization_clip_fast.py
@@ -0,0 +1,175 @@
+# coding=utf-8
+# Copyright 2021 The Open AI Team Authors and The HuggingFace Inc. team.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tokenization classes for CLIP (fast tokenizer)."""
+
+
+from typing import List, Optional, Tuple
+
+from tokenizers import pre_tokenizers
+
+from transformers.tokenization_utils_fast import PreTrainedTokenizerFast
+from transformers.utils import logging
+
+from .tokenization_clip import CLIPTokenizer
+
+
+logger = logging.get_logger(__name__)
+
+VOCAB_FILES_NAMES = {"vocab_file": "vocab.json", "merges_file": "merges.txt", "tokenizer_file": "tokenizer.json"}
+
+PRETRAINED_VOCAB_FILES_MAP = {
+    "vocab_file": {
+        "openai/clip-vit-base-patch32": "https://huggingface.co/openai/clip-vit-base-patch32/resolve/main/vocab.json",
+    },
+    "merges_file": {
+        "openai/clip-vit-base-patch32": "https://huggingface.co/openai/clip-vit-base-patch32/resolve/main/merges.txt",
+    },
+    "tokenizer_file": {
+        "openai/clip-vit-base-patch32": (
+            "https://huggingface.co/openai/clip-vit-base-patch32/resolve/main/tokenizer.json"
+        ),
+    },
+}
+
+PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
+    "openai/clip-vit-base-patch32": 77,
+}
+
+
+class CLIPTokenizerFast(PreTrainedTokenizerFast):
+    """
+    Construct a "fast" CLIP tokenizer (backed by HuggingFace's *tokenizers* library). Based on byte-level
+    Byte-Pair-Encoding.
+
+    This tokenizer inherits from [`PreTrainedTokenizerFast`] which contains most of the main methods. Users should
+    refer to this superclass for more information regarding those methods.
+
+    Args:
+        vocab_file (`str`):
+            Path to the vocabulary file.
+        merges_file (`str`):
+            Path to the merges file.
+        unk_token (`str`, *optional*, defaults to `<|endoftext|>`):
+            The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
+            token instead.
+        bos_token (`str`, *optional*, defaults to `<|startoftext|>`):
+            The beginning of sequence token.
+        eos_token (`str`, *optional*, defaults to `<|endoftext|>`):
+            The end of sequence token.
+    """
+
+    vocab_files_names = VOCAB_FILES_NAMES
+    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
+    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
+    model_input_names = ["input_ids", "attention_mask"]
+    slow_tokenizer_class = CLIPTokenizer
+
+    def __init__(
+        self,
+        vocab_file=None,
+        merges_file=None,
+        tokenizer_file=None,
+        unk_token="<|endoftext|>",
+        bos_token="<|startoftext|>",
+        eos_token="<|endoftext|>",
+        pad_token="<|endoftext|>",  # hack to enable padding
+        **kwargs
+    ):
+        super().__init__(
+            vocab_file,
+            merges_file,
+            tokenizer_file=tokenizer_file,
+            unk_token=unk_token,
+            bos_token=bos_token,
+            eos_token=eos_token,
+            pad_token=pad_token,
+            **kwargs,
+        )
+
+        if not isinstance(self.backend_tokenizer.pre_tokenizer, pre_tokenizers.Sequence):
+            raise ValueError(
+                "The `backend_tokenizer` provided does not match the expected format. The CLIP tokenizer has been"
+                " heavily modified from transformers version 4.17.0. You need to convert the tokenizer you are using"
+                " to be compatible with this version. The easiest way to do so is"
+                ' `CLIPTokenizerFast.from_pretrained("path_to_local_folder_or_hub_repo", from_slow=True)`. If you want'
+                " to use your existing tokenizer, you will have to revert to a version prior to 4.17.0 of"
+                " transformers."
+            )
+
+        self._wrap_decode_method_backend_tokenizer()
+
+    # Very ugly hack to enable padding to have a correct decoding, see https://github.com/huggingface/tokenizers/issues/872
+    def _wrap_decode_method_backend_tokenizer(self):
+        orig_decode_method = self.backend_tokenizer.decode
+
+        def new_decode_method(*args, **kwargs):
+            text = orig_decode_method(*args, **kwargs)
+            text = text.replace(self.backend_tokenizer.model.end_of_word_suffix, " ").strip()
+            return text
+
+        self.backend_tokenizer.decode = new_decode_method
+
+    def build_inputs_with_special_tokens(
+        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
+    ) -> List[int]:
+        """
+        Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and
+        adding special tokens. A CLIP sequence has the following format:
+
+        - single sequence: `<|startoftext|> X <|endoftext|>`
+
+        Pairs of sequences are not the expected use case, but they will be handled without a separator.
+
+        Args:
+            token_ids_0 (`List[int]`):
+                List of IDs to which the special tokens will be added.
+            token_ids_1 (`List[int]`, *optional*):
+                Optional second list of IDs for sequence pairs.
+
+        Returns:
+            `List[int]`: List of [input IDs](../glossary#input-ids) with the appropriate special tokens.
+        """
+        bos_token = [self.bos_token_id]
+        eos_token = [self.eos_token_id]
+
+        if token_ids_1 is None:
+            return bos_token + token_ids_0 + eos_token
+        return bos_token + token_ids_0 + eos_token + eos_token + token_ids_1 + eos_token
+
+    def create_token_type_ids_from_sequences(
+        self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
+    ) -> List[int]:
+        """
+        Create a mask from the two sequences passed. CLIP does not make use of token type ids, therefore a list of
+        zeros is returned.
+
+        Args:
+            token_ids_0 (`List[int]`):
+                List of IDs.
+ token_ids_1 (`List[int]`, *optional*): + Optional second list of IDs for sequence pairs. + + Returns: + `List[int]`: List of zeros. + """ + bos_token = [self.bos_token_id] + eos_token = [self.eos_token_id] + + if token_ids_1 is None: + return len(bos_token + token_ids_0 + eos_token) * [0] + return len(bos_token + token_ids_0 + eos_token + eos_token + token_ids_1 + eos_token) * [0] + + def save_vocabulary(self, save_directory: str, filename_prefix: Optional[str] = None) -> Tuple[str]: + files = self._tokenizer.model.save(save_directory, name=filename_prefix) + return tuple(files) diff --git a/core/models/latent_diffusion/__pycache__/diffusion_unet.cpython-38.pyc b/core/models/latent_diffusion/__pycache__/diffusion_unet.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7dd761e76c407dafc7cd6ad71086725ff817b0a4 Binary files /dev/null and b/core/models/latent_diffusion/__pycache__/diffusion_unet.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/__pycache__/modules_attention.cpython-38.pyc b/core/models/latent_diffusion/__pycache__/modules_attention.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ac2ed2d5f71193cba5f935f7cd863d6ee6495198 Binary files /dev/null and b/core/models/latent_diffusion/__pycache__/modules_attention.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/__pycache__/modules_conv.cpython-38.pyc b/core/models/latent_diffusion/__pycache__/modules_conv.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..acd64d4cf583538222ef3066268cd4048f456cce Binary files /dev/null and b/core/models/latent_diffusion/__pycache__/modules_conv.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/__pycache__/modules_video.cpython-38.pyc b/core/models/latent_diffusion/__pycache__/modules_video.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..09f6623a23682f6a5507745f2342781fb35c6609 Binary files /dev/null and b/core/models/latent_diffusion/__pycache__/modules_video.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/diffusion_unet.py b/core/models/latent_diffusion/diffusion_unet.py new file mode 100644 index 0000000000000000000000000000000000000000..ebc448c35415ca3db185fdf7f255d06117aa5d8d --- /dev/null +++ b/core/models/latent_diffusion/diffusion_unet.py @@ -0,0 +1,1662 @@ +from abc import abstractmethod +from functools import partial +import math +from typing import Iterable + +import random +import numpy as np +import torch as th +import torch.nn as nn +from torch import nn, einsum +import torch.nn.functional as F +from einops import rearrange + +from .modules_conv import \ + checkpoint, conv_nd, linear, avg_pool_nd, \ + zero_module, normalization, timestep_embedding +from .modules_attention import SpatialTransformer +from .modules_video import SpatioTemporalAttention + +from ..common.get_model import get_model, register + +version = '0' +symbol = 'openai' + + +class TimestepBlock(nn.Module): + """ + Any module where forward() takes timestep embeddings as a second argument. + """ + + @abstractmethod + def forward(self, x, emb): + """ + Apply the module to `x` given `emb` timestep embeddings. + """ + + +class VideoSequential(nn.Sequential): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. 
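+    In this codebase it acts as a plain two-element container: `TimestepEmbedSequential.forward`
+    below unrolls it manually, passing the timestep embedding to the first child and the
+    conditioning input `x_0` to the second.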
+ """ + pass + + +class TimestepEmbedSequential(nn.Sequential, TimestepBlock): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. + """ + + def forward(self, x, emb=None, context=None, x_0=None): + is_video = (x.ndim == 5) + if is_video: + num_frames = x.shape[2] + if emb is not None: + emb = emb.unsqueeze(1).repeat(1, num_frames, 1) + emb = rearrange(emb, 'b t c -> (b t) c') + if context is not None: + context_vid = context.unsqueeze(1).repeat(1, num_frames, 1, 1) + context_vid = rearrange(context_vid, 'b t n c -> (b t) n c') + + for layer in self: + if isinstance(layer, TimestepBlock): + x = layer(x, emb) + elif isinstance(layer, SpatialTransformer): + if is_video: + x = rearrange(x, 'b c t h w -> (b t) c h w ') + x = layer(x, context_vid) + x = rearrange(x, '(b t) c h w -> b c t h w', t=num_frames) + else: + x = layer(x, context) + elif isinstance(layer, SpatioTemporalAttention): + x = layer(x, x_0) + elif isinstance(layer, VideoSequential) or isinstance(layer, nn.ModuleList): + x = layer[0](x, emb) + x = layer[1](x, x_0) + else: + if is_video: + x = rearrange(x, 'b c t h w -> (b t) c h w ') + x = layer(x) + if is_video: + x = rearrange(x, '(b t) c h w -> b c t h w', t=num_frames) + return x + + +class UpsampleDeterministic(nn.Module): + def __init__(self, upscale=2): + super(UpsampleDeterministic, self).__init__() + self.upscale = upscale + + def forward(self, x): + return x[:, :, :, None, :, None] \ + .expand(-1, -1, -1, self.upscale, -1, self.upscale) \ + .reshape(x.size(0), x.size(1), x.size(2) * self.upscale, x.size(3) * self.upscale) + + +class Upsample(nn.Module): + """ + An upsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + upsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + if use_conv: + self.conv = conv_nd(dims, self.channels, self.out_channels, 3, padding=padding) + self.upsample = UpsampleDeterministic(2) + + def forward(self, x): + assert x.shape[1] == self.channels + x = self.upsample(x) + if self.use_conv: + x = self.conv(x) + return x + + +class TransposedUpsample(nn.Module): + 'Learned 2x upsampling without padding' + + def __init__(self, channels, out_channels=None, ks=5): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + + self.up = nn.ConvTranspose2d(self.channels, self.out_channels, kernel_size=ks, stride=2) + + def forward(self, x): + return self.up(x) + + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. 
+ """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = conv_nd( + dims, self.channels, self.out_channels, 3, stride=stride, padding=padding + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + + +class ConnectorOut(nn.Module): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. + """ + + def __init__( + self, + channels, + dropout=0, + out_channels=None, + use_conv=False, + dims=2, + use_checkpoint=False, + use_temporal_attention=False, + ): + super().__init__() + self.channels = channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + conv_nd(dims, channels, self.out_channels, 3, padding=1), + ) + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module( + conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1) + ), + ) + self.use_temporal_attention = use_temporal_attention + if use_temporal_attention: + self.temporal_attention = SpatioTemporalAttention( + dim=self.out_channels, + dim_head=self.out_channels // 4, + heads=8, + use_resnet=False, + ) + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, 3, padding=1 + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :return: an [N x C x ...] Tensor of outputs. + """ + return checkpoint( + self._forward, (x,), self.parameters(), self.use_checkpoint + ) + + def _forward(self, x): + is_video = x.ndim == 5 + if is_video: + num_frames = x.shape[2] + if self.use_temporal_attention: + x = self.temporal_attention(x) + x = rearrange(x, 'b c t h w -> (b t) c h w ') + + h = self.in_layers(x) + h = self.out_layers(h) + out = self.skip_connection(x) + h + if is_video: + out = rearrange(out, '(b t) c h w -> b c t h w', t=num_frames) + out = out.mean(2) + return out.mean([2, 3]).unsqueeze(1) + + +class ResBlock(TimestepBlock): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. 
+ :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. + """ + + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_conv=False, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + conv_nd(dims, channels, self.out_channels, 3, padding=1), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims) + self.x_upd = Upsample(channels, False, dims) + elif down: + self.h_upd = Downsample(channels, False, dims) + self.x_upd = Downsample(channels, False, dims) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.emb_layers = nn.Sequential( + nn.SiLU(), + linear( + emb_channels, + 2 * self.out_channels if use_scale_shift_norm else self.out_channels, + ), + ) + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module( + conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1) + ), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, 3, padding=1 + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x, emb): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. + """ + return checkpoint( + self._forward, (x, emb), self.parameters(), self.use_checkpoint + ) + + def _forward(self, x, emb): + is_video = x.ndim == 5 + if is_video: + num_frames = x.shape[2] + x = rearrange(x, 'b c t h w -> (b t) c h w ') + + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + emb_out = self.emb_layers(emb) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + scale, shift = th.chunk(emb_out, 2, dim=1) + h = out_norm(h) * (1 + scale) + shift + h = out_rest(h) + else: + h = h + emb_out + h = self.out_layers(h) + + out = self.skip_connection(x) + h + if is_video: + out = rearrange(out, '(b t) c h w -> b c t h w', t=num_frames) + return out + + +class AttentionBlock(nn.Module): + """ + An attention block that allows spatial positions to attend to each other. + Originally ported from here, but adapted to the N-d case. + https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66. 
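+
+    Editor's note: when num_head_channels is set, channels must divide evenly
+    by it; e.g. channels=320 with num_head_channels=40 yields 320 // 40 = 8
+    heads. Otherwise num_heads is used directly.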
+ """ + + def __init__( + self, + channels, + num_heads=1, + num_head_channels=-1, + use_checkpoint=False, + use_new_attention_order=False, + ): + super().__init__() + self.channels = channels + if num_head_channels == -1: + self.num_heads = num_heads + else: + assert ( + channels % num_head_channels == 0 + ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}" + self.num_heads = channels // num_head_channels + self.use_checkpoint = use_checkpoint + self.norm = normalization(channels) + self.qkv = conv_nd(1, channels, channels * 3, 1) + if use_new_attention_order: + # split qkv before split heads + self.attention = QKVAttention(self.num_heads) + else: + # split heads before split qkv + self.attention = QKVAttentionLegacy(self.num_heads) + + self.proj_out = zero_module(conv_nd(1, channels, channels, 1)) + + def forward(self, x): + return checkpoint(self._forward, (x,), self.parameters(), + True) # TODO: check checkpoint usage, is True # TODO: fix the .half call!!! + + def _forward(self, x): + b, c, *spatial = x.shape + x = x.reshape(b, c, -1) + qkv = self.qkv(self.norm(x)) + h = self.attention(qkv) + h = self.proj_out(h) + return (x + h).reshape(b, c, *spatial) + + +def count_flops_attn(model, _x, y): + """ + A counter for the `thop` package to count the operations in an + attention operation. + Meant to be used like: + macs, params = thop.profile( + model, + inputs=(inputs, timestamps), + custom_ops={QKVAttention: QKVAttention.count_flops}, + ) + """ + b, c, *spatial = y[0].shape + num_spatial = int(np.prod(spatial)) + # We perform two matmuls with the same number of ops. + # The first computes the weight matrix, the second computes + # the combination of the value vectors. + matmul_ops = 2 * b * (num_spatial ** 2) * c + model.total_ops += th.DoubleTensor([matmul_ops]) + + +class QKVAttentionLegacy(nn.Module): + """ + A module which performs QKV attention. Matches legacy QKVAttention + input/ouput heads shaping + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. + """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", q * scale, k * scale + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight, dim=-1).type(weight.dtype) + a = th.einsum("bts,bcs->bct", weight, v) + return a.reshape(bs, -1, length) + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class QKVAttention(nn.Module): + """ + A module which performs QKV attention and splits in a different order. + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. 
+ """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = qkv.chunk(3, dim=1) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", + (q * scale).view(bs * self.n_heads, ch, length), + (k * scale).view(bs * self.n_heads, ch, length), + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight, dim=-1).type(weight.dtype) + a = th.einsum("bts,bcs->bct", weight, v.reshape(bs * self.n_heads, ch, length)) + return a.reshape(bs, -1, length) + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +from functools import partial + + +@register('openai_unet_2d', version) +class UNetModel2D(nn.Module): + def __init__(self, + input_channels, + model_channels, + output_channels, + context_dim=768, + num_noattn_blocks=(2, 2, 2, 2), + channel_mult=(1, 2, 4, 8), + with_attn=[True, True, True, False], + channel_mult_connector=(1, 2, 4), + num_noattn_blocks_connector=(1, 1, 1), + with_connector=[True, True, True, False], + connector_output_channel=1280, + num_heads=8, + use_checkpoint=True, + use_video_architecture=False, + video_dim_scale_factor=4, + init_connector=True): + + super().__init__() + ResBlockPreset = partial( + ResBlock, dropout=0, dims=2, use_checkpoint=use_checkpoint, + use_scale_shift_norm=False) + + self.input_channels = input_channels + self.model_channels = model_channels + self.num_noattn_blocks = num_noattn_blocks + self.channel_mult = channel_mult + self.num_heads = num_heads + + ################## + # Time embedding # + ################## + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), ) + + ################## + # Connector # + ################## + + if init_connector: + current_channel = model_channels // 2 + self.connecters_out = nn.ModuleList([TimestepEmbedSequential( + nn.Conv2d(input_channels, current_channel, 3, padding=1, bias=True))]) + for level_idx, mult in enumerate(channel_mult_connector): + for _ in range(num_noattn_blocks_connector[level_idx]): + if use_video_architecture: + layers = [nn.ModuleList([ + ResBlockPreset( + current_channel, time_embed_dim, + out_channels=mult * model_channels), + SpatioTemporalAttention( + dim=mult * model_channels, + dim_head=mult * model_channels // video_dim_scale_factor, + heads=8 + )])] + else: + layers = [ + ResBlockPreset( + current_channel, time_embed_dim, + out_channels=mult * model_channels)] + + current_channel = mult * model_channels + self.connecters_out.append(TimestepEmbedSequential(*layers)) + + if level_idx != len(channel_mult_connector) - 1: + self.connecters_out.append( + TimestepEmbedSequential( + Downsample( + current_channel, use_conv=True, + dims=2, out_channels=current_channel, ))) + + out = TimestepEmbedSequential( + *[normalization(current_channel), + nn.SiLU(), + nn.Conv2d(current_channel, connector_output_channel, 3, padding=1)], ) + self.connecters_out.append(out) + connector_out_channels = connector_output_channel + + else: + with_connector = [False] * len(with_connector) + + ################ + # input_blocks # + ################ + current_channel = model_channels + input_blocks = [ + TimestepEmbedSequential( + nn.Conv2d(input_channels, model_channels, 3, padding=1, bias=True))] + input_block_channels = [current_channel] + + input_block_connecters_in = [None] + + for level_idx, mult in enumerate(channel_mult): + for _ in 
range(self.num_noattn_blocks[level_idx]): + if use_video_architecture: + layers = [nn.ModuleList([ + ResBlockPreset( + current_channel, time_embed_dim, + out_channels=mult * model_channels), + SpatioTemporalAttention( + dim=mult * model_channels, + dim_head=mult * model_channels // video_dim_scale_factor, + heads=8 + )])] + else: + layers = [ + ResBlockPreset( + current_channel, time_embed_dim, + out_channels=mult * model_channels)] + + current_channel = mult * model_channels + dim_head = current_channel // num_heads + if with_attn[level_idx]: + layers += [ + SpatialTransformer( + current_channel, num_heads, dim_head, + depth=1, context_dim=context_dim)] + + input_blocks += [TimestepEmbedSequential(*layers)] + input_block_channels.append(current_channel) + if with_connector[level_idx] and init_connector: + input_block_connecters_in.append( + TimestepEmbedSequential(*[SpatialTransformer( + current_channel, num_heads, dim_head, + depth=1, context_dim=connector_out_channels)]) + ) + else: + input_block_connecters_in.append(None) + + if level_idx != len(channel_mult) - 1: + input_blocks += [ + TimestepEmbedSequential( + Downsample( + current_channel, use_conv=True, + dims=2, out_channels=current_channel, ))] + input_block_channels.append(current_channel) + input_block_connecters_in.append(None) + + self.input_blocks = nn.ModuleList(input_blocks) + self.input_block_connecters_in = nn.ModuleList(input_block_connecters_in) + + ################# + # middle_blocks # + ################# + + if use_video_architecture: + layer1 = nn.ModuleList([ + ResBlockPreset( + current_channel, time_embed_dim), + SpatioTemporalAttention( + dim=current_channel, + dim_head=current_channel // video_dim_scale_factor, + heads=8 + )]) + layer2 = nn.ModuleList([ + ResBlockPreset( + current_channel, time_embed_dim), + SpatioTemporalAttention( + dim=current_channel, + dim_head=current_channel // video_dim_scale_factor, + heads=8 + )]) + else: + layer1 = ResBlockPreset( + current_channel, time_embed_dim) + layer2 = ResBlockPreset( + current_channel, time_embed_dim) + + middle_block = [ + layer1, + SpatialTransformer( + current_channel, num_heads, dim_head, + depth=1, context_dim=context_dim), + layer2] + + self.middle_block = TimestepEmbedSequential(*middle_block) + + ################# + # output_blocks # + ################# + output_blocks = [] + output_block_connecters_out = [] + output_block_connecters_in = [] + for level_idx, mult in list(enumerate(channel_mult))[::-1]: + for block_idx in range(self.num_noattn_blocks[level_idx] + 1): + extra_channel = input_block_channels.pop() + if use_video_architecture: + layers = [nn.ModuleList([ + ResBlockPreset( + current_channel + extra_channel, + time_embed_dim, + out_channels=model_channels * mult), + SpatioTemporalAttention( + dim=mult * model_channels, + dim_head=mult * model_channels // video_dim_scale_factor, + heads=8 + )])] + else: + layers = [ + ResBlockPreset( + current_channel + extra_channel, + time_embed_dim, + out_channels=model_channels * mult)] + + current_channel = model_channels * mult + dim_head = current_channel // num_heads + + if with_attn[level_idx]: + layers += [ + SpatialTransformer( + current_channel, num_heads, dim_head, + depth=1, context_dim=context_dim)] + if with_connector[level_idx] and init_connector: + output_block_connecters_in.append( + TimestepEmbedSequential(*[SpatialTransformer( + current_channel, num_heads, dim_head, + depth=1, context_dim=connector_out_channels)]) + ) + else: + output_block_connecters_in.append(None) + + if level_idx != 0 
and block_idx == self.num_noattn_blocks[level_idx]: + layers += [ + Upsample( + current_channel, use_conv=True, + dims=2, out_channels=current_channel)] + + output_blocks += [TimestepEmbedSequential(*layers)] + + self.output_blocks = nn.ModuleList(output_blocks) + self.output_block_connecters_in = nn.ModuleList(output_block_connecters_in) + + self.out = nn.Sequential( + normalization(current_channel), + nn.SiLU(), + zero_module(nn.Conv2d(model_channels, output_channels, 3, padding=1)), ) + + def forward(self, x, timesteps=None, context=None): + hs = [] + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False) + emb = self.time_embed(t_emb) + + h = x + is_video = h.ndim == 5 + + for module in self.input_blocks: + h = module(h, emb, context) + hs.append(h) + h = self.middle_block(h, emb, context) + for module in self.output_blocks: + h = th.cat([h, hs.pop()], dim=1) + h = module(h, emb, context) + return self.out(h) + + +class FCBlock(TimestepBlock): + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_checkpoint=False, + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_checkpoint = use_checkpoint + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + nn.Conv2d(channels, self.out_channels, 1, padding=0), ) + + self.emb_layers = nn.Sequential( + nn.SiLU(), + linear(emb_channels, self.out_channels, ), ) + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module(nn.Conv2d(self.out_channels, self.out_channels, 1, padding=0)), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + else: + self.skip_connection = nn.Conv2d(channels, self.out_channels, 1, padding=0) + + def forward(self, x, emb): + if len(x.shape) == 2: + x = x[:, :, None, None] + elif len(x.shape) == 4: + pass + else: + raise ValueError + y = checkpoint( + self._forward, (x, emb), self.parameters(), self.use_checkpoint) + if len(x.shape) == 2: + return y[:, :, 0, 0] + elif len(x.shape) == 4: + return y + + def _forward(self, x, emb): + h = self.in_layers(x) + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + h = h + emb_out + h = self.out_layers(h) + return self.skip_connection(x) + h + + +class Linear_MultiDim(nn.Linear): + def __init__(self, in_features, out_features, *args, **kwargs): + in_features = [in_features] if isinstance(in_features, int) else list(in_features) + out_features = [out_features] if isinstance(out_features, int) else list(out_features) + self.in_features_multidim = in_features + self.out_features_multidim = out_features + super().__init__( + np.array(in_features).prod(), + np.array(out_features).prod(), + *args, **kwargs) + + def forward(self, x): + shape = x.shape + n = len(self.in_features_multidim) + x = x.reshape(*shape[0:-n], self.in_features) + y = super().forward(x) + y = y.view(*shape[0:-n], *self.out_features_multidim) + return y + + +class FCBlock_MultiDim(FCBlock): + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_checkpoint=False, ): + channels = [channels] if isinstance(channels, int) else list(channels) + channels_all = np.array(channels).prod() + self.channels_multidim = channels + + if out_channels is not None: + out_channels = [out_channels] if isinstance(out_channels, int) else 
list(out_channels)
+            out_channels_all = np.array(out_channels).prod()
+            self.out_channels_multidim = out_channels
+        else:
+            out_channels_all = channels_all
+            self.out_channels_multidim = self.channels_multidim
+
+        self.channels = channels
+        super().__init__(
+            channels=channels_all,
+            emb_channels=emb_channels,
+            dropout=dropout,
+            out_channels=out_channels_all,
+            use_checkpoint=use_checkpoint, )
+
+    def forward(self, x, emb):
+        shape = x.shape
+        n = len(self.channels_multidim)
+        x = x.reshape(*shape[0:-n], self.channels, 1, 1)
+        x = x.view(-1, self.channels, 1, 1)
+        y = checkpoint(
+            self._forward, (x, emb), self.parameters(), self.use_checkpoint)
+        y = y.view(*shape[0:-n], -1)
+        y = y.view(*shape[0:-n], *self.out_channels_multidim)
+        return y
+
+
+@register('openai_unet_0dmd', version)
+class UNetModel0D_MultiDim(nn.Module):
+    def __init__(self,
+                 input_channels,
+                 model_channels,
+                 output_channels,
+                 context_dim=768,
+                 num_noattn_blocks=(2, 2, 2, 2),
+                 channel_mult=(1, 2, 4, 8),
+                 second_dim=(4, 4, 4, 4),
+                 with_attn=[True, True, True, False],
+                 channel_mult_connector=(1, 2, 4),
+                 num_noattn_blocks_connector=(1, 1, 1),
+                 second_dim_connector=(4, 4, 4),
+                 with_connector=[True, True, True, False],
+                 connector_output_channel=1280,
+                 num_heads=8,
+                 use_checkpoint=True,
+                 init_connector=True):
+
+        super().__init__()
+
+        FCBlockPreset = partial(FCBlock_MultiDim, dropout=0, use_checkpoint=use_checkpoint)
+
+        self.input_channels = input_channels
+        self.model_channels = model_channels
+        self.num_noattn_blocks = num_noattn_blocks
+        self.channel_mult = channel_mult
+        self.second_dim = second_dim
+        self.num_heads = num_heads
+
+        ##################
+        # Time embedding #
+        ##################
+
+        time_embed_dim = model_channels * 4
+        self.time_embed = nn.Sequential(
+            linear(model_channels, time_embed_dim),
+            nn.SiLU(),
+            linear(time_embed_dim, time_embed_dim), )
+
+        ##################
+        #   Connector    #
+        ##################
+
+        if init_connector:
+            sdim = second_dim[0]
+            current_channel = [model_channels // 2, sdim, 1]
+            self.connecters_out = nn.ModuleList([TimestepEmbedSequential(
+                Linear_MultiDim([input_channels, 1, 1], current_channel, bias=True))])
+            for level_idx, (mult, sdim) in enumerate(zip(channel_mult_connector, second_dim_connector)):
+                for _ in range(num_noattn_blocks_connector[level_idx]):
+                    layers = [
+                        FCBlockPreset(
+                            current_channel,
+                            time_embed_dim,
+                            out_channels=[mult * model_channels, sdim, 1], )]
+
+                    current_channel = [mult * model_channels, sdim, 1]
+                    self.connecters_out += [TimestepEmbedSequential(*layers)]
+
+                if level_idx != len(channel_mult_connector) - 1:
+                    self.connecters_out += [
+                        TimestepEmbedSequential(
+                            Linear_MultiDim(current_channel, current_channel, bias=True, ))]
+            out = TimestepEmbedSequential(
+                *[normalization(current_channel[0]),
+                  nn.SiLU(),
+                  Linear_MultiDim(current_channel, [connector_output_channel, 1, 1], bias=True, )])
+            self.connecters_out.append(out)
+            connector_out_channels = connector_output_channel
+        else:
+            with_connector = [False] * len(with_connector)
+
+        ################
+        # input_blocks #
+        ################
+        sdim = second_dim[0]
+        current_channel = [model_channels, sdim, 1]
+        input_blocks = [
+            TimestepEmbedSequential(
+                Linear_MultiDim([input_channels, 1, 1], current_channel, bias=True))]
+        input_block_channels = [current_channel]
+        input_block_connecters_in = [None]
+
+        for level_idx, (mult, sdim) in enumerate(zip(channel_mult, second_dim)):
+            for _ in range(self.num_noattn_blocks[level_idx]):
+                layers = [
+                    FCBlockPreset(
current_channel, + time_embed_dim, + out_channels=[mult * model_channels, sdim, 1], )] + + current_channel = [mult * model_channels, sdim, 1] + dim_head = current_channel[0] // num_heads + if with_attn[level_idx]: + layers += [ + SpatialTransformer( + current_channel[0], num_heads, dim_head, + depth=1, context_dim=context_dim, )] + + input_blocks += [TimestepEmbedSequential(*layers)] + input_block_channels.append(current_channel) + + if with_connector[level_idx]: + input_block_connecters_in.append( + TimestepEmbedSequential(*[SpatialTransformer( + current_channel[0], num_heads, dim_head, + depth=1, context_dim=connector_out_channels)]) + ) + else: + input_block_connecters_in.append(None) + + if level_idx != len(channel_mult) - 1: + input_blocks += [ + TimestepEmbedSequential( + Linear_MultiDim(current_channel, current_channel, bias=True, ))] + input_block_channels.append(current_channel) + input_block_connecters_in.append(None) + + self.input_blocks = nn.ModuleList(input_blocks) + self.input_block_connecters_in = nn.ModuleList(input_block_connecters_in) + + ################# + # middle_blocks # + ################# + middle_block = [ + FCBlockPreset( + current_channel, time_embed_dim, ), + SpatialTransformer( + current_channel[0], num_heads, dim_head, + depth=1, context_dim=context_dim, ), + FCBlockPreset( + current_channel, time_embed_dim, ), ] + self.middle_block = TimestepEmbedSequential(*middle_block) + + ################# + # output_blocks # + ################# + output_blocks = [] + output_block_connecters_in = [] + for level_idx, (mult, sdim) in list(enumerate(zip(channel_mult, second_dim)))[::-1]: + for block_idx in range(self.num_noattn_blocks[level_idx] + 1): + extra_channel = input_block_channels.pop() + layers = [ + FCBlockPreset( + [current_channel[0] + extra_channel[0]] + current_channel[1:], + time_embed_dim, + out_channels=[mult * model_channels, sdim, 1], )] + + current_channel = [mult * model_channels, sdim, 1] + dim_head = current_channel[0] // num_heads + if with_attn[level_idx]: + layers += [ + SpatialTransformer( + current_channel[0], num_heads, dim_head, + depth=1, context_dim=context_dim, )] + + if with_connector[level_idx] and init_connector: + output_block_connecters_in.append( + TimestepEmbedSequential(*[SpatialTransformer( + current_channel[0], num_heads, dim_head, + depth=1, context_dim=connector_out_channels)]) + ) + else: + output_block_connecters_in.append(None) + + if level_idx != 0 and block_idx == self.num_noattn_blocks[level_idx]: + layers += [ + Linear_MultiDim(current_channel, current_channel, bias=True, )] + + output_blocks += [TimestepEmbedSequential(*layers)] + + self.output_blocks = nn.ModuleList(output_blocks) + self.output_block_connecters_in = nn.ModuleList(output_block_connecters_in) + + self.out = nn.Sequential( + normalization(current_channel[0]), + nn.SiLU(), + zero_module(Linear_MultiDim(current_channel, [output_channels, 1, 1], bias=True, )), ) + + def forward(self, x, timesteps=None, context=None): + hs = [] + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False) + emb = self.time_embed(t_emb) + + h = x + for module in self.input_blocks: + h = module(h, emb, context) + hs.append(h) + h = self.middle_block(h, emb, context) + for module in self.output_blocks: + h = th.cat([h, hs.pop()], dim=1) + h = module(h, emb, context) + return self.out(h) + + +class dummy_class: + pass + + +def create_dummy_class(): + dummy_class_item = dummy_class() + + dummy_class_item.input_blocks = [] + 
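+    # Editor's note: the dummy mirrors the block layout of the real UNets
+    # (12 input blocks, a 3-part middle block, 12 output blocks) so that the
+    # zip() loops in UNetModelCoDi.forward stay aligned when a modality is
+    # absent.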
dummy_class_item.input_block_connecters_in = [] + for i in range(12): + dummy_class_item.input_blocks.append(range(2)) + dummy_class_item.input_block_connecters_in.append(None) + dummy_class_item.middle_block = range(3) + dummy_class_item.output_blocks = [] + dummy_class_item.output_block_connecters_in = [] + for i in range(12): + dummy_class_item.output_blocks.append(range(2)) + dummy_class_item.output_block_connecters_in.append(None) + return dummy_class_item + + +@register('openai_unet_codi', version) +class UNetModelCoDi(nn.Module): + def __init__(self, + unet_image_cfg, + unet_text_cfg, + unet_audio_cfg, + model_type): + + super().__init__() + + if 'video' in model_type or 'image' in model_type: + self.unet_image = get_model()(unet_image_cfg) + self.image_model_channels = self.unet_image.model_channels + else: + self.unet_image = create_dummy_class() + + if 'text' in model_type: + self.unet_text = get_model()(unet_text_cfg) + self.text_model_channels = self.unet_text.model_channels + else: + self.unet_text = create_dummy_class() + + if 'audio' in model_type: + self.unet_audio = get_model()(unet_audio_cfg) + self.audio_model_channels = self.unet_audio.model_channels + else: + self.unet_audio = create_dummy_class() + + if 'video_interp' in model_type: + self.unet_video_interp = get_model()(unet_image_cfg) + self.video_interp_model_channels = self.unet_video_interp.model_channels + + def forward(self, x, timesteps, condition, xtype, condition_types, x_0=[None], x_0_type='first_frame', + mix_weight={'audio': 1, 'text': 1, 'image': 1}): + # import torch + # device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu") + device = x[0].device + # Prepare conditioning + if isinstance(condition_types, str): + condition_types = [condition_types] + weights = np.array(list(map(mix_weight.get, condition_types))) + norm_weights = weights / weights.sum() + context = 0.0 + for i in range(len(condition)): + context += condition[i] * norm_weights[i] + + if x_0_type == 'first_last_frame' and x_0[0]: + hs = [] + x = x[0].to(device) + x_0 = x_0[0] + timesteps = timesteps.to(device) + + t_emb_video = timestep_embedding(timesteps, self.video_interp_model_channels, repeat_only=False).to(x) + emb = self.unet_video_interp.time_embed(t_emb_video) + h = x + for module in self.unet_video_interp.input_blocks: + h = module(h, emb, context, x_0) + hs.append(h) + self.unet_video_interp.middle_block(h, emb, context, x_0) + for module in self.unet_video_interp.output_blocks: + temp = hs.pop() + h = th.cat([h, temp], dim=1) + h = module(h, emb, context, x_0) + + out_all = [] + num_frames = h.shape[2] + h = rearrange(h, 'b c t h w -> (b t) c h w ') + out = self.unet_video_interp.out(h) + out = rearrange(out, '(b t) c h w -> b c t h w', t=num_frames) + return [out] + + # Prepare inputs + hs = [] + x = [temp.to(device) for temp in x] + timesteps = timesteps.to(device) + context = context.to(device) + if 'image' in xtype or 'video' in xtype: + t_emb_image = timestep_embedding(timesteps, self.image_model_channels, repeat_only=False).to(x[0]) + emb_image = self.unet_image.time_embed(t_emb_image) + if 'text' in xtype: + t_emb_text = timestep_embedding(timesteps, self.text_model_channels, repeat_only=False).to(x[0]) + emb_text = self.unet_text.time_embed(t_emb_text) + if 'audio' in xtype: + t_emb_audio = timestep_embedding(timesteps, self.audio_model_channels, repeat_only=False).to(x[0]) + emb_audio = self.unet_audio.time_embed(t_emb_audio) + + for i in range(len(xtype)): + if xtype[i] == 'text': + x[i] = 
x[i][:, :, None, None] + + if not isinstance(x_0, list): + x_0 = [x_0] + + if not x_0[0]: + x_0 = [None] * len(x) + + # Environment encoders + if len(xtype) > 1: # this means two outputs present and thus joint decoding + h_con = [temp for temp in x] + for i_con_in, t_con_in, a_con_in in zip( + self.unet_image.connecters_out, self.unet_text.connecters_out, self.unet_audio.connecters_out, + ): + for i, xtype_i in enumerate(xtype): + if xtype_i == 'audio': + h_con[i] = a_con_in(h_con[i], emb_audio, context) + elif xtype_i in ['video', 'image']: + h_con[i] = i_con_in(h_con[i], emb_image, context) + elif xtype_i == 'text': + h_con[i] = t_con_in(h_con[i], emb_text, context) + else: + raise + for i in range(len(h_con)): + if h_con[i].ndim == 5: + h_con[i] = h_con[i].mean(2).mean(2).mean(2).unsqueeze(1) + else: + h_con[i] = h_con[i].mean(2).mean(2).unsqueeze(1) + h_con[i] = h_con[i] / th.norm(h_con[i], dim=-1, keepdim=True) + else: + h_con = None + + # Joint / single generation + h = x + for (i_module, t_module, a_module, + i_con_in, t_con_in, a_con_in) \ + in zip( + self.unet_image.input_blocks, self.unet_text.input_blocks, self.unet_audio.input_blocks, + self.unet_image.input_block_connecters_in, self.unet_text.input_block_connecters_in, + self.unet_audio.input_block_connecters_in, + ): + h = [h_i for h_i in h] + for i, xtype_i in enumerate(xtype): + if xtype_i == 'audio': + h[i] = a_module(h[i], emb_audio, context, x_0[i]) + elif xtype_i in ['video', 'image']: + h[i] = i_module(h[i], emb_image, context, x_0[i]) + elif xtype_i == 'text': + h[i] = t_module(h[i], emb_text, context, x_0[i]) + else: + raise + + if i_con_in is not None and h_con is not None: + for i, xtype_i in enumerate(xtype): + if xtype_i == 'audio': + h[i] = a_con_in(h[i], context=h_con[i]) + elif xtype_i in ['video', 'image']: + h[i] = i_con_in(h[i], context=h_con[i]) + elif xtype_i == 'text': + h[i] = t_con_in(h[i], context=h_con[i]) + else: + raise + + hs.append(h) + + for i, xtype_i in enumerate(xtype): + if xtype_i == 'audio': + h[i] = self.unet_audio.middle_block(h[i], emb_audio, context, x_0[i]) + elif xtype_i in ['video', 'image']: + h[i] = self.unet_image.middle_block(h[i], emb_image, context, x_0[i]) + elif xtype_i == 'text': + h[i] = self.unet_text.middle_block(h[i], emb_text, context, x_0[i]) + else: + raise + + for (i_module, t_module, a_module, + i_con_in, t_con_in, a_con_in,) \ + in zip( + self.unet_image.output_blocks, self.unet_text.output_blocks, self.unet_audio.output_blocks, + self.unet_image.output_block_connecters_in, self.unet_text.output_block_connecters_in, + self.unet_audio.output_block_connecters_in, + ): + temp = hs.pop() + h_connector_out = [] + for i, xtype_i in enumerate(xtype): + h[i] = th.cat([h[i], temp[i]], dim=1) + if xtype_i == 'audio': + h[i] = a_module(h[i], emb_audio, context, x_0[i]) + elif xtype_i in ['video', 'image']: + h[i] = i_module(h[i], emb_image, context, x_0[i]) + elif xtype_i == 'text': + h[i] = t_module(h[i], emb_text, context, x_0[i]) + else: + raise + + if i_con_in is not None and h_con is not None: + for i, xtype_i in enumerate(xtype): + if xtype_i == 'audio': + h[i] = a_con_in(h[i], context=h_con[i]) + elif xtype_i in ['video', 'image']: + h[i] = i_con_in(h[i], context=h_con[i]) + elif xtype_i == 'text': + h[i] = t_con_in(h[i], context=h_con[i]) + else: + raise + + out_all = [] + for i, xtype_i in enumerate(xtype): + if xtype_i == 'video': + num_frames = h[i].shape[2] + h[i] = rearrange(h[i], 'b c t h w -> (b t) c h w ') + out = self.unet_image.out(h[i]) + out = 
rearrange(out, '(b t) c h w -> b c t h w', t=num_frames)
+            elif xtype_i == 'image':
+                out = self.unet_image.out(h[i])
+            elif xtype_i == 'text':
+                out = self.unet_text.out(h[i]).squeeze(-1).squeeze(-1)
+            elif xtype_i == 'audio':
+                out = self.unet_audio.out(h[i])
+            out_all.append(out)
+        return out_all
+
+
+@register('prova', version)
+class UNetModelCoDiProva(nn.Module):
+    def __init__(self,
+                 unet_frontal_cfg,
+                 unet_text_cfg,
+                 unet_lateral_cfg,
+                 model_type):
+
+        super().__init__()
+
+        if 'frontal' in model_type:
+            self.unet_frontal = get_model()(unet_frontal_cfg)
+            self.frontal_model_channels = self.unet_frontal.model_channels
+        else:
+            self.unet_frontal = create_dummy_class()
+
+        if 'text' in model_type:
+            self.unet_text = get_model()(unet_text_cfg)
+            self.text_model_channels = self.unet_text.model_channels
+        else:
+            self.unet_text = create_dummy_class()
+
+        if 'lateral' in model_type:
+            self.unet_lateral = get_model()(unet_lateral_cfg)
+            self.lateral_model_channels = self.unet_lateral.model_channels
+        else:
+            self.unet_lateral = create_dummy_class()
+
+    def freeze(self):
+        for param in self.parameters():
+            param.requires_grad = False
+
+    def unfreeze(self, modules):
+        for module in modules:
+            for param in module.parameters():
+                param.requires_grad = True
+
+    def forward(self, x, timesteps, condition, xtype, condition_types, x_0=[None], x_0_type='first_frame',
+                mix_weight={'lateral': 1, 'text': 1, 'frontal': 1}, env_enc=False):
+        # If env_enc is True, backpropagation must reach only the specific layers: those that are
+        # used when xtype has length greater than 1, and those used when h_con is not None.
+        # Lacking a better way to do this, the whole body is duplicated under an if.
+        if not env_enc:
+            device = x[0].device
+            # Prepare conditioning
+            if isinstance(condition_types, str):
+                condition_types = [condition_types]
+            weights = np.array(list(map(mix_weight.get, condition_types)))
+            norm_weights = weights / weights.sum()
+            context = 0.0
+            for i in range(len(condition)):
+                context += condition[i] * norm_weights[i]
+
+            # Prepare inputs
+            hs = []
+            x = [temp.to(device) for temp in x]
+            timesteps = timesteps.to(device)
+            context = context.to(device)
+            if 'frontal' in xtype:
+                t_emb_frontal = timestep_embedding(timesteps, self.frontal_model_channels, repeat_only=False).to(x[0])
+                emb_frontal = self.unet_frontal.time_embed(t_emb_frontal)
+            if 'text' in xtype:
+                t_emb_text = timestep_embedding(timesteps, self.text_model_channels, repeat_only=False).to(x[0])
+                emb_text = self.unet_text.time_embed(t_emb_text)
+            if 'lateral' in xtype:
+                t_emb_lateral = timestep_embedding(timesteps, self.lateral_model_channels, repeat_only=False).to(x[0])
+                emb_lateral = self.unet_lateral.time_embed(t_emb_lateral)
+
+            for i in range(len(xtype)):
+                if xtype[i] == 'text':
+                    x[i] = x[i][:, :, None, None]
+
+            if not isinstance(x_0, list):
+                x_0 = [x_0]
+
+            if x_0[0] is None:
+                x_0 = [None] * len(x)
+
+            # Environment encoders
+            if len(xtype) > 1:  # this means two outputs are present, and thus joint decoding
+                h_con = [temp for temp in x]
+                for i_con_in, t_con_in, a_con_in in zip(
+                        self.unet_frontal.connecters_out, self.unet_text.connecters_out, self.unet_lateral.connecters_out,
+                ):
+                    for i, xtype_i in enumerate(xtype):
+                        if xtype_i == 'lateral':
+                            h_con[i] = a_con_in(h_con[i], emb_lateral, context)
+                        elif xtype_i == 'frontal':
+                            h_con[i] = i_con_in(h_con[i], emb_frontal, context)
+                        elif xtype_i == 'text':
+                            h_con[i] = t_con_in(h_con[i], emb_text, context)
+                        else:
+                            raise
+                for i in range(len(h_con)):
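+                    # Pool each connector output down to a single embedding token
+                    # (mean over the remaining time/spatial axes), then L2-normalize
+                    # so each modality contributes a unit-norm context vector.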
+ if h_con[i].ndim == 5: + h_con[i] = h_con[i].mean(2).mean(2).mean(2).unsqueeze(1) + else: + h_con[i] = h_con[i].mean(2).mean(2).unsqueeze(1) + h_con[i] = h_con[i] / th.norm(h_con[i], dim=-1, keepdim=True) + else: + h_con = None + + # Joint / single generation + h = x + for (i_module, t_module, a_module, + i_con_in, t_con_in, a_con_in) \ + in zip( + self.unet_frontal.input_blocks, self.unet_text.input_blocks, self.unet_lateral.input_blocks, + self.unet_frontal.input_block_connecters_in, self.unet_text.input_block_connecters_in, + self.unet_lateral.input_block_connecters_in, + ): + h = [h_i for h_i in h] + for i, xtype_i in enumerate(xtype): + if xtype_i == 'lateral': + h[i] = a_module(h[i], emb_lateral, context, x_0[i]) + elif xtype_i == 'frontal': + h[i] = i_module(h[i], emb_frontal, context, x_0[i]) + elif xtype_i == 'text': + h[i] = t_module(h[i], emb_text, context, x_0[i]) + else: + raise + + if i_con_in is not None and h_con is not None: + for i, xtype_i in enumerate(xtype): + if xtype_i == 'lateral': + h[i] = a_con_in(h[i], context=h_con[i]) + elif xtype_i == 'frontal': + h[i] = i_con_in(h[i], context=h_con[i]) + elif xtype_i == 'text': + h[i] = t_con_in(h[i], context=h_con[i]) + else: + raise + + hs.append(h) + + for i, xtype_i in enumerate(xtype): + if xtype_i == 'lateral': + h[i] = self.unet_lateral.middle_block(h[i], emb_lateral, context, x_0[i]) + elif xtype_i == 'frontal': + h[i] = self.unet_frontal.middle_block(h[i], emb_frontal, context, x_0[i]) + elif xtype_i == 'text': + h[i] = self.unet_text.middle_block(h[i], emb_text, context, x_0[i]) + else: + raise + + for (i_module, t_module, a_module, + i_con_in, t_con_in, a_con_in,) \ + in zip( + self.unet_frontal.output_blocks, self.unet_text.output_blocks, self.unet_lateral.output_blocks, + self.unet_frontal.output_block_connecters_in, self.unet_text.output_block_connecters_in, + self.unet_lateral.output_block_connecters_in, + ): + temp = hs.pop() + h_connector_out = [] + for i, xtype_i in enumerate(xtype): + h[i] = th.cat([h[i], temp[i]], dim=1) + if xtype_i == 'lateral': + h[i] = a_module(h[i], emb_lateral, context, x_0[i]) + elif xtype_i == 'frontal': + h[i] = i_module(h[i], emb_frontal, context, x_0[i]) + elif xtype_i == 'text': + h[i] = t_module(h[i], emb_text, context, x_0[i]) + else: + raise + + if i_con_in is not None and h_con is not None: + for i, xtype_i in enumerate(xtype): + if xtype_i == 'lateral': + h[i] = a_con_in(h[i], context=h_con[i]) + elif xtype_i == 'frontal': + h[i] = i_con_in(h[i], context=h_con[i]) + elif xtype_i == 'text': + h[i] = t_con_in(h[i], context=h_con[i]) + else: + raise + + out_all = [] + for i, xtype_i in enumerate(xtype): + if xtype_i == 'frontal': + out = self.unet_frontal.out(h[i]) + elif xtype_i == 'text': + out = self.unet_text.out(h[i]).squeeze(-1).squeeze(-1) + elif xtype_i == 'lateral': + out = self.unet_lateral.out(h[i]) + out_all.append(out) + return out_all + else: + device = x[0].device + # Prepare conditioning + if isinstance(condition_types, str): + condition_types = [condition_types] + weights = np.array(list(map(mix_weight.get, condition_types))) + norm_weights = weights / weights.sum() + context = 0.0 + for i in range(len(condition)): + context += condition[i] * norm_weights[i] + + # Prepare inputs + hs = [] + x = [temp.to(device) for temp in x] + timesteps = timesteps.to(device) + context = context.to(device) + if 'frontal' in xtype: + t_emb_frontal = timestep_embedding(timesteps, self.frontal_model_channels, repeat_only=False).to(x[0]) + emb_frontal = 
self.unet_frontal.time_embed(t_emb_frontal)
+            if 'text' in xtype:
+                t_emb_text = timestep_embedding(timesteps, self.text_model_channels, repeat_only=False).to(x[0])
+                emb_text = self.unet_text.time_embed(t_emb_text)
+            if 'lateral' in xtype:
+                t_emb_lateral = timestep_embedding(timesteps, self.lateral_model_channels, repeat_only=False).to(x[0])
+                emb_lateral = self.unet_lateral.time_embed(t_emb_lateral)
+
+            for i in range(len(xtype)):
+                if xtype[i] == 'text':
+                    x[i] = x[i][:, :, None, None]
+
+            if not isinstance(x_0, list):
+                x_0 = [x_0]
+
+            if x_0[0] is None:
+                x_0 = [None] * len(x)
+
+            # Environment encoders; unfreeze their parameters before using them
+            if len(xtype) > 1:  # this means two outputs are present, and thus joint decoding
+                h_con = [temp for temp in x]
+                for i_con_in, t_con_in, a_con_in in zip(
+                        self.unet_frontal.connecters_out, self.unet_text.connecters_out,
+                        self.unet_lateral.connecters_out,
+                ):
+                    for i, xtype_i in enumerate(xtype):
+                        if xtype_i == 'lateral':
+                            h_con[i] = a_con_in(h_con[i], emb_lateral, context)
+                        elif xtype_i == 'frontal':
+                            h_con[i] = i_con_in(h_con[i], emb_frontal, context)
+                        elif xtype_i == 'text':
+                            h_con[i] = t_con_in(h_con[i], emb_text, context)
+                        else:
+                            raise
+                for i in range(len(h_con)):
+                    if h_con[i].ndim == 5:
+                        h_con[i] = h_con[i].mean(2).mean(2).mean(2).unsqueeze(1)
+                    else:
+                        h_con[i] = h_con[i].mean(2).mean(2).unsqueeze(1)
+                    h_con[i] = h_con[i] / th.norm(h_con[i], dim=-1, keepdim=True)
+            else:
+                h_con = None
+
+            # Joint / single generation
+            h = x
+            for (i_module, t_module, a_module,
+                 i_con_in, t_con_in, a_con_in) \
+                    in zip(
+                self.unet_frontal.input_blocks, self.unet_text.input_blocks, self.unet_lateral.input_blocks,
+                self.unet_frontal.input_block_connecters_in, self.unet_text.input_block_connecters_in,
+                self.unet_lateral.input_block_connecters_in,
+            ):
+                h = [h_i for h_i in h]
+                for i, xtype_i in enumerate(xtype):
+                    if xtype_i == 'lateral':
+                        h[i] = a_module(h[i], emb_lateral, context, x_0[i])
+                    elif xtype_i == 'frontal':
+                        h[i] = i_module(h[i], emb_frontal, context, x_0[i])
+                    elif xtype_i == 'text':
+                        h[i] = t_module(h[i], emb_text, context, x_0[i])
+                    else:
+                        raise
+
+                if i_con_in is not None and h_con is not None:
+                    for i, xtype_i in enumerate(xtype):
+                        if xtype_i == 'lateral':
+                            h[i] = a_con_in(h[i], context=h_con[i])
+                        elif xtype_i == 'frontal':
+                            h[i] = i_con_in(h[i], context=h_con[i])
+                        elif xtype_i == 'text':
+                            h[i] = t_con_in(h[i], context=h_con[i])
+                        else:
+                            raise
+
+                hs.append(h)
+
+            for i, xtype_i in enumerate(xtype):
+                if xtype_i == 'lateral':
+                    h[i] = self.unet_lateral.middle_block(h[i], emb_lateral, context, x_0[i])
+                elif xtype_i == 'frontal':
+                    h[i] = self.unet_frontal.middle_block(h[i], emb_frontal, context, x_0[i])
+                elif xtype_i == 'text':
+                    h[i] = self.unet_text.middle_block(h[i], emb_text, context, x_0[i])
+                else:
+                    raise
+
+            # Unfreeze the parameters of these modules before using them
+            for (i_module, t_module, a_module,
+                 i_con_in, t_con_in, a_con_in,) \
+                    in zip(
+                self.unet_frontal.output_blocks, self.unet_text.output_blocks, self.unet_lateral.output_blocks,
+                self.unet_frontal.output_block_connecters_in, self.unet_text.output_block_connecters_in,
+                self.unet_lateral.output_block_connecters_in,
+            ):
+                temp = hs.pop()
+                h_connector_out = []
+                for i, xtype_i in enumerate(xtype):
+                    h[i] = th.cat([h[i], temp[i]], dim=1)
+                    if xtype_i == 'lateral':
+                        h[i] = a_module(h[i], emb_lateral, context, x_0[i])
+                    elif xtype_i == 'frontal':
+                        h[i] = i_module(h[i], emb_frontal, context, x_0[i])
+                    elif xtype_i == 'text':
+                        h[i] = t_module(h[i],
emb_text, context, x_0[i]) + else: + raise + + if i_con_in is not None and h_con is not None: + for i, xtype_i in enumerate(xtype): + if xtype_i == 'lateral': + h[i] = a_con_in(h[i], context=h_con[i]) + elif xtype_i == 'frontal': + h[i] = i_con_in(h[i], context=h_con[i]) + elif xtype_i == 'text': + h[i] = t_con_in(h[i], context=h_con[i]) + else: + raise + + out_all = [] + for i, xtype_i in enumerate(xtype): + if xtype_i == 'frontal': + out = self.unet_frontal.out(h[i]) + elif xtype_i == 'text': + out = self.unet_text.out(h[i]).squeeze(-1).squeeze(-1) + elif xtype_i == 'lateral': + out = self.unet_lateral.out(h[i]) + out_all.append(out) + return out_all, h_con + + diff --git a/core/models/latent_diffusion/modules_attention.py b/core/models/latent_diffusion/modules_attention.py new file mode 100644 index 0000000000000000000000000000000000000000..dab77707fea573f53b5f2f73754414ade17450ce --- /dev/null +++ b/core/models/latent_diffusion/modules_attention.py @@ -0,0 +1,263 @@ +from inspect import isfunction +import math +import torch +import torch.nn.functional as F +from torch import nn, einsum +from einops import rearrange, repeat + +from .modules_conv import checkpoint + + +def exists(val): + return val is not None + + +def uniq(arr): + return {el: True for el in arr}.keys() + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def max_neg_value(t): + return -torch.finfo(t.dtype).max + + +def init_(tensor): + dim = tensor.shape[-1] + std = 1 / math.sqrt(dim) + tensor.uniform_(-std, std) + return tensor + + +# feedforward +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.proj = nn.Linear(dim_in, dim_out * 2) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = nn.Sequential( + nn.Linear(dim, inner_dim), + nn.GELU() + ) if not glu else GEGLU(dim, inner_dim) + + self.net = nn.Sequential( + project_in, + nn.Dropout(dropout), + nn.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. 
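+
+    Editor's sketch: typically wrapped around a block's final projection so a
+    freshly added branch starts as a no-op, e.g.
+
+        proj_out = zero_module(nn.Conv2d(320, 320, 1))
+        assert all((p == 0).all() for p in proj_out.parameters())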
+ """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def Normalize(in_channels): + return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True) + + +class LinearAttention(nn.Module): + def __init__(self, dim, heads=4, dim_head=32): + super().__init__() + self.heads = heads + hidden_dim = dim_head * heads + self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) + self.to_out = nn.Conv2d(hidden_dim, dim, 1) + + def forward(self, x): + b, c, h, w = x.shape + qkv = self.to_qkv(x) + q, k, v = rearrange(qkv, 'b (qkv heads c) h w -> qkv b heads c (h w)', heads=self.heads, qkv=3) + k = k.softmax(dim=-1) + context = torch.einsum('bhdn,bhen->bhde', k, v) + out = torch.einsum('bhde,bhdn->bhen', context, q) + out = rearrange(out, 'b heads c (h w) -> b (heads c) h w', heads=self.heads, h=h, w=w) + return self.to_out(out) + + +class SpatialSelfAttention(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = rearrange(q, 'b c h w -> b (h w) c') + k = rearrange(k, 'b c h w -> b c (h w)') + w_ = torch.einsum('bij,bjk->bik', q, k) + + w_ = w_ * (int(c) ** (-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = rearrange(v, 'b c h w -> b c (h w)') + w_ = rearrange(w_, 'b i j -> b j i') + h_ = torch.einsum('bij,bjk->bik', v, w_) + h_ = rearrange(h_, 'b c (h w) -> b c h w', h=h) + h_ = self.proj_out(h_) + + return x + h_ + + +class CrossAttention(nn.Module): + def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.): + super().__init__() + inner_dim = dim_head * heads + context_dim = default(context_dim, query_dim) + + self.scale = dim_head ** -0.5 + self.heads = heads + + self.to_q = nn.Linear(query_dim, inner_dim, bias=False) + self.to_k = nn.Linear(context_dim, inner_dim, bias=False) + self.to_v = nn.Linear(context_dim, inner_dim, bias=False) + + self.to_out = nn.Sequential( + nn.Linear(inner_dim, query_dim), + nn.Dropout(dropout) + ) + + def forward(self, x, context=None, mask=None): + h = self.heads + + q = self.to_q(x) + context = default(context, x) + k = self.to_k(context) + v = self.to_v(context) + + q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v)) + + sim = einsum('b i d, b j d -> b i j', q, k) * self.scale + + if exists(mask): + mask = rearrange(mask, 'b ... 
-> b (...)') + max_neg_value = -torch.finfo(sim.dtype).max + mask = repeat(mask, 'b j -> (b h) () j', h=h) + sim.masked_fill_(~mask, max_neg_value) + + # attention, what we cannot get enough of + attn = sim.softmax(dim=-1) + + out = einsum('b i j, b j d -> b i d', attn, v) + out = rearrange(out, '(b h) n d -> b n (h d)', h=h) + return self.to_out(out) + + +class BasicTransformerBlock(nn.Module): + def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True): + super().__init__() + self.attn1 = CrossAttention(query_dim=dim, heads=n_heads, dim_head=d_head, + dropout=dropout) # is a self-attention + self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff) + self.attn2 = CrossAttention(query_dim=dim, context_dim=context_dim, + heads=n_heads, dim_head=d_head, dropout=dropout) # is self-attn if context is none + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + self.norm3 = nn.LayerNorm(dim) + self.checkpoint = checkpoint + + def forward(self, x, context=None): + return checkpoint(self._forward, (x, context), self.parameters(), self.checkpoint) + + def _forward(self, x, context=None): + x = self.attn1(self.norm1(x)) + x + x = self.attn2(self.norm2(x), context=context) + x + x = self.ff(self.norm3(x)) + x + return x + + +class SpatialTransformer(nn.Module): + """ + Transformer block for image-like data. + First, project the input (aka embedding) + and reshape to b, t, d. + Then apply standard transformer action. + Finally, reshape to image + """ + + def __init__(self, in_channels, n_heads, d_head, + depth=1, dropout=0., context_dim=None): + super().__init__() + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = Normalize(in_channels) + + self.proj_in = nn.Conv2d(in_channels, + inner_dim, + kernel_size=1, + stride=1, + padding=0) + + self.transformer_blocks = nn.ModuleList( + [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim) + for d in range(depth)] + ) + + self.proj_out = zero_module(nn.Conv2d(inner_dim, + in_channels, + kernel_size=1, + stride=1, + padding=0)) + + def forward(self, x, context=None): + # note: if no context is given, cross-attention defaults to self-attention + b, c, h, w = x.shape + x_in = x + x = self.norm(x) + x = self.proj_in(x) + x = rearrange(x, 'b c h w -> b (h w) c') + for block in self.transformer_blocks: + x = block(x, context=context) + x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w) + x = self.proj_out(x) + return x + x_in diff --git a/core/models/latent_diffusion/modules_conv.py b/core/models/latent_diffusion/modules_conv.py new file mode 100644 index 0000000000000000000000000000000000000000..a07eabd6ec2e57abd57149d4009570229278f03e --- /dev/null +++ b/core/models/latent_diffusion/modules_conv.py @@ -0,0 +1,188 @@ +import os +import math +import torch +import torch.nn as nn +import numpy as np +from einops import repeat + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with torch.enable_grad(): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for 
detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + [x for x in ctx.input_params if x.requires_grad], + output_grads, + allow_unused=True, + ) + + grads = list(grads) + + input_grads = [] + for tensor in ctx.input_tensors + ctx.input_params: + if tensor.requires_grad: + input_grads.append(grads.pop(0)) # Get the next computed gradient + else: + input_grads.append(None) # No gradient required for this tensor + + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + tuple(input_grads) + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1) + else: + embedding = repeat(timesteps, 'b -> b d', d=dim) + return embedding + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def normalization(channels): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. + """ + return GroupNorm32(32, channels) + + +# PyTorch 1.7 has SiLU, but we support PyTorch 1.5. +class SiLU(nn.Module): + def forward(self, x): + return x * torch.sigmoid(x) + + +class GroupNorm32(nn.GroupNorm): + def forward(self, x): + # return super().forward(x.float()).type(x.dtype) + return super().forward(x) + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. 
+ """ + return nn.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +class HybridConditioner(nn.Module): + + def __init__(self, c_concat_config, c_crossattn_config): + super().__init__() + self.concat_conditioner = instantiate_from_config(c_concat_config) + self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) + + def forward(self, c_concat, c_crossattn): + c_concat = self.concat_conditioner(c_concat) + c_crossattn = self.crossattn_conditioner(c_crossattn) + return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]} + + +def noise_like(x, repeat=False): + noise = torch.randn_like(x) + if repeat: + bs = x.shape[0] + noise = noise[0:1].repeat(bs, *((1,) * (len(x.shape) - 1))) + return noise diff --git a/core/models/latent_diffusion/modules_video.py b/core/models/latent_diffusion/modules_video.py new file mode 100644 index 0000000000000000000000000000000000000000..eacb72b5c7f70a18183553a8f0467ad550cea5b1 --- /dev/null +++ b/core/models/latent_diffusion/modules_video.py @@ -0,0 +1,559 @@ +""" +https://github.com/lucidrains/make-a-video-pytorch +""" + +import math +import functools +from operator import mul + +import torch +from torch import nn, einsum +import torch.nn.functional as F + +from einops import rearrange, repeat, pack, unpack +from einops.layers.torch import Rearrange + +from .modules_conv import avg_pool_nd, zero_module, normalization, conv_nd + +# helper functions + + +def exists(val): + return val is not None + + +def default(val, d): + return val if exists(val) else d + + +def mul_reduce(tup): + return functools.reduce(mul, tup) + + +def divisible_by(numer, denom): + return (numer % denom) == 0 + + +mlist = nn.ModuleList + +# for time conditioning + + +class SinusoidalPosEmb(nn.Module): + def __init__(self, dim, theta = 10000): + super().__init__() + self.theta = theta + self.dim = dim + + def forward(self, x): + dtype, device = x.dtype, x.device + assert dtype == torch.float, 'input to sinusoidal pos emb must be a float type' + + half_dim = self.dim // 2 + emb = math.log(self.theta) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim, device = device, dtype = dtype) * -emb) + emb = rearrange(x, 'i -> i 1') * rearrange(emb, 'j -> 1 j') + return torch.cat((emb.sin(), emb.cos()), dim = -1).type(dtype) + + +class ChanLayerNorm(nn.Module): + def __init__(self, dim): + super().__init__() + self.g = nn.Parameter(torch.ones(dim, 1, 1, 1)) + + def forward(self, x): + eps = 1e-5 if x.dtype == torch.float32 else 1e-3 + var = torch.var(x, dim = 1, unbiased = False, keepdim = True) + mean = torch.mean(x, dim = 1, keepdim = True) + x = (x - mean) * var.clamp(min = eps).rsqrt() + dtype = self.g.dtype + return x.to(dtype) * self.g + + +def shift_token(t): + t, t_shift = t.chunk(2, dim = 1) + t_shift = F.pad(t_shift, (0, 0, 0, 0, 1, -1), value = 0.) 
+ return torch.cat((t, t_shift), dim = 1) + + +class LayerNorm(nn.Module): + def __init__(self, dim): + super().__init__() + self.g = nn.Parameter(torch.ones(dim)) + + def forward(self, x): + eps = 1e-5 if x.dtype == torch.float32 else 1e-3 + var = torch.var(x, dim = 1, unbiased = False, keepdim = True) + mean = torch.mean(x, dim = 1, keepdim = True) + return (x - mean) * var.clamp(min = eps).rsqrt() * self.g + + +# feedforward + +class GEGLU(nn.Module): + def forward(self, x): + x = x.float() + x, gate = x.chunk(2, dim = 1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, mult = 4): + super().__init__() + + inner_dim = int(dim * mult * 2 / 3) + self.proj_in = nn.Sequential( + nn.Conv3d(dim, inner_dim * 2, 1, bias = False), + GEGLU() + ) + + self.proj_out = nn.Sequential( + ChanLayerNorm(inner_dim), + nn.Conv3d(inner_dim, dim, 1, bias = False) + ) + + def forward(self, x, enable_time=True): + x = self.proj_in(x) + if enable_time: + x = shift_token(x) + return self.proj_out(x) + + +# feedforwa +# best relative positional encoding + +class ContinuousPositionBias(nn.Module): + """ from https://arxiv.org/abs/2111.09883 """ + + def __init__( + self, + *, + dim, + heads, + num_dims = 1, + layers = 2, + log_dist = True, + cache_rel_pos = False + ): + super().__init__() + self.num_dims = num_dims + self.log_dist = log_dist + + self.net = nn.ModuleList([]) + self.net.append(nn.Sequential(nn.Linear(self.num_dims, dim), nn.SiLU())) + + for _ in range(layers - 1): + self.net.append(nn.Sequential(nn.Linear(dim, dim), nn.SiLU())) + + self.net.append(nn.Linear(dim, heads)) + + self.cache_rel_pos = cache_rel_pos + self.register_buffer('rel_pos', None, persistent = False) + + @property + def device(self): + return next(self.parameters()).device + + @property + def dtype(self): + return next(self.parameters()).dtype + + def forward(self, *dimensions): + device = self.device + + if not exists(self.rel_pos) or not self.cache_rel_pos: + positions = [torch.arange(d, device = device) for d in dimensions] + grid = torch.stack(torch.meshgrid(*positions, indexing = 'ij')) + grid = rearrange(grid, 'c ... -> (...) 
c') + rel_pos = rearrange(grid, 'i c -> i 1 c') - rearrange(grid, 'j c -> 1 j c') + + if self.log_dist: + rel_pos = torch.sign(rel_pos) * torch.log(rel_pos.abs() + 1) + + self.register_buffer('rel_pos', rel_pos, persistent = False) + + rel_pos = self.rel_pos.to(self.dtype) + + for layer in self.net: + rel_pos = layer(rel_pos) + + return rearrange(rel_pos, 'i j h -> h i j') + +# helper classes + + +class Attention(nn.Module): + def __init__( + self, + dim, + dim_head = 64, + heads = 8 + ): + super().__init__() + self.heads = heads + self.scale = dim_head ** -0.5 + inner_dim = dim_head * heads + self.norm = LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias = False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias = False) + self.to_out = nn.Linear(inner_dim, dim, bias = False) + + nn.init.zeros_(self.to_out.weight.data) # identity with skip connection + + self.pos_embeds = nn.Parameter(torch.randn([1, 30, dim])) + self.frame_rate_embeds = nn.Parameter(torch.randn([1, 30, dim])) + + def forward( + self, + x, + context = None, + rel_pos_bias = None, + framerate = None, + ): + if framerate is not None: + x = x + self.pos_embeds[:, :x.shape[1]].repeat(x.shape[0], 1, 1) + x = x + self.frame_rate_embeds[:, framerate-1:framerate].repeat(x.shape[0], x.shape[1], 1) + + if context is None: + context = x + + x = self.norm(x) + context = self.norm(context) + + q, k, v = self.to_q(x), *self.to_kv(context).chunk(2, dim = -1) + + q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = self.heads), (q, k, v)) + + q = q * self.scale + sim = einsum('b h i d, b h j d -> b h i j', q, k) + + if exists(rel_pos_bias): + sim = sim + rel_pos_bias + + attn = sim.softmax(dim = -1) + + out = einsum('b h i j, b h j d -> b h i d', attn, v) + + out = rearrange(out, 'b h n d -> b n (h d)') + return self.to_out(out) + +# main contribution - pseudo 3d conv + + +class PseudoConv3d(nn.Module): + def __init__( + self, + dim, + dim_out = None, + kernel_size = 3, + *, + temporal_kernel_size = None, + **kwargs + ): + super().__init__() + dim_out = default(dim_out, dim) + temporal_kernel_size = default(temporal_kernel_size, kernel_size) + + self.spatial_conv = nn.Conv2d(dim, dim_out, kernel_size = kernel_size, padding = kernel_size // 2) + self.temporal_conv = nn.Conv1d(dim_out, dim_out, kernel_size = temporal_kernel_size, padding = temporal_kernel_size // 2) if kernel_size > 1 else None + + if exists(self.temporal_conv): + nn.init.dirac_(self.temporal_conv.weight.data) # initialized to be identity + nn.init.zeros_(self.temporal_conv.bias.data) + + def forward( + self, + x, + enable_time = True + ): + b, c, *_, h, w = x.shape + + is_video = x.ndim == 5 + enable_time &= is_video + + if is_video: + x = rearrange(x, 'b c t h w -> (b t) c h w') + + x = self.spatial_conv(x) + + if is_video: + x = rearrange(x, '(b t) c h w -> b c t h w', b = b) + + if not enable_time or not exists(self.temporal_conv): + return x + + x = rearrange(x, 'b c t h w -> (b h w) c t') + + x = self.temporal_conv(x) + + x = rearrange(x, '(b h w) c t -> b c t h w', h = h, w = w) + + return x + + +def frame_shift(x, shift_num=8): + num_frame = x.shape[2] + x = list(x.chunk(shift_num, 1)) + for i in range(shift_num): + if i > 0: + shifted = torch.cat([torch.zeros_like(x[i][:, :, :i]), x[i][:, :, :-i]], 2) + else: + shifted = x[i] + x[i] = shifted + return torch.cat(x, 1) + + +class ResBlockFrameShift(nn.Module): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. 
+ :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. + """ + + def __init__( + self, + channels, + dropout, + out_channels=None, + use_conv=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + ): + super().__init__() + self.channels = channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + + self.out_layers = nn.Sequential( + normalization(self.channels), + nn.SiLU(), + zero_module( + conv_nd(dims, self.channels, self.out_channels, 3, padding=1) + ), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, 3, padding=1 + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :return: an [N x C x ...] Tensor of outputs. + """ + num_frames = x.shape[2] + x = rearrange(x, 'b c t h w -> (b t) c h w') + + h = self.out_layers(x) + + h = rearrange(h, '(b t) c h w -> b c t h w', t=num_frames) + h = frame_shift(h) + h = rearrange(h, 'b c t h w -> (b t) c h w') + + out = self.skip_connection(x) + h + out = rearrange(out, '(b t) c h w -> b c t h w', t=num_frames) + return out + + +class ResBlockVideo(nn.Module): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. 
+ """ + + def __init__( + self, + channels, + dropout, + out_channels=None, + use_conv=False, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + ): + super().__init__() + self.channels = channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + conv_nd(dims, channels, self.out_channels, 3, padding=1), + ) + + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module( + conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1) + ), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, 3, padding=1 + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :return: an [N x C x ...] Tensor of outputs. + """ + num_frames = x.shape[2] + x = rearrange(x, 'b c t h w -> (b t) c h w ') + + h = x + h = self.in_layers(h) + h = self.out_layers(h) + + out = self.skip_connection(x) + h + out = rearrange(out, '(b t) c h w -> b c t h w', t=num_frames) + return out + + +class Downsample3D(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. 
+ """ + + def __init__(self, channels, use_conv, dims=2, stride=None, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 1 + if use_conv: + self.op = conv_nd( + dims, self.channels, self.out_channels, 3, stride=stride, padding=padding + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + + +class SpatioTemporalAttention(nn.Module): + def __init__( + self, + dim, + *, + dim_head = 64, + heads = 8, + use_resnet = False, + use_frame_shift = True, + use_context_att = False, + use_temp_att = True, + use_context = False, + ): + super().__init__() + self.use_resnet = use_resnet + self.use_frame_shift = use_frame_shift + self.use_context_att = use_context_att + self.use_temp_att = use_temp_att + + if use_resnet: + self.resblock = ResBlockVideo(dim, dropout=0, dims=2) + if use_frame_shift: + self.frameshiftblock = ResBlockFrameShift(dim, dropout=0, dims=2) + if use_context_att: + self.downsample_x0 = Downsample3D(4, True, 2, out_channels=dim) + self.temporal_attn_x0 = Attention(dim = dim, dim_head = dim_head, heads = heads) + + if use_temp_att: + self.temporal_attn = Attention(dim = dim, dim_head = dim_head, heads = heads) + self.temporal_rel_pos_bias = ContinuousPositionBias(dim = dim // 2, heads = heads, num_dims = 1) + + self.ff = FeedForward(dim = dim, mult = 4) + + def forward( + self, + x, + x_0 = None, + enable_time = True, + framerate = 4, + is_video = False, + ): + + x_ndim = x.ndim + is_video = x_ndim == 5 or is_video + enable_time &= is_video + + if enable_time: + img_size = x.shape[-1] + if self.use_temp_att: + if x_ndim == 5: + b, c, *_, h, w = x.shape + x = rearrange(x, 'b c t h w -> (b h w) t c') + time_rel_pos_bias = self.temporal_rel_pos_bias(x.shape[1]) + + if self.use_context_att and x_0 is not None: + x_0_img_size = x_0.shape[-1] + kernel_size = x_0_img_size // img_size + x_0 = F.avg_pool2d(x_0, [kernel_size, kernel_size], stride=None, padding=0, ceil_mode=False, count_include_pad=True, divisor_override=None) + x_0 = self.downsample_x0(x_0).unsqueeze(2) + if x_ndim == 5: + x_0 = rearrange(x_0, 'b c t h w -> (b h w) t c') + x = self.temporal_attn_x0(x, context=x_0, rel_pos_bias = time_rel_pos_bias, framerate = framerate) + x + + if self.use_temp_att: + x = self.temporal_attn(x, rel_pos_bias = time_rel_pos_bias, framerate = framerate) + x + if x_ndim == 5: + x = rearrange(x, '(b h w) t c -> b c t h w', w = w, h = h) + x = self.ff(x, enable_time=enable_time) + x + + if self.use_frame_shift: + x = self.frameshiftblock(x) + + if self.use_resnet: + x = self.resblock(x) + return x diff --git a/core/models/latent_diffusion/vae/__pycache__/audioldm.cpython-38.pyc b/core/models/latent_diffusion/vae/__pycache__/audioldm.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..16a757c5c4e9e04cf8ac9b5e0732f8fd5cedd76e Binary files /dev/null and b/core/models/latent_diffusion/vae/__pycache__/audioldm.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/__pycache__/autokl.cpython-38.pyc b/core/models/latent_diffusion/vae/__pycache__/autokl.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bfe50e8aa901531ab9b93dc9ca59fd61a83d7463 Binary files /dev/null and b/core/models/latent_diffusion/vae/__pycache__/autokl.cpython-38.pyc differ diff --git 
a/core/models/latent_diffusion/vae/__pycache__/optimus.cpython-38.pyc b/core/models/latent_diffusion/vae/__pycache__/optimus.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..573531902bf9284f58d76fedf39b06f2aa142364
Binary files /dev/null and b/core/models/latent_diffusion/vae/__pycache__/optimus.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm.py b/core/models/latent_diffusion/vae/audioldm.py
new file mode 100644
index 0000000000000000000000000000000000000000..a40ccadaab9841e8afd3684478f9162bde4e5fb9
--- /dev/null
+++ b/core/models/latent_diffusion/vae/audioldm.py
@@ -0,0 +1,114 @@
+import torch
+from .audioldm_modules.latent_diffusion.ema import *
+from .audioldm_modules.variational_autoencoder.modules import Encoder, Decoder
+from .audioldm_modules.variational_autoencoder.distributions import DiagonalGaussianDistribution
+
+from .audioldm_modules.hifigan.utilities import get_vocoder, vocoder_infer
+
+from .audioldm_modules.audio.tools import wav_to_fbank
+from .audioldm_modules.audio.stft import TacotronSTFT
+
+from ...common.get_model import register
+
+
+@register('audioldm_autoencoder')
+class AudioAutoencoderKL(nn.Module):
+    def __init__(
+        self,
+        ddconfig,
+        lossconfig=None,
+        image_key="fbank",
+        embed_dim=8,
+        time_shuffle=1,
+        subband=1,
+        ckpt_path=None,
+        reload_from_ckpt=None,
+        ignore_keys=[],
+        colorize_nlabels=None,
+        monitor=None,
+        base_learning_rate=1e-5,
+    ):
+        super().__init__()
+
+        self.encoder = Encoder(**ddconfig)
+        self.decoder = Decoder(**ddconfig)
+
+        self.subband = int(subband)
+        self.image_key = image_key  # consulted by freq_split_subband / freq_merge_subband
+
+        if self.subband > 1:
+            print("Use subband decomposition %s" % self.subband)
+
+        self.quant_conv = torch.nn.Conv2d(2 * ddconfig["z_channels"], 2 * embed_dim, 1)
+        self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1)
+
+        self.vocoder = get_vocoder(None, "cpu")
+        self.embed_dim = embed_dim
+
+        self.fn_STFT = TacotronSTFT()
+
+        self.time_shuffle = time_shuffle
+        self.reload_from_ckpt = reload_from_ckpt
+        self.reloaded = False
+        self.mean, self.std = None, None
+        self.flag_first_run = True  # forward() logs the latent size once
+
+    def encode(self, x, time=10.0):
+        temp_dtype = x.dtype
+        x = wav_to_fbank(
+            x.float(), target_length=int(time * 102.4), fn_STFT=self.fn_STFT.float()
+        ).to(x.device).to(temp_dtype)
+        x = self.freq_split_subband(x)
+        h = self.encoder(x)
+        moments = self.quant_conv(h)
+        posterior = DiagonalGaussianDistribution(moments)
+        return posterior
+
+    def decode(self, z):
+        z = self.post_quant_conv(z)
+        dec = self.decoder(z)
+        dec = self.freq_merge_subband(dec)
+        return dec
+
+    def decode_to_waveform(self, dec):
+        dec = dec.squeeze(1).permute(0, 2, 1)
+        wav_reconstruction = vocoder_infer(dec, self.vocoder)
+        return wav_reconstruction
+
+    def forward(self, input, sample_posterior=True):
+
+        posterior = self.encode(input)
+        if sample_posterior:
+            z = posterior.sample()
+        else:
+            z = posterior.mode()
+
+        if self.flag_first_run:
+            print("Latent size: ", z.size())
+            self.flag_first_run = False
+
+        dec = self.decode(z)
+
+        return dec, posterior
+
+    def freq_split_subband(self, fbank):
+        if self.subband == 1 or self.image_key != "stft":
+            return fbank
+
+        bs, ch, tstep, fbins = fbank.size()
+
+        assert fbank.size(-1) % self.subband == 0
+        assert ch == 1
+
+        return (
+            fbank.squeeze(1)
+            .reshape(bs, tstep, self.subband, fbins // self.subband)
+            .permute(0, 2, 1, 3)
+        )
+
+    def freq_merge_subband(self, subband_fbank):
+        if self.subband == 1 or self.image_key != "stft":
+            return subband_fbank
+        assert subband_fbank.size(1) == self.subband  # Channel dimension
+        bs, sub_ch, tstep, fbins = subband_fbank.size()
+        return subband_fbank.permute(0, 2, 1, 3).reshape(bs, tstep, -1).unsqueeze(1)
\ No newline at end of file
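A minimal usage sketch of AudioAutoencoderKL above (not part of the diff): it assumes an instance already built from the repository's audioldm_autoencoder config; `vae`, `wav`, and the shapes are illustrative assumptions, and construction is elided.

import torch
from core.models.latent_diffusion.vae.audioldm import AudioAutoencoderKL

# `vae` is a hypothetical, already-constructed instance; its ddconfig comes
# from the audioldm_autoencoder entry in configs/model/audioldm.yaml.
vae: AudioAutoencoderKL = ...
vae.eval()

wav = torch.randn(2, 163840) * 0.1            # two mono clips of 10.24 s at 16 kHz
with torch.no_grad():
    posterior = vae.encode(wav)               # waveform -> mel fbank -> diagonal Gaussian
    z = posterior.sample()                    # latent with embed_dim channels
    mel = vae.decode(z)                       # reconstructed mel spectrogram
    wav_rec = vae.decode_to_waveform(mel)     # int16 waveform via the HiFi-GAN vocoder

diff --git a/core/models/latent_diffusion/vae/audioldm_modules/__init__.py b/core/models/latent_diffusion/vae/audioldm_modules/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/__pycache__/__init__.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3b6a611235af946c9895fd86b59d05ce9382b0e4
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/__pycache__/__init__.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/audio/__init__.py b/core/models/latent_diffusion/vae/audioldm_modules/audio/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/__init__.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..524d4af6f86ac5797f1535a810c83f2c7c4042b8
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/__init__.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/audio_processing.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/audio_processing.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b373fe761e039124741b1fd8826f51d73c566c60
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/audio_processing.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/stft.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/stft.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8b283c6db335e0f5792a95d259b40fe0c3efadf9
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/stft.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/tools.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/tools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..89180ed83a5cceca2317297d90d00c71187a31a3
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/audio/__pycache__/tools.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/audio/audio_processing.py b/core/models/latent_diffusion/vae/audioldm_modules/audio/audio_processing.py
new file mode 100644
index 0000000000000000000000000000000000000000..77a4057aa82f226f68474f4c2a19eba84510d663
--- /dev/null
+++ b/core/models/latent_diffusion/vae/audioldm_modules/audio/audio_processing.py
@@ -0,0 +1,100 @@
+import torch
+import numpy as np
+import librosa.util as librosa_util
+from scipy.signal import get_window
+
+
+def window_sumsquare(
+    window,
+    n_frames,
+    hop_length,
+    win_length,
+    n_fft,
+    dtype=np.float32,
+    norm=None,
+):
+    """
+    # from librosa 0.6
+    Compute the sum-square envelope of a window function at a given hop 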
length.
+
+    This is used to estimate modulation effects induced by windowing
+    observations in short-time Fourier transforms.
+
+    Parameters
+    ----------
+    window : string, tuple, number, callable, or list-like
+        Window specification, as in `get_window`
+
+    n_frames : int > 0
+        The number of analysis frames
+
+    hop_length : int > 0
+        The number of samples to advance between frames
+
+    win_length : [optional]
+        The length of the window function. By default, this matches `n_fft`.
+
+    n_fft : int > 0
+        The length of each analysis frame.
+
+    dtype : np.dtype
+        The data type of the output
+
+    Returns
+    -------
+    wss : np.ndarray, shape=`(n_fft + hop_length * (n_frames - 1))`
+        The sum-squared envelope of the window function
+    """
+    if win_length is None:
+        win_length = n_fft
+
+    n = n_fft + hop_length * (n_frames - 1)
+    x = np.zeros(n, dtype=dtype)
+
+    # Compute the squared window at the desired length
+    win_sq = get_window(window, win_length, fftbins=True)
+    win_sq = librosa_util.normalize(win_sq, norm=norm) ** 2
+    win_sq = librosa_util.pad_center(win_sq, size=n_fft)  # size is keyword-only in recent librosa
+
+    # Fill the envelope
+    for i in range(n_frames):
+        sample = i * hop_length
+        x[sample : min(n, sample + n_fft)] += win_sq[: max(0, min(n_fft, n - sample))]
+    return x
+
+
+def griffin_lim(magnitudes, stft_fn, n_iters=30):
+    """
+    PARAMS
+    ------
+    magnitudes: spectrogram magnitudes
+    stft_fn: STFT class with transform (STFT) and inverse (ISTFT) methods
+    """
+
+    angles = np.angle(np.exp(2j * np.pi * np.random.rand(*magnitudes.size())))
+    angles = angles.astype(np.float32)
+    angles = torch.autograd.Variable(torch.from_numpy(angles))
+    signal = stft_fn.inverse(magnitudes, angles).squeeze(1)
+
+    for i in range(n_iters):
+        _, angles = stft_fn.transform(signal)
+        signal = stft_fn.inverse(magnitudes, angles).squeeze(1)
+    return signal
+
+
+def dynamic_range_compression(x, normalize_fun=torch.log, C=1, clip_val=1e-5):
+    """
+    PARAMS
+    ------
+    C: compression factor
+    """
+    return normalize_fun(torch.clamp(x, min=clip_val) * C)
+
+
+def dynamic_range_decompression(x, C=1):
+    """
+    PARAMS
+    ------
+    C: compression factor used to compress
+    """
+    return torch.exp(x) / C
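A short sketch of Griffin-Lim phase recovery with the helpers above (not part of the diff; the variable names are hypothetical, and STFT is the class defined in stft.py just below):

import torch
from core.models.latent_diffusion.vae.audioldm_modules.audio.stft import STFT
from core.models.latent_diffusion.vae.audioldm_modules.audio.audio_processing import griffin_lim

stft_fn = STFT(filter_length=1024, hop_length=160, win_length=1024)
wave = torch.randn(1, 16000) * 0.1           # one second of 16 kHz audio
magnitude, _phase = stft_fn.transform(wave)
# start from random phase and refine it over n_iters Griffin-Lim iterations
recovered = griffin_lim(magnitude, stft_fn, n_iters=30)

diff --git a/core/models/latent_diffusion/vae/audioldm_modules/audio/stft.py b/core/models/latent_diffusion/vae/audioldm_modules/audio/stft.py
new file mode 100644
index 0000000000000000000000000000000000000000..76591eb7cd76afbf7a8c35b2216cc3895726848c
--- /dev/null
+++ b/core/models/latent_diffusion/vae/audioldm_modules/audio/stft.py
@@ -0,0 +1,180 @@
+import torch
+import torch.nn.functional as F
+import numpy as np
+from scipy.signal import get_window
+from librosa.util import pad_center, tiny
+from librosa.filters import mel as librosa_mel_fn
+
+from .audio_processing import (
+    dynamic_range_compression,
+    dynamic_range_decompression,
+    window_sumsquare,
+)
+
+
+class STFT(torch.nn.Module):
+    """adapted from Prem Seetharaman's https://github.com/pseeth/pytorch-stft"""
+
+    def __init__(self, filter_length, hop_length, win_length, window="hann"):
+        super(STFT, self).__init__()
+        self.filter_length = filter_length
+        self.hop_length = hop_length
+        self.win_length = win_length
+        self.window = window
+        self.forward_transform = None
+        scale = self.filter_length / self.hop_length
+        fourier_basis = np.fft.fft(np.eye(self.filter_length))
+
+        cutoff = int((self.filter_length / 2 + 1))
+        fourier_basis = np.vstack(
+            [np.real(fourier_basis[:cutoff, :]), np.imag(fourier_basis[:cutoff, :])]
+        )
+
+        forward_basis = torch.FloatTensor(fourier_basis[:, None, :])
+        inverse_basis = 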
torch.FloatTensor(
+            np.linalg.pinv(scale * fourier_basis).T[:, None, :]
+        )
+
+        if window is not None:
+            assert filter_length >= win_length
+            # get window and zero center pad it to filter_length
+            fft_window = get_window(window, win_length, fftbins=True)
+            fft_window = pad_center(fft_window, size=filter_length)
+            fft_window = torch.from_numpy(fft_window).float()
+
+            # window the bases
+            forward_basis *= fft_window
+            inverse_basis *= fft_window
+
+        self.register_buffer("forward_basis", forward_basis.float())
+        self.register_buffer("inverse_basis", inverse_basis.float())
+
+    def transform(self, input_data):
+        num_batches = input_data.size(0)
+        num_samples = input_data.size(1)
+
+        self.num_samples = num_samples
+
+        # similar to librosa, reflect-pad the input
+        input_data = input_data.view(num_batches, 1, num_samples)
+        input_data = F.pad(
+            input_data.unsqueeze(1),
+            (int(self.filter_length / 2), int(self.filter_length / 2), 0, 0),
+            mode="reflect",
+        )
+        input_data = input_data.squeeze(1)
+
+        forward_transform = F.conv1d(
+            input_data,
+            torch.autograd.Variable(self.forward_basis, requires_grad=False).to(input_data.dtype),
+            stride=self.hop_length,
+            padding=0,
+        ).cpu()
+
+        cutoff = int((self.filter_length / 2) + 1)
+        real_part = forward_transform[:, :cutoff, :]
+        imag_part = forward_transform[:, cutoff:, :]
+
+        magnitude = torch.sqrt(real_part**2 + imag_part**2)
+        phase = torch.autograd.Variable(torch.atan2(imag_part.data, real_part.data))
+
+        return magnitude, phase
+
+    def inverse(self, magnitude, phase):
+        recombine_magnitude_phase = torch.cat(
+            [magnitude * torch.cos(phase), magnitude * torch.sin(phase)], dim=1
+        )
+
+        inverse_transform = F.conv_transpose1d(
+            recombine_magnitude_phase,
+            torch.autograd.Variable(self.inverse_basis, requires_grad=False).to(recombine_magnitude_phase.dtype),
+            stride=self.hop_length,
+            padding=0,
+        )
+
+        if self.window is not None:
+            window_sum = window_sumsquare(
+                self.window,
+                magnitude.size(-1),
+                hop_length=self.hop_length,
+                win_length=self.win_length,
+                n_fft=self.filter_length,
+                dtype=np.float32,
+            )
+            # remove modulation effects
+            approx_nonzero_indices = torch.from_numpy(
+                np.where(window_sum > tiny(window_sum))[0]
+            )
+            window_sum = torch.autograd.Variable(
+                torch.from_numpy(window_sum), requires_grad=False
+            )
+            window_sum = window_sum.to(inverse_transform.device)  # align devices before dividing
+            inverse_transform[:, :, approx_nonzero_indices] /= window_sum[
+                approx_nonzero_indices
+            ]
+
+            # scale by hop ratio
+            inverse_transform *= float(self.filter_length) / self.hop_length
+
+        inverse_transform = inverse_transform[:, :, int(self.filter_length / 2) :]
+        inverse_transform = inverse_transform[:, :, : -int(self.filter_length / 2)]
+
+        return inverse_transform
+
+    def forward(self, input_data):
+        self.magnitude, self.phase = self.transform(input_data)
+        reconstruction = self.inverse(self.magnitude, self.phase)
+        return reconstruction
+
+
+class TacotronSTFT(torch.nn.Module):
+    def __init__(
+        self,
+        filter_length=1024,
+        hop_length=160,
+        win_length=1024,
+        n_mel_channels=64,
+        sampling_rate=16000,
+        mel_fmin=0,
+        mel_fmax=8000,
+    ):
+        super(TacotronSTFT, self).__init__()
+        self.n_mel_channels = n_mel_channels
+        self.sampling_rate = sampling_rate
+        self.stft_fn = STFT(filter_length, hop_length, win_length)
+        mel_basis = librosa_mel_fn(
+            sr=sampling_rate, n_fft=filter_length, n_mels=n_mel_channels, fmin=mel_fmin, fmax=mel_fmax
+        )
+        mel_basis = torch.from_numpy(mel_basis).float()
+        self.register_buffer("mel_basis", mel_basis)
+
+    def spectral_normalize(self, magnitudes, normalize_fun):
+        output = 
dynamic_range_compression(magnitudes, normalize_fun)
+        return output
+
+    def spectral_de_normalize(self, magnitudes):
+        output = dynamic_range_decompression(magnitudes)
+        return output
+
+    def mel_spectrogram(self, y, normalize_fun=torch.log):
+        """Computes mel-spectrograms from a batch of waves
+        PARAMS
+        ------
+        y: Variable(torch.FloatTensor) with shape (B, T) in range [-1, 1]
+
+        RETURNS
+        -------
+        mel_output: torch.FloatTensor of shape (B, n_mel_channels, T)
+        """
+        assert torch.min(y.data) >= -1, torch.min(y.data)
+        assert torch.max(y.data) <= 1, torch.max(y.data)
+
+        magnitudes, phases = self.stft_fn.transform(y)
+        magnitudes = magnitudes.data
+        mel_output = torch.matmul(self.mel_basis.to(magnitudes), magnitudes)
+        mel_output = self.spectral_normalize(mel_output, normalize_fun)
+        energy = torch.norm(magnitudes, dim=1)
+
+        log_magnitudes = self.spectral_normalize(magnitudes, normalize_fun)
+
+        return mel_output, log_magnitudes, energy
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/audio/tools.py b/core/models/latent_diffusion/vae/audioldm_modules/audio/tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..837ed133ce172a2326c7cada66a40ef562747e1e
--- /dev/null
+++ b/core/models/latent_diffusion/vae/audioldm_modules/audio/tools.py
@@ -0,0 +1,70 @@
+import torch
+import numpy as np
+import torchaudio
+
+
+def get_mel_from_wav(audio, _stft):
+    dtype = audio.dtype
+    audio = torch.clip(audio.unsqueeze(0), -1, 1).to(_stft.mel_basis.device)  # follow the STFT buffers' device
+    audio = torch.autograd.Variable(audio, requires_grad=False).to(dtype)
+    melspec, log_magnitudes_stft, energy = _stft.mel_spectrogram(audio)
+    melspec = torch.squeeze(melspec, 0)
+    log_magnitudes_stft = (
+        torch.squeeze(log_magnitudes_stft, 0)
+    )
+    energy = torch.squeeze(energy, 0)
+    return melspec, log_magnitudes_stft, energy
+
+
+def _pad_spec(fbank, target_length=1024):
+    n_frames = fbank.shape[0]
+    p = target_length - n_frames
+    # cut and pad
+    if p > 0:
+        m = torch.nn.ZeroPad2d((0, 0, 0, p))
+        fbank = m(fbank)
+    elif p < 0:
+        fbank = fbank[0:target_length, :]
+
+    if fbank.size(-1) % 2 != 0:
+        fbank = fbank[..., :-1]
+
+    return fbank
+
+
+def pad_wav(waveform, segment_length):
+    batch_size, waveform_length = waveform.shape
+    assert waveform_length > 100, "Waveform is too short, %s" % waveform_length
+    if waveform_length == segment_length:
+        return waveform
+    elif waveform_length > segment_length:
+        return waveform[:, :segment_length]
+    elif waveform_length < segment_length:
+        temp_wav = torch.zeros((batch_size, segment_length))
+        temp_wav[:, :waveform_length] = waveform
+        return temp_wav
+
+
+def normalize_wav(waveform):
+    waveform = waveform - torch.mean(waveform, 1, keepdim=True)
+    waveform = waveform / (torch.max(torch.abs(waveform), 1).values + 1e-8).unsqueeze(1)
+    return waveform * 0.5
+
+
+def process_wav_file(waveform, segment_length):
+    waveform = normalize_wav(waveform)
+    waveform = pad_wav(waveform, segment_length)
+    return waveform
+
+
+def wav_to_fbank(waveform, target_length=1024, fn_STFT=None):
+    assert fn_STFT is not None
+
+    # normalize to [-0.5, 0.5], then pad/crop to the target segment
+    waveform = process_wav_file(waveform, target_length * 160)  # hop size is 160
+    outputs = []
+    for waveform_i in waveform:
+        fbank, log_magnitudes_stft, energy = get_mel_from_wav(waveform_i, fn_STFT)
+        fbank = fbank[:, :target_length].T
+        outputs.append(fbank)
+    return torch.stack(outputs, 0).unsqueeze(1)
\ No newline at end of file
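A small usage sketch for wav_to_fbank above (not part of the diff; the waveform values and names are illustrative):

import torch
from core.models.latent_diffusion.vae.audioldm_modules.audio.stft import TacotronSTFT
from core.models.latent_diffusion.vae.audioldm_modules.audio.tools import wav_to_fbank

fn_STFT = TacotronSTFT()          # defaults: 1024-point FFT, hop 160, 64 mel bins, 16 kHz
wav = torch.randn(2, 163840)      # two clips; 1024 frames * hop 160 = 163840 samples
fbank = wav_to_fbank(wav, target_length=1024, fn_STFT=fn_STFT)
print(fbank.shape)                # torch.Size([2, 1, 1024, 64])

diff --git a/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__init__.py 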
b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e0ae476fe58c48e998c56234a55b871beba4042d --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__init__.py @@ -0,0 +1,7 @@ +from .models import Generator + + +class AttrDict(dict): + def __init__(self, *args, **kwargs): + super(AttrDict, self).__init__(*args, **kwargs) + self.__dict__ = self diff --git a/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/__init__.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb76367c371cb69f132de805e593c0ecc4741e38 Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/__init__.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/models.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/models.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a744509a17b6f2bf7a413b647b2a563faedaef13 Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/models.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/utilities.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/utilities.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8af9e602a1aea35db68ff7cf23acb54634db78d5 Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/__pycache__/utilities.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/audioldm_modules/hifigan/models.py b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/models.py new file mode 100644 index 0000000000000000000000000000000000000000..c4382cc39de0463f9b7c0f33f037dbc233e7cb36 --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/models.py @@ -0,0 +1,174 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.nn import Conv1d, ConvTranspose1d +from torch.nn.utils import weight_norm, remove_weight_norm + +LRELU_SLOPE = 0.1 + + +def init_weights(m, mean=0.0, std=0.01): + classname = m.__class__.__name__ + if classname.find("Conv") != -1: + m.weight.data.normal_(mean, std) + + +def get_padding(kernel_size, dilation=1): + return int((kernel_size * dilation - dilation) / 2) + + +class ResBlock(torch.nn.Module): + def __init__(self, h, channels, kernel_size=3, dilation=(1, 3, 5)): + super(ResBlock, self).__init__() + self.h = h + self.convs1 = nn.ModuleList( + [ + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[0], + padding=get_padding(kernel_size, dilation[0]), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[1], + padding=get_padding(kernel_size, dilation[1]), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=dilation[2], + padding=get_padding(kernel_size, dilation[2]), + ) + ), + ] + ) + self.convs1.apply(init_weights) + + self.convs2 = nn.ModuleList( + [ + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + padding=get_padding(kernel_size, 1), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + 
padding=get_padding(kernel_size, 1), + ) + ), + weight_norm( + Conv1d( + channels, + channels, + kernel_size, + 1, + dilation=1, + padding=get_padding(kernel_size, 1), + ) + ), + ] + ) + self.convs2.apply(init_weights) + + def forward(self, x): + for c1, c2 in zip(self.convs1, self.convs2): + xt = F.leaky_relu(x, LRELU_SLOPE) + xt = c1(xt) + xt = F.leaky_relu(xt, LRELU_SLOPE) + xt = c2(xt) + x = xt + x + return x + + def remove_weight_norm(self): + for l in self.convs1: + remove_weight_norm(l) + for l in self.convs2: + remove_weight_norm(l) + + +class Generator(torch.nn.Module): + def __init__(self, h): + super(Generator, self).__init__() + self.h = h + self.num_kernels = len(h.resblock_kernel_sizes) + self.num_upsamples = len(h.upsample_rates) + self.conv_pre = weight_norm( + Conv1d(h.num_mels, h.upsample_initial_channel, 7, 1, padding=3) + ) + resblock = ResBlock + + self.ups = nn.ModuleList() + for i, (u, k) in enumerate(zip(h.upsample_rates, h.upsample_kernel_sizes)): + self.ups.append( + weight_norm( + ConvTranspose1d( + h.upsample_initial_channel // (2**i), + h.upsample_initial_channel // (2 ** (i + 1)), + k, + u, + padding=(k - u) // 2, + ) + ) + ) + + self.resblocks = nn.ModuleList() + for i in range(len(self.ups)): + ch = h.upsample_initial_channel // (2 ** (i + 1)) + for j, (k, d) in enumerate( + zip(h.resblock_kernel_sizes, h.resblock_dilation_sizes) + ): + self.resblocks.append(resblock(h, ch, k, d)) + + self.conv_post = weight_norm(Conv1d(ch, 1, 7, 1, padding=3)) + self.ups.apply(init_weights) + self.conv_post.apply(init_weights) + + def forward(self, x): + x = self.conv_pre(x) + for i in range(self.num_upsamples): + x = F.leaky_relu(x, LRELU_SLOPE) + x = self.ups[i](x) + xs = None + for j in range(self.num_kernels): + if xs is None: + xs = self.resblocks[i * self.num_kernels + j](x) + else: + xs += self.resblocks[i * self.num_kernels + j](x) + x = xs / self.num_kernels + x = F.leaky_relu(x) + x = self.conv_post(x) + x = torch.tanh(x) + + return x + + def remove_weight_norm(self): + # print("Removing weight norm...") + for l in self.ups: + remove_weight_norm(l) + for l in self.resblocks: + l.remove_weight_norm() + remove_weight_norm(self.conv_pre) + remove_weight_norm(self.conv_post) diff --git a/core/models/latent_diffusion/vae/audioldm_modules/hifigan/utilities.py b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/utilities.py new file mode 100644 index 0000000000000000000000000000000000000000..d22f7476d019141d11453aec48882ddab3df6124 --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/hifigan/utilities.py @@ -0,0 +1,85 @@ +import os +import json + +import torch +import numpy as np + +import core.models.latent_diffusion.vae.audioldm_modules.hifigan as hifigan + +HIFIGAN_16K_64 = { + "resblock": "1", + "num_gpus": 6, + "batch_size": 16, + "learning_rate": 0.0002, + "adam_b1": 0.8, + "adam_b2": 0.99, + "lr_decay": 0.999, + "seed": 1234, + "upsample_rates": [5, 4, 2, 2, 2], + "upsample_kernel_sizes": [16, 16, 8, 4, 4], + "upsample_initial_channel": 1024, + "resblock_kernel_sizes": [3, 7, 11], + "resblock_dilation_sizes": [[1, 3, 5], [1, 3, 5], [1, 3, 5]], + "segment_size": 8192, + "num_mels": 64, + "num_freq": 1025, + "n_fft": 1024, + "hop_size": 160, + "win_size": 1024, + "sampling_rate": 16000, + "fmin": 0, + "fmax": 8000, + "fmax_for_loss": None, + "num_workers": 4, + "dist_config": { + "dist_backend": "nccl", + "dist_url": "tcp://localhost:54321", + "world_size": 1, + }, +} + + +def get_available_checkpoint_keys(model, ckpt): + print("==> Attemp 
to reload from %s" % ckpt)
+    state_dict = torch.load(ckpt)["state_dict"]
+    current_state_dict = model.state_dict()
+    new_state_dict = {}
+    for k in state_dict.keys():
+        if (
+            k in current_state_dict.keys()
+            and current_state_dict[k].size() == state_dict[k].size()
+        ):
+            new_state_dict[k] = state_dict[k]
+        else:
+            print("==> WARNING: Skipping %s" % k)
+    print(
+        "%s out of %s keys are matched"
+        % (len(new_state_dict.keys()), len(state_dict.keys()))
+    )
+    return new_state_dict
+
+
+def get_param_num(model):
+    num_param = sum(param.numel() for param in model.parameters())
+    return num_param
+
+
+def get_vocoder(config, device):
+    config = hifigan.AttrDict(HIFIGAN_16K_64)  # the config argument is ignored; the bundled 16 kHz / 64-mel setup is always used
+    vocoder = hifigan.Generator(config)
+    vocoder.eval()
+    vocoder.remove_weight_norm()
+    vocoder.to(device)
+    return vocoder
+
+
+def vocoder_infer(mels, vocoder, lengths=None):
+    with torch.no_grad():
+        wavs = vocoder(mels).squeeze(1)
+
+    wavs = (wavs.cpu().numpy() * 32768).astype("int16")
+
+    if lengths is not None:
+        wavs = wavs[:, :lengths]
+
+    return wavs
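A minimal sketch of the vocoder path above (not part of the diff): get_vocoder only builds the HiFi-GAN architecture here, with weights presumably restored from a checkpoint elsewhere in the repository; the tensors and names below are illustrative.

import torch
from core.models.latent_diffusion.vae.audioldm_modules.hifigan.utilities import (
    get_vocoder,
    vocoder_infer,
)

vocoder = get_vocoder(None, "cpu")   # always builds the HIFIGAN_16K_64 generator
mels = torch.randn(2, 64, 1024)      # (batch, num_mels, mel frames)
wavs = vocoder_infer(mels, vocoder)  # int16 numpy array; 160x upsampling -> 1024 * 160 samples per clip

diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__init__.py b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/__init__.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b6fc00e9d0f44f0d006802a919a4c25c0a25abce
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/__init__.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/attention.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/attention.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..207353f316728968e4192fc290a753b9d4b483c1
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/attention.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/ema.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/ema.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..483910a79049bebced147c56b1bf792a7280b78c
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/ema.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/util.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/util.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5111eebbbe14995e9cec620908ef667113a37d3d
Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/__pycache__/util.cpython-38.pyc differ
diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/attention.py b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/attention.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa389683f54a2d54d23488012a0780a20a0e7d42
--- /dev/null
+++ 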
b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/attention.py @@ -0,0 +1,469 @@ +from inspect import isfunction +import math +import torch +import torch.nn.functional as F +from torch import nn +from einops import rearrange + +from .util import checkpoint + + +def exists(val): + return val is not None + + +def uniq(arr): + return {el: True for el in arr}.keys() + + +def default(val, d): + if exists(val): + return val + return d() if isfunction(d) else d + + +def max_neg_value(t): + return -torch.finfo(t.dtype).max + + +def init_(tensor): + dim = tensor.shape[-1] + std = 1 / math.sqrt(dim) + tensor.uniform_(-std, std) + return tensor + + +# feedforward +class GEGLU(nn.Module): + def __init__(self, dim_in, dim_out): + super().__init__() + self.proj = nn.Linear(dim_in, dim_out * 2) + + def forward(self, x): + x, gate = self.proj(x).chunk(2, dim=-1) + return x * F.gelu(gate) + + +class FeedForward(nn.Module): + def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.0): + super().__init__() + inner_dim = int(dim * mult) + dim_out = default(dim_out, dim) + project_in = ( + nn.Sequential(nn.Linear(dim, inner_dim), nn.GELU()) + if not glu + else GEGLU(dim, inner_dim) + ) + + self.net = nn.Sequential( + project_in, nn.Dropout(dropout), nn.Linear(inner_dim, dim_out) + ) + + def forward(self, x): + return self.net(x) + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def Normalize(in_channels): + return torch.nn.GroupNorm( + num_groups=32, num_channels=in_channels, eps=1e-6, affine=True + ) + + +class LinearAttention(nn.Module): + def __init__(self, dim, heads=4, dim_head=32): + super().__init__() + self.heads = heads + hidden_dim = dim_head * heads + self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias=False) + self.to_out = nn.Conv2d(hidden_dim, dim, 1) + + def forward(self, x): + b, c, h, w = x.shape + qkv = self.to_qkv(x) + q, k, v = rearrange( + qkv, "b (qkv heads c) h w -> qkv b heads c (h w)", heads=self.heads, qkv=3 + ) + k = k.softmax(dim=-1) + context = torch.einsum("bhdn,bhen->bhde", k, v) + out = torch.einsum("bhde,bhdn->bhen", context, q) + out = rearrange( + out, "b heads c (h w) -> b (heads c) h w", heads=self.heads, h=h, w=w + ) + return self.to_out(out) + + +class SpatialSelfAttention(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.k = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.v = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.proj_out = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = rearrange(q, "b c h w -> b (h w) c") + k = rearrange(k, "b c h w -> b c (h w)") + w_ = torch.einsum("bij,bjk->bik", q, k) + + w_ = w_ * (int(c) ** (-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = rearrange(v, "b c h w -> b c (h w)") + w_ = rearrange(w_, "b i j -> b j i") + h_ = torch.einsum("bij,bjk->bik", v, w_) + h_ = rearrange(h_, "b c (h w) -> b c h w", h=h) + h_ = self.proj_out(h_) + + return x + h_ + + +class CrossAttention(nn.Module): + """ + 
### Cross Attention Layer + This falls-back to self-attention when conditional embeddings are not specified. + """ + + # use_flash_attention: bool = True + use_flash_attention: bool = False + + def __init__( + self, + query_dim, + context_dim=None, + heads=8, + dim_head=64, + dropout=0.0, + is_inplace: bool = True, + ): + # def __init__(self, d_model: int, d_cond: int, n_heads: int, d_head: int, is_inplace: bool = True): + """ + :param d_model: is the input embedding size + :param n_heads: is the number of attention heads + :param d_head: is the size of a attention head + :param d_cond: is the size of the conditional embeddings + :param is_inplace: specifies whether to perform the attention softmax computation inplace to + save memory + """ + super().__init__() + + self.is_inplace = is_inplace + self.n_heads = heads + self.d_head = dim_head + + # Attention scaling factor + self.scale = dim_head**-0.5 + + # The normal self-attention layer + if context_dim is None: + context_dim = query_dim + + # Query, key and value mappings + d_attn = dim_head * heads + self.to_q = nn.Linear(query_dim, d_attn, bias=False) + self.to_k = nn.Linear(context_dim, d_attn, bias=False) + self.to_v = nn.Linear(context_dim, d_attn, bias=False) + + # Final linear layer + self.to_out = nn.Sequential(nn.Linear(d_attn, query_dim), nn.Dropout(dropout)) + + # Setup [flash attention](https://github.com/HazyResearch/flash-attention). + # Flash attention is only used if it's installed + # and `CrossAttention.use_flash_attention` is set to `True`. + try: + # You can install flash attention by cloning their Github repo, + # [https://github.com/HazyResearch/flash-attention](https://github.com/HazyResearch/flash-attention) + # and then running `python setup.py install` + from flash_attn.flash_attention import FlashAttention + + self.flash = FlashAttention() + # Set the scale for scaled dot-product attention. 
+ self.flash.softmax_scale = self.scale + # Set to `None` if it's not installed + except ImportError: + self.flash = None + + def forward(self, x, context=None, mask=None): + """ + :param x: are the input embeddings of shape `[batch_size, height * width, d_model]` + :param cond: is the conditional embeddings of shape `[batch_size, n_cond, d_cond]` + """ + + # If `cond` is `None` we perform self attention + has_cond = context is not None + if not has_cond: + context = x + + # Get query, key and value vectors + q = self.to_q(x) + k = self.to_k(context) + v = self.to_v(context) + + # Use flash attention if it's available and the head size is less than or equal to `128` + if ( + CrossAttention.use_flash_attention + and self.flash is not None + and not has_cond + and self.d_head <= 128 + ): + return self.flash_attention(q, k, v) + # Otherwise, fallback to normal attention + else: + return self.normal_attention(q, k, v) + + def flash_attention(self, q: torch.Tensor, k: torch.Tensor, v: torch.Tensor): + """ + #### Flash Attention + :param q: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` + :param k: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` + :param v: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` + """ + + # Get batch size and number of elements along sequence axis (`width * height`) + batch_size, seq_len, _ = q.shape + + # Stack `q`, `k`, `v` vectors for flash attention, to get a single tensor of + # shape `[batch_size, seq_len, 3, n_heads * d_head]` + qkv = torch.stack((q, k, v), dim=2) + # Split the heads + qkv = qkv.view(batch_size, seq_len, 3, self.n_heads, self.d_head) + + # Flash attention works for head sizes `32`, `64` and `128`, so we have to pad the heads to + # fit this size. 
+ if self.d_head <= 32: + pad = 32 - self.d_head + elif self.d_head <= 64: + pad = 64 - self.d_head + elif self.d_head <= 128: + pad = 128 - self.d_head + else: + raise ValueError(f"Head size ${self.d_head} too large for Flash Attention") + + # Pad the heads + if pad: + qkv = torch.cat( + (qkv, qkv.new_zeros(batch_size, seq_len, 3, self.n_heads, pad)), dim=-1 + ) + + # Compute attention + # $$\underset{seq}{softmax}\Bigg(\frac{Q K^\top}{\sqrt{d_{key}}}\Bigg)V$$ + # This gives a tensor of shape `[batch_size, seq_len, n_heads, d_padded]` + # TODO here I add the dtype changing + out, _ = self.flash(qkv.type(torch.float16)) + # Truncate the extra head size + out = out[:, :, :, : self.d_head].float() + # Reshape to `[batch_size, seq_len, n_heads * d_head]` + out = out.reshape(batch_size, seq_len, self.n_heads * self.d_head) + + # Map to `[batch_size, height * width, d_model]` with a linear layer + return self.to_out(out) + + def normal_attention(self, q: torch.Tensor, k: torch.Tensor, v: torch.Tensor): + """ + #### Normal Attention + + :param q: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` + :param k: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` + :param v: are the query vectors before splitting heads, of shape `[batch_size, seq, d_attn]` + """ + + # Split them to heads of shape `[batch_size, seq_len, n_heads, d_head]` + q = q.view(*q.shape[:2], self.n_heads, -1) # [bs, 64, 20, 32] + k = k.view(*k.shape[:2], self.n_heads, -1) # [bs, 1, 20, 32] + v = v.view(*v.shape[:2], self.n_heads, -1) + + # Calculate attention $\frac{Q K^\top}{\sqrt{d_{key}}}$ + attn = torch.einsum("bihd,bjhd->bhij", q, k) * self.scale + + # Compute softmax + # $$\underset{seq}{softmax}\Bigg(\frac{Q K^\top}{\sqrt{d_{key}}}\Bigg)$$ + if self.is_inplace: + half = attn.shape[0] // 2 + attn[half:] = attn[half:].softmax(dim=-1) + attn[:half] = attn[:half].softmax(dim=-1) + else: + attn = attn.softmax(dim=-1) + + # Compute attention output + # $$\underset{seq}{softmax}\Bigg(\frac{Q K^\top}{\sqrt{d_{key}}}\Bigg)V$$ + # attn: [bs, 20, 64, 1] + # v: [bs, 1, 20, 32] + out = torch.einsum("bhij,bjhd->bihd", attn, v) + # Reshape to `[batch_size, height * width, n_heads * d_head]` + out = out.reshape(*out.shape[:2], -1) + # Map to `[batch_size, height * width, d_model]` with a linear layer + return self.to_out(out) + + +# class CrossAttention(nn.Module): +# def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.): +# super().__init__() +# inner_dim = dim_head * heads +# context_dim = default(context_dim, query_dim) + +# self.scale = dim_head ** -0.5 +# self.heads = heads + +# self.to_q = nn.Linear(query_dim, inner_dim, bias=False) +# self.to_k = nn.Linear(context_dim, inner_dim, bias=False) +# self.to_v = nn.Linear(context_dim, inner_dim, bias=False) + +# self.to_out = nn.Sequential( +# nn.Linear(inner_dim, query_dim), +# nn.Dropout(dropout) +# ) + +# def forward(self, x, context=None, mask=None): +# h = self.heads + +# q = self.to_q(x) +# context = default(context, x) +# k = self.to_k(context) +# v = self.to_v(context) + +# q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v)) + +# sim = einsum('b i d, b j d -> b i j', q, k) * self.scale + +# if exists(mask): +# mask = rearrange(mask, 'b ... 
-> b (...)') +# max_neg_value = -torch.finfo(sim.dtype).max +# mask = repeat(mask, 'b j -> (b h) () j', h=h) +# sim.masked_fill_(~mask, max_neg_value) + +# # attention, what we cannot get enough of +# attn = sim.softmax(dim=-1) + +# out = einsum('b i j, b j d -> b i d', attn, v) +# out = rearrange(out, '(b h) n d -> b n (h d)', h=h) +# return self.to_out(out) + + +class BasicTransformerBlock(nn.Module): + def __init__( + self, + dim, + n_heads, + d_head, + dropout=0.0, + context_dim=None, + gated_ff=True, + checkpoint=True, + ): + super().__init__() + self.attn1 = CrossAttention( + query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout + ) # is a self-attention + self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff) + self.attn2 = CrossAttention( + query_dim=dim, + context_dim=context_dim, + heads=n_heads, + dim_head=d_head, + dropout=dropout, + ) # is self-attn if context is none + self.norm1 = nn.LayerNorm(dim) + self.norm2 = nn.LayerNorm(dim) + self.norm3 = nn.LayerNorm(dim) + self.checkpoint = checkpoint + + def forward(self, x, context=None): + if context is None: + return checkpoint(self._forward, (x,), self.parameters(), self.checkpoint) + else: + return checkpoint( + self._forward, (x, context), self.parameters(), self.checkpoint + ) + + def _forward(self, x, context=None): + x = self.attn1(self.norm1(x)) + x + x = self.attn2(self.norm2(x), context=context) + x + x = self.ff(self.norm3(x)) + x + return x + + +class SpatialTransformer(nn.Module): + """ + Transformer block for image-like data. + First, project the input (aka embedding) + and reshape to b, t, d. + Then apply standard transformer action. + Finally, reshape to image + """ + + def __init__( + self, + in_channels, + n_heads, + d_head, + depth=1, + dropout=0.0, + context_dim=None, + no_context=False, + ): + super().__init__() + + if no_context: + context_dim = None + + self.in_channels = in_channels + inner_dim = n_heads * d_head + self.norm = Normalize(in_channels) + + self.proj_in = nn.Conv2d( + in_channels, inner_dim, kernel_size=1, stride=1, padding=0 + ) + + self.transformer_blocks = nn.ModuleList( + [ + BasicTransformerBlock( + inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim + ) + for d in range(depth) + ] + ) + + self.proj_out = zero_module( + nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0) + ) + + def forward(self, x, context=None): + # note: if no context is given, cross-attention defaults to self-attention + b, c, h, w = x.shape + x_in = x + x = self.norm(x) + x = self.proj_in(x) + x = rearrange(x, "b c h w -> b (h w) c") + for block in self.transformer_blocks: + x = block(x, context=context) + x = rearrange(x, "b (h w) c -> b c h w", h=h, w=w) + x = self.proj_out(x) + return x + x_in diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ddim.py b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ddim.py new file mode 100644 index 0000000000000000000000000000000000000000..53c3490f5c894e2b80e09d50e7f0b367691ea2a9 --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ddim.py @@ -0,0 +1,377 @@ +"""SAMPLING ONLY.""" + +import torch +import numpy as np +from tqdm import tqdm + +from audioldm.latent_diffusion.util import ( + make_ddim_sampling_parameters, + make_ddim_timesteps, + noise_like, + extract_into_tensor, +) + + +class DDIMSampler(object): + def __init__(self, model, schedule="linear", **kwargs): + super().__init__() + self.model = model + self.ddpm_num_timesteps = 
model.num_timesteps + self.schedule = schedule + + def register_buffer(self, name, attr): + if type(attr) == torch.Tensor: + if attr.device != torch.device("cuda"): + attr = attr.to(torch.device("cuda")) + setattr(self, name, attr) + + def make_schedule( + self, ddim_num_steps, ddim_discretize="uniform", ddim_eta=0.0, verbose=True + ): + self.ddim_timesteps = make_ddim_timesteps( + ddim_discr_method=ddim_discretize, + num_ddim_timesteps=ddim_num_steps, + num_ddpm_timesteps=self.ddpm_num_timesteps, + verbose=verbose, + ) + alphas_cumprod = self.model.alphas_cumprod + assert ( + alphas_cumprod.shape[0] == self.ddpm_num_timesteps + ), "alphas have to be defined for each timestep" + to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device) + + self.register_buffer("betas", to_torch(self.model.betas)) + self.register_buffer("alphas_cumprod", to_torch(alphas_cumprod)) + self.register_buffer( + "alphas_cumprod_prev", to_torch(self.model.alphas_cumprod_prev) + ) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer( + "sqrt_alphas_cumprod", to_torch(np.sqrt(alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_one_minus_alphas_cumprod", + to_torch(np.sqrt(1.0 - alphas_cumprod.cpu())), + ) + self.register_buffer( + "log_one_minus_alphas_cumprod", to_torch(np.log(1.0 - alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_recip_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod.cpu())) + ) + self.register_buffer( + "sqrt_recipm1_alphas_cumprod", + to_torch(np.sqrt(1.0 / alphas_cumprod.cpu() - 1)), + ) + + # ddim sampling parameters + ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters( + alphacums=alphas_cumprod.cpu(), + ddim_timesteps=self.ddim_timesteps, + eta=ddim_eta, + verbose=verbose, + ) + self.register_buffer("ddim_sigmas", ddim_sigmas) + self.register_buffer("ddim_alphas", ddim_alphas) + self.register_buffer("ddim_alphas_prev", ddim_alphas_prev) + self.register_buffer("ddim_sqrt_one_minus_alphas", np.sqrt(1.0 - ddim_alphas)) + sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt( + (1 - self.alphas_cumprod_prev) + / (1 - self.alphas_cumprod) + * (1 - self.alphas_cumprod / self.alphas_cumprod_prev) + ) + self.register_buffer( + "ddim_sigmas_for_original_num_steps", sigmas_for_original_sampling_steps + ) + + @torch.no_grad() + def sample( + self, + S, + batch_size, + shape, + conditioning=None, + callback=None, + normals_sequence=None, + img_callback=None, + quantize_x0=False, + eta=0.0, + mask=None, + x0=None, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + verbose=True, + x_T=None, + log_every_t=100, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ... 
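+        # (for classifier-free guidance this is typically the encoding of an
+        # empty prompt, batched to the same size as `conditioning`)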
+ **kwargs, + ): + if conditioning is not None: + if isinstance(conditioning, dict): + cbs = conditioning[list(conditioning.keys())[0]].shape[0] + if cbs != batch_size: + print( + f"Warning: Got {cbs} conditionings but batch-size is {batch_size}" + ) + else: + if conditioning.shape[0] != batch_size: + print( + f"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}" + ) + + self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose) + # sampling + C, H, W = shape + size = (batch_size, C, H, W) + samples, intermediates = self.ddim_sampling( + conditioning, + size, + callback=callback, + img_callback=img_callback, + quantize_denoised=quantize_x0, + mask=mask, + x0=x0, + ddim_use_original_steps=False, + noise_dropout=noise_dropout, + temperature=temperature, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + x_T=x_T, + log_every_t=log_every_t, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + ) + return samples, intermediates + + @torch.no_grad() + def ddim_sampling( + self, + cond, + shape, + x_T=None, + ddim_use_original_steps=False, + callback=None, + timesteps=None, + quantize_denoised=False, + mask=None, + x0=None, + img_callback=None, + log_every_t=100, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + ): + device = self.model.betas.device + b = shape[0] + if x_T is None: + img = torch.randn(shape, device=device) + else: + img = x_T + + if timesteps is None: + timesteps = ( + self.ddpm_num_timesteps + if ddim_use_original_steps + else self.ddim_timesteps + ) + elif timesteps is not None and not ddim_use_original_steps: + subset_end = ( + int( + min(timesteps / self.ddim_timesteps.shape[0], 1) + * self.ddim_timesteps.shape[0] + ) + - 1 + ) + timesteps = self.ddim_timesteps[:subset_end] + + intermediates = {"x_inter": [img], "pred_x0": [img]} + time_range = ( + reversed(range(0, timesteps)) + if ddim_use_original_steps + else np.flip(timesteps) + ) + total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0] + # print(f"Running DDIM Sampling with {total_steps} timesteps") + + # iterator = gr.Progress().tqdm(time_range, desc="DDIM Sampler", total=total_steps) + iterator = tqdm(time_range, desc="DDIM Sampler", total=total_steps) + + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full((b,), step, device=device, dtype=torch.long) + if mask is not None: + assert x0 is not None + img_orig = self.model.q_sample( + x0, ts + ) # TODO deterministic forward pass? 
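+                # Inpainting-style update: keep the known region from a noised copy
+                # of `x0` where mask == 1 and let the sampler fill in the rest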
+ img = ( + img_orig * mask + (1.0 - mask) * img + ) # In the first sampling step, img is pure gaussian noise + + outs = self.p_sample_ddim( + img, + cond, + ts, + index=index, + use_original_steps=ddim_use_original_steps, + quantize_denoised=quantize_denoised, + temperature=temperature, + noise_dropout=noise_dropout, + score_corrector=score_corrector, + corrector_kwargs=corrector_kwargs, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + ) + img, pred_x0 = outs + if callback: + callback(i) + if img_callback: + img_callback(pred_x0, i) + + if index % log_every_t == 0 or index == total_steps - 1: + intermediates["x_inter"].append(img) + intermediates["pred_x0"].append(pred_x0) + + return img, intermediates + + @torch.no_grad() + def stochastic_encode(self, x0, t, use_original_steps=False, noise=None): + # fast, but does not allow for exact reconstruction + # t serves as an index to gather the correct alphas + if use_original_steps: + sqrt_alphas_cumprod = self.sqrt_alphas_cumprod + sqrt_one_minus_alphas_cumprod = self.sqrt_one_minus_alphas_cumprod + else: + sqrt_alphas_cumprod = torch.sqrt(self.ddim_alphas) + sqrt_one_minus_alphas_cumprod = self.ddim_sqrt_one_minus_alphas + + if noise is None: + noise = torch.randn_like(x0) + + return ( + extract_into_tensor(sqrt_alphas_cumprod, t, x0.shape) * x0 + + extract_into_tensor(sqrt_one_minus_alphas_cumprod, t, x0.shape) * noise + ) + + @torch.no_grad() + def decode( + self, + x_latent, + cond, + t_start, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + use_original_steps=False, + ): + + timesteps = ( + np.arange(self.ddpm_num_timesteps) + if use_original_steps + else self.ddim_timesteps + ) + timesteps = timesteps[:t_start] + + time_range = np.flip(timesteps) + total_steps = timesteps.shape[0] + # print(f"Running DDIM Sampling with {total_steps} timesteps") + + # iterator = gr.Progress().tqdm(time_range, desc="Decoding image", total=total_steps) + iterator = tqdm(time_range, desc="Decoding image", total=total_steps) + x_dec = x_latent + + for i, step in enumerate(iterator): + index = total_steps - i - 1 + ts = torch.full( + (x_latent.shape[0],), step, device=x_latent.device, dtype=torch.long + ) + x_dec, _ = self.p_sample_ddim( + x_dec, + cond, + ts, + index=index, + use_original_steps=use_original_steps, + unconditional_guidance_scale=unconditional_guidance_scale, + unconditional_conditioning=unconditional_conditioning, + ) + return x_dec + + @torch.no_grad() + def p_sample_ddim( + self, + x, + c, + t, + index, + repeat_noise=False, + use_original_steps=False, + quantize_denoised=False, + temperature=1.0, + noise_dropout=0.0, + score_corrector=None, + corrector_kwargs=None, + unconditional_guidance_scale=1.0, + unconditional_conditioning=None, + ): + b, *_, device = *x.shape, x.device + + if unconditional_conditioning is None or unconditional_guidance_scale == 1.0: + e_t = self.model.apply_model(x, t, c) + else: + x_in = torch.cat([x] * 2) + t_in = torch.cat([t] * 2) + c_in = torch.cat([unconditional_conditioning, c]) + e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2) + # When unconditional_guidance_scale == 1: only e_t + # When unconditional_guidance_scale == 0: only unconditional + # When unconditional_guidance_scale > 1: add more unconditional guidance + e_t = e_t_uncond + unconditional_guidance_scale * (e_t - e_t_uncond) + + if score_corrector is not None: + assert self.model.parameterization == "eps" + e_t = 
score_corrector.modify_score( + self.model, e_t, x, t, c, **corrector_kwargs + ) + + alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas + alphas_prev = ( + self.model.alphas_cumprod_prev + if use_original_steps + else self.ddim_alphas_prev + ) + sqrt_one_minus_alphas = ( + self.model.sqrt_one_minus_alphas_cumprod + if use_original_steps + else self.ddim_sqrt_one_minus_alphas + ) + sigmas = ( + self.model.ddim_sigmas_for_original_num_steps + if use_original_steps + else self.ddim_sigmas + ) + # select parameters corresponding to the currently considered timestep + a_t = torch.full((b, 1, 1, 1), alphas[index], device=device) + a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device) + sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device) + sqrt_one_minus_at = torch.full( + (b, 1, 1, 1), sqrt_one_minus_alphas[index], device=device + ) + + # current prediction for x_0 + pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt() + if quantize_denoised: + pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0) + # direction pointing to x_t + dir_xt = (1.0 - a_prev - sigma_t**2).sqrt() * e_t + noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature + if noise_dropout > 0.0: + noise = torch.nn.functional.dropout(noise, p=noise_dropout) + x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise # TODO + return x_prev, pred_x0 diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ddpm.py b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ddpm.py new file mode 100644 index 0000000000000000000000000000000000000000..1f3b2cca00138773fca04a03cebd863f784429c2 --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ddpm.py @@ -0,0 +1,435 @@ +""" +wild mixture of +https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +https://github.com/openai/improved-diffusion/blob/e94489283bb876ac1477d5dd7709bbbd2d9902ce/improved_diffusion/gaussian_diffusion.py +https://github.com/CompVis/taming-transformers +-- merci +""" +import sys +import os + +import torch +import torch.nn as nn +import numpy as np +from contextlib import contextmanager +from functools import partial +from tqdm import tqdm + +from audioldm.utils import exists, default, count_params, instantiate_from_config +from audioldm.latent_diffusion.ema import LitEma +from audioldm.latent_diffusion.util import ( + make_beta_schedule, + extract_into_tensor, + noise_like, +) +import soundfile as sf +import os + + +__conditioning_keys__ = {"concat": "c_concat", "crossattn": "c_crossattn", "adm": "y"} + + +def uniform_on_device(r1, r2, shape, device): + return (r1 - r2) * torch.rand(*shape, device=device) + r2 + + +class DiffusionWrapper(nn.Module): + def __init__(self, diff_model_config, conditioning_key): + super().__init__() + self.diffusion_model = instantiate_from_config(diff_model_config) + self.conditioning_key = conditioning_key + assert self.conditioning_key in [ + None, + "concat", + "crossattn", + "hybrid", + "adm", + "film", + ] + + def forward( + self, x, t, c_concat: list = None, c_crossattn: list = None, c_film: list = None + ): + x = x.contiguous() + t = t.contiguous() + + if self.conditioning_key is None: + out = self.diffusion_model(x, t) + elif self.conditioning_key == "concat": + xc = torch.cat([x] + c_concat, dim=1) + out = self.diffusion_model(xc, t) + elif self.conditioning_key == "crossattn": + cc 
= torch.cat(c_crossattn, 1)
+            out = self.diffusion_model(x, t, context=cc)
+        elif self.conditioning_key == "hybrid":
+            xc = torch.cat([x] + c_concat, dim=1)
+            cc = torch.cat(c_crossattn, 1)
+            out = self.diffusion_model(xc, t, context=cc)
+        elif (
+            self.conditioning_key == "film"
+        ):  # The condition is assumed to be a single global token; it is passed through a linear layer and added to the time embedding (FiLM conditioning)
+            cc = c_film[0].squeeze(1)  # only has one token
+            out = self.diffusion_model(x, t, y=cc)
+        elif self.conditioning_key == "adm":
+            cc = c_crossattn[0]
+            out = self.diffusion_model(x, t, y=cc)
+        else:
+            raise NotImplementedError()
+
+        return out
+
+
+class DDPM(nn.Module):
+    # classic DDPM with Gaussian diffusion, in image space
+    def __init__(
+        self,
+        unet_config,
+        timesteps=1000,
+        beta_schedule="linear",
+        loss_type="l2",
+        ckpt_path=None,
+        ignore_keys=[],
+        load_only_unet=False,
+        monitor="val/loss",
+        use_ema=True,
+        first_stage_key="image",
+        latent_t_size=256,
+        latent_f_size=16,
+        channels=3,
+        log_every_t=100,
+        clip_denoised=True,
+        linear_start=1e-4,
+        linear_end=2e-2,
+        cosine_s=8e-3,
+        given_betas=None,
+        original_elbo_weight=0.0,
+        v_posterior=0.0,  # weight for choosing posterior variance as sigma = (1 - v) * beta_tilde + v * beta
+        l_simple_weight=1.0,
+        conditioning_key=None,
+        parameterization="eps",  # all assuming fixed variance schedules
+        scheduler_config=None,
+        use_positional_encodings=False,
+        learn_logvar=False,
+        logvar_init=0.0,
+    ):
+        super().__init__()
+        assert parameterization in [
+            "eps",
+            "x0",
+        ], 'currently only supporting "eps" and "x0"'
+        self.parameterization = parameterization
+        self.state = None
+        # print(f"{self.__class__.__name__}: Running in {self.parameterization}-prediction mode")
+        self.cond_stage_model = None
+        self.clip_denoised = clip_denoised
+        self.log_every_t = log_every_t
+        self.first_stage_key = first_stage_key
+
+        self.latent_t_size = latent_t_size
+        self.latent_f_size = latent_f_size
+
+        self.channels = channels
+        self.use_positional_encodings = use_positional_encodings
+        self.model = DiffusionWrapper(unet_config, conditioning_key)
+        count_params(self.model, verbose=True)
+        self.use_ema = use_ema
+        if self.use_ema:
+            self.model_ema = LitEma(self.model)
+            # print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))}.")
+
+        self.use_scheduler = scheduler_config is not None
+        if self.use_scheduler:
+            self.scheduler_config = scheduler_config
+
+        self.v_posterior = v_posterior
+        self.original_elbo_weight = original_elbo_weight
+        self.l_simple_weight = l_simple_weight
+
+        if monitor is not None:
+            self.monitor = monitor
+
+        self.register_schedule(
+            given_betas=given_betas,
+            beta_schedule=beta_schedule,
+            timesteps=timesteps,
+            linear_start=linear_start,
+            linear_end=linear_end,
+            cosine_s=cosine_s,
+        )
+
+        self.loss_type = loss_type
+
+        self.learn_logvar = learn_logvar
+        self.logvar = torch.full(fill_value=logvar_init, size=(self.num_timesteps,))
+        if self.learn_logvar:
+            self.logvar = nn.Parameter(self.logvar, requires_grad=True)
+        else:
+            self.logvar = nn.Parameter(self.logvar, requires_grad=False)
+
+        self.logger_save_dir = None
+        self.logger_project = None
+        self.logger_version = None
+        self.label_indices_total = None
+        # Default metric values, so checkpoint selection does not fail when a
+        # metric has not been computed yet
+        self.metrics_buffer = {
+            "val/kullback_leibler_divergence_sigmoid": 15.0,
+            "val/kullback_leibler_divergence_softmax": 10.0,
+            "val/psnr": 0.0,
+            "val/ssim": 0.0,
+            "val/inception_score_mean": 1.0,
+            "val/inception_score_std": 0.0,
"val/kernel_inception_distance_mean": 0.0, + "val/kernel_inception_distance_std": 0.0, + "val/frechet_inception_distance": 133.0, + "val/frechet_audio_distance": 32.0, + } + self.initial_learning_rate = None + + def get_log_dir(self): + if ( + self.logger_save_dir is None + and self.logger_project is None + and self.logger_version is None + ): + return os.path.join( + self.logger.save_dir, self.logger._project, self.logger.version + ) + else: + return os.path.join( + self.logger_save_dir, self.logger_project, self.logger_version + ) + + def set_log_dir(self, save_dir, project, version): + self.logger_save_dir = save_dir + self.logger_project = project + self.logger_version = version + + def register_schedule( + self, + given_betas=None, + beta_schedule="linear", + timesteps=1000, + linear_start=1e-4, + linear_end=2e-2, + cosine_s=8e-3, + ): + if exists(given_betas): + betas = given_betas + else: + betas = make_beta_schedule( + beta_schedule, + timesteps, + linear_start=linear_start, + linear_end=linear_end, + cosine_s=cosine_s, + ) + alphas = 1.0 - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1.0, alphas_cumprod[:-1]) + + (timesteps,) = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + assert ( + alphas_cumprod.shape[0] == self.num_timesteps + ), "alphas have to be defined for each timestep" + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer("betas", to_torch(betas)) + self.register_buffer("alphas_cumprod", to_torch(alphas_cumprod)) + self.register_buffer("alphas_cumprod_prev", to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer("sqrt_alphas_cumprod", to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer( + "sqrt_one_minus_alphas_cumprod", to_torch(np.sqrt(1.0 - alphas_cumprod)) + ) + self.register_buffer( + "log_one_minus_alphas_cumprod", to_torch(np.log(1.0 - alphas_cumprod)) + ) + self.register_buffer( + "sqrt_recip_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod)) + ) + self.register_buffer( + "sqrt_recipm1_alphas_cumprod", to_torch(np.sqrt(1.0 / alphas_cumprod - 1)) + ) + + # calculations for posterior q(x_{t-1} | x_t, x_0) + posterior_variance = (1 - self.v_posterior) * betas * ( + 1.0 - alphas_cumprod_prev + ) / (1.0 - alphas_cumprod) + self.v_posterior * betas + # above: equal to 1. / (1. / (1. 
- alpha_cumprod_tm1) + alpha_t / beta_t) + self.register_buffer("posterior_variance", to_torch(posterior_variance)) + # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain + self.register_buffer( + "posterior_log_variance_clipped", + to_torch(np.log(np.maximum(posterior_variance, 1e-20))), + ) + self.register_buffer( + "posterior_mean_coef1", + to_torch(betas * np.sqrt(alphas_cumprod_prev) / (1.0 - alphas_cumprod)), + ) + self.register_buffer( + "posterior_mean_coef2", + to_torch( + (1.0 - alphas_cumprod_prev) * np.sqrt(alphas) / (1.0 - alphas_cumprod) + ), + ) + + if self.parameterization == "eps": + lvlb_weights = self.betas**2 / ( + 2 + * self.posterior_variance + * to_torch(alphas) + * (1 - self.alphas_cumprod) + ) + elif self.parameterization == "x0": + lvlb_weights = ( + 0.5 + * np.sqrt(torch.Tensor(alphas_cumprod)) + / (2.0 * 1 - torch.Tensor(alphas_cumprod)) + ) + else: + raise NotImplementedError("mu not supported") + # TODO how to choose this term + lvlb_weights[0] = lvlb_weights[1] + self.register_buffer("lvlb_weights", lvlb_weights, persistent=False) + assert not torch.isnan(self.lvlb_weights).all() + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.model.parameters()) + self.model_ema.copy_to(self.model) + if context is not None: + # print(f"{context}: Switched to EMA weights") + pass + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.model.parameters()) + if context is not None: + # print(f"{context}: Restored training weights") + pass + + def q_mean_variance(self, x_start, t): + """ + Get the distribution q(x_t | x_0). + :param x_start: the [N x C x ...] tensor of noiseless inputs. + :param t: the number of diffusion steps (minus 1). Here, 0 means one step. + :return: A tuple (mean, variance, log_variance), all of x_start's shape. 
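+        All three are gathered per-sample at timestep `t` via `extract_into_tensor`.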
+ """ + mean = extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape) + log_variance = extract_into_tensor( + self.log_one_minus_alphas_cumprod, t, x_start.shape + ) + return mean, variance, log_variance + + def predict_start_from_noise(self, x_t, t, noise): + return ( + extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t + - extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) + * noise + ) + + def q_posterior(self, x_start, x_t, t): + posterior_mean = ( + extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start + + extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t + ) + posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape) + posterior_log_variance_clipped = extract_into_tensor( + self.posterior_log_variance_clipped, t, x_t.shape + ) + return posterior_mean, posterior_variance, posterior_log_variance_clipped + + def p_mean_variance(self, x, t, clip_denoised: bool): + model_out = self.model(x, t) + if self.parameterization == "eps": + x_recon = self.predict_start_from_noise(x, t=t, noise=model_out) + elif self.parameterization == "x0": + x_recon = model_out + if clip_denoised: + x_recon.clamp_(-1.0, 1.0) + + model_mean, posterior_variance, posterior_log_variance = self.q_posterior( + x_start=x_recon, x_t=x, t=t + ) + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample(self, x, t, clip_denoised=True, repeat_noise=False): + b, *_, device = *x.shape, x.device + model_mean, _, model_log_variance = self.p_mean_variance( + x=x, t=t, clip_denoised=clip_denoised + ) + noise = noise_like(x.shape, device, repeat_noise) + # no noise when t == 0 + nonzero_mask = ( + (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))).contiguous() + ) + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + @torch.no_grad() + def p_sample_loop(self, shape, return_intermediates=False): + device = self.betas.device + b = shape[0] + img = torch.randn(shape, device=device) + intermediates = [img] + for i in tqdm( + reversed(range(0, self.num_timesteps)), + desc="Sampling t", + total=self.num_timesteps, + ): + img = self.p_sample( + img, + torch.full((b,), i, device=device, dtype=torch.long), + clip_denoised=self.clip_denoised, + ) + if i % self.log_every_t == 0 or i == self.num_timesteps - 1: + intermediates.append(img) + if return_intermediates: + return img, intermediates + return img + + @torch.no_grad() + def sample(self, batch_size=16, return_intermediates=False): + shape = (batch_size, channels, self.latent_t_size, self.latent_f_size) + channels = self.channels + return self.p_sample_loop(shape, return_intermediates=return_intermediates) + + def q_sample(self, x_start, t, noise=None): + noise = default(noise, lambda: torch.randn_like(x_start)) + return ( + extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) + * noise + ) + + def forward(self, x, *args, **kwargs): + t = torch.randint( + 0, self.num_timesteps, (x.shape[0],), device=self.device + ).long() + return self.p_losses(x, t, *args, **kwargs) + + def get_input(self, batch, k): + # fbank, log_magnitudes_stft, label_indices, fname, waveform, clip_label, text = batch + fbank, log_magnitudes_stft, label_indices, fname, waveform, text = batch + ret = {} + + ret["fbank"] = ( + 
fbank.unsqueeze(1).to(memory_format=torch.contiguous_format).float() + ) + ret["stft"] = log_magnitudes_stft.to( + memory_format=torch.contiguous_format + ).float() + # ret["clip_label"] = clip_label.to(memory_format=torch.contiguous_format).float() + ret["waveform"] = waveform.to(memory_format=torch.contiguous_format).float() + ret["text"] = list(text) + ret["fname"] = fname + + return ret[k] diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ema.py b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..880ca3d205d9b4d7450e146930a93f2e63c58b70 --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/ema.py @@ -0,0 +1,82 @@ +import torch +from torch import nn + + +class LitEma(nn.Module): + def __init__(self, model, decay=0.9999, use_num_upates=True): + super().__init__() + if decay < 0.0 or decay > 1.0: + raise ValueError("Decay must be between 0 and 1") + + self.m_name2s_name = {} + self.register_buffer("decay", torch.tensor(decay, dtype=torch.float32)) + self.register_buffer( + "num_updates", + torch.tensor(0, dtype=torch.int) + if use_num_upates + else torch.tensor(-1, dtype=torch.int), + ) + + for name, p in model.named_parameters(): + if p.requires_grad: + # remove as '.'-character is not allowed in buffers + s_name = name.replace(".", "") + self.m_name2s_name.update({name: s_name}) + self.register_buffer(s_name, p.clone().detach().data) + + self.collected_params = [] + + def forward(self, model): + decay = self.decay + + if self.num_updates >= 0: + self.num_updates += 1 + decay = min(self.decay, (1 + self.num_updates) / (10 + self.num_updates)) + + one_minus_decay = 1.0 - decay + + with torch.no_grad(): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + + for key in m_param: + if m_param[key].requires_grad: + sname = self.m_name2s_name[key] + shadow_params[sname] = shadow_params[sname].type_as(m_param[key]) + shadow_params[sname].sub_( + one_minus_decay * (shadow_params[sname] - m_param[key]) + ) + else: + assert not key in self.m_name2s_name + + def copy_to(self, model): + m_param = dict(model.named_parameters()) + shadow_params = dict(self.named_buffers()) + for key in m_param: + if m_param[key].requires_grad: + m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data) + else: + assert not key in self.m_name2s_name + + def store(self, parameters): + """ + Save the current parameters for restoring later. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + temporarily stored. + """ + self.collected_params = [param.clone() for param in parameters] + + def restore(self, parameters): + """ + Restore the parameters stored with the `store` method. + Useful to validate the model with EMA parameters without affecting the + original optimization process. Store the parameters before the + `copy_to` method. After validation (or model saving), use this to + restore the former parameters. + Args: + parameters: Iterable of `torch.nn.Parameter`; the parameters to be + updated with the stored parameters. 
+ """ + for c_param, param in zip(self.collected_params, parameters): + param.data.copy_(c_param.data) diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/openaimodel.py b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/openaimodel.py new file mode 100644 index 0000000000000000000000000000000000000000..831d7aafb36bba16888e4389153979a6c13639f5 --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/openaimodel.py @@ -0,0 +1,1069 @@ +from abc import abstractmethod +import math + +import numpy as np +import torch as th +import torch.nn as nn +import torch.nn.functional as F + +from audioldm.latent_diffusion.util import ( + checkpoint, + conv_nd, + linear, + avg_pool_nd, + zero_module, + normalization, + timestep_embedding, +) +from audioldm.latent_diffusion.attention import SpatialTransformer + + +# dummy replace +def convert_module_to_f16(x): + pass + + +def convert_module_to_f32(x): + pass + + +## go +class AttentionPool2d(nn.Module): + """ + Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py + """ + + def __init__( + self, + spacial_dim: int, + embed_dim: int, + num_heads_channels: int, + output_dim: int = None, + ): + super().__init__() + self.positional_embedding = nn.Parameter( + th.randn(embed_dim, spacial_dim**2 + 1) / embed_dim**0.5 + ) + self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1) + self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1) + self.num_heads = embed_dim // num_heads_channels + self.attention = QKVAttention(self.num_heads) + + def forward(self, x): + b, c, *_spatial = x.shape + x = x.reshape(b, c, -1).contiguous() # NC(HW) + x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1) # NC(HW+1) + x = x + self.positional_embedding[None, :, :].to(x.dtype) # NC(HW+1) + x = self.qkv_proj(x) + x = self.attention(x) + x = self.c_proj(x) + return x[:, :, 0] + + +class TimestepBlock(nn.Module): + """ + Any module where forward() takes timestep embeddings as a second argument. + """ + + @abstractmethod + def forward(self, x, emb): + """ + Apply the module to `x` given `emb` timestep embeddings. + """ + + +class TimestepEmbedSequential(nn.Sequential, TimestepBlock): + """ + A sequential module that passes timestep embeddings to the children that + support it as an extra input. + """ + + def forward(self, x, emb, context=None): + for layer in self: + if isinstance(layer, TimestepBlock): + x = layer(x, emb) + elif isinstance(layer, SpatialTransformer): + x = layer(x, context) + else: + x = layer(x) + return x + + +class Upsample(nn.Module): + """ + An upsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + upsampling occurs in the inner-two dimensions. 
+ """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + if use_conv: + self.conv = conv_nd( + dims, self.channels, self.out_channels, 3, padding=padding + ) + + def forward(self, x): + assert x.shape[1] == self.channels + if self.dims == 3: + x = F.interpolate( + x, (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode="nearest" + ) + else: + x = F.interpolate(x, scale_factor=2, mode="nearest") + if self.use_conv: + x = self.conv(x) + return x + + +class TransposedUpsample(nn.Module): + "Learned 2x upsampling without padding" + + def __init__(self, channels, out_channels=None, ks=5): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + + self.up = nn.ConvTranspose2d( + self.channels, self.out_channels, kernel_size=ks, stride=2 + ) + + def forward(self, x): + return self.up(x) + + +class Downsample(nn.Module): + """ + A downsampling layer with an optional convolution. + :param channels: channels in the inputs and outputs. + :param use_conv: a bool determining if a convolution is applied. + :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then + downsampling occurs in the inner-two dimensions. + """ + + def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1): + super().__init__() + self.channels = channels + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.dims = dims + stride = 2 if dims != 3 else (1, 2, 2) + if use_conv: + self.op = conv_nd( + dims, + self.channels, + self.out_channels, + 3, + stride=stride, + padding=padding, + ) + else: + assert self.channels == self.out_channels + self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride) + + def forward(self, x): + assert x.shape[1] == self.channels + return self.op(x) + + +class ResBlock(TimestepBlock): + """ + A residual block that can optionally change the number of channels. + :param channels: the number of input channels. + :param emb_channels: the number of timestep embedding channels. + :param dropout: the rate of dropout. + :param out_channels: if specified, the number of out channels. + :param use_conv: if True and out_channels is specified, use a spatial + convolution instead of a smaller 1x1 convolution to change the + channels in the skip connection. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param use_checkpoint: if True, use gradient checkpointing on this module. + :param up: if True, use this block for upsampling. + :param down: if True, use this block for downsampling. 
+ """ + + def __init__( + self, + channels, + emb_channels, + dropout, + out_channels=None, + use_conv=False, + use_scale_shift_norm=False, + dims=2, + use_checkpoint=False, + up=False, + down=False, + ): + super().__init__() + self.channels = channels + self.emb_channels = emb_channels + self.dropout = dropout + self.out_channels = out_channels or channels + self.use_conv = use_conv + self.use_checkpoint = use_checkpoint + self.use_scale_shift_norm = use_scale_shift_norm + + self.in_layers = nn.Sequential( + normalization(channels), + nn.SiLU(), + conv_nd(dims, channels, self.out_channels, 3, padding=1), + ) + + self.updown = up or down + + if up: + self.h_upd = Upsample(channels, False, dims) + self.x_upd = Upsample(channels, False, dims) + elif down: + self.h_upd = Downsample(channels, False, dims) + self.x_upd = Downsample(channels, False, dims) + else: + self.h_upd = self.x_upd = nn.Identity() + + self.emb_layers = nn.Sequential( + nn.SiLU(), + linear( + emb_channels, + 2 * self.out_channels if use_scale_shift_norm else self.out_channels, + ), + ) + self.out_layers = nn.Sequential( + normalization(self.out_channels), + nn.SiLU(), + nn.Dropout(p=dropout), + zero_module( + conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1) + ), + ) + + if self.out_channels == channels: + self.skip_connection = nn.Identity() + elif use_conv: + self.skip_connection = conv_nd( + dims, channels, self.out_channels, 3, padding=1 + ) + else: + self.skip_connection = conv_nd(dims, channels, self.out_channels, 1) + + def forward(self, x, emb): + """ + Apply the block to a Tensor, conditioned on a timestep embedding. + :param x: an [N x C x ...] Tensor of features. + :param emb: an [N x emb_channels] Tensor of timestep embeddings. + :return: an [N x C x ...] Tensor of outputs. + """ + return checkpoint( + self._forward, (x, emb), self.parameters(), self.use_checkpoint + ) + + def _forward(self, x, emb): + if self.updown: + in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1] + h = in_rest(x) + h = self.h_upd(h) + x = self.x_upd(x) + h = in_conv(h) + else: + h = self.in_layers(x) + emb_out = self.emb_layers(emb).type(h.dtype) + while len(emb_out.shape) < len(h.shape): + emb_out = emb_out[..., None] + if self.use_scale_shift_norm: + out_norm, out_rest = self.out_layers[0], self.out_layers[1:] + scale, shift = th.chunk(emb_out, 2, dim=1) + h = out_norm(h) * (1 + scale) + shift + h = out_rest(h) + else: + h = h + emb_out + h = self.out_layers(h) + return self.skip_connection(x) + h + + +class AttentionBlock(nn.Module): + """ + An attention block that allows spatial positions to attend to each other. + Originally ported from here, but adapted to the N-d case. + https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66. 
+ """ + + def __init__( + self, + channels, + num_heads=1, + num_head_channels=-1, + use_checkpoint=False, + use_new_attention_order=False, + ): + super().__init__() + self.channels = channels + if num_head_channels == -1: + self.num_heads = num_heads + else: + assert ( + channels % num_head_channels == 0 + ), f"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}" + self.num_heads = channels // num_head_channels + self.use_checkpoint = use_checkpoint + self.norm = normalization(channels) + self.qkv = conv_nd(1, channels, channels * 3, 1) + if use_new_attention_order: + # split qkv before split heads + self.attention = QKVAttention(self.num_heads) + else: + # split heads before split qkv + self.attention = QKVAttentionLegacy(self.num_heads) + + self.proj_out = zero_module(conv_nd(1, channels, channels, 1)) + + def forward(self, x): + return checkpoint( + self._forward, (x,), self.parameters(), True + ) # TODO: check checkpoint usage, is True # TODO: fix the .half call!!! + # return pt_checkpoint(self._forward, x) # pytorch + + def _forward(self, x): + b, c, *spatial = x.shape + x = x.reshape(b, c, -1).contiguous() + qkv = self.qkv(self.norm(x)).contiguous() + h = self.attention(qkv).contiguous() + h = self.proj_out(h).contiguous() + return (x + h).reshape(b, c, *spatial).contiguous() + + +def count_flops_attn(model, _x, y): + """ + A counter for the `thop` package to count the operations in an + attention operation. + Meant to be used like: + macs, params = thop.profile( + model, + inputs=(inputs, timestamps), + custom_ops={QKVAttention: QKVAttention.count_flops}, + ) + """ + b, c, *spatial = y[0].shape + num_spatial = int(np.prod(spatial)) + # We perform two matmuls with the same number of ops. + # The first computes the weight matrix, the second computes + # the combination of the value vectors. + matmul_ops = 2 * b * (num_spatial**2) * c + model.total_ops += th.DoubleTensor([matmul_ops]) + + +class QKVAttentionLegacy(nn.Module): + """ + A module which performs QKV attention. Matches legacy QKVAttention + input/ouput heads shaping + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. + """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = ( + qkv.reshape(bs * self.n_heads, ch * 3, length).contiguous().split(ch, dim=1) + ) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", q * scale, k * scale + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight.float(), dim=-1).type(weight.dtype) + a = th.einsum("bts,bcs->bct", weight, v) + return a.reshape(bs, -1, length).contiguous() + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class QKVAttention(nn.Module): + """ + A module which performs QKV attention and splits in a different order. + """ + + def __init__(self, n_heads): + super().__init__() + self.n_heads = n_heads + + def forward(self, qkv): + """ + Apply QKV attention. + :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs. + :return: an [N x (H * C) x T] tensor after attention. 
+ """ + bs, width, length = qkv.shape + assert width % (3 * self.n_heads) == 0 + ch = width // (3 * self.n_heads) + q, k, v = qkv.chunk(3, dim=1) + scale = 1 / math.sqrt(math.sqrt(ch)) + weight = th.einsum( + "bct,bcs->bts", + (q * scale).view(bs * self.n_heads, ch, length), + (k * scale).view(bs * self.n_heads, ch, length), + ) # More stable with f16 than dividing afterwards + weight = th.softmax(weight.float(), dim=-1).type(weight.dtype) + a = th.einsum( + "bts,bcs->bct", + weight, + v.reshape(bs * self.n_heads, ch, length).contiguous(), + ) + return a.reshape(bs, -1, length).contiguous() + + @staticmethod + def count_flops(model, _x, y): + return count_flops_attn(model, _x, y) + + +class UNetModel(nn.Module): + """ + The full UNet model with attention and timestep embedding. + :param in_channels: channels in the input Tensor. + :param model_channels: base channel count for the model. + :param out_channels: channels in the output Tensor. + :param num_res_blocks: number of residual blocks per downsample. + :param attention_resolutions: a collection of downsample rates at which + attention will take place. May be a set, list, or tuple. + For example, if this contains 4, then at 4x downsampling, attention + will be used. + :param dropout: the dropout probability. + :param channel_mult: channel multiplier for each level of the UNet. + :param conv_resample: if True, use learned convolutions for upsampling and + downsampling. + :param dims: determines if the signal is 1D, 2D, or 3D. + :param num_classes: if specified (as an int), then this model will be + class-conditional with `num_classes` classes. + :param use_checkpoint: use gradient checkpointing to reduce memory usage. + :param num_heads: the number of attention heads in each attention layer. + :param num_heads_channels: if specified, ignore num_heads and instead use + a fixed channel width per attention head. + :param num_heads_upsample: works with num_heads to set a different number + of heads for upsampling. Deprecated. + :param use_scale_shift_norm: use a FiLM-like conditioning mechanism. + :param resblock_updown: use residual blocks for up/downsampling. + :param use_new_attention_order: use a different attention pattern for potentially + increased efficiency. 
+ """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + num_classes=None, + extra_film_condition_dim=None, + use_checkpoint=False, + use_fp16=False, + num_heads=-1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + extra_film_use_concat=False, # If true, concatenate extrafilm condition with time embedding, else addition + resblock_updown=False, + use_new_attention_order=False, + use_spatial_transformer=False, # custom transformer support + transformer_depth=1, # custom transformer support + context_dim=None, # custom transformer support + n_embed=None, # custom support for prediction of discrete ids into codebook of first stage vq model + legacy=True, + ): + super().__init__() + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + if num_heads == -1: + assert ( + num_head_channels != -1 + ), "Either num_heads or num_head_channels has to be set" + + if num_head_channels == -1: + assert ( + num_heads != -1 + ), "Either num_heads or num_head_channels has to be set" + + self.image_size = image_size + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + self.num_res_blocks = num_res_blocks + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.num_classes = num_classes + self.extra_film_condition_dim = extra_film_condition_dim + self.use_checkpoint = use_checkpoint + self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + self.predict_codebook_ids = n_embed is not None + self.extra_film_use_concat = extra_film_use_concat + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + + assert not ( + self.num_classes is not None and self.extra_film_condition_dim is not None + ), "As for the condition of theh UNet model, you can only set using class label or an extra embedding vector (such as from CLAP). You cannot set both num_classes and extra_film_condition_dim." + + if self.num_classes is not None: + self.label_emb = nn.Embedding(num_classes, time_embed_dim) + + self.use_extra_film_by_concat = ( + self.extra_film_condition_dim is not None and self.extra_film_use_concat + ) + self.use_extra_film_by_addition = ( + self.extra_film_condition_dim is not None and not self.extra_film_use_concat + ) + + if self.extra_film_condition_dim is not None: + self.film_emb = nn.Linear(self.extra_film_condition_dim, time_embed_dim) + # print("+ Use extra condition on UNet channel using Film. Extra condition dimension is %s. " % self.extra_film_condition_dim) + # if(self.use_extra_film_by_concat): + # print("\t By concatenation with time embedding") + # elif(self.use_extra_film_by_concat): + # print("\t By addition with time embedding") + + if use_spatial_transformer and ( + self.use_extra_film_by_concat or self.use_extra_film_by_addition + ): + # print("+ Spatial transformer will only be used as self-attention. 
Because you have choose to use film as your global condition.") + spatial_transformer_no_context = True + else: + spatial_transformer_no_context = False + + if use_spatial_transformer and not spatial_transformer_no_context: + assert ( + context_dim is not None + ), "Fool!! You forgot to include the dimension of your cross-attention conditioning..." + + if context_dim is not None and not spatial_transformer_no_context: + assert ( + use_spatial_transformer + ), "Fool!! You forgot to use the spatial transformer for your cross-attention conditioning..." + from omegaconf.listconfig import ListConfig + + if type(context_dim) == ListConfig: + context_dim = list(context_dim) + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for _ in range(num_res_blocks): + layers = [ + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + dim_head = ( + ch // num_heads + if use_spatial_transformer + else num_head_channels + ) + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + if not use_spatial_transformer + else SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth, + context_dim=context_dim, + no_context=spatial_transformer_no_context, + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ch // num_heads if use_spatial_transformer else num_head_channels + self.middle_block = TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + if not use_spatial_transformer + else SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth, + context_dim=context_dim, + no_context=spatial_transformer_no_context, + ), + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + 
else time_embed_dim * 2, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + ) + self._feature_size += ch + + self.output_blocks = nn.ModuleList([]) + for level, mult in list(enumerate(channel_mult))[::-1]: + for i in range(num_res_blocks + 1): + ich = input_block_chans.pop() + layers = [ + ResBlock( + ch + ich, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + out_channels=model_channels * mult, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = model_channels * mult + if ds in attention_resolutions: + if num_head_channels == -1: + dim_head = ch // num_heads + else: + num_heads = ch // num_head_channels + dim_head = num_head_channels + if legacy: + # num_heads = 1 + dim_head = ( + ch // num_heads + if use_spatial_transformer + else num_head_channels + ) + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads_upsample, + num_head_channels=dim_head, + use_new_attention_order=use_new_attention_order, + ) + if not use_spatial_transformer + else SpatialTransformer( + ch, + num_heads, + dim_head, + depth=transformer_depth, + context_dim=context_dim, + no_context=spatial_transformer_no_context, + ) + ) + if level and i == num_res_blocks: + out_ch = ch + layers.append( + ResBlock( + ch, + time_embed_dim + if (not self.use_extra_film_by_concat) + else time_embed_dim * 2, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + up=True, + ) + if resblock_updown + else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch) + ) + ds //= 2 + self.output_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)), + ) + if self.predict_codebook_ids: + self.id_predictor = nn.Sequential( + normalization(ch), + conv_nd(dims, model_channels, n_embed, 1), + # nn.LogSoftmax(dim=1) # change to cross_entropy and produce non-normalized logits + ) + + self.shape_reported = False + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + self.output_blocks.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. + """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + self.output_blocks.apply(convert_module_to_f32) + + def forward(self, x, timesteps=None, context=None, y=None, **kwargs): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :param context: conditioning plugged in via crossattn + :param y: an [N] Tensor of labels, if class-conditional. an [N, extra_film_condition_dim] Tensor if film-embed conditional + :return: an [N x C x ...] Tensor of outputs. 
+ """ + if not self.shape_reported: + # print("The shape of UNet input is", x.size()) + self.shape_reported = True + + assert (y is not None) == ( + self.num_classes is not None or self.extra_film_condition_dim is not None + ), "must specify y if and only if the model is class-conditional or film embedding conditional" + hs = [] + t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False) + emb = self.time_embed(t_emb) + + if self.num_classes is not None: + assert y.shape == (x.shape[0],) + emb = emb + self.label_emb(y) + + if self.use_extra_film_by_addition: + emb = emb + self.film_emb(y) + elif self.use_extra_film_by_concat: + emb = th.cat([emb, self.film_emb(y)], dim=-1) + + h = x.type(self.dtype) + for module in self.input_blocks: + h = module(h, emb, context) + hs.append(h) + h = self.middle_block(h, emb, context) + for module in self.output_blocks: + h = th.cat([h, hs.pop()], dim=1) + h = module(h, emb, context) + h = h.type(x.dtype) + if self.predict_codebook_ids: + return self.id_predictor(h) + else: + return self.out(h) + + +class EncoderUNetModel(nn.Module): + """ + The half UNet model with attention and timestep embedding. + For usage, see UNet. + """ + + def __init__( + self, + image_size, + in_channels, + model_channels, + out_channels, + num_res_blocks, + attention_resolutions, + dropout=0, + channel_mult=(1, 2, 4, 8), + conv_resample=True, + dims=2, + use_checkpoint=False, + use_fp16=False, + num_heads=1, + num_head_channels=-1, + num_heads_upsample=-1, + use_scale_shift_norm=False, + resblock_updown=False, + use_new_attention_order=False, + pool="adaptive", + *args, + **kwargs, + ): + super().__init__() + + if num_heads_upsample == -1: + num_heads_upsample = num_heads + + self.in_channels = in_channels + self.model_channels = model_channels + self.out_channels = out_channels + self.num_res_blocks = num_res_blocks + self.attention_resolutions = attention_resolutions + self.dropout = dropout + self.channel_mult = channel_mult + self.conv_resample = conv_resample + self.use_checkpoint = use_checkpoint + self.dtype = th.float16 if use_fp16 else th.float32 + self.num_heads = num_heads + self.num_head_channels = num_head_channels + self.num_heads_upsample = num_heads_upsample + + time_embed_dim = model_channels * 4 + self.time_embed = nn.Sequential( + linear(model_channels, time_embed_dim), + nn.SiLU(), + linear(time_embed_dim, time_embed_dim), + ) + + self.input_blocks = nn.ModuleList( + [ + TimestepEmbedSequential( + conv_nd(dims, in_channels, model_channels, 3, padding=1) + ) + ] + ) + self._feature_size = model_channels + input_block_chans = [model_channels] + ch = model_channels + ds = 1 + for level, mult in enumerate(channel_mult): + for _ in range(num_res_blocks): + layers = [ + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=mult * model_channels, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ) + ] + ch = mult * model_channels + if ds in attention_resolutions: + layers.append( + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=num_head_channels, + use_new_attention_order=use_new_attention_order, + ) + ) + self.input_blocks.append(TimestepEmbedSequential(*layers)) + self._feature_size += ch + input_block_chans.append(ch) + if level != len(channel_mult) - 1: + out_ch = ch + self.input_blocks.append( + TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + out_channels=out_ch, + dims=dims, + use_checkpoint=use_checkpoint, + 
use_scale_shift_norm=use_scale_shift_norm, + down=True, + ) + if resblock_updown + else Downsample( + ch, conv_resample, dims=dims, out_channels=out_ch + ) + ) + ) + ch = out_ch + input_block_chans.append(ch) + ds *= 2 + self._feature_size += ch + + self.middle_block = TimestepEmbedSequential( + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + AttentionBlock( + ch, + use_checkpoint=use_checkpoint, + num_heads=num_heads, + num_head_channels=num_head_channels, + use_new_attention_order=use_new_attention_order, + ), + ResBlock( + ch, + time_embed_dim, + dropout, + dims=dims, + use_checkpoint=use_checkpoint, + use_scale_shift_norm=use_scale_shift_norm, + ), + ) + self._feature_size += ch + self.pool = pool + if pool == "adaptive": + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + nn.AdaptiveAvgPool2d((1, 1)), + zero_module(conv_nd(dims, ch, out_channels, 1)), + nn.Flatten(), + ) + elif pool == "attention": + assert num_head_channels != -1 + self.out = nn.Sequential( + normalization(ch), + nn.SiLU(), + AttentionPool2d( + (image_size // ds), ch, num_head_channels, out_channels + ), + ) + elif pool == "spatial": + self.out = nn.Sequential( + nn.Linear(self._feature_size, 2048), + nn.ReLU(), + nn.Linear(2048, self.out_channels), + ) + elif pool == "spatial_v2": + self.out = nn.Sequential( + nn.Linear(self._feature_size, 2048), + normalization(2048), + nn.SiLU(), + nn.Linear(2048, self.out_channels), + ) + else: + raise NotImplementedError(f"Unexpected {pool} pooling") + + def convert_to_fp16(self): + """ + Convert the torso of the model to float16. + """ + self.input_blocks.apply(convert_module_to_f16) + self.middle_block.apply(convert_module_to_f16) + + def convert_to_fp32(self): + """ + Convert the torso of the model to float32. + """ + self.input_blocks.apply(convert_module_to_f32) + self.middle_block.apply(convert_module_to_f32) + + def forward(self, x, timesteps): + """ + Apply the model to an input batch. + :param x: an [N x C x ...] Tensor of inputs. + :param timesteps: a 1-D batch of timesteps. + :return: an [N x K] Tensor of outputs. + """ + emb = self.time_embed(timestep_embedding(timesteps, self.model_channels)) + + results = [] + h = x.type(self.dtype) + for module in self.input_blocks: + h = module(h, emb) + if self.pool.startswith("spatial"): + results.append(h.type(x.dtype).mean(dim=(2, 3))) + h = self.middle_block(h, emb) + if self.pool.startswith("spatial"): + results.append(h.type(x.dtype).mean(dim=(2, 3))) + h = th.cat(results, axis=-1) + return self.out(h) + else: + h = h.type(x.dtype) + return self.out(h) diff --git a/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/util.py b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/util.py new file mode 100644 index 0000000000000000000000000000000000000000..a6aaa6f5947791572cc22ce8c94cb3377460f38a --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/latent_diffusion/util.py @@ -0,0 +1,295 @@ +# adopted from +# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py +# and +# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py +# and +# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py +# +# thanks! 
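+
+# Usage sketch (illustrative only): the schedule helpers below feed the
+# diffusion wrappers, e.g.
+#
+#   betas = make_beta_schedule("linear", n_timestep=1000)  # ndarray, shape (1000,)
+#   alphas_cumprod = np.cumprod(1.0 - betas)               # cumulative alpha_bar_t
+#
+# The "linear" branch spaces sqrt(beta) evenly between sqrt(linear_start) and
+# sqrt(linear_end), then squares, matching the original DDPM/LDM schedules.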
+
+
+import os
+import math
+import torch
+import torch.nn as nn
+import numpy as np
+from einops import repeat
+
+# from core.models.audioldm.utils import instantiate_from_config
+
+
+def make_beta_schedule(
+    schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3
+):
+    if schedule == "linear":
+        betas = (
+            torch.linspace(
+                linear_start**0.5, linear_end**0.5, n_timestep, dtype=torch.float64
+            )
+            ** 2
+        )
+
+    elif schedule == "cosine":
+        timesteps = (
+            torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s
+        )
+        alphas = timesteps / (1 + cosine_s) * np.pi / 2
+        alphas = torch.cos(alphas).pow(2)
+        alphas = alphas / alphas[0]
+        betas = 1 - alphas[1:] / alphas[:-1]
+        betas = np.clip(betas, a_min=0, a_max=0.999)
+
+    elif schedule == "sqrt_linear":
+        betas = torch.linspace(
+            linear_start, linear_end, n_timestep, dtype=torch.float64
+        )
+    elif schedule == "sqrt":
+        betas = (
+            torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64)
+            ** 0.5
+        )
+    else:
+        raise ValueError(f"schedule '{schedule}' unknown.")
+    return betas.numpy()
+
+
+def make_ddim_timesteps(
+    ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True
+):
+    if ddim_discr_method == "uniform":
+        c = num_ddpm_timesteps // num_ddim_timesteps
+        ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c)))
+    elif ddim_discr_method == "quad":
+        ddim_timesteps = (
+            (np.linspace(0, np.sqrt(num_ddpm_timesteps * 0.8), num_ddim_timesteps)) ** 2
+        ).astype(int)
+    else:
+        raise NotImplementedError(
+            f'There is no ddim discretization method called "{ddim_discr_method}"'
+        )
+
+    # assert ddim_timesteps.shape[0] == num_ddim_timesteps
+    # add one to get the final alpha values right (the ones from first scale to data during sampling)
+    steps_out = ddim_timesteps + 1
+    if verbose:
+        print(f"Selected timesteps for ddim sampler: {steps_out}")
+    return steps_out
+
+
+def make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True):
+    # select alphas for computing the variance schedule
+    alphas = alphacums[ddim_timesteps]
+    alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist())
+
+    # according to the formula provided in https://arxiv.org/abs/2010.02502
+    sigmas = eta * np.sqrt(
+        (1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev)
+    )
+    if verbose:
+        print(
+            f"Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}"
+        )
+        print(
+            f"For the chosen value of eta, which is {eta}, "
+            f"this results in the following sigma_t schedule for ddim sampler {sigmas}"
+        )
+    return sigmas, alphas, alphas_prev
+
+
+def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999):
+    """
+    Create a beta schedule that discretizes the given alpha_t_bar function,
+    which defines the cumulative product of (1-beta) over time from t = [0,1].
+    :param num_diffusion_timesteps: the number of betas to produce.
+    :param alpha_bar: a lambda that takes an argument t from 0 to 1 and
+                      produces the cumulative product of (1-beta) up to that
+                      part of the diffusion process.
+    :param max_beta: the maximum beta to use; use values lower than 1 to
+                     prevent singularities.
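+    Example (sketch): the improved-DDPM cosine schedule corresponds to
+        alpha_bar = lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2
+        betas = betas_for_alpha_bar(1000, alpha_bar)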
+ """ + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return np.array(betas) + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t).contiguous() + return out.reshape(b, *((1,) * (len(x_shape) - 1))).contiguous() + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_tensors = list(args[:length]) + ctx.input_params = list(args[length:]) + + with torch.no_grad(): + output_tensors = ctx.run_function(*ctx.input_tensors) + return output_tensors + + @staticmethod + def backward(ctx, *output_grads): + ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] + with torch.enable_grad(): + # Fixes a bug where the first op in run_function modifies the + # Tensor storage in place, which is not allowed for detach()'d + # Tensors. + shallow_copies = [x.view_as(x) for x in ctx.input_tensors] + output_tensors = ctx.run_function(*shallow_copies) + input_grads = torch.autograd.grad( + output_tensors, + ctx.input_tensors + ctx.input_params, + output_grads, + allow_unused=True, + ) + del ctx.input_tensors + del ctx.input_params + del output_tensors + return (None, None) + input_grads + + +def timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False): + """ + Create sinusoidal timestep embeddings. + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + if not repeat_only: + half = dim // 2 + freqs = torch.exp( + -math.log(max_period) + * torch.arange(start=0, end=half, dtype=torch.float32) + / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1) + if dim % 2: + embedding = torch.cat( + [embedding, torch.zeros_like(embedding[:, :1])], dim=-1 + ) + else: + embedding = repeat(timesteps, "b -> b d", d=dim) + return embedding + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def normalization(channels): + """ + Make a standard normalization layer. + :param channels: number of input channels. + :return: an nn.Module for normalization. 
+ """ + return GroupNorm32(32, channels) + + +# PyTorch 1.7 has SiLU, but we support PyTorch 1.5. +class SiLU(nn.Module): + def forward(self, x): + return x * torch.sigmoid(x) + + +class GroupNorm32(nn.GroupNorm): + def forward(self, x): + return super().forward(x.float()).type(x.dtype) + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. + """ + return nn.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +# class HybridConditioner(nn.Module): +# def __init__(self, c_concat_config, c_crossattn_config): +# super().__init__() +# self.concat_conditioner = instantiate_from_config(c_concat_config) +# self.crossattn_conditioner = instantiate_from_config(c_crossattn_config) + +# def forward(self, c_concat, c_crossattn): +# c_concat = self.concat_conditioner(c_concat) +# c_crossattn = self.crossattn_conditioner(c_crossattn) +# return {"c_concat": [c_concat], "c_crossattn": [c_crossattn]} + + +def noise_like(shape, device, repeat=False): + repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat( + shape[0], *((1,) * (len(shape) - 1)) + ) + noise = lambda: torch.randn(shape, device=device) + return repeat_noise() if repeat else noise() diff --git a/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__init__.py b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/__init__.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4ba458cc246817467fa358f2ee87a4fe4c3f3050 Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/__init__.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/distributions.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/distributions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a22893b851be7d83b97636a99e802ad12aa62393 Binary files /dev/null and b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/distributions.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/modules.cpython-38.pyc b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/modules.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0df05370dd13e1f697f255b7ab35027fe54ccb1a Binary files /dev/null and 
b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/__pycache__/modules.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/distributions.py b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..c1ad1ddc5f899a2d7adfd58d1d20782c4f4d5318 --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/distributions.py @@ -0,0 +1,103 @@ +import torch +import numpy as np + + +class AbstractDistribution: + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + +class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to( + device=self.parameters.device + ) + + def sample(self): + + x = self.mean + self.std * torch.randn(self.mean.shape).to( + device=self.parameters.device + ) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.0]) + else: + if other is None: + return 0.5 * torch.mean( + torch.pow(self.mean, 2) + self.var - 1.0 - self.logvar, + dim=[1, 2, 3], + ) + else: + return 0.5 * torch.mean( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var + - 1.0 + - self.logvar + + other.logvar, + dim=[1, 2, 3], + ) + + def nll(self, sample, dims=[1, 2, 3]): + if self.deterministic: + return torch.Tensor([0.0]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims, + ) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). 
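+    # Sanity check (illustrative): identical Gaussians give zero KL elementwise,
+    #   e.g. normal_kl(torch.zeros(3), torch.zeros(3), 0.0, 0.0) -> tensor([0., 0., 0.])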
+ logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) diff --git a/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/modules.py b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/modules.py new file mode 100644 index 0000000000000000000000000000000000000000..af15b1fc2f6fcd238aea9b693c099ff4fef1204b --- /dev/null +++ b/core/models/latent_diffusion/vae/audioldm_modules/variational_autoencoder/modules.py @@ -0,0 +1,1073 @@ +# pytorch_diffusion + derived encoder decoder +import math +import torch +import torch.nn as nn +import numpy as np +from einops import rearrange + +from ..latent_diffusion.attention import LinearAttention + + +def get_timestep_embedding(timesteps, embedding_dim): + """ + This matches the implementation in Denoising Diffusion Probabilistic Models: + From Fairseq. + Build sinusoidal embeddings. + This matches the implementation in tensor2tensor, but differs slightly + from the description in Section 3.5 of "Attention Is All You Need". + """ + assert len(timesteps.shape) == 1 + + half_dim = embedding_dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb) + emb = emb.to(device=timesteps.device) + emb = timesteps.float()[:, None] * emb[None, :] + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) + if embedding_dim % 2 == 1: # zero pad + emb = torch.nn.functional.pad(emb, (0, 1, 0, 0)) + return emb + + +def nonlinearity(x): + # swish + return x * torch.sigmoid(x) + + +def Normalize(in_channels, num_groups=32): + return torch.nn.GroupNorm( + num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True + ) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") + if self.with_conv: + x = self.conv(x) + return x + + +class UpsampleTimeStride4(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=5, stride=1, padding=2 + ) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=(4.0, 2.0), mode="nearest") + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # Do time downsampling here + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=3, stride=2, padding=0 + ) + + def forward(self, x): + if self.with_conv: + pad = (0, 1, 0, 1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class DownsampleTimeStride4(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # Do time downsampling here + # no asymmetric padding in torch conv, must do it ourselves + self.conv = 
torch.nn.Conv2d( + in_channels, in_channels, kernel_size=5, stride=(4, 2), padding=1 + ) + + def forward(self, x): + if self.with_conv: + pad = (0, 1, 0, 1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=(4, 2), stride=(4, 2)) + return x + + +class ResnetBlock(nn.Module): + def __init__( + self, + *, + in_channels, + out_channels=None, + conv_shortcut=False, + dropout, + temb_channels=512, + ): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + if temb_channels > 0: + self.temb_proj = torch.nn.Linear(temb_channels, out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d( + out_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + else: + self.nin_shortcut = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:, :, None, None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x + h + + +class LinAttnBlock(LinearAttention): + """to match AttnBlock usage""" + + def __init__(self, in_channels): + super().__init__(dim=in_channels, heads=1, dim_head=in_channels) + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.k = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.v = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + self.proj_out = torch.nn.Conv2d( + in_channels, in_channels, kernel_size=1, stride=1, padding=0 + ) + + def forward(self, x): + dtype = x.dtype + h_ = x + + h_ = self.norm(h_) + + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + # compute attention + b, c, h, w = q.shape + q = q.reshape(b, c, h * w).contiguous() + q = q.permute(0, 2, 1).contiguous() # b,hw,c + k = k.reshape(b, c, h * w).contiguous() # b,c,hw + w_ = torch.bmm(q, k).contiguous().to(dtype) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + w_ = w_ * (int(c) ** (-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + # attend to values + v = v.reshape(b, c, h * w).contiguous() + w_ = w_.permute(0, 2, 1).contiguous() # b,hw,hw (first hw of k, second of q) + h_ = torch.bmm( + v, w_ + ).contiguous() # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = h_.reshape(b, c, h, w).contiguous() + h_ = self.proj_out(h_) + + return x + h_ + + +def make_attn(in_channels, attn_type="vanilla"): + assert attn_type in ["vanilla", "linear", "none"], f"attn_type {attn_type} unknown" + # print(f"making attention of type 
'{attn_type}' with {in_channels} in_channels") + if attn_type == "vanilla": + return AttnBlock(in_channels) + elif attn_type == "none": + return nn.Identity(in_channels) + else: + return LinAttnBlock(in_channels) + + +class Model(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + use_timestep=True, + use_linear_attn=False, + attn_type="vanilla", + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = self.ch * 4 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + self.use_timestep = use_timestep + if self.use_timestep: + # timestep embedding + self.temb = nn.Module() + self.temb.dense = nn.ModuleList( + [ + torch.nn.Linear(self.ch, self.temb_ch), + torch.nn.Linear(self.temb_ch, self.temb_ch), + ] + ) + + # downsampling + self.conv_in = torch.nn.Conv2d( + in_channels, self.ch, kernel_size=3, stride=1, padding=1 + ) + + curr_res = resolution + in_ch_mult = (1,) + tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch * in_ch_mult[i_level] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions - 1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + skip_in = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + if i_block == self.num_res_blocks: + skip_in = ch * in_ch_mult[i_level] + block.append( + ResnetBlock( + in_channels=block_in + skip_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, out_ch, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x, t=None, context=None): + # assert x.shape[2] == x.shape[3] == self.resolution + if context is not None: + # assume aligned context, cat along channel axis + x = torch.cat((x, context), dim=1) + if self.use_timestep: + # timestep embedding + assert t is not None + temb = get_timestep_embedding(t, self.ch) + temb = 
self.temb.dense[0](temb) + temb = nonlinearity(temb) + temb = self.temb.dense[1](temb) + else: + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions - 1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block]( + torch.cat([h, hs.pop()], dim=1), temb + ) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + def get_last_layer(self): + return self.conv_out.weight + + +class Encoder(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + z_channels, + double_z=True, + use_linear_attn=False, + attn_type="vanilla", + downsample_time_stride4_levels=[], + **ignore_kwargs, + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.downsample_time_stride4_levels = downsample_time_stride4_levels + + if len(self.downsample_time_stride4_levels) > 0: + assert max(self.downsample_time_stride4_levels) < self.num_resolutions, ( + "The level to perform downsample 4 operation need to be smaller than the total resolution number %s" + % str(self.num_resolutions) + ) + + # downsampling + self.conv_in = torch.nn.Conv2d( + in_channels, self.ch, kernel_size=3, stride=1, padding=1 + ) + + curr_res = resolution + in_ch_mult = (1,) + tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch * in_ch_mult[i_level] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions - 1: + if i_level in self.downsample_time_stride4_levels: + down.downsample = DownsampleTimeStride4(block_in, resamp_with_conv) + else: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, + 2 * z_channels if 
double_z else z_channels, + kernel_size=3, + stride=1, + padding=1, + ) + + self.swapped_to_float = False + + def forward(self, x): + dtype = x.dtype + # timestep embedding + temb = None + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions - 1: + hs.append(self.down[i_level].downsample(hs[-1])) + + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + if not self.swapped_to_float: + self.mid.attn_1 = self.mid.attn_1.float() + self.swapped_to_float = True + h = self.mid.attn_1(h.float()).to(dtype) + h = self.mid.block_2(h, temb) + # end + h = self.norm_out(h) + + h = nonlinearity(h) + + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__( + self, + *, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + in_channels, + resolution, + z_channels, + give_pre_end=False, + tanh_out=False, + use_linear_attn=False, + downsample_time_stride4_levels=[], + attn_type="vanilla", + **ignorekwargs, + ): + super().__init__() + if use_linear_attn: + attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + self.downsample_time_stride4_levels = downsample_time_stride4_levels + + if len(self.downsample_time_stride4_levels) > 0: + assert max(self.downsample_time_stride4_levels) < self.num_resolutions, ( + "The level to perform downsample 4 operation need to be smaller than the total resolution number %s" + % str(self.num_resolutions) + ) + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,) + tuple(ch_mult) + block_in = ch * ch_mult[self.num_resolutions - 1] + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.z_shape = (1, z_channels, curr_res, curr_res) + # print("Working with z of shape {} = {} dimensions.".format( + # self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = torch.nn.Conv2d( + z_channels, block_in, kernel_size=3, stride=1, padding=1 + ) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout, + ) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + if i_level - 1 in self.downsample_time_stride4_levels: + up.upsample = UpsampleTimeStride4(block_in, resamp_with_conv) + else: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, 
up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, out_ch, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, z): + # assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + if self.tanh_out: + h = torch.tanh(h) + return h + + +class SimpleDecoder(nn.Module): + def __init__(self, in_channels, out_channels, *args, **kwargs): + super().__init__() + self.model = nn.ModuleList( + [ + nn.Conv2d(in_channels, in_channels, 1), + ResnetBlock( + in_channels=in_channels, + out_channels=2 * in_channels, + temb_channels=0, + dropout=0.0, + ), + ResnetBlock( + in_channels=2 * in_channels, + out_channels=4 * in_channels, + temb_channels=0, + dropout=0.0, + ), + ResnetBlock( + in_channels=4 * in_channels, + out_channels=2 * in_channels, + temb_channels=0, + dropout=0.0, + ), + nn.Conv2d(2 * in_channels, in_channels, 1), + Upsample(in_channels, with_conv=True), + ] + ) + # end + self.norm_out = Normalize(in_channels) + self.conv_out = torch.nn.Conv2d( + in_channels, out_channels, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x): + for i, layer in enumerate(self.model): + if i in [1, 2, 3]: + x = layer(x, None) + else: + x = layer(x) + + h = self.norm_out(x) + h = nonlinearity(h) + x = self.conv_out(h) + return x + + +class UpsampleDecoder(nn.Module): + def __init__( + self, + in_channels, + out_channels, + ch, + num_res_blocks, + resolution, + ch_mult=(2, 2), + dropout=0.0, + ): + super().__init__() + # upsampling + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + block_in = in_channels + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.res_blocks = nn.ModuleList() + self.upsample_blocks = nn.ModuleList() + for i_level in range(self.num_resolutions): + res_block = [] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + res_block.append( + ResnetBlock( + in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout, + ) + ) + block_in = block_out + self.res_blocks.append(nn.ModuleList(res_block)) + if i_level != self.num_resolutions - 1: + self.upsample_blocks.append(Upsample(block_in, True)) + curr_res = curr_res * 2 + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, out_channels, kernel_size=3, stride=1, padding=1 + ) + + def forward(self, x): + # upsampling + h = x + for k, i_level in enumerate(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.res_blocks[i_level][i_block](h, None) + if i_level != self.num_resolutions - 1: + h = self.upsample_blocks[k](h) + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class LatentRescaler(nn.Module): + def __init__(self, factor, in_channels, mid_channels, out_channels, depth=2): + super().__init__() + # residual block, 
interpolate, residual block + self.factor = factor + self.conv_in = nn.Conv2d( + in_channels, mid_channels, kernel_size=3, stride=1, padding=1 + ) + self.res_block1 = nn.ModuleList( + [ + ResnetBlock( + in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0, + ) + for _ in range(depth) + ] + ) + self.attn = AttnBlock(mid_channels) + self.res_block2 = nn.ModuleList( + [ + ResnetBlock( + in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0, + ) + for _ in range(depth) + ] + ) + + self.conv_out = nn.Conv2d( + mid_channels, + out_channels, + kernel_size=1, + ) + + def forward(self, x): + x = self.conv_in(x) + for block in self.res_block1: + x = block(x, None) + x = torch.nn.functional.interpolate( + x, + size=( + int(round(x.shape[2] * self.factor)), + int(round(x.shape[3] * self.factor)), + ), + ) + x = self.attn(x).contiguous() + for block in self.res_block2: + x = block(x, None) + x = self.conv_out(x) + return x + + +class MergedRescaleEncoder(nn.Module): + def __init__( + self, + in_channels, + ch, + resolution, + out_ch, + num_res_blocks, + attn_resolutions, + dropout=0.0, + resamp_with_conv=True, + ch_mult=(1, 2, 4, 8), + rescale_factor=1.0, + rescale_module_depth=1, + ): + super().__init__() + intermediate_chn = ch * ch_mult[-1] + self.encoder = Encoder( + in_channels=in_channels, + num_res_blocks=num_res_blocks, + ch=ch, + ch_mult=ch_mult, + z_channels=intermediate_chn, + double_z=False, + resolution=resolution, + attn_resolutions=attn_resolutions, + dropout=dropout, + resamp_with_conv=resamp_with_conv, + out_ch=None, + ) + self.rescaler = LatentRescaler( + factor=rescale_factor, + in_channels=intermediate_chn, + mid_channels=intermediate_chn, + out_channels=out_ch, + depth=rescale_module_depth, + ) + + def forward(self, x): + x = self.encoder(x) + x = self.rescaler(x) + return x + + +class MergedRescaleDecoder(nn.Module): + def __init__( + self, + z_channels, + out_ch, + resolution, + num_res_blocks, + attn_resolutions, + ch, + ch_mult=(1, 2, 4, 8), + dropout=0.0, + resamp_with_conv=True, + rescale_factor=1.0, + rescale_module_depth=1, + ): + super().__init__() + tmp_chn = z_channels * ch_mult[-1] + self.decoder = Decoder( + out_ch=out_ch, + z_channels=tmp_chn, + attn_resolutions=attn_resolutions, + dropout=dropout, + resamp_with_conv=resamp_with_conv, + in_channels=None, + num_res_blocks=num_res_blocks, + ch_mult=ch_mult, + resolution=resolution, + ch=ch, + ) + self.rescaler = LatentRescaler( + factor=rescale_factor, + in_channels=z_channels, + mid_channels=tmp_chn, + out_channels=tmp_chn, + depth=rescale_module_depth, + ) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Upsampler(nn.Module): + def __init__(self, in_size, out_size, in_channels, out_channels, ch_mult=2): + super().__init__() + assert out_size >= in_size + num_blocks = int(np.log2(out_size // in_size)) + 1 + factor_up = 1.0 + (out_size % in_size) + print( + f"Building {self.__class__.__name__} with in_size: {in_size} --> out_size {out_size} and factor {factor_up}" + ) + self.rescaler = LatentRescaler( + factor=factor_up, + in_channels=in_channels, + mid_channels=2 * in_channels, + out_channels=in_channels, + ) + self.decoder = Decoder( + out_ch=out_channels, + resolution=out_size, + z_channels=in_channels, + num_res_blocks=2, + attn_resolutions=[], + in_channels=None, + ch=in_channels, + ch_mult=[ch_mult for _ in range(num_blocks)], + ) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + 
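        # (sketch) the LatentRescaler above absorbed any fractional scale
+        # factor; the Decoder's stacked ch_mult levels supply the remaining
+        # power-of-two upsampling that reaches out_size.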
+        return x
+
+
+class Resize(nn.Module):
+    def __init__(self, in_channels=None, learned=False, mode="bilinear"):
+        super().__init__()
+        self.with_conv = learned
+        self.mode = mode
+        if self.with_conv:
+            print(
+                f"Note: {self.__class__.__name__} uses learned downsampling and will ignore the fixed {mode} mode"
+            )
+            raise NotImplementedError()
+            assert in_channels is not None
+            # no asymmetric padding in torch conv, must do it ourselves
+            self.conv = torch.nn.Conv2d(
+                in_channels, in_channels, kernel_size=4, stride=2, padding=1
+            )
+
+    def forward(self, x, scale_factor=1.0):
+        if scale_factor == 1.0:
+            return x
+        else:
+            x = torch.nn.functional.interpolate(
+                x, mode=self.mode, align_corners=False, scale_factor=scale_factor
+            )
+        return x
+
+
+class FirstStagePostProcessor(nn.Module):
+    def __init__(
+        self,
+        ch_mult: list,
+        in_channels,
+        pretrained_model: nn.Module = None,
+        reshape=False,
+        n_channels=None,
+        dropout=0.0,
+        pretrained_config=None,
+    ):
+        super().__init__()
+        if pretrained_config is None:
+            assert (
+                pretrained_model is not None
+            ), 'Either "pretrained_model" or "pretrained_config" must not be None'
+            self.pretrained_model = pretrained_model
+        else:
+            assert (
+                pretrained_config is not None
+            ), 'Either "pretrained_model" or "pretrained_config" must not be None'
+            # NOTE: instantiate_pretrained() is commented out below, so this
+            # branch requires re-enabling it before use.
+            self.instantiate_pretrained(pretrained_config)
+
+        self.do_reshape = reshape
+
+        if n_channels is None:
+            n_channels = self.pretrained_model.encoder.ch
+
+        self.proj_norm = Normalize(in_channels, num_groups=in_channels // 2)
+        self.proj = nn.Conv2d(
+            in_channels, n_channels, kernel_size=3, stride=1, padding=1
+        )
+
+        blocks = []
+        downs = []
+        ch_in = n_channels
+        for m in ch_mult:
+            blocks.append(
+                ResnetBlock(
+                    in_channels=ch_in, out_channels=m * n_channels, dropout=dropout
+                )
+            )
+            ch_in = m * n_channels
+            downs.append(Downsample(ch_in, with_conv=False))
+
+        self.model = nn.ModuleList(blocks)
+        self.downsampler = nn.ModuleList(downs)
+
+#     def instantiate_pretrained(self, config):
+#         model = instantiate_from_config(config)
+#         self.pretrained_model = model.eval()
+#         # self.pretrained_model.train = False
+#         for param in self.pretrained_model.parameters():
+#             param.requires_grad = False
+
+    @torch.no_grad()
+    def encode_with_pretrained(self, x):
+        # fix: DiagonalGaussianDistribution was referenced without being
+        # imported at module level; import it from the sibling module here.
+        from .distributions import DiagonalGaussianDistribution
+
+        c = self.pretrained_model.encode(x)
+        if isinstance(c, DiagonalGaussianDistribution):
+            c = c.mode()
+        return c
+
+    def forward(self, x):
+        z_fs = self.encode_with_pretrained(x)
+        z = self.proj_norm(z_fs)
+        z = self.proj(z)
+        z = nonlinearity(z)
+
+        for submodel, downmodel in zip(self.model, self.downsampler):
+            z = submodel(z, temb=None)
+            z = downmodel(z)
+
+        if self.do_reshape:
+            z = rearrange(z, "b c h w -> b (h w) c")
+        return z
diff --git a/core/models/latent_diffusion/vae/autokl.py b/core/models/latent_diffusion/vae/autokl.py
new file mode 100644
index 0000000000000000000000000000000000000000..213823c122ca646a97eb8a56ee739cabcc581545
--- /dev/null
+++ b/core/models/latent_diffusion/vae/autokl.py
@@ -0,0 +1,179 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from contextlib import contextmanager
+from einops import rearrange
+
+from core.models.common.get_model import get_model, register
+
+from .autokl_modules.diffusion_modules import Encoder, Decoder
+from .autokl_modules.distributions import DiagonalGaussianDistribution
+
+
+@register('autoencoderkl')
+class AutoencoderKL(nn.Module):
+    def __init__(self,
+                 ddconfig,
+                 lossconfig,
+                 embed_dim,
+                 ckpt_path=None,
+                 ignore_keys=[],
+                 image_key="image",
+                 colorize_nlabels=None, +
monitor=None,): + super().__init__() + self.image_key = image_key + self.encoder = Encoder(**ddconfig) + self.decoder = Decoder(**ddconfig) + assert ddconfig["double_z"] + self.quant_conv = torch.nn.Conv2d(2*ddconfig["z_channels"], 2*embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig["z_channels"], 1) + self.embed_dim = embed_dim + if colorize_nlabels is not None: + assert type(colorize_nlabels)==int + self.register_buffer("colorize", torch.randn(3, colorize_nlabels, 1, 1)) + if monitor is not None: + self.monitor = monitor + + def encode(self, x): + if x.ndim == 5: + is_video = True + num_frames = x.shape[2] + x = rearrange(x, 'b c f h w -> (b f) c h w') + else: + is_video = False + if x.shape[1] == 1: + x = torch.cat([x, x, x], dim=1) + + h = self.encoder(x) + moments = self.quant_conv(h) + if is_video: + moments = rearrange(moments, '(b f) c h w -> b c f h w', f=num_frames) + posterior = DiagonalGaussianDistribution(moments) + return posterior + + def decode(self, z): + if z.ndim == 5: + is_video = True + num_frames = z.shape[2] + z = rearrange(z, 'b c f h w -> (b f) c h w') + else: + is_video = False + num_frames = 1 + + z = self.post_quant_conv(z) + dec = self.decoder(z, num_frames=num_frames) + + if is_video: + dec = rearrange(dec, '(b f) c h w -> b c f h w', f=num_frames) + return dec + + def forward(self, input, sample_posterior=True): + posterior = self.encode(input) + if sample_posterior: + z = posterior.sample().to(input.dtype) + else: + z = posterior.mode().to(input.dtype) + dec = self.decode(z) + return dec, posterior + + def get_input(self, batch, k): + x = batch[k] + if len(x.shape) == 3: + x = x[..., None] + x = x.permute(0, 3, 1, 2).to(memory_format=torch.contiguous_format) + return x + + def training_step(self, batch, batch_idx, optimizer_idx): + inputs = self.get_input(batch, self.image_key) + reconstructions, posterior = self(inputs) + + if optimizer_idx == 0: + # train encoder+decoder+logvar + aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step, + last_layer=self.get_last_layer(), split="train") + self.log("aeloss", aeloss, prog_bar=True, logger=True, on_step=True, on_epoch=True) + self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=False) + return aeloss + + if optimizer_idx == 1: + # train the discriminator + discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step, + last_layer=self.get_last_layer(), split="train") + + self.log("discloss", discloss, prog_bar=True, logger=True, on_step=True, on_epoch=True) + self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=False) + return discloss + + def validation_step(self, batch, batch_idx): + inputs = self.get_input(batch, self.image_key) + reconstructions, posterior = self(inputs) + aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, 0, self.global_step, + last_layer=self.get_last_layer(), split="val") + + discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, 1, self.global_step, + last_layer=self.get_last_layer(), split="val") + + self.log("val/rec_loss", log_dict_ae["val/rec_loss"]) + self.log_dict(log_dict_ae) + self.log_dict(log_dict_disc) + return self.log_dict + + def configure_optimizers(self): + lr = self.learning_rate + opt_ae = torch.optim.Adam(list(self.encoder.parameters())+ + list(self.decoder.parameters())+ + list(self.quant_conv.parameters())+ + list(self.post_quant_conv.parameters()), + lr=lr, 
betas=(0.5, 0.9)) + opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(), + lr=lr, betas=(0.5, 0.9)) + return [opt_ae, opt_disc], [] + + def get_last_layer(self): + return self.decoder.conv_out.weight + + @torch.no_grad() + def log_images(self, batch, only_inputs=False, **kwargs): + log = dict() + x = self.get_input(batch, self.image_key) + x = x.to(self.device) + if not only_inputs: + xrec, posterior = self(x) + if x.shape[1] > 3: + # colorize with random projection + assert xrec.shape[1] > 3 + x = self.to_rgb(x) + xrec = self.to_rgb(xrec) + log["samples"] = self.decode(torch.randn_like(posterior.sample())) + log["reconstructions"] = xrec + log["inputs"] = x + return log + + def to_rgb(self, x): + assert self.image_key == "segmentation" + if not hasattr(self, "colorize"): + self.register_buffer("colorize", torch.randn(3, x.shape[1], 1, 1).to(x)) + x = F.conv2d(x, weight=self.colorize) + x = 2.*(x-x.min())/(x.max()-x.min()) - 1. + return x + + +class IdentityFirstStage(nn.Module): + def __init__(self, *args, vq_interface=False, **kwargs): + self.vq_interface = vq_interface # TODO: Should be true by default but check to not break older stuff + super().__init__() + + def encode(self, x, *args, **kwargs): + return x + + def decode(self, x, *args, **kwargs): + return x + + def quantize(self, x, *args, **kwargs): + if self.vq_interface: + return x, None, [None, None, None] + return x + + def forward(self, x, *args, **kwargs): + return x diff --git a/core/models/latent_diffusion/vae/autokl_modules/__pycache__/attention.cpython-38.pyc b/core/models/latent_diffusion/vae/autokl_modules/__pycache__/attention.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..78c935720f2aad4552c38961121af3a884975d8a Binary files /dev/null and b/core/models/latent_diffusion/vae/autokl_modules/__pycache__/attention.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/autokl_modules/__pycache__/diffusion_modules.cpython-38.pyc b/core/models/latent_diffusion/vae/autokl_modules/__pycache__/diffusion_modules.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cfb82d6d76d0087603085d0ecb30d51025b4bd77 Binary files /dev/null and b/core/models/latent_diffusion/vae/autokl_modules/__pycache__/diffusion_modules.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/autokl_modules/__pycache__/distributions.cpython-38.pyc b/core/models/latent_diffusion/vae/autokl_modules/__pycache__/distributions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1a2026105812ebaffb7d23e0ac9e3ab3c21bc4f Binary files /dev/null and b/core/models/latent_diffusion/vae/autokl_modules/__pycache__/distributions.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/autokl_modules/attention.py b/core/models/latent_diffusion/vae/autokl_modules/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..86f30ffff56ef07635b7cdf342b9fe6dcbed59d6 --- /dev/null +++ b/core/models/latent_diffusion/vae/autokl_modules/attention.py @@ -0,0 +1,21 @@ +import torch +from torch import nn, einsum +from einops import rearrange + +class LinearAttention(nn.Module): + def __init__(self, dim, heads=4, dim_head=32): + super().__init__() + self.heads = heads + hidden_dim = dim_head * heads + self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias = False) + self.to_out = nn.Conv2d(hidden_dim, dim, 1) + + def forward(self, x): + b, c, h, w = x.shape + qkv = self.to_qkv(x) + q, k, v = rearrange(qkv, 'b (qkv heads 
c) h w -> qkv b heads c (h w)', heads = self.heads, qkv=3) + k = k.softmax(dim=-1) + context = torch.einsum('bhdn,bhen->bhde', k, v) + out = torch.einsum('bhde,bhdn->bhen', context, q) + out = rearrange(out, 'b heads c (h w) -> b (heads c) h w', heads=self.heads, h=h, w=w) + return self.to_out(out) \ No newline at end of file diff --git a/core/models/latent_diffusion/vae/autokl_modules/diffusion_modules.py b/core/models/latent_diffusion/vae/autokl_modules/diffusion_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..2051c43b46ba02e39c8618017fd9123209e0fc9a --- /dev/null +++ b/core/models/latent_diffusion/vae/autokl_modules/diffusion_modules.py @@ -0,0 +1,870 @@ +# pytorch_diffusion + derived encoder decoder +import math +import torch +import torch.nn as nn +import numpy as np +from einops import rearrange + +from .attention import LinearAttention + +from ...modules_video import SpatioTemporalAttention + +def get_timestep_embedding(timesteps, embedding_dim): + """ + This matches the implementation in Denoising Diffusion Probabilistic Models: + From Fairseq. + Build sinusoidal embeddings. + This matches the implementation in tensor2tensor, but differs slightly + from the description in Section 3.5 of "Attention Is All You Need". + """ + assert len(timesteps.shape) == 1 + + half_dim = embedding_dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim) * -emb) + emb = emb.to(device=timesteps.device) + emb = timesteps.to(emb.dtype)[:, None] * emb[None, :] + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) + if embedding_dim % 2 == 1: # zero pad + emb = torch.nn.functional.pad(emb, (0,1,0,0)) + return emb + + +def nonlinearity(x): + # swish + return x*torch.sigmoid(x) + + +def Normalize(in_channels, num_groups=32): + return torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True) + + +class UpsampleDeterministic(nn.Module): + def __init__(self, upscale=2): + super(UpsampleDeterministic, self).__init__() + self.upscale = upscale + + def forward(self, x): + return x[:,:,:, None, :, None]\ + .expand(-1,-1,-1, self.upscale, -1 , self.upscale)\ + .reshape(x. 
size(0), x.size(1), x.size(2)*self.upscale, x.size(3)*self.upscale) + + +class Upsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=1, + padding=1) + self.upsample = UpsampleDeterministic(2) + def forward(self, x): + x = self.upsample(x) + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=3, + stride=2, + padding=0) + + def forward(self, x): + if self.with_conv: + pad = (0,1,0,1) + x = torch.nn.functional.pad(x, pad, mode="constant", value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False, + dropout, temb_channels=512): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if temb_channels > 0: + self.temb_proj = torch.nn.Linear(temb_channels, + out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d(out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + else: + self.nin_shortcut = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:,:,None,None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + x = self.conv_shortcut(x) + else: + x = self.nin_shortcut(x) + + return x+h + + +class LinAttnBlock(LinearAttention): + """to match AttnBlock usage""" + def __init__(self, in_channels): + super().__init__(dim=in_channels, heads=1, dim_head=in_channels) + + +class AttnBlock(nn.Module): + def __init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = Normalize(in_channels) + self.q = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.k = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.v = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + self.proj_out = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=1, + stride=1, + padding=0) + + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b,c,h,w = q.shape + q = q.reshape(b,c,h*w) + q = q.permute(0,2,1) # b,hw,c + k = k.reshape(b,c,h*w) # b,c,hw + w_ = torch.bmm(q,k) # b,hw,hw w[b,i,j]=sum_c q[b,i,c]k[b,c,j] + w_ = w_ * 
(int(c)**(-0.5)) + w_ = torch.nn.functional.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b,c,h*w) + w_ = w_.permute(0,2,1) # b,hw,hw (first hw of k, second of q) + h_ = torch.bmm(v,w_) # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j] + h_ = h_.reshape(b,c,h,w) + + h_ = self.proj_out(h_) + + return x+h_ + + +def make_attn(in_channels, attn_type="vanilla"): + assert attn_type in ["vanilla", "linear", "none"], f'attn_type {attn_type} unknown' + print(f"making attention of type '{attn_type}' with {in_channels} in_channels") + if attn_type == "vanilla": + return AttnBlock(in_channels) + elif attn_type == "none": + return nn.Identity(in_channels) + else: + return LinAttnBlock(in_channels) + + +class Model(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, attn_resolutions, + dropout=0.0, resamp_with_conv=True, in_channels, resolution, + use_timestep=True, use_linear_attn=False, use_video_arch=False, attn_type="vanilla"): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = self.ch*4 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + self.use_timestep = use_timestep + if self.use_timestep: + # timestep embedding + self.temb = nn.Module() + self.temb.dense = nn.ModuleList([ + torch.nn.Linear(self.ch, + self.temb_ch), + torch.nn.Linear(self.temb_ch, + self.temb_ch), + ]) + + # downsampling + self.conv_in = torch.nn.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + skip_in = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + if i_block == self.num_res_blocks: + skip_in = ch*in_ch_mult[i_level] + block.append(ResnetBlock(in_channels=block_in+skip_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + 
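# Added note (illustrative sketch, assuming resolution=256 and ch_mult=(1, 2, 4)): + # the down path halves the spatial grid once per non-final level (256 -> 128 -> 64), + # the mid blocks operate at the coarsest grid, and the up path mirrors the schedule + # back to 256; conv_out below projects the remaining block_in channels to out_ch. + 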
self.conv_out = torch.nn.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x, t=None, context=None): + #assert x.shape[2] == x.shape[3] == self.resolution + if context is not None: + # assume aligned context, cat along channel axis + x = torch.cat((x, context), dim=1) + if self.use_timestep: + # timestep embedding + assert t is not None + temb = get_timestep_embedding(t, self.ch).to(x.dtype) + temb = self.temb.dense[0](temb) + temb = nonlinearity(temb) + temb = self.temb.dense[1](temb) + else: + temb = None + + # downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block]( + torch.cat([h, hs.pop()], dim=1), temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + def get_last_layer(self): + return self.conv_out.weight + + +class Encoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, double_z=True, use_linear_attn=False, attn_type="vanilla", + **ignore_kwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + + # downsampling + self.conv_in = torch.nn.Conv2d(in_channels, + self.ch, + kernel_size=3, + stride=1, + padding=1) + + curr_res = resolution + in_ch_mult = (1,)+tuple(ch_mult) + self.in_ch_mult = in_ch_mult + self.down = nn.ModuleList() + for i_level in range(self.num_resolutions): + block = nn.ModuleList() + attn = nn.ModuleList() + block_in = ch*in_ch_mult[i_level] + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + down = nn.Module() + down.block = block + down.attn = attn + if i_level != self.num_resolutions-1: + down.downsample = Downsample(block_in, resamp_with_conv) + curr_res = curr_res // 2 + self.down.append(down) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + 2*z_channels if double_z else z_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + # timestep embedding + temb = None + + # 
downsampling + hs = [self.conv_in(x)] + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](hs[-1], temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + hs.append(h) + if i_level != self.num_resolutions-1: + hs.append(self.down[i_level].downsample(hs[-1])) + + # middle + h = hs[-1] + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class Decoder(nn.Module): + def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels, + resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False, + use_video_arch=False, attn_type="vanilla", heads=8, **ignorekwargs): + super().__init__() + if use_linear_attn: attn_type = "linear" + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.tanh_out = tanh_out + self.use_video_arch = use_video_arch + + # compute in_ch_mult, block_in and curr_res at lowest res + in_ch_mult = (1,)+tuple(ch_mult) + block_in = ch*ch_mult[self.num_resolutions-1] + curr_res = resolution // 2**(self.num_resolutions-1) + self.z_shape = (1,z_channels,curr_res,curr_res) + print("Working with z of shape {} = {} dimensions.".format( + self.z_shape, np.prod(self.z_shape))) + # z to block_in + self.conv_in = torch.nn.Conv2d(z_channels, + block_in, + kernel_size=3, + stride=1, + padding=1) + + # middle + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + if use_video_arch: + self.mid.video_att = SpatioTemporalAttention(dim=block_in, + dim_head=block_in//heads, + heads=heads, + use_frame_shift=True, + use_temp_att=False) + self.mid.attn_1 = make_attn(block_in, attn_type=attn_type) + self.mid.block_2 = ResnetBlock(in_channels=block_in, + out_channels=block_in, + temb_channels=self.temb_ch, + dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + video_attn = nn.ModuleList() + block_out = ch*ch_mult[i_level] + for i_block in range(self.num_res_blocks+1): + block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + if use_video_arch and i_level != 0: + video_attn.append(SpatioTemporalAttention(dim=block_in, + dim_head=block_in//heads, + heads=heads, + use_frame_shift=True, + use_temp_att=False)) + if curr_res in attn_resolutions: + attn.append(make_attn(block_in, attn_type=attn_type)) + up = nn.Module() + up.block = block + up.attn = attn + up.video_attn = video_attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + out_ch, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, z, num_frames=1): + #assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + h = self.mid.block_1(h, temb) + if 
self.use_video_arch and num_frames > 1: + h = rearrange(h, '(b t) c h w -> b c t h w', t=num_frames) + h = self.mid.video_att(h) + h = rearrange(h, 'b c t h w -> (b t) c h w') + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks+1): + h = self.up[i_level].block[i_block](h, temb) + + if self.use_video_arch and num_frames > 1 and i_level != 0: + h = rearrange(h, '(b t) c h w -> b c t h w', t=num_frames) + h = self.up[i_level].video_attn[i_block](h) + h = rearrange(h, 'b c t h w -> (b t) c h w') + + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + if self.tanh_out: + h = torch.tanh(h) + return h + + +class SimpleDecoder(nn.Module): + def __init__(self, in_channels, out_channels, *args, **kwargs): + super().__init__() + self.model = nn.ModuleList([nn.Conv2d(in_channels, in_channels, 1), + ResnetBlock(in_channels=in_channels, + out_channels=2 * in_channels, + temb_channels=0, dropout=0.0), + ResnetBlock(in_channels=2 * in_channels, + out_channels=4 * in_channels, + temb_channels=0, dropout=0.0), + ResnetBlock(in_channels=4 * in_channels, + out_channels=2 * in_channels, + temb_channels=0, dropout=0.0), + nn.Conv2d(2*in_channels, in_channels, 1), + Upsample(in_channels, with_conv=True)]) + # end + self.norm_out = Normalize(in_channels) + self.conv_out = torch.nn.Conv2d(in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + for i, layer in enumerate(self.model): + if i in [1,2,3]: + x = layer(x, None) + else: + x = layer(x) + + h = self.norm_out(x) + h = nonlinearity(h) + x = self.conv_out(h) + return x + + +class UpsampleDecoder(nn.Module): + def __init__(self, in_channels, out_channels, ch, num_res_blocks, resolution, + ch_mult=(2,2), dropout=0.0): + super().__init__() + # upsampling + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + block_in = in_channels + curr_res = resolution // 2 ** (self.num_resolutions - 1) + self.res_blocks = nn.ModuleList() + self.upsample_blocks = nn.ModuleList() + for i_level in range(self.num_resolutions): + res_block = [] + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + res_block.append(ResnetBlock(in_channels=block_in, + out_channels=block_out, + temb_channels=self.temb_ch, + dropout=dropout)) + block_in = block_out + self.res_blocks.append(nn.ModuleList(res_block)) + if i_level != self.num_resolutions - 1: + self.upsample_blocks.append(Upsample(block_in, True)) + curr_res = curr_res * 2 + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, + out_channels, + kernel_size=3, + stride=1, + padding=1) + + def forward(self, x): + # upsampling + h = x + for k, i_level in enumerate(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.res_blocks[i_level][i_block](h, None) + if i_level != self.num_resolutions - 1: + h = self.upsample_blocks[k](h) + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class LatentRescaler(nn.Module): + def __init__(self, factor, in_channels, mid_channels, out_channels, depth=2): + super().__init__() + # residual block, interpolate, residual block + self.factor = factor + self.conv_in = 
nn.Conv2d(in_channels, + mid_channels, + kernel_size=3, + stride=1, + padding=1) + self.res_block1 = nn.ModuleList([ResnetBlock(in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0) for _ in range(depth)]) + self.attn = AttnBlock(mid_channels) + self.res_block2 = nn.ModuleList([ResnetBlock(in_channels=mid_channels, + out_channels=mid_channels, + temb_channels=0, + dropout=0.0) for _ in range(depth)]) + + self.conv_out = nn.Conv2d(mid_channels, + out_channels, + kernel_size=1, + ) + + def forward(self, x): + x = self.conv_in(x) + for block in self.res_block1: + x = block(x, None) + x = torch.nn.functional.interpolate(x, size=(int(round(x.shape[2]*self.factor)), int(round(x.shape[3]*self.factor)))) + x = self.attn(x) + for block in self.res_block2: + x = block(x, None) + x = self.conv_out(x) + return x + + +class MergedRescaleEncoder(nn.Module): + def __init__(self, in_channels, ch, resolution, out_ch, num_res_blocks, + attn_resolutions, dropout=0.0, resamp_with_conv=True, + ch_mult=(1,2,4,8), rescale_factor=1.0, rescale_module_depth=1): + super().__init__() + intermediate_chn = ch * ch_mult[-1] + self.encoder = Encoder(in_channels=in_channels, num_res_blocks=num_res_blocks, ch=ch, ch_mult=ch_mult, + z_channels=intermediate_chn, double_z=False, resolution=resolution, + attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv, + out_ch=None) + self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=intermediate_chn, + mid_channels=intermediate_chn, out_channels=out_ch, depth=rescale_module_depth) + + def forward(self, x): + x = self.encoder(x) + x = self.rescaler(x) + return x + + +class MergedRescaleDecoder(nn.Module): + def __init__(self, z_channels, out_ch, resolution, num_res_blocks, attn_resolutions, ch, ch_mult=(1,2,4,8), + dropout=0.0, resamp_with_conv=True, rescale_factor=1.0, rescale_module_depth=1): + super().__init__() + tmp_chn = z_channels*ch_mult[-1] + self.decoder = Decoder(out_ch=out_ch, z_channels=tmp_chn, attn_resolutions=attn_resolutions, dropout=dropout, + resamp_with_conv=resamp_with_conv, in_channels=None, num_res_blocks=num_res_blocks, + ch_mult=ch_mult, resolution=resolution, ch=ch) + self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=z_channels, mid_channels=tmp_chn, + out_channels=tmp_chn, depth=rescale_module_depth) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Upsampler(nn.Module): + def __init__(self, in_size, out_size, in_channels, out_channels, ch_mult=2): + super().__init__() + assert out_size >= in_size + num_blocks = int(np.log2(out_size//in_size))+1 + factor_up = 1.+ (out_size % in_size) + print(f"Building {self.__class__.__name__} with in_size: {in_size} --> out_size {out_size} and factor {factor_up}") + self.rescaler = LatentRescaler(factor=factor_up, in_channels=in_channels, mid_channels=2*in_channels, + out_channels=in_channels) + self.decoder = Decoder(out_ch=out_channels, resolution=out_size, z_channels=in_channels, num_res_blocks=2, + attn_resolutions=[], in_channels=None, ch=in_channels, + ch_mult=[ch_mult for _ in range(num_blocks)]) + + def forward(self, x): + x = self.rescaler(x) + x = self.decoder(x) + return x + + +class Resize(nn.Module): + def __init__(self, in_channels=None, learned=False, mode="bilinear"): + super().__init__() + self.with_conv = learned + self.mode = mode + if self.with_conv: + print(f"Note: {self.__class__.__name__} uses learned downsampling and will ignore the fixed {mode} mode") + raise 
NotImplementedError() + assert in_channels is not None + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d(in_channels, + in_channels, + kernel_size=4, + stride=2, + padding=1) + + def forward(self, x, scale_factor=1.0): + if scale_factor==1.0: + return x + else: + x = torch.nn.functional.interpolate(x, mode=self.mode, align_corners=False, scale_factor=scale_factor) + return x + + +class FirstStagePostProcessor(nn.Module): + + def __init__(self, ch_mult:list, in_channels, + pretrained_model:nn.Module=None, + reshape=False, + n_channels=None, + dropout=0., + pretrained_config=None): + super().__init__() + if pretrained_config is None: + assert pretrained_model is not None, 'Either "pretrained_model" or "pretrained_config" must not be None' + self.pretrained_model = pretrained_model + else: + assert pretrained_config is not None, 'Either "pretrained_model" or "pretrained_config" must not be None' + self.instantiate_pretrained(pretrained_config) + + self.do_reshape = reshape + + if n_channels is None: + n_channels = self.pretrained_model.encoder.ch + + self.proj_norm = Normalize(in_channels,num_groups=in_channels//2) + self.proj = nn.Conv2d(in_channels,n_channels,kernel_size=3, + stride=1,padding=1) + + blocks = [] + downs = [] + ch_in = n_channels + for m in ch_mult: + blocks.append(ResnetBlock(in_channels=ch_in,out_channels=m*n_channels,dropout=dropout)) + ch_in = m * n_channels + downs.append(Downsample(ch_in, with_conv=False)) + + self.model = nn.ModuleList(blocks) + self.downsampler = nn.ModuleList(downs) + + def instantiate_pretrained(self, config): + model = instantiate_from_config(config) + self.pretrained_model = model.eval() + # self.pretrained_model.train = False + for param in self.pretrained_model.parameters(): + param.requires_grad = False + + @torch.no_grad() + def encode_with_pretrained(self,x): + c = self.pretrained_model.encode(x) + if isinstance(c, DiagonalGaussianDistribution): + c = c.mode() + return c + + def forward(self,x): + z_fs = self.encode_with_pretrained(x) + z = self.proj_norm(z_fs) + z = self.proj(z) + z = nonlinearity(z) + + for submodel, downmodel in zip(self.model,self.downsampler): + z = submodel(z,temb=None) + z = downmodel(z) + + if self.do_reshape: + z = rearrange(z,'b c h w -> b (h w) c') + return z + diff --git a/core/models/latent_diffusion/vae/autokl_modules/distributions.py b/core/models/latent_diffusion/vae/autokl_modules/distributions.py new file mode 100644 index 0000000000000000000000000000000000000000..f2b8ef901130efc171aa69742ca0244d94d3f2e9 --- /dev/null +++ b/core/models/latent_diffusion/vae/autokl_modules/distributions.py @@ -0,0 +1,92 @@ +import torch +import numpy as np + + +class AbstractDistribution: + def sample(self): + raise NotImplementedError() + + def mode(self): + raise NotImplementedError() + + +class DiracDistribution(AbstractDistribution): + def __init__(self, value): + self.value = value + + def sample(self): + return self.value + + def mode(self): + return self.value + + +class DiagonalGaussianDistribution(object): + def __init__(self, parameters, deterministic=False): + self.parameters = parameters + self.mean, self.logvar = torch.chunk(parameters, 2, dim=1) + self.logvar = torch.clamp(self.logvar, -30.0, 20.0) + self.deterministic = deterministic + self.std = torch.exp(0.5 * self.logvar) + self.var = torch.exp(self.logvar) + if self.deterministic: + self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device) + + def sample(self): + x = self.mean + self.std * 
torch.randn(self.mean.shape).to(device=self.parameters.device) + return x + + def kl(self, other=None): + if self.deterministic: + return torch.Tensor([0.]) + else: + if other is None: + return 0.5 * torch.sum(torch.pow(self.mean, 2) + + self.var - 1.0 - self.logvar, + dim=[1, 2, 3]) + else: + return 0.5 * torch.sum( + torch.pow(self.mean - other.mean, 2) / other.var + + self.var / other.var - 1.0 - self.logvar + other.logvar, + dim=[1, 2, 3]) + + def nll(self, sample, dims=[1,2,3]): + if self.deterministic: + return torch.Tensor([0.]) + logtwopi = np.log(2.0 * np.pi) + return 0.5 * torch.sum( + logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, + dim=dims) + + def mode(self): + return self.mean + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12 + Compute the KL divergence between two gaussians. + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, torch.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for torch.exp(). + logvar1, logvar2 = [ + x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + torch.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * torch.exp(-logvar2) + ) diff --git a/core/models/latent_diffusion/vae/optimus.py b/core/models/latent_diffusion/vae/optimus.py new file mode 100644 index 0000000000000000000000000000000000000000..1e1ee4cd0d776f2b5f7fbefabfe17ae732d8eb46 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus.py @@ -0,0 +1,745 @@ +import math  # used by loss_iw / nll_iw / eval_inference_dist below +import torch +import torch.nn as nn +import torch.nn.functional as F + +import numpy as np +import numpy.random as npr +import copy + +from ...common.get_model import get_model, register +from ...common import utils + +from .optimus_modules.tokenization_gpt2 import GPT2Tokenizer + +version = '0' +symbol = 'optimus' + + +@register('optimus_vae', version) +class optimus_vae(nn.Module): + """VAE with normal prior""" + def __init__(self, encoder, decoder, tokenizer_encoder, tokenizer_decoder, args): + super().__init__() + self.encoder = encoder if isinstance(encoder, nn.Module) else get_model()(encoder) + self.decoder = decoder if isinstance(decoder, nn.Module) else get_model()(decoder) + self.tokenizer_encoder = tokenizer_encoder \ + if isinstance(tokenizer_encoder, nn.Module) \ + else get_model()(tokenizer_encoder, verbose=False) + self.tokenizer_decoder = tokenizer_decoder \ + if isinstance(tokenizer_decoder, nn.Module) \ + else get_model()(tokenizer_decoder, verbose=False) + + gpt2_special_tokens_dict = {'pad_token': '<PAD>', 'bos_token': '<BOS>', 'eos_token': '<EOS>'} + if isinstance(self.tokenizer_encoder, GPT2Tokenizer): + self.tokenizer_encoder.add_special_tokens(gpt2_special_tokens_dict) + if isinstance(self.tokenizer_decoder, GPT2Tokenizer): + self.tokenizer_decoder.add_special_tokens(gpt2_special_tokens_dict) + + self.args = args + self.nz = args.latent_size + + self.eos_token_id = self.tokenizer_decoder.convert_tokens_to_ids( + [self.tokenizer_decoder.eos_token])[0] + self.pad_token_id = self.tokenizer_decoder.convert_tokens_to_ids( + [self.tokenizer_decoder.pad_token])[0] + + # 
connector: from Bert hidden units to the latent space + # self.linear = nn.Linear(args.nz, 2 * args.nz, bias=False) + + # Standard Normal prior + loc = torch.zeros(self.nz) + scale = torch.ones(self.nz) + self.prior = torch.distributions.normal.Normal(loc, scale) + + def connect(self, bert_fea, nsamples=1): + """ + Returns: Tensor1, Tensor2 + Tensor1: the tensor latent z with shape [batch, nsamples, nz] + Tensor2: the tensor of KL for each x with shape [batch] + """ + + # (batch_size, nz) + + mean, logvar = self.encoder.linear(bert_fea).chunk(2, -1) + # pdb.set_trace() + # mean, logvar = mean.squeeze(0), logvar.squeeze(0) + + # (batch, nsamples, nz) + z = self.reparameterize(mean, logvar, nsamples) + KL = 0.5 * (mean.pow(2) + logvar.exp() - logvar - 1).sum(dim=1) + + return z, KL + + def connect_deterministic(self, bert_fea, nsamples=1): + """ + Returns: Tensor1, Tensor2 + Tensor1: the tensor latent z with shape [batch, nsamples, nz] + Tensor2: the tensor of KL for each x with shape [batch] + """ + + # (batch_size, nz) + + mean, logvar = self.encoder.linear(bert_fea).chunk(2, -1) + # pdb.set_trace() + # mean, logvar = mean.squeeze(0), logvar.squeeze(0) + + logvar.fill_(.0) + # (batch, nsamples, nz) + z = self.reparameterize(mean, logvar, nsamples) + KL = 0.5 * (mean.pow(2) + logvar.exp() - logvar - 1).sum(dim=1) + + return z, KL + + def reparameterize(self, mu, logvar, nsamples=1): + """sample from posterior Gaussian family + Args: + mu: Tensor + Mean of gaussian distribution with shape (batch, nz) + logvar: Tensor + logvar of gaussian distribution with shape (batch, nz) + Returns: Tensor + Sampled z with shape (batch, nsamples, nz) + """ + batch_size, nz = mu.size() + std = logvar.mul(0.5).exp() + + mu_expd = mu.unsqueeze(1).expand(batch_size, nsamples, nz) + std_expd = std.unsqueeze(1).expand(batch_size, nsamples, nz) + + eps = torch.zeros_like(std_expd).normal_() + + return mu_expd + torch.mul(eps, std_expd) + + def forward(self, inputs, labels): + + # pdb.set_trace() + + attention_mask=(inputs > 0).float() + # logger.info(inputs) + # logger.info(attention_mask) + # logger.info(labels) + reconstruction_mask=(labels != 50257).float() # 50257 is the padding token for GPT2 + sent_length = torch.sum(reconstruction_mask, dim=1) + + outputs = self.encoder(inputs, attention_mask) + pooled_hidden_fea = outputs[1] # model outputs are always tuple in pytorch-transformers (see doc) + + if self.args.fb_mode==0: + # Connect hidden feature to the latent space + latent_z, loss_kl = self.connect(pooled_hidden_fea) + latent_z = latent_z.squeeze(1) + + # Decoding + outputs = self.decoder(input_ids=labels, past=latent_z, labels=labels, label_ignore=self.pad_token_id) + loss_rec = outputs[0] # model outputs are always tuple in pytorch-transformers (see doc) + + elif self.args.fb_mode==1: + # Connect hidden feature to the latent space + mu, logvar = self.encoder.linear(pooled_hidden_fea).chunk(2, -1) + latent_z = self.reparameterize(mu, logvar, nsamples=1) + latent_z = latent_z.squeeze(1) + loss_kl = 0.5 * (mu.pow(2) + logvar.exp() - logvar - 1) + kl_mask = (loss_kl > self.args.dim_target_kl).float() + loss_kl = (kl_mask * loss_kl).sum(dim=1) + + # pdb.set_trace() + # past = self.decoder.linear(latent_z) + # Decoding + outputs = self.decoder(input_ids=labels, past=latent_z, labels=labels, label_ignore=self.pad_token_id) + loss_rec = outputs[0] # model outputs are always tuple in pytorch-transformers (see doc) + + elif self.args.fb_mode==2: + # Connect hidden feature to the latent space + latent_z, loss_kl = 
self.connect_deterministic(pooled_hidden_fea) + latent_z = latent_z.squeeze(1) + + # past = self.decoder.linear(latent_z) + # Decoding + outputs = self.decoder(input_ids=labels, past=latent_z, labels=labels, label_ignore=self.pad_token_id) + loss_rec = outputs[0] # model outputs are always tuple in pytorch-transformers (see doc) + + # pdb.set_trace() + if self.args.length_weighted_loss: + loss = loss_rec / sent_length + self.args.beta * loss_kl + else: + loss = loss_rec + self.args.beta * loss_kl + + return loss_rec, loss_kl, loss + + def encoder_sample(self, bert_fea, nsamples): + """sampling from the encoder + Returns: Tensor1 + Tensor1: the tensor latent z with shape [batch, nsamples, nz] + """ + + # (batch_size, nz) + + mu, logvar = self.encoder.linear(bert_fea).chunk(2, -1) + mu, logvar = mu.squeeze(0), logvar.squeeze(0) + + # (batch, nsamples, nz) + z = self.reparameterize(mu, logvar, nsamples) + + return z, (mu, logvar) + + def encode_stats(self, x): + """ + Returns: Tensor1, Tensor2 + Tensor1: the mean of latent z with shape [batch, nz] + Tensor2: the logvar of latent z with shape [batch, nz] + """ + return self.encoder.encode_stats(x) + + def decode_steps(self, z, strategy, K=10): + """generate samples from z given strategy + Args: + z: [batch, nsamples, nz] + strategy: "beam" or "greedy" or "sample" + K: the beam width parameter + Returns: List1 + List1: a list of decoded word sequence + """ + + if strategy == "beam": + return self.decoder.beam_search_decode(z, K) + elif strategy == "greedy": + return self.decoder.greedy_decode(z) + elif strategy == "sample": + return self.decoder.sample_decode(z) + else: + raise ValueError("the decoding strategy is not supported") + + def decode(self, z, temperature=1.0, max_length=30): + bos_token = self.tokenizer_decoder.encode('<BOS>') + eos_token = self.tokenizer_decoder.encode('<EOS>') + context_tokens = torch.LongTensor(bos_token).to(z.device) + + sentences = [] + for zi in z: + out = sample_single_sequence_conditional( + model=self.decoder, + context=context_tokens, + past=zi, temperature=temperature, + top_k=0, top_p=1.0, + max_length=max_length, + eos_token=eos_token[0],) + + """text = self.tokenizer_decoder.decode(out.tolist(), clean_up_tokenization_spaces=True) + text = text.split()[1:-1] + text = ' '.join(text) + sentences.append(text)""" + sentences.append(out) + return sentences + + def reconstruct(self, x, decoding_strategy="greedy", K=5): + """reconstruct from input x + Args: + x: (batch, *) + decoding_strategy: "beam" or "greedy" or "sample" + K: the beam width parameter + Returns: List1 + List1: a list of decoded word sequence + """ + z = self.sample_from_inference(x).squeeze(1) + + return self.decode_steps(z, decoding_strategy, K) + + def log_probability(self, x, z): + """Cross Entropy in the language case + Args: + x: (batch_size, seq_len) + z: (batch_size, n_sample, nz) + Returns: + log_p: (batch_size, n_sample). + log_p(x|z) across different x and z + """ + outputs = self.decoder(input_ids=x, past=z, labels=x, label_ignore=self.pad_token_id) + loss_rec = outputs[0] + return -loss_rec + + def loss_iw(self, x0, x1, nsamples=50, ns=1): + """ + Args: + x: if the data is constant-length, x is the data tensor with + shape (batch, *). 
Otherwise x is a tuple that contains + the data tensor and length list + Returns: Tensor1, Tensor2, Tensor3 + Tensor1: total loss [batch] + Tensor2: reconstruction loss shape [batch] + Tensor3: KL loss shape [batch] + """ + + # encoding into bert features + bert_fea = self.encoder(x0)[1] + + # (batch_size, nz) + + mu, logvar = self.encoder.linear(bert_fea).chunk(2, -1) + + + ################## + # compute KL + ################## + # pdb.set_trace() + KL = 0.5 * (mu.pow(2) + logvar.exp() - logvar - 1).sum(dim=1) + + # mu, logvar = mu.squeeze(0), logvar.squeeze(0) + ll_tmp, rc_tmp = [], [] + for _ in range(int(nsamples / ns)): + + # (batch, nsamples, nz) + z = self.reparameterize(mu, logvar, ns) + # past = self.decoder.linear(z) + past = z + + # [batch, nsamples] + log_prior = self.eval_prior_dist(z) + log_gen = self.eval_cond_ll(x1, past) + log_infer = self.eval_inference_dist(z, (mu, logvar)) + + # pdb.set_trace() + log_gen = log_gen.unsqueeze(0).contiguous().view(z.shape[0],-1) + + + # pdb.set_trace() + rc_tmp.append(log_gen) + ll_tmp.append(log_gen + log_prior - log_infer) + + + + log_prob_iw = log_sum_exp(torch.cat(ll_tmp, dim=-1), dim=-1) - math.log(nsamples) + log_gen_iw = torch.mean(torch.cat(rc_tmp, dim=-1), dim=-1) + + return log_prob_iw, log_gen_iw , KL + + def nll_iw(self, x0, x1, nsamples, ns=1): + """compute the importance weighting estimate of the log-likelihood + Args: + x0, x1: two different tokenization results of x, where x is the data tensor with shape (batch, *). + nsamples: Int + the number of samples required to estimate marginal data likelihood + Returns: Tensor1 + Tensor1: the estimate of log p(x), shape [batch] + """ + + # compute iw every ns samples to address the memory issue + # nsamples = 500, ns = 100 + # nsamples = 500, ns = 10 + + # TODO: note that x is forwarded twice in self.encoder.sample(x, ns) and self.eval_inference_dist(x, z, param) + #. 
this problem is to be solved in order to speed up + + tmp = [] + for _ in range(int(nsamples / ns)): + # [batch, ns, nz] + + # Chunyuan: + # encoding into bert features + pooled_hidden_fea = self.encoder(x0)[1] + + # param is the parameters required to evaluate q(z|x) + z, param = self.encoder_sample(pooled_hidden_fea, ns) + + # [batch, ns] + log_comp_ll = self.eval_complete_ll(x1, z) + log_infer_ll = self.eval_inference_dist(z, param) + + tmp.append(log_comp_ll - log_infer_ll) + + ll_iw = log_sum_exp(torch.cat(tmp, dim=-1), dim=-1) - math.log(nsamples) + + return ll_iw + + def KL(self, x): + _, KL = self.encode(x, 1) + + return KL + + def eval_prior_dist(self, zrange): + """perform grid search to calculate the true posterior + Args: + zrange: tensor + different z points that will be evaluated, with + shape (k^2, nz), where k=(zmax - zmin)/space + """ + + # (k^2) + return self.prior.log_prob(zrange).sum(dim=-1) + + def eval_complete_ll(self, x, z): + """compute log p(z,x) + Args: + x: Tensor + input with shape [batch, seq_len] + z: Tensor + evaluation points with shape [batch, nsamples, nz] + Returns: Tensor1 + Tensor1: log p(z,x) Tensor with shape [batch, nsamples] + """ + + # [batch, nsamples] + log_prior = self.eval_prior_dist(z) + log_gen = self.eval_cond_ll(x, z) + + return log_prior + log_gen + + def eval_cond_ll(self, x, z): + """compute log p(x|z) + """ + x_shape = list(x.size()) + z_shape = list(z.size()) + if len(z_shape) == 3: + x = x.unsqueeze(1).repeat(1, z_shape[1], 1).contiguous().view(x_shape[0]*z_shape[1], x_shape[-1]) + z = z.contiguous().view(x_shape[0]*z_shape[1], z_shape[-1]) + + return self.log_probability(x, z) + + def eval_log_model_posterior(self, x, grid_z): + """perform grid search to calculate the true posterior + this function computes p(z|x) + Args: + grid_z: tensor + different z points that will be evaluated, with + shape (k^2, nz), where k=(zmax - zmin)/pace + Returns: Tensor + Tensor: the log posterior distribution log p(z|x) with + shape [batch_size, K^2] + """ + try: + batch_size = x.size(0) + except: + batch_size = x[0].size(0) + + # (batch_size, k^2, nz) + grid_z = grid_z.unsqueeze(0).expand(batch_size, *grid_z.size()).contiguous() + + # (batch_size, k^2) + log_comp = self.eval_complete_ll(x, grid_z) + + # normalize to posterior + log_posterior = log_comp - log_sum_exp(log_comp, dim=1, keepdim=True) + + return log_posterior + + def sample_from_inference(self, x, nsamples=1): + """perform sampling from inference net + Returns: Tensor + Tensor: samples from inference nets with + shape (batch_size, nsamples, nz) + """ + z, _ = self.encoder.sample(x, nsamples) + + return z + + def sample_from_posterior(self, x, nsamples): + """perform MH sampling from model posterior + Returns: Tensor + Tensor: samples from model posterior with + shape (batch_size, nsamples, nz) + """ + + # use the samples from inference net as initial points + # for MCMC sampling. 
[batch_size, nsamples, nz] + cur = self.encoder.sample_from_inference(x, 1) + cur_ll = self.eval_complete_ll(x, cur) + total_iter = self.args.mh_burn_in + nsamples * self.args.mh_thin + samples = [] + for iter_ in range(total_iter): + next = torch.normal(mean=cur, + std=cur.new_full(size=cur.size(), fill_value=self.args.mh_std)) + # [batch_size, 1] + next_ll = self.eval_complete_ll(x, next) + ratio = next_ll - cur_ll + + accept_prob = torch.min(ratio.exp(), ratio.new_ones(ratio.size())) + + uniform_t = accept_prob.new_empty(accept_prob.size()).uniform_() + + # [batch_size, 1] + mask = (uniform_t < accept_prob).float() + mask_ = mask.unsqueeze(2) + + cur = mask_ * next + (1 - mask_) * cur + cur_ll = mask * next_ll + (1 - mask) * cur_ll + + if iter_ >= self.args.mh_burn_in and (iter_ - self.args.mh_burn_in) % self.args.mh_thin == 0: + samples.append(cur.unsqueeze(1)) + + return torch.cat(samples, dim=1) + + def calc_model_posterior_mean(self, x, grid_z): + """compute the mean value of model posterior, i.e. E_{z ~ p(z|x)}[z] + Args: + grid_z: different z points that will be evaluated, with + shape (k^2, nz), where k=(zmax - zmin)/pace + x: [batch, *] + Returns: Tensor1 + Tensor1: the mean value tensor with shape [batch, nz] + """ + + # [batch, K^2] + log_posterior = self.eval_log_model_posterior(x, grid_z) + posterior = log_posterior.exp() + + # [batch, nz] + return torch.mul(posterior.unsqueeze(2), grid_z.unsqueeze(0)).sum(1) + + def calc_infer_mean(self, x): + """ + Returns: Tensor1 + Tensor1: the mean of inference distribution, with shape [batch, nz] + """ + + mean, logvar = self.encoder.forward(x) + + return mean + + def eval_inference_dist(self, z, param): + """this function computes log q(z | x) + Args: + z: tensor + different z points that will be evaluated, with + shape [batch, nsamples, nz] + Returns: Tensor1 + Tensor1: log q(z|x) with shape [batch, nsamples] + """ + + nz = z.size(2) + mu, logvar = param + + # (batch_size, 1, nz) + mu, logvar = mu.unsqueeze(1), logvar.unsqueeze(1) + var = logvar.exp() + + # (batch_size, nsamples, nz) + dev = z - mu + + # (batch_size, nsamples) + log_density = -0.5 * ((dev ** 2) / var).sum(dim=-1) - \ + 0.5 * (nz * math.log(2 * math.pi) + logvar.sum(-1)) + + return log_density + + def calc_mi(self, test_data_batch, args): + # calc_mi_v3 + import math + from modules.utils import log_sum_exp  # NOTE: assumes a log_sum_exp helper is importable from this path + + mi = 0 + num_examples = 0 + + mu_batch_list, logvar_batch_list = [], [] + neg_entropy = 0. + for batch_data in test_data_batch: + + x0, _, _ = batch_data + x0 = x0.to(args.device) + + # encoding into bert features + bert_fea = self.encoder(x0)[1] + + # (batch_size, nz) + mu, logvar = self.encoder.linear(bert_fea).chunk(2, -1) + + x_batch, nz = mu.size() + + #print(x_batch, end=' ') + + num_examples += x_batch + + # E_{q(z|x)}log(q(z|x)) = -0.5*nz*log(2*\pi) - 0.5*(1+logvar).sum(-1) + + neg_entropy += (-0.5 * nz * math.log(2 * math.pi)- 0.5 * (1 + logvar).sum(-1)).sum().item() + mu_batch_list += [mu.cpu()] + logvar_batch_list += [logvar.cpu()] + + neg_entropy = neg_entropy / num_examples + + num_examples = 0 + log_qz = 0.
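+ # Added note (sketch of the estimator below): neg_entropy above accumulates + # E_{q(z|x)}[log q(z|x)], and the loop that follows estimates the aggregate + # posterior log q(z_i) ~= log-sum-exp_j log q(z_i|x_j) - log(x_batch) for each + # sampled z_i, so that mi = neg_entropy - log_qz approximates I(x; z).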
+ for i in range(len(mu_batch_list)): + ############### + # get z_samples + ############### + mu, logvar = mu_batch_list[i].cuda(), logvar_batch_list[i].cuda() + + # [z_batch, 1, nz] + + z_samples = self.reparameterize(mu, logvar, 1) + + z_samples = z_samples.view(-1, 1, nz) + num_examples += z_samples.size(0) + + ############### + # compute density + ############### + # [1, x_batch, nz] + #mu, logvar = mu_batch_list[i].cuda(), logvar_batch_list[i].cuda() + #indices = list(np.random.choice(np.arange(len(mu_batch_list)), 10)) + [i] + indices = np.arange(len(mu_batch_list)) + mu = torch.cat([mu_batch_list[_] for _ in indices], dim=0).cuda() + logvar = torch.cat([logvar_batch_list[_] for _ in indices], dim=0).cuda() + x_batch, nz = mu.size() + + mu, logvar = mu.unsqueeze(0), logvar.unsqueeze(0) + var = logvar.exp() + + # (z_batch, x_batch, nz) + dev = z_samples - mu + + # (z_batch, x_batch) + log_density = -0.5 * ((dev ** 2) / var).sum(dim=-1) - \ + 0.5 * (nz * math.log(2 * math.pi) + logvar.sum(-1)) + + # log q(z): aggregate posterior + # [z_batch] + log_qz += (log_sum_exp(log_density, dim=1) - math.log(x_batch)).sum(-1) + + log_qz /= num_examples + mi = neg_entropy - log_qz + + return mi + + def calc_au(self, eval_dataloader, args, delta=0.01): + """compute the number of active units + """ + cnt = 0 + for batch_data in eval_dataloader: + + x0, _, _ = batch_data + x0 = x0.to(args.device) + + # encoding into bert features + bert_fea = self.encoder(x0)[1] + + # (batch_size, nz) + mean, logvar = self.encoder.linear(bert_fea).chunk(2, -1) + + if cnt == 0: + means_sum = mean.sum(dim=0, keepdim=True) + else: + means_sum = means_sum + mean.sum(dim=0, keepdim=True) + cnt += mean.size(0) + + # (1, nz) + mean_mean = means_sum / cnt + + cnt = 0 + for batch_data in eval_dataloader: + + x0, _, _ = batch_data + x0 = x0.to(args.device) + + # encoding into bert features + bert_fea = self.encoder(x0)[1] + + # (batch_size, nz) + mean, _ = self.encoder.linear(bert_fea).chunk(2, -1) + + if cnt == 0: + var_sum = ((mean - mean_mean) ** 2).sum(dim=0) + else: + var_sum = var_sum + ((mean - mean_mean) ** 2).sum(dim=0) + cnt += mean.size(0) + + # (nz) + au_var = var_sum / (cnt - 1) + + return (au_var >= delta).sum().item(), au_var + + +from .optimus_modules.optimus_bert import BertForLatentConnector_XX + + +@register('optimus_bert_connector', version) +class optimus_bert_connector(BertForLatentConnector_XX): + pass + + +from .optimus_modules.tokenization_bert import BertTokenizer + + +@register('optimus_bert_tokenizer', version) +class optimus_bert_tokenizer(BertTokenizer): + pass + + +from .optimus_modules.optimus_gpt2 import GPT2ForLatentConnector_XX + + +@register('optimus_gpt2_connector', version) +class optimus_gpt2_connector(GPT2ForLatentConnector_XX): + pass + + +from .optimus_modules.tokenization_gpt2 import GPT2Tokenizer + + +@register('optimus_gpt2_tokenizer', version) +class optimus_gpt2_tokenizer(GPT2Tokenizer): + pass + +############################## +# some helpers for inference # +############################## + + +def sample_single_sequence_conditional( + model, + context, + past=None, + temperature=1, + top_k=0, + top_p=0.0, + eos_token=50829, + max_length=30, ): + past = past.unsqueeze(0) + generated = context.unsqueeze(0) + with torch.no_grad(): + while True: + inputs = {'input_ids': generated, 'past': past} + outputs = model(**inputs) + next_token_logits = outputs[0][0, -1, :] / temperature + filtered_logits = top_k_top_p_filtering(next_token_logits, top_k=top_k, top_p=top_p) + next_token = 
torch.multinomial(F.softmax(filtered_logits, dim=-1), num_samples=1) + generated = torch.cat((generated, next_token.unsqueeze(0)), dim=1) + if next_token[0].item() == eos_token: + break + if generated.shape[1] >= max_length: + generated[0, -1] = eos_token + break + return generated.squeeze(0) + + +def top_k_top_p_filtering(logits, top_k=0, top_p=0.0, filter_value=-float('Inf')): + """ Filter a distribution of logits using top-k and/or nucleus (top-p) filtering + Args: + logits: logits distribution shape (vocabulary size) + top_k > 0: keep only top k tokens with highest probability (top-k filtering). + top_p > 0.0: keep the top tokens with cumulative probability >= top_p (nucleus filtering). + Nucleus filtering is described in Holtzman et al. (http://arxiv.org/abs/1904.09751) + From: https://gist.github.com/thomwolf/1a5a29f6962089e871b94cbd09daf317 + """ + assert logits.dim() == 1 # batch size 1 for now - could be updated for more but the code would be less clear + top_k = min(top_k, logits.size(-1)) # Safety check + if top_k > 0: + # Remove all tokens with a probability less than the last token of the top-k + indices_to_remove = logits < torch.topk(logits, top_k)[0][..., -1, None] + logits[indices_to_remove] = filter_value + + if top_p > 0.0: + sorted_logits, sorted_indices = torch.sort(logits, descending=True) + cumulative_probs = torch.cumsum(F.softmax(sorted_logits, dim=-1), dim=-1) + + # Remove tokens with cumulative probability above the threshold + sorted_indices_to_remove = cumulative_probs > top_p + # Shift the indices to the right to keep also the first token above the threshold + sorted_indices_to_remove[..., 1:] = sorted_indices_to_remove[..., :-1].clone() + sorted_indices_to_remove[..., 0] = 0 + + indices_to_remove = sorted_indices[sorted_indices_to_remove] + logits[indices_to_remove] = filter_value + return logits \ No newline at end of file diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_bert.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_bert.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6dc62629210f669ccf4b3c57c98043b5a852c7d7 Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_bert.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_gpt2.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_gpt2.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4c5f29602824d2ef942f4c96a3da0f36c766911a Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_gpt2.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_utils.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e1b884ae960c7140cd9aba61bf5ed2c4a3adf794 Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/configuration_utils.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/file_utils.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/file_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d6321a18951926b2c623cf9423d1b9e829fc959 Binary files /dev/null and 
b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/file_utils.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/modeling_utils.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/modeling_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f57986ac69f6359a4ee6ba21c60e18d80294187b Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/modeling_utils.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/optimus_bert.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/optimus_bert.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8d27b1104a8dbccbe57adcf5eb160616256e9556 Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/optimus_bert.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/optimus_gpt2.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/optimus_gpt2.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cd7d37f23da672def4e7d2afab3b277a303d3b2d Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/optimus_gpt2.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_bert.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_bert.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1be82d095ffcc4afdaf11b148e58810ae36dc1fd Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_bert.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_gpt2.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_gpt2.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..40d03edc5eda71968c64a877d629db677913b7a8 Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_gpt2.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_utils.cpython-38.pyc b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1f72ce716a49e0bb158a74ae26e48df4add8a244 Binary files /dev/null and b/core/models/latent_diffusion/vae/optimus_modules/__pycache__/tokenization_utils.cpython-38.pyc differ diff --git a/core/models/latent_diffusion/vae/optimus_modules/configuration_bert.py b/core/models/latent_diffusion/vae/optimus_modules/configuration_bert.py new file mode 100644 index 0000000000000000000000000000000000000000..7fff3e5d058720900fb0388b3c54e31e86045a71 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/configuration_bert.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" BERT model configuration """ + +from __future__ import absolute_import, division, print_function, unicode_literals + +import json +import logging +import sys +from io import open + +from .configuration_utils import PretrainedConfig + +logger = logging.getLogger(__name__) + +BERT_PRETRAINED_CONFIG_ARCHIVE_MAP = { + 'bert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-config.json", + 'bert-large-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-config.json", + 'bert-base-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-config.json", + 'bert-large-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-config.json", + 'bert-base-multilingual-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased-config.json", + 'bert-base-multilingual-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased-config.json", + 'bert-base-chinese': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese-config.json", + 'bert-base-german-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-german-cased-config.json", + 'bert-large-uncased-whole-word-masking': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-config.json", + 'bert-large-cased-whole-word-masking': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-config.json", + 'bert-large-uncased-whole-word-masking-finetuned-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-finetuned-squad-config.json", + 'bert-large-cased-whole-word-masking-finetuned-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-finetuned-squad-config.json", + 'bert-base-cased-finetuned-mrpc': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-finetuned-mrpc-config.json", +} + + +class BertConfig(PretrainedConfig): + r""" + :class:`~pytorch_transformers.BertConfig` is the configuration class to store the configuration of a + `BertModel`. + + + Arguments: + vocab_size_or_config_json_file: Vocabulary size of `inputs_ids` in `BertModel`. + hidden_size: Size of the encoder layers and the pooler layer. + num_hidden_layers: Number of hidden layers in the Transformer encoder. + num_attention_heads: Number of attention heads for each attention layer in + the Transformer encoder. + intermediate_size: The size of the "intermediate" (i.e., feed-forward) + layer in the Transformer encoder. + hidden_act: The non-linear activation function (function or string) in the + encoder and pooler. If string, "gelu", "relu" and "swish" are supported. + hidden_dropout_prob: The dropout probability for all fully connected + layers in the embeddings, encoder, and pooler. + attention_probs_dropout_prob: The dropout ratio for the attention + probabilities. + max_position_embeddings: The maximum sequence length that this model might + ever be used with. 
Typically set this to something large just in case + (e.g., 512 or 1024 or 2048). + type_vocab_size: The vocabulary size of the `token_type_ids` passed into + `BertModel`. + initializer_range: The standard deviation of the truncated_normal_initializer for + initializing all weight matrices. + layer_norm_eps: The epsilon used by LayerNorm. + """ + pretrained_config_archive_map = BERT_PRETRAINED_CONFIG_ARCHIVE_MAP + + def __init__(self, + vocab_size_or_config_json_file=30522, + hidden_size=768, + num_hidden_layers=12, + num_attention_heads=12, + intermediate_size=3072, + hidden_act="gelu", + hidden_dropout_prob=0.1, + attention_probs_dropout_prob=0.1, + max_position_embeddings=512, + type_vocab_size=2, + initializer_range=0.02, + layer_norm_eps=1e-12, + **kwargs): + super(BertConfig, self).__init__(**kwargs) + if isinstance(vocab_size_or_config_json_file, str) or (sys.version_info[0] == 2 + and isinstance(vocab_size_or_config_json_file, unicode)): + with open(vocab_size_or_config_json_file, "r", encoding='utf-8') as reader: + json_config = json.loads(reader.read()) + for key, value in json_config.items(): + self.__dict__[key] = value + elif isinstance(vocab_size_or_config_json_file, int): + self.vocab_size = vocab_size_or_config_json_file + self.hidden_size = hidden_size + self.num_hidden_layers = num_hidden_layers + self.num_attention_heads = num_attention_heads + self.hidden_act = hidden_act + self.intermediate_size = intermediate_size + self.hidden_dropout_prob = hidden_dropout_prob + self.attention_probs_dropout_prob = attention_probs_dropout_prob + self.max_position_embeddings = max_position_embeddings + self.type_vocab_size = type_vocab_size + self.initializer_range = initializer_range + self.layer_norm_eps = layer_norm_eps + else: + raise ValueError("First argument must be either a vocabulary size (int)" + " or the path to a pretrained model config file (str)") diff --git a/core/models/latent_diffusion/vae/optimus_modules/configuration_gpt2.py b/core/models/latent_diffusion/vae/optimus_modules/configuration_gpt2.py new file mode 100644 index 0000000000000000000000000000000000000000..c83d9e82cef82f28b1caa443569bc407217439f7 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/configuration_gpt2.py @@ -0,0 +1,143 @@ +# coding=utf-8 +# Copyright 2018 The OpenAI Team Authors and HuggingFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
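+# Usage sketch (added illustration; GPT2Config and its property aliases are defined below): +#     config = GPT2Config(n_embd=768, n_layer=12, n_head=12) +#     assert config.hidden_size == 768          # alias for n_embd +#     assert config.num_hidden_layers == 12     # alias for n_layer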
+""" OpenAI GPT-2 configuration """ + +from __future__ import absolute_import, division, print_function, unicode_literals + +import json +import logging +import sys +from io import open + +from .configuration_utils import PretrainedConfig + +logger = logging.getLogger(__name__) + +GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP = {"gpt2": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-config.json", + "gpt2-medium": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-medium-config.json", + "gpt2-large": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-large-config.json"} + +class GPT2Config(PretrainedConfig): + """Configuration class to store the configuration of a `GPT2Model`. + + Args: + vocab_size_or_config_json_file: Vocabulary size of `inputs_ids` in `GPT2Model` or a configuration json file. + n_positions: Number of positional embeddings. + n_ctx: Size of the causal mask (usually same as n_positions). + n_embd: Dimensionality of the embeddings and hidden states. + n_layer: Number of hidden layers in the Transformer encoder. + n_head: Number of attention heads for each attention layer in + the Transformer encoder. + layer_norm_epsilon: epsilon to use in the layer norm layers + resid_pdrop: The dropout probabilitiy for all fully connected + layers in the embeddings, encoder, and pooler. + attn_pdrop: The dropout ratio for the attention + probabilities. + embd_pdrop: The dropout ratio for the embeddings. + initializer_range: The sttdev of the truncated_normal_initializer for + initializing all weight matrices. + """ + pretrained_config_archive_map = GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP + + def __init__( + self, + vocab_size_or_config_json_file=50257, + n_positions=1024, + n_ctx=1024, + n_embd=768, + n_layer=12, + n_head=12, + resid_pdrop=0.1, + embd_pdrop=0.1, + attn_pdrop=0.1, + layer_norm_epsilon=1e-5, + initializer_range=0.02, + + num_labels=1, + summary_type='cls_index', + summary_use_proj=True, + summary_activation=None, + summary_proj_to_labels=True, + summary_first_dropout=0.1, + **kwargs + ): + """Constructs GPT2Config. + + Args: + vocab_size_or_config_json_file: Vocabulary size of `inputs_ids` in `GPT2Model` or a configuration json file. + n_positions: Number of positional embeddings. + n_ctx: Size of the causal mask (usually same as n_positions). + n_embd: Dimensionality of the embeddings and hidden states. + n_layer: Number of hidden layers in the Transformer encoder. + n_head: Number of attention heads for each attention layer in + the Transformer encoder. + layer_norm_epsilon: epsilon to use in the layer norm layers + resid_pdrop: The dropout probabilitiy for all fully connected + layers in the embeddings, encoder, and pooler. + attn_pdrop: The dropout ratio for the attention + probabilities. + embd_pdrop: The dropout ratio for the embeddings. + initializer_range: The sttdev of the truncated_normal_initializer for + initializing all weight matrices. 
+ """ + super(GPT2Config, self).__init__(**kwargs) + + if isinstance(vocab_size_or_config_json_file, str) or (sys.version_info[0] == 2 + and isinstance(vocab_size_or_config_json_file, unicode)): + with open(vocab_size_or_config_json_file, "r", encoding="utf-8") as reader: + json_config = json.loads(reader.read()) + for key, value in json_config.items(): + self.__dict__[key] = value + elif isinstance(vocab_size_or_config_json_file, int): + self.vocab_size = vocab_size_or_config_json_file + self.n_ctx = n_ctx + self.n_positions = n_positions + self.n_embd = n_embd + self.n_layer = n_layer + self.n_head = n_head + self.resid_pdrop = resid_pdrop + self.embd_pdrop = embd_pdrop + self.attn_pdrop = attn_pdrop + self.layer_norm_epsilon = layer_norm_epsilon + self.initializer_range = initializer_range + + self.num_labels = num_labels + self.summary_type = summary_type + self.summary_use_proj = summary_use_proj + self.summary_activation = summary_activation + self.summary_first_dropout = summary_first_dropout + self.summary_proj_to_labels = summary_proj_to_labels + else: + raise ValueError( + "First argument must be either a vocabulary size (int)" + "or the path to a pretrained model config file (str)" + ) + + @property + def max_position_embeddings(self): + return self.n_positions + + @property + def hidden_size(self): + return self.n_embd + + @property + def num_attention_heads(self): + return self.n_head + + @property + def num_hidden_layers(self): + return self.n_layer diff --git a/core/models/latent_diffusion/vae/optimus_modules/configuration_utils.py b/core/models/latent_diffusion/vae/optimus_modules/configuration_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7efc735d4132124cd3d097cc1844f4407551b1db --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/configuration_utils.py @@ -0,0 +1,205 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" Configuration base class and utilities.""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import copy +import json +import logging +import os +from io import open + +from .file_utils import cached_path, CONFIG_NAME + +logger = logging.getLogger(__name__) + +class PretrainedConfig(object): + r""" Base class for all configuration classes. + Handles a few parameters common to all models' configurations as well as methods for loading/downloading/saving configurations. + + Note: + A configuration file can be loaded and saved to disk. Loading the configuration file and using this file to initialize a model does **not** load the model weights. + It only affects the model's configuration. 
+ + Class attributes (overridden by derived classes): + - ``pretrained_config_archive_map``: a python ``dict`` of with `short-cut-names` (string) as keys and `url` (string) of associated pretrained model configurations as values. + + Parameters: + ``finetuning_task``: string, default `None`. Name of the task used to fine-tune the model. This can be used when converting from an original (TensorFlow or PyTorch) checkpoint. + ``num_labels``: integer, default `2`. Number of classes to use when the model is a classification model (sequences/tokens) + ``output_attentions``: boolean, default `False`. Should the model returns attentions weights. + ``output_hidden_states``: string, default `False`. Should the model returns all hidden-states. + ``torchscript``: string, default `False`. Is the model used with Torchscript. + """ + pretrained_config_archive_map = {} + + def __init__(self, **kwargs): + self.finetuning_task = kwargs.pop('finetuning_task', None) + self.num_labels = kwargs.pop('num_labels', 2) + self.output_attentions = kwargs.pop('output_attentions', False) + self.output_hidden_states = kwargs.pop('output_hidden_states', False) + self.torchscript = kwargs.pop('torchscript', False) + self.pruned_heads = kwargs.pop('pruned_heads', {}) + + def save_pretrained(self, save_directory): + """ Save a configuration object to the directory `save_directory`, so that it + can be re-loaded using the :func:`~pytorch_transformers.PretrainedConfig.from_pretrained` class method. + """ + assert os.path.isdir(save_directory), "Saving path should be a directory where the model and configuration can be saved" + + # If we save using the predefined names, we can load using `from_pretrained` + output_config_file = os.path.join(save_directory, CONFIG_NAME) + + self.to_json_file(output_config_file) + + @classmethod + def from_pretrained(cls, pretrained_model_name_or_path, **kwargs): + r""" Instantiate a :class:`~pytorch_transformers.PretrainedConfig` (or a derived class) from a pre-trained model configuration. + + Parameters: + pretrained_model_name_or_path: either: + + - a string with the `shortcut name` of a pre-trained model configuration to load from cache or download, e.g.: ``bert-base-uncased``. + - a path to a `directory` containing a configuration file saved using the :func:`~pytorch_transformers.PretrainedConfig.save_pretrained` method, e.g.: ``./my_model_directory/``. + - a path or url to a saved configuration JSON `file`, e.g.: ``./my_model_directory/configuration.json``. + + cache_dir: (`optional`) string: + Path to a directory in which a downloaded pre-trained model + configuration should be cached if the standard cache should not be used. + + kwargs: (`optional`) dict: key/value pairs with which to update the configuration object after loading. + + - The values in kwargs of any keys which are configuration attributes will be used to override the loaded values. + - Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled by the `return_unused_kwargs` keyword parameter. + + force_download: (`optional`) boolean, default False: + Force to (re-)download the model weights and configuration files and override the cached versions if they exists. + + proxies: (`optional`) dict, default None: + A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. + The proxies are used on each request. 
+ + return_unused_kwargs: (`optional`) bool: + + - If False, then this function returns just the final configuration object. + - If True, then this functions returns a tuple `(config, unused_kwargs)` where `unused_kwargs` is a dictionary consisting of the key/value pairs whose keys are not configuration attributes: ie the part of kwargs which has not been used to update `config` and is otherwise ignored. + + Examples:: + + # We can't instantiate directly the base class `PretrainedConfig` so let's show the examples on a + # derived class: BertConfig + config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from S3 and cache. + config = BertConfig.from_pretrained('./test/saved_model/') # E.g. config (or model) was saved using `save_pretrained('./test/saved_model/')` + config = BertConfig.from_pretrained('./test/saved_model/my_configuration.json') + config = BertConfig.from_pretrained('bert-base-uncased', output_attention=True, foo=False) + assert config.output_attention == True + config, unused_kwargs = BertConfig.from_pretrained('bert-base-uncased', output_attention=True, + foo=False, return_unused_kwargs=True) + assert config.output_attention == True + assert unused_kwargs == {'foo': False} + + """ + cache_dir = kwargs.pop('cache_dir', None) + force_download = kwargs.pop('force_download', False) + proxies = kwargs.pop('proxies', None) + return_unused_kwargs = kwargs.pop('return_unused_kwargs', False) + + if pretrained_model_name_or_path in cls.pretrained_config_archive_map: + config_file = cls.pretrained_config_archive_map[pretrained_model_name_or_path] + elif os.path.isdir(pretrained_model_name_or_path): + config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME) + else: + config_file = pretrained_model_name_or_path + # redirect to the cache, if necessary + try: + resolved_config_file = cached_path(config_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies) + except EnvironmentError as e: + if pretrained_model_name_or_path in cls.pretrained_config_archive_map: + logger.error( + "Couldn't reach server at '{}' to download pretrained model configuration file.".format( + config_file)) + else: + logger.error( + "Model name '{}' was not found in model name list ({}). 
" + "We assumed '{}' was a path or url but couldn't find any file " + "associated to this path or url.".format( + pretrained_model_name_or_path, + ', '.join(cls.pretrained_config_archive_map.keys()), + config_file)) + raise e + if resolved_config_file == config_file: + logger.info("loading configuration file {}".format(config_file)) + else: + logger.info("loading configuration file {} from cache at {}".format( + config_file, resolved_config_file)) + + # Load config + config = cls.from_json_file(resolved_config_file) + + if hasattr(config, 'pruned_heads'): + config.pruned_heads = dict((int(key), set(value)) for key, value in config.pruned_heads.items()) + + # Update config with kwargs if needed + to_remove = [] + for key, value in kwargs.items(): + if hasattr(config, key): + setattr(config, key, value) + to_remove.append(key) + for key in to_remove: + kwargs.pop(key, None) + + logger.info("Model config %s", config) + if return_unused_kwargs: + return config, kwargs + else: + return config + + @classmethod + def from_dict(cls, json_object): + """Constructs a `Config` from a Python dictionary of parameters.""" + config = cls(vocab_size_or_config_json_file=-1) + for key, value in json_object.items(): + config.__dict__[key] = value + return config + + @classmethod + def from_json_file(cls, json_file): + """Constructs a `BertConfig` from a json file of parameters.""" + with open(json_file, "r", encoding='utf-8') as reader: + text = reader.read() + return cls.from_dict(json.loads(text)) + + def __eq__(self, other): + return self.__dict__ == other.__dict__ + + def __repr__(self): + return str(self.to_json_string()) + + def to_dict(self): + """Serializes this instance to a Python dictionary.""" + output = copy.deepcopy(self.__dict__) + return output + + def to_json_string(self): + """Serializes this instance to a JSON string.""" + return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n" + + def to_json_file(self, json_file_path): + """ Save this instance to a json file.""" + with open(json_file_path, "w", encoding='utf-8') as writer: + writer.write(self.to_json_string()) diff --git a/core/models/latent_diffusion/vae/optimus_modules/file_utils.py b/core/models/latent_diffusion/vae/optimus_modules/file_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1db0918eab796481d779b8eb4011212bfe0a08dc --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/file_utils.py @@ -0,0 +1,294 @@ +""" +Utilities for working with the local dataset cache. +This file is adapted from the AllenNLP library at https://github.com/allenai/allennlp +Copyright by the AllenNLP authors. 
+""" +from __future__ import (absolute_import, division, print_function, unicode_literals) + +import sys +import json +import logging +import os +import six +import shutil +import tempfile +import fnmatch +from functools import wraps +from hashlib import sha256 +from io import open + +# import boto3 +# from botocore.config import Config +# from botocore.exceptions import ClientError +import requests +from tqdm import tqdm + +try: + from torch.hub import _get_torch_home + torch_cache_home = _get_torch_home() +except ImportError: + torch_cache_home = os.path.expanduser( + os.getenv('TORCH_HOME', os.path.join( + os.getenv('XDG_CACHE_HOME', '~/.cache'), 'torch'))) +default_cache_path = os.path.join(torch_cache_home, 'pytorch_transformers') + +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse + +try: + from pathlib import Path + PYTORCH_PRETRAINED_BERT_CACHE = Path( + os.getenv('PYTORCH_TRANSFORMERS_CACHE', os.getenv('PYTORCH_PRETRAINED_BERT_CACHE', default_cache_path))) +except (AttributeError, ImportError): + PYTORCH_PRETRAINED_BERT_CACHE = os.getenv('PYTORCH_TRANSFORMERS_CACHE', + os.getenv('PYTORCH_PRETRAINED_BERT_CACHE', + default_cache_path)) + +PYTORCH_TRANSFORMERS_CACHE = PYTORCH_PRETRAINED_BERT_CACHE # Kept for backward compatibility + +WEIGHTS_NAME = "pytorch_model.bin" +TF_WEIGHTS_NAME = 'model.ckpt' +CONFIG_NAME = "config.json" + +logger = logging.getLogger(__name__) # pylint: disable=invalid-name + +if not six.PY2: + def add_start_docstrings(*docstr): + def docstring_decorator(fn): + fn.__doc__ = ''.join(docstr) + fn.__doc__ + return fn + return docstring_decorator + + def add_end_docstrings(*docstr): + def docstring_decorator(fn): + fn.__doc__ = fn.__doc__ + ''.join(docstr) + return fn + return docstring_decorator +else: + # Not possible to update class docstrings on python2 + def add_start_docstrings(*docstr): + def docstring_decorator(fn): + return fn + return docstring_decorator + + def add_end_docstrings(*docstr): + def docstring_decorator(fn): + return fn + return docstring_decorator + +def url_to_filename(url, etag=None): + """ + Convert `url` into a hashed filename in a repeatable way. + If `etag` is specified, append its hash to the url's, delimited + by a period. + """ + url_bytes = url.encode('utf-8') + url_hash = sha256(url_bytes) + filename = url_hash.hexdigest() + + if etag: + etag_bytes = etag.encode('utf-8') + etag_hash = sha256(etag_bytes) + filename += '.' + etag_hash.hexdigest() + + return filename + + +def filename_to_url(filename, cache_dir=None): + """ + Return the url and etag (which may be ``None``) stored for `filename`. + Raise ``EnvironmentError`` if `filename` or its stored metadata do not exist. + """ + if cache_dir is None: + cache_dir = PYTORCH_TRANSFORMERS_CACHE + if sys.version_info[0] == 3 and isinstance(cache_dir, Path): + cache_dir = str(cache_dir) + + cache_path = os.path.join(cache_dir, filename) + if not os.path.exists(cache_path): + raise EnvironmentError("file {} not found".format(cache_path)) + + meta_path = cache_path + '.json' + if not os.path.exists(meta_path): + raise EnvironmentError("file {} not found".format(meta_path)) + + with open(meta_path, encoding="utf-8") as meta_file: + metadata = json.load(meta_file) + url = metadata['url'] + etag = metadata['etag'] + + return url, etag + + +def cached_path(url_or_filename, cache_dir=None, force_download=False, proxies=None): + """ + Given something that might be a URL (or might be a local path), + determine which. 
If it's a URL, download the file and cache it, and + return the path to the cached file. If it's already a local path, + make sure the file exists and then return the path. + Args: + cache_dir: specify a cache directory to save the file to (overwrite the default cache dir). + force_download: if True, re-dowload the file even if it's already cached in the cache dir. + """ + if cache_dir is None: + cache_dir = PYTORCH_TRANSFORMERS_CACHE + if sys.version_info[0] == 3 and isinstance(url_or_filename, Path): + url_or_filename = str(url_or_filename) + if sys.version_info[0] == 3 and isinstance(cache_dir, Path): + cache_dir = str(cache_dir) + + parsed = urlparse(url_or_filename) + + if parsed.scheme in ('http', 'https', 's3'): + # URL, so get it from the cache (downloading if necessary) + return get_from_cache(url_or_filename, cache_dir=cache_dir, force_download=force_download, proxies=proxies) + elif os.path.exists(url_or_filename): + # File, and it exists. + return url_or_filename + elif parsed.scheme == '': + # File, but it doesn't exist. + raise EnvironmentError("file {} not found".format(url_or_filename)) + else: + # Something unknown + raise ValueError("unable to parse {} as a URL or as a local path".format(url_or_filename)) + + +def split_s3_path(url): + """Split a full s3 path into the bucket name and path.""" + parsed = urlparse(url) + if not parsed.netloc or not parsed.path: + raise ValueError("bad s3 path {}".format(url)) + bucket_name = parsed.netloc + s3_path = parsed.path + # Remove '/' at beginning of path. + if s3_path.startswith("/"): + s3_path = s3_path[1:] + return bucket_name, s3_path + + +def s3_request(func): + """ + Wrapper function for s3 requests in order to create more helpful error + messages. + """ + + @wraps(func) + def wrapper(url, *args, **kwargs): + try: + return func(url, *args, **kwargs) + except ClientError as exc: + if int(exc.response["Error"]["Code"]) == 404: + raise EnvironmentError("file {} not found".format(url)) + else: + raise + + return wrapper + + +@s3_request +def s3_etag(url, proxies=None): + """Check ETag on S3 object.""" + s3_resource = boto3.resource("s3", config=Config(proxies=proxies)) + bucket_name, s3_path = split_s3_path(url) + s3_object = s3_resource.Object(bucket_name, s3_path) + return s3_object.e_tag + + +@s3_request +def s3_get(url, temp_file, proxies=None): + """Pull a file directly from S3.""" + s3_resource = boto3.resource("s3", config=Config(proxies=proxies)) + bucket_name, s3_path = split_s3_path(url) + s3_resource.Bucket(bucket_name).download_fileobj(s3_path, temp_file) + + +def http_get(url, temp_file, proxies=None): + req = requests.get(url, stream=True, proxies=proxies) + content_length = req.headers.get('Content-Length') + total = int(content_length) if content_length is not None else None + progress = tqdm(unit="B", total=total) + for chunk in req.iter_content(chunk_size=1024): + if chunk: # filter out keep-alive new chunks + progress.update(len(chunk)) + temp_file.write(chunk) + progress.close() + + +def get_from_cache(url, cache_dir=None, force_download=False, proxies=None): + """ + Given a URL, look for the corresponding dataset in the local cache. + If it's not there, download it. Then return the path to the cached file. 
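How the pieces above combine: `url_to_filename` hashes the URL (plus the server-reported ETag, when there is one) into a stable cache key, and `cached_path` resolves a URL or local path to a file on disk. A sketch; the network is needed on the first call, the ETag value is illustrative, and the URL is one of the config URLs that appears elsewhere in this diff:

from hashlib import sha256
from core.models.latent_diffusion.vae.optimus_modules.file_utils import cached_path, url_to_filename

url = "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-config.json"
name = url_to_filename(url, etag='"abc123"')
assert name.startswith(sha256(url.encode('utf-8')).hexdigest())

local_path = cached_path(url)   # downloads once, then resolves from the cache
print(local_path)               # e.g. ~/.cache/torch/pytorch_transformers/<hash>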
+ """ + if cache_dir is None: + cache_dir = PYTORCH_TRANSFORMERS_CACHE + if sys.version_info[0] == 3 and isinstance(cache_dir, Path): + cache_dir = str(cache_dir) + if sys.version_info[0] == 2 and not isinstance(cache_dir, str): + cache_dir = str(cache_dir) + + if not os.path.exists(cache_dir): + os.makedirs(cache_dir) + + # Get eTag to add to filename, if it exists. + if url.startswith("s3://"): + etag = s3_etag(url, proxies=proxies) + else: + try: + response = requests.head(url, allow_redirects=True, proxies=proxies) + if response.status_code != 200: + etag = None + else: + etag = response.headers.get("ETag") + except EnvironmentError: + etag = None + + if sys.version_info[0] == 2 and etag is not None: + etag = etag.decode('utf-8') + filename = url_to_filename(url, etag) + + # get cache path to put the file + cache_path = os.path.join(cache_dir, filename) + + # If we don't have a connection (etag is None) and can't identify the file + # try to get the last downloaded one + if not os.path.exists(cache_path) and etag is None: + matching_files = fnmatch.filter(os.listdir(cache_dir), filename + '.*') + matching_files = list(filter(lambda s: not s.endswith('.json'), matching_files)) + if matching_files: + cache_path = os.path.join(cache_dir, matching_files[-1]) + + if not os.path.exists(cache_path) or force_download: + # Download to temporary file, then copy to cache dir once finished. + # Otherwise you get corrupt cache entries if the download gets interrupted. + with tempfile.NamedTemporaryFile() as temp_file: + logger.info("%s not found in cache or force_download set to True, downloading to %s", url, temp_file.name) + + # GET file object + if url.startswith("s3://"): + s3_get(url, temp_file, proxies=proxies) + else: + http_get(url, temp_file, proxies=proxies) + + # we are copying the file before closing it, so flush to avoid truncation + temp_file.flush() + # shutil.copyfileobj() starts at the current position, so go to the start + temp_file.seek(0) + + logger.info("copying %s to cache at %s", temp_file.name, cache_path) + with open(cache_path, 'wb') as cache_file: + shutil.copyfileobj(temp_file, cache_file) + + logger.info("creating metadata file for %s", cache_path) + meta = {'url': url, 'etag': etag} + meta_path = cache_path + '.json' + with open(meta_path, 'w') as meta_file: + output_string = json.dumps(meta) + if sys.version_info[0] == 2 and isinstance(output_string, str): + output_string = unicode(output_string, 'utf-8') # The beauty of python 2 + meta_file.write(output_string) + + logger.info("removing temp file %s", temp_file.name) + + return cache_path diff --git a/core/models/latent_diffusion/vae/optimus_modules/modeling_utils.py b/core/models/latent_diffusion/vae/optimus_modules/modeling_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d16725f2138f3f0bb33cfe3ddb544063c5e6ee55 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/modeling_utils.py @@ -0,0 +1,781 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""PyTorch BERT model.""" + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + + +import pdb +import copy +import json +import logging +import os +from io import open + +import six +import torch +from torch import nn +from torch.nn import CrossEntropyLoss +from torch.nn import functional as F + +from .configuration_utils import PretrainedConfig +from .file_utils import cached_path, WEIGHTS_NAME, TF_WEIGHTS_NAME + +logger = logging.getLogger(__name__) + + +try: + from torch.nn import Identity +except ImportError: + # Older PyTorch compatibility + class Identity(nn.Module): + r"""A placeholder identity operator that is argument-insensitive. + """ + def __init__(self, *args, **kwargs): + super(Identity, self).__init__() + + def forward(self, input): + return input + + +class PreTrainedModel(nn.Module): + r""" Base class for all models. + + :class:`~pytorch_transformers.PreTrainedModel` takes care of storing the configuration of the models and handles methods for loading/downloading/saving models + as well as a few methods commons to all models to (i) resize the input embeddings and (ii) prune heads in the self-attention heads. + + Class attributes (overridden by derived classes): + - ``config_class``: a class derived from :class:`~pytorch_transformers.PretrainedConfig` to use as configuration class for this model architecture. + - ``pretrained_model_archive_map``: a python ``dict`` of with `short-cut-names` (string) as keys and `url` (string) of associated pretrained weights as values. + - ``load_tf_weights``: a python ``method`` for loading a TensorFlow checkpoint in a PyTorch model, taking as arguments: + + - ``model``: an instance of the relevant subclass of :class:`~pytorch_transformers.PreTrainedModel`, + - ``config``: an instance of the relevant subclass of :class:`~pytorch_transformers.PretrainedConfig`, + - ``path``: a path (string) to the TensorFlow checkpoint. + + - ``base_model_prefix``: a string indicating the attribute associated to the base model in derived classes of the same architecture adding modules on top of the base model. + """ + config_class = None + pretrained_model_archive_map = {} + load_tf_weights = lambda model, config, path: None + base_model_prefix = "" + + def __init__(self, config, *inputs, **kwargs): + super(PreTrainedModel, self).__init__() + if not isinstance(config, PretrainedConfig): + raise ValueError( + "Parameter config in `{}(config)` should be an instance of class `PretrainedConfig`. " + "To create a model from a pretrained model use " + "`model = {}.from_pretrained(PRETRAINED_MODEL_NAME)`".format( + self.__class__.__name__, self.__class__.__name__ + )) + # Save config in model + self.config = config + + def _get_resized_embeddings(self, old_embeddings, new_num_tokens=None): + """ Build a resized Embedding Module from a provided token Embedding Module. + Increasing the size will add newly initialized vectors at the end + Reducing the size will remove vectors from the end + + Args: + new_num_tokens: (`optional`) int + New number of tokens in the embedding matrix. 
+ Increasing the size will add newly initialized vectors at the end + Reducing the size will remove vectors from the end + If not provided or None: return the provided token Embedding Module. + Return: ``torch.nn.Embeddings`` + Pointer to the resized Embedding Module or the old Embedding Module if new_num_tokens is None + """ + if new_num_tokens is None: + return old_embeddings + + old_num_tokens, old_embedding_dim = old_embeddings.weight.size() + if old_num_tokens == new_num_tokens: + return old_embeddings + + # Build new embeddings + new_embeddings = nn.Embedding(new_num_tokens, old_embedding_dim) + new_embeddings.to(old_embeddings.weight.device) + + # initialize all new embeddings (in particular added tokens) + self._init_weights(new_embeddings) + + # Copy word embeddings from the previous weights + num_tokens_to_copy = min(old_num_tokens, new_num_tokens) + new_embeddings.weight.data[:num_tokens_to_copy, :] = old_embeddings.weight.data[:num_tokens_to_copy, :] + + return new_embeddings + + def _tie_or_clone_weights(self, first_module, second_module): + """ Tie or clone module weights depending of weither we are using TorchScript or not + """ + if self.config.torchscript: + first_module.weight = nn.Parameter(second_module.weight.clone()) + else: + first_module.weight = second_module.weight + + if hasattr(first_module, 'bias') and first_module.bias is not None: + first_module.bias.data = torch.nn.functional.pad( + first_module.bias.data, + (0, first_module.weight.shape[0] - first_module.bias.shape[0]), + 'constant', + 0 + ) + + def resize_token_embeddings(self, new_num_tokens=None): + """ Resize input token embeddings matrix of the model if new_num_tokens != config.vocab_size. + Take care of tying weights embeddings afterwards if the model class has a `tie_weights()` method. + + Arguments: + + new_num_tokens: (`optional`) int: + New number of tokens in the embedding matrix. Increasing the size will add newly initialized vectors at the end. Reducing the size will remove vectors from the end. + If not provided or None: does nothing and just returns a pointer to the input tokens ``torch.nn.Embeddings`` Module of the model. + + Return: ``torch.nn.Embeddings`` + Pointer to the input tokens Embeddings Module of the model + """ + + + base_model = getattr(self, self.base_model_prefix, self) # get the base model if needed + + model_embeds = base_model._resize_token_embeddings(new_num_tokens) + if new_num_tokens is None: + return model_embeds + + # Update base model and current model config + self.config.vocab_size = new_num_tokens + base_model.vocab_size = new_num_tokens + + # Tie weights again if needed + if hasattr(self, 'tie_weights'): + self.tie_weights() + + return model_embeds + + def init_weights(self): + """ Initialize and prunes weights if needed. """ + # Initialize weights + self.apply(self._init_weights) + + # Prune heads if needed + if self.config.pruned_heads: + self.prune_heads(self.config.pruned_heads) + + def prune_heads(self, heads_to_prune): + """ Prunes heads of the base model. + + Arguments: + + heads_to_prune: dict with keys being selected layer indices (`int`) and associated values being the list of heads to prune in said layer (list of `int`). + E.g. {1: [0, 2], 2: [2, 3]} will prune heads 0 and 2 on layer 1 and heads 2 and 3 on layer 2. 
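A sketch of the resize path above, assuming the `BertModel` added later in this diff; the token counts are illustrative (e.g. two newly added special tokens):

from core.models.latent_diffusion.vae.optimus_modules.configuration_bert import BertConfig
from core.models.latent_diffusion.vae.optimus_modules.optimus_bert import BertModel

config = BertConfig(vocab_size_or_config_json_file=28996)
model = BertModel(config)

# Old rows are copied, new rows are freshly initialized, and the config is
# kept in sync with the new vocabulary size.
embeddings = model.resize_token_embeddings(28996 + 2)
assert embeddings.weight.shape[0] == 28998
assert model.config.vocab_size == 28998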
+ """ + base_model = getattr(self, self.base_model_prefix, self) # get the base model if needed + + # save new sets of pruned heads as union of previously stored pruned heads and newly pruned heads + for layer, heads in heads_to_prune.items(): + union_heads = set(self.config.pruned_heads.get(layer, [])) | set(heads) + self.config.pruned_heads[layer] = list(union_heads) # Unfortunately we have to store it as list for JSON + + base_model._prune_heads(heads_to_prune) + + def save_pretrained(self, save_directory): + """ Save a model and its configuration file to a directory, so that it + can be re-loaded using the `:func:`~pytorch_transformers.PreTrainedModel.from_pretrained`` class method. + """ + assert os.path.isdir(save_directory), "Saving path should be a directory where the model and configuration can be saved" + + # Only save the model it-self if we are using distributed training + model_to_save = self.module if hasattr(self, 'module') else self + + # Save configuration file + model_to_save.config.save_pretrained(save_directory) + + # If we save using the predefined names, we can load using `from_pretrained` + output_model_file = os.path.join(save_directory, WEIGHTS_NAME) + + torch.save(model_to_save.state_dict(), output_model_file) + + @classmethod + def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): + r"""Instantiate a pretrained pytorch model from a pre-trained model configuration. + + The model is set in evaluation mode by default using ``model.eval()`` (Dropout modules are deactivated) + To train the model, you should first set it back in training mode with ``model.train()`` + + The warning ``Weights from XXX not initialized from pretrained model`` means that the weights of XXX do not come pre-trained with the rest of the model. + It is up to you to train those weights with a downstream fine-tuning task. + + The warning ``Weights from XXX not used in YYY`` means that the layer XXX is not used by YYY, therefore those weights are discarded. + + Parameters: + pretrained_model_name_or_path: either: + + - a string with the `shortcut name` of a pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``. + - a path to a `directory` containing model weights saved using :func:`~pytorch_transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/``. + - a path or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True and a configuration object should be provided as ``config`` argument. This loading path is slower than converting the TensorFlow checkpoint in a PyTorch model using the provided conversion scripts and loading the PyTorch model afterwards. + + model_args: (`optional`) Sequence of positional arguments: + All remaning positional arguments will be passed to the underlying model's ``__init__`` method + + config: (`optional`) instance of a class derived from :class:`~pytorch_transformers.PretrainedConfig`: + Configuration for the model to use instead of an automatically loaded configuation. Configuration can be automatically loaded when: + + - the model is a model provided by the library (loaded with the ``shortcut-name`` string of a pretrained model), or + - the model was saved using :func:`~pytorch_transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory. 
+ - the model is loaded by suppling a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the directory. + + state_dict: (`optional`) dict: + an optional state dictionnary for the model to use instead of a state dictionary loaded from saved weights file. + This option can be used if you want to create a model from a pretrained configuration but load your own weights. + In this case though, you should check if using :func:`~pytorch_transformers.PreTrainedModel.save_pretrained` and :func:`~pytorch_transformers.PreTrainedModel.from_pretrained` is not a simpler option. + + cache_dir: (`optional`) string: + Path to a directory in which a downloaded pre-trained model + configuration should be cached if the standard cache should not be used. + + force_download: (`optional`) boolean, default False: + Force to (re-)download the model weights and configuration files and override the cached versions if they exists. + + proxies: (`optional`) dict, default None: + A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. + The proxies are used on each request. + + output_loading_info: (`optional`) boolean: + Set to ``True`` to also return a dictionnary containing missing keys, unexpected keys and error messages. + + kwargs: (`optional`) Remaining dictionary of keyword arguments: + Can be used to update the configuration object (after it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether a `config` is provided or automatically loaded: + + - If a configuration is provided with ``config``, ``**kwargs`` will be directly passed to the underlying model's ``__init__`` method (we assume all relevant updates to the configuration have already been done) + - If a configuration is not provided, ``kwargs`` will be first passed to the configuration class initialization function (:func:`~pytorch_transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to a configuration attribute will be used to override said attribute with the supplied ``kwargs`` value. Remaining keys that do not correspond to any configuration attribute will be passed to the underlying model's ``__init__`` function. + + Examples:: + + model = BertModel.from_pretrained('bert-base-uncased') # Download model and configuration from S3 and cache. + model = BertModel.from_pretrained('./test/saved_model/') # E.g. 
model was saved using `save_pretrained('./test/saved_model/')` + model = BertModel.from_pretrained('bert-base-uncased', output_attention=True) # Update configuration during loading + assert model.config.output_attention == True + # Loading from a TF checkpoint file instead of a PyTorch model (slower) + config = BertConfig.from_json_file('./tf_model/my_tf_model_config.json') + model = BertModel.from_pretrained('./tf_model/my_tf_checkpoint.ckpt.index', from_tf=True, config=config) + + """ + config = kwargs.pop('config', None) + state_dict = kwargs.pop('state_dict', None) + cache_dir = kwargs.pop('cache_dir', None) + from_tf = kwargs.pop('from_tf', False) + force_download = kwargs.pop('force_download', False) + proxies = kwargs.pop('proxies', None) + output_loading_info = kwargs.pop('output_loading_info', False) + + # Load config + if config is None: + config, model_kwargs = cls.config_class.from_pretrained( + pretrained_model_name_or_path, *model_args, + cache_dir=cache_dir, return_unused_kwargs=True, + force_download=force_download, + **kwargs + ) + else: + model_kwargs = kwargs + + # Load model + if pretrained_model_name_or_path in cls.pretrained_model_archive_map: + archive_file = cls.pretrained_model_archive_map[pretrained_model_name_or_path] + elif os.path.isdir(pretrained_model_name_or_path): + if from_tf: + # Directly load from a TensorFlow checkpoint + archive_file = os.path.join(pretrained_model_name_or_path, TF_WEIGHTS_NAME + ".index") + else: + archive_file = os.path.join(pretrained_model_name_or_path, WEIGHTS_NAME) + else: + if from_tf: + # Directly load from a TensorFlow checkpoint + archive_file = pretrained_model_name_or_path + ".index" + else: + archive_file = pretrained_model_name_or_path + # redirect to the cache, if necessary + try: + resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies) + except EnvironmentError as e: + if pretrained_model_name_or_path in cls.pretrained_model_archive_map: + logger.error( + "Couldn't reach server at '{}' to download pretrained weights.".format( + archive_file)) + else: + logger.error( + "Model name '{}' was not found in model name list ({}). " + "We assumed '{}' was a path or url but couldn't find any file " + "associated to this path or url.".format( + pretrained_model_name_or_path, + ', '.join(cls.pretrained_model_archive_map.keys()), + archive_file)) + raise e + if resolved_archive_file == archive_file: + logger.info("loading weights file {}".format(archive_file)) + else: + logger.info("loading weights file {} from cache at {}".format( + archive_file, resolved_archive_file)) + + # Instantiate model. 
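+        # (Sketch of the steps that follow: build the model skeleton from
+        #  `config`, load the resolved state_dict on CPU, rename legacy
+        #  'gamma'/'beta' keys to 'weight'/'bias', then copy tensors module by
+        #  module, adding or stripping `base_model_prefix` so both base and
+        #  derived models load cleanly.)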
+ model = cls(config, *model_args, **model_kwargs) + + if state_dict is None and not from_tf: + state_dict = torch.load(resolved_archive_file, map_location='cpu') + if from_tf: + # Directly load from a TensorFlow checkpoint + return cls.load_tf_weights(model, config, resolved_archive_file[:-6]) # Remove the '.index' + + # Convert old format to new format if needed from a PyTorch state_dict + old_keys = [] + new_keys = [] + for key in state_dict.keys(): + new_key = None + if 'gamma' in key: + new_key = key.replace('gamma', 'weight') + if 'beta' in key: + new_key = key.replace('beta', 'bias') + if new_key: + old_keys.append(key) + new_keys.append(new_key) + for old_key, new_key in zip(old_keys, new_keys): + state_dict[new_key] = state_dict.pop(old_key) + + # Load from a PyTorch state_dict + missing_keys = [] + unexpected_keys = [] + error_msgs = [] + # copy state_dict so _load_from_state_dict can modify it + metadata = getattr(state_dict, '_metadata', None) + state_dict = state_dict.copy() + if metadata is not None: + state_dict._metadata = metadata + + def load(module, prefix=''): + local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {}) + module._load_from_state_dict( + state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs) + for name, child in module._modules.items(): + if child is not None: + load(child, prefix + name + '.') + + # Make sure we are able to load base models as well as derived models (with heads) + start_prefix = '' + model_to_load = model + if not hasattr(model, cls.base_model_prefix) and any(s.startswith(cls.base_model_prefix) for s in state_dict.keys()): + start_prefix = cls.base_model_prefix + '.' + if hasattr(model, cls.base_model_prefix) and not any(s.startswith(cls.base_model_prefix) for s in state_dict.keys()): + model_to_load = getattr(model, cls.base_model_prefix) + + load(model_to_load, prefix=start_prefix) + if len(missing_keys) > 0: + logger.info("Weights of {} not initialized from pretrained model: {}".format( + model.__class__.__name__, missing_keys)) + if len(unexpected_keys) > 0: + logger.info("Weights from pretrained model not used in {}: {}".format( + model.__class__.__name__, unexpected_keys)) + if len(error_msgs) > 0: + raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format( + model.__class__.__name__, "\n\t".join(error_msgs))) + + if hasattr(model, 'tie_weights'): + model.tie_weights() # make sure word embedding weights are still tied + + # Set model in evaluation mode to desactivate DropOut modules by default + model.eval() + + if output_loading_info: + loading_info = {"missing_keys": missing_keys, "unexpected_keys": unexpected_keys, "error_msgs": error_msgs} + return model, loading_info + + return model + + +class Conv1D(nn.Module): + def __init__(self, nf, nx): + """ Conv1D layer as defined by Radford et al. for OpenAI GPT (and also used in GPT-2) + Basically works like a Linear layer but the weights are transposed + """ + super(Conv1D, self).__init__() + self.nf = nf + w = torch.empty(nx, nf) + nn.init.normal_(w, std=0.02) + self.weight = nn.Parameter(w) + self.bias = nn.Parameter(torch.zeros(nf)) + + def forward(self, x): + size_out = x.size()[:-1] + (self.nf,) + x = torch.addmm(self.bias, x.view(-1, x.size(-1)), self.weight) + x = x.view(*size_out) + return x + + +class PoolerStartLogits(nn.Module): + """ Compute SQuAD start_logits from sequence hidden states. 
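`Conv1D` above behaves like `nn.Linear` with the weight stored transposed; a quick shape sketch (the sizes are illustrative, matching GPT-2's fused query/key/value projection):

import torch
from core.models.latent_diffusion.vae.optimus_modules.modeling_utils import Conv1D

layer = Conv1D(nf=2304, nx=768)            # maps the last dim from 768 to 2304
x = torch.randn(4, 16, 768)                # (batch, seq, n_embd)
y = layer(x)
assert y.shape == (4, 16, 2304)
assert layer.weight.shape == (768, 2304)   # transposed relative to nn.Linear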
""" + def __init__(self, config): + super(PoolerStartLogits, self).__init__() + self.dense = nn.Linear(config.hidden_size, 1) + + def forward(self, hidden_states, p_mask=None): + """ Args: + **p_mask**: (`optional`) ``torch.FloatTensor`` of shape `(batch_size, seq_len)` + invalid position mask such as query and special symbols (PAD, SEP, CLS) + 1.0 means token should be masked. + """ + x = self.dense(hidden_states).squeeze(-1) + + if p_mask is not None: + if next(self.parameters()).dtype == torch.float16: + x = x * (1 - p_mask) - 65500 * p_mask + else: + x = x * (1 - p_mask) - 1e30 * p_mask + + return x + + +class PoolerEndLogits(nn.Module): + """ Compute SQuAD end_logits from sequence hidden states and start token hidden state. + """ + def __init__(self, config): + super(PoolerEndLogits, self).__init__() + self.dense_0 = nn.Linear(config.hidden_size * 2, config.hidden_size) + self.activation = nn.Tanh() + self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dense_1 = nn.Linear(config.hidden_size, 1) + + def forward(self, hidden_states, start_states=None, start_positions=None, p_mask=None): + """ Args: + One of ``start_states``, ``start_positions`` should be not None. + If both are set, ``start_positions`` overrides ``start_states``. + + **start_states**: ``torch.LongTensor`` of shape identical to hidden_states + hidden states of the first tokens for the labeled span. + **start_positions**: ``torch.LongTensor`` of shape ``(batch_size,)`` + position of the first token for the labeled span: + **p_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(batch_size, seq_len)`` + Mask of invalid position such as query and special symbols (PAD, SEP, CLS) + 1.0 means token should be masked. + """ + assert start_states is not None or start_positions is not None, "One of start_states, start_positions should be not None" + if start_positions is not None: + slen, hsz = hidden_states.shape[-2:] + start_positions = start_positions[:, None, None].expand(-1, -1, hsz) # shape (bsz, 1, hsz) + start_states = hidden_states.gather(-2, start_positions) # shape (bsz, 1, hsz) + start_states = start_states.expand(-1, slen, -1) # shape (bsz, slen, hsz) + + x = self.dense_0(torch.cat([hidden_states, start_states], dim=-1)) + x = self.activation(x) + x = self.LayerNorm(x) + x = self.dense_1(x).squeeze(-1) + + if p_mask is not None: + x = x * (1 - p_mask) - 1e30 * p_mask + + return x + + +class PoolerAnswerClass(nn.Module): + """ Compute SQuAD 2.0 answer class from classification and start tokens hidden states. """ + def __init__(self, config): + super(PoolerAnswerClass, self).__init__() + self.dense_0 = nn.Linear(config.hidden_size * 2, config.hidden_size) + self.activation = nn.Tanh() + self.dense_1 = nn.Linear(config.hidden_size, 1, bias=False) + + def forward(self, hidden_states, start_states=None, start_positions=None, cls_index=None): + """ + Args: + One of ``start_states``, ``start_positions`` should be not None. + If both are set, ``start_positions`` overrides ``start_states``. + + **start_states**: ``torch.LongTensor`` of shape identical to ``hidden_states``. + hidden states of the first tokens for the labeled span. + **start_positions**: ``torch.LongTensor`` of shape ``(batch_size,)`` + position of the first token for the labeled span. + **cls_index**: torch.LongTensor of shape ``(batch_size,)`` + position of the CLS token. If None, take the last token. 
+ + note(Original repo): + no dependency on end_feature so that we can obtain one single `cls_logits` + for each sample + """ + hsz = hidden_states.shape[-1] + assert start_states is not None or start_positions is not None, "One of start_states, start_positions should be not None" + if start_positions is not None: + start_positions = start_positions[:, None, None].expand(-1, -1, hsz) # shape (bsz, 1, hsz) + start_states = hidden_states.gather(-2, start_positions).squeeze(-2) # shape (bsz, hsz) + + if cls_index is not None: + cls_index = cls_index[:, None, None].expand(-1, -1, hsz) # shape (bsz, 1, hsz) + cls_token_state = hidden_states.gather(-2, cls_index).squeeze(-2) # shape (bsz, hsz) + else: + cls_token_state = hidden_states[:, -1, :] # shape (bsz, hsz) + + x = self.dense_0(torch.cat([start_states, cls_token_state], dim=-1)) + x = self.activation(x) + x = self.dense_1(x).squeeze(-1) + + return x + + +class SQuADHead(nn.Module): + r""" A SQuAD head inspired by XLNet. + + Parameters: + config (:class:`~pytorch_transformers.XLNetConfig`): Model configuration class with all the parameters of the model. + + Inputs: + **hidden_states**: ``torch.FloatTensor`` of shape ``(batch_size, seq_len, hidden_size)`` + hidden states of sequence tokens + **start_positions**: ``torch.LongTensor`` of shape ``(batch_size,)`` + position of the first token for the labeled span. + **end_positions**: ``torch.LongTensor`` of shape ``(batch_size,)`` + position of the last token for the labeled span. + **cls_index**: torch.LongTensor of shape ``(batch_size,)`` + position of the CLS token. If None, take the last token. + **is_impossible**: ``torch.LongTensor`` of shape ``(batch_size,)`` + Whether the question has a possible answer in the paragraph or not. + **p_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(batch_size, seq_len)`` + Mask of invalid position such as query and special symbols (PAD, SEP, CLS) + 1.0 means token should be masked. + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned if both ``start_positions`` and ``end_positions`` are provided) ``torch.FloatTensor`` of shape ``(1,)``: + Classification loss as the sum of start token, end token (and is_impossible if provided) classification losses. + **start_top_log_probs**: (`optional`, returned if ``start_positions`` or ``end_positions`` is not provided) + ``torch.FloatTensor`` of shape ``(batch_size, config.start_n_top)`` + Log probabilities for the top config.start_n_top start token possibilities (beam-search). + **start_top_index**: (`optional`, returned if ``start_positions`` or ``end_positions`` is not provided) + ``torch.LongTensor`` of shape ``(batch_size, config.start_n_top)`` + Indices for the top config.start_n_top start token possibilities (beam-search). + **end_top_log_probs**: (`optional`, returned if ``start_positions`` or ``end_positions`` is not provided) + ``torch.FloatTensor`` of shape ``(batch_size, config.start_n_top * config.end_n_top)`` + Log probabilities for the top ``config.start_n_top * config.end_n_top`` end token possibilities (beam-search). + **end_top_index**: (`optional`, returned if ``start_positions`` or ``end_positions`` is not provided) + ``torch.LongTensor`` of shape ``(batch_size, config.start_n_top * config.end_n_top)`` + Indices for the top ``config.start_n_top * config.end_n_top`` end token possibilities (beam-search). 
+ **cls_logits**: (`optional`, returned if ``start_positions`` or ``end_positions`` is not provided) + ``torch.FloatTensor`` of shape ``(batch_size,)`` + Log probabilities for the ``is_impossible`` label of the answers. + """ + def __init__(self, config): + super(SQuADHead, self).__init__() + self.start_n_top = config.start_n_top + self.end_n_top = config.end_n_top + + self.start_logits = PoolerStartLogits(config) + self.end_logits = PoolerEndLogits(config) + self.answer_class = PoolerAnswerClass(config) + + def forward(self, hidden_states, start_positions=None, end_positions=None, + cls_index=None, is_impossible=None, p_mask=None): + outputs = () + + start_logits = self.start_logits(hidden_states, p_mask=p_mask) + + if start_positions is not None and end_positions is not None: + # If we are on multi-GPU, let's remove the dimension added by batch splitting + for x in (start_positions, end_positions, cls_index, is_impossible): + if x is not None and x.dim() > 1: + x.squeeze_(-1) + + # during training, compute the end logits based on the ground truth of the start position + end_logits = self.end_logits(hidden_states, start_positions=start_positions, p_mask=p_mask) + + loss_fct = CrossEntropyLoss() + start_loss = loss_fct(start_logits, start_positions) + end_loss = loss_fct(end_logits, end_positions) + total_loss = (start_loss + end_loss) / 2 + + if cls_index is not None and is_impossible is not None: + # Predict answerability from the representation of CLS and START + cls_logits = self.answer_class(hidden_states, start_positions=start_positions, cls_index=cls_index) + loss_fct_cls = nn.BCEWithLogitsLoss() + cls_loss = loss_fct_cls(cls_logits, is_impossible) + + # note(zhiliny): by default multiply the loss by 0.5 so that the scale is comparable to start_loss and end_loss + total_loss += cls_loss * 0.5 + + outputs = (total_loss,) + outputs + + else: + # during inference, compute the end logits based on beam search + bsz, slen, hsz = hidden_states.size() + start_log_probs = F.softmax(start_logits, dim=-1) # shape (bsz, slen) + + start_top_log_probs, start_top_index = torch.topk(start_log_probs, self.start_n_top, dim=-1) # shape (bsz, start_n_top) + start_top_index_exp = start_top_index.unsqueeze(-1).expand(-1, -1, hsz) # shape (bsz, start_n_top, hsz) + start_states = torch.gather(hidden_states, -2, start_top_index_exp) # shape (bsz, start_n_top, hsz) + start_states = start_states.unsqueeze(1).expand(-1, slen, -1, -1) # shape (bsz, slen, start_n_top, hsz) + + hidden_states_expanded = hidden_states.unsqueeze(2).expand_as(start_states) # shape (bsz, slen, start_n_top, hsz) + p_mask = p_mask.unsqueeze(-1) if p_mask is not None else None + end_logits = self.end_logits(hidden_states_expanded, start_states=start_states, p_mask=p_mask) + end_log_probs = F.softmax(end_logits, dim=1) # shape (bsz, slen, start_n_top) + + end_top_log_probs, end_top_index = torch.topk(end_log_probs, self.end_n_top, dim=1) # shape (bsz, end_n_top, start_n_top) + end_top_log_probs = end_top_log_probs.view(-1, self.start_n_top * self.end_n_top) + end_top_index = end_top_index.view(-1, self.start_n_top * self.end_n_top) + + start_states = torch.einsum("blh,bl->bh", hidden_states, start_log_probs) + cls_logits = self.answer_class(hidden_states, start_states=start_states, cls_index=cls_index) + + outputs = (start_top_log_probs, start_top_index, end_top_log_probs, end_top_index, cls_logits) + outputs + + # return start_top_log_probs, start_top_index, end_top_log_probs, end_top_index, cls_logits + # or (if labels are provided) 
(total_loss,) + return outputs + + +class SequenceSummary(nn.Module): + r""" Compute a single vector summary of a sequence hidden states according to various possibilities: + Args of the config class: + summary_type: + - 'last' => [default] take the last token hidden state (like XLNet) + - 'first' => take the first token hidden state (like Bert) + - 'mean' => take the mean of all tokens hidden states + - 'cls_index' => supply a Tensor of classification token position (GPT/GPT-2) + - 'attn' => Not implemented now, use multi-head attention + summary_use_proj: Add a projection after the vector extraction + summary_proj_to_labels: If True, the projection outputs to config.num_labels classes (otherwise to hidden_size). Default: False. + summary_activation: 'tanh' => add a tanh activation to the output, Other => no activation. Default + summary_first_dropout: Add a dropout before the projection and activation + summary_last_dropout: Add a dropout after the projection and activation + """ + def __init__(self, config): + super(SequenceSummary, self).__init__() + + self.summary_type = config.summary_type if hasattr(config, 'summary_use_proj') else 'last' + if self.summary_type == 'attn': + # We should use a standard multi-head attention module with absolute positional embedding for that. + # Cf. https://github.com/zihangdai/xlnet/blob/master/modeling.py#L253-L276 + # We can probably just use the multi-head attention module of PyTorch >=1.1.0 + raise NotImplementedError + + self.summary = Identity() + if hasattr(config, 'summary_use_proj') and config.summary_use_proj: + if hasattr(config, 'summary_proj_to_labels') and config.summary_proj_to_labels and config.num_labels > 0: + num_classes = config.num_labels + else: + num_classes = config.hidden_size + self.summary = nn.Linear(config.hidden_size, num_classes) + + self.activation = Identity() + if hasattr(config, 'summary_activation') and config.summary_activation == 'tanh': + self.activation = nn.Tanh() + + self.first_dropout = Identity() + if hasattr(config, 'summary_first_dropout') and config.summary_first_dropout > 0: + self.first_dropout = nn.Dropout(config.summary_first_dropout) + + self.last_dropout = Identity() + if hasattr(config, 'summary_last_dropout') and config.summary_last_dropout > 0: + self.last_dropout = nn.Dropout(config.summary_last_dropout) + + def forward(self, hidden_states, cls_index=None): + """ hidden_states: float Tensor in shape [bsz, seq_len, hidden_size], the hidden-states of the last layer. + cls_index: [optional] position of the classification token if summary_type == 'cls_index', + shape (bsz,) or more generally (bsz, ...) where ... are optional leading dimensions of hidden_states. 
+ if summary_type == 'cls_index' and cls_index is None: + we take the last token of the sequence as classification token + """ + if self.summary_type == 'last': + output = hidden_states[:, -1] + elif self.summary_type == 'first': + output = hidden_states[:, 0] + elif self.summary_type == 'mean': + output = hidden_states.mean(dim=1) + elif self.summary_type == 'cls_index': + if cls_index is None: + cls_index = torch.full_like(hidden_states[..., :1, :], hidden_states.shape[-2]-1, dtype=torch.long) + else: + cls_index = cls_index.unsqueeze(-1).unsqueeze(-1) + cls_index = cls_index.expand((-1,) * (cls_index.dim()-1) + (hidden_states.size(-1),)) + # shape of cls_index: (bsz, XX, 1, hidden_size) where XX are optional leading dim of hidden_states + output = hidden_states.gather(-2, cls_index).squeeze(-2) # shape (bsz, XX, hidden_size) + elif self.summary_type == 'attn': + raise NotImplementedError + + output = self.first_dropout(output) + output = self.summary(output) + output = self.activation(output) + output = self.last_dropout(output) + + return output + + +def prune_linear_layer(layer, index, dim=0): + """ Prune a linear layer (a model parameters) to keep only entries in index. + Return the pruned layer as a new layer with requires_grad=True. + Used to remove heads. + """ + index = index.to(layer.weight.device) + W = layer.weight.index_select(dim, index).clone().detach() + if layer.bias is not None: + if dim == 1: + b = layer.bias.clone().detach() + else: + b = layer.bias[index].clone().detach() + new_size = list(layer.weight.size()) + new_size[dim] = len(index) + new_layer = nn.Linear(new_size[1], new_size[0], bias=layer.bias is not None).to(layer.weight.device) + new_layer.weight.requires_grad = False + new_layer.weight.copy_(W.contiguous()) + new_layer.weight.requires_grad = True + if layer.bias is not None: + new_layer.bias.requires_grad = False + new_layer.bias.copy_(b.contiguous()) + new_layer.bias.requires_grad = True + return new_layer + + +def prune_conv1d_layer(layer, index, dim=1): + """ Prune a Conv1D layer (a model parameters) to keep only entries in index. + A Conv1D work as a Linear layer (see e.g. BERT) but the weights are transposed. + Return the pruned layer as a new layer with requires_grad=True. + Used to remove heads. + """ + index = index.to(layer.weight.device) + W = layer.weight.index_select(dim, index).clone().detach() + if dim == 0: + b = layer.bias.clone().detach() + else: + b = layer.bias[index].clone().detach() + new_size = list(layer.weight.size()) + new_size[dim] = len(index) + new_layer = Conv1D(new_size[1], new_size[0]).to(layer.weight.device) + new_layer.weight.requires_grad = False + new_layer.weight.copy_(W.contiguous()) + new_layer.weight.requires_grad = True + new_layer.bias.requires_grad = False + new_layer.bias.copy_(b.contiguous()) + new_layer.bias.requires_grad = True + return new_layer + + +def prune_layer(layer, index, dim=None): + """ Prune a Conv1D or nn.Linear layer (a model parameters) to keep only entries in index. + Return the pruned layer as a new layer with requires_grad=True. + Used to remove heads. 
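A sketch of pruning one attention head's worth of output features with the helper above (sizes are illustrative):

import torch
from torch import nn
from core.models.latent_diffusion.vae.optimus_modules.modeling_utils import prune_linear_layer

layer = nn.Linear(768, 768)
index = torch.arange(64, 768)              # keep everything but the first 64 dims
pruned = prune_linear_layer(layer, index, dim=0)
assert pruned.weight.shape == (704, 768)   # dim=0 prunes output features
assert pruned.bias.shape == (704,)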
+ """ + if isinstance(layer, nn.Linear): + return prune_linear_layer(layer, index, dim=0 if dim is None else dim) + elif isinstance(layer, Conv1D): + return prune_conv1d_layer(layer, index, dim=1 if dim is None else dim) + else: + raise ValueError("Can't prune layer of class {}".format(layer.__class__)) diff --git a/core/models/latent_diffusion/vae/optimus_modules/optimus_bert.py b/core/models/latent_diffusion/vae/optimus_modules/optimus_bert.py new file mode 100644 index 0000000000000000000000000000000000000000..b4f9137d1597165575b44c1dc08f7cb1d32dd557 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/optimus_bert.py @@ -0,0 +1,1440 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""PyTorch BERT model. """ + +from __future__ import absolute_import, division, print_function, unicode_literals + +import json +import logging +import math +import os +import sys +from io import open + +import pdb + +import torch +from torch import nn +from torch.nn import CrossEntropyLoss, MSELoss + +from .modeling_utils import PreTrainedModel, prune_linear_layer +from .configuration_bert import BertConfig +from .file_utils import add_start_docstrings + +logger = logging.getLogger(__name__) + +BERT_PRETRAINED_MODEL_ARCHIVE_MAP = { + 'bert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-pytorch_model.bin", + 'bert-large-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-pytorch_model.bin", + 'bert-base-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-pytorch_model.bin", + 'bert-large-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-pytorch_model.bin", + 'bert-base-multilingual-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased-pytorch_model.bin", + 'bert-base-multilingual-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased-pytorch_model.bin", + 'bert-base-chinese': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese-pytorch_model.bin", + 'bert-base-german-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-german-cased-pytorch_model.bin", + 'bert-large-uncased-whole-word-masking': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-pytorch_model.bin", + 'bert-large-cased-whole-word-masking': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-pytorch_model.bin", + 'bert-large-uncased-whole-word-masking-finetuned-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-finetuned-squad-pytorch_model.bin", + 'bert-large-cased-whole-word-masking-finetuned-squad': 
"https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-finetuned-squad-pytorch_model.bin", + 'bert-base-cased-finetuned-mrpc': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-finetuned-mrpc-pytorch_model.bin", +} + +def load_tf_weights_in_bert(model, config, tf_checkpoint_path): + """ Load tf checkpoints in a pytorch model. + """ + try: + import re + import numpy as np + import tensorflow as tf + except ImportError: + logger.error("Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see " + "https://www.tensorflow.org/install/ for installation instructions.") + raise + tf_path = os.path.abspath(tf_checkpoint_path) + logger.info("Converting TensorFlow checkpoint from {}".format(tf_path)) + # Load weights from TF model + init_vars = tf.train.list_variables(tf_path) + names = [] + arrays = [] + for name, shape in init_vars: + logger.info("Loading TF weight {} with shape {}".format(name, shape)) + array = tf.train.load_variable(tf_path, name) + names.append(name) + arrays.append(array) + + for name, array in zip(names, arrays): + name = name.split('/') + # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v + # which are not required for using pretrained model + if any(n in ["adam_v", "adam_m", "global_step"] for n in name): + logger.info("Skipping {}".format("/".join(name))) + continue + pointer = model + for m_name in name: + if re.fullmatch(r'[A-Za-z]+_\d+', m_name): + l = re.split(r'_(\d+)', m_name) + else: + l = [m_name] + if l[0] == 'kernel' or l[0] == 'gamma': + pointer = getattr(pointer, 'weight') + elif l[0] == 'output_bias' or l[0] == 'beta': + pointer = getattr(pointer, 'bias') + elif l[0] == 'output_weights': + pointer = getattr(pointer, 'weight') + elif l[0] == 'squad': + pointer = getattr(pointer, 'classifier') + else: + try: + pointer = getattr(pointer, l[0]) + except AttributeError: + logger.info("Skipping {}".format("/".join(name))) + continue + if len(l) >= 2: + num = int(l[1]) + pointer = pointer[num] + if m_name[-11:] == '_embeddings': + pointer = getattr(pointer, 'weight') + elif m_name == 'kernel': + array = np.transpose(array) + try: + assert pointer.shape == array.shape + except AssertionError as e: + e.args += (pointer.shape, array.shape) + raise + logger.info("Initialize PyTorch weight {}".format(name)) + pointer.data = torch.from_numpy(array) + return model + + +def gelu(x): + """Implementation of the gelu activation function. + For information: OpenAI GPT's gelu is slightly different (and gives slightly different results): + 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) + Also see https://arxiv.org/abs/1606.08415 + """ + return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0))) + + +def swish(x): + return x * torch.sigmoid(x) + + +ACT2FN = {"gelu": gelu, "relu": torch.nn.functional.relu, "swish": swish} + + +# try: +# from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm +# except (ImportError, AttributeError) as e: +# logger.info("Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex .") +BertLayerNorm = torch.nn.LayerNorm + +class BertEmbeddings(nn.Module): + """Construct the embeddings from word, position and token_type embeddings. 
+ """ + def __init__(self, config): + super(BertEmbeddings, self).__init__() + self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=0) + self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) + self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size) + + # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load + # any TensorFlow checkpoint file + self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, input_ids, token_type_ids=None, position_ids=None): + seq_length = input_ids.size(1) + if position_ids is None: + position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device) + position_ids = position_ids.unsqueeze(0).expand_as(input_ids) + if token_type_ids is None: + token_type_ids = torch.zeros_like(input_ids) + + words_embeddings = self.word_embeddings(input_ids) + position_embeddings = self.position_embeddings(position_ids) + token_type_embeddings = self.token_type_embeddings(token_type_ids) + + embeddings = words_embeddings + position_embeddings + token_type_embeddings + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings) + return embeddings + + +class BertSelfAttention(nn.Module): + def __init__(self, config): + super(BertSelfAttention, self).__init__() + if config.hidden_size % config.num_attention_heads != 0: + raise ValueError( + "The hidden size (%d) is not a multiple of the number of attention " + "heads (%d)" % (config.hidden_size, config.num_attention_heads)) + self.output_attentions = config.output_attentions + + self.num_attention_heads = config.num_attention_heads + self.attention_head_size = int(config.hidden_size / config.num_attention_heads) + self.all_head_size = self.num_attention_heads * self.attention_head_size + + self.query = nn.Linear(config.hidden_size, self.all_head_size) + self.key = nn.Linear(config.hidden_size, self.all_head_size) + self.value = nn.Linear(config.hidden_size, self.all_head_size) + + self.dropout = nn.Dropout(config.attention_probs_dropout_prob) + + def transpose_for_scores(self, x): + new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size) + x = x.view(*new_x_shape) + return x.permute(0, 2, 1, 3) + + def forward(self, hidden_states, attention_mask, head_mask=None): + mixed_query_layer = self.query(hidden_states) + mixed_key_layer = self.key(hidden_states) + mixed_value_layer = self.value(hidden_states) + + query_layer = self.transpose_for_scores(mixed_query_layer) + key_layer = self.transpose_for_scores(mixed_key_layer) + value_layer = self.transpose_for_scores(mixed_value_layer) + + # Take the dot product between "query" and "key" to get the raw attention scores. + attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2)) + attention_scores = attention_scores / math.sqrt(self.attention_head_size) + # Apply the attention mask is (precomputed for all layers in BertModel forward() function) + attention_scores = attention_scores + attention_mask + + # Normalize the attention scores to probabilities. + attention_probs = nn.Softmax(dim=-1)(attention_scores) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. 
+        attention_probs = self.dropout(attention_probs)
+
+        # Mask heads if we want to
+        if head_mask is not None:
+            attention_probs = attention_probs * head_mask
+
+        context_layer = torch.matmul(attention_probs, value_layer)
+
+        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
+        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
+        context_layer = context_layer.view(*new_context_layer_shape)
+
+        outputs = (context_layer, attention_probs) if self.output_attentions else (context_layer,)
+        return outputs
+
+
+class BertSelfOutput(nn.Module):
+    def __init__(self, config):
+        super(BertSelfOutput, self).__init__()
+        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
+        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
+        self.dropout = nn.Dropout(config.hidden_dropout_prob)
+
+    def forward(self, hidden_states, input_tensor):
+        hidden_states = self.dense(hidden_states)
+        hidden_states = self.dropout(hidden_states)
+        hidden_states = self.LayerNorm(hidden_states + input_tensor)
+        return hidden_states
+
+
+class BertAttention(nn.Module):
+    def __init__(self, config):
+        super(BertAttention, self).__init__()
+        self.self = BertSelfAttention(config)
+        self.output = BertSelfOutput(config)
+        self.pruned_heads = set()
+
+    def prune_heads(self, heads):
+        if len(heads) == 0:
+            return
+        mask = torch.ones(self.self.num_attention_heads, self.self.attention_head_size)
+        heads = set(heads) - self.pruned_heads  # Convert to set and remove already pruned heads
+        for head in heads:
+            # Compute how many pruned heads are before the head and move the index accordingly
+            head = head - sum(1 if h < head else 0 for h in self.pruned_heads)
+            mask[head] = 0
+        mask = mask.view(-1).contiguous().eq(1)
+        index = torch.arange(len(mask))[mask].long()
+
+        # Prune linear layers
+        self.self.query = prune_linear_layer(self.self.query, index)
+        self.self.key = prune_linear_layer(self.self.key, index)
+        self.self.value = prune_linear_layer(self.self.value, index)
+        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)
+
+        # Update hyper params and store pruned heads
+        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
+        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
+        self.pruned_heads = self.pruned_heads.union(heads)
+
+    def forward(self, input_tensor, attention_mask, head_mask=None):
+        self_outputs = self.self(input_tensor, attention_mask, head_mask)
+        attention_output = self.output(self_outputs[0], input_tensor)
+        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
+        return outputs
+
+
+class BertIntermediate(nn.Module):
+    def __init__(self, config):
+        super(BertIntermediate, self).__init__()
+        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
+        if isinstance(config.hidden_act, str) or (sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode)):
+            self.intermediate_act_fn = ACT2FN[config.hidden_act]
+        else:
+            self.intermediate_act_fn = config.hidden_act
+
+    def forward(self, hidden_states):
+        hidden_states = self.dense(hidden_states)
+        hidden_states = self.intermediate_act_fn(hidden_states)
+        return hidden_states
+
+
+class BertOutput(nn.Module):
+    def __init__(self, config):
+        super(BertOutput, self).__init__()
+        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
+        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
+        self.dropout = nn.Dropout(config.hidden_dropout_prob)
+
+    def forward(self, hidden_states, input_tensor):
+        hidden_states = self.dense(hidden_states)
+        hidden_states = self.dropout(hidden_states)
+        hidden_states = self.LayerNorm(hidden_states + input_tensor)
+        return hidden_states
+
+
+class BertLayer(nn.Module):
+    def __init__(self, config):
+        super(BertLayer, self).__init__()
+        self.attention = BertAttention(config)
+        self.intermediate = BertIntermediate(config)
+        self.output = BertOutput(config)
+
+    def forward(self, hidden_states, attention_mask, head_mask=None):
+        attention_outputs = self.attention(hidden_states, attention_mask, head_mask)
+        attention_output = attention_outputs[0]
+        intermediate_output = self.intermediate(attention_output)
+        layer_output = self.output(intermediate_output, attention_output)
+        outputs = (layer_output,) + attention_outputs[1:]  # add attentions if we output them
+        return outputs
+
+
+class BertEncoder(nn.Module):
+    def __init__(self, config):
+        super(BertEncoder, self).__init__()
+        self.output_attentions = config.output_attentions
+        self.output_hidden_states = config.output_hidden_states
+        self.layer = nn.ModuleList([BertLayer(config) for _ in range(config.num_hidden_layers)])
+
+    def forward(self, hidden_states, attention_mask, head_mask=None):
+        all_hidden_states = ()
+        all_attentions = ()
+        for i, layer_module in enumerate(self.layer):
+            if self.output_hidden_states:
+                all_hidden_states = all_hidden_states + (hidden_states,)
+
+            layer_outputs = layer_module(hidden_states, attention_mask, head_mask[i])
+            hidden_states = layer_outputs[0]
+
+            if self.output_attentions:
+                all_attentions = all_attentions + (layer_outputs[1],)
+
+        # Add last layer
+        if self.output_hidden_states:
+            all_hidden_states = all_hidden_states + (hidden_states,)
+
+        outputs = (hidden_states,)
+        if self.output_hidden_states:
+            outputs = outputs + (all_hidden_states,)
+        if self.output_attentions:
+            outputs = outputs + (all_attentions,)
+        return outputs  # last-layer hidden state, (all hidden states), (all attentions)
+
+
+class BertPooler(nn.Module):
+    def __init__(self, config):
+        super(BertPooler, self).__init__()
+        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
+        self.activation = nn.Tanh()
+
+    def forward(self, hidden_states):
+        # We "pool" the model by simply taking the hidden state corresponding
+        # to the first token.
+        first_token_tensor = hidden_states[:, 0]
+        pooled_output = self.dense(first_token_tensor)
+        pooled_output = self.activation(pooled_output)
+        return pooled_output
+
+
+class BertPredictionHeadTransform(nn.Module):
+    def __init__(self, config):
+        super(BertPredictionHeadTransform, self).__init__()
+        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
+        if isinstance(config.hidden_act, str) or (sys.version_info[0] == 2 and isinstance(config.hidden_act, unicode)):
+            self.transform_act_fn = ACT2FN[config.hidden_act]
+        else:
+            self.transform_act_fn = config.hidden_act
+        self.LayerNorm = BertLayerNorm(config.hidden_size, eps=config.layer_norm_eps)
+
+    def forward(self, hidden_states):
+        hidden_states = self.dense(hidden_states)
+        hidden_states = self.transform_act_fn(hidden_states)
+        hidden_states = self.LayerNorm(hidden_states)
+        return hidden_states
+
+
+class BertLMPredictionHead(nn.Module):
+    def __init__(self, config):
+        super(BertLMPredictionHead, self).__init__()
+        self.transform = BertPredictionHeadTransform(config)
+
+        # The output weights are the same as the input embeddings, but there is
+        # an output-only bias for each token.
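+        # Concretely, `decoder.weight` is tied to (or cloned from) the input
+        # `word_embeddings.weight` via `tie_weights()` in the pre-training models
+        # below, so the per-token `bias` is the only genuinely new parameter here.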
+        self.decoder = nn.Linear(config.hidden_size,
+                                 config.vocab_size,
+                                 bias=False)
+
+        self.bias = nn.Parameter(torch.zeros(config.vocab_size))
+
+    def forward(self, hidden_states):
+        hidden_states = self.transform(hidden_states)
+        hidden_states = self.decoder(hidden_states) + self.bias
+        return hidden_states
+
+
+class BertOnlyMLMHead(nn.Module):
+    def __init__(self, config):
+        super(BertOnlyMLMHead, self).__init__()
+        self.predictions = BertLMPredictionHead(config)
+
+    def forward(self, sequence_output):
+        prediction_scores = self.predictions(sequence_output)
+        return prediction_scores
+
+
+class BertOnlyNSPHead(nn.Module):
+    def __init__(self, config):
+        super(BertOnlyNSPHead, self).__init__()
+        self.seq_relationship = nn.Linear(config.hidden_size, 2)
+
+    def forward(self, pooled_output):
+        seq_relationship_score = self.seq_relationship(pooled_output)
+        return seq_relationship_score
+
+
+class BertPreTrainingHeads(nn.Module):
+    def __init__(self, config):
+        super(BertPreTrainingHeads, self).__init__()
+        self.predictions = BertLMPredictionHead(config)
+        self.seq_relationship = nn.Linear(config.hidden_size, 2)
+
+    def forward(self, sequence_output, pooled_output):
+        prediction_scores = self.predictions(sequence_output)
+        seq_relationship_score = self.seq_relationship(pooled_output)
+        return prediction_scores, seq_relationship_score
+
+
+class BertPreTrainedModel(PreTrainedModel):
+    """ An abstract class to handle weights initialization and
+        a simple interface for downloading and loading pretrained models.
+    """
+    config_class = BertConfig
+    pretrained_model_archive_map = BERT_PRETRAINED_MODEL_ARCHIVE_MAP
+    load_tf_weights = load_tf_weights_in_bert
+    base_model_prefix = "bert"
+
+    def _init_weights(self, module):
+        """ Initialize the weights """
+        if isinstance(module, (nn.Linear, nn.Embedding)):
+            # Slightly different from the TF version which uses truncated_normal for initialization
+            # cf https://github.com/pytorch/pytorch/pull/5617
+            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
+        elif isinstance(module, BertLayerNorm):
+            module.bias.data.zero_()
+            module.weight.data.fill_(1.0)
+        if isinstance(module, nn.Linear) and module.bias is not None:
+            module.bias.data.zero_()
+
+
+BERT_START_DOCSTRING = r"""    The BERT model was proposed in
+    `BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`_
+    by Jacob Devlin, Ming-Wei Chang, Kenton Lee and Kristina Toutanova. It's a bidirectional transformer
+    pre-trained using a combination of masked language modeling objective and next sentence prediction
+    on a large corpus comprising the Toronto Book Corpus and Wikipedia.
+
+    This model is a PyTorch `torch.nn.Module`_ sub-class. Use it as a regular PyTorch Module and
+    refer to the PyTorch documentation for all matters related to general usage and behavior.
+
+    .. _`BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`:
+        https://arxiv.org/abs/1810.04805
+
+    .. _`torch.nn.Module`:
+        https://pytorch.org/docs/stable/nn.html#module
+
+    Parameters:
+        config (:class:`~pytorch_transformers.BertConfig`): Model configuration class with all the parameters of the model.
+            Initializing with a config file does not load the weights associated with the model, only the configuration.
+            Check out the :meth:`~pytorch_transformers.PreTrainedModel.from_pretrained` method to load the model weights.
+""" + +BERT_INPUTS_DOCSTRING = r""" + Inputs: + **input_ids**: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Indices of input sequence tokens in the vocabulary. + To match pre-training, BERT input sequence should be formatted with [CLS] and [SEP] tokens as follows: + + (a) For sequence pairs: + + ``tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]`` + + ``token_type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1`` + + (b) For single sequences: + + ``tokens: [CLS] the dog is hairy . [SEP]`` + + ``token_type_ids: 0 0 0 0 0 0 0`` + + Bert is a model with absolute position embeddings so it's usually advised to pad the inputs on + the right rather than the left. + + Indices can be obtained using :class:`pytorch_transformers.BertTokenizer`. + See :func:`pytorch_transformers.PreTrainedTokenizer.encode` and + :func:`pytorch_transformers.PreTrainedTokenizer.convert_tokens_to_ids` for details. + **attention_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(batch_size, sequence_length)``: + Mask to avoid performing attention on padding token indices. + Mask values selected in ``[0, 1]``: + ``1`` for tokens that are NOT MASKED, ``0`` for MASKED tokens. + **token_type_ids**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Segment token indices to indicate first and second portions of the inputs. + Indices are selected in ``[0, 1]``: ``0`` corresponds to a `sentence A` token, ``1`` + corresponds to a `sentence B` token + (see `BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`_ for more details). + **position_ids**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Indices of positions of each input sequence tokens in the position embeddings. + Selected in the range ``[0, config.max_position_embeddings - 1]``. + **head_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(num_heads,)`` or ``(num_layers, num_heads)``: + Mask to nullify selected heads of the self-attention modules. + Mask values selected in ``[0, 1]``: + ``1`` indicates the head is **not masked**, ``0`` indicates the head is **masked**. +""" + +@add_start_docstrings("The bare Bert Model transformer outputting raw hidden-states without any specific head on top.", + BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING) +class BertModel(BertPreTrainedModel): + r""" + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **last_hidden_state**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, hidden_size)`` + Sequence of hidden-states at the output of the last layer of the model. + **pooler_output**: ``torch.FloatTensor`` of shape ``(batch_size, hidden_size)`` + Last layer hidden-state of the first token of the sequence (classification token) + further processed by a Linear layer and a Tanh activation function. The Linear + layer weights are trained from the next sentence prediction (classification) + objective during Bert pretraining. This output is usually *not* a good summary + of the semantic content of the input, you're often better with averaging or pooling + the sequence of hidden-states for the whole input sequence. + **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. 
+ **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertModel.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + outputs = model(input_ids) + last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple + + """ + def __init__(self, config): + super(BertModel, self).__init__(config) + + self.embeddings = BertEmbeddings(config) + self.encoder = BertEncoder(config) + self.pooler = BertPooler(config) + + self.init_weights() + + def _resize_token_embeddings(self, new_num_tokens): + old_embeddings = self.embeddings.word_embeddings + new_embeddings = self._get_resized_embeddings(old_embeddings, new_num_tokens) + self.embeddings.word_embeddings = new_embeddings + return self.embeddings.word_embeddings + + def _prune_heads(self, heads_to_prune): + """ Prunes heads of the model. + heads_to_prune: dict of {layer_num: list of heads to prune in this layer} + See base class PreTrainedModel + """ + for layer, heads in heads_to_prune.items(): + self.encoder.layer[layer].attention.prune_heads(heads) + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None): + if attention_mask is None: + attention_mask = torch.ones_like(input_ids) + if token_type_ids is None: + token_type_ids = torch.zeros_like(input_ids) + + # We create a 3D attention mask from a 2D tensor mask. + # Sizes are [batch_size, 1, 1, to_seq_length] + # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] + # this attention mask is more simple than the triangular masking of causal attention + # used in OpenAI GPT, we just need to prepare the broadcast dimension here. + extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. 
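+        # (Illustrative: an attention_mask row of [1, 1, 0] becomes additive scores
+        # [0.0, 0.0, -10000.0] below, so the padded position gets ~zero softmax weight.)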
+        extended_attention_mask = extended_attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility
+        extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
+
+        # Prepare head mask if needed
+        # 1.0 in head_mask indicates we keep the head
+        # attention_probs has shape bsz x n_heads x N x N
+        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
+        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
+        if head_mask is not None:
+            if head_mask.dim() == 1:
+                head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1)
+                head_mask = head_mask.expand(self.config.num_hidden_layers, -1, -1, -1, -1)
+            elif head_mask.dim() == 2:
+                head_mask = head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1)  # We can specify head_mask for each layer
+            head_mask = head_mask.to(dtype=next(self.parameters()).dtype)  # switch to float if needed (fp16 compatibility)
+        else:
+            head_mask = [None] * self.config.num_hidden_layers
+
+        embedding_output = self.embeddings(input_ids, position_ids=position_ids, token_type_ids=token_type_ids)
+        encoder_outputs = self.encoder(embedding_output,
+                                       extended_attention_mask,
+                                       head_mask=head_mask)
+        sequence_output = encoder_outputs[0]
+        pooled_output = self.pooler(sequence_output)
+
+        outputs = (sequence_output, pooled_output,) + encoder_outputs[1:]  # add hidden_states and attentions if they are here
+        return outputs  # sequence_output, pooled_output, (hidden_states), (attentions)
+
+
+
+
+
+
+@add_start_docstrings("The bare Bert Model transformer outputting raw hidden-states without any specific head on top.",
+                      BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING)
+class BertForLatentConnector(BertPreTrainedModel):
+    r"""
+    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
+        **last_hidden_state**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, hidden_size)``
+            Sequence of hidden-states at the output of the last layer of the model.
+        **pooler_output**: ``torch.FloatTensor`` of shape ``(batch_size, hidden_size)``
+            Last layer hidden-state of the first token of the sequence (classification token)
+            further processed by a Linear layer and a Tanh activation function. The Linear
+            layer weights are trained from the next sentence prediction (classification)
+            objective during Bert pretraining. This output is usually *not* a good summary
+            of the semantic content of the input, you're often better with averaging or pooling
+            the sequence of hidden-states for the whole input sequence.
+        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
+            list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
+            of shape ``(batch_size, sequence_length, hidden_size)``:
+            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
+        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
+            list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
+            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads.
+ + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertModel.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + outputs = model(input_ids) + last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple + + """ + def __init__(self, config, latent_size): + super(BertForLatentConnector, self).__init__(config) + + self.embeddings = BertEmbeddings(config) + self.encoder = BertEncoder(config) + self.pooler = BertPooler(config) + + self.linear = nn.Linear(config.hidden_size, 2 * latent_size, bias=False) + + self.init_weights() + + def _resize_token_embeddings(self, new_num_tokens): + old_embeddings = self.embeddings.word_embeddings + new_embeddings = self._get_resized_embeddings(old_embeddings, new_num_tokens) + self.embeddings.word_embeddings = new_embeddings + return self.embeddings.word_embeddings + + def _prune_heads(self, heads_to_prune): + """ Prunes heads of the model. + heads_to_prune: dict of {layer_num: list of heads to prune in this layer} + See base class PreTrainedModel + """ + for layer, heads in heads_to_prune.items(): + self.encoder.layer[layer].attention.prune_heads(heads) + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None): + if attention_mask is None: + attention_mask = torch.ones_like(input_ids) + if token_type_ids is None: + token_type_ids = torch.zeros_like(input_ids) + + # We create a 3D attention mask from a 2D tensor mask. + # Sizes are [batch_size, 1, 1, to_seq_length] + # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] + # this attention mask is more simple than the triangular masking of causal attention + # used in OpenAI GPT, we just need to prepare the broadcast dimension here. + extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. 
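+        # Note: apart from the extra `self.linear` VAE head declared in __init__,
+        # this forward mirrors BertModel.forward; a caller would typically do
+        # (illustrative): mean, logvar = self.linear(pooled_output).chunk(2, -1)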
+        extended_attention_mask = extended_attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility
+        extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
+
+        # Prepare head mask if needed
+        # 1.0 in head_mask indicates we keep the head
+        # attention_probs has shape bsz x n_heads x N x N
+        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
+        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
+        if head_mask is not None:
+            if head_mask.dim() == 1:
+                head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1)
+                head_mask = head_mask.expand(self.config.num_hidden_layers, -1, -1, -1, -1)
+            elif head_mask.dim() == 2:
+                head_mask = head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1)  # We can specify head_mask for each layer
+            head_mask = head_mask.to(dtype=next(self.parameters()).dtype)  # switch to float if needed (fp16 compatibility)
+        else:
+            head_mask = [None] * self.config.num_hidden_layers
+
+        embedding_output = self.embeddings(input_ids, position_ids=position_ids, token_type_ids=token_type_ids)
+        encoder_outputs = self.encoder(embedding_output,
+                                       extended_attention_mask,
+                                       head_mask=head_mask)
+        sequence_output = encoder_outputs[0]
+        pooled_output = self.pooler(sequence_output)
+
+        outputs = (sequence_output, pooled_output,) + encoder_outputs[1:]  # add hidden_states and attentions if they are here
+        return outputs  # sequence_output, pooled_output, (hidden_states), (attentions)
+
+
+@add_start_docstrings("""Bert Model with two heads on top as done during the pre-training:
+    a `masked language modeling` head and a `next sentence prediction (classification)` head. """,
+    BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING)
+class BertForPreTraining(BertPreTrainedModel):
+    r"""
+        **masked_lm_labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
+            Labels for computing the masked language modeling loss.
+            Indices should be in ``[-1, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring)
+            Tokens with indices set to ``-1`` are ignored (masked), the loss is only computed for the tokens with labels
+            in ``[0, ..., config.vocab_size]``
+        **next_sentence_label**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``:
+            Labels for computing the next sequence prediction (classification) loss. Input should be a sequence pair (see ``input_ids`` docstring)
+            Indices should be in ``[0, 1]``.
+            ``0`` indicates sequence B is a continuation of sequence A,
+            ``1`` indicates sequence B is a random sequence.
+
+    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
+        **loss**: (`optional`, returned when both ``masked_lm_labels`` and ``next_sentence_label`` are provided) ``torch.FloatTensor`` of shape ``(1,)``:
+            Total loss as the sum of the masked language modeling loss and the next sequence prediction (classification) loss.
+        **prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)``
+            Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
+        **seq_relationship_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, 2)``
+            Prediction scores of the next sequence prediction (classification) head (scores of True/False continuation before SoftMax).
+ **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertForPreTraining.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + outputs = model(input_ids) + prediction_scores, seq_relationship_scores = outputs[:2] + + """ + def __init__(self, config): + super(BertForPreTraining, self).__init__(config) + + self.bert = BertModel(config) + self.cls = BertPreTrainingHeads(config) + + self.init_weights() + self.tie_weights() + + def tie_weights(self): + """ Make sure we are sharing the input and output embeddings. + Export to TorchScript can't handle parameter sharing so we are cloning them instead. + """ + self._tie_or_clone_weights(self.cls.predictions.decoder, + self.bert.embeddings.word_embeddings) + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, + masked_lm_labels=None, next_sentence_label=None): + + outputs = self.bert(input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + sequence_output, pooled_output = outputs[:2] + prediction_scores, seq_relationship_score = self.cls(sequence_output, pooled_output) + + outputs = (prediction_scores, seq_relationship_score,) + outputs[2:] # add hidden states and attention if they are here + + if masked_lm_labels is not None and next_sentence_label is not None: + loss_fct = CrossEntropyLoss(ignore_index=-1) + masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels.view(-1)) + next_sentence_loss = loss_fct(seq_relationship_score.view(-1, 2), next_sentence_label.view(-1)) + total_loss = masked_lm_loss + next_sentence_loss + outputs = (total_loss,) + outputs + + return outputs # (loss), prediction_scores, seq_relationship_score, (hidden_states), (attentions) + + +@add_start_docstrings("""Bert Model with a `language modeling` head on top. """, + BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING) +class BertForMaskedLM(BertPreTrainedModel): + r""" + **masked_lm_labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Labels for computing the masked language modeling loss. + Indices should be in ``[-1, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring) + Tokens with indices set to ``-1`` are ignored (masked), the loss is only computed for the tokens with labels + in ``[0, ..., config.vocab_size]`` + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``masked_lm_labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Masked language modeling loss. 
+ **prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)`` + Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). + **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertForMaskedLM.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + outputs = model(input_ids, masked_lm_labels=input_ids) + loss, prediction_scores = outputs[:2] + + """ + def __init__(self, config): + super(BertForMaskedLM, self).__init__(config) + + self.bert = BertModel(config) + self.cls = BertOnlyMLMHead(config) + + self.init_weights() + self.tie_weights() + + def tie_weights(self): + """ Make sure we are sharing the input and output embeddings. + Export to TorchScript can't handle parameter sharing so we are cloning them instead. + """ + self._tie_or_clone_weights(self.cls.predictions.decoder, + self.bert.embeddings.word_embeddings) + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, + masked_lm_labels=None): + + outputs = self.bert(input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + sequence_output = outputs[0] + prediction_scores = self.cls(sequence_output) + + outputs = (prediction_scores,) + outputs[2:] # Add hidden states and attention if they are here + if masked_lm_labels is not None: + loss_fct = CrossEntropyLoss(ignore_index=-1) + masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels.view(-1)) + outputs = (masked_lm_loss,) + outputs + + return outputs # (masked_lm_loss), prediction_scores, (hidden_states), (attentions) + + +@add_start_docstrings("""Bert Model with a `next sentence prediction (classification)` head on top. """, + BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING) +class BertForNextSentencePrediction(BertPreTrainedModel): + r""" + **next_sentence_label**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``: + Labels for computing the next sequence prediction (classification) loss. Input should be a sequence pair (see ``input_ids`` docstring) + Indices should be in ``[0, 1]``. + ``0`` indicates sequence B is a continuation of sequence A, + ``1`` indicates sequence B is a random sequence. + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``next_sentence_label`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Next sequence prediction (classification) loss. 
+ **seq_relationship_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, 2)`` + Prediction scores of the next sequence prediction (classification) head (scores of True/False continuation before SoftMax). + **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertForNextSentencePrediction.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + outputs = model(input_ids) + seq_relationship_scores = outputs[0] + + """ + def __init__(self, config): + super(BertForNextSentencePrediction, self).__init__(config) + + self.bert = BertModel(config) + self.cls = BertOnlyNSPHead(config) + + self.init_weights() + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, + next_sentence_label=None): + + outputs = self.bert(input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + pooled_output = outputs[1] + + seq_relationship_score = self.cls(pooled_output) + + outputs = (seq_relationship_score,) + outputs[2:] # add hidden states and attention if they are here + if next_sentence_label is not None: + loss_fct = CrossEntropyLoss(ignore_index=-1) + next_sentence_loss = loss_fct(seq_relationship_score.view(-1, 2), next_sentence_label.view(-1)) + outputs = (next_sentence_loss,) + outputs + + return outputs # (next_sentence_loss), seq_relationship_score, (hidden_states), (attentions) + + +@add_start_docstrings("""Bert Model transformer with a sequence classification/regression head on top (a linear layer on top of + the pooled output) e.g. for GLUE tasks. """, + BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING) +class BertForSequenceClassification(BertPreTrainedModel): + r""" + **labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``: + Labels for computing the sequence classification/regression loss. + Indices should be in ``[0, ..., config.num_labels - 1]``. + If ``config.num_labels == 1`` a regression loss is computed (Mean-Square loss), + If ``config.num_labels > 1`` a classification loss is computed (Cross-Entropy). + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Classification (or regression if config.num_labels==1) loss. + **logits**: ``torch.FloatTensor`` of shape ``(batch_size, config.num_labels)`` + Classification (or regression if config.num_labels==1) scores (before SoftMax). 
+ **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertForSequenceClassification.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + labels = torch.tensor([1]).unsqueeze(0) # Batch size 1 + outputs = model(input_ids, labels=labels) + loss, logits = outputs[:2] + + """ + def __init__(self, config): + super(BertForSequenceClassification, self).__init__(config) + self.num_labels = config.num_labels + + self.bert = BertModel(config) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + self.classifier = nn.Linear(config.hidden_size, self.config.num_labels) + self.use_freeze = False + + self.init_weights() + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, + position_ids=None, head_mask=None, labels=None): + + outputs = self.bert(input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + pooled_output = outputs[1] + + if self.use_freeze: + pooled_output = pooled_output.detach() + + pooled_output = self.dropout(pooled_output) + logits = self.classifier(pooled_output) + + outputs = (logits,) + outputs[2:] # add hidden states and attention if they are here + + if labels is not None: + if self.num_labels == 1: + # We are doing regression + loss_fct = MSELoss() + loss = loss_fct(logits.view(-1), labels.view(-1)) + else: + loss_fct = CrossEntropyLoss() + loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) + outputs = (loss,) + outputs + + # pdb.set_trace() + return outputs, pooled_output # (loss), logits, (hidden_states), (attentions) + + +@add_start_docstrings("""Bert Model transformer with a sequence classification/regression head on top (a linear layer on top of + the pooled output) e.g. for GLUE tasks. """, + BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING) +class BertForSequenceClassificationLatentConnector(BertPreTrainedModel): + r""" + **labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``: + Labels for computing the sequence classification/regression loss. + Indices should be in ``[0, ..., config.num_labels - 1]``. + If ``config.num_labels == 1`` a regression loss is computed (Mean-Square loss), + If ``config.num_labels > 1`` a classification loss is computed (Cross-Entropy). + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Classification (or regression if config.num_labels==1) loss. + **logits**: ``torch.FloatTensor`` of shape ``(batch_size, config.num_labels)`` + Classification (or regression if config.num_labels==1) scores (before SoftMax). 
+ **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertForSequenceClassificationLatentConnector.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + labels = torch.tensor([1]).unsqueeze(0) # Batch size 1 + outputs = model(input_ids, labels=labels) + loss, logits = outputs[:2] + + """ + def __init__(self, config, latent_size): + super(BertForSequenceClassificationLatentConnector, self).__init__(config) + self.num_labels = config.num_labels + + self.bert = BertModel(config) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + self.classifier = nn.Linear(config.hidden_size, self.config.num_labels) + self.linear = nn.Linear(config.hidden_size, 2 * latent_size, bias=False) + self.use_freeze = False + + self.init_weights() + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, + position_ids=None, head_mask=None, labels=None): + + outputs = self.bert(input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + + pooled_output = outputs[1] + # mean, logvar = self.linear(pooled_output).chunk(2, -1) + + if self.use_freeze: + pooled_output = pooled_output.detach() + + pooled_output = self.dropout(pooled_output) + logits = self.classifier(pooled_output) + + outputs = (logits,) + outputs[2:] # add hidden states and attention if they are here + + if labels is not None: + if self.num_labels == 1: + # We are doing regression + loss_fct = MSELoss() + loss = loss_fct(logits.view(-1), labels.view(-1)) + else: + loss_fct = CrossEntropyLoss() + loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) + outputs = (loss,) + outputs + + return outputs, pooled_output # (loss), logits, (hidden_states), (attentions) + + +@add_start_docstrings("""Bert Model with a multiple choice classification head on top (a linear layer on top of + the pooled output and a softmax) e.g. for RocStories/SWAG tasks. """, + BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING) +class BertForMultipleChoice(BertPreTrainedModel): + r""" + **labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``: + Labels for computing the multiple choice classification loss. + Indices should be in ``[0, ..., num_choices]`` where `num_choices` is the size of the second dimension + of the input tensors. (see `input_ids` above) + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Classification loss. + **classification_scores**: ``torch.FloatTensor`` of shape ``(batch_size, num_choices)`` where `num_choices` is the size of the second dimension + of the input tensors. (see `input_ids` above). 
+ Classification scores (before SoftMax). + **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertForMultipleChoice.from_pretrained('bert-base-uncased') + choices = ["Hello, my dog is cute", "Hello, my cat is amazing"] + input_ids = torch.tensor([tokenizer.encode(s) for s in choices]).unsqueeze(0) # Batch size 1, 2 choices + labels = torch.tensor(1).unsqueeze(0) # Batch size 1 + outputs = model(input_ids, labels=labels) + loss, classification_scores = outputs[:2] + + """ + def __init__(self, config): + super(BertForMultipleChoice, self).__init__(config) + + self.bert = BertModel(config) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + self.classifier = nn.Linear(config.hidden_size, 1) + + self.init_weights() + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, + position_ids=None, head_mask=None, labels=None): + num_choices = input_ids.shape[1] + + input_ids = input_ids.view(-1, input_ids.size(-1)) + attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None + token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None + position_ids = position_ids.view(-1, position_ids.size(-1)) if position_ids is not None else None + + outputs = self.bert(input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + pooled_output = outputs[1] + + pooled_output = self.dropout(pooled_output) + logits = self.classifier(pooled_output) + reshaped_logits = logits.view(-1, num_choices) + + outputs = (reshaped_logits,) + outputs[2:] # add hidden states and attention if they are here + + if labels is not None: + loss_fct = CrossEntropyLoss() + loss = loss_fct(reshaped_logits, labels) + outputs = (loss,) + outputs + + return outputs # (loss), reshaped_logits, (hidden_states), (attentions) + + +@add_start_docstrings("""Bert Model with a token classification head on top (a linear layer on top of + the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks. """, + BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING) +class BertForTokenClassification(BertPreTrainedModel): + r""" + **labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Labels for computing the token classification loss. + Indices should be in ``[0, ..., config.num_labels - 1]``. + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Classification loss. + **scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.num_labels)`` + Classification scores (before SoftMax). 
+ **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertForTokenClassification.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + labels = torch.tensor([1] * input_ids.size(1)).unsqueeze(0) # Batch size 1 + outputs = model(input_ids, labels=labels) + loss, scores = outputs[:2] + + """ + def __init__(self, config): + super(BertForTokenClassification, self).__init__(config) + self.num_labels = config.num_labels + + self.bert = BertModel(config) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + self.classifier = nn.Linear(config.hidden_size, config.num_labels) + + self.init_weights() + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, + position_ids=None, head_mask=None, labels=None): + + outputs = self.bert(input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + sequence_output = outputs[0] + + sequence_output = self.dropout(sequence_output) + logits = self.classifier(sequence_output) + + outputs = (logits,) + outputs[2:] # add hidden states and attention if they are here + if labels is not None: + loss_fct = CrossEntropyLoss() + # Only keep active parts of the loss + if attention_mask is not None: + active_loss = attention_mask.view(-1) == 1 + active_logits = logits.view(-1, self.num_labels)[active_loss] + active_labels = labels.view(-1)[active_loss] + loss = loss_fct(active_logits, active_labels) + else: + loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) + outputs = (loss,) + outputs + + return outputs # (loss), scores, (hidden_states), (attentions) + + +@add_start_docstrings("""Bert Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear layers on top of + the hidden-states output to compute `span start logits` and `span end logits`). """, + BERT_START_DOCSTRING, BERT_INPUTS_DOCSTRING) +class BertForQuestionAnswering(BertPreTrainedModel): + r""" + **start_positions**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``: + Labels for position (index) of the start of the labelled span for computing the token classification loss. + Positions are clamped to the length of the sequence (`sequence_length`). + Position outside of the sequence are not taken into account for computing the loss. + **end_positions**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``: + Labels for position (index) of the end of the labelled span for computing the token classification loss. + Positions are clamped to the length of the sequence (`sequence_length`). + Position outside of the sequence are not taken into account for computing the loss. 
+ + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Total span extraction loss is the sum of a Cross-Entropy for the start and end positions. + **start_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length,)`` + Span-start scores (before SoftMax). + **end_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length,)`` + Span-end scores (before SoftMax). + **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertForQuestionAnswering.from_pretrained('bert-base-uncased') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + start_positions = torch.tensor([1]) + end_positions = torch.tensor([3]) + outputs = model(input_ids, start_positions=start_positions, end_positions=end_positions) + loss, start_scores, end_scores = outputs[:2] + + """ + def __init__(self, config): + super(BertForQuestionAnswering, self).__init__(config) + self.num_labels = config.num_labels + + self.bert = BertModel(config) + self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels) + + self.init_weights() + + def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, + start_positions=None, end_positions=None): + + outputs = self.bert(input_ids, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + sequence_output = outputs[0] + + logits = self.qa_outputs(sequence_output) + start_logits, end_logits = logits.split(1, dim=-1) + start_logits = start_logits.squeeze(-1) + end_logits = end_logits.squeeze(-1) + + outputs = (start_logits, end_logits,) + outputs[2:] + if start_positions is not None and end_positions is not None: + # If we are on multi-GPU, split add a dimension + if len(start_positions.size()) > 1: + start_positions = start_positions.squeeze(-1) + if len(end_positions.size()) > 1: + end_positions = end_positions.squeeze(-1) + # sometimes the start/end positions are outside our model inputs, we ignore these terms + ignored_index = start_logits.size(1) + start_positions.clamp_(0, ignored_index) + end_positions.clamp_(0, ignored_index) + + loss_fct = CrossEntropyLoss(ignore_index=ignored_index) + start_loss = loss_fct(start_logits, start_positions) + end_loss = loss_fct(end_logits, end_positions) + total_loss = (start_loss + end_loss) / 2 + outputs = (total_loss,) + outputs + + return outputs # (loss), start_logits, end_logits, (hidden_states), (attentions) + + +############ +# XX Added # +############ + +class BertForLatentConnector_XX(nn.Module): + def __init__(self, config, latent_size): + super().__init__() + self.config = config + self.embeddings = BertEmbeddings(config) + 
+        self.encoder = BertEncoder(config)
+        self.pooler = BertPooler(config)
+        self.linear = nn.Linear(config.hidden_size, 2 * latent_size, bias=False)
+        self.init_weights()
+
+    def init_weights(self):
+        """ Initialize and prune weights if needed. """
+        # Initialize weights
+        self.apply(self._init_weights)
+
+        # Prune heads if needed (the pruning helper on this class is `_prune_heads`)
+        if self.config.pruned_heads:
+            self._prune_heads(self.config.pruned_heads)
+
+    def _init_weights(self, module):
+        """ Initialize the weights """
+        if isinstance(module, (nn.Linear, nn.Embedding)):
+            # Slightly different from the TF version which uses truncated_normal for initialization
+            # cf https://github.com/pytorch/pytorch/pull/5617
+            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
+        elif isinstance(module, BertLayerNorm):
+            module.bias.data.zero_()
+            module.weight.data.fill_(1.0)
+        if isinstance(module, nn.Linear) and module.bias is not None:
+            module.bias.data.zero_()
+
+    def _resize_token_embeddings(self, new_num_tokens):
+        old_embeddings = self.embeddings.word_embeddings
+        new_embeddings = self._get_resized_embeddings(old_embeddings, new_num_tokens)
+        self.embeddings.word_embeddings = new_embeddings
+        return self.embeddings.word_embeddings
+
+    def _prune_heads(self, heads_to_prune):
+        """ Prunes heads of the model.
+            heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
+            See base class PreTrainedModel
+        """
+        for layer, heads in heads_to_prune.items():
+            self.encoder.layer[layer].attention.prune_heads(heads)
+
+    def forward(self, input_ids, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None):
+        if attention_mask is None:
+            attention_mask = torch.ones_like(input_ids)
+        if token_type_ids is None:
+            token_type_ids = torch.zeros_like(input_ids)
+
+        embedding_output = self.embeddings(input_ids, position_ids=position_ids, token_type_ids=token_type_ids)
+        # We create a 3D attention mask from a 2D tensor mask.
+        # Sizes are [batch_size, 1, 1, to_seq_length]
+        # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
+        # this attention mask is simpler than the triangular masking of causal attention
+        # used in OpenAI GPT, we just need to prepare the broadcast dimension here.
+        extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
+
+        # Since attention_mask is 1.0 for positions we want to attend and 0.0 for
+        # masked positions, this operation will create a tensor which is 0.0 for
+        # positions we want to attend and -10000.0 for masked positions.
+        # Since we are adding it to the raw scores before the softmax, this is
+        # effectively the same as removing these entirely.
+        extended_attention_mask = extended_attention_mask.to(dtype=embedding_output.dtype)  # fp16 compatibility
+        extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
+
+        # Prepare head mask if needed
+        # 1.0 in head_mask indicates we keep the head
+        # attention_probs has shape bsz x n_heads x N x N
+        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
+        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
+        if head_mask is not None:
+            if head_mask.dim() == 1:
+                head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1)
+                head_mask = head_mask.expand(self.config.num_hidden_layers, -1, -1, -1, -1)
+            elif head_mask.dim() == 2:
+                head_mask = head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1)  # We can specify head_mask for each layer
+            head_mask = head_mask.to(dtype=embedding_output.dtype)  # switch to float if needed (fp16 compatibility)
+        else:
+            head_mask = [None] * self.config.num_hidden_layers
+
+        encoder_outputs = self.encoder(embedding_output,
+                                       extended_attention_mask,
+                                       head_mask=head_mask)
+        sequence_output = encoder_outputs[0]
+        pooled_output = self.pooler(sequence_output)
+
+        outputs = (sequence_output, pooled_output,) + encoder_outputs[1:]  # add hidden_states and attentions if they are here
+        return outputs  # sequence_output, pooled_output, (hidden_states), (attentions)
+
+
diff --git a/core/models/latent_diffusion/vae/optimus_modules/optimus_gpt2.py b/core/models/latent_diffusion/vae/optimus_modules/optimus_gpt2.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc96784f053086f8e48dda1ede262c9870464fdb
--- /dev/null
+++ b/core/models/latent_diffusion/vae/optimus_modules/optimus_gpt2.py
@@ -0,0 +1,1122 @@
+# coding=utf-8
+# Copyright 2018 The OpenAI Team Authors and HuggingFace Inc. team.
+# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
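+#
+# Usage sketch (illustrative only; `config`, `z`, `input_ids` and
+# `pad_token_id` are placeholders, not defined in this file): the
+# latent-connector decoder defined below consumes an Optimus latent z of
+# shape (batch, latent_size) through the `past` argument, injecting it as an
+# extra embedding added at every position (latent_as_gpt_emb) and/or as a
+# per-layer key/value memory for self-attention (latent_as_gpt_memory):
+#
+#     decoder = GPT2ForLatentConnector(config)  # latent_size defaults to 32
+#     outputs = decoder(input_ids, past=z, labels=input_ids,
+#                       label_ignore=pad_token_id)
+#     nll = outputs[0]  # token NLL summed per sequence (no batch averaging)
+#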
+"""PyTorch OpenAI GPT-2 model.""" + +from __future__ import absolute_import, division, print_function, unicode_literals + +import pdb + +import collections +import json +import logging +import math +import os +import sys +from io import open + +import torch +import torch.nn as nn +from torch.nn import CrossEntropyLoss +from torch.nn.parameter import Parameter + +from .modeling_utils import PreTrainedModel, Conv1D, prune_conv1d_layer, SequenceSummary +from .configuration_gpt2 import GPT2Config +from .file_utils import add_start_docstrings + +logger = logging.getLogger(__name__) + +GPT2_PRETRAINED_MODEL_ARCHIVE_MAP = {"gpt2": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-pytorch_model.bin", + "gpt2-medium": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-medium-pytorch_model.bin", + "gpt2-large": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-large-pytorch_model.bin"} + +def load_tf_weights_in_gpt2(model, config, gpt2_checkpoint_path): + """ Load tf checkpoints in a pytorch model + """ + try: + import re + import numpy as np + import tensorflow as tf + except ImportError: + logger.error("Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see " + "https://www.tensorflow.org/install/ for installation instructions.") + raise + tf_path = os.path.abspath(gpt2_checkpoint_path) + logger.info("Converting TensorFlow checkpoint from {}".format(tf_path)) + # Load weights from TF model + init_vars = tf.train.list_variables(tf_path) + names = [] + arrays = [] + for name, shape in init_vars: + logger.info("Loading TF weight {} with shape {}".format(name, shape)) + array = tf.train.load_variable(tf_path, name) + names.append(name) + arrays.append(array.squeeze()) + + for name, array in zip(names, arrays): + name = name[6:] # skip "model/" + name = name.split('/') + pointer = model + for m_name in name: + if re.fullmatch(r'[A-Za-z]+\d+', m_name): + l = re.split(r'(\d+)', m_name) + else: + l = [m_name] + if l[0] == 'w' or l[0] == 'g': + pointer = getattr(pointer, 'weight') + elif l[0] == 'b': + pointer = getattr(pointer, 'bias') + elif l[0] == 'wpe' or l[0] == 'wte': + pointer = getattr(pointer, l[0]) + pointer = getattr(pointer, 'weight') + else: + pointer = getattr(pointer, l[0]) + if len(l) >= 2: + num = int(l[1]) + pointer = pointer[num] + try: + assert pointer.shape == array.shape + except AssertionError as e: + e.args += (pointer.shape, array.shape) + raise + logger.info("Initialize PyTorch weight {}".format(name)) + pointer.data = torch.from_numpy(array) + return model + + +def gelu(x): + return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) + + +class Attention(nn.Module): + def __init__(self, nx, n_ctx, config, scale=False): + super(Attention, self).__init__() + self.output_attentions = config.output_attentions + + n_state = nx # in Attention: n_state=768 (nx=n_embd) + # [switch nx => n_state from Block to Attention to keep identical to TF implem] + assert n_state % config.n_head == 0 + self.register_buffer("bias", torch.tril(torch.ones(n_ctx, n_ctx)).view(1, 1, n_ctx, n_ctx)) + self.n_head = config.n_head + self.split_size = n_state + self.scale = scale + + self.c_attn = Conv1D(n_state * 3, nx) + self.c_proj = Conv1D(n_state, nx) + self.attn_dropout = nn.Dropout(config.attn_pdrop) + self.resid_dropout = nn.Dropout(config.resid_pdrop) + self.pruned_heads = set() + + def prune_heads(self, heads): + if len(heads) == 0: + return + mask = torch.ones(self.n_head, self.split_size // self.n_head) + 
heads = set(heads) - self.pruned_heads # Convert to set and emove already pruned heads + for head in heads: + # Compute how many pruned heads are before the head and move the index accordingly + head = head - sum(1 if h < head else 0 for h in self.pruned_heads) + mask[head] = 0 + mask = mask.view(-1).contiguous().eq(1) + index = torch.arange(len(mask))[mask].long() + index_attn = torch.cat([index, index + self.split_size, index + (2*self.split_size)]) + + # Prune conv1d layers + self.c_attn = prune_conv1d_layer(self.c_attn, index_attn, dim=1) + self.c_proj = prune_conv1d_layer(self.c_proj, index, dim=0) + + # Update hyper params + self.split_size = (self.split_size // self.n_head) * (self.n_head - len(heads)) + self.n_head = self.n_head - len(heads) + self.pruned_heads = self.pruned_heads.union(heads) + + def _attn(self, q, k, v, attention_mask=None, head_mask=None): + w = torch.matmul(q, k) + if self.scale: + w = w / math.sqrt(v.size(-1)) + nd, ns = w.size(-2), w.size(-1) + b = self.bias[:, :, ns-nd:ns, :ns] + w = w * b - 1e4 * (1 - b) + + if attention_mask is not None: + # Apply the attention mask + w = w + attention_mask + + w = nn.Softmax(dim=-1)(w) + w = self.attn_dropout(w) + + # Mask heads if we want to + if head_mask is not None: + w = w * head_mask + + outputs = [torch.matmul(w, v)] + if self.output_attentions: + outputs.append(w) + return outputs + + def merge_heads(self, x): + x = x.permute(0, 2, 1, 3).contiguous() + new_x_shape = x.size()[:-2] + (x.size(-2) * x.size(-1),) + return x.view(*new_x_shape) # in Tensorflow implem: fct merge_states + + def split_heads(self, x, k=False): + new_x_shape = x.size()[:-1] + (self.n_head, x.size(-1) // self.n_head) + x = x.view(*new_x_shape) # in Tensorflow implem: fct split_states + if k: + return x.permute(0, 2, 3, 1) # (batch, head, head_features, seq_length) + else: + return x.permute(0, 2, 1, 3) # (batch, head, seq_length, head_features) + + def forward(self, x, layer_past=None, attention_mask=None, head_mask=None): + x = self.c_attn(x) + query, key, value = x.split(self.split_size, dim=2) + query = self.split_heads(query) + key = self.split_heads(key, k=True) + value = self.split_heads(value) + + + if layer_past is not None: + past_key, past_value = layer_past[0], layer_past[1] # transpose back cf below + + past_key = self.split_heads(past_key, k=True) + past_value = self.split_heads(past_value) + # pdb.set_trace() + key = torch.cat((past_key, key), dim=-1) + value = torch.cat((past_value, value), dim=-2) + present = torch.stack((key.transpose(-2, -1), value)) # transpose to have same shapes for stacking + + attn_outputs = self._attn(query, key, value, attention_mask, head_mask) + a = attn_outputs[0] + + a = self.merge_heads(a) + a = self.c_proj(a) + a = self.resid_dropout(a) + + outputs = [a, present] + attn_outputs[1:] + return outputs # a, present, (attentions) + + +class MLP(nn.Module): + def __init__(self, n_state, config): # in MLP: n_state=3072 (4 * n_embd) + super(MLP, self).__init__() + nx = config.n_embd + self.c_fc = Conv1D(n_state, nx) + self.c_proj = Conv1D(nx, n_state) + self.act = gelu + self.dropout = nn.Dropout(config.resid_pdrop) + + def forward(self, x): + h = self.act(self.c_fc(x)) + h2 = self.c_proj(h) + return self.dropout(h2) + + +class Block(nn.Module): + def __init__(self, n_ctx, config, scale=False): + super(Block, self).__init__() + nx = config.n_embd + self.ln_1 = nn.LayerNorm(nx, eps=config.layer_norm_epsilon) + self.attn = Attention(nx, n_ctx, config, scale) + self.ln_2 = nn.LayerNorm(nx, 
eps=config.layer_norm_epsilon) + self.mlp = MLP(4 * nx, config) + + def forward(self, x, layer_past=None, attention_mask=None, head_mask=None): + output_attn = self.attn(self.ln_1(x), + layer_past=layer_past, + attention_mask=attention_mask, + head_mask=head_mask) + a = output_attn[0] # output_attn: a, present, (attentions) + + x = x + a + m = self.mlp(self.ln_2(x)) + x = x + m + + outputs = [x] + output_attn[1:] + return outputs # x, present, (attentions) + + +class GPT2PreTrainedModel(PreTrainedModel): + """ An abstract class to handle weights initialization and + a simple interface for dowloading and loading pretrained models. + """ + config_class = GPT2Config + pretrained_model_archive_map = GPT2_PRETRAINED_MODEL_ARCHIVE_MAP + load_tf_weights = load_tf_weights_in_gpt2 + base_model_prefix = "transformer" + + def __init__(self, *inputs, **kwargs): + super(GPT2PreTrainedModel, self).__init__(*inputs, **kwargs) + + def _init_weights(self, module): + """ Initialize the weights. + """ + if isinstance(module, (nn.Linear, nn.Embedding, Conv1D)): + # Slightly different from the TF version which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) + if isinstance(module, (nn.Linear, Conv1D)) and module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + + +GPT2_START_DOCSTRING = r""" OpenAI GPT-2 model was proposed in + `Language Models are Unsupervised Multitask Learners`_ + by Alec Radford*, Jeffrey Wu*, Rewon Child, David Luan, Dario Amodei** and Ilya Sutskever**. + It's a causal (unidirectional) transformer pre-trained using language modeling on a very large + corpus of ~40 GB of text data. + + This model is a PyTorch `torch.nn.Module`_ sub-class. Use it as a regular PyTorch Module and + refer to the PyTorch documentation for all matter related to general usage and behavior. + + .. _`Language Models are Unsupervised Multitask Learners`: + https://openai.com/blog/better-language-models/ + + .. _`torch.nn.Module`: + https://pytorch.org/docs/stable/nn.html#module + + Parameters: + config (:class:`~pytorch_transformers.GPT2Config`): Model configuration class with all the parameters of the model. + Initializing with a config file does not load the weights associated with the model, only the configuration. + Check out the :meth:`~pytorch_transformers.PreTrainedModel.from_pretrained` method to load the model weights. +""" + +GPT2_INPUTS_DOCSTRING = r""" Inputs: + **input_ids**: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Indices of input sequence tokens in the vocabulary. + GPT-2 is a model with absolute position embeddings so it's usually advised to pad the inputs on + the right rather than the left. + Indices can be obtained using :class:`pytorch_transformers.GPT2Tokenizer`. + See :func:`pytorch_transformers.PreTrainedTokenizer.encode` and + :func:`pytorch_transformers.PreTrainedTokenizer.convert_tokens_to_ids` for details. + **past**: + list of ``torch.FloatTensor`` (one for each layer): + that contains pre-computed hidden-states (key and values in the attention blocks) as computed by the model + (see `past` output below). Can be used to speed up sequential decoding. + **attention_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(batch_size, sequence_length)``: + Mask to avoid performing attention on padding token indices. 
+ Mask values selected in ``[0, 1]``: + ``1`` for tokens that are NOT MASKED, ``0`` for MASKED tokens. + **token_type_ids**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + A parallel sequence of tokens (can be used to indicate various portions of the inputs). + The embeddings from these tokens will be summed with the respective token embeddings. + Indices are selected in the vocabulary (unlike BERT which has a specific vocabulary for segment indices). + **position_ids**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Indices of positions of each input sequence tokens in the position embeddings. + Selected in the range ``[0, config.max_position_embeddings - 1]``. + **head_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(num_heads,)`` or ``(num_layers, num_heads)``: + Mask to nullify selected heads of the self-attention modules. + Mask values selected in ``[0, 1]``: + ``1`` indicates the head is **not masked**, ``0`` indicates the head is **masked**. +""" + +@add_start_docstrings("The bare GPT2 Model transformer outputting raw hidden-states without any specific head on top.", + GPT2_START_DOCSTRING, GPT2_INPUTS_DOCSTRING) +class GPT2Model(GPT2PreTrainedModel): + r""" + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **last_hidden_state**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, hidden_size)`` + Sequence of hidden-states at the last layer of the model. + **past**: + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + that contains pre-computed hidden-states (key and values in the attention blocks). + Can be used (see `past` input) to speed up sequential decoding. + **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
+ + Examples:: + + tokenizer = GPT2Tokenizer.from_pretrained('gpt2') + model = GPT2Model.from_pretrained('gpt2') + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + outputs = model(input_ids) + last_hidden_states = outputs[0] # The last hidden-state is the first element of the output tuple + + """ + def __init__(self, config): + super(GPT2Model, self).__init__(config) + self.output_hidden_states = config.output_hidden_states + self.output_attentions = config.output_attentions + + self.wte = nn.Embedding(config.vocab_size, config.n_embd) + self.wpe = nn.Embedding(config.n_positions, config.n_embd) + self.drop = nn.Dropout(config.embd_pdrop) + self.h = nn.ModuleList([Block(config.n_ctx, config, scale=True) for _ in range(config.n_layer)]) + self.ln_f = nn.LayerNorm(config.n_embd, eps=config.layer_norm_epsilon) + + try: + self.latent_size = config.latent_size + except: + self.latent_size = 32 # default size is 32 + + self.linear = nn.Linear(self.latent_size, config.hidden_size * config.n_layer, bias=False) # different latent vector for each layer + self.linear_emb = nn.Linear(self.latent_size, config.hidden_size, bias=False) # share the same latent vector as the embeddings + + self.config = config + self.init_weights() + + def _resize_token_embeddings(self, new_num_tokens): + self.wte = self._get_resized_embeddings(self.wte, new_num_tokens) + return self.wte + + def _prune_heads(self, heads_to_prune): + """ Prunes heads of the model. + heads_to_prune: dict of {layer_num: list of heads to prune in this layer} + """ + for layer, heads in heads_to_prune.items(): + self.h[layer].attn.prune_heads(heads) + + def forward(self, input_ids, past=None, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, latent_as_gpt_emb=False, latent_as_gpt_memory=True): + + if past is None: + past_length = 0 + past = [None] * len(self.h) + else: + + + if latent_as_gpt_emb: + past_emb = self.linear_emb(past) # used as embeddings to add on other three embeddings + + if latent_as_gpt_memory: + past = self.linear(past) + share_latent = False + if share_latent: + # the same latent vector shared by all layers + past = [past.unsqueeze(-2), past.unsqueeze(-2)] # query, key + past = [past] * len(self.h) + past_length = past[0][0].size(-2) + else: + # different latent vectors for each layer + past_split = torch.split(past.unsqueeze(1), self.config.hidden_size, dim=2) + past = list(zip(past_split,past_split)) + + # past = past.view(batch_size,len(self.h),-1) + # past = [[past[:,i,:].unsqueeze(-2), past[:,i,:].unsqueeze(-2) ] for i in range(len(self.h))] + past_length = 1 # past[0][0].size(-2) + else: + past_length = 0 + past = [None] * len(self.h) + + + if position_ids is None: + position_ids = torch.arange(past_length, input_ids.size(-1) + past_length, dtype=torch.long, device=input_ids.device) + position_ids = position_ids.unsqueeze(0).expand_as(input_ids) + + + # Attention mask. + if attention_mask is not None: + # We create a 3D attention mask from a 2D tensor mask. + # Sizes are [batch_size, 1, 1, to_seq_length] + # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] + # this attention mask is more simple than the triangular masking of causal attention + # used in OpenAI GPT, we just need to prepare the broadcast dimension here. 
+ attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. + attention_mask = attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility + attention_mask = (1.0 - attention_mask) * -10000.0 + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # head_mask has shape n_layer x batch x n_heads x N x N + if head_mask is not None: + if head_mask.dim() == 1: + head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1) + head_mask = head_mask.expand(self.config.n_layer, -1, -1, -1, -1) + elif head_mask.dim() == 2: + head_mask = head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1) # We can specify head_mask for each layer + head_mask = head_mask.to(dtype=next(self.parameters()).dtype) # switch to fload if need + fp16 compatibility + else: + head_mask = [None] * self.config.n_layer + + + input_shape = input_ids.size() + input_ids = input_ids.view(-1, input_ids.size(-1)) + position_ids = position_ids.view(-1, position_ids.size(-1)) + + + inputs_embeds = self.wte(input_ids) + position_embeds = self.wpe(position_ids) + if token_type_ids is not None: + token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) + token_type_embeds = self.wte(token_type_ids) + else: + token_type_embeds = 0 + + + hidden_states = inputs_embeds + position_embeds + token_type_embeds + if latent_as_gpt_emb: + # pdb.set_trace() + hidden_states = hidden_states + past_emb.unsqueeze(1) + + hidden_states = self.drop(hidden_states) + + output_shape = input_shape + (hidden_states.size(-1),) + + presents = () + all_attentions = [] + all_hidden_states = () + for i, (block, layer_past) in enumerate(zip(self.h, past)): + if self.output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states.view(*output_shape),) + + + outputs = block(hidden_states, + layer_past=layer_past, + attention_mask=attention_mask, + head_mask=head_mask[i]) + + + hidden_states, present = outputs[:2] + presents = presents + (present,) + + if self.output_attentions: + all_attentions.append(outputs[2]) + + hidden_states = self.ln_f(hidden_states) + + hidden_states = hidden_states.view(*output_shape) + # Add last hidden state + if self.output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + outputs = (hidden_states, presents) + if self.output_hidden_states: + outputs = outputs + (all_hidden_states,) + if self.output_attentions: + # let the number of heads free (-1) so we can extract attention even after head pruning + attention_output_shape = input_shape[:-1] + (-1,) + all_attentions[0].shape[-2:] + all_attentions = tuple(t.view(*attention_output_shape) for t in all_attentions) + outputs = outputs + (all_attentions,) + return outputs # last hidden state, presents, (all hidden_states), (attentions) + + +@add_start_docstrings("""The GPT2 Model transformer with a language modeling head on top +(linear layer with weights tied to the input embeddings). 
""", GPT2_START_DOCSTRING, GPT2_INPUTS_DOCSTRING) +class GPT2LMHeadModel(GPT2PreTrainedModel): + r""" + **labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Labels for language modeling. + Note that the labels **are shifted** inside the model, i.e. you can set ``lm_labels = input_ids`` + Indices are selected in ``[-1, 0, ..., config.vocab_size]`` + All labels set to ``-1`` are ignored (masked), the loss is only + computed for labels in ``[0, ..., config.vocab_size]`` + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Language modeling loss. + **prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)`` + Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). + **past**: + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + that contains pre-computed hidden-states (key and values in the attention blocks). + Can be used (see `past` input) to speed up sequential decoding. + **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. + + Examples:: + + import torch + from pytorch_transformers import GPT2Tokenizer, GPT2LMHeadModel + + tokenizer = GPT2Tokenizer.from_pretrained('gpt2') + model = GPT2LMHeadModel.from_pretrained('gpt2') + + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + outputs = model(input_ids, labels=input_ids) + loss, logits = outputs[:2] + + """ + def __init__(self, config): + super(GPT2LMHeadModel, self).__init__(config) + self.transformer = GPT2Model(config) + self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False) + + self.init_weights() + self.tie_weights() + + + def tie_weights(self): + """ Make sure we are sharing the input and output embeddings. + Export to TorchScript can't handle parameter sharing so we are cloning them instead. 
+ """ + self._tie_or_clone_weights(self.lm_head, + self.transformer.wte) + + def forward(self, input_ids, past=None, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, + labels=None, label_ignore=None): + transformer_outputs = self.transformer(input_ids, + past=past, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + hidden_states = transformer_outputs[0] + + lm_logits = self.lm_head(hidden_states) + + outputs = (lm_logits,) + transformer_outputs[1:] + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = lm_logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss(ignore_index=label_ignore, reduce=False) # 50258 is the padding id, otherwise -1 is used for masked LM. + loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), + shift_labels.view(-1)) + loss = torch.sum(loss.view(-1, shift_labels.shape[-1]), -1) + outputs = (loss,) + outputs + + + return outputs # (loss), lm_logits, presents, (all hidden_states), (attentions) + + + +@add_start_docstrings("""The GPT2 Model transformer with a language modeling head on top +(linear layer with weights tied to the input embeddings). """, GPT2_START_DOCSTRING, GPT2_INPUTS_DOCSTRING) +class GPT2ForLatentConnector(GPT2PreTrainedModel): + r""" + **labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Labels for language modeling. + Note that the labels **are shifted** inside the model, i.e. you can set ``lm_labels = input_ids`` + Indices are selected in ``[-1, 0, ..., config.vocab_size]`` + All labels set to ``-1`` are ignored (masked), the loss is only + computed for labels in ``[0, ..., config.vocab_size]`` + + Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs: + **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``: + Language modeling loss. + **prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, sequence_length, config.vocab_size)`` + Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax). + **past**: + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + that contains pre-computed hidden-states (key and values in the attention blocks). + Can be used (see `past` input) to speed up sequential decoding. + **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``) + list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings) + of shape ``(batch_size, sequence_length, hidden_size)``: + Hidden-states of the model at the output of each layer plus the initial embedding outputs. + **attentions**: (`optional`, returned when ``config.output_attentions=True``) + list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``: + Attentions weights after the attention softmax, used to compute the weighted average in the self-attention heads. 
+ + Examples:: + + import torch + from pytorch_transformers import GPT2Tokenizer, GPT2LMHeadModel + + tokenizer = GPT2Tokenizer.from_pretrained('gpt2') + model = GPT2LMHeadModel.from_pretrained('gpt2') + + input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1 + outputs = model(input_ids, labels=input_ids) + loss, logits = outputs[:2] + + """ + def __init__(self, config, latent_size=32, latent_as_gpt_emb=True, latent_as_gpt_memory=True): + + super(GPT2ForLatentConnector, self).__init__(config) + + + self.transformer = GPT2Model(config) + self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False) + + self.init_weights() + self.tie_weights() + + self.latent_as_gpt_emb = latent_as_gpt_emb + self.latent_as_gpt_memory = latent_as_gpt_memory + + + + def tie_weights(self): + """ Make sure we are sharing the input and output embeddings. + Export to TorchScript can't handle parameter sharing so we are cloning them instead. + """ + self._tie_or_clone_weights(self.lm_head, + self.transformer.wte) + + def forward(self, input_ids, past=None, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, + labels=None, label_ignore=None): + + + transformer_outputs = self.transformer(input_ids, + past=past, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask, + latent_as_gpt_emb=self.latent_as_gpt_emb, + latent_as_gpt_memory=self.latent_as_gpt_memory) + hidden_states = transformer_outputs[0] + + lm_logits = self.lm_head(hidden_states) + + outputs = (lm_logits,) + transformer_outputs[1:] + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = lm_logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss(ignore_index=label_ignore, reduce=False) # 50258 is the padding id, otherwise -1 is used for masked LM. + loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), + shift_labels.view(-1)) + loss = torch.sum(loss.view(-1, shift_labels.shape[-1]), -1) + outputs = (loss,) + outputs + + + return outputs # (loss), lm_logits, presents, (all hidden_states), (attentions) + +@add_start_docstrings("""The GPT2 Model transformer with a language modeling and a multiple-choice classification +head on top e.g. for RocStories/SWAG tasks. The two heads are two linear layers. +The language modeling head has its weights tied to the input embeddings, +the classification head takes as input the input of a specified classification token index in the input sequence). +""", GPT2_START_DOCSTRING, GPT2_INPUTS_DOCSTRING) +class GPT2DoubleHeadsModel(GPT2PreTrainedModel): + r""" + **mc_token_ids**: (`optional`, default to index of the last token of the input) ``torch.LongTensor`` of shape ``(batch_size, num_choices)``: + Index of the classification token in each input sequence. + Selected in the range ``[0, input_ids.size(-1) - 1[``. + **lm_labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``: + Labels for language modeling. + Note that the labels **are shifted** inside the model, i.e. you can set ``lm_labels = input_ids`` + Indices are selected in ``[-1, 0, ..., config.vocab_size]`` + All labels set to ``-1`` are ignored (masked), the loss is only + computed for labels in ``[0, ..., config.vocab_size]`` + **mc_labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size)``: + Labels for computing the multiple choice classification loss. 
+            Indices should be in ``[0, ..., num_choices]`` where `num_choices` is the size of the second dimension
+            of the input tensors. (see `input_ids` above)
+
+    Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
+        **lm_loss**: (`optional`, returned when ``lm_labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
+            Language modeling loss.
+        **mc_loss**: (`optional`, returned when ``multiple_choice_labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
+            Multiple choice classification loss.
+        **lm_prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, num_choices, sequence_length, config.vocab_size)``
+            Prediction scores of the language modeling head (scores for each vocabulary token before SoftMax).
+        **mc_prediction_scores**: ``torch.FloatTensor`` of shape ``(batch_size, num_choices)``
+            Prediction scores of the multiple choice classification head (scores for each choice before SoftMax).
+        **past**:
+            list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
+            that contains pre-computed hidden-states (key and values in the attention blocks).
+            Can be used (see `past` input) to speed up sequential decoding.
+        **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
+            list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
+            of shape ``(batch_size, sequence_length, hidden_size)``:
+            Hidden-states of the model at the output of each layer plus the initial embedding outputs.
+        **attentions**: (`optional`, returned when ``config.output_attentions=True``)
+            list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
+            Attention weights after the attention softmax, used to compute the weighted average in the self-attention heads.
+
+    Examples::
+
+        import torch
+        from pytorch_transformers import GPT2Tokenizer, GPT2DoubleHeadsModel
+
+        tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
+        model = GPT2DoubleHeadsModel.from_pretrained('gpt2')
+
+        # Add a [CLS] to the vocabulary (we should train it also!)
+        tokenizer.add_special_tokens({'cls_token': '[CLS]'})
+        model.resize_token_embeddings(len(tokenizer))  # Update the model embeddings with the new vocabulary size
+        print(tokenizer.cls_token_id, len(tokenizer))  # The newly added token is the last token of the vocabulary
+
+        choices = ["Hello, my dog is cute [CLS]", "Hello, my cat is cute [CLS]"]
+        encoded_choices = [tokenizer.encode(s) for s in choices]
+        cls_token_location = [tokens.index(tokenizer.cls_token_id) for tokens in encoded_choices]
+
+        input_ids = torch.tensor(encoded_choices).unsqueeze(0)  # Batch size: 1, number of choices: 2
+        mc_token_ids = torch.tensor([cls_token_location])  # Batch size: 1
+
+        outputs = model(input_ids, mc_token_ids=mc_token_ids)
+        lm_prediction_scores, mc_prediction_scores = outputs[:2]
+
+    """
+    def __init__(self, config):
+        super(GPT2DoubleHeadsModel, self).__init__(config)
+        self.transformer = GPT2Model(config)
+        self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
+        self.multiple_choice_head = SequenceSummary(config)
+
+        self.init_weights()
+        self.tie_weights()
+
+    def tie_weights(self):
+        """ Make sure we are sharing the input and output embeddings.
+            Export to TorchScript can't handle parameter sharing so we are cloning them instead.
+ """ + self._tie_or_clone_weights(self.lm_head, + self.transformer.wte) + + def forward(self, input_ids, past=None, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, + mc_token_ids=None, lm_labels=None, mc_labels=None): + transformer_outputs = self.transformer(input_ids, + past=past, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask) + + hidden_states = transformer_outputs[0] + + lm_logits = self.lm_head(hidden_states) + mc_logits = self.multiple_choice_head(hidden_states, mc_token_ids).squeeze(-1) + + outputs = (lm_logits, mc_logits) + transformer_outputs[1:] + if mc_labels is not None: + loss_fct = CrossEntropyLoss() + loss = loss_fct(mc_logits.view(-1, mc_logits.size(-1)), + mc_labels.view(-1)) + outputs = (loss,) + outputs + if lm_labels is not None: + shift_logits = lm_logits[..., :-1, :].contiguous() + shift_labels = lm_labels[..., 1:].contiguous() + loss_fct = CrossEntropyLoss(ignore_index=-1) + loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), + shift_labels.view(-1)) + outputs = (loss,) + outputs + + return outputs # (lm loss), (mc loss), lm logits, mc logits, presents, (all hidden_states), (attentions) + +############ +# XX Added # +############ + +class GPT2Model_XX(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.output_hidden_states = config.output_hidden_states + self.output_attentions = config.output_attentions + + self.wte = nn.Embedding(config.vocab_size, config.n_embd) + self.wpe = nn.Embedding(config.n_positions, config.n_embd) + self.drop = nn.Dropout(config.embd_pdrop) + self.h = nn.ModuleList([Block(config.n_ctx, config, scale=True) for _ in range(config.n_layer)]) + self.ln_f = nn.LayerNorm(config.n_embd, eps=config.layer_norm_epsilon) + + try: + self.latent_size = config.latent_size + except: + self.latent_size = 32 # default size is 32 + + self.linear = nn.Linear(self.latent_size, config.hidden_size * config.n_layer, bias=False) # different latent vector for each layer + self.linear_emb = nn.Linear(self.latent_size, config.hidden_size, bias=False) # share the same latent vector as the embeddings + + self.config = config + self.init_weights() + + def init_weights(self): + """ Initialize and prunes weights if needed. """ + # Initialize weights + self.apply(self._init_weights) + + # Prune heads if needed + if self.config.pruned_heads: + self.prune_heads(self.config.pruned_heads) + + def _init_weights(self, module): + """ Initialize the weights. + """ + if isinstance(module, (nn.Linear, nn.Embedding, Conv1D)): + # Slightly different from the TF version which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) + if isinstance(module, (nn.Linear, Conv1D)) and module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + + def _resize_token_embeddings(self, new_num_tokens): + self.wte = self._get_resized_embeddings(self.wte, new_num_tokens) + return self.wte + + def _prune_heads(self, heads_to_prune): + """ Prunes heads of the model. 
+ heads_to_prune: dict of {layer_num: list of heads to prune in this layer} + """ + for layer, heads in heads_to_prune.items(): + self.h[layer].attn.prune_heads(heads) + + def forward(self, input_ids, past=None, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, latent_as_gpt_emb=False, latent_as_gpt_memory=True): + if past is None: + past_length = 0 + past = [None] * len(self.h) + else: + if latent_as_gpt_emb: + past_emb = self.linear_emb(past) # used as embeddings to add on other three embeddings + + if latent_as_gpt_memory: + past = self.linear(past) + share_latent = False + if share_latent: + # the same latent vector shared by all layers + past = [past.unsqueeze(-2), past.unsqueeze(-2)] # query, key + past = [past] * len(self.h) + past_length = past[0][0].size(-2) + else: + # different latent vectors for each layer + past_split = torch.split(past.unsqueeze(1), self.config.hidden_size, dim=2) + past = list(zip(past_split,past_split)) + + # past = past.view(batch_size,len(self.h),-1) + # past = [[past[:,i,:].unsqueeze(-2), past[:,i,:].unsqueeze(-2) ] for i in range(len(self.h))] + past_length = 1 # past[0][0].size(-2) + else: + past_length = 0 + past = [None] * len(self.h) + + + if position_ids is None: + position_ids = torch.arange(past_length, input_ids.size(-1) + past_length, dtype=torch.long, device=input_ids.device) + position_ids = position_ids.unsqueeze(0).expand_as(input_ids) + + + # Attention mask. + if attention_mask is not None: + # We create a 3D attention mask from a 2D tensor mask. + # Sizes are [batch_size, 1, 1, to_seq_length] + # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] + # this attention mask is more simple than the triangular masking of causal attention + # used in OpenAI GPT, we just need to prepare the broadcast dimension here. + attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. 
+ attention_mask = attention_mask.to(dtype=next(self.parameters()).dtype) # fp16 compatibility + attention_mask = (1.0 - attention_mask) * -10000.0 + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # head_mask has shape n_layer x batch x n_heads x N x N + if head_mask is not None: + if head_mask.dim() == 1: + head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze(-1).unsqueeze(-1) + head_mask = head_mask.expand(self.config.n_layer, -1, -1, -1, -1) + elif head_mask.dim() == 2: + head_mask = head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze(-1) # We can specify head_mask for each layer + head_mask = head_mask.to(dtype=next(self.parameters()).dtype) # switch to fload if need + fp16 compatibility + else: + head_mask = [None] * self.config.n_layer + + + input_shape = input_ids.size() + input_ids = input_ids.view(-1, input_ids.size(-1)) + position_ids = position_ids.view(-1, position_ids.size(-1)) + + + inputs_embeds = self.wte(input_ids) + position_embeds = self.wpe(position_ids) + if token_type_ids is not None: + token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) + token_type_embeds = self.wte(token_type_ids) + else: + token_type_embeds = 0 + + + hidden_states = inputs_embeds + position_embeds + token_type_embeds + if latent_as_gpt_emb: + # pdb.set_trace() + hidden_states = hidden_states + past_emb.unsqueeze(1) + + hidden_states = self.drop(hidden_states) + + output_shape = input_shape + (hidden_states.size(-1),) + + presents = () + all_attentions = [] + all_hidden_states = () + for i, (block, layer_past) in enumerate(zip(self.h, past)): + if self.output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states.view(*output_shape),) + + + outputs = block(hidden_states, + layer_past=layer_past, + attention_mask=attention_mask, + head_mask=head_mask[i]) + + + hidden_states, present = outputs[:2] + presents = presents + (present,) + + if self.output_attentions: + all_attentions.append(outputs[2]) + + hidden_states = self.ln_f(hidden_states) + + hidden_states = hidden_states.view(*output_shape) + # Add last hidden state + if self.output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states,) + + outputs = (hidden_states, presents) + if self.output_hidden_states: + outputs = outputs + (all_hidden_states,) + if self.output_attentions: + # let the number of heads free (-1) so we can extract attention even after head pruning + attention_output_shape = input_shape[:-1] + (-1,) + all_attentions[0].shape[-2:] + all_attentions = tuple(t.view(*attention_output_shape) for t in all_attentions) + outputs = outputs + (all_attentions,) + return outputs # last hidden state, presents, (all hidden_states), (attentions) + + def _get_resized_embeddings(self, old_embeddings, new_num_tokens=None): + """ Build a resized Embedding Module from a provided token Embedding Module. + Increasing the size will add newly initialized vectors at the end + Reducing the size will remove vectors from the end + + Args: + new_num_tokens: (`optional`) int + New number of tokens in the embedding matrix. + Increasing the size will add newly initialized vectors at the end + Reducing the size will remove vectors from the end + If not provided or None: return the provided token Embedding Module. 
+        Return: ``torch.nn.Embedding``
+            Pointer to the resized Embedding Module or the old Embedding Module if new_num_tokens is None
+        """
+        if new_num_tokens is None:
+            return old_embeddings
+        old_num_tokens, old_embedding_dim = old_embeddings.weight.size()
+        if old_num_tokens == new_num_tokens:
+            return old_embeddings
+        # Build new embeddings
+        new_embeddings = nn.Embedding(new_num_tokens, old_embedding_dim)
+        new_embeddings.to(old_embeddings.weight.device)
+        # initialize all new embeddings (in particular added tokens)
+        self._init_weights(new_embeddings)
+        # Copy word embeddings from the previous weights
+        num_tokens_to_copy = min(old_num_tokens, new_num_tokens)
+        new_embeddings.weight.data[:num_tokens_to_copy, :] = old_embeddings.weight.data[:num_tokens_to_copy, :]
+        return new_embeddings
+
+class GPT2ForLatentConnector_XX(nn.Module):
+    def __init__(self,
+                 config,
+                 latent_size=32,
+                 latent_as_gpt_emb=True,
+                 latent_as_gpt_memory=True):
+
+        super().__init__()
+        self.config = config
+        self.transformer = GPT2Model_XX(config)
+        self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
+        self.init_weights()
+        self.tie_weights()
+        self.latent_as_gpt_emb = latent_as_gpt_emb
+        self.latent_as_gpt_memory = latent_as_gpt_memory
+
+    def init_weights(self):
+        """ Initialize and prune weights if needed. """
+        # Initialize weights
+        self.apply(self._init_weights)
+
+        # Prune heads if needed (delegated to the transformer, which owns the attention layers)
+        if self.config.pruned_heads:
+            self.transformer._prune_heads(self.config.pruned_heads)
+
+    def _init_weights(self, module):
+        """ Initialize the weights.
+        """
+        if isinstance(module, (nn.Linear, nn.Embedding, Conv1D)):
+            # Slightly different from the TF version which uses truncated_normal for initialization
+            # cf https://github.com/pytorch/pytorch/pull/5617
+            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
+            if isinstance(module, (nn.Linear, Conv1D)) and module.bias is not None:
+                module.bias.data.zero_()
+        elif isinstance(module, nn.LayerNorm):
+            module.bias.data.zero_()
+            module.weight.data.fill_(1.0)
+
+    def _tie_or_clone_weights(self, first_module, second_module):
+        """ Tie or clone module weights depending on whether we are using TorchScript or not
+        """
+        if self.config.torchscript:
+            first_module.weight = nn.Parameter(second_module.weight.clone())
+        else:
+            first_module.weight = second_module.weight
+
+        if hasattr(first_module, 'bias') and first_module.bias is not None:
+            first_module.bias.data = torch.nn.functional.pad(
+                first_module.bias.data,
+                (0, first_module.weight.shape[0] - first_module.bias.shape[0]),
+                'constant', 0,)
+
+    def tie_weights(self):
+        """ Make sure we are sharing the input and output embeddings.
+            Export to TorchScript can't handle parameter sharing so we are cloning them instead.
+ """ + self._tie_or_clone_weights(self.lm_head, + self.transformer.wte) + + def forward(self, input_ids, past=None, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, + labels=None, label_ignore=None): + + + transformer_outputs = self.transformer(input_ids, + past=past, + attention_mask=attention_mask, + token_type_ids=token_type_ids, + position_ids=position_ids, + head_mask=head_mask, + latent_as_gpt_emb=self.latent_as_gpt_emb, + latent_as_gpt_memory=self.latent_as_gpt_memory) + hidden_states = transformer_outputs[0] + + lm_logits = self.lm_head(hidden_states) + + outputs = (lm_logits,) + transformer_outputs[1:] + if labels is not None: + # Shift so that tokens < n predict n + shift_logits = lm_logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss(ignore_index=label_ignore, reduce=False) # 50258 is the padding id, otherwise -1 is used for masked LM. + loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), + shift_labels.view(-1)) + loss = torch.sum(loss.view(-1, shift_labels.shape[-1]), -1) + outputs = (loss,) + outputs + + return outputs # (loss), lm_logits, presents, (all hidden_states), (attentions) + + def resize_token_embeddings(self, new_num_tokens=None): + model_embeds = self.transformer._resize_token_embeddings(new_num_tokens) + if new_num_tokens is None: + return model_embeds + self.config.vocab_size = new_num_tokens + self.transformer.vocab_size = new_num_tokens + if hasattr(self, 'tie_weights'): + self.tie_weights() + return model_embeds diff --git a/core/models/latent_diffusion/vae/optimus_modules/tokenization_bert.py b/core/models/latent_diffusion/vae/optimus_modules/tokenization_bert.py new file mode 100644 index 0000000000000000000000000000000000000000..b85a4ccf9c382f49e2ba6c68a4e76a8c8d99ef19 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/tokenization_bert.py @@ -0,0 +1,457 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tokenization classes.""" + +from __future__ import absolute_import, division, print_function, unicode_literals + +import collections +import logging +import os +import unicodedata +from io import open + +from .tokenization_utils import PreTrainedTokenizer + +logger = logging.getLogger(__name__) + +VOCAB_FILES_NAMES = {'vocab_file': 'vocab.txt'} + +PRETRAINED_VOCAB_FILES_MAP = { + 'vocab_file': + { + 'bert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt", + 'bert-large-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-vocab.txt", + 'bert-base-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-vocab.txt", + 'bert-large-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-vocab.txt", + 'bert-base-multilingual-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased-vocab.txt", + 'bert-base-multilingual-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased-vocab.txt", + 'bert-base-chinese': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese-vocab.txt", + 'bert-base-german-cased': "https://int-deepset-models-bert.s3.eu-central-1.amazonaws.com/pytorch/bert-base-german-cased-vocab.txt", + 'bert-large-uncased-whole-word-masking': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-vocab.txt", + 'bert-large-cased-whole-word-masking': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-vocab.txt", + 'bert-large-uncased-whole-word-masking-finetuned-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-finetuned-squad-vocab.txt", + 'bert-large-cased-whole-word-masking-finetuned-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-finetuned-squad-vocab.txt", + 'bert-base-cased-finetuned-mrpc': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-finetuned-mrpc-vocab.txt", + } +} + +PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { + 'bert-base-uncased': 512, + 'bert-large-uncased': 512, + 'bert-base-cased': 512, + 'bert-large-cased': 512, + 'bert-base-multilingual-uncased': 512, + 'bert-base-multilingual-cased': 512, + 'bert-base-chinese': 512, + 'bert-base-german-cased': 512, + 'bert-large-uncased-whole-word-masking': 512, + 'bert-large-cased-whole-word-masking': 512, + 'bert-large-uncased-whole-word-masking-finetuned-squad': 512, + 'bert-large-cased-whole-word-masking-finetuned-squad': 512, + 'bert-base-cased-finetuned-mrpc': 512, +} + +PRETRAINED_INIT_CONFIGURATION = { + 'bert-base-uncased': {'do_lower_case': True}, + 'bert-large-uncased': {'do_lower_case': True}, + 'bert-base-cased': {'do_lower_case': False}, + 'bert-large-cased': {'do_lower_case': False}, + 'bert-base-multilingual-uncased': {'do_lower_case': True}, + 'bert-base-multilingual-cased': {'do_lower_case': False}, + 'bert-base-chinese': {'do_lower_case': False}, + 'bert-base-german-cased': {'do_lower_case': False}, + 'bert-large-uncased-whole-word-masking': {'do_lower_case': True}, + 'bert-large-cased-whole-word-masking': {'do_lower_case': False}, + 'bert-large-uncased-whole-word-masking-finetuned-squad': {'do_lower_case': True}, + 'bert-large-cased-whole-word-masking-finetuned-squad': {'do_lower_case': False}, + 'bert-base-cased-finetuned-mrpc': {'do_lower_case': False}, +} + + +def load_vocab(vocab_file): + """Loads a vocabulary file into a 
dictionary.""" + vocab = collections.OrderedDict() + with open(vocab_file, "r", encoding="utf-8") as reader: + tokens = reader.readlines() + for index, token in enumerate(tokens): + token = token.rstrip('\n') + vocab[token] = index + return vocab + + +def whitespace_tokenize(text): + """Runs basic whitespace cleaning and splitting on a piece of text.""" + text = text.strip() + if not text: + return [] + tokens = text.split() + return tokens + + +class BertTokenizer(PreTrainedTokenizer): + r""" + Constructs a BertTokenizer. + :class:`~pytorch_transformers.BertTokenizer` runs end-to-end tokenization: punctuation splitting + wordpiece + + Args: + vocab_file: Path to a one-wordpiece-per-line vocabulary file + do_lower_case: Whether to lower case the input. Only has an effect when do_wordpiece_only=False + do_basic_tokenize: Whether to do basic tokenization before wordpiece. + max_len: An artificial maximum length to truncate tokenized sequences to; Effective maximum length is always the + minimum of this value (if specified) and the underlying BERT model's sequence length. + never_split: List of tokens which will never be split during tokenization. Only has an effect when + do_wordpiece_only=False + """ + + vocab_files_names = VOCAB_FILES_NAMES + pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP + pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION + max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES + + def __init__(self, vocab_file, do_lower_case=True, do_basic_tokenize=True, never_split=None, + unk_token="[UNK]", sep_token="[SEP]", pad_token="[PAD]", cls_token="[CLS]", + mask_token="[MASK]", tokenize_chinese_chars=True, **kwargs): + """Constructs a BertTokenizer. + + Args: + **vocab_file**: Path to a one-wordpiece-per-line vocabulary file + **do_lower_case**: (`optional`) boolean (default True) + Whether to lower case the input + Only has an effect when do_basic_tokenize=True + **do_basic_tokenize**: (`optional`) boolean (default True) + Whether to do basic tokenization before wordpiece. + **never_split**: (`optional`) list of string + List of tokens which will never be split during tokenization. + Only has an effect when do_basic_tokenize=True + **tokenize_chinese_chars**: (`optional`) boolean (default True) + Whether to tokenize Chinese characters. + This should likely be deactivated for Japanese: + see: https://github.com/huggingface/pytorch-pretrained-BERT/issues/328 + """ + super(BertTokenizer, self).__init__(unk_token=unk_token, sep_token=sep_token, + pad_token=pad_token, cls_token=cls_token, + mask_token=mask_token, **kwargs) + self.max_len_single_sentence = self.max_len - 2 # take into account special tokens + self.max_len_sentences_pair = self.max_len - 3 # take into account special tokens + + if not os.path.isfile(vocab_file): + raise ValueError( + "Can't find a vocabulary file at path '{}'. 
To load the vocabulary from a Google pretrained " + "model use `tokenizer = BertTokenizer.from_pretrained(PRETRAINED_MODEL_NAME)`".format(vocab_file)) + self.vocab = load_vocab(vocab_file) + self.ids_to_tokens = collections.OrderedDict( + [(ids, tok) for tok, ids in self.vocab.items()]) + self.do_basic_tokenize = do_basic_tokenize + if do_basic_tokenize: + self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case, + never_split=never_split, + tokenize_chinese_chars=tokenize_chinese_chars) + self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab, unk_token=self.unk_token) + + @property + def vocab_size(self): + return len(self.vocab) + + def _tokenize(self, text): + split_tokens = [] + if self.do_basic_tokenize: + for token in self.basic_tokenizer.tokenize(text, never_split=self.all_special_tokens): + for sub_token in self.wordpiece_tokenizer.tokenize(token): + split_tokens.append(sub_token) + else: + split_tokens = self.wordpiece_tokenizer.tokenize(text) + return split_tokens + + def _convert_token_to_id(self, token): + """ Converts a token (str/unicode) in an id using the vocab. """ + return self.vocab.get(token, self.vocab.get(self.unk_token)) + + def _convert_id_to_token(self, index): + """Converts an index (integer) in a token (string/unicode) using the vocab.""" + return self.ids_to_tokens.get(index, self.unk_token) + + def convert_tokens_to_string(self, tokens): + """ Converts a sequence of tokens (string) in a single string. """ + out_string = ' '.join(tokens).replace(' ##', '').strip() + return out_string + + def add_special_tokens_single_sentence(self, token_ids): + """ + Adds special tokens to the a sequence for sequence classification tasks. + A BERT sequence has the following format: [CLS] X [SEP] + """ + return [self.cls_token_id] + token_ids + [self.sep_token_id] + + def add_special_tokens_sentences_pair(self, token_ids_0, token_ids_1): + """ + Adds special tokens to a sequence pair for sequence classification tasks. + A BERT sequence pair has the following format: [CLS] A [SEP] B [SEP] + """ + sep = [self.sep_token_id] + cls = [self.cls_token_id] + return cls + token_ids_0 + sep + token_ids_1 + sep + + def save_vocabulary(self, vocab_path): + """Save the tokenizer vocabulary to a directory or file.""" + index = 0 + if os.path.isdir(vocab_path): + vocab_file = os.path.join(vocab_path, VOCAB_FILES_NAMES['vocab_file']) + else: + vocab_file = vocab_path + with open(vocab_file, "w", encoding="utf-8") as writer: + for token, token_index in sorted(self.vocab.items(), key=lambda kv: kv[1]): + if index != token_index: + logger.warning("Saving vocabulary to {}: vocabulary indices are not consecutive." + " Please check that the vocabulary is not corrupted!".format(vocab_file)) + index = token_index + writer.write(token + u'\n') + index += 1 + return (vocab_file,) + + +class BasicTokenizer(object): + """Runs basic tokenization (punctuation splitting, lower casing, etc.).""" + + def __init__(self, do_lower_case=True, never_split=None, tokenize_chinese_chars=True): + """ Constructs a BasicTokenizer. + + Args: + **do_lower_case**: Whether to lower case the input. + **never_split**: (`optional`) list of str + Kept for backward compatibility purposes. + Now implemented directly at the base class level (see :func:`PreTrainedTokenizer.tokenize`) + List of token not to split. + **tokenize_chinese_chars**: (`optional`) boolean (default True) + Whether to tokenize Chinese characters. 
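For orientation, here is a minimal usage sketch of the `BertTokenizer` defined above. The vocab path points at the file added later in this diff; the sample sentence and the exact sub-word splits shown in comments are illustrative, not guaranteed outputs.

```python
# Minimal sketch, assuming this module is importable and the Bert vocab file
# from this diff is on disk. Outputs in comments are illustrative.
tokenizer = BertTokenizer(
    vocab_file="core/models/latent_diffusion/vae/optimus_modules/vocab/bert-base-cased-vocab.txt",
    do_lower_case=False)
tokens = tokenizer.tokenize("Tokenization is lossless")  # e.g. ['To', '##ken', '##ization', 'is', 'loss', '##less']
ids = tokenizer.convert_tokens_to_ids(tokens)
ids = tokenizer.add_special_tokens_single_sentence(ids)   # wraps as [CLS] ... [SEP]
print(tokenizer.convert_tokens_to_string(tokens))         # '##' continuation markers are merged back
```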
+ This should likely be deactivated for Japanese: + see: https://github.com/huggingface/pytorch-pretrained-BERT/issues/328 + """ + if never_split is None: + never_split = [] + self.do_lower_case = do_lower_case + self.never_split = never_split + self.tokenize_chinese_chars = tokenize_chinese_chars + + def tokenize(self, text, never_split=None): + """ Basic Tokenization of a piece of text. + Split on "white spaces" only, for sub-word tokenization, see WordPieceTokenizer. + + Args: + **never_split**: (`optional`) list of str + Kept for backward compatibility purposes. + Now implemented directly at the base class level (see :func:`PreTrainedTokenizer.tokenize`) + List of token not to split. + """ + never_split = self.never_split + (never_split if never_split is not None else []) + text = self._clean_text(text) + # This was added on November 1st, 2018 for the multilingual and Chinese + # models. This is also applied to the English models now, but it doesn't + # matter since the English models were not trained on any Chinese data + # and generally don't have any Chinese data in them (there are Chinese + # characters in the vocabulary because Wikipedia does have some Chinese + # words in the English Wikipedia.). + if self.tokenize_chinese_chars: + text = self._tokenize_chinese_chars(text) + orig_tokens = whitespace_tokenize(text) + split_tokens = [] + for token in orig_tokens: + if self.do_lower_case and token not in never_split: + token = token.lower() + token = self._run_strip_accents(token) + split_tokens.extend(self._run_split_on_punc(token)) + + output_tokens = whitespace_tokenize(" ".join(split_tokens)) + return output_tokens + + def _run_strip_accents(self, text): + """Strips accents from a piece of text.""" + text = unicodedata.normalize("NFD", text) + output = [] + for char in text: + cat = unicodedata.category(char) + if cat == "Mn": + continue + output.append(char) + return "".join(output) + + def _run_split_on_punc(self, text, never_split=None): + """Splits punctuation on a piece of text.""" + if never_split is not None and text in never_split: + return [text] + chars = list(text) + i = 0 + start_new_word = True + output = [] + while i < len(chars): + char = chars[i] + if _is_punctuation(char): + output.append([char]) + start_new_word = True + else: + if start_new_word: + output.append([]) + start_new_word = False + output[-1].append(char) + i += 1 + + return ["".join(x) for x in output] + + def _tokenize_chinese_chars(self, text): + """Adds whitespace around any CJK character.""" + output = [] + for char in text: + cp = ord(char) + if self._is_chinese_char(cp): + output.append(" ") + output.append(char) + output.append(" ") + else: + output.append(char) + return "".join(output) + + def _is_chinese_char(self, cp): + """Checks whether CP is the codepoint of a CJK character.""" + # This defines a "chinese character" as anything in the CJK Unicode block: + # https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) + # + # Note that the CJK Unicode block is NOT all Japanese and Korean characters, + # despite its name. The modern Korean Hangul alphabet is a different block, + # as is Japanese Hiragana and Katakana. Those alphabets are used to write + # space-separated words, so they are not treated specially and handled + # like the all of the other languages. 
+ if ((cp >= 0x4E00 and cp <= 0x9FFF) or # + (cp >= 0x3400 and cp <= 0x4DBF) or # + (cp >= 0x20000 and cp <= 0x2A6DF) or # + (cp >= 0x2A700 and cp <= 0x2B73F) or # + (cp >= 0x2B740 and cp <= 0x2B81F) or # + (cp >= 0x2B820 and cp <= 0x2CEAF) or + (cp >= 0xF900 and cp <= 0xFAFF) or # + (cp >= 0x2F800 and cp <= 0x2FA1F)): # + return True + + return False + + def _clean_text(self, text): + """Performs invalid character removal and whitespace cleanup on text.""" + output = [] + for char in text: + cp = ord(char) + if cp == 0 or cp == 0xfffd or _is_control(char): + continue + if _is_whitespace(char): + output.append(" ") + else: + output.append(char) + return "".join(output) + + +class WordpieceTokenizer(object): + """Runs WordPiece tokenization.""" + + def __init__(self, vocab, unk_token, max_input_chars_per_word=100): + self.vocab = vocab + self.unk_token = unk_token + self.max_input_chars_per_word = max_input_chars_per_word + + def tokenize(self, text): + """Tokenizes a piece of text into its word pieces. + + This uses a greedy longest-match-first algorithm to perform tokenization + using the given vocabulary. + + For example: + input = "unaffable" + output = ["un", "##aff", "##able"] + + Args: + text: A single token or whitespace separated tokens. This should have + already been passed through `BasicTokenizer`. + + Returns: + A list of wordpiece tokens. + """ + + output_tokens = [] + for token in whitespace_tokenize(text): + chars = list(token) + if len(chars) > self.max_input_chars_per_word: + output_tokens.append(self.unk_token) + continue + + is_bad = False + start = 0 + sub_tokens = [] + while start < len(chars): + end = len(chars) + cur_substr = None + while start < end: + substr = "".join(chars[start:end]) + if start > 0: + substr = "##" + substr + if substr in self.vocab: + cur_substr = substr + break + end -= 1 + if cur_substr is None: + is_bad = True + break + sub_tokens.append(cur_substr) + start = end + + if is_bad: + output_tokens.append(self.unk_token) + else: + output_tokens.extend(sub_tokens) + return output_tokens + + +def _is_whitespace(char): + """Checks whether `chars` is a whitespace character.""" + # \t, \n, and \r are technically contorl characters but we treat them + # as whitespace since they are generally considered as such. + if char == " " or char == "\t" or char == "\n" or char == "\r": + return True + cat = unicodedata.category(char) + if cat == "Zs": + return True + return False + + +def _is_control(char): + """Checks whether `chars` is a control character.""" + # These are technically control characters but we count them as whitespace + # characters. + if char == "\t" or char == "\n" or char == "\r": + return False + cat = unicodedata.category(char) + if cat.startswith("C"): + return True + return False + + +def _is_punctuation(char): + """Checks whether `chars` is a punctuation character.""" + cp = ord(char) + # We treat all non-letter/number ASCII as punctuation. + # Characters such as "^", "$", and "`" are not in the Unicode + # Punctuation class but we treat them as punctuation anyways, for + # consistency. 
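To make the greedy longest-match-first loop in `WordpieceTokenizer.tokenize` above concrete, a small self-contained sketch (the toy vocabulary is made up for illustration):

```python
# Toy vocabulary; a real one comes from load_vocab() above.
vocab = {"un": 0, "##aff": 1, "##able": 2, "[UNK]": 3}
wp = WordpieceTokenizer(vocab=vocab, unk_token="[UNK]")

print(wp.tokenize("unaffable"))  # ['un', '##aff', '##able']: longest prefix first, then '##'-suffixes
print(wp.tokenize("xyz"))        # ['[UNK]']: no prefix of 'xyz' is in the vocab, so the whole token is bad
```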
+ if ((cp >= 33 and cp <= 47) or (cp >= 58 and cp <= 64) or + (cp >= 91 and cp <= 96) or (cp >= 123 and cp <= 126)): + return True + cat = unicodedata.category(char) + if cat.startswith("P"): + return True + return False diff --git a/core/models/latent_diffusion/vae/optimus_modules/tokenization_gpt2.py b/core/models/latent_diffusion/vae/optimus_modules/tokenization_gpt2.py new file mode 100644 index 0000000000000000000000000000000000000000..79eb275e1ca14e0d4ed5ca4d778978a6a398528f --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/tokenization_gpt2.py @@ -0,0 +1,228 @@ +# coding=utf-8 +# Copyright 2018 The Open AI Team Authors and The HuggingFace Inc. team. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tokenization classes for OpenAI GPT.""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import sys +import json +import logging +import os +import regex as re +from io import open + +try: + from functools import lru_cache +except ImportError: + # Just a dummy decorator to get the checks to run on python2 + # because honestly I don't want to support a byte-level unicode BPE tokenizer on python 2 right now. + def lru_cache(): + return lambda func: func + +from .tokenization_utils import PreTrainedTokenizer + +logger = logging.getLogger(__name__) + +VOCAB_FILES_NAMES = { + 'vocab_file': 'vocab.json', + 'merges_file': 'merges.txt', +} + +PRETRAINED_VOCAB_FILES_MAP = { + 'vocab_file': + { + 'gpt2': "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-vocab.json", + 'gpt2-medium': "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-medium-vocab.json", + 'gpt2-large': "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-large-vocab.json", + }, + 'merges_file': + { + 'gpt2': "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-merges.txt", + 'gpt2-medium': "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-medium-merges.txt", + 'gpt2-large': "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-large-merges.txt", + }, +} + +PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = { + 'gpt2': 1024, + 'gpt2-medium': 1024, + 'gpt2-large': 1024, +} + +@lru_cache() +def bytes_to_unicode(): + """ + Returns list of utf-8 byte and a mapping to unicode strings. + We specifically avoids mapping to whitespace/control characters the bpe code barfs on. + + The reversible bpe codes work on unicode strings. + This means you need a large # of unicode characters in your vocab if you want to avoid UNKs. + When you're at something like a 10B token dataset you end up needing around 5K for decent coverage. + This is a signficant percentage of your normal, say, 32K bpe vocab. + To avoid that, we want lookup tables between utf-8 bytes and unicode strings. 
+ """ + _chr = unichr if sys.version_info[0] == 2 else chr + bs = list(range(ord("!"), ord("~")+1))+list(range(ord("¡"), ord("¬")+1))+list(range(ord("®"), ord("ÿ")+1)) + cs = bs[:] + n = 0 + for b in range(2**8): + if b not in bs: + bs.append(b) + cs.append(2**8+n) + n += 1 + cs = [_chr(n) for n in cs] + return dict(zip(bs, cs)) + +def get_pairs(word): + """Return set of symbol pairs in a word. + + Word is represented as tuple of symbols (symbols being variable-length strings). + """ + pairs = set() + prev_char = word[0] + for char in word[1:]: + pairs.add((prev_char, char)) + prev_char = char + return pairs + +class GPT2Tokenizer(PreTrainedTokenizer): + """ + GPT-2 BPE tokenizer. Peculiarities: + - Byte-level Byte-Pair-Encoding + - Requires a space to start the input string => will add a space is there isn't. + As a consequence, this tokenizer `encode` and `decode` method will not conserve + the absence of a space at the beginning of a string: `tokenizer.decode(tokenizer.encode("Hello")) = " Hello" + """ + vocab_files_names = VOCAB_FILES_NAMES + pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP + max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES + + def __init__(self, vocab_file, merges_file, errors='replace', unk_token="<|endoftext|>", + bos_token="<|endoftext|>", eos_token="<|endoftext|>", **kwargs): + super(GPT2Tokenizer, self).__init__(bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, **kwargs) + self.max_len_single_sentence = self.max_len # no default special tokens - you can update this value if you add special tokens + self.max_len_sentences_pair = self.max_len # no default special tokens - you can update this value if you add special tokens + + self.encoder = json.load(open(vocab_file, encoding="utf-8")) + self.decoder = {v: k for k, v in self.encoder.items()} + self.errors = errors # how to handle errors in decoding + self.byte_encoder = bytes_to_unicode() + self.byte_decoder = {v: k for k, v in self.byte_encoder.items()} + bpe_data = open(merges_file, encoding='utf-8').read().split('\n')[1:-1] + bpe_merges = [tuple(merge.split()) for merge in bpe_data] + self.bpe_ranks = dict(zip(bpe_merges, range(len(bpe_merges)))) + self.cache = {} + + # Should haved added re.IGNORECASE so BPE merges can happen for capitalized versions of contractions + self.pat = re.compile(r"""'s|'t|'re|'ve|'m|'ll|'d| ?\p{L}+| ?\p{N}+| ?[^\s\p{L}\p{N}]+|\s+(?!\S)|\s+""") + + @property + def vocab_size(self): + return len(self.encoder) + + def bpe(self, token): + if token in self.cache: + return self.cache[token] + word = tuple(token) + pairs = get_pairs(word) + + if not pairs: + return token + + while True: + bigram = min(pairs, key = lambda pair: self.bpe_ranks.get(pair, float('inf'))) + if bigram not in self.bpe_ranks: + break + first, second = bigram + new_word = [] + i = 0 + while i < len(word): + try: + j = word.index(first, i) + new_word.extend(word[i:j]) + i = j + except: + new_word.extend(word[i:]) + break + + if word[i] == first and i < len(word)-1 and word[i+1] == second: + new_word.append(first+second) + i += 2 + else: + new_word.append(word[i]) + i += 1 + new_word = tuple(new_word) + word = new_word + if len(word) == 1: + break + else: + pairs = get_pairs(word) + word = ' '.join(word) + self.cache[token] = word + return word + + def _tokenize(self, text): + """ Tokenize a string. """ + text = ' ' + text # GPT-2 (and RoBERTa) tokenizers need at least one space to begin the sentence with. 
+        bpe_tokens = []
+        for token in re.findall(self.pat, text):
+            if sys.version_info[0] == 2:
+                token = ''.join(self.byte_encoder[ord(b)] for b in token)  # Maps all our bytes to unicode strings, avoiding control tokens of the BPE (spaces in our case)
+            else:
+                token = ''.join(self.byte_encoder[b] for b in token.encode('utf-8'))  # Maps all our bytes to unicode strings, avoiding control tokens of the BPE (spaces in our case)
+            bpe_tokens.extend(bpe_token for bpe_token in self.bpe(token).split(' '))
+        return bpe_tokens
+
+    def _convert_token_to_id(self, token):
+        """ Converts a token (str/unicode) to an id using the vocab. """
+        return self.encoder.get(token, self.encoder.get(self.unk_token))
+
+    def _convert_id_to_token(self, index):
+        """Converts an index (integer) to a token (string/unicode) using the vocab."""
+        return self.decoder.get(index)
+
+    def convert_tokens_to_string(self, tokens):
+        """ Converts a sequence of tokens (string) into a single string. """
+        text = ''.join(tokens)
+        text = bytearray([self.byte_decoder[c] for c in text]).decode('utf-8', errors=self.errors)
+        return text
+
+    def save_vocabulary(self, save_directory):
+        """Save the tokenizer vocabulary and merge files to a directory."""
+        if not os.path.isdir(save_directory):
+            logger.error("Vocabulary path ({}) should be a directory".format(save_directory))
+            return
+        vocab_file = os.path.join(save_directory, VOCAB_FILES_NAMES['vocab_file'])
+        merge_file = os.path.join(save_directory, VOCAB_FILES_NAMES['merges_file'])
+
+        with open(vocab_file, 'w', encoding='utf-8') as f:
+            f.write(json.dumps(self.encoder, ensure_ascii=False))
+
+        index = 0
+        with open(merge_file, "w", encoding="utf-8") as writer:
+            writer.write(u'#version: 0.2\n')
+            for bpe_tokens, token_index in sorted(self.bpe_ranks.items(), key=lambda kv: kv[1]):
+                if index != token_index:
+                    logger.warning("Saving vocabulary to {}: BPE merge indices are not consecutive."
+                                   " Please check that the tokenizer is not corrupted!".format(merge_file))
+                    index = token_index
+                writer.write(' '.join(bpe_tokens) + u'\n')
+                index += 1
+
+        return vocab_file, merge_file
+
+    # XX added (note: the special-token names below were eaten by HTML extraction;
+    # restored here as the <BOS>/<EOS> added tokens Optimus registers for GPT-2)
+    def add_special_tokens_single_sentence(self, token_ids):
+        return [self.added_tokens_encoder['<BOS>']] + token_ids + [self.added_tokens_encoder['<EOS>']]
diff --git a/core/models/latent_diffusion/vae/optimus_modules/tokenization_utils.py b/core/models/latent_diffusion/vae/optimus_modules/tokenization_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d0c7916bc2efb93b6526f1ee0e19c58208a008c
--- /dev/null
+++ b/core/models/latent_diffusion/vae/optimus_modules/tokenization_utils.py
@@ -0,0 +1,809 @@
+# coding=utf-8
+# Copyright 2018 The Open AI Team Authors and The HuggingFace Inc. team.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
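Before the base class below, a brief sketch of why the byte-level scheme in `bytes_to_unicode` round-trips arbitrary text: every UTF-8 byte is mapped to a printable stand-in character, so the BPE merge table never has to contain raw whitespace or control bytes, and decoding simply inverts the map.

```python
# Self-contained round-trip check for bytes_to_unicode (Python 3 branch).
byte_encoder = bytes_to_unicode()                 # {byte value -> printable unicode char}
byte_decoder = {v: k for k, v in byte_encoder.items()}

text = "héllo world"
stand_in = ''.join(byte_encoder[b] for b in text.encode('utf-8'))
restored = bytearray(byte_decoder[c] for c in stand_in).decode('utf-8')
assert restored == text  # lossless, including the space and the accented character
```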
+"""Tokenization classes for OpenAI GPT.""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import logging +import os +import json +import six +import copy +from io import open + +from .file_utils import cached_path + +logger = logging.getLogger(__name__) + +SPECIAL_TOKENS_MAP_FILE = 'special_tokens_map.json' +ADDED_TOKENS_FILE = 'added_tokens.json' +TOKENIZER_CONFIG_FILE = 'tokenizer_config.json' + +class PreTrainedTokenizer(object): + """ Base class for all tokenizers. + Handle all the shared methods for tokenization and special tokens as well as methods dowloading/caching/loading pretrained tokenizers as well as adding tokens to the vocabulary. + + This class also contain the added tokens in a unified way on top of all tokenizers so we don't have to handle the specific vocabulary augmentation methods of the various underlying dictionary structures (BPE, sentencepiece...). + + Class attributes (overridden by derived classes): + + - ``vocab_files_names``: a python ``dict`` with, as keys, the ``__init__`` keyword name of each vocabulary file required by the model, and as associated values, the filename for saving the associated file (string). + - ``pretrained_vocab_files_map``: a python ``dict of dict`` the high-level keys being the ``__init__`` keyword name of each vocabulary file required by the model, the low-level being the `short-cut-names` (string) of the pretrained models with, as associated values, the `url` (string) to the associated pretrained vocabulary file. + - ``max_model_input_sizes``: a python ``dict`` with, as keys, the `short-cut-names` (string) of the pretrained models, and as associated values, the maximum length of the sequence inputs of this model, or None if the model has no maximum input size. + - ``pretrained_init_configuration``: a python ``dict`` with, as keys, the `short-cut-names` (string) of the pretrained models, and as associated values, a dictionnary of specific arguments to pass to the ``__init__``method of the tokenizer class for this pretrained model when loading the tokenizer with the ``from_pretrained()`` method. + + Parameters: + + - ``bos_token``: (`Optional`) string: a beginning of sentence token. Will be associated to ``self.bos_token`` and ``self.bos_token_id`` + + - ``eos_token``: (`Optional`) string: an end of sentence token. Will be associated to ``self.eos_token`` and ``self.eos_token_id`` + + - ``unk_token``: (`Optional`) string: an unknown token. Will be associated to ``self.unk_token`` and ``self.unk_token_id`` + + - ``sep_token``: (`Optional`) string: a separation token (e.g. to separate context and query in an input sequence). Will be associated to ``self.sep_token`` and ``self.sep_token_id`` + + - ``pad_token``: (`Optional`) string: a padding token. Will be associated to ``self.pad_token`` and ``self.pad_token_id`` + + - ``cls_token``: (`Optional`) string: a classification token (e.g. to extract a summary of an input sequence leveraging self-attention along the full depth of the model). Will be associated to ``self.cls_token`` and ``self.cls_token_id`` + + - ``mask_token``: (`Optional`) string: a masking token (e.g. when training a model with masked-language modeling). Will be associated to ``self.mask_token`` and ``self.mask_token_id`` + + - ``additional_special_tokens``: (`Optional`) list: a list of additional special tokens. Adding all special tokens here ensure they won't be split by the tokenization process. 
Will be associated to ``self.additional_special_tokens`` and ``self.additional_special_tokens_ids`` + """ + vocab_files_names = {} + pretrained_vocab_files_map = {} + pretrained_init_configuration = {} + max_model_input_sizes = {} + + SPECIAL_TOKENS_ATTRIBUTES = ["bos_token", "eos_token", "unk_token", "sep_token", + "pad_token", "cls_token", "mask_token", + "additional_special_tokens"] + + @property + def bos_token(self): + """ Beginning of sentence token (string). Log an error if used while not having been set. """ + if self._bos_token is None: + logger.error("Using bos_token, but it is not set yet.") + return self._bos_token + + @property + def eos_token(self): + """ End of sentence token (string). Log an error if used while not having been set. """ + if self._eos_token is None: + logger.error("Using eos_token, but it is not set yet.") + return self._eos_token + + @property + def unk_token(self): + """ Unknown token (string). Log an error if used while not having been set. """ + if self._unk_token is None: + logger.error("Using unk_token, but it is not set yet.") + return self._unk_token + + @property + def sep_token(self): + """ Separation token (string). E.g. separate context and query in an input sequence. Log an error if used while not having been set. """ + if self._sep_token is None: + logger.error("Using sep_token, but it is not set yet.") + return self._sep_token + + @property + def pad_token(self): + """ Padding token (string). Log an error if used while not having been set. """ + if self._pad_token is None: + logger.error("Using pad_token, but it is not set yet.") + return self._pad_token + + @property + def cls_token(self): + """ Classification token (string). E.g. to extract a summary of an input sequence leveraging self-attention along the full depth of the model. Log an error if used while not having been set. """ + if self._cls_token is None: + logger.error("Using cls_token, but it is not set yet.") + return self._cls_token + + @property + def mask_token(self): + """ Mask token (string). E.g. when training a model with masked-language modeling. Log an error if used while not having been set. """ + if self._mask_token is None: + logger.error("Using mask_token, but it is not set yet.") + return self._mask_token + + @property + def additional_special_tokens(self): + """ All the additional special tokens you may want to use (list of strings). Log an error if used while not having been set. """ + if self._additional_special_tokens is None: + logger.error("Using additional_special_tokens, but it is not set yet.") + return self._additional_special_tokens + + @bos_token.setter + def bos_token(self, value): + self._bos_token = value + + @eos_token.setter + def eos_token(self, value): + self._eos_token = value + + @unk_token.setter + def unk_token(self, value): + self._unk_token = value + + @sep_token.setter + def sep_token(self, value): + self._sep_token = value + + @pad_token.setter + def pad_token(self, value): + self._pad_token = value + + @cls_token.setter + def cls_token(self, value): + self._cls_token = value + + @mask_token.setter + def mask_token(self, value): + self._mask_token = value + + @additional_special_tokens.setter + def additional_special_tokens(self, value): + self._additional_special_tokens = value + + @property + def bos_token_id(self): + """ Id of the beginning of sentence token in the vocabulary. Log an error if used while not having been set. 
""" + return self.convert_tokens_to_ids(self.bos_token) + + @property + def eos_token_id(self): + """ Id of the end of sentence token in the vocabulary. Log an error if used while not having been set. """ + return self.convert_tokens_to_ids(self.eos_token) + + @property + def unk_token_id(self): + """ Id of the unknown token in the vocabulary. Log an error if used while not having been set. """ + return self.convert_tokens_to_ids(self.unk_token) + + @property + def sep_token_id(self): + """ Id of the separation token in the vocabulary. E.g. separate context and query in an input sequence. Log an error if used while not having been set. """ + return self.convert_tokens_to_ids(self.sep_token) + + @property + def pad_token_id(self): + """ Id of the padding token in the vocabulary. Log an error if used while not having been set. """ + return self.convert_tokens_to_ids(self.pad_token) + + @property + def cls_token_id(self): + """ Id of the classification token in the vocabulary. E.g. to extract a summary of an input sequence leveraging self-attention along the full depth of the model. Log an error if used while not having been set. """ + return self.convert_tokens_to_ids(self.cls_token) + + @property + def mask_token_id(self): + """ Id of the mask token in the vocabulary. E.g. when training a model with masked-language modeling. Log an error if used while not having been set. """ + return self.convert_tokens_to_ids(self.mask_token) + + @property + def additional_special_tokens_ids(self): + """ Ids of all the additional special tokens in the vocabulary (list of integers). Log an error if used while not having been set. """ + return self.convert_tokens_to_ids(self.additional_special_tokens) + + def __init__(self, max_len=None, **kwargs): + self._bos_token = None + self._eos_token = None + self._unk_token = None + self._sep_token = None + self._pad_token = None + self._cls_token = None + self._mask_token = None + self._additional_special_tokens = [] + + self.max_len = max_len if max_len is not None else int(1e12) + + # Added tokens + self.added_tokens_encoder = {} + self.added_tokens_decoder = {} + + # inputs and kwargs for saving and re-loading (see ``from_pretrained`` and ``save_pretrained``) + self.init_inputs = () + self.init_kwargs = {} + + for key, value in kwargs.items(): + if key in self.SPECIAL_TOKENS_ATTRIBUTES: + if key == 'additional_special_tokens': + assert isinstance(value, (list, tuple)) and all(isinstance(t, str) or (six.PY2 and isinstance(t, unicode)) for t in value) + else: + assert isinstance(value, str) or (six.PY2 and isinstance(value, unicode)) + setattr(self, key, value) + + + @classmethod + def from_pretrained(cls, *inputs, **kwargs): + r""" + Instantiate a :class:`~pytorch_transformers.PreTrainedTokenizer` (or a derived class) from a predefined tokenizer. + + Args: + pretrained_model_name_or_path: either: + + - a string with the `shortcut name` of a predefined tokenizer to load from cache or download, e.g.: ``bert-base-uncased``. + - a path to a `directory` containing vocabulary files required by the tokenizer, for instance saved using the :func:`~pytorch_transformers.PreTrainedTokenizer.save_pretrained` method, e.g.: ``./my_model_directory/``. + - (not applicable to all derived classes) a path or url to a single saved vocabulary file if and only if the tokenizer only requires a single vocabulary file (e.g. Bert, XLNet), e.g.: ``./my_model_directory/vocab.txt``. 
+
+        cache_dir: (`optional`) string:
+            Path to a directory in which downloaded predefined tokenizer vocabulary files should be cached if the standard cache should not be used.
+
+        force_download: (`optional`) boolean, default False:
+            Force a (re-)download of the vocabulary files, overriding any cached versions if they exist.
+
+        proxies: (`optional`) dict, default None:
+            A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.
+            The proxies are used on each request.
+
+        inputs: (`optional`) positional arguments: will be passed to the Tokenizer ``__init__`` method.
+
+        kwargs: (`optional`) keyword arguments: will be passed to the Tokenizer ``__init__`` method. Can be used to set special tokens like ``bos_token``, ``eos_token``, ``unk_token``, ``sep_token``, ``pad_token``, ``cls_token``, ``mask_token``, ``additional_special_tokens``. See parameters in the doc string of :class:`~pytorch_transformers.PreTrainedTokenizer` for details.
+
+        Examples::
+
+            # We can't instantiate directly the base class `PreTrainedTokenizer` so let's show our examples on a derived class: BertTokenizer
+
+            # Download vocabulary from S3 and cache.
+            tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
+
+            # If vocabulary files are in a directory (e.g. tokenizer was saved using `save_pretrained('./test/saved_model/')`)
+            tokenizer = BertTokenizer.from_pretrained('./test/saved_model/')
+
+            # If the tokenizer uses a single vocabulary file, you can point directly to this file
+            tokenizer = BertTokenizer.from_pretrained('./test/saved_model/my_vocab.txt')
+
+            # You can link tokens to special vocabulary when instantiating
+            tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', unk_token='<unk>')
+            # You should be sure '<unk>' is in the vocabulary when doing that.
+            # Otherwise use tokenizer.add_special_tokens({'unk_token': '<unk>'}) instead
+            assert tokenizer.unk_token == '<unk>'
+
+        """
+        return cls._from_pretrained(*inputs, **kwargs)
+
+
+    @classmethod
+    def _from_pretrained(cls, pretrained_model_name_or_path, *init_inputs, **kwargs):
+        cache_dir = kwargs.pop('cache_dir', None)
+        force_download = kwargs.pop('force_download', False)
+        proxies = kwargs.pop('proxies', None)
+
+        s3_models = list(cls.max_model_input_sizes.keys())
+        vocab_files = {}
+        init_configuration = {}
+        if pretrained_model_name_or_path in s3_models:
+            # Get the vocabulary from AWS S3 bucket
+            for file_id, map_list in cls.pretrained_vocab_files_map.items():
+                vocab_files[file_id] = map_list[pretrained_model_name_or_path]
+            if cls.pretrained_init_configuration and pretrained_model_name_or_path in cls.pretrained_init_configuration:
+                init_configuration = cls.pretrained_init_configuration[pretrained_model_name_or_path]
+        else:
+            # Get the vocabulary from local files
+            logger.info(
+                "Model name '{}' not found in model shortcut name list ({}). "
" + "Assuming '{}' is a path or url to a directory containing tokenizer files.".format( + pretrained_model_name_or_path, ', '.join(s3_models), + pretrained_model_name_or_path)) + + # Look for the tokenizer main vocabulary files + for file_id, file_name in cls.vocab_files_names.items(): + if os.path.isdir(pretrained_model_name_or_path): + # If a directory is provided we look for the standard filenames + full_file_name = os.path.join(pretrained_model_name_or_path, file_name) + else: + # If a path to a file is provided we use it (will only work for non-BPE tokenizer using a single vocabulary file) + full_file_name = pretrained_model_name_or_path + if not os.path.exists(full_file_name): + logger.info("Didn't find file {}. We won't load it.".format(full_file_name)) + full_file_name = None + vocab_files[file_id] = full_file_name + + # Look for the additional tokens files + additional_files_names = {'added_tokens_file': ADDED_TOKENS_FILE, + 'special_tokens_map_file': SPECIAL_TOKENS_MAP_FILE, + 'tokenizer_config_file': TOKENIZER_CONFIG_FILE, + } + + # If a path to a file was provided, get the parent directory + saved_directory = pretrained_model_name_or_path + if os.path.exists(saved_directory) and not os.path.isdir(saved_directory): + saved_directory = os.path.dirname(saved_directory) + + for file_id, file_name in additional_files_names.items(): + full_file_name = os.path.join(saved_directory, file_name) + if not os.path.exists(full_file_name): + logger.info("Didn't find file {}. We won't load it.".format(full_file_name)) + full_file_name = None + vocab_files[file_id] = full_file_name + + if all(full_file_name is None for full_file_name in vocab_files.values()): + logger.error( + "Model name '{}' was not found in model name list ({}). " + "We assumed '{}' was a path or url but couldn't find tokenizer files" + "at this path or url.".format( + pretrained_model_name_or_path, ', '.join(s3_models), + pretrained_model_name_or_path, )) + return None + + # Get files from url, cache, or disk depending on the case + try: + resolved_vocab_files = {} + for file_id, file_path in vocab_files.items(): + if file_path is None: + resolved_vocab_files[file_id] = None + else: + resolved_vocab_files[file_id] = cached_path(file_path, cache_dir=cache_dir, force_download=force_download, proxies=proxies) + except EnvironmentError as e: + if pretrained_model_name_or_path in s3_models: + logger.error("Couldn't reach server to download vocabulary.") + else: + logger.error( + "Model name '{}' was not found in model name list ({}). " + "We assumed '{}' was a path or url but couldn't find files {} " + "at this path or url.".format( + pretrained_model_name_or_path, ', '.join(s3_models), + pretrained_model_name_or_path, str(vocab_files.keys()))) + raise e + + for file_id, file_path in vocab_files.items(): + if file_path == resolved_vocab_files[file_id]: + logger.info("loading file {}".format(file_path)) + else: + logger.info("loading file {} from cache at {}".format( + file_path, resolved_vocab_files[file_id])) + + # Prepare tokenizer initialization kwargs + # Did we saved some inputs and kwargs to reload ? 
+
+        tokenizer_config_file = resolved_vocab_files.pop('tokenizer_config_file', None)
+        if tokenizer_config_file is not None:
+            init_kwargs = json.load(open(tokenizer_config_file, encoding="utf-8"))
+            saved_init_inputs = init_kwargs.pop('init_inputs', ())
+            if not init_inputs:
+                init_inputs = saved_init_inputs
+        else:
+            init_kwargs = init_configuration
+
+        # Update with newly provided kwargs
+        init_kwargs.update(kwargs)
+
+        # Set max length if needed
+        if pretrained_model_name_or_path in cls.max_model_input_sizes:
+            # if we're using a pretrained model, ensure the tokenizer
+            # won't index sequences longer than the number of positional embeddings
+            max_len = cls.max_model_input_sizes[pretrained_model_name_or_path]
+            if max_len is not None and isinstance(max_len, (int, float)):
+                init_kwargs['max_len'] = min(init_kwargs.get('max_len', int(1e12)), max_len)
+
+        # Merge resolved_vocab_files arguments in init_kwargs.
+        added_tokens_file = resolved_vocab_files.pop('added_tokens_file', None)
+        special_tokens_map_file = resolved_vocab_files.pop('special_tokens_map_file', None)
+        for args_name, file_path in resolved_vocab_files.items():
+            if args_name not in init_kwargs:
+                init_kwargs[args_name] = file_path
+        if special_tokens_map_file is not None:
+            special_tokens_map = json.load(open(special_tokens_map_file, encoding="utf-8"))
+            for key, value in special_tokens_map.items():
+                if key not in init_kwargs:
+                    init_kwargs[key] = value
+
+        # Instantiate tokenizer.
+        tokenizer = cls(*init_inputs, **init_kwargs)
+
+        # Save inputs and kwargs for saving and re-loading with ``save_pretrained``
+        tokenizer.init_inputs = init_inputs
+        tokenizer.init_kwargs = init_kwargs
+
+        # Add supplementary tokens.
+        if added_tokens_file is not None:
+            added_tok_encoder = json.load(open(added_tokens_file, encoding="utf-8"))
+            added_tok_decoder = {v: k for k, v in added_tok_encoder.items()}
+            tokenizer.added_tokens_encoder.update(added_tok_encoder)
+            tokenizer.added_tokens_decoder.update(added_tok_decoder)
+
+        return tokenizer
+
+    def save_pretrained(self, save_directory):
+        """ Save the tokenizer vocabulary files together with:
+                - added tokens,
+                - special-tokens-to-class-attributes-mapping,
+                - tokenizer instantiation positional and keywords inputs (e.g. do_lower_case for Bert).
+
+            This won't save modifications, other than added tokens and special token mappings, that you may have
+            applied to the tokenizer after instantiation (e.g. modifying tokenizer.do_lower_case after creation).
+
+            This method makes sure the full tokenizer can then be re-loaded using the :func:`~pytorch_transformers.PreTrainedTokenizer.from_pretrained` class method.
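As a sketch of the round trip this docstring promises (the directory name is arbitrary, and `tokenizer` stands for any instance of a concrete subclass such as the `BertTokenizer` from earlier in this diff):

```python
import os

os.makedirs('./saved_tokenizer', exist_ok=True)
tokenizer.save_pretrained('./saved_tokenizer')
# Writes the vocabulary file(s) plus special_tokens_map.json,
# added_tokens.json and tokenizer_config.json next to them.
reloaded = type(tokenizer).from_pretrained('./saved_tokenizer')
assert reloaded.encode("round trip") == tokenizer.encode("round trip")
```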
+ """ + if not os.path.isdir(save_directory): + logger.error("Saving directory ({}) should be a directory".format(save_directory)) + return + + special_tokens_map_file = os.path.join(save_directory, SPECIAL_TOKENS_MAP_FILE) + added_tokens_file = os.path.join(save_directory, ADDED_TOKENS_FILE) + tokenizer_config_file = os.path.join(save_directory, TOKENIZER_CONFIG_FILE) + + tokenizer_config = copy.deepcopy(self.init_kwargs) + tokenizer_config['init_inputs'] = copy.deepcopy(self.init_inputs) + for file_id in self.vocab_files_names.keys(): + tokenizer_config.pop(file_id, None) + + with open(tokenizer_config_file, 'w', encoding='utf-8') as f: + f.write(json.dumps(tokenizer_config, ensure_ascii=False)) + + with open(special_tokens_map_file, 'w', encoding='utf-8') as f: + f.write(json.dumps(self.special_tokens_map, ensure_ascii=False)) + + with open(added_tokens_file, 'w', encoding='utf-8') as f: + if self.added_tokens_encoder: + out_str = json.dumps(self.added_tokens_encoder, ensure_ascii=False) + else: + out_str = u"{}" + f.write(out_str) + + vocab_files = self.save_vocabulary(save_directory) + + return vocab_files + (special_tokens_map_file, added_tokens_file) + + def save_vocabulary(self, save_directory): + """ Save the tokenizer vocabulary to a directory. This method does *NOT* save added tokens + and special token mappings. + + Please use :func:`~pytorch_transformers.PreTrainedTokenizer.save_pretrained` `()` to save the full Tokenizer state if you want to reload it using the :func:`~pytorch_transformers.PreTrainedTokenizer.from_pretrained` class method. + """ + raise NotImplementedError + + def vocab_size(self): + """ Size of the base vocabulary (without the added tokens) """ + raise NotImplementedError + + def __len__(self): + """ Size of the full vocabulary with the added tokens """ + return self.vocab_size + len(self.added_tokens_encoder) + + def add_tokens(self, new_tokens): + """ + Add a list of new tokens to the tokenizer class. If the new tokens are not in the + vocabulary, they are added to it with indices starting from length of the current vocabulary. + + Args: + new_tokens: list of string. Each string is a token to add. Tokens are only added if they are not already in the vocabulary (tested by checking if the tokenizer assign the index of the ``unk_token`` to them). + + Returns: + Number of tokens added to the vocabulary. + + Examples:: + + # Let's see how to increase the vocabulary of Bert model and tokenizer + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + model = BertModel.from_pretrained('bert-base-uncased') + + num_added_toks = tokenizer.add_tokens(['new_tok1', 'my_new-tok2']) + print('We have added', num_added_toks, 'tokens') + model.resize_token_embeddings(len(tokenizer)) # Notice: resize_token_embeddings expect to receive the full size of the new vocabulary, i.e. the length of the tokenizer. 
+ """ + if not new_tokens: + return 0 + + to_add_tokens = [] + for token in new_tokens: + assert isinstance(token, str) or (six.PY2 and isinstance(token, unicode)) + if token != self.unk_token and \ + self.convert_tokens_to_ids(token) == self.convert_tokens_to_ids(self.unk_token): + to_add_tokens.append(token) + logger.info("Adding %s to the vocabulary", token) + + added_tok_encoder = dict((tok, len(self) + i) for i, tok in enumerate(to_add_tokens)) + added_tok_decoder = {v:k for k, v in added_tok_encoder.items()} + self.added_tokens_encoder.update(added_tok_encoder) + self.added_tokens_decoder.update(added_tok_decoder) + + return len(to_add_tokens) + + def add_special_tokens(self, special_tokens_dict): + """ + Add a dictionary of special tokens (eos, pad, cls...) to the encoder and link them + to class attributes. If special tokens are NOT in the vocabulary, they are added + to it (indexed starting from the last index of the current vocabulary). + + Using `add_special_tokens` will ensure your special tokens can be used in several ways: + + - special tokens are carefully handled by the tokenizer (they are never split) + - you can easily refer to special tokens using tokenizer class attributes like `tokenizer.cls_token`. This makes it easy to develop model-agnostic training and fine-tuning scripts. + + When possible, special tokens are already registered for provided pretrained models (ex: BertTokenizer cls_token is already registered to be '[CLS]' and XLM's one is also registered to be '') + + Args: + special_tokens_dict: dict of string. Keys should be in the list of predefined special attributes: + [``bos_token``, ``eos_token``, ``unk_token``, ``sep_token``, ``pad_token``, ``cls_token``, ``mask_token``, + ``additional_special_tokens``]. + + Tokens are only added if they are not already in the vocabulary (tested by checking if the tokenizer assign the index of the ``unk_token`` to them). + + Returns: + Number of tokens added to the vocabulary. + + Examples:: + + # Let's see how to add a new classification token to GPT-2 + tokenizer = GPT2Tokenizer.from_pretrained('gpt2') + model = GPT2Model.from_pretrained('gpt2') + + special_tokens_dict = {'cls_token': ''} + + num_added_toks = tokenizer.add_special_tokens(special_tokens_dict) + print('We have added', num_added_toks, 'tokens') + model.resize_token_embeddings(len(tokenizer)) # Notice: resize_token_embeddings expect to receive the full size of the new vocabulary, i.e. the length of the tokenizer. + + assert tokenizer.cls_token == '' + """ + if not special_tokens_dict: + return 0 + + added_tokens = 0 + for key, value in special_tokens_dict.items(): + assert key in self.SPECIAL_TOKENS_ATTRIBUTES + if key == 'additional_special_tokens': + assert isinstance(value, (list, tuple)) and all(isinstance(t, str) or (six.PY2 and isinstance(t, unicode)) for t in value) + added_tokens += self.add_tokens(value) + else: + assert isinstance(value, str) or (six.PY2 and isinstance(value, unicode)) + added_tokens += self.add_tokens([value]) + logger.info("Assigning %s to the %s key of the tokenizer", value, key) + setattr(self, key, value) + + return added_tokens + + def tokenize(self, text, **kwargs): + """ Converts a string in a sequence of tokens (string), using the tokenizer. + Split in words for word-based vocabulary or sub-words for sub-word-based + vocabularies (BPE/SentencePieces/WordPieces). + + Take care of added tokens. 
+ """ + def split_on_token(tok, text): + result = [] + split_text = text.split(tok) + for i, sub_text in enumerate(split_text): + sub_text = sub_text.strip() + if i == 0 and not sub_text: + result += [tok] + elif i == len(split_text) - 1: + if sub_text: + result += [sub_text] + else: + pass + else: + if sub_text: + result += [sub_text] + result += [tok] + return result + + def split_on_tokens(tok_list, text): + if not text: + return [] + if not tok_list: + return self._tokenize(text, **kwargs) + + tokenized_text = [] + text_list = [text] + for tok in tok_list: + tokenized_text = [] + for sub_text in text_list: + if sub_text not in self.added_tokens_encoder \ + and sub_text not in self.all_special_tokens: + tokenized_text += split_on_token(tok, sub_text) + else: + tokenized_text += [sub_text] + text_list = tokenized_text + + return sum((self._tokenize(token, **kwargs) if token not \ + in self.added_tokens_encoder and token not in self.all_special_tokens \ + else [token] for token in tokenized_text), []) + + added_tokens = list(self.added_tokens_encoder.keys()) + self.all_special_tokens + tokenized_text = split_on_tokens(added_tokens, text) + return tokenized_text + + def _tokenize(self, text, **kwargs): + """ Converts a string in a sequence of tokens (string), using the tokenizer. + Split in words for word-based vocabulary or sub-words for sub-word-based + vocabularies (BPE/SentencePieces/WordPieces). + + Do NOT take care of added tokens. + """ + raise NotImplementedError + + def convert_tokens_to_ids(self, tokens): + """ Converts a single token, or a sequence of tokens, (str/unicode) in a single integer id + (resp. a sequence of ids), using the vocabulary. + """ + if tokens is None: + return None + + if isinstance(tokens, str) or (six.PY2 and isinstance(tokens, unicode)): + return self._convert_token_to_id_with_added_voc(tokens) + + ids = [] + for token in tokens: + ids.append(self._convert_token_to_id_with_added_voc(token)) + if len(ids) > self.max_len: + logger.warning("Token indices sequence length is longer than the specified maximum sequence length " + "for this model ({} > {}). Running this sequence through the model will result in " + "indexing errors".format(len(ids), self.max_len)) + return ids + + def _convert_token_to_id_with_added_voc(self, token): + if token is None: + return None + + if token in self.added_tokens_encoder: + return self.added_tokens_encoder[token] + return self._convert_token_to_id(token) + + def _convert_token_to_id(self, token): + raise NotImplementedError + + def encode(self, text, text_pair=None, add_special_tokens=False, **kwargs): + """ + Converts a string in a sequence of ids (integer), using the tokenizer and vocabulary. + + Same as doing ``self.convert_tokens_to_ids(self.tokenize(text))``. + + Args: + text: The first sequence to be encoded. + text_pair: Optional second sequence to be encoded. + add_special_tokens: if set to ``True``, the sequences will be encoded with the special tokens relative + to their model. 
+ **kwargs: passed to the `self.tokenize()` method + """ + if text_pair is None: + if add_special_tokens: + return self.add_special_tokens_single_sentence(self.convert_tokens_to_ids(self.tokenize(text, **kwargs))) + else: + return self.convert_tokens_to_ids(self.tokenize(text, **kwargs)) + + first_sentence_tokens = [self._convert_token_to_id(token) for token in self.tokenize(text, **kwargs)] + second_sentence_tokens = [self._convert_token_to_id(token) for token in self.tokenize(text_pair, **kwargs)] + + if add_special_tokens: + return self.add_special_tokens_sentences_pair(first_sentence_tokens, second_sentence_tokens) + else: + return first_sentence_tokens, second_sentence_tokens + + def add_special_tokens_single_sentence(self, token_ids): + logger.warning("This tokenizer does not make use of special tokens. The sequence has been returned with no modification.") + return token_ids + + def add_special_tokens_sentences_pair(self, token_ids_0, token_ids_1): + logger.warning("This tokenizer does not make use of special tokens. The two sequences have been concatenated.") + return token_ids_0 + token_ids_1 + + def convert_ids_to_tokens(self, ids, skip_special_tokens=False): + """ Converts a single index or a sequence of indices (integers) in a token " + (resp.) a sequence of tokens (str/unicode), using the vocabulary and added tokens. + + Args: + skip_special_tokens: Don't decode special tokens (self.all_special_tokens). Default: False + """ + if isinstance(ids, int): + if ids in self.added_tokens_decoder: + return self.added_tokens_decoder[ids] + else: + return self._convert_id_to_token(ids) + tokens = [] + for index in ids: + if skip_special_tokens and index in self.all_special_ids: + continue + if index in self.added_tokens_decoder: + tokens.append(self.added_tokens_decoder[index]) + else: + tokens.append(self._convert_id_to_token(index)) + return tokens + + def _convert_id_to_token(self, index): + raise NotImplementedError + + def convert_tokens_to_string(self, tokens): + """ Converts a sequence of tokens (string) in a single string. + The most simple way to do it is ' '.join(self.convert_ids_to_tokens(token_ids)) + but we often want to remove sub-word tokenization artifacts at the same time. + """ + return ' '.join(self.convert_ids_to_tokens(tokens)) + + def decode(self, token_ids, skip_special_tokens=False, clean_up_tokenization_spaces=True): + """ + Converts a sequence of ids (integer) in a string, using the tokenizer and vocabulary + with options to remove special tokens and clean up tokenization spaces. + Similar to doing ``self.convert_tokens_to_string(self.convert_ids_to_tokens(token_ids))``. + """ + filtered_tokens = self.convert_ids_to_tokens(token_ids, skip_special_tokens=skip_special_tokens) + + # To avoid mixing byte-level and unicode for byte-level BPT + # we need to build string separatly for added tokens and byte-level tokens + # cf. 
https://github.com/huggingface/pytorch-transformers/issues/1133
+        sub_texts = []
+        current_sub_text = []
+        for token in filtered_tokens:
+            if skip_special_tokens and token in self.all_special_ids:
+                continue
+            if token in self.added_tokens_encoder:
+                if current_sub_text:
+                    sub_texts.append(self.convert_tokens_to_string(current_sub_text))
+                    current_sub_text = []
+                sub_texts.append(" " + token)
+            else:
+                current_sub_text.append(token)
+        if current_sub_text:
+            sub_texts.append(self.convert_tokens_to_string(current_sub_text))
+        text = ''.join(sub_texts)
+
+        if self._sep_token is not None and self._sep_token in text:
+            text = text.replace(self._cls_token, self._sep_token)
+            split_text = list(filter(lambda sentence: len(sentence) > 0, text.split(self._sep_token)))
+            if clean_up_tokenization_spaces:
+                clean_text = [self.clean_up_tokenization(text) for text in split_text]
+                return clean_text
+            else:
+                return split_text
+        else:
+            if clean_up_tokenization_spaces:
+                clean_text = self.clean_up_tokenization(text)
+                return clean_text
+            else:
+                return text
+
+    @property
+    def special_tokens_map(self):
+        """ A dictionary mapping special token class attribute (cls_token, unk_token...) to their
+            values ('<unk>', '<cls>'...)
+        """
+        set_attr = {}
+        for attr in self.SPECIAL_TOKENS_ATTRIBUTES:
+            attr_value = getattr(self, "_" + attr)
+            if attr_value:
+                set_attr[attr] = attr_value
+        return set_attr
+
+    @property
+    def all_special_tokens(self):
+        """ List all the special tokens ('<unk>', '<cls>'...) mapped to class attributes
+            (cls_token, unk_token...).
+        """
+        all_toks = []
+        set_attr = self.special_tokens_map
+        for attr_value in set_attr.values():
+            all_toks = all_toks + (list(attr_value) if isinstance(attr_value, (list, tuple)) else [attr_value])
+        all_toks = list(set(all_toks))
+        return all_toks
+
+    @property
+    def all_special_ids(self):
+        """ List the vocabulary indices of the special tokens ('<unk>', '<cls>'...) mapped to
+            class attributes (cls_token, unk_token...).
+        """
+        all_toks = self.all_special_tokens
+        all_ids = list(self._convert_token_to_id(t) for t in all_toks)
+        return all_ids
+
+    @staticmethod
+    def clean_up_tokenization(out_string):
+        """ Clean up a list of simple English tokenization artifacts like spaces before punctuation and abbreviated forms.
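A short sketch of the decode path just defined, including the clean-up step described here (`tokenizer` is a concrete subclass instance; the exact strings in the comments are illustrative, assuming Bert-style punctuation splitting):

```python
ids = tokenizer.encode("Don't stop.")
print(tokenizer.decode(ids, clean_up_tokenization_spaces=False))  # e.g. "Don ' t stop ."
print(tokenizer.decode(ids))                                      # e.g. "Don't stop."
# clean_up_tokenization() collapses detokenization artifacts such as " ' " and " ."
```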
+ """ + out_string = out_string.replace(' .', '.').replace(' ?', '?').replace(' !', '!').replace(' ,', ',' + ).replace(" ' ", "'").replace(" n't", "n't").replace(" 'm", "'m").replace(" do not", " don't" + ).replace(" 's", "'s").replace(" 've", "'ve").replace(" 're", "'re") + return out_string diff --git a/core/models/latent_diffusion/vae/optimus_modules/vocab/bert-base-cased-vocab.txt b/core/models/latent_diffusion/vae/optimus_modules/vocab/bert-base-cased-vocab.txt new file mode 100644 index 0000000000000000000000000000000000000000..2ea941cc79a6f3d7985ca6991ef4f67dad62af04 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/vocab/bert-base-cased-vocab.txt @@ -0,0 +1,28996 @@ +[PAD] +[unused1] +[unused2] +[unused3] +[unused4] +[unused5] +[unused6] +[unused7] +[unused8] +[unused9] +[unused10] +[unused11] +[unused12] +[unused13] +[unused14] +[unused15] +[unused16] +[unused17] +[unused18] +[unused19] +[unused20] +[unused21] +[unused22] +[unused23] +[unused24] +[unused25] +[unused26] +[unused27] +[unused28] +[unused29] +[unused30] +[unused31] +[unused32] +[unused33] +[unused34] +[unused35] +[unused36] +[unused37] +[unused38] +[unused39] +[unused40] +[unused41] +[unused42] +[unused43] +[unused44] +[unused45] +[unused46] +[unused47] +[unused48] +[unused49] +[unused50] +[unused51] +[unused52] +[unused53] +[unused54] +[unused55] +[unused56] +[unused57] +[unused58] +[unused59] +[unused60] +[unused61] +[unused62] +[unused63] +[unused64] +[unused65] +[unused66] +[unused67] +[unused68] +[unused69] +[unused70] +[unused71] +[unused72] +[unused73] +[unused74] +[unused75] +[unused76] +[unused77] +[unused78] +[unused79] +[unused80] +[unused81] +[unused82] +[unused83] +[unused84] +[unused85] +[unused86] +[unused87] +[unused88] +[unused89] +[unused90] +[unused91] +[unused92] +[unused93] +[unused94] +[unused95] +[unused96] +[unused97] +[unused98] +[unused99] +[UNK] +[CLS] +[SEP] +[MASK] +[unused100] +[unused101] +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? 
+@ +A +B +C +D +E +F +G +H +I +J +K +L +M +N +O +P +Q +R +S +T +U +V +W +X +Y +Z +[ +\ +] +^ +_ +` +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +¡ +¢ +£ +¥ +§ +¨ +© +ª +« +¬ +® +° +± +² +³ +´ +µ +¶ +· +¹ +º +» +¼ +½ +¾ +¿ +À +Á + +Ä +Å +Æ +Ç +È +É +Í +Î +Ñ +Ó +Ö +× +Ø +Ú +Ü +Þ +ß +à +á +â +ã +ä +å +æ +ç +è +é +ê +ë +ì +í +î +ï +ð +ñ +ò +ó +ô +õ +ö +÷ +ø +ù +ú +û +ü +ý +þ +ÿ +Ā +ā +ă +ą +Ć +ć +Č +č +ď +Đ +đ +ē +ė +ę +ě +ğ +ġ +Ħ +ħ +ĩ +Ī +ī +İ +ı +ļ +Ľ +ľ +Ł +ł +ń +ņ +ň +ŋ +Ō +ō +ŏ +ő +Œ +œ +ř +Ś +ś +Ş +ş +Š +š +Ţ +ţ +ť +ũ +ū +ŭ +ů +ű +ų +ŵ +ŷ +ź +Ż +ż +Ž +ž +Ə +ƒ +ơ +ư +ǎ +ǐ +ǒ +ǔ +ǫ +Ș +ș +Ț +ț +ɐ +ɑ +ɔ +ɕ +ə +ɛ +ɡ +ɣ +ɨ +ɪ +ɲ +ɾ +ʀ +ʁ +ʂ +ʃ +ʊ +ʋ +ʌ +ʐ +ʑ +ʒ +ʔ +ʰ +ʲ +ʳ +ʷ +ʻ +ʼ +ʾ +ʿ +ˈ +ː +ˡ +ˢ +ˣ +́ +̃ +̍ +̯ +͡ +Α +Β +Γ +Δ +Ε +Η +Θ +Ι +Κ +Λ +Μ +Ν +Ο +Π +Σ +Τ +Φ +Χ +Ψ +Ω +ά +έ +ή +ί +α +β +γ +δ +ε +ζ +η +θ +ι +κ +λ +μ +ν +ξ +ο +π +ρ +ς +σ +τ +υ +φ +χ +ψ +ω +ό +ύ +ώ +І +Ј +А +Б +В +Г +Д +Е +Ж +З +И +К +Л +М +Н +О +П +Р +С +Т +У +Ф +Х +Ц +Ч +Ш +Э +Ю +Я +а +б +в +г +д +е +ж +з +и +й +к +л +м +н +о +п +р +с +т +у +ф +х +ц +ч +ш +щ +ъ +ы +ь +э +ю +я +ё +і +ї +ј +њ +ћ +Ա +Հ +ա +ե +ի +կ +մ +յ +ն +ո +ս +տ +ր +ւ +ְ +ִ +ֵ +ֶ +ַ +ָ +ֹ +ּ +א +ב +ג +ד +ה +ו +ז +ח +ט +י +כ +ל +ם +מ +ן +נ +ס +ע +פ +צ +ק +ר +ש +ת +، +ء +آ +أ +إ +ئ +ا +ب +ة +ت +ث +ج +ح +خ +د +ذ +ر +ز +س +ش +ص +ض +ط +ظ +ع +غ +ف +ق +ك +ل +م +ن +ه +و +ى +ي +َ +ِ +ٹ +پ +چ +ک +گ +ہ +ی +ے +ं +आ +क +ग +च +ज +ण +त +द +ध +न +प +ब +भ +म +य +र +ल +व +श +ष +स +ह +ा +ि +ी +ु +े +ो +् +। +॥ +আ +ই +এ +ও +ক +খ +গ +চ +ছ +জ +ট +ত +থ +দ +ধ +ন +প +ব +ম +য +র +ল +শ +স +হ +় +া +ি +ী +ু +ে +ো +্ +য় +க +த +ப +ம +ய +ர +ல +வ +ா +ி +ு +் +ร +་ +ག +ང +ད +ན +བ +མ +ར +ལ +ས +ི +ུ +ེ +ོ +ა +ე +ი +ლ +ნ +ო +რ +ს +ᴬ +ᴵ +ᵀ +ᵃ +ᵇ +ᵈ +ᵉ +ᵍ +ᵏ +ᵐ +ᵒ +ᵖ +ᵗ +ᵘ +ᵢ +ᵣ +ᵤ +ᵥ +ᶜ +ᶠ +ḍ +Ḥ +ḥ +Ḩ +ḩ +ḳ +ṃ +ṅ +ṇ +ṛ +ṣ +ṭ +ạ +ả +ấ +ầ +ẩ +ậ +ắ +ế +ề +ể +ễ +ệ +ị +ọ +ố +ồ +ổ +ộ +ớ +ờ +ợ +ụ +ủ +ứ +ừ +ử +ữ +ự +ỳ +ỹ +ἀ +ἐ +ὁ +ὐ +ὰ +ὶ +ὸ +ῆ +ῖ +ῦ +ῶ +‐ +‑ +‒ +– +— +― +‖ +‘ +’ +‚ +“ +” +„ +† +‡ +• +… +‰ +′ +″ +⁄ +⁰ +ⁱ +⁴ +⁵ +⁶ +⁷ +⁸ +⁹ +⁺ +⁻ +ⁿ +₀ +₁ +₂ +₃ +₄ +₅ +₆ +₇ +₈ +₉ +₊ +₍ +₎ +ₐ +ₑ +ₒ +ₓ +ₕ +ₖ +ₘ +ₙ +ₚ +ₛ +ₜ +₤ +€ +₱ +₹ +ℓ +№ +ℝ +⅓ +← +↑ +→ +↔ +⇌ +⇒ +∂ +∈ +− +∗ +∘ +√ +∞ +∧ +∨ +∩ +∪ +≈ +≠ +≡ +≤ +≥ +⊂ +⊆ +⊕ +⋅ +─ +│ +■ +● +★ +☆ +☉ +♠ +♣ +♥ +♦ +♭ +♯ +⟨ +⟩ +ⱼ +、 +。 +《 +》 +「 +」 +『 +』 +〜 +い +う +え +お +か +き +く +け +こ +さ +し +す +せ +そ +た +ち +つ +て +と +な +に +の +は +ひ +ま +み +む +め +も +や +ゆ +よ +ら +り +る +れ +ん +ア +ィ +イ +ウ +エ +オ +カ +ガ +キ +ク +グ +コ +サ +シ +ジ +ス +ズ +タ +ダ +ッ +テ +デ +ト +ド +ナ +ニ +ハ +バ +パ +フ +ブ +プ +マ +ミ +ム +ャ +ュ +ラ +リ +ル +レ +ロ +ン +・ +ー +一 +三 +上 +下 +中 +事 +二 +井 +京 +人 +亻 +仁 +佐 +侍 +光 +公 +力 +北 +十 +南 +原 +口 +史 +司 +吉 +同 +和 +囗 +国 +國 +土 +城 +士 +大 +天 +太 +夫 +女 +子 +宀 +安 +宮 +宿 +小 +尚 +山 +島 +川 +州 +平 +年 +心 +愛 +戸 +文 +新 +方 +日 +明 +星 +書 +月 +木 +本 +李 +村 +東 +松 +林 +正 +武 +氏 +水 +氵 +江 +河 +海 +版 +犬 +王 +生 +田 +白 +皇 +省 +真 +石 +社 +神 +竹 +美 +義 +花 +藤 +西 +谷 +車 +辶 +道 +郎 +郡 +部 +野 +金 +長 +門 +陽 +青 +食 +馬 +高 +龍 +龸 +사 +씨 +의 +이 +한 +fi +fl +! 
+( +) +, +- +/ +: +the +of +and +to +in +was +The +is +for +as +on +with +that +##s +his +by +he +at +from +it +her +He +had +an +were +you +be +In +she +are +but +which +It +not +or +have +my +him +one +this +me +has +also +up +their +first +out +who +been +they +She +into +all +would +its +##ing +time +two +##a +##e +said +about +when +over +more +other +can +after +back +them +then +##ed +there +like +so +only +##n +could +##d +##i +##y +what +no +##o +where +This +made +than +if +You +##ly +through +we +before +##r +just +some +##er +years +do +New +##t +down +between +new +now +will +three +most +On +around +year +used +such +being +well +during +They +know +against +under +later +did +part +known +off +while +His +re +... +##l +people +until +way +American +didn +University +your +both +many +get +United +became +head +There +second +As +work +any +But +still +again +born +even +eyes +After +including +de +took +And +long +team +season +family +see +right +same +called +name +because +film +don +10 +found +much +school +##es +going +won +place +away +We +day +left +John +000 +hand +since +World +these +how +make +number +each +life +area +man +four +go +No +here +very +National +##m +played +released +never +began +States +album +home +last +too +held +several +May +own +##on +take +end +School +##h +ll +series +What +want +use +another +city +When +2010 +side +At +may +That +came +face +June +think +game +those +high +March +early +September +##al +2011 +looked +July +state +small +thought +went +January +October +##u +based +August +##us +world +good +April +York +us +12 +2012 +2008 +For +2009 +group +along +few +South +little +##k +following +November +something +2013 +December +set +2007 +old +2006 +2014 +located +##an +music +County +City +former +##in +room +ve +next +All +##man +got +father +house +##g +body +15 +20 +18 +started +If +2015 +town +our +line +War +large +population +named +British +company +member +five +My +single +##en +age +State +moved +February +11 +Her +should +century +government +built +come +best +show +However +within +look +men +door +without +need +wasn +2016 +water +One +system +knew +every +died +League +turned +asked +North +St +wanted +building +received +song +served +though +felt +##ia +station +band +##ers +local +public +himself +different +death +say +##1 +30 +##2 +2005 +16 +night +behind +children +English +members +near +saw +together +son +14 +voice +village +13 +hands +help +##3 +due +French +London +top +told +open +published +third +2017 +play +across +During +put +final +often +include +25 +##le +main +having +2004 +once +ever +let +book +led +gave +late +front +find +club +##4 +German +included +species +College +form +opened +mother +women +enough +West +must +2000 +power +really +17 +making +half +##6 +order +might +##is +given +million +times +days +point +full +service +With +km +major +##7 +original +become +seen +II +north +six +##te +love +##0 +national +International +##5 +24 +So +District +lost +run +couldn +career +always +##9 +2003 +##th +country +##z +House +air +tell +south +worked +woman +player +##A +almost +war +River +##ic +married +continued +Then +James +close +black +short +##8 +##na +using +history +returned +light +car +##ra +sure +William +things +General +##ry +2002 +better +support +100 +among +From +feet +King +anything +21 +19 +established +district +2001 +feel +great +##ton +level +Cup +These +written +games +others +already +title +story +##p +law +thing +US +record +role +however +By +students +England 
+white +control +least +inside +land +##C +22 +give +community +hard +##ie +non +##c +produced +George +round +period +Park +business +various +##ne +does +present +wife +far +taken +per +reached +David +able +version +working +young +live +created +joined +East +living +appeared +case +High +done +23 +important +President +Award +France +position +office +looking +total +general +class +To +production +##S +football +party +brother +keep +mind +free +Street +hair +announced +development +either +nothing +moment +Church +followed +wrote +why +India +San +election +1999 +lead +How +##ch +##rs +words +European +course +considered +America +arms +Army +political +##la +28 +26 +west +east +ground +further +church +less +site +First +Not +Australia +toward +California +##ness +described +works +An +Council +heart +past +military +27 +##or +heard +field +human +soon +founded +1998 +playing +trying +##x +##ist +##ta +television +mouth +although +taking +win +fire +Division +##ity +Party +Royal +program +Some +Don +Association +According +tried +TV +Paul +outside +daughter +Best +While +someone +match +recorded +Canada +closed +region +Air +above +months +elected +##da +##ian +road +##ar +brought +move +1997 +leave +##um +Thomas +1996 +am +low +Robert +formed +person +services +points +Mr +miles +##b +stop +rest +doing +needed +international +release +floor +start +sound +call +killed +real +dark +research +finished +language +Michael +professional +change +sent +50 +upon +29 +track +hit +event +2018 +term +example +Germany +similar +return +##ism +fact +pulled +stood +says +ran +information +yet +result +developed +girl +##re +God +1995 +areas +signed +decided +##ment +Company +seemed +##el +co +turn +race +common +video +Charles +Indian +##ation +blood +art +red +##able +added +rather +1994 +met +director +addition +design +average +minutes +##ies +##ted +available +bed +coming +friend +idea +kind +Union +Road +remained +##ting +everything +##ma +running +care +finally +Chinese +appointed +1992 +Australian +##ley +popular +mean +teams +probably +##land +usually +project +social +Championship +possible +word +Russian +instead +mi +herself +##T +Peter +Hall +Center +seat +style +money +1993 +else +Department +table +Music +current +31 +features +special +events +character +Two +square +sold +debut +##v +process +Although +Since +##ka +40 +Central +currently +education +placed +lot +China +quickly +forward +seven +##ling +Europe +arm +performed +Japanese +1991 +Henry +Now +Dr +##ion +week +Group +myself +big +UK +Washington +ten +deep +1990 +Club +Japan +space +La +directed +smile +episode +hours +whole +##de +##less +Why +wouldn +designed +strong +training +changed +Society +stage +involved +hadn +towards +leading +police +eight +kept +Institute +study +largest +child +eventually +private +modern +Court +throughout +getting +originally +attack +##E +talk +Great +longer +songs +alone +##ine +wide +dead +walked +shot +##ri +Oh +force +##st +Art +today +friends +Island +Richard +1989 +center +construction +believe +size +White +ship +completed +##B +gone +Just +rock +sat +##R +radio +below +entire +families +league +includes +type +lived +official +range +hold +featured +Most +##ter +president +passed +means +##f +forces +lips +Mary +Do +guitar +##ce +food +wall +Of +spent +Its +performance +hear +##P +Western +reported +sister +##et +morning +##M +especially +##ive +Minister +itself +post +bit +groups +1988 +##tion +Black +##ng +Well +raised +sometimes +Canadian +Paris +Spanish +replaced +schools 
+Academy +leaving +central +female +Christian +Jack +whose +college +onto +provided +##D +##ville +players +actually +stopped +##son +Museum +doesn +##ts +books +fight +allowed +##ur +beginning +Records +awarded +parents +coach +##os +Red +saying +##ck +Smith +Yes +Lake +##L +aircraft +1987 +##ble +previous +ft +action +Italian +African +happened +vocals +Act +future +court +##ge +1986 +degree +phone +##ro +Is +countries +winning +breath +Love +river +matter +Lord +Other +list +self +parts +##ate +provide +cut +shows +plan +1st +interest +##ized +Africa +stated +Sir +fell +owned +earlier +ended +competition +attention +1985 +lower +nearly +bad +older +stay +Saint +##se +certain +1984 +fingers +blue +try +fourth +Grand +##as +king +##nt +makes +chest +movement +states +moving +data +introduced +model +date +section +Los +deal +##I +skin +entered +middle +success +Texas +##w +summer +island +##N +Republic +length +husband +1980 +##ey +reason +anyone +forced +via +base +500 +job +covered +Festival +Roman +successful +rights +cover +Man +writing +Ireland +##F +related +goal +takes +buildings +true +weeks +1983 +Because +opening +novel +ISBN +meet +gold +##ous +mid +km² +standing +Football +Chicago +shook +whom +##ki +1982 +Day +feeling +scored +boy +higher +Force +leader +heavy +fall +question +sense +army +Second +energy +meeting +themselves +kill +##am +board +census +##ya +##ns +mine +meant +market +required +battle +campaign +attended +approximately +Kingdom +runs +active +##ha +contract +clear +previously +health +1979 +Arts +complete +Catholic +couple +units +##ll +##ty +Committee +shoulder +sea +systems +listed +##O +caught +tournament +##G +northern +author +Film +Your +##men +holding +offered +personal +1981 +southern +artist +traditional +studio +200 +capital +##ful +regular +ask +giving +organization +month +news +Are +read +managed +helped +studied +student +defeated +natural +industry +Year +noted +decision +Government +quite +##id +smiled +1972 +Maybe +tracks +##ke +Mark +al +media +engine +hour +Their +relationship +plays +property +structure +1976 +ago +Hill +Martin +1978 +ready +Many +Like +Bay +immediately +generally +Italy +Greek +practice +caused +division +significant +Joseph +speed +Let +thinking +completely +1974 +primary +mostly +##field +##K +1975 +##to +Even +writer +##led +dropped +magazine +collection +understand +route +highest +particular +films +lines +network +Science +loss +carried +direction +green +1977 +location +producer +according +Women +Queen +neck +thus +independent +view +1970 +Angeles +Soviet +distance +problem +Board +tour +western +income +appearance +access +Mexico +nodded +street +surface +arrived +believed +Old +1968 +1973 +becoming +whether +1945 +figure +singer +stand +Following +issue +window +wrong +pain +everyone +lives +issues +park +slowly +la +act +##va +bring +Lee +operations +key +comes +fine +cold +famous +Navy +1971 +Me +additional +individual +##ner +Zealand +goals +county +contains +Service +minute +2nd +reach +talking +particularly +##ham +movie +Director +glass +paper +studies +##co +railway +standard +Education +45 +represented +Chief +Louis +launched +Star +terms +60 +1969 +experience +watched +Another +Press +Tom +staff +starting +subject +break +Virginia +nine +eye +##age +evidence +foot +##est +companies +Prince +##V +gun +create +Big +People +guy +Green +simply +numerous +##line +increased +twenty +##ga +##do +1967 +award +officer +stone +Before +material +Northern +grew +male +plant +Life +legs +step +Al +unit +35 +except 
+answer +##U +report +response +Edward +commercial +edition +trade +science +##ca +Irish +Law +shown +rate +failed +##ni +remains +changes +mm +limited +larger +Later +cause +waiting +Time +##wood +cost +Bill +manager +activities +likely +allow +operated +retired +##ping +65 +directly +Who +associated +effect +hell +Florida +straight +hot +Valley +management +girls +expected +eastern +Mike +chance +cast +centre +chair +hurt +problems +##li +walk +programs +Team +characters +Battle +edge +pay +maybe +corner +majority +medical +Joe +Summer +##io +attempt +Pacific +command +Radio +##by +names +municipality +1964 +train +economic +Brown +feature +sex +source +agreed +remember +Three +1966 +1965 +Pennsylvania +victory +senior +annual +III +Southern +results +Sam +serving +religious +Jones +appears +##der +despite +claimed +Both +musical +matches +fast +security +selected +Young +double +complex +hospital +chief +Times +##ve +Championships +filled +Public +Despite +beautiful +Research +plans +Province +##ally +Wales +##ko +artists +metal +nearby +Spain +##il +32 +houses +supported +piece +##no +stared +recording +nature +legal +Russia +##ization +remaining +looks +##sh +bridge +closer +cases +scene +marriage +Little +##é +uses +Earth +specific +Frank +theory +Good +discovered +referred +bass +culture +university +presented +Congress +##go +metres +continue +1960 +isn +Awards +meaning +cell +composed +separate +Series +forms +Blue +cross +##tor +increase +test +computer +slightly +Where +Jewish +Town +tree +status +1944 +variety +responsible +pretty +initially +##way +realized +pass +provides +Captain +Alexander +recent +score +broke +Scott +drive +financial +showed +Line +stories +ordered +soldiers +genus +operation +gaze +sitting +society +Only +hope +actor +follow +Empire +Yeah +technology +happy +focus +policy +spread +situation +##ford +##ba +Mrs +watch +Can +1963 +Commission +touch +earned +troops +Under +1962 +individuals +cannot +19th +##lin +mile +expression +exactly +suddenly +weight +dance +stepped +places +appear +difficult +Railway +anti +numbers +kilometres +star +##ier +department +ice +Britain +removed +Once +##lo +Boston +value +##ant +mission +trees +Order +sports +join +serve +Major +poor +Poland +mainly +Theatre +pushed +Station +##it +Lady +federal +silver +##ler +foreign +##ard +Eastern +##den +box +hall +subsequently +lies +acquired +1942 +ancient +CD +History +Jean +beyond +##ger +El +##les +growing +championship +native +Parliament +Williams +watching +direct +overall +offer +Also +80 +Secretary +spoke +Latin +ability +##ated +safe +presence +##ial +headed +regional +planned +1961 +Johnson +throat +consists +##W +extended +Or +bar +walls +Chris +stations +politician +Olympics +influence +share +fighting +speak +hundred +Carolina +die +stars +##tic +color +Chapter +##ish +fear +sleep +goes +Francisco +oil +Bank +sign +physical +##berg +Dutch +seasons +##rd +Games +Governor +sorry +lack +Centre +memory +baby +smaller +charge +Did +multiple +ships +shirt +Assembly +amount +leaves +3rd +Foundation +conditions +1943 +Rock +Democratic +Daniel +##at +winner +products +##ina +store +latter +Professor +civil +prior +host +1956 +soft +vote +needs +Each +rules +1958 +pressure +letter +normal +proposed +levels +records +1959 +paid +intended +Victoria +purpose +okay +historical +issued +1980s +broadcast +rule +simple +picked +firm +Sea +1941 +Elizabeth +1940 +serious +featuring +highly +graduated +mentioned +choice +1948 +replied +percent +Scotland +##hi +females +constructed +1957 
+settled +Steve +recognized +cities +crew +glanced +kiss +competed +flight +knowledge +editor +More +Conference +##H +fifth +elements +##ee +##tes +function +newspaper +recently +Miss +cultural +brown +twice +Office +1939 +truth +Creek +1946 +households +USA +1950 +quality +##tt +border +seconds +destroyed +pre +wait +ahead +build +image +90 +cars +##mi +33 +promoted +professor +et +bank +medal +text +broken +Middle +revealed +sides +wing +seems +channel +1970s +Ben +loved +effort +officers +Will +##ff +70 +Israel +Jim +upper +fully +label +Jr +assistant +powerful +pair +positive +##ary +gives +1955 +20th +races +remain +kitchen +primarily +##ti +Sydney +easy +Tour +whispered +buried +300 +News +Polish +1952 +Duke +Columbia +produce +accepted +00 +approach +minor +1947 +Special +44 +Asian +basis +visit +Fort +Civil +finish +formerly +beside +leaned +##ite +median +rose +coast +effects +supposed +Cross +##hip +Corps +residents +Jackson +##ir +Bob +basketball +36 +Asia +seem +Bishop +Book +##ber +ring +##ze +owner +BBC +##ja +transferred +acting +De +appearances +walking +Le +press +grabbed +1954 +officially +1953 +##pe +risk +taught +review +##X +lay +##well +council +Avenue +seeing +losing +Ohio +Super +province +ones +travel +##sa +projects +equipment +spot +Berlin +administrative +heat +potential +shut +capacity +elections +growth +fought +Republican +mixed +Andrew +teacher +turning +strength +shoulders +beat +wind +1949 +Health +follows +camp +suggested +perhaps +Alex +mountain +contact +divided +candidate +fellow +34 +Show +necessary +workers +ball +horse +ways +questions +protect +gas +activity +younger +bottom +founder +Scottish +screen +treatment +easily +com +##house +dedicated +Master +warm +Night +Georgia +Long +von +##me +perfect +website +1960s +piano +efforts +##ide +Tony +sort +offers +Development +Simon +executive +##nd +save +Over +Senate +1951 +1990s +draw +master +Police +##ius +renamed +boys +initial +prominent +damage +Co +##ov +##za +online +begin +occurred +captured +youth +Top +account +tells +Justice +conducted +forest +##town +bought +teeth +Jersey +##di +purchased +agreement +Michigan +##ure +campus +prison +becomes +product +secret +guess +Route +huge +types +drums +64 +split +defeat +estate +housing +##ot +brothers +Coast +declared +happen +titled +therefore +sun +commonly +alongside +Stadium +library +Home +article +steps +telling +slow +assigned +refused +laughed +wants +Nick +wearing +Rome +Open +##ah +Hospital +pointed +Taylor +lifted +escape +participated +##j +drama +parish +Santa +##per +organized +mass +pick +Airport +gets +Library +unable +pull +Live +##ging +surrounding +##ries +focused +Adam +facilities +##ning +##ny +38 +##ring +notable +era +connected +gained +operating +laid +Regiment +branch +defined +Christmas +machine +Four +academic +Iran +adopted +concept +Men +compared +search +traffic +Max +Maria +greater +##ding +widely +##burg +serves +1938 +37 +Go +hotel +shared +typically +scale +1936 +leg +suffered +yards +pieces +Ministry +Wilson +episodes +empty +1918 +safety +continues +yellow +historic +settlement +400 +Come +Corporation +enemy +content +picture +evening +territory +method +trial +solo +driver +Here +##ls +entrance +Prize +spring +whatever +##ent +75 +##ji +reading +Arthur +##cy +Our +clothes +Prime +Illinois +Kong +code +##ria +sit +Harry +Federal +chosen +administration +bodies +begins +stomach +Though +seats +Hong +density +Sun +leaders +Field +museum +chart +platform +languages +##ron +birth +holds +Gold +##un +fish +combined 
+##ps +4th +1937 +largely +captain +trust +Game +van +boat +Oxford +basic +beneath +Islands +painting +nice +Toronto +path +males +sources +block +conference +parties +murder +clubs +crowd +calling +About +Business +peace +knows +lake +speaking +stayed +Brazil +allowing +Born +unique +thick +Technology +##que +receive +des +semi +alive +noticed +format +##ped +coffee +digital +##ned +handed +guard +tall +faced +setting +plants +partner +claim +reduced +temple +animals +determined +classes +##out +estimated +##ad +Olympic +providing +Massachusetts +learned +Inc +Philadelphia +Social +carry +42 +possibly +hosted +tonight +respectively +Today +shape +Mount +roles +designated +brain +etc +Korea +thoughts +Brian +Highway +doors +background +drew +models +footballer +tone +turns +1935 +quiet +tower +wood +bus +write +software +weapons +flat +marked +1920 +newly +tight +Eric +finger +Journal +FC +Van +rise +critical +Atlantic +granted +returning +communities +humans +quick +39 +48 +ranked +sight +pop +Swedish +Stephen +card +analysis +attacked +##wa +Sunday +identified +Jason +champion +situated +1930 +expanded +tears +##nce +reaching +Davis +protection +Emperor +positions +nominated +Bridge +tax +dress +allows +avoid +leadership +killing +actress +guest +steel +knowing +electric +cells +disease +grade +unknown +##ium +resulted +Pakistan +confirmed +##ged +tongue +covers +##Y +roof +entirely +applied +votes +drink +interview +exchange +Township +reasons +##ised +page +calls +dog +agent +nose +teaching +##ds +##ists +advanced +wish +Golden +existing +vehicle +del +1919 +develop +attacks +pressed +Sports +planning +resulting +facility +Sarah +notes +1933 +Class +Historic +winter +##mo +audience +Community +household +Netherlands +creation +##ize +keeping +1914 +claims +dry +guys +opposite +##ak +explained +Ontario +secondary +difference +Francis +actions +organizations +yard +animal +Up +Lewis +titles +Several +1934 +Ryan +55 +Supreme +rolled +1917 +distribution +figures +afraid +rural +yourself +##rt +sets +barely +Instead +passing +awards +41 +silence +authority +occupied +environment +windows +engineering +surprised +flying +crime +reports +Mountain +powers +driving +succeeded +reviews +1929 +Head +missing +Song +Jesus +opportunity +inspired +ends +albums +conversation +impact +injury +surprise +billion +learning +heavily +oldest +union +creating +##ky +festival +literature +letters +sexual +##tte +apartment +Final +comedy +nation +orders +##sen +contemporary +Power +drawn +existence +connection +##ating +Post +Junior +remembered +message +Medal +castle +note +engineer +sounds +Beach +crossed +##dy +ear +scientific +sales +##ai +theme +starts +clearly +##ut +trouble +##gan +bag +##han +BC +sons +1928 +silent +versions +daily +Studies +ending +Rose +guns +1932 +headquarters +reference +obtained +Squadron +concert +none +du +Among +##don +prevent +Member +answered +staring +Between +##lla +portion +drug +liked +association +performances +Nations +formation +Castle +lose +learn +scoring +relatively +quarter +47 +Premier +##ors +Sweden +baseball +attempted +trip +worth +perform +airport +fields +enter +honor +Medical +rear +commander +officials +condition +supply +materials +52 +Anna +volume +threw +Persian +43 +interested +Gallery +achieved +visited +laws +relief +Area +Matt +singles +Lieutenant +Country +fans +Cambridge +sky +Miller +effective +tradition +Port +##ana +minister +extra +entitled +System +sites +authorities +acres +committee +racing +1931 +desk +trains +ass +weren +Family +farm 
+##ance +industrial +##head +iron +49 +abandoned +Out +Holy +chairman +waited +frequently +display +Light +transport +starring +Patrick +Engineering +eat +FM +judge +reaction +centuries +price +##tive +Korean +defense +Get +arrested +1927 +send +urban +##ss +pilot +Okay +Media +reality +arts +soul +thirty +##be +catch +generation +##nes +apart +Anne +drop +See +##ving +sixth +trained +Management +magic +cm +height +Fox +Ian +resources +vampire +principal +Was +haven +##au +Walter +Albert +rich +1922 +causing +entry +##ell +shortly +46 +worry +doctor +composer +rank +Network +bright +showing +regions +1924 +wave +carrying +kissed +finding +missed +Earl +lying +target +vehicles +Military +controlled +dinner +##board +briefly +lyrics +motion +duty +strange +attempts +invited +kg +villages +5th +Land +##mer +Christ +prepared +twelve +check +thousand +earth +copies +en +transfer +citizens +Americans +politics +nor +theatre +Project +##bo +clean +rooms +laugh +##ran +application +contained +anyway +containing +Sciences +1925 +rare +speech +exist +1950s +falling +passenger +##im +stands +51 +##ol +##ow +phase +governor +kids +details +methods +Vice +employed +performing +counter +Jane +heads +Channel +wine +opposition +aged +1912 +Every +1926 +highway +##ura +1921 +aired +978 +permanent +Forest +finds +joint +approved +##pur +brief +doubt +acts +brand +wild +closely +Ford +Kevin +chose +shall +port +sweet +fun +asking +Be +##bury +sought +Dave +Mexican +mom +Right +Howard +Moscow +Charlie +Stone +##mann +admitted +##ver +wooden +1923 +Officer +relations +Hot +combat +publication +chain +shop +inhabitants +proved +ideas +address +1915 +Memorial +explain +increasing +conflict +Anthony +Melbourne +narrow +temperature +slid +1916 +worse +selling +documentary +Ali +Ray +opposed +vision +dad +extensive +Infantry +commissioned +Doctor +offices +programming +core +respect +storm +##pa +##ay +##om +promotion +der +struck +anymore +shit +Region +receiving +DVD +alternative +##ue +ride +maximum +1910 +##ious +Third +Affairs +cancer +Executive +##op +dream +18th +Due +##ker +##worth +economy +IV +Billboard +identity +subsequent +statement +skills +##back +funding +##ons +Round +Foreign +truck +Please +lights +wondered +##ms +frame +yes +Still +districts +fiction +Colonel +converted +150 +grown +accident +critics +fit +Information +architecture +Point +Five +armed +Billy +poet +functions +consisted +suit +Turkish +Band +object +desire +##ities +sounded +flow +Norwegian +articles +Marie +pulling +thin +singing +Hunter +Human +Battalion +Federation +Kim +origin +represent +dangerous +weather +fuel +ex +##sing +Last +bedroom +aid +knees +Alan +angry +assumed +plane +Something +founding +concerned +global +Fire +di +please +Portuguese +touched +Roger +nuclear +Register +Jeff +fixed +royal +lie +finals +NFL +Manchester +towns +handle +shaped +Chairman +Dean +launch +understanding +Children +violence +failure +sector +Brigade +wrapped +fired +sharp +tiny +developing +expansion +Free +institutions +technical +Nothing +otherwise +Main +inch +Saturday +wore +Senior +attached +cheek +representing +Kansas +##chi +##kin +actual +advantage +Dan +Austria +##dale +hoped +multi +squad +Norway +streets +1913 +Services +hired +grow +pp +wear +painted +Minnesota +stuff +Building +54 +Philippines +1900 +##ties +educational +Khan +Magazine +##port +Cape +signal +Gordon +sword +Anderson +cool +engaged +Commander +images +Upon +tied +Security +cup +rail +Vietnam +successfully +##red +Muslim +gain +bringing +Native +hers +occurs 
+negative +Philip +Kelly +Colorado +category +##lan +600 +Have +supporting +wet +56 +stairs +Grace +observed +##ung +funds +restaurant +1911 +Jews +##ments +##che +Jake +Back +53 +asks +journalist +accept +bands +bronze +helping +##ice +decades +mayor +survived +usual +influenced +Douglas +Hey +##izing +surrounded +retirement +Temple +derived +Pope +registered +producing +##ral +structures +Johnny +contributed +finishing +buy +specifically +##king +patients +Jordan +internal +regarding +Samuel +Clark +##q +afternoon +Finally +scenes +notice +refers +quietly +threat +Water +Those +Hamilton +promise +freedom +Turkey +breaking +maintained +device +lap +ultimately +Champion +Tim +Bureau +expressed +investigation +extremely +capable +qualified +recognition +items +##up +Indiana +adult +rain +greatest +architect +Morgan +dressed +equal +Antonio +collected +drove +occur +Grant +graduate +anger +Sri +worried +standards +##ore +injured +somewhere +damn +Singapore +Jimmy +pocket +homes +stock +religion +aware +regarded +Wisconsin +##tra +passes +fresh +##ea +argued +Ltd +EP +Diego +importance +Census +incident +Egypt +Missouri +domestic +leads +ceremony +Early +camera +Father +challenge +Switzerland +lands +familiar +hearing +spend +educated +Tennessee +Thank +##ram +Thus +concern +putting +inches +map +classical +Allen +crazy +valley +Space +softly +##my +pool +worldwide +climate +experienced +neighborhood +scheduled +neither +fleet +1908 +Girl +##J +Part +engines +locations +darkness +Revolution +establishment +lawyer +objects +apparently +Queensland +Entertainment +bill +mark +Television +##ong +pale +demand +Hotel +selection +##rn +##ino +Labour +Liberal +burned +Mom +merged +Arizona +request +##lia +##light +hole +employees +##ical +incorporated +95 +independence +Walker +covering +joining +##ica +task +papers +backing +sell +biggest +6th +strike +establish +##ō +gently +59 +Orchestra +Winter +protein +Juan +locked +dates +Boy +aren +shooting +Luke +solid +charged +Prior +resigned +interior +garden +spoken +improve +wonder +promote +hidden +##med +combination +Hollywood +Swiss +consider +##ks +Lincoln +literary +drawing +Marine +weapon +Victor +Trust +Maryland +properties +##ara +exhibition +understood +hung +Tell +installed +loud +fashion +affected +junior +landing +flowers +##he +Internet +beach +Heart +tries +Mayor +programme +800 +wins +noise +##ster +##ory +58 +contain +fair +delivered +##ul +wedding +Square +advance +behavior +Program +Oregon +##rk +residence +realize +certainly +hill +Houston +57 +indicated +##water +wounded +Village +massive +Moore +thousands +personnel +dating +opera +poetry +##her +causes +feelings +Frederick +applications +push +approached +foundation +pleasure +sale +fly +gotten +northeast +costs +raise +paintings +##ney +views +horses +formal +Arab +hockey +typical +representative +rising +##des +clock +stadium +shifted +Dad +peak +Fame +vice +disappeared +users +Way +Naval +prize +hoping +values +evil +Bell +consisting +##ón +Regional +##ics +improved +circle +carefully +broad +##ini +Fine +maintain +operate +offering +mention +Death +stupid +Through +Princess +attend +interests +ruled +somewhat +wings +roads +grounds +##ual +Greece +Champions +facing +hide +voted +require +Dark +Matthew +credit +sighed +separated +manner +##ile +Boys +1905 +committed +impossible +lip +candidates +7th +Bruce +arranged +Islamic +courses +criminal +##ened +smell +##bed +08 +consecutive +##ening +proper +purchase +weak +Prix +1906 +aside +introduction +Look +##ku +changing +budget 
+resistance +factory +Forces +agency +##tone +northwest +user +1907 +stating +##one +sport +Design +environmental +cards +concluded +Carl +250 +accused +##ology +Girls +sick +intelligence +Margaret +responsibility +Guard +##tus +17th +sq +goods +1909 +hate +##ek +capture +stores +Gray +comic +Modern +Silver +Andy +electronic +wheel +##ied +Deputy +##bs +Czech +zone +choose +constant +reserve +##lle +Tokyo +spirit +sub +degrees +flew +pattern +compete +Dance +##ik +secretary +Imperial +99 +reduce +Hungarian +confused +##rin +Pierre +describes +regularly +Rachel +85 +landed +passengers +##ise +##sis +historian +meters +Youth +##ud +participate +##cing +arrival +tired +Mother +##gy +jumped +Kentucky +faces +feed +Israeli +Ocean +##Q +##án +plus +snow +techniques +plate +sections +falls +jazz +##ris +tank +loan +repeated +opinion +##res +unless +rugby +journal +Lawrence +moments +shock +distributed +##ded +adjacent +Argentina +crossing +uncle +##ric +Detroit +communication +mental +tomorrow +session +Emma +Without +##gen +Miami +charges +Administration +hits +coat +protected +Cole +invasion +priest +09 +Gary +enjoyed +plot +measure +bound +friendly +throw +musician +##lon +##ins +Age +knife +damaged +birds +driven +lit +ears +breathing +Arabic +Jan +faster +Jonathan +##gate +Independent +starred +Harris +teachers +Alice +sequence +mph +file +translated +decide +determine +Review +documents +sudden +threatened +##ft +bear +distinct +decade +burning +##sky +1930s +replace +begun +extension +##time +1904 +equivalent +accompanied +Christopher +Danish +##ye +Besides +##more +persons +fallen +Rural +roughly +saved +willing +ensure +Belgium +05 +musicians +##ang +giant +Six +Retrieved +worst +purposes +##bly +mountains +seventh +slipped +brick +07 +##py +somehow +Carter +Iraq +cousin +favor +islands +journey +FIFA +contrast +planet +vs +calm +##ings +concrete +branches +gray +profit +Russell +##ae +##ux +##ens +philosophy +businesses +talked +parking +##ming +owners +Place +##tle +agricultural +Kate +06 +southeast +draft +Eddie +earliest +forget +Dallas +Commonwealth +edited +66 +inner +ed +operates +16th +Harvard +assistance +##si +designs +Take +bathroom +indicate +CEO +Command +Louisiana +1902 +Dublin +Books +1901 +tropical +1903 +##tors +Places +tie +progress +forming +solution +62 +letting +##ery +studying +##jo +duties +Baseball +taste +Reserve +##ru +Ann +##gh +visible +##vi +notably +link +NCAA +southwest +Never +storage +mobile +writers +favorite +Pro +pages +truly +count +##tta +string +kid +98 +Ross +row +##idae +Kennedy +##tan +Hockey +hip +waist +grandfather +listen +##ho +feels +busy +72 +stream +obvious +cycle +shaking +Knight +##ren +Carlos +painter +trail +web +linked +04 +Palace +existed +##ira +responded +closing +End +examples +Marshall +weekend +jaw +Denmark +lady +township +medium +chin +Story +option +fifteen +Moon +represents +makeup +investment +jump +childhood +Oklahoma +roll +normally +Ten +Operation +Graham +Seattle +Atlanta +paused +promised +rejected +treated +returns +flag +##ita +Hungary +danger +glad +movements +visual +subjects +credited +soldier +Norman +ill +translation +José +Quebec +medicine +warning +theater +praised +municipal +01 +commune +churches +acid +folk +8th +testing +add +survive +Sound +devices +residential +severe +presidential +Mississippi +Austin +Perhaps +Charlotte +hanging +Montreal +grin +##ten +racial +partnership +shoot +shift +##nie +Les +downtown +Brothers +Garden +matters +restored +mirror +forever +winners +rapidly +poverty +##ible +Until 
+DC +faith +hundreds +Real +Ukraine +Nelson +balance +Adams +contest +relative +ethnic +Edinburgh +composition +##nts +emergency +##van +marine +reputation +Down +pack +12th +Communist +Mountains +pro +stages +measures +##ld +ABC +Li +victims +benefit +Iowa +Broadway +gathered +rating +Defense +classic +##ily +ceiling +##ions +snapped +Everything +constituency +Franklin +Thompson +Stewart +entering +Judge +forth +##sk +wanting +smiling +moves +tunnel +premiered +grass +unusual +Ukrainian +bird +Friday +tail +Portugal +coal +element +Fred +guards +Senator +collaboration +beauty +Wood +chemical +beer +justice +signs +##Z +sees +##zi +Puerto +##zed +96 +smooth +Bowl +gift +limit +97 +heading +Source +wake +requires +Ed +Constitution +factor +Lane +factors +adding +Note +cleared +pictures +pink +##ola +Kent +Local +Singh +moth +Ty +##ture +courts +Seven +temporary +involving +Vienna +emerged +fishing +agree +defensive +stuck +secure +Tamil +##ick +bottle +03 +Player +instruments +Spring +patient +flesh +contributions +cry +Malaysia +120 +Global +da +Alabama +Within +##work +debuted +expect +Cleveland +concerns +retained +horror +10th +spending +Peace +Transport +grand +Crown +instance +institution +acted +Hills +mounted +Campbell +shouldn +1898 +##ably +chamber +soil +88 +Ethan +sand +cheeks +##gi +marry +61 +weekly +classification +DNA +Elementary +Roy +definitely +Soon +Rights +gate +suggests +aspects +imagine +golden +beating +Studios +Warren +differences +significantly +glance +occasionally +##od +clothing +Assistant +depth +sending +possibility +mode +prisoners +requirements +daughters +dated +Representatives +prove +guilty +interesting +smoke +cricket +93 +##ates +rescue +Connecticut +underground +Opera +13th +reign +##ski +thanks +leather +equipped +routes +fan +##ans +script +Wright +bishop +Welsh +jobs +faculty +eleven +Railroad +appearing +anniversary +Upper +##down +anywhere +Rugby +Metropolitan +Meanwhile +Nicholas +champions +forehead +mining +drinking +76 +Jerry +membership +Brazilian +Wild +Rio +scheme +Unlike +strongly +##bility +fill +##rian +easier +MP +Hell +##sha +Stanley +banks +Baron +##ique +Robinson +67 +Gabriel +Austrian +Wayne +exposed +##wan +Alfred +1899 +manage +mix +visitors +eating +##rate +Sean +commission +Cemetery +policies +Camp +parallel +traveled +guitarist +02 +supplies +couples +poem +blocks +Rick +Training +Energy +achieve +appointment +Wing +Jamie +63 +novels +##em +1890 +songwriter +Base +Jay +##gar +naval +scared +miss +labor +technique +crisis +Additionally +backed +destroy +seriously +tools +tennis +91 +god +##ington +continuing +steam +obviously +Bobby +adapted +fifty +enjoy +Jacob +publishing +column +##ular +Baltimore +Donald +Liverpool +92 +drugs +movies +##ock +Heritage +##je +##istic +vocal +strategy +gene +advice +##bi +Ottoman +riding +##side +Agency +Indonesia +11th +laughing +sleeping +und +muttered +listening +deck +tip +77 +ownership +grey +Claire +deeply +provincial +popularity +Cooper +##á +Emily +##sed +designer +Murray +describe +Danny +Around +Parker +##dae +68 +rates +suffering +considerable +78 +nervous +powered +tons +circumstances +wished +belonged +Pittsburgh +flows +9th +##use +belt +81 +useful +15th +context +List +Dead +Iron +seek +Season +worn +frequency +legislation +replacement +memories +Tournament +Again +Barry +organisation +copy +Gulf +waters +meets +struggle +Oliver +1895 +Susan +protest +kick +Alliance +components +1896 +Tower +Windows +demanded +regiment +sentence +Woman +Logan +Referee +hosts +debate +knee +Blood 
+##oo +universities +practices +Ward +ranking +correct +happening +Vincent +attracted +classified +##stic +processes +immediate +waste +increasingly +Helen +##po +Lucas +Phil +organ +1897 +tea +suicide +actors +lb +crash +approval +waves +##ered +hated +grip +700 +amongst +69 +74 +hunting +dying +lasted +illegal +##rum +stare +defeating +##gs +shrugged +°C +Jon +Count +Orleans +94 +affairs +formally +##and +##ves +criticized +Disney +Vol +successor +tests +scholars +palace +Would +celebrated +rounds +grant +Schools +Such +commanded +demon +Romania +##all +Karl +71 +##yn +84 +Daily +totally +Medicine +fruit +Die +upset +Lower +Conservative +14th +Mitchell +escaped +shoes +Morris +##tz +queen +harder +prime +Thanks +indeed +Sky +authors +rocks +definition +Nazi +accounts +printed +experiences +##ters +divisions +Cathedral +denied +depending +Express +##let +73 +appeal +loose +colors +filed +##isation +gender +##ew +throne +forests +Finland +domain +boats +Baker +squadron +shore +remove +##ification +careful +wound +railroad +82 +seeking +agents +##ved +Blues +##off +customers +ignored +net +##ction +hiding +Originally +declined +##ess +franchise +eliminated +NBA +merely +pure +appropriate +visiting +forty +markets +offensive +coverage +cave +##nia +spell +##lar +Benjamin +##ire +Convention +filmed +Trade +##sy +##ct +Having +palm +1889 +Evans +intense +plastic +Julia +document +jeans +vessel +SR +##fully +proposal +Birmingham +le +##ative +assembly +89 +fund +lock +1893 +AD +meetings +occupation +modified +Years +odd +aimed +reform +Mission +Works +shake +cat +exception +convinced +executed +pushing +dollars +replacing +soccer +manufacturing +##ros +expensive +kicked +minimum +Josh +coastal +Chase +ha +Thailand +publications +deputy +Sometimes +Angel +effectively +##illa +criticism +conduct +Serbian +landscape +NY +absence +passage +##ula +Blake +Indians +1892 +admit +Trophy +##ball +Next +##rated +##ians +charts +kW +orchestra +79 +heritage +1894 +rough +exists +boundary +Bible +Legislative +moon +medieval +##over +cutting +print +##ett +birthday +##hood +destruction +Julian +injuries +influential +sisters +raising +statue +colour +dancing +characteristics +orange +##ok +##aries +Ken +colonial +twin +Larry +surviving +##shi +Barbara +personality +entertainment +assault +##ering +talent +happens +license +86 +couch +Century +soundtrack +shower +swimming +cash +Staff +bent +1885 +bay +lunch +##lus +dozen +vessels +CBS +greatly +critic +Test +symbol +panel +shell +output +reaches +87 +Front +motor +ocean +##era +##ala +maintenance +violent +scent +Limited +Las +Hope +Theater +Which +survey +Robin +recordings +compilation +##ward +bomb +insurance +Authority +sponsored +satellite +Jazz +refer +stronger +blow +whilst +Wrestling +suggest +##rie +climbed +##els +voices +shopping +1891 +Neil +discovery +##vo +##ations +burst +Baby +peaked +Brooklyn +knocked +lift +##try +false +nations +Hugh +Catherine +preserved +distinguished +terminal +resolution +ratio +pants +cited +competitions +completion +DJ +bone +uniform +schedule +shouted +83 +1920s +rarely +Basketball +Taiwan +artistic +bare +vampires +arrest +Utah +Marcus +assist +gradually +qualifying +Victorian +vast +rival +Warner +Terry +Economic +##cia +losses +boss +versus +audio +runner +apply +surgery +Play +twisted +comfortable +##cs +Everyone +guests +##lt +Harrison +UEFA +lowered +occasions +##lly +##cher +chapter +youngest +eighth +Culture +##room +##stone +1888 +Songs +Seth +Digital +involvement +expedition +relationships +signing +1000 
+fault +annually +circuit +afterwards +meat +creature +##ou +cable +Bush +##net +Hispanic +rapid +gonna +figured +extent +considering +cried +##tin +sigh +dynasty +##ration +cabinet +Richmond +stable +##zo +1864 +Admiral +Unit +occasion +shares +badly +longest +##ify +Connor +extreme +wondering +girlfriend +Studio +##tions +1865 +tribe +exact +muscles +hat +Luis +Orthodox +decisions +amateur +description +##lis +hips +kingdom +##ute +Portland +whereas +Bachelor +outer +discussion +partly +Arkansas +1880 +dreams +perfectly +Lloyd +##bridge +asleep +##tti +Greg +permission +trading +pitch +mill +Stage +liquid +Keith +##tal +wolf +processing +stick +Jerusalem +profile +rushed +spiritual +argument +Ice +Guy +till +Delhi +roots +Section +missions +Glasgow +penalty +NBC +encouraged +identify +keyboards +##zing +##ston +disc +plain +informed +Bernard +thinks +fled +Justin +##day +newspapers +##wick +Ralph +##zer +unlike +Stars +artillery +##ified +recovered +arrangement +searching +##pers +##tory +##rus +deaths +Egyptian +diameter +##í +marketing +corporate +teach +marks +Turner +staying +hallway +Sebastian +chapel +naked +mistake +possession +1887 +dominated +jacket +creative +Fellow +Falls +Defence +suspended +employment +##rry +Hebrew +Hudson +Week +Wars +recognize +Natural +controversial +Tommy +thank +Athletic +benefits +decline +intention +##ets +Lost +Wall +participation +elevation +supports +parliament +1861 +concentration +Movement +##IS +competing +stops +behalf +##mm +limits +funded +discuss +Collins +departure +obtain +woods +latest +universe +alcohol +Laura +rush +blade +funny +Dennis +forgotten +Amy +Symphony +apparent +graduating +1862 +Rob +Grey +collections +Mason +emotions +##ugh +literally +Any +counties +1863 +nomination +fighter +habitat +respond +external +Capital +exit +Video +carbon +sharing +Bad +opportunities +Perry +photo +##mus +Orange +posted +remainder +transportation +portrayed +Labor +recommended +percussion +rated +Grade +rivers +partially +suspected +strip +adults +button +struggled +intersection +Canal +##ability +poems +claiming +Madrid +1886 +Together +##our +Much +Vancouver +instrument +instrumental +1870 +mad +angle +Control +Phoenix +Leo +Communications +mail +##ette +##ev +preferred +adaptation +alleged +discussed +deeper +##ane +Yet +Monday +volumes +thrown +Zane +##logy +displayed +rolling +dogs +Along +Todd +##ivity +withdrew +representation +belief +##sia +crown +Late +Short +hardly +grinned +romantic +Pete +##ken +networks +enemies +Colin +Eventually +Side +donated +##su +steady +grab +guide +Finnish +Milan +pregnant +controversy +reminded +1884 +Stuart +##bach +##ade +Race +Belgian +LP +Production +Zone +lieutenant +infantry +Child +confusion +sang +resident +##ez +victim +1881 +channels +Ron +businessman +##gle +Dick +colony +pace +producers +##ese +agencies +Craig +Lucy +Very +centers +Yorkshire +photography +##ched +Album +championships +Metro +substantial +Standard +terrible +directors +contribution +advertising +emotional +##its +layer +segment +sir +folded +Roberts +ceased +Hampshire +##ray +detailed +partners +m² +##pt +Beth +genre +commented +generated +remote +aim +Hans +credits +concerts +periods +breakfast +gay +shadow +defence +Too +Had +transition +Afghanistan +##book +eggs +defend +##lli +writes +Systems +bones +mess +seed +scientists +Shortly +Romanian +##zy +Freedom +muscle +hero +parent +agriculture +checked +Islam +Bristol +Freyja +Arena +cabin +Germans +electricity +ranks +viewed +medals +Wolf +associate +Madison +Sorry +fort 
+Chile +detail +widespread +attorney +boyfriend +##nan +Students +Spencer +##ig +bite +Maine +demolished +Lisa +erected +Someone +operational +Commissioner +NHL +Coach +Bar +forcing +Dream +Rico +cargo +Murphy +##fish +##ase +distant +##master +##ora +Organization +doorway +Steven +traded +electrical +frequent +##wn +Branch +Sure +1882 +placing +Manhattan +attending +attributed +excellent +pounds +ruling +principles +component +Mediterranean +Vegas +machines +percentage +infrastructure +throwing +affiliated +Kings +secured +Caribbean +Track +Ted +honour +opponent +Virgin +Construction +grave +produces +Challenge +stretched +paying +murmured +##ata +integrated +waved +Nathan +##ator +transmission +videos +##yan +##hu +Nova +descent +AM +Harold +conservative +Therefore +venue +competitive +##ui +conclusion +funeral +confidence +releases +scholar +##sson +Treaty +stress +mood +##sm +Mac +residing +Action +Fund +##ship +animated +fitted +##kar +defending +voting +tend +##berry +answers +believes +##ci +helps +Aaron +##tis +themes +##lay +populations +Players +stroke +Trinity +electoral +paint +abroad +charity +keys +Fair +##pes +interrupted +participants +murdered +Days +supporters +##ab +expert +borders +mate +##llo +solar +architectural +tension +##bling +Parish +tape +operator +Cultural +Clinton +indicates +publisher +ordinary +sugar +arrive +rifle +acoustic +##uring +assets +##shire +SS +sufficient +options +HMS +Classic +bars +rebuilt +governments +Beijing +reporter +screamed +Abbey +crying +mechanical +instantly +communications +Political +cemetery +Cameron +Stop +representatives +USS +texts +mathematics +innings +civilian +Serbia +##hill +practical +patterns +dust +Faculty +debt +##end +##cus +junction +suppose +experimental +Computer +Food +wrist +abuse +dealing +bigger +cap +principle +##pin +Muhammad +Fleet +Collection +attempting +dismissed +##burn +regime +Herbert +##ua +shadows +1883 +Eve +Lanka +1878 +Performance +fictional +##lock +Noah +Run +Voivodeship +exercise +broadcasting +##fer +RAF +Magic +Bangladesh +suitable +##low +##del +styles +toured +Code +identical +links +insisted +110 +flash +Model +slave +Derek +Rev +fairly +Greater +sole +##lands +connecting +zero +bench +##ome +switched +Fall +Owen +yours +Electric +shocked +convention +##bra +climb +memorial +swept +Racing +decides +belong +##nk +parliamentary +##und +ages +proof +##dan +delivery +1860 +##ów +sad +publicly +leaning +Archbishop +dirt +##ose +categories +1876 +burn +##bing +requested +Guinea +Historical +rhythm +relation +##heim +ye +pursue +merchant +##mes +lists +continuous +frowned +colored +tool +gods +involves +Duncan +photographs +Cricket +slight +Gregory +atmosphere +wider +Cook +##tar +essential +Being +FA +emperor +wealthy +nights +##bar +licensed +Hawaii +viewers +Language +load +nearest +milk +kilometers +platforms +##ys +territories +Rogers +sheet +Rangers +contested +##lation +isolated +assisted +swallowed +Small +Contemporary +Technical +Edwards +express +Volume +endemic +##ei +tightly +Whatever +indigenous +Colombia +##ulation +hp +characterized +##ida +Nigeria +Professional +duo +Soccer +slaves +Farm +smart +Attorney +Attendance +Common +salt +##vin +tribes +nod +sentenced +bid +sample +Drive +switch +instant +21st +Cuba +drunk +Alaska +proud +awareness +hitting +sessions +Thai +locally +elsewhere +Dragon +gentle +touching +##lee +Springs +Universal +Latino +spin +1871 +Chart +recalled +Type +pointing +##ii +lowest +##ser +grandmother +Adelaide +Jacques +spotted +Buffalo +restoration +Son 
+Joan +farmers +Lily +1879 +lucky +##dal +luck +eldest +##rant +Market +drummer +deployed +warned +prince +sing +amazing +sailed +##oon +1875 +Primary +traveling +Masters +Sara +cattle +Trail +gang +Further +desert +relocated +##tch +##ord +Flight +illness +Munich +ninth +repair +Singles +##lated +Tyler +tossed +boots +Work +sized +earning +shoved +magazines +housed +dam +researchers +Former +spun +premiere +spaces +organised +wealth +crimes +devoted +stones +Urban +automatic +hop +affect +outstanding +tanks +mechanism +Muslims +Ms +shots +argue +Jeremy +connections +Armenian +increases +rubbed +1867 +retail +gear +Pan +bonus +jurisdiction +weird +concerning +whisper +##gal +Microsoft +tenure +hills +www +Gmina +porch +files +reportedly +venture +Storm +##ence +Nature +killer +panic +fate +Secret +Wang +scream +drivers +belongs +Chamber +clan +monument +mixing +Peru +bet +Riley +Friends +Isaac +submarine +1877 +130 +judges +harm +ranging +affair +prepare +pupils +householder +Policy +decorated +Nation +slammed +activist +implemented +Room +qualify +Publishing +establishing +Baptist +touring +subsidiary +##nal +legend +1872 +laughter +PC +Athens +settlers +ties +dual +dear +Draft +strategic +Ivan +reveal +closest +dominant +Ah +##ult +Denver +bond +boundaries +drafted +tables +##TV +eyed +Edition +##ena +1868 +belonging +1874 +Industrial +cream +Ridge +Hindu +scholarship +Ma +opens +initiated +##ith +yelled +compound +random +Throughout +grades +physics +sank +grows +exclusively +settle +Saints +brings +Amsterdam +Make +Hart +walks +battery +violin +##born +explanation +##ware +1873 +##har +provinces +thrust +exclusive +sculpture +shops +##fire +VI +constitution +Barcelona +monster +Devon +Jefferson +Sullivan +bow +##din +desperate +##ć +Julie +##mon +##ising +terminus +Jesse +abilities +golf +##ple +##via +##away +Raymond +measured +jury +firing +revenue +suburb +Bulgarian +1866 +##cha +timber +Things +##weight +Morning +spots +Alberta +Data +explains +Kyle +friendship +raw +tube +demonstrated +aboard +immigrants +reply +breathe +Manager +ease +##ban +##dia +Diocese +##vy +##ía +pit +ongoing +##lie +Gilbert +Costa +1940s +Report +voters +cloud +traditions +##MS +gallery +Jennifer +swung +Broadcasting +Does +diverse +reveals +arriving +initiative +##ani +Give +Allied +Pat +Outstanding +monastery +blind +Currently +##war +bloody +stopping +focuses +managing +Florence +Harvey +creatures +900 +breast +internet +Artillery +purple +##mate +alliance +excited +fee +Brisbane +lifetime +Private +##aw +##nis +##gue +##ika +phrase +regulations +reflected +manufactured +conventional +pleased +client +##ix +##ncy +Pedro +reduction +##con +welcome +jail +comfort +Iranian +Norfolk +Dakota +##tein +evolution +everywhere +Initially +sensitive +Olivia +Oscar +implementation +sits +stolen +demands +slide +grandson +##ich +merger +##mic +Spirit +##° +ticket +root +difficulty +Nevada +##als +lined +Dylan +Original +Call +biological +EU +dramatic +##hn +Operations +treaty +gap +##list +Am +Romanized +moral +Butler +perspective +Furthermore +Manuel +absolutely +unsuccessful +disaster +dispute +preparation +tested +discover +##ach +shield +squeezed +brushed +battalion +Arnold +##ras +superior +treat +clinical +##so +Apple +Syria +Cincinnati +package +flights +editions +Leader +minority +wonderful +hang +Pop +Philippine +telephone +bell +honorary +##mar +balls +Democrat +dirty +thereafter +collapsed +Inside +slip +wrestling +##ín +listened +regard +bowl +None +Sport +completing +trapped +##view +copper +Wallace 
+Honor +blame +Peninsula +##ert +##oy +Anglo +bearing +simultaneously +honest +##ias +Mix +Got +speaker +voiced +impressed +prices +error +1869 +##feld +trials +Nine +Industry +substitute +Municipal +departed +slept +##ama +Junction +Socialist +flower +dropping +comment +fantasy +##ress +arrangements +travelled +furniture +fist +relieved +##tics +Leonard +linear +earn +expand +Soul +Plan +Leeds +Sierra +accessible +innocent +Winner +Fighter +Range +winds +vertical +Pictures +101 +charter +cooperation +prisoner +interviews +recognised +sung +manufacturer +exposure +submitted +Mars +leaf +gauge +screaming +likes +eligible +##ac +gathering +columns +##dra +belly +UN +maps +messages +speakers +##ants +garage +unincorporated +Number +Watson +sixteen +lots +beaten +Could +Municipality +##ano +Horse +talks +Drake +scores +Venice +genetic +##mal +##ère +Cold +Jose +nurse +traditionally +##bus +Territory +Key +Nancy +##win +thumb +São +index +dependent +carries +controls +Comics +coalition +physician +referring +Ruth +Based +restricted +inherited +internationally +stretch +THE +plates +margin +Holland +knock +significance +valuable +Kenya +carved +emotion +conservation +municipalities +overseas +resumed +Finance +graduation +blinked +temperatures +constantly +productions +scientist +ghost +cuts +permitted +##ches +firmly +##bert +patrol +##yo +Croatian +attacking +1850 +portrait +promoting +sink +conversion +##kov +locomotives +Guide +##val +nephew +relevant +Marc +drum +originated +Chair +visits +dragged +Price +favour +corridor +properly +respective +Caroline +reporting +inaugural +1848 +industries +##ching +edges +Christianity +Maurice +Trent +Economics +carrier +Reed +##gon +tribute +Pradesh +##ale +extend +attitude +Yale +##lu +settlements +glasses +taxes +targets +##ids +quarters +##ological +connect +hence +metre +collapse +underneath +banned +Future +clients +alternate +explosion +kinds +Commons +hungry +dragon +Chapel +Buddhist +lover +depression +pulls +##ges +##uk +origins +computers +crosses +kissing +assume +emphasis +lighting +##ites +personally +crashed +beam +touchdown +lane +comparison +##mont +Hitler +##las +execution +##ene +acre +sum +Pearl +ray +##point +essentially +worker +convicted +tear +Clay +recovery +Literature +Unfortunately +##row +partial +Petersburg +Bulgaria +coaching +evolved +reception +enters +narrowed +elevator +therapy +defended +pairs +##lam +breaks +Bennett +Uncle +cylinder +##ison +passion +bases +Actor +cancelled +battles +extensively +oxygen +Ancient +specialized +negotiations +##rat +acquisition +convince +interpretation +##00 +photos +aspect +colleges +Artist +keeps +##wing +Croatia +##ona +Hughes +Otto +comments +##du +Ph +Sweet +adventure +describing +Student +Shakespeare +scattered +objective +Aviation +Phillips +Fourth +athletes +##hal +##tered +Guitar +intensity +née +dining +curve +Obama +topics +legislative +Mill +Cruz +##ars +Members +recipient +Derby +inspiration +corresponding +fed +YouTube +coins +pressing +intent +Karen +cinema +Delta +destination +shorter +Christians +imagined +canal +Newcastle +Shah +Adrian +super +Males +160 +liberal +lord +bat +supplied +Claude +meal +worship +##atic +Han +wire +°F +##tha +punishment +thirteen +fighters +##ibility +1859 +Ball +gardens +##ari +Ottawa +pole +indicating +Twenty +Higher +Bass +Ivy +farming +##urs +certified +Saudi +plenty +##ces +restaurants +Representative +Miles +payment +##inger +##rit +Confederate +festivals +references +##ić +Mario +PhD +playoffs +witness +rice +mask +saving +opponents 
+[... several thousand further WordPiece vocabulary entries, one "+"-prefixed token per diff line: whole-word tokens such as "enforcement", "radar", "Financial" alongside "##"-prefixed subword continuations such as "##oe", "##ean", "##bal" ...]
+cu +Ex +uranium +metro +spill +Pietro +seize +Chorus +partition +##dock +##media +engineered +##oria +conclusions +subdivision +##uid +Illustrated +Leading +##hora +Berkshire +definite +##books +##cin +##suke +noun +winced +Doris +dissertation +Wilderness +##quest +braced +arbitrary +kidnapping +Kurdish +##but +clearance +excavations +wanna +Allmusic +insult +presided +yacht +##SM +Honour +Tin +attracting +explosives +Gore +Bride +##ience +Packers +Devils +Observer +##course +Loser +##erry +##hardt +##mble +Cyrillic +undefeated +##stra +subordinate +##ame +Wigan +compulsory +Pauline +Cruise +Opposition +##ods +Period +dispersed +expose +##60 +##has +Certain +Clerk +Wolves +##hibition +apparatus +allegiance +orbital +justified +thanked +##ević +Biblical +Carolyn +Graves +##tton +Hercules +backgrounds +replica +1788 +aquatic +Mega +Stirling +obstacles +filing +Founder +vowels +Deborah +Rotterdam +surpassed +Belarusian +##ologists +Zambia +Ren +Olga +Alpine +bi +councillor +Oaks +Animals +eliminating +digit +Managing +##GE +laundry +##rdo +presses +slamming +Tudor +thief +posterior +##bas +Rodgers +smells +##ining +Hole +SUV +trombone +numbering +representations +Domingo +Paralympics +cartridge +##rash +Combined +shelves +Kraków +revision +##frame +Sánchez +##tracted +##bler +Alain +townships +sic +trousers +Gibbs +anterior +symmetry +vaguely +Castile +IRA +resembling +Penguin +##ulent +infections +##stant +raped +##pressive +worrying +brains +bending +JR +Evidence +Venetian +complexes +Jonah +850 +exported +Ambrose +Gap +philanthropist +##atus +Marxist +weighing +##KO +##nath +Soldiers +chiefs +reject +repeating +shaky +Zürich +preserving +##xin +cigarettes +##break +mortar +##fin +Already +reproduction +socks +Waiting +amazed +##aca +dash +##path +Airborne +##harf +##get +descending +OBE +Sant +Tess +Lucius +enjoys +##ttered +##ivation +##ete +Leinster +Phillies +execute +geological +unfinished +Courts +SP +Beaver +Duck +motions +Platinum +friction +##aud +##bet +Parts +Stade +entirety +sprang +Smithsonian +coffin +prolonged +Borneo +##vise +unanimously +##uchi +Cars +Cassandra +Australians +##CT +##rgen +Louisa +spur +Constance +##lities +Patent +racism +tempo +##ssion +##chard +##nology +##claim +Million +Nichols +##dah +Numerous +ing +Pure +plantations +donor +##EP +##rip +convenience +##plate +dots +indirect +##written +Dong +failures +adapt +wizard +unfortunately +##gion +practitioners +economically +Enrique +unchanged +kingdoms +refined +definitions +lazy +worries +railing +##nay +Kaiser +##lug +cracks +sells +ninety +##WC +Directed +denotes +developmental +papal +unfortunate +disappointing +sixteenth +Jen +##urier +NWA +drifting +Horror +##chemical +behaviors +bury +surfaced +foreigners +slick +AND +##rene +##ditions +##teral +scrap +kicks +comprise +buddy +##anda +Mental +##ype +Dom +wines +Limerick +Luca +Rand +##won +Tomatoes +homage +geometric +##nted +telescope +Shelley +poles +##fan +shareholders +Autonomous +cope +intensified +Genoa +Reformation +grazing +##tern +Zhao +provisional +##bies +Con +##riel +Cynthia +Raleigh +vivid +threaten +Length +subscription +roses +Müller +##isms +robin +##tial +Laos +Stanton +nationalism +##clave +##ND +##17 +##zz +staging +Busch +Cindy +relieve +##spective +packs +neglected +CBE +alpine +Evolution +uneasy +coastline +Destiny +Barber +Julio +##tted +informs +unprecedented +Pavilion +##bei +##ference +betrayal +awaiting +leaked +V8 +puppet +adverse +Bourne +Sunset +collectors +##glass +##sque +copied +Demon +conceded +resembled +Rafe +Levy 
+prosecutor +##ject +flora +manned +deaf +Mosque +reminds +Lizzie +Products +Funny +cassette +congress +##rong +Rover +tossing +prompting +chooses +Satellite +cautiously +Reese +##UT +Huang +Gloucestershire +giggled +Kitty +##å +Pleasant +Aye +##ond +judging +1860s +intentionally +Hurling +aggression +##xy +transfers +employing +##fies +##oda +Archibald +Blessed +Ski +flavor +Rosie +##burgh +sunset +Scholarship +WC +surround +ranged +##jay +Degree +Houses +squeezing +limb +premium +Leningrad +steals +##inated +##ssie +madness +vacancy +hydraulic +Northampton +##prise +Marks +Boxing +##fying +academics +##lich +##TY +CDs +##lma +hardcore +monitors +paperback +cables +Dimitri +upside +advent +Ra +##clusive +Aug +Christchurch +objected +stalked +Simple +colonists +##laid +CT +discusses +fellowship +Carnival +cares +Miracle +pastoral +rooted +shortage +borne +Quentin +meditation +tapping +Novel +##ades +Alicia +Burn +famed +residency +Fernández +Johannesburg +Zhu +offended +Mao +outward +##inas +XV +denial +noticing +##ís +quarry +##hound +##amo +Bernie +Bentley +Joanna +mortgage +##rdi +##sumption +lenses +extracted +depiction +##RE +Networks +Broad +Revenue +flickered +virgin +flanked +##о +Enterprises +probable +Liberals +Falcons +drowning +phrases +loads +assumes +inhaled +awe +logs +slightest +spiders +waterfall +##pate +rocking +shrub +##uil +roofs +##gard +prehistoric +wary +##rak +TO +clips +sustain +treason +microphone +voter +Lamb +psychologist +wrinkled +##ères +mating +Carrier +340 +##lbert +sensing +##rino +destiny +distract +weaker +UC +Nearly +neurons +spends +Apache +##rem +genuinely +wells +##lanted +stereo +##girl +Lois +Leaving +consul +fungi +Pier +Cyril +80s +Jungle +##tani +illustration +Split +##hana +Abigail +##patrick +1787 +diminished +Selected +packaging +##EG +Martínez +communal +Manufacturing +sentiment +143 +unwilling +praising +Citation +pills +##iti +##rax +muffled +neatly +workforce +Yep +leisure +Tu +##nding +Wakefield +ancestral +##uki +destructive +seas +Passion +showcase +##ceptive +heroic +142 +exhaustion +Customs +##aker +Scholar +sliced +##inian +Direction +##OW +Swansea +aluminium +##eep +ceramic +McCoy +Career +Sector +chartered +Damascus +pictured +Interest +stiffened +Plateau +obsolete +##tant +irritated +inappropriate +overs +##nko +bail +Talent +Sur +ours +##nah +barred +legged +sociology +Bud +dictionary +##luk +Cover +obey +##oring +annoying +##dong +apprentice +Cyrus +Role +##GP +##uns +##bag +Greenland +Porsche +Rocket +##32 +organism +##ntary +reliability +##vocation +##й +Found +##hine +motors +promoter +unfair +##oms +##note +distribute +eminent +rails +appealing +chiefly +meaningful +Stephan +##rehension +Consumer +psychiatric +bowler +saints +##iful +##н +1777 +Pol +Dorian +Townsend +hastily +##jima +Quincy +Sol +fascinated +Scarlet +alto +Avon +certainty +##eding +Keys +##chu +Chu +##VE +ions +tributaries +Thanksgiving +##fusion +astronomer +oxide +pavilion +Supply +Casa +Bollywood +sadly +mutations +Keller +##wave +nationals +##rgo +##ym +predict +Catholicism +Vega +##eration +##ums +Mali +tuned +Lankan +Plans +radial +Bosnian +Lexi +##14 +##ü +sacks +unpleasant +Empty +handles +##taking +Bon +switches +intently +tuition +antique +##jk +fraternity +notebook +Desmond +##sei +prostitution +##how +deed +##OP +501 +Somewhere +Rocks +##mons +campaigned +frigate +gases +suppress +##hang +Merlin +Northumberland +dominate +expeditions +thunder +##ups +##rical +Cap +thorough +Ariel +##kind +renewable +constructing +pacing +terrorists +Bowen 
+documentaries +westward +##lass +##nage +Merchant +##ued +Beaumont +Din +##hian +Danube +peasant +Garrison +encourages +gratitude +reminding +stormed +##ouse +pronunciation +##ailed +Weekend +suggestions +##ffing +##DI +Active +Colombo +##logists +Merrill +##cens +Archaeological +Medina +captained +##yk +duel +cracking +Wilkinson +Guam +pickup +renovations +##ël +##izer +delighted +##iri +Weaver +##ctional +tens +##hab +Clint +##usion +##each +petals +Farrell +##sable +caste +##will +Ezra +##qi +##standing +thrilled +ambush +exhaled +##SU +Resource +blur +forearm +specifications +contingent +cafe +##iology +Antony +fundraising +grape +##rgy +turnout +##udi +Clifton +laboratories +Irvine +##opus +##lid +Monthly +Bihar +statutory +Roses +Emil +##rig +lumber +optimal +##DR +pumps +plaster +Mozambique +##aco +nightclub +propelled +##hun +ked +surplus +wax +##urai +pioneered +Sunny +imprint +Forget +Eliot +approximate +patronage +##bek +##ely +##mbe +Partnership +curl +snapping +29th +Patriarch +##jord +seldom +##ature +astronomy +Bremen +XIV +airborne +205 +1778 +recognizing +stranded +arrogant +bombardment +destined +ensured +146 +robust +Davenport +Interactive +Offensive +Fi +prevents +probe +propeller +sorrow +Blade +mounting +automotive +##dged +wallet +201 +lashes +Forrest +##ift +Cell +Younger +shouts +##cki +folds +##chet +Epic +yields +homosexual +tunes +##minate +##text +Manny +chemist +hindwings +##urn +pilgrimage +##sfield +##riff +MLS +##rive +Huntington +translates +Path +slim +##ndra +##oz +climax +commuter +desperation +##reet +denying +##rious +daring +seminary +polo +##clamation +Teatro +Torah +Cats +identities +Poles +photographed +fiery +popularly +##cross +winters +Hesse +##vio +Nurse +Senegal +Salon +prescribed +justify +##gues +##и +##orted +HQ +##hiro +evaluated +momentarily +##unts +Debbie +##licity +##TP +Mighty +Rabbit +##chal +Events +Savoy +##ht +Brandenburg +Bordeaux +##laus +Release +##IE +##kowski +1900s +SK +Strauss +##aly +Sonia +Updated +synagogue +McKay +flattened +370 +clutch +contests +toast +evaluate +pope +heirs +jam +tutor +reverted +##ading +nonsense +hesitate +Lars +Ceylon +Laurie +##guchi +accordingly +customary +148 +Ethics +Multiple +instincts +IGN +##ä +bullshit +##hit +##par +desirable +##ducing +##yam +alias +ashore +licenses +##lification +misery +147 +Cola +assassinated +fiercely +##aft +las +goat +substrate +lords +Cass +Bridges +ICC +lasts +sights +reproductive +##asi +Ivory +Clean +fixing +##lace +seeming +aide +1850s +harassment +##FF +##LE +reasonably +##coat +##cano +NYC +1784 +Fifty +immunity +Canadians +Cheng +comforting +meanwhile +##tera +##blin +breeds +glowed +##vour +Aden +##verted +##aded +##oral +neat +enforced +poisoning +##ews +##hone +enforce +predecessors +survivor +Month +unfamiliar +pierced +waived +dump +responds +Mai +Declan +angular +Doesn +interpretations +##yar +invest +Dhaka +policeman +Congregation +Eighth +painfully +##este +##vior +Württemberg +##cles +blockade +encouragement +##fie +Caucasus +Malone +Universidad +utilize +Nissan +inherent +151 +agreeing +syllable +determines +Protocol +conclude +##gara +40th +Xu +Taiwanese +##ather +boiler +printer +Lacey +titular +Klaus +Fallon +Wembley +fox +Chandra +Governorate +obsessed +##Ps +micro +##25 +Cooke +gymnasium +weaving +Shall +Hussein +glaring +softball +Reader +Dominion +Trouble +varsity +Cooperation +Chaos +Kang +Kramer +Eisenhower +proves +Connie +consortium +governors +Bethany +opener +Normally +Willy +linebacker +Regent +Used +AllMusic +Twilight +##shaw 
+Companion +Tribunal +simpler +##gam +Experimental +Slovenian +cellar +deadline +trout +Hubbard +ads +idol +##hetto +Granada +clues +salmon +1700 +Omega +Caldwell +softened +Bills +Honolulu +##gn +Terrace +suitcase +##IL +frantic +##oons +Abbot +Sitting +Fortress +Riders +sickness +enzymes +trustee +Bern +forged +##13 +##ruff +##rl +##versity +inspector +champagne +##held +##FI +hereditary +Taliban +handball +##wine +Sioux +##dicated +honoured +139 +##tude +Skye +meanings +##rkin +cardiac +analyzed +vegetable +##FS +Royals +dial +freelance +##fest +partisan +petroleum +ridden +Lincolnshire +panting +##comb +presidents +Haley +##chs +contributes +Jew +discoveries +panicked +Woody +eyelids +Fate +Tulsa +mg +whiskey +zombies +Wii +##udge +investigators +##bull +centred +##screen +Bone +Lana +##oise +forts +##ske +Conan +Lyons +##writing +SH +##ride +rhythmic +154 +##llah +pioneers +##bright +captivity +Sanchez +Oman +##mith +Flint +Platform +##ioned +emission +packet +Persia +##formed +takeover +tempted +Vance +Few +Toni +receptions +##ن +exchanges +Camille +whale +Chronicles +##rent +##ushing +##rift +Alto +Genus +##asing +onward +foremost +longing +Rockefeller +containers +##cribe +intercepted +##olt +pleading +Bye +bee +##umbling +153 +undertake +Izzy +cheaper +Ultra +validity +##pse +Sa +hovering +##pert +vintage +engraved +##rise +farmland +##ever +##ifier +Atlantis +propose +Catalonia +plunged +##edly +demonstrates +gig +##cover +156 +Osborne +cowboy +herd +investigator +loops +Burning +rests +Instrumental +embarrassing +focal +install +readings +swirling +Chatham +parameter +##zin +##holders +Mandarin +Moody +converting +Escape +warnings +##chester +incarnation +##ophone +adopting +##lins +Cromwell +##laws +Axis +Verde +Kappa +Schwartz +Serbs +caliber +Wanna +Chung +##ality +nursery +principally +Bulletin +likelihood +logging +##erty +Boyle +supportive +twitched +##usive +builds +Marseille +omitted +motif +Lands +##lusion +##ssed +Barrow +Airfield +Harmony +WWF +endured +merging +convey +branding +examinations +167 +Italians +##dh +dude +1781 +##teau +crawling +thoughtful +clasped +concluding +brewery +Moldova +Wan +Towers +Heidelberg +202 +##ict +Lagos +imposing +##eval +##serve +Bacon +frowning +thirteenth +conception +calculations +##ович +##mile +##ivated +mutation +strap +##lund +demographic +nude +perfection +stocks +##renched +##dit +Alejandro +bites +fragment +##hack +##rchy +GB +Surgery +Berger +punish +boiling +consume +Elle +Sid +Dome +relies +Crescent +treasurer +Bloody +1758 +upheld +Guess +Restaurant +signatures +font +millennium +mural +stakes +Abel +hailed +insists +Alumni +Breton +##jun +digits +##FM +##thal +Talking +motive +reigning +babe +masks +##ø +Shaun +potato +sour +whitish +Somali +##derman +##rab +##wy +chancel +telecommunications +Noise +messenger +tidal +grinding +##ogenic +Rebel +constituent +peripheral +recruitment +##ograph +##tler +pumped +Ravi +poked +##gley +Olive +diabetes +discs +liking +sting +fits +stir +Mari +Sega +creativity +weights +Macau +mandated +Bohemia +disastrous +Katrina +Baku +Rajasthan +waiter +##psis +Siberia +verbs +##truction +patented +1782 +##ndon +Relegated +Hunters +Greenwood +Shock +accusing +skipped +Sessions +markers +subset +monumental +Viola +comparative +Alright +Barbados +setup +Session +standardized +##ík +##sket +appoint +AFB +Nationalist +##WS +Troop +leaped +Treasure +goodness +weary +originates +100th +compassion +expresses +recommend +168 +composing +seventeenth +Tex +Atlético +bald +Finding +Presidency +Sharks 
+favoured +inactive +##lter +suffix +princes +brighter +##ctus +classics +defendants +culminated +terribly +Strategy +evenings +##ção +##iver +##urance +absorb +##rner +Territories +RBI +soothing +Martín +concurrently +##tr +Nicholson +fibers +swam +##oney +Allie +Algerian +Dartmouth +Mafia +##bos +##tts +Councillor +vocabulary +##bla +##lé +intending +##dler +Guerrero +sunshine +pedal +##TO +administrators +periodic +scholarships +Loop +Madeline +exaggerated +##ressed +Regan +##cellular +Explorer +##oids +Alexandre +vows +Reporter +Unable +Average +absorption +##bedience +Fortunately +Auxiliary +Grandpa +##HP +##ovo +potent +temporal +adrenaline +##udo +confusing +guiding +Dry +qualifications +joking +wherein +heavyweight +##ices +nightmares +pharmaceutical +Commanding +##aled +##ove +Gregor +##UP +censorship +degradation +glorious +Austro +##rench +380 +Miriam +sped +##orous +offset +##KA +fined +specialists +Pune +João +##dina +propped +fungus +##ς +frantically +Gabrielle +Hare +committing +##plied +Ask +Wilmington +stunt +numb +warmer +preacher +earnings +##lating +integer +##ija +federation +homosexuality +##cademia +epidemic +grumbled +shoving +Milk +Satan +Tobias +innovations +##dington +geology +memoirs +##IR +spared +culminating +Daphne +Focus +severed +stricken +Paige +Mans +flats +Russo +communes +litigation +strengthening +##powered +Staffordshire +Wiltshire +Painting +Watkins +##د +specializes +Select +##rane +##aver +Fulton +playable +##VN +openings +sampling +##coon +##21 +Allah +travelers +allocation +##arily +Loch +##hm +commentators +fulfilled +##troke +Emeritus +Vanderbilt +Vijay +pledged +##tative +diagram +drilling +##MD +##plain +Edison +productivity +31st +##rying +##ption +##gano +##oration +##bara +posture +bothering +platoon +politely +##inating +redevelopment +Job +##vale +stark +incorrect +Mansion +renewal +threatens +Bahamas +fridge +##tata +Uzbekistan +##edia +Sainte +##mio +gaps +neural +##storm +overturned +Preservation +shields +##ngo +##physics +ah +gradual +killings +##anza +consultation +premiership +Felipe +coincidence +##ène +##any +Handbook +##loaded +Edit +Guns +arguably +##ş +compressed +depict +seller +##qui +Kilkenny +##kling +Olympia +librarian +##acles +dramas +JP +Kit +Maj +##lists +proprietary +##nged +##ettes +##tok +exceeding +Lock +induction +numerical +##vist +Straight +foyer +imaginary +##pop +violinist +Carla +bouncing +##ashi +abolition +##uction +restoring +scenic +##č +Doom +overthrow +para +##vid +##ughty +Concord +HC +cocaine +deputies +##aul +visibility +##wart +Kapoor +Hutchinson +##agan +flashes +kn +decreasing +##ronology +quotes +vain +satisfying +##iam +##linger +310 +Hanson +fauna +##zawa +##rrel +Trenton +##VB +Employment +vocational +Exactly +bartender +butterflies +tow +##chers +##ocks +pigs +merchandise +##game +##pine +Shea +##gration +Connell +Josephine +monopoly +##dled +Cobb +warships +cancellation +someday +stove +##Cs +candidacy +superhero +unrest +Toulouse +admiration +undergone +whirled +Reconnaissance +costly +##ships +290 +Cafe +amber +Tory +##mpt +definitive +##dress +proposes +redesigned +acceleration +##asa +##raphy +Presley +exits +Languages +##cel +Mode +spokesperson +##tius +Ban +forthcoming +grounded +ACC +compelling +logistics +retailers +abused +##gating +soda +##yland +##lution +Landmark +XVI +blush +##tem +hurling +dread +Tobago +Foley +##uad +scenarios +##mentation +##rks +Score +fatigue +hairy +correspond +##iard +defences +confiscated +##rudence +1785 +Formerly +Shot +advertised +460 +Text +ridges 
+Promise +Dev +exclusion +NHS +tuberculosis +rockets +##offs +sparkling +256 +disappears +mankind +##hore +HP +##omo +taxation +Multi +DS +Virgil +##ams +Dell +stacked +guessing +Jump +Nope +cheer +hates +ballots +overlooked +analyses +Prevention +maturity +dos +##cards +##lect +Mare +##yssa +Petty +##wning +differing +iOS +##ior +Joachim +Sentinel +##nstein +90s +Pamela +480 +Asher +##lary +Vicente +landings +portray +##rda +##xley +Virtual +##uary +finances +Jain +Somebody +Tri +behave +Michele +##ider +dwellings +FAA +Gallagher +##lide +Monkey +195 +aforementioned +##rism +##bey +##kim +##puted +Mesa +hopped +unopposed +recipients +Reality +Been +gritted +149 +playground +pillar +##rone +Guinness +##tad +Théâtre +depended +Tipperary +Reuben +frightening +wooded +Target +globally +##uted +Morales +Baptiste +drunken +Institut +characterised +##chemistry +Strip +discrete +Premiership +##zzling +gazing +Outer +##quisition +Sikh +Booker +##yal +contemporaries +Jericho +##chan +##physical +##witch +Militia +##rez +##zard +dangers +##utter +##₀ +Programs +darling +participates +railroads +##ienne +behavioral +bureau +##rook +161 +Hicks +##rises +Comes +inflicted +bees +kindness +norm +##ković +generators +##pard +##omy +##ili +methodology +Alvin +façade +latitude +##plified +DE +Morse +##mered +educate +intersects +##MF +##cz +##vated +AL +##graded +##fill +constitutes +artery +feudal +avant +cautious +##ogue +immigrated +##chenko +Saul +Clinic +Fang +choke +Cornelius +flexibility +temperate +pins +##erson +oddly +inequality +157 +Natasha +Sal +##uter +215 +aft +blinking +##ntino +northward +Exposition +cookies +Wedding +impulse +Overseas +terrifying +##ough +Mortimer +##see +440 +https +og +imagining +##cars +Nicola +exceptionally +threads +##cup +Oswald +Provisional +dismantled +deserves +1786 +Fairy +discourse +Counsel +departing +Arc +guarding +##orse +420 +alterations +vibrant +Em +squinted +terrace +rowing +Led +accessories +SF +Sgt +cheating +Atomic +##raj +Blackpool +##iary +boarded +substituted +bestowed +lime +kernel +##jah +Belmont +shaken +sticky +retrospective +Louie +migrants +weigh +sunglasses +thumbs +##hoff +excavation +##nks +Extra +Polo +motives +Drum +infrared +tastes +berth +verge +##stand +programmed +warmed +Shankar +Titan +chromosome +cafeteria +dividing +pepper +CPU +Stevie +satirical +Nagar +scowled +Died +backyard +##gata +##reath +##bir +Governors +portraying +##yah +Revenge +##acing +1772 +margins +Bahn +OH +lowland +##razed +catcher +replay +##yoshi +Seriously +##licit +Aristotle +##ald +Habsburg +weekday +Secretariat +CO +##dly +##joy +##stad +litre +ultra +##cke +Mongol +Tucson +correlation +compose +traps +Groups +Hai +Salvatore +##dea +cents +##eese +concession +clash +Trip +Panzer +Moroccan +cruisers +torque +Ba +grossed +##arate +restriction +concentrating +FDA +##Leod +##ones +Scholars +##esi +throbbing +specialised +##heses +Chicken +##fia +##ificant +Erich +Residence +##trate +manipulation +namesake +##tom +Hoover +cue +Lindsey +Lonely +275 +##HT +combustion +subscribers +Punjabi +respects +Jeremiah +penned +##gor +##rilla +suppression +##tration +Crimson +piston +Derry +crimson +lyrical +oversee +portrays +CF +Districts +Lenin +Cora +searches +clans +VHS +##hel +Jacqueline +Redskins +Clubs +desktop +indirectly +alternatives +marijuana +suffrage +##smos +Irwin +##liff +Process +##hawks +Sloane +##bson +Sonata +yielded +Flores +##ares +armament +adaptations +integrate +neighbours +shelters +##tour +Skinner +##jet +##tations +1774 +Peterborough +##elles 
+ripping +Liang +Dickinson +charities +Rwanda +monasteries +crossover +racist +barked +guerrilla +##ivate +Grayson +##iques +##vious +##got +Rolls +denominations +atom +affinity +##delity +Wish +##inted +##inae +interrogation +##cey +##erina +##lifting +192 +Sands +1779 +mast +Likewise +##hyl +##oft +contempt +##por +assaulted +fills +establishments +Mal +consulted +##omi +##sight +greet +##roma +##egan +Pulitzer +##rried +##dius +##ractical +##voked +Hasan +CB +##zzy +Romanesque +Panic +wheeled +recorder +##tters +##warm +##gly +botanist +Balkan +Lockheed +Polly +farewell +suffers +purchases +Eaton +##80 +Quick +commenting +Saga +beasts +hides +motifs +##icks +Alonso +Springer +Wikipedia +circulated +encoding +jurisdictions +snout +UAE +Integrated +unmarried +Heinz +##lein +##figured +deleted +##tley +Zen +Cycling +Fuel +Scandinavian +##rants +Conner +reef +Marino +curiously +lingered +Gina +manners +activism +Mines +Expo +Micah +promotions +Server +booked +derivatives +eastward +detailing +reelection +##chase +182 +Campeonato +Po +158 +Peel +winger +##itch +canyon +##pit +LDS +A1 +##shin +Giorgio +pathetic +##rga +##mist +Aren +##lag +confronts +motel +textbook +shine +turbines +1770 +Darcy +##cot +Southeastern +##lessness +Banner +recognise +stray +Kitchen +paperwork +realism +Chrysler +filmmakers +fishermen +##hetic +variously +Vishnu +fiddle +Eddy +Origin +##tec +##ulin +Flames +Rs +bankrupt +Extreme +Pomeranian +##emption +ratified +##iu +jockey +Stratford +##ivating +##oire +Babylon +pardon +AI +affordable +deities +disturbance +Trying +##sai +Ida +Papers +advancement +70s +archbishop +Luftwaffe +announces +tugging +##lphin +##sistence +##eel +##ishes +ambition +aura +##fled +##lected +##vue +Prasad +boiled +clarity +Violin +investigative +routing +Yankee +##uckle +McMahon +bugs +eruption +##rooms +Minutes +relics +##ckle +##nse +sipped +valves +weakly +##ital +Middleton +collided +##quer +bamboo +insignia +Tyne +exercised +Ninth +echoing +polynomial +considerations +lunged +##bius +objections +complain +disguised +plaza +##VC +institutes +Judicial +ascent +imminent +Waterford +hello +Lumpur +Niger +Goldman +vendors +Kensington +Wren +browser +##bner +##tri +##mize +##pis +##lea +Cheyenne +Bold +Settlement +Hollow +Paralympic +axle +##toire +##actic +impose +perched +utilizing +slips +Benz +Michaels +manipulate +Chiang +##mian +Dolphins +prohibition +attacker +ecology +Estadio +##SB +##uild +attracts +recalls +glacier +lad +##rima +Barlow +kHz +melodic +##aby +##iracy +assumptions +Cornish +##aru +DOS +Maddie +##mers +lyric +Luton +nm +##tron +Reno +Fin +YOU +Broadcast +Finch +sensory +##bent +Jeep +##uman +additionally +Buildings +businessmen +treaties +235 +Stranger +gateway +Charlton +accomplishments +Diary +apologized +zinc +histories +supplier +##tting +162 +asphalt +Treatment +Abbas +##pating +##yres +Bloom +sedan +soloist +##cum +antagonist +denounced +Fairfax +##aving +##enko +noticeable +Budget +Buckingham +Snyder +retreating +Jai +spoon +invading +giggle +woven +gunfire +arrests +##vered +##come +respiratory +violet +##aws +Byrd +shocking +tenant +Jamaican +Ottomans +Seal +theirs +##isse +##48 +cooperate +peering +##nius +163 +Composer +organist +Mongolian +Bauer +Spy +collects +prophecy +congregations +##moor +Brick +calculation +fixtures +exempt +##dden +Ada +Thousand +##lue +tracing +##achi +bodyguard +vicar +supplying +Łódź +interception +monitored +##heart +Paso +overlap +annoyance +##dice +yellowish +stables +elders +illegally +honesty +##oar +skinny +spinal +##puram 
+Bourbon +##cor +flourished +Medium +##stics +##aba +Follow +##ckey +stationary +##scription +dresser +scrutiny +Buckley +Clearly +##SF +Lyrics +##heimer +drying +Oracle +internally +rains +##last +Enemy +##oes +McLean +Ole +phosphate +Rosario +Rifles +##mium +battered +Pepper +Presidents +conquer +Château +castles +##aldo +##ulf +Depending +Lesser +Boom +trades +Peyton +164 +emphasize +accustomed +SM +Ai +Classification +##mins +##35 +##rons +leak +piled +deeds +lush +##self +beginnings +breathless +1660 +McGill +##ago +##chaft +##gies +humour +Bomb +securities +Might +##zone +##eves +Matthias +Movies +Levine +vengeance +##ads +Challenger +Misty +Traditionally +constellation +##rass +deepest +workplace +##oof +##vina +impatient +##ML +Mughal +Alessandro +scenery +Slater +postseason +troupe +##ń +Volunteers +Facility +militants +Reggie +sanctions +Expeditionary +Nam +countered +interpret +Basilica +coding +expectation +Duffy +def +Tong +wakes +Bowling +Vehicle +Adler +salad +intricate +stronghold +medley +##uries +##bur +joints +##rac +##yx +##IO +Ordnance +Welch +distributor +Ark +cavern +trench +Weiss +Mauritius +decreases +docks +eagerly +irritation +Matilda +biographer +Visiting +##marked +##iter +##ear +##gong +Moreno +attendant +Bury +instrumentation +theologian +clit +nuns +symphony +translate +375 +loser +##user +##VR +##meter +##orious +harmful +##yuki +Commissioners +Mendoza +sniffed +Hulk +##dded +##ulator +##nz +Donnell +##eka +deported +Met +SD +Aerospace +##cultural +##odes +Fantastic +cavity +remark +emblem +fearing +##iance +ICAO +Liberia +stab +##yd +Pac +Gymnasium +IS +Everton +##vanna +mantle +##ief +Ramon +##genic +Shooting +Smoke +Random +Africans +MB +tavern +bargain +voluntarily +Ion +Peoples +Rusty +attackers +Patton +sins +##cake +Hat +moderately +##hala +##alia +requesting +mechanic +##eae +Seine +Robbins +##ulum +susceptible +Bravo +Slade +Strasbourg +rubble +entrusted +Creation +##amp +smoothed +##uintet +evenly +reviewers +skip +Sculpture +177 +Rough +##rrie +Reeves +##cede +Administrator +garde +minus +carriages +grenade +Ninja +fuscous +##kley +Punk +contributors +Aragon +Tottenham +##cca +##sir +VA +laced +dealers +##sonic +crisp +harmonica +Artistic +Butch +Andes +Farmers +corridors +unseen +##tium +Countries +Lone +envisioned +Katy +##lang +##cc +Quarterly +##neck +consort +##aceae +bidding +Corey +concurrent +##acts +##gum +Highness +##lient +##rators +arising +##unta +pathways +49ers +bolted +complaining +ecosystem +libretto +Ser +narrated +212 +Soft +influx +##dder +incorporation +plagued +tents +##ddled +1750 +Risk +citation +Tomas +hostilities +seals +Bruins +Dominique +attic +competent +##UR +##cci +hugging +Breuning +bacterial +Shrewsbury +vowed +eh +elongated +hangs +render +centimeters +##ficient +Mu +turtle +besieged +##gaard +grapes +bravery +collaborations +deprived +##amine +##using +##gins +arid +##uve +coats +hanged +##sting +Pa +prefix +##ranged +Exit +Chain +Flood +Materials +suspicions +##ö +hovered +Hidden +##state +Malawi +##24 +Mandy +norms +fascinating +airlines +delivers +##rust +Cretaceous +spanned +pillows +##onomy +jar +##kka +regent +fireworks +morality +discomfort +lure +uneven +##jack +Lucian +171 +archaeology +##til +mornings +Billie +Marquess +impending +spilling +tombs +##volved +Celia +Coke +underside +##bation +Vaughn +Daytona +Godfrey +Pascal +Alien +##sign +172 +##lage +iPhone +Gonna +genocide +##rber +oven +endure +dashed +simultaneous +##phism +Wally +##rō +ants +predator +reissue +##aper +Speech +funk +Rudy +claw 
+Hindus +Numbers +Bing +lantern +##aurus +scattering +poisoned +##active +Andrei +algebraic +baseman +##ritz +Gregg +##cola +selections +##putation +lick +Laguna +##IX +Sumatra +Warning +turf +buyers +Burgess +Oldham +exploit +worm +initiate +strapped +tuning +filters +haze +##е +##ledge +##ydro +##culture +amendments +Promotion +##union +Clair +##uria +petty +shutting +##eveloped +Phoebe +Zeke +conducts +grains +clashes +##latter +illegitimate +willingly +Deer +Lakers +Reference +chaplain +commitments +interrupt +salvation +Panther +Qualifying +Assessment +cancel +efficiently +attorneys +Dynamo +impress +accession +clinging +randomly +reviewing +Romero +Cathy +charting +clapped +rebranded +Azerbaijani +coma +indicator +punches +##tons +Sami +monastic +prospects +Pastor +##rville +electrified +##CI +##utical +tumbled +Chef +muzzle +selecting +UP +Wheel +protocols +##tat +Extended +beautifully +nests +##stal +Andersen +##anu +##³ +##rini +kneeling +##reis +##xia +anatomy +dusty +Safe +turmoil +Bianca +##elo +analyze +##ر +##eran +podcast +Slovene +Locke +Rue +##retta +##uni +Person +Prophet +crooked +disagreed +Versailles +Sarajevo +Utrecht +##ogen +chewing +##ception +##iidae +Missile +attribute +majors +Arch +intellectuals +##andra +ideological +Cory +Salzburg +##fair +Lot +electromagnetic +Distribution +##oper +##pered +Russ +Terra +repeats +fluttered +Riga +##ific +##gt +cows +Hair +labelled +protects +Gale +Personnel +Düsseldorf +Moran +rematch +##OE +Slow +forgiveness +##ssi +proudly +Macmillan +insist +undoubtedly +Québec +Violence +##yuan +##aine +mourning +linen +accidental +##iol +##arium +grossing +lattice +maneuver +##marine +prestige +petrol +gradient +invasive +militant +Galerie +widening +##aman +##quist +disagreement +##ales +creepy +remembers +buzz +##erial +Exempt +Dirk +mon +Addison +##inen +deposed +##agon +fifteenth +Hang +ornate +slab +##lades +Fountain +contractors +das +Warwickshire +1763 +##rc +Carly +Essays +Indy +Ligue +greenhouse +slit +##sea +chewed +wink +##azi +Playhouse +##kon +Gram +Ko +Samson +creators +revive +##rians +spawned +seminars +Craft +Tall +diverted +assistants +computational +enclosure +##acity +Coca +##eve +databases +Drop +##loading +##hage +Greco +Privy +entrances +pork +prospective +Memories +robes +##market +transporting +##lik +Rudolph +Horton +visually +##uay +##nja +Centro +Tor +Howell +##rsey +admitting +postgraduate +herbs +##att +Chin +Rutherford +##bot +##etta +Seasons +explanations +##bery +Friedman +heap +##ryl +##sberg +jaws +##agh +Choi +Killing +Fanny +##suming +##hawk +hopeful +##aid +Monty +gum +remarkably +Secrets +disco +harp +advise +##avia +Marathi +##cycle +Truck +abbot +sincere +urine +##mology +masked +bathing +##tun +Fellows +##TM +##gnetic +owl +##jon +hymn +##leton +208 +hostility +##cée +baked +Bottom +##AB +shudder +##ater +##von +##hee +reorganization +Cycle +##phs +Lex +##style +##rms +Translation +##erick +##imeter +##ière +attested +Hillary +##DM +gal +wander +Salle +##laming +Perez +Pit +##LP +USAF +contexts +Disease +blazing +aroused +razor +walled +Danielle +Mont +Funk +royalty +thee +203 +donors +##erton +famously +processors +reassigned +welcoming +Goldberg +##quities +undisclosed +Orient +Patty +vaccine +refrigerator +Cypriot +consonant +##waters +176 +sober +##lement +Racecourse +##uate +Luckily +Selection +conceptual +vines +Breaking +wa +lions +oversight +sheltered +Dancer +ponds +borrow +##BB +##pulsion +Daly +##eek +fertility +spontaneous +Worldwide +gasping +##tino +169 +ABS +Vickers +ambient 
+energetic +prisons +##eson +Stacy +##roach +GmbH +Afro +Marin +farmhouse +pinched +##cursion +##sp +Sabine +##pire +181 +nak +swelling +humble +perfume +##balls +Rai +cannons +##taker +Married +Maltese +canals +interceptions +hats +lever +slowing +##ppy +Nike +Silas +Scarborough +skirts +166 +inauguration +Shuttle +alloy +beads +belts +Compton +Cause +battling +critique +surf +Dock +roommate +##ulet +invade +Garland +##slow +nutrition +persona +##zam +Wichita +acquaintance +coincided +##cate +Dracula +clamped +##gau +overhaul +##broken +##rrier +melodies +ventures +Paz +convex +Roots +##holding +Tribute +transgender +##ò +chimney +##riad +Ajax +Thereafter +messed +nowadays +pH +##100 +##alog +Pomerania +##yra +Rossi +glove +##TL +Races +##asily +tablets +Jase +##ttes +diner +##rns +Hu +Mohan +anytime +weighted +remixes +Dove +cherry +imports +##urity +GA +##TT +##iated +##sford +Clarkson +evidently +rugged +Dust +siding +##ometer +acquitted +choral +##mite +infants +Domenico +gallons +Atkinson +gestures +slated +##xa +Archaeology +unwanted +##ibes +##duced +premise +Colby +Geelong +disqualified +##pf +##voking +simplicity +Walkover +Qaeda +Warden +##bourg +##ān +Invasion +Babe +harness +183 +##tated +maze +Burt +bedrooms +##nsley +Horizon +##oast +minimize +peeked +MLA +Trains +tractor +nudged +##iform +Growth +Benton +separates +##about +##kari +buffer +anthropology +brigades +foil +##wu +Domain +licking +whore +##rage +##sham +Initial +Courthouse +Rutgers +dams +villains +supermarket +##brush +Brunei +Palermo +arises +Passenger +outreach +##gill +Labrador +McLaren +##uy +Lori +##fires +Heads +magistrate +¹⁄₂ +Weapons +##wai +##roke +projecting +##ulates +bordering +McKenzie +Pavel +midway +Guangzhou +streamed +racer +##lished +eccentric +spectral +206 +##mism +Wilde +Grange +preparatory +lent +##tam +starving +Gertrude +##cea +##ricted +Breakfast +Mira +blurted +derive +##lair +blunt +sob +Cheltenham +Henrik +reinstated +intends +##istan +unite +##ector +playful +sparks +mapped +Cadet +luggage +prosperous +##ein +salon +##utes +Biological +##rland +Tyrone +buyer +##lose +amounted +Saw +smirked +Ronan +Reviews +Adele +trait +##proof +Bhutan +Ginger +##junct +digitally +stirring +##isted +coconut +Hamlet +Dinner +Scale +pledge +##RP +Wrong +Goal +Panel +therapeutic +elevations +infectious +priesthood +##inda +Guyana +diagnostic +##mbre +Blackwell +sails +##arm +literal +periodically +gleaming +Robot +Rector +##abulous +##tres +Reaching +Romantic +CP +Wonderful +##tur +ornamental +##nges +traitor +##zilla +genetics +mentioning +##eim +resonance +Areas +Shopping +##nard +Gail +Solid +##rito +##mara +Willem +Chip +Matches +Volkswagen +obstacle +Organ +invites +Coral +attain +##anus +##dates +Midway +shuffled +Cecilia +dessert +Gateway +Ch +Napoleonic +Petroleum +jets +goose +striped +bowls +vibration +Sims +nickel +Thirteen +problematic +intervene +##grading +##unds +Mum +semifinal +Radical +##izations +refurbished +##sation +##harine +Maximilian +cites +Advocate +Potomac +surged +preserves +Curry +angled +ordination +##pad +Cade +##DE +##sko +researched +torpedoes +Resident +wetlands +hay +applicants +depart +Bernstein +##pic +##ario +##rae +favourable +##wari +##р +metabolism +nobleman +Defaulted +calculate +ignition +Celebrity +Belize +sulfur +Flat +Sc +USB +flicker +Hertfordshire +Sept +CFL +Pasadena +Saturdays +Titus +##nir +Canary +Computing +Isaiah +##mler +formidable +pulp +orchid +Called +Solutions +kilograms +steamer +##hil +Doncaster +successors +Stokes +Holstein +##sius +sperm 
+API +Rogue +instability +Acoustic +##rag +159 +undercover +Wouldn +##pra +##medical +Eliminated +honorable +##chel +denomination +abrupt +Buffy +blouse +fi +Regardless +Subsequent +##rdes +Lover +##tford +bacon +##emia +carving +##cripts +Massacre +Ramos +Latter +##ulp +ballroom +##gement +richest +bruises +Rest +Wiley +##aster +explosions +##lastic +Edo +##LD +Mir +choking +disgusted +faintly +Barracks +blasted +headlights +Tours +ensued +presentations +##cale +wrought +##oat +##coa +Quaker +##sdale +recipe +##gny +corpses +##liance +comfortably +##wat +Landscape +niche +catalyst +##leader +Securities +messy +##RL +Rodrigo +backdrop +##opping +treats +Emilio +Anand +bilateral +meadow +VC +socialism +##grad +clinics +##itating +##ppe +##ymphonic +seniors +Advisor +Armoured +Method +Alley +##orio +Sad +fueled +raided +Axel +NH +rushes +Dixie +Otis +wrecked +##22 +capitalism +café +##bbe +##pion +##forcing +Aubrey +Lublin +Whenever +Sears +Scheme +##lana +Meadows +treatise +##RI +##ustic +sacrifices +sustainability +Biography +mystical +Wanted +multiplayer +Applications +disliked +##tisfied +impaired +empirical +forgetting +Fairfield +Sunni +blurred +Growing +Avalon +coil +Camera +Skin +bruised +terminals +##fted +##roving +Commando +##hya +##sper +reservations +needles +dangling +##rsch +##rsten +##spect +##mbs +yoga +regretted +Bliss +Orion +Rufus +glucose +Olsen +autobiographical +##dened +222 +humidity +Shan +##ifiable +supper +##rou +flare +##MO +campaigning +descend +socio +declares +Mounted +Gracie +Arte +endurance +##ety +Copper +costa +airplay +##MB +Proceedings +dislike +grimaced +occupants +births +glacial +oblivious +cans +installment +muddy +##ł +captains +pneumonia +Quiet +Sloan +Excuse +##nine +Geography +gymnastics +multimedia +drains +Anthology +Gear +cylindrical +Fry +undertaking +##pler +##tility +Nan +##recht +Dub +philosophers +piss +Atari +##pha +Galicia +México +##nking +Continuing +bump +graveyard +persisted +Shrine +##erapy +defects +Advance +Bomber +##oil +##ffling +cheerful +##lix +scrub +##eto +awkwardly +collaborator +fencing +##alo +prophet +Croix +coughed +##lication +roadway +slaughter +elephants +##erated +Simpsons +vulnerability +ivory +Birth +lizard +scarce +cylinders +fortunes +##NL +Hate +Priory +##lai +McBride +##copy +Lenny +liaison +Triangle +coronation +sampled +savage +amidst +Grady +whatsoever +instinctively +Reconstruction +insides +seizure +Drawing +##rlin +Antioch +Gao +Díaz +1760 +Sparks +##tien +##bidae +rehearsal +##bbs +botanical +##hers +compensate +wholesale +Seville +shareholder +prediction +astronomical +Reddy +hardest +circling +whereabouts +termination +Rep +Assistance +Dramatic +Herb +##ghter +climbs +188 +Poole +301 +##pable +wit +##istice +Walters +relying +Jakob +##redo +proceeding +Langley +affiliates +ou +##allo +##holm +Samsung +##ishi +Missing +Xi +vertices +Claus +foam +restless +##uating +##sso +##ttering +Philips +delta +bombed +Catalogue +coaster +Ling +Willard +satire +410 +Composition +Net +Orioles +##ldon +fins +Palatinate +Woodward +tease +tilt +brightness +##70 +##bbling +##loss +##dhi +##uilt +Whoever +##yers +hitter +Elton +Extension +ace +Affair +restructuring +##loping +Paterson +hi +##rya +spouse +Shay +Himself +piles +preaching +##gical +bikes +Brave +expulsion +Mirza +stride +Trees +commemorated +famine +masonry +Selena +Watt +Banking +Rancho +Stockton +dip +tattoos +Vlad +acquainted +Flyers +ruthless +fourteenth +illustrate +##akes +EPA +##rows +##uiz +bumped +Designed +Leaders +mastered +Manfred +swirled 
+McCain +##rout +Artemis +rabbi +flinched +upgrades +penetrate +shipyard +transforming +caretaker +##eiro +Maureen +tightening +##founded +RAM +##icular +##mper +##rung +Fifteen +exploited +consistency +interstate +##ynn +Bridget +contamination +Mistress +##rup +coating +##FP +##jective +Libyan +211 +Gemma +dependence +shrubs +##ggled +Germain +retaliation +traction +##PP +Dangerous +terminology +psychiatrist +##garten +hurdles +Natal +wasting +Weir +revolves +stripe +##reased +preferences +##entation +##lde +##áil +##otherapy +Flame +##ologies +viruses +Label +Pandora +veil +##ogical +Coliseum +Cottage +creeping +Jong +lectured +##çaise +shoreline +##fference +##hra +Shade +Clock +Faye +bilingual +Humboldt +Operating +##fter +##was +algae +towed +amphibious +Parma +impacted +smacked +Piedmont +Monsters +##omb +Moor +##lberg +sinister +Postal +178 +Drummond +Sign +textbooks +hazardous +Brass +Rosemary +Pick +Sit +Architect +transverse +Centennial +confess +polling +##aia +Julien +##mand +consolidation +Ethel +##ulse +severity +Yorker +choreographer +1840s +##ltry +softer +versa +##geny +##quila +##jō +Caledonia +Friendship +Visa +rogue +##zzle +bait +feather +incidence +Foods +Ships +##uto +##stead +arousal +##rote +Hazel +##bolic +Swing +##ej +##cule +##jana +##metry +##uity +Valuable +##ₙ +Shropshire +##nect +365 +Ones +realise +Café +Albuquerque +##grown +##stadt +209 +##ᵢ +prefers +withstand +Lillian +MacArthur +Hara +##fulness +domination +##VO +##school +Freddy +ethnicity +##while +adorned +hormone +Calder +Domestic +Freud +Shields +##phus +##rgan +BP +Segunda +Mustang +##GI +Bonn +patiently +remarried +##umbria +Crete +Elephant +Nuremberg +tolerate +Tyson +##evich +Programming +##lander +Bethlehem +segregation +Constituency +quarterly +blushed +photographers +Sheldon +porcelain +Blanche +goddamn +lively +##fused +bumps +##eli +curated +coherent +provoked +##vet +Madeleine +##isco +rainy +Bethel +accusation +ponytail +gag +##lington +quicker +scroll +##vate +Bow +Gender +Ira +crashes +ACT +Maintenance +##aton +##ieu +bitterly +strains +rattled +vectors +##arina +##ishly +173 +parole +##nx +amusing +Gonzalez +##erative +Caucus +sensual +Penelope +coefficient +Mateo +##mani +proposition +Duty +lacrosse +proportions +Plato +profiles +Botswana +Brandt +reins +mandolin +encompassing +##gens +Kahn +prop +summon +##MR +##yrian +##zaki +Falling +conditional +thy +##bao +##ych +radioactive +##nics +Newspaper +##people +##nded +Gaming +sunny +##look +Sherwood +crafted +NJ +awoke +187 +timeline +giants +possessing +##ycle +Cheryl +ng +Ruiz +polymer +potassium +Ramsay +relocation +##leen +Sociology +##bana +Franciscan +propulsion +denote +##erjee +registers +headline +Tests +emerges +Articles +Mint +livery +breakup +kits +Rap +Browning +Bunny +##mington +##watch +Anastasia +Zachary +arranging +biographical +Erica +Nippon +##membrance +Carmel +##sport +##xes +Paddy +##holes +Issues +Spears +compliment +##stro +##graphs +Castillo +##MU +##space +Corporal +##nent +174 +Gentlemen +##ilize +##vage +convinces +Carmine +Crash +##hashi +Files +Doctors +brownish +sweating +goats +##conductor +rendition +##bt +NL +##spiration +generates +##cans +obsession +##noy +Danger +Diaz +heats +Realm +priorities +##phon +1300 +initiation +pagan +bursts +archipelago +chloride +Screenplay +Hewitt +Khmer +bang +judgement +negotiating +##ait +Mabel +densely +Boulder +knob +430 +Alfredo +##kt +pitches +##ées +##ان +Macdonald +##llum +imply +##mot +Smile +spherical +##tura +Derrick +Kelley +Nico +cortex +launches 
+differed +parallels +Navigation +##child +##rming +canoe +forestry +reinforce +##mote +confirming +tasting +scaled +##resh +##eting +Understanding +prevailing +Pearce +CW +earnest +Gaius +asserts +denoted +landmarks +Chargers +warns +##flies +Judges +jagged +##dain +tails +Historian +Millie +##sler +221 +##uard +absurd +Dion +##ially +makeshift +Specifically +ignorance +Eat +##ieri +comparisons +forensic +186 +Giro +skeptical +disciplinary +battleship +##45 +Libby +520 +Odyssey +ledge +##post +Eternal +Missionary +deficiency +settler +wonders +##gai +raging +##cis +Romney +Ulrich +annexation +boxers +sect +204 +ARIA +dei +Hitchcock +te +Varsity +##fic +CC +lending +##nial +##tag +##rdy +##obe +Defensive +##dson +##pore +stellar +Lam +Trials +contention +Sung +##uminous +Poe +superiority +##plicate +325 +bitten +conspicuous +##olly +Lila +Pub +Petit +distorted +ISIL +distinctly +##family +Cowboy +mutant +##cats +##week +Changes +Sinatra +epithet +neglect +Innocent +gamma +thrill +reggae +##adia +##ational +##due +landlord +##leaf +visibly +##ì +Darlington +Gomez +##iting +scarf +##lade +Hinduism +Fever +scouts +##roi +convened +##oki +184 +Lao +boycott +unemployed +##lore +##ß +##hammer +Curran +disciples +odor +##ygiene +Lighthouse +Played +whales +discretion +Yves +##ceived +pauses +coincide +##nji +dizzy +##scopic +routed +Guardians +Kellan +carnival +nasal +224 +##awed +Mitsubishi +640 +Cast +silky +Projects +joked +Huddersfield +Rothschild +zu +##olar +Divisions +mildly +##eni +##lge +Appalachian +Sahara +pinch +##roon +wardrobe +##dham +##etal +Bubba +##lini +##rumbling +Communities +Poznań +unification +Beau +Kris +SV +Rowing +Minh +reconciliation +##saki +##sor +taped +##reck +certificates +gubernatorial +rainbow +##uing +litter +##lique +##oted +Butterfly +benefited +Images +induce +Balkans +Velvet +##90 +##xon +Bowman +##breaker +penis +##nitz +##oint +##otive +crust +##pps +organizers +Outdoor +nominees +##rika +TX +##ucks +Protestants +##imation +appetite +Baja +awaited +##points +windshield +##igh +##zled +Brody +Buster +stylized +Bryce +##sz +Dollar +vest +mold +ounce +ok +receivers +##uza +Purdue +Harrington +Hodges +captures +##ggio +Reservation +##ssin +##tman +cosmic +straightforward +flipping +remixed +##athed +Gómez +Lim +motorcycles +economies +owning +Dani +##rosis +myths +sire +kindly +1768 +Bean +graphs +##mee +##RO +##geon +puppy +Stephenson +notified +##jer +Watching +##rama +Sino +urgency +Islanders +##mash +Plata +fumble +##chev +##stance +##rack +##she +facilitated +swings +akin +enduring +payload +##phine +Deputies +murals +##tooth +610 +Jays +eyeing +##quito +transparency +##cote +Timor +negatively +##isan +battled +##fected +thankful +Rage +hospitality +incorrectly +207 +entrepreneurs +##cula +##wley +hedge +##cratic +Corpus +Odessa +Whereas +##ln +fetch +happier +Amherst +bullying +graceful +Height +Bartholomew +willingness +qualifier +191 +Syed +Wesleyan +Layla +##rrence +Webber +##hum +Rat +##cket +##herence +Monterey +contaminated +Beside +Mustafa +Nana +213 +##pruce +Reason +##spense +spike +##gé +AU +disciple +charcoal +##lean +formulated +Diesel +Mariners +accreditation +glossy +1800s +##ih +Mainz +unison +Marianne +shear +overseeing +vernacular +bowled +##lett +unpopular +##ckoned +##monia +Gaston +##TI +##oters +Cups +##bones +##ports +Museo +minors +1773 +Dickens +##EL +##NBC +Presents +ambitions +axes +Río +Yukon +bedside +Ribbon +Units +faults +conceal +##lani +prevailed +214 +Goodwin +Jaguar +crumpled +Cullen +Wireless +ceded +remotely +Bin 
+mocking +straps +ceramics +##avi +##uding +##ader +Taft +twenties +##aked +Problem +quasi +Lamar +##ntes +##avan +Barr +##eral +hooks +sa +##ône +194 +##ross +Nero +Caine +trance +Homeland +benches +Guthrie +dismiss +##lex +César +foliage +##oot +##alty +Assyrian +Ahead +Murdoch +dictatorship +wraps +##ntal +Corridor +Mackay +respectable +jewels +understands +##pathic +Bryn +##tep +ON +capsule +intrigued +Sleeping +communists +##chayat +##current +##vez +doubling +booklet +##uche +Creed +##NU +spies +##sef +adjusting +197 +Imam +heaved +Tanya +canonical +restraint +senators +stainless +##gnate +Matter +cache +restrained +conflicting +stung +##ool +Sustainable +antiquity +193 +heavens +inclusive +##ador +fluent +303 +911 +archaeologist +superseded +##plex +Tammy +inspire +##passing +##lub +Lama +Mixing +##activated +##yote +parlor +tactic +198 +Stefano +prostitute +recycling +sorted +banana +Stacey +Musée +aristocratic +cough +##rting +authorised +gangs +runoff +thoughtfully +##nish +Fisheries +Provence +detector +hum +##zhen +pill +##árez +Map +Leaves +Peabody +skater +vent +##color +390 +cerebral +hostages +mare +Jurassic +swell +##isans +Knoxville +Naked +Malaya +scowl +Cobra +##anga +Sexual +##dron +##iae +196 +##drick +Ravens +Blaine +##throp +Ismail +symmetric +##lossom +Leicestershire +Sylvester +glazed +##tended +Radar +fused +Families +Blacks +Sale +Zion +foothills +microwave +slain +Collingwood +##pants +##dling +killers +routinely +Janice +hearings +##chanted +##ltration +continents +##iving +##yster +##shot +##yna +injected +Guillaume +##ibi +kinda +Confederacy +Barnett +disasters +incapable +##grating +rhythms +betting +draining +##hak +Callie +Glover +##iliated +Sherlock +hearted +punching +Wolverhampton +Leaf +Pi +builders +furnished +knighted +Photo +##zle +Touring +fumbled +pads +##ий +Bartlett +Gunner +eerie +Marius +Bonus +pots +##hino +##pta +Bray +Frey +Ortiz +stalls +belongings +Subway +fascination +metaphor +Bat +Boer +Colchester +sway +##gro +rhetoric +##dheim +Fool +PMID +admire +##hsil +Strand +TNA +##roth +Nottinghamshire +##mat +##yler +Oxfordshire +##nacle +##roner +BS +##nces +stimulus +transports +Sabbath +##postle +Richter +4000 +##grim +##shima +##lette +deteriorated +analogous +##ratic +UHF +energies +inspiring +Yiddish +Activities +##quential +##boe +Melville +##ilton +Judd +consonants +labs +smuggling +##fari +avid +##uc +truce +undead +##raith +Mostly +bracelet +Connection +Hussain +awhile +##UC +##vention +liable +genetically +##phic +Important +Wildcats +daddy +transmit +##cas +conserved +Yesterday +##lite +Nicky +Guys +Wilder +Lay +skinned +Communists +Garfield +Nearby +organizer +Loss +crafts +walkway +Chocolate +Sundance +Synod +##enham +modify +swayed +Surface +analysts +brackets +drone +parachute +smelling +Andrés +filthy +frogs +vertically +##OK +localities +marries +AHL +35th +##pian +Palazzo +cube +dismay +relocate +##на +Hear +##digo +##oxide +prefecture +converts +hangar +##oya +##ucking +Spectrum +deepened +spoiled +Keeping +##phobic +Verona +outrage +Improvement +##UI +masterpiece +slung +Calling +chant +Haute +mediated +manipulated +affirmed +##hesis +Hangul +skies +##llan +Worcestershire +##kos +mosaic +##bage +##wned +Putnam +folder +##LM +guts +noteworthy +##rada +AJ +sculpted +##iselle +##rang +recognizable +##pent +dolls +lobbying +impatiently +Se +staple +Serb +tandem +Hiroshima +thieves +##ynx +faculties +Norte +##alle +##trusion +chords +##ylon +Gareth +##lops +##escu +FIA +Levin +auspices +groin +Hui +nun +Listed +Honourable 
+Larsen +rigorous +##erer +Tonga +##pment +##rave +##track +##aa +##enary +540 +clone +sediment +esteem +sighted +cruelty +##boa +inverse +violating +Amtrak +Status +amalgamated +vertex +AR +harmless +Amir +mounts +Coronation +counseling +Audi +CO₂ +splits +##eyer +Humans +Salmon +##have +##rado +##čić +216 +takeoff +classmates +psychedelic +##gni +Gypsy +231 +Anger +GAA +ME +##nist +##tals +Lissa +Odd +baptized +Fiat +fringe +##hren +179 +elevators +perspectives +##TF +##ngle +Question +frontal +950 +thicker +Molecular +##nological +Sixteen +Baton +Hearing +commemorative +dorm +Architectural +purity +##erse +risky +Georgie +relaxing +##ugs +downed +##rar +Slim +##phy +IUCN +##thorpe +Parkinson +217 +Marley +Shipping +sweaty +Jesuits +Sindh +Janata +implying +Armenians +intercept +Ankara +commissioners +ascended +sniper +Grass +Walls +salvage +Dewey +generalized +learnt +PT +##fighter +##tech +DR +##itrus +##zza +mercenaries +slots +##burst +##finger +##nsky +Princes +Rhodesia +##munication +##strom +Fremantle +homework +ins +##Os +##hao +##uffed +Thorpe +Xiao +exquisite +firstly +liberated +technician +Oilers +Phyllis +herb +sharks +MBE +##stock +Product +banjo +##morandum +##than +Visitors +unavailable +unpublished +oxidation +Vogue +##copic +##etics +Yates +##ppard +Leiden +Trading +cottages +Principles +##Millan +##wife +##hiva +Vicar +nouns +strolled +##eorological +##eton +##science +precedent +Armand +Guido +rewards +##ilis +##tise +clipped +chick +##endra +averages +tentatively +1830s +##vos +Certainly +305 +Société +Commandant +##crats +##dified +##nka +marsh +angered +ventilation +Hutton +Ritchie +##having +Eclipse +flick +motionless +Amor +Fest +Loire +lays +##icit +##sband +Guggenheim +Luck +disrupted +##ncia +Disco +##vigator +criticisms +grins +##lons +##vial +##ody +salute +Coaches +junk +saxophonist +##eology +Uprising +Diet +##marks +chronicles +robbed +##iet +##ahi +Bohemian +magician +wavelength +Kenyan +augmented +fashionable +##ogies +Luce +F1 +Monmouth +##jos +##loop +enjoyment +exemption +Centers +##visor +Soundtrack +blinding +practitioner +solidarity +sacrificed +##oso +##cture +##riated +blended +Abd +Copyright +##nob +34th +##reak +Claudio +hectare +rotor +testify +##ends +##iably +##sume +landowner +##cess +##ckman +Eduard +Silesian +backseat +mutually +##abe +Mallory +bounds +Collective +Poet +Winkler +pertaining +scraped +Phelps +crane +flickering +Proto +bubbles +popularized +removes +##86 +Cadillac +Warfare +audible +rites +shivering +##sist +##nst +##biotic +Mon +fascist +Bali +Kathryn +ambiguous +furiously +morale +patio +Sang +inconsistent +topology +Greens +monkeys +Köppen +189 +Toy +vow +##ías +bombings +##culus +improvised +lodged +subsidiaries +garment +startling +practised +Hume +Thorn +categorized +Till +Eileen +wedge +##64 +Federico +patriotic +unlock +##oshi +badminton +Compared +Vilnius +##KE +Crimean +Kemp +decks +spaced +resolutions +sighs +##mind +Imagine +Cartoon +huddled +policemen +forwards +##rouch +equals +##nter +inspected +Charley +MG +##rte +pamphlet +Arturo +dans +scarcely +##ulton +##rvin +parental +unconstitutional +watts +Susannah +Dare +##sitive +Rowland +Valle +invalid +##ué +Detachment +acronym +Yokohama +verified +##lsson +groove +Liza +clarified +compromised +265 +##rgon +##orf +hesitant +Fruit +Application +Mathias +icons +##cell +Qin +interventions +##uron +punt +remnant +##rien +Ames +manifold +spines +floral +##zable +comrades +Fallen +orbits +Annals +hobby +Auditorium +implicated +researching +Pueblo +Ta +terminate 
+##pella +Rings +approximation +fuzzy +##ús +thriving +##ket +Conor +alarmed +etched +Cary +##rdon +Ally +##rington +Pay +mint +##hasa +##unity +##dman +##itate +Oceania +furrowed +trams +##aq +Wentworth +ventured +choreography +prototypes +Patel +mouthed +trenches +##licing +##yya +Lies +deception +##erve +##vations +Bertrand +earthquakes +##tography +Southwestern +##aja +token +Gupta +##yō +Beckett +initials +ironic +Tsar +subdued +shootout +sobbing +liar +Scandinavia +Souls +ch +therapist +trader +Regulation +Kali +busiest +##pation +32nd +Telephone +Vargas +##moky +##nose +##uge +Favorite +abducted +bonding +219 +255 +correction +mat +drown +fl +unbeaten +Pocket +Summers +Quite +rods +Percussion +##ndy +buzzing +cadet +Wilkes +attire +directory +utilities +naive +populous +Hendrix +##actor +disadvantage +1400 +Landon +Underworld +##ense +Occasionally +mercury +Davey +Morley +spa +wrestled +##vender +eclipse +Sienna +supplemented +thou +Stream +liturgical +##gall +##berries +##piration +1769 +Bucks +abandoning +##jutant +##nac +232 +venom +##31 +Roche +dotted +Currie +Córdoba +Milo +Sharif +divides +justification +prejudice +fortunate +##vide +##ābād +Rowe +inflammatory +##eld +avenue +Sources +##rimal +Messenger +Blanco +advocating +formulation +##pute +emphasizes +nut +Armored +##ented +nutrients +##tment +insistence +Martins +landowners +##RB +comparatively +headlines +snaps +##qing +Celebration +##mad +republican +##NE +Trace +##500 +1771 +proclamation +NRL +Rubin +Buzz +Weimar +##AG +199 +posthumous +##ental +##deacon +Distance +intensely +overheard +Arcade +diagonal +hazard +Giving +weekdays +##ù +Verdi +actresses +##hare +Pulling +##erries +##pores +catering +shortest +##ctors +##cure +##restle +##reta +##runch +##brecht +##uddin +Moments +senate +Feng +Prescott +##thest +218 +divisional +Bertie +sparse +surrounds +coupling +gravitational +werewolves +##lax +Rankings +##mated +##tries +Shia +##mart +##23 +##vocative +interfaces +morphology +newscast +##bide +inputs +solicitor +Olaf +cabinets +puzzles +##tains +Unified +##firmed +WA +solemn +##opy +Tito +Jaenelle +Neolithic +horseback +##ires +pharmacy +prevalence +##lint +Swami +##bush +##tudes +Philipp +mythical +divers +Scouting +aperture +progressively +##bay +##nio +bounce +Floor +##elf +Lucan +adulthood +helm +Bluff +Passage +Salvation +lemon +napkin +scheduling +##gets +Elements +Mina +Novak +stalled +##llister +Infrastructure +##nky +##tania +##uished +Katz +Norma +sucks +trusting +1765 +boilers +Accordingly +##hered +223 +Crowley +##fight +##ulo +Henrietta +##hani +pounder +surprises +##chor +##glia +Dukes +##cracy +##zier +##fs +Patriot +silicon +##VP +simulcast +telegraph +Mysore +cardboard +Len +##QL +Auguste +accordion +analytical +specify +ineffective +hunched +abnormal +Transylvania +##dn +##tending +Emilia +glittering +Maddy +##wana +1762 +External +Lecture +endorsement +Hernández +Anaheim +Ware +offences +##phorus +Plantation +popping +Bonaparte +disgusting +neared +##notes +Identity +heroin +nicely +##raverse +apron +congestion +##PR +padded +##fts +invaders +##came +freshly +Halle +endowed +fracture +ROM +##max +sediments +diffusion +dryly +##tara +Tam +Draw +Spin +Talon +Anthropology +##lify +nausea +##shirt +insert +Fresno +capitalist +indefinitely +apples +Gift +scooped +60s +Cooperative +mistakenly +##lover +murmur +##iger +Equipment +abusive +orphanage +##9th +##lterweight +##unda +Baird +ant +saloon +33rd +Chesapeake +##chair +##sound +##tend +chaotic +pornography +brace +##aret +heiress +SSR +resentment 
+Arbor +headmaster +##uren +unlimited +##with +##jn +Bram +Ely +Pokémon +pivotal +##guous +Database +Marta +Shine +stumbling +##ovsky +##skin +Henley +Polk +functioned +##layer +##pas +##udd +##MX +blackness +cadets +feral +Damian +##actions +2D +##yla +Apocalypse +##aic +inactivated +##china +##kovic +##bres +destroys +nap +Macy +sums +Madhya +Wisdom +rejects +##amel +60th +Cho +bandwidth +##sons +##obbing +##orama +Mutual +shafts +##estone +##rsen +accord +replaces +waterfront +##gonal +##rida +convictions +##ays +calmed +suppliers +Cummings +GMA +fearful +Scientist +Sinai +examines +experimented +Netflix +Enforcement +Scarlett +##lasia +Healthcare +##onte +Dude +inverted +##36 +##regation +##lidae +Munro +##angay +Airbus +overlapping +Drivers +lawsuits +bodily +##udder +Wanda +Effects +Fathers +##finery +##islav +Ridley +observatory +pod +##utrition +Electricity +landslide +##mable +##zoic +##imator +##uration +Estates +sleepy +Nickelodeon +steaming +irony +schedules +snack +spikes +Hmm +##nesia +##bella +##hibit +Greenville +plucked +Harald +##ono +Gamma +infringement +roaring +deposition +##pol +##orum +660 +seminal +passports +engagements +Akbar +rotated +##bina +##gart +Hartley +##lown +##truct +uttered +traumatic +Dex +##ôme +Holloway +MV +apartheid +##nee +Counter +Colton +OR +245 +Spaniards +Regency +Schedule +scratching +squads +verify +##alk +keyboardist +rotten +Forestry +aids +commemorating +##yed +##érie +Sting +##elly +Dai +##fers +##berley +##ducted +Melvin +cannabis +glider +##enbach +##rban +Costello +Skating +cartoonist +AN +audit +##pectator +distributing +226 +312 +interpreter +header +Alternatively +##ases +smug +##kumar +cabins +remastered +Connolly +Kelsey +LED +tentative +Check +Sichuan +shaved +##42 +Gerhard +Harvest +inward +##rque +Hopefully +hem +##34 +Typical +binds +wrath +Woodstock +forcibly +Fergus +##charged +##tured +prepares +amenities +penetration +##ghan +coarse +##oned +enthusiasts +##av +##twined +fielded +##cky +Kiel +##obia +470 +beers +tremble +youths +attendees +##cademies +##sex +Macon +communism +dir +##abi +Lennox +Wen +differentiate +jewel +##SO +activate +assert +laden +unto +Gillespie +Guillermo +accumulation +##GM +NGO +Rosenberg +calculating +drastically +##omorphic +peeled +Liège +insurgents +outdoors +##enia +Aspen +Sep +awakened +##eye +Consul +Maiden +insanity +##brian +furnace +Colours +distributions +longitudinal +syllables +##scent +Martian +accountant +Atkins +husbands +sewage +zur +collaborate +highlighting +##rites +##PI +colonization +nearer +##XT +dunes +positioning +Ku +multitude +luxurious +Volvo +linguistics +plotting +squared +##inder +outstretched +##uds +Fuji +ji +##feit +##ahu +##loat +##gado +##luster +##oku +América +##iza +Residents +vine +Pieces +DD +Vampires +##ová +smoked +harshly +spreads +##turn +##zhi +betray +electors +##settled +Considering +exploits +stamped +Dusty +enraged +Nairobi +##38 +intervened +##luck +orchestras +##lda +Hereford +Jarvis +calf +##itzer +##CH +salesman +Lovers +cigar +Angelica +doomed +heroine +##tible +Sanford +offenders +##ulously +articulated +##oam +Emanuel +Gardiner +Edna +Shu +gigantic +##stable +Tallinn +coasts +Maker +ale +stalking +##oga +##smus +lucrative +southbound +##changing +Reg +##lants +Schleswig +discount +grouping +physiological +##OH +##sun +Galen +assurance +reconcile +rib +scarlet +Thatcher +anarchist +##oom +Turnpike +##ceding +cocktail +Sweeney +Allegheny +concessions +oppression +reassuring +##poli +##ticus +##TR +##VI +##uca +##zione +directional +strikeouts 
+Beneath +Couldn +Kabul +##national +hydroelectric +##jit +Desire +##riot +enhancing +northbound +##PO +Ok +Routledge +volatile +Bernardo +Python +333 +ample +chestnut +automobiles +##innamon +##care +##hering +BWF +salaries +Turbo +acquisitions +##stituting +strengths +pilgrims +Ponce +Pig +Actors +Beard +sanitation +##RD +##mett +Telecommunications +worms +##idas +Juno +Larson +Ventura +Northeastern +weighs +Houghton +collaborating +lottery +##rano +Wonderland +gigs +##lmer +##zano +##edd +##nife +mixtape +predominant +tripped +##ruly +Alexei +investing +Belgarath +Brasil +hiss +##crat +##xham +Côte +560 +kilometer +##cological +analyzing +##As +engined +listener +##cakes +negotiation +##hisky +Santana +##lemma +IAAF +Seneca +skeletal +Covenant +Steiner +##lev +##uen +Neptune +retention +##upon +Closing +Czechoslovak +chalk +Navarre +NZ +##IG +##hop +##oly +##quatorial +##sad +Brewery +Conflict +Them +renew +turrets +disagree +Petra +Slave +##reole +adjustment +##dela +##regard +##sner +framing +stature +##rca +##sies +##46 +##mata +Logic +inadvertently +naturalist +spheres +towering +heightened +Dodd +rink +##fle +Keyboards +bulb +diver +ul +##tsk +Exodus +Deacon +España +Canadiens +oblique +thud +reigned +rug +Whitman +Dash +##iens +Haifa +pets +##arland +manually +dart +##bial +Sven +textiles +subgroup +Napier +graffiti +revolver +humming +Babu +protector +typed +Provinces +Sparta +Wills +subjective +##rella +temptation +##liest +FL +Sadie +manifest +Guangdong +Transfer +entertain +eve +recipes +##33 +Benedictine +retailer +##dence +establishes +##cluded +##rked +Ursula +##ltz +##lars +##rena +qualifiers +##curement +colt +depictions +##oit +Spiritual +differentiation +staffed +transitional +##lew +1761 +fatalities +##oan +Bayern +Northamptonshire +Weeks +##CU +Fife +capacities +hoarse +##latt +##ة +evidenced +##HD +##ographer +assessing +evolve +hints +42nd +streaked +##lve +Yahoo +##estive +##rned +##zas +baggage +Elected +secrecy +##champ +Character +Pen +Decca +cape +Bernardino +vapor +Dolly +counselor +##isers +Benin +##khar +##CR +notch +##thus +##racy +bounty +lend +grassland +##chtenstein +##dating +pseudo +golfer +simplest +##ceive +Lucivar +Triumph +dinosaur +dinosaurs +##šić +Seahawks +##nco +resorts +reelected +1766 +reproduce +universally +##OA +ER +tendencies +Consolidated +Massey +Tasmanian +reckless +##icz +##ricks +1755 +questionable +Audience +##lates +preseason +Quran +trivial +Haitian +Freeway +dialed +Appointed +Heard +ecosystems +##bula +hormones +Carbon +Rd +##arney +##working +Christoph +presiding +pu +##athy +Morrow +Dar +ensures +posing +remedy +EA +disclosed +##hui +##rten +rumours +surveying +##ficiency +Aziz +Jewel +Plays +##smatic +Bernhard +Christi +##eanut +##friend +jailed +##dr +govern +neighbour +butler +Acheron +murdering +oils +mac +Editorial +detectives +bolts +##ulon +Guitars +malaria +36th +Pembroke +Opened +##hium +harmonic +serum +##sio +Franks +fingernails +##gli +culturally +evolving +scalp +VP +deploy +uploaded +mater +##evo +Jammu +Spa +##icker +flirting +##cursions +Heidi +Majority +sprawled +##alytic +Zheng +bunker +##lena +ST +##tile +Jiang +ceilings +##ently +##ols +Recovery +dire +##good +Manson +Honestly +Montréal +1764 +227 +quota +Lakshmi +incentive +Accounting +##cilla +Eureka +Reaper +buzzed +##uh +courtroom +dub +##mberg +KC +Gong +Theodor +Académie +NPR +criticizing +protesting +##pired +##yric +abuses +fisheries +##minated +1767 +yd +Gemini +Subcommittee +##fuse +Duff +Wasn +Wight +cleaner +##tite +planetary +Survivor +Zionist 
+mounds +##rary +landfall +disruption +yielding +##yana +bids +unidentified +Garry +Ellison +Elmer +Fishing +Hayward +demos +modelling +##anche +##stick +caressed +entertained +##hesion +piers +Crimea +##mass +WHO +boulder +trunks +1640 +Biennale +Palestinians +Pursuit +##udes +Dora +contender +##dridge +Nanjing +##ezer +##former +##ibel +Whole +proliferation +##tide +##weiler +fuels +predictions +##ente +##onium +Filming +absorbing +Ramón +strangled +conveyed +inhabit +prostitutes +recession +bonded +clinched +##eak +##iji +##edar +Pleasure +Rite +Christy +Therapy +sarcasm +##collegiate +hilt +probation +Sarawak +coefficients +underworld +biodiversity +SBS +groom +brewing +dungeon +##claiming +Hari +turnover +##ntina +##omer +##opped +orthodox +styling +##tars +##ulata +priced +Marjorie +##eley +##abar +Yong +##tically +Crambidae +Hernandez +##ego +##rricular +##ark +##lamour +##llin +##augh +##tens +Advancement +Loyola +##4th +##hh +goin +marshes +Sardinia +##ša +Ljubljana +Singing +suspiciously +##hesive +Félix +Regarding +flap +stimulation +##raught +Apr +Yin +gaping +tighten +skier +##itas +##lad +##rani +264 +Ashes +Olson +Problems +Tabitha +##rading +balancing +sunrise +##ease +##iture +##ritic +Fringe +##iciency +Inspired +Linnaeus +PBA +disapproval +##kles +##rka +##tails +##urger +Disaster +Laboratories +apps +paradise +Aero +Came +sneaking +Gee +Beacon +ODI +commodity +Ellington +graphical +Gretchen +spire +##skaya +##trine +RTÉ +efficacy +plc +tribunal +##ytic +downhill +flu +medications +##kaya +widen +Sunrise +##nous +distinguishing +pawn +##BO +##irn +##ssing +##ν +Easton +##vila +Rhineland +##aque +defect +##saurus +Goose +Ju +##classified +Middlesbrough +shaping +preached +1759 +##erland +Ein +Hailey +musicals +##altered +Galileo +Hilda +Fighters +Lac +##ometric +295 +Leafs +Milano +##lta +##VD +##ivist +penetrated +Mask +Orchard +plaintiff +##icorn +Yvonne +##fred +outfielder +peek +Collier +Caracas +repealed +Bois +dell +restrict +Dolores +Hadley +peacefully +##LL +condom +Granny +Orders +sabotage +##toon +##rings +compass +marshal +gears +brigadier +dye +Yunnan +communicating +donate +emerald +vitamin +administer +Fulham +##classical +##llas +Buckinghamshire +Held +layered +disclosure +Akira +programmer +shrimp +Crusade +##ximal +Luzon +bakery +##cute +Garth +Citadel +uniquely +Curling +info +mum +Para +##ști +sleek +##ione +hey +Lantern +mesh +##lacing +##lizzard +##gade +prosecuted +Alba +Gilles +greedy +twists +##ogged +Viper +##kata +Appearances +Skyla +hymns +##pelled +curving +predictable +Grave +Watford +##dford +##liptic +##vary +Westwood +fluids +Models +statutes +##ynamite +1740 +##culate +Framework +Johanna +##gression +Vuelta +imp +##otion +##raga +##thouse +Ciudad +festivities +##love +Beyoncé +italics +##vance +DB +##haman +outs +Singers +##ueva +##urning +##51 +##ntiary +##mobile +285 +Mimi +emeritus +nesting +Keeper +Ways +##onal +##oux +Edmond +MMA +##bark +##oop +Hampson +##ñez +##rets +Gladstone +wreckage +Pont +Playboy +reluctance +##ná +apprenticeship +preferring +Value +originate +##wei +##olio +Alexia +##rog +Parachute +jammed +stud +Eton +vols +##ganized +1745 +straining +creep +indicators +##mán +humiliation +hinted +alma +tanker +##egation +Haynes +Penang +amazement +branched +rumble +##ddington +archaeologists +paranoid +expenditure +Absolutely +Musicians +banished +##fining +baptism +Joker +Persons +hemisphere +##tieth +##ück +flock +##xing +lbs +Kung +crab +##dak +##tinent +Regulations +barrage +parcel +##ós +Tanaka +##rsa +Natalia +Voyage 
+flaws +stepfather +##aven +##eological +Botanical +Minsk +##ckers +Cinderella +Feast +Loving +Previous +Shark +##took +barrister +collaborators +##nnes +Croydon +Graeme +Juniors +##7th +##formation +##ulos +##ák +£2 +##hwa +##rove +##ș +Whig +demeanor +Otago +##TH +##ooster +Faber +instructors +##ahl +##bha +emptied +##schen +saga +##lora +exploding +##rges +Crusaders +##caster +##uations +streaks +CBN +bows +insights +ka +1650 +diversion +LSU +Wingspan +##liva +Response +sanity +Producers +imitation +##fine +Lange +Spokane +splash +weed +Siberian +magnet +##rocodile +capitals +##rgus +swelled +Rani +Bells +Silesia +arithmetic +rumor +##hampton +favors +Weird +marketplace +##orm +tsunami +unpredictable +##citation +##ferno +Tradition +postwar +stench +succeeds +##roup +Anya +Users +oversized +totaling +pouch +##nat +Tripoli +leverage +satin +##cline +Bathurst +Lund +Niall +thereof +##quid +Bangor +barge +Animated +##53 +##alan +Ballard +utilizes +Done +ballistic +NDP +gatherings +##elin +##vening +Rockets +Sabrina +Tamara +Tribal +WTA +##citing +blinded +flux +Khalid +Una +prescription +##jee +Parents +##otics +##food +Silicon +cured +electro +perpendicular +intimacy +##rified +Lots +##ceiving +##powder +incentives +McKenna +##arma +##ounced +##rinkled +Alzheimer +##tarian +262 +Seas +##cam +Novi +##hout +##morphic +##hazar +##hul +##nington +Huron +Bahadur +Pirate +pursed +Griffiths +indicted +swap +refrain +##mulating +Lal +stomped +##Pad +##mamoto +Reef +disposed +plastered +weeping +##rato +Minas +hourly +tumors +##ruising +Lyle +##yper +##sol +Odisha +credibility +##Dowell +Braun +Graphic +lurched +muster +##nex +##ührer +##connected +##iek +##ruba +Carthage +Peck +maple +bursting +##lava +Enrico +rite +##jak +Moment +##skar +Styx +poking +Spartan +##urney +Hepburn +Mart +Titanic +newsletter +waits +Mecklenburg +agitated +eats +##dious +Chow +matrices +Maud +##sexual +sermon +234 +##sible +##lung +Qi +cemeteries +mined +sprinter +##ckett +coward +##gable +##hell +##thin +##FB +Contact +##hay +rainforest +238 +Hemisphere +boasts +##nders +##verance +##kat +Convent +Dunedin +Lecturer +lyricist +##bject +Iberian +comune +##pphire +chunk +##boo +thrusting +fore +informing +pistols +echoes +Tier +battleships +substitution +##belt +moniker +##charya +##lland +Thoroughbred +38th +##01 +##tah +parting +tongues +Cale +##seau +Unionist +modular +celebrates +preview +steamed +Bismarck +302 +737 +vamp +##finity +##nbridge +weaknesses +husky +##berman +absently +##icide +Craven +tailored +Tokugawa +VIP +syntax +Kazan +captives +doses +filtered +overview +Cleopatra +Conversely +stallion +Burger +Suez +Raoul +th +##reaves +Dickson +Nell +Rate +anal +colder +##sław +Arm +Semitic +##green +reflective +1100 +episcopal +journeys +##ours +##pository +##dering +residue +Gunn +##27 +##ntial +##crates +##zig +Astros +Renee +Emerald +##vili +connectivity +undrafted +Sampson +treasures +##kura +##theon +##vern +Destroyer +##iable +##ener +Frederic +briefcase +confinement +Bree +##WD +Athena +233 +Padres +Thom +speeding +##hali +Dental +ducks +Putin +##rcle +##lou +Asylum +##usk +dusk +pasture +Institutes +ONE +jack +##named +diplomacy +Intercontinental +Leagues +Towns +comedic +premature +##edic +##mona +##ories +trimmed +Charge +Cream +guarantees +Dmitry +splashed +Philosophical +tramway +##cape +Maynard +predatory +redundant +##gratory +##wry +sobs +Burgundy +edible +outfits +Handel +dazed +dangerously +idle +Operational +organizes +##sional +blackish +broker +weddings +##halt +Becca +McGee +##gman 
+protagonists +##pelling +Keynes +aux +stumble +##ordination +Nokia +reel +sexes +##woods +##pheric +##quished +##voc +##oir +##pathian +##ptus +##sma +##tating +##ê +fulfilling +sheath +##ayne +Mei +Ordinary +Collin +Sharpe +grasses +interdisciplinary +##OX +Background +##ignment +Assault +transforms +Hamas +Serge +ratios +##sik +swaying +##rcia +Rosen +##gant +##versible +cinematographer +curly +penny +Kamal +Mellon +Sailor +Spence +phased +Brewers +amassed +Societies +##ropriations +##buted +mythological +##SN +##byss +##ired +Sovereign +preface +Parry +##ife +altitudes +crossings +##28 +Crewe +southernmost +taut +McKinley +##owa +##tore +254 +##ckney +compiling +Shelton +##hiko +228 +Poll +Shepard +Labs +Pace +Carlson +grasping +##ов +Delaney +Winning +robotic +intentional +shattering +##boarding +##git +##grade +Editions +Reserves +ignorant +proposing +##hanna +cutter +Mongols +NW +##eux +Codex +Cristina +Daughters +Rees +forecast +##hita +NGOs +Stations +Beaux +Erwin +##jected +##EX +##trom +Schumacher +##hrill +##rophe +Maharaja +Oricon +##sul +##dynamic +##fighting +Ce +Ingrid +rumbled +Prospect +stairwell +Barnard +applause +complementary +##uba +grunt +##mented +Bloc +Carleton +loft +noisy +##hey +490 +contrasted +##inator +##rief +##centric +##fica +Cantonese +Blanc +Lausanne +License +artifact +##ddin +rot +Amongst +Prakash +RF +##topia +milestone +##vard +Winters +Mead +churchyard +Lulu +estuary +##ind +Cha +Infinity +Meadow +subsidies +##valent +CONCACAF +Ching +medicinal +navigate +Carver +Twice +abdominal +regulating +RB +toilets +Brewer +weakening +ambushed +##aut +##vignon +Lansing +unacceptable +reliance +stabbing +##mpo +##naire +Interview +##ested +##imed +bearings +##lts +Rashid +##iation +authenticity +vigorous +##frey +##uel +biologist +NFC +##rmaid +##wash +Makes +##aunt +##steries +withdrawing +##qa +Buccaneers +bleed +inclination +stain +##ilo +##ppel +Torre +privileged +cereal +trailers +alumnus +neon +Cochrane +Mariana +caress +##47 +##ients +experimentation +Window +convict +signaled +##YP +rower +Pharmacy +interacting +241 +Strings +dominating +kinase +Dinamo +Wire +pains +sensations +##suse +Twenty20 +##39 +spotlight +##hend +elemental +##pura +Jameson +Swindon +honoring +pained +##ediatric +##lux +Psychological +assemblies +ingredient +Martial +Penguins +beverage +Monitor +mysteries +##ION +emigration +mused +##sique +crore +AMC +Funding +Chinatown +Establishment +Finalist +enjoyable +1756 +##mada +##rams +NO +newborn +CS +comprehend +Invisible +Siemens +##acon +246 +contraction +##volving +##moration +##rok +montane +##ntation +Galloway +##llow +Verity +directorial +pearl +Leaning +##rase +Fernandez +swallowing +Automatic +Madness +haunting +paddle +##UE +##rrows +##vies +##zuki +##bolt +##iber +Fender +emails +paste +##lancing +hind +homestead +hopeless +##dles +Rockies +garlic +fatty +shrieked +##ismic +Gillian +Inquiry +Schultz +XML +##cius +##uld +Domesday +grenades +northernmost +##igi +Tbilisi +optimistic +##poon +Refuge +stacks +Bose +smash +surreal +Nah +Straits +Conquest +##roo +##weet +##kell +Gladys +CH +##lim +##vitation +Doctorate +NRHP +knocks +Bey +Romano +##pile +242 +Diamonds +strides +eclectic +Betsy +clade +##hady +##leashed +dissolve +moss +Suburban +silvery +##bria +tally +turtles +##uctive +finely +industrialist +##nary +Ernesto +oz +pact +loneliness +##hov +Tomb +multinational +risked +Layne +USL +ne +##quiries +Ad +Message +Kamen +Kristen +reefs +implements +##itative +educators +garments +gunshot +##essed +##rve +Montevideo 
+vigorously +Stamford +assemble +packaged +##same +état +Viva +paragraph +##eter +##wire +Stick +Navajo +MCA +##pressing +ensembles +ABA +##zor +##llus +Partner +raked +##BI +Iona +thump +Celeste +Kiran +##iscovered +##rith +inflammation +##arel +Features +loosened +##yclic +Deluxe +Speak +economical +Frankenstein +Picasso +showcased +##zad +##eira +##planes +##linear +##overs +monsoon +prosecutors +slack +Horses +##urers +Angry +coughing +##truder +Questions +##tō +##zak +challenger +clocks +##ieving +Newmarket +##acle +cursing +stimuli +##mming +##qualified +slapping +##vasive +narration +##kini +Advertising +CSI +alliances +mixes +##yes +covert +amalgamation +reproduced +##ardt +##gis +1648 +id +Annette +Boots +Champagne +Brest +Daryl +##emon +##jou +##llers +Mean +adaptive +technicians +##pair +##usal +Yoga +fronts +leaping +Jul +harvesting +keel +##44 +petitioned +##lved +yells +Endowment +proponent +##spur +##tised +##zal +Homes +Includes +##ifer +##oodoo +##rvette +awarding +mirrored +ransom +Flute +outlook +##ganj +DVDs +Sufi +frontman +Goddard +barren +##astic +Suicide +hillside +Harlow +Lau +notions +Amnesty +Homestead +##irt +GE +hooded +umpire +mustered +Catch +Masonic +##erd +Dynamics +Equity +Oro +Charts +Mussolini +populace +muted +accompaniment +##lour +##ndes +ignited +##iferous +##laced +##atch +anguish +registry +##tub +##hards +##neer +251 +Hooker +uncomfortably +##6th +##ivers +Catalina +MiG +giggling +1754 +Dietrich +Kaladin +pricing +##quence +Sabah +##lving +##nical +Gettysburg +Vita +Telecom +Worst +Palais +Pentagon +##brand +##chichte +Graf +unnatural +1715 +bio +##26 +Radcliffe +##utt +chatting +spices +##aus +untouched +##eper +Doll +turkey +Syndicate +##rlene +##JP +##roots +Como +clashed +modernization +1757 +fantasies +##iating +dissipated +Sicilian +inspect +sensible +reputed +##final +Milford +poised +RC +metabolic +Tobacco +Mecca +optimization +##heat +lobe +rabbits +NAS +geologist +##liner +Kilda +carpenter +nationalists +##brae +summarized +##venge +Designer +misleading +beamed +##meyer +Matrix +excuses +##aines +##biology +401 +Moose +drafting +Sai +##ggle +Comprehensive +dripped +skate +##WI +##enan +##ruk +narrower +outgoing +##enter +##nounce +overseen +##structure +travellers +banging +scarred +##thing +##arra +Ebert +Sometime +##nated +BAFTA +Hurricanes +configurations +##MLL +immortality +##heus +gothic +##mpest +clergyman +viewpoint +Maxim +Instituto +emitted +quantitative +1689 +Consortium +##rsk +Meat +Tao +swimmers +Shaking +Terence +mainline +##linity +Quantum +##rogate +Nair +banquet +39th +reprised +lagoon +subdivisions +synonymous +incurred +password +sprung +##vere +Credits +Petersen +Faces +##vu +statesman +Zombie +gesturing +##going +Sergey +dormant +possessive +totals +southward +Ángel +##odies +HM +Mariano +Ramirez +Wicked +impressions +##Net +##cap +##ème +Transformers +Poker +RIAA +Redesignated +##chuk +Harcourt +Peña +spacious +tinged +alternatively +narrowing +Brigham +authorization +Membership +Zeppelin +##amed +Handball +steer +##orium +##rnal +##rops +Committees +endings +##MM +##yung +ejected +grams +##relli +Birch +Hilary +Stadion +orphan +clawed +##kner +Motown +Wilkins +ballads +outspoken +##ancipation +##bankment +##cheng +Advances +harvested +novelty +ineligible +oversees +##´s +obeyed +inevitably +Kingdoms +burying +Fabian +relevance +Tatiana +##MCA +sarcastic +##onda +Akron +229 +sandwiches +Adobe +Maddox +##azar +Hunting +##onized +Smiling +##tology +Juventus +Leroy +Poets +attach +lo +##rly +##film +Structure +##igate 
+olds +projections +SMS +outnumbered +##tase +judiciary +paramilitary +playfully +##rsing +##tras +Chico +Vin +informally +abandonment +##russ +Baroness +injuring +octagonal +deciduous +##nea +##olm +Hz +Norwood +poses +Marissa +alerted +willed +##KS +Dino +##ddler +##vani +Barbie +Thankfully +625 +bicycles +shimmering +##tinuum +##wolf +Chesterfield +##idy +##urgency +Knowles +sweetly +Ventures +##ponents +##valence +Darryl +Powerplant +RAAF +##pec +Kingsley +Parramatta +penetrating +spectacle +##inia +Marlborough +residual +compatibility +hike +Underwood +depleted +ministries +##odus +##ropriation +rotting +Faso +##inn +Happiness +Lille +Suns +cookie +rift +warmly +##lvin +Bugs +Gotham +Gothenburg +Properties +##seller +##ubi +Created +MAC +Noelle +Requiem +Ulysses +##ails +franchises +##icious +##rwick +celestial +kinetic +720 +STS +transmissions +amplitude +forums +freeing +reptiles +tumbling +##continent +##rising +##tropy +physiology +##uster +Loves +bodied +neutrality +Neumann +assessments +Vicky +##hom +hampered +##uku +Custom +timed +##eville +##xious +elastic +##section +rig +stilled +shipment +243 +artworks +boulders +Bournemouth +##hly +##LF +##linary +rumored +##bino +##drum +Chun +Freiburg +##dges +Equality +252 +Guadalajara +##sors +##taire +Roach +cramped +##ultural +Logistics +Punch +fines +Lai +caravan +##55 +lame +Collector +pausing +315 +migrant +hawk +signalling +##erham +##oughs +Demons +surfing +Rana +insisting +Wien +adolescent +##jong +##rera +##umba +Regis +brushes +##iman +residues +storytelling +Consider +contrasting +regeneration +##elling +##hlete +afforded +reactors +costing +##biotics +##gat +##евич +chanting +secondly +confesses +##ikos +##uang +##ronological +##− +Giacomo +##eca +vaudeville +weeds +rejecting +revoked +affluent +fullback +progresses +geologic +proprietor +replication +gliding +recounted +##bah +##igma +Flow +ii +newcomer +##lasp +##miya +Candace +fractured +interiors +confidential +Inverness +footing +##robe +Coordinator +Westphalia +jumper +##chism +dormitory +##gno +281 +acknowledging +leveled +##éra +Algiers +migrate +Frog +Rare +##iovascular +##urous +DSO +nomadic +##iera +woken +lifeless +##graphical +##ifications +Dot +Sachs +crow +nmi +Tacoma +Weight +mushroom +RS +conditioned +##zine +Tunisian +altering +##mizing +Handicap +Patti +Monsieur +clicking +gorge +interrupting +##powerment +drawers +Serra +##icides +Specialist +##itte +connector +worshipped +##ask +consoles +tags +##iler +glued +##zac +fences +Bratislava +honeymoon +313 +A2 +disposition +Gentleman +Gilmore +glaciers +##scribed +Calhoun +convergence +Aleppo +shortages +##43 +##orax +##worm +##codes +##rmal +neutron +##ossa +Bloomberg +Salford +periodicals +##ryan +Slayer +##ynasties +credentials +##tista +surveyor +File +stinging +unnoticed +Medici +ecstasy +espionage +Jett +Leary +circulating +bargaining +concerto +serviced +37th +HK +##fueling +Delilah +Marcia +graded +##join +Kaplan +feasible +##nale +##yt +Burnley +dreadful +ministerial +Brewster +Judah +##ngled +##rrey +recycled +Iroquois +backstage +parchment +##numbered +Kern +Motorsports +Organizations +##mini +Seems +Warrington +Dunbar +Ezio +##eor +paralyzed +Ara +yeast +##olis +cheated +reappeared +banged +##ymph +##dick +Lyndon +glide +Mat +##natch +Hotels +Household +parasite +irrelevant +youthful +##smic +##tero +##anti +2d +Ignacio +squash +##nets +shale +##اد +Abrams +##oese +assaults +##dier +##otte +Swamp +287 +Spurs +##economic +Fargo +auditioned +##mé +Haas +une +abbreviation +Turkic +##tisfaction 
+favorites +specials +##lial +Enlightenment +Burkina +##vir +Comparative +Lacrosse +elves +##lerical +##pear +Borders +controllers +##villa +excelled +##acher +##varo +camouflage +perpetual +##ffles +devoid +schooner +##bered +##oris +Gibbons +Lia +discouraged +sue +##gnition +Excellent +Layton +noir +smack +##ivable +##evity +##lone +Myra +weaken +weaponry +##azza +Shake +backbone +Certified +clown +occupational +caller +enslaved +soaking +Wexford +perceive +shortlisted +##pid +feminism +Bari +Indie +##avelin +##ldo +Hellenic +Hundreds +Savings +comedies +Honors +Mohawk +Told +coded +Incorporated +hideous +trusts +hose +Calais +Forster +Gabon +Internationale +AK +Colour +##UM +##heist +McGregor +localized +##tronomy +Darrell +##iara +squirrel +freaked +##eking +##manned +##ungen +radiated +##dua +commence +Donaldson +##iddle +MR +SAS +Tavern +Teenage +admissions +Instruments +##ilizer +Konrad +contemplated +##ductor +Jing +Reacher +recalling +Dhabi +emphasizing +illumination +##tony +legitimacy +Goethe +Ritter +McDonnell +Polar +Seconds +aspiring +derby +tunic +##rmed +outlines +Changing +distortion +##cter +Mechanics +##urly +##vana +Egg +Wolverine +Stupid +centralized +knit +##Ms +Saratoga +Ogden +storylines +##vres +lavish +beverages +##grarian +Kyrgyzstan +forcefully +superb +Elm +Thessaloniki +follower +Plants +slang +trajectory +Nowadays +Bengals +Ingram +perch +coloring +carvings +doubtful +##aph +##gratulations +##41 +Curse +253 +nightstand +Campo +Meiji +decomposition +##giri +McCormick +Yours +##amon +##bang +Texans +injunction +organise +periodical +##peculative +oceans +##aley +Success +Lehigh +##guin +1730 +Davy +allowance +obituary +##tov +treasury +##wayne +euros +readiness +systematically +##stered +##igor +##xen +##cliff +##lya +Send +##umatic +Celtics +Judiciary +425 +propagation +rebellious +##ims +##lut +Dal +##ayman +##cloth +Boise +pairing +Waltz +torment +Hatch +aspirations +diaspora +##hame +Rank +237 +Including +Muir +chained +toxicity +Université +##aroo +Mathews +meadows +##bio +Editing +Khorasan +##them +##ahn +##bari +##umes +evacuate +##sium +gram +kidnap +pinning +##diation +##orms +beacon +organising +McGrath +##ogist +Qur +Tango +##ceptor +##rud +##cend +##cie +##jas +##sided +Tuscany +Venture +creations +exhibiting +##rcerer +##tten +Butcher +Divinity +Pet +Whitehead +falsely +perished +handy +Moines +cyclists +synthesizers +Mortal +notoriety +##ronic +Dialogue +expressive +uk +Nightingale +grimly +vineyards +Driving +relentless +compiler +##district +##tuated +Hades +medicines +objection +Answer +Soap +Chattanooga +##gogue +Haryana +Parties +Turtle +##ferred +explorers +stakeholders +##aar +##rbonne +tempered +conjecture +##tee +##hur +Reeve +bumper +stew +##church +##generate +##ilitating +##chanized +##elier +##enne +translucent +##lows +Publisher +evangelical +inherit +##rted +247 +SmackDown +bitterness +lesions +##worked +mosques +wed +##lashes +Ng +Rebels +booking +##nail +Incident +Sailing +yo +confirms +Chaplin +baths +##kled +modernist +pulsing +Cicero +slaughtered +boasted +##losure +zipper +##hales +aristocracy +halftime +jolt +unlawful +Marching +sustaining +Yerevan +bracket +ram +Markus +##zef +butcher +massage +##quisite +Leisure +Pizza +collapsing +##lante +commentaries +scripted +##disciplinary +##sused +eroded +alleging +vase +Chichester +Peacock +commencement +dice +hotter +poisonous +executions +##occo +frost +fielding +vendor +Counts +Troops +maize +Divisional +analogue +shadowy +Nuevo +Ville +radiating +worthless +Adriatic +Buy +blaze 
+brutally +horizontally +longed +##matical +federally +Rolf +Root +exclude +rag +agitation +Lounge +astonished +##wirl +Impossible +transformations +##IVE +##ceded +##slav +downloaded +fucked +Egyptians +Welles +##ffington +U2 +befriended +radios +##jid +archaic +compares +##ccelerator +##imated +##tosis +Hung +Scientists +Thousands +geographically +##LR +Macintosh +fluorescent +##ipur +Wehrmacht +##BR +##firmary +Chao +##ague +Boyer +##grounds +##hism +##mento +##taining +infancy +##cton +510 +Boca +##loy +1644 +ben +dong +stresses +Sweat +expressway +graders +ochreous +nets +Lawn +thirst +Uruguayan +satisfactory +##tracts +baroque +rusty +##ław +Shen +Gdańsk +chickens +##graving +Hodge +Papal +SAT +bearer +##ogo +##rger +merits +Calendar +Highest +Skills +##ortex +Roberta +paradigm +recounts +frigates +swamps +unitary +##oker +balloons +Hawthorne +Muse +spurred +advisors +reclaimed +stimulate +fibre +pat +repeal +##dgson +##iar +##rana +anthropologist +descends +flinch +reared +##chang +##eric +##lithic +commissioning +##cumenical +##lume +##rchen +Wolff +##tsky +Eurasian +Nepali +Nightmare +ZIP +playback +##latz +##vington +Warm +##75 +Martina +Rollins +Saetan +Variations +sorting +##م +530 +Joaquin +Ptolemy +thinner +##iator +##pticism +Cebu +Highlanders +Linden +Vanguard +##SV +##mor +##ulge +ISSN +cartridges +repression +Étienne +311 +Lauderdale +commodities +null +##rb +1720 +gearbox +##reator +Ang +Forgotten +dubious +##rls +##dicative +##phate +Groove +Herrera +##çais +Collections +Maximus +##published +Fell +Qualification +filtering +##tized +Roe +hazards +##37 +##lative +##tröm +Guadalupe +Tajikistan +Preliminary +fronted +glands +##paper +##iche +##iding +Cairns +rallies +Location +seduce +##mple +BYU +##itic +##FT +Carmichael +Prentice +songwriters +forefront +Physicians +##rille +##zee +Preparatory +##cherous +UV +##dized +Navarro +misses +##nney +Inland +resisting +##sect +Hurt +##lino +galaxies +##raze +Institutions +devote +##lamp +##ciating +baron +##bracing +Hess +operatic +##CL +##ος +Chevalier +Guiana +##lattered +Fed +##cuted +##smo +Skull +denies +236 +Waller +##mah +Sakura +mole +nominate +sermons +##bering +widowed +##röm +Cavendish +##struction +Nehru +Revelation +doom +Gala +baking +Nr +Yourself +banning +Individuals +Sykes +orchestrated +630 +Phone +steered +620 +specialising +starvation +##AV +##alet +##upation +seductive +##jects +##zure +Tolkien +Benito +Wizards +Submarine +dictator +Duo +Caden +approx +basins +##nc +shrink +##icles +##sponsible +249 +mit +outpost +##bayashi +##rouse +##tl +Jana +Lombard +RBIs +finalized +humanities +##function +Honorable +tomato +##iot +Pie +tee +##pect +Beaufort +Ferris +bucks +##graduate +##ocytes +Directory +anxiously +##nating +flanks +##Ds +virtues +##believable +Grades +criterion +manufactures +sourced +##balt +##dance +##tano +Ying +##BF +##sett +adequately +blacksmith +totaled +trapping +expanse +Historia +Worker +Sense +ascending +housekeeper +##oos +Crafts +Resurrection +##verty +encryption +##aris +##vat +##pox +##runk +##iability +gazes +spying +##ths +helmets +wired +##zophrenia +Cheung +WR +downloads +stereotypes +239 +Lucknow +bleak +Bragg +hauling +##haft +prohibit +##ermined +##castle +barony +##hta +Typhoon +antibodies +##ascism +Hawthorn +Kurdistan +Minority +Gorge +Herr +appliances +disrupt +Drugs +Lazarus +##ilia +##ryo +##tany +Gotta +Masovian +Roxy +choreographed +##rissa +turbulent +##listed +Anatomy +exiting +##det +##isław +580 +Kaufman +sage +##apa +Symposium +##rolls +Kaye +##ptera +##rocław 
+jerking +##menclature +Guo +M1 +resurrected +trophies +##lard +Gathering +nestled +serpent +Dow +reservoirs +Claremont +arbitration +chronicle +eki +##arded +##zers +##mmoth +Congregational +Astronomical +NE +RA +Robson +Scotch +modelled +slashed +##imus +exceeds +##roper +##utile +Laughing +vascular +superficial +##arians +Barclay +Caucasian +classmate +sibling +Kimberly +Shreveport +##ilde +##liche +Cheney +Deportivo +Veracruz +berries +##lase +Bed +MI +Anatolia +Mindanao +broadband +##olia +##arte +##wab +darts +##immer +##uze +believers +ordinance +violate +##wheel +##ynth +Alongside +Coupe +Hobbs +arrondissement +earl +townland +##dote +##lihood +##sla +Ghosts +midfield +pulmonary +##eno +cues +##gol +##zda +322 +Siena +Sultanate +Bradshaw +Pieter +##thical +Raceway +bared +competence +##ssent +Bet +##urer +##ła +Alistair +Göttingen +appropriately +forge +##osterone +##ugen +DL +345 +convoys +inventions +##resses +##cturnal +Fay +Integration +slash +##roats +Widow +barking +##fant +1A +Hooper +##cona +##runched +unreliable +##emont +##esign +##stabulary +##stop +Journalists +bony +##iba +##trata +##ège +horrific +##bish +Jocelyn +##rmon +##apon +##cier +trainers +##ulatory +1753 +BR +corpus +synthesized +##bidden +##rafford +Elgin +##entry +Doherty +clockwise +##played +spins +##ample +##bley +Cope +constructions +seater +warlord +Voyager +documenting +fairies +##viator +Lviv +jewellery +suites +##gold +Maia +NME +##eavor +##kus +Eugène +furnishings +##risto +MCC +Metropolis +Older +Telangana +##mpus +amplifier +supervising +1710 +buffalo +cushion +terminating +##powering +steak +Quickly +contracting +dem +sarcastically +Elsa +##hein +bastards +narratives +Takes +304 +composure +typing +variance +##ifice +Softball +##rations +McLaughlin +gaped +shrines +##hogany +Glamorgan +##icle +##nai +##ntin +Fleetwood +Woodland +##uxe +fictitious +shrugs +##iper +BWV +conform +##uckled +Launch +##ductory +##mized +Tad +##stituted +##free +Bel +Chávez +messing +quartz +##iculate +##folia +##lynn +ushered +##29 +##ailing +dictated +Pony +##opsis +precinct +802 +Plastic +##ughter +##uno +##porated +Denton +Matters +SPD +hating +##rogen +Essential +Deck +Dortmund +obscured +##maging +Earle +##bred +##ittle +##ropolis +saturated +##fiction +##ression +Pereira +Vinci +mute +warehouses +##ún +biographies +##icking +sealing +##dered +executing +pendant +##wives +murmurs +##oko +substrates +symmetrical +Susie +##mare +Yusuf +analogy +##urage +Lesley +limitation +##rby +##ío +disagreements +##mise +embroidered +nape +unarmed +Sumner +Stores +dwell +Wilcox +creditors +##rivatization +##shes +##amia +directs +recaptured +scouting +McGuire +cradle +##onnell +Sato +insulin +mercenary +tolerant +Macquarie +transitions +cradled +##berto +##ivism +##yotes +FF +Ke +Reach +##dbury +680 +##bill +##oja +##sui +prairie +##ogan +reactive +##icient +##rits +Cyclone +Sirius +Survival +Pak +##coach +##trar +halves +Agatha +Opus +contrasts +##jection +ominous +##iden +Baylor +Woodrow +duct +fortification +intercourse +##rois +Colbert +envy +##isi +Afterward +geared +##flections +accelerate +##lenching +Witness +##rrer +Angelina +Material +assertion +misconduct +Nix +cringed +tingling +##eti +##gned +Everest +disturb +sturdy +##keepers +##vied +Profile +heavenly +##kova +##victed +translating +##sses +316 +Invitational +Mention +martyr +##uristic +Barron +hardness +Nakamura +405 +Genevieve +reflections +##falls +jurist +##LT +Pyramid +##yme +Shoot +heck +linguist +##tower +Ives +superiors +##leo +Achilles +##phological 
+Christophe +Padma +precedence +grassy +Oral +resurrection +##itting +clumsy +##lten +##rue +huts +##stars +Equal +##queduct +Devin +Gaga +diocesan +##plating +##upe +##graphers +Patch +Scream +hail +moaning +tracts +##hdi +Examination +outsider +##ergic +##oter +Archipelago +Havilland +greenish +tilting +Aleksandr +Konstantin +warship +##emann +##gelist +##ought +billionaire +##blivion +321 +Hungarians +transplant +##jured +##fters +Corbin +autism +pitchers +Garner +thence +Scientology +transitioned +integrating +repetitive +##dant +Rene +vomit +##burne +1661 +Researchers +Wallis +insulted +wavy +##wati +Ewing +excitedly +##kor +frescoes +injustice +##achal +##lumber +##úl +novella +##sca +Liv +##enstein +##river +monstrous +topping +downfall +looming +sinks +trillion +##pont +Effect +##phi +##urley +Sites +catchment +##H1 +Hopper +##raiser +1642 +Maccabi +lance +##chia +##sboro +NSA +branching +retorted +tensor +Immaculate +drumming +feeder +##mony +Dyer +homicide +Temeraire +fishes +protruding +skins +orchards +##nso +inlet +ventral +##finder +Asiatic +Sul +1688 +Melinda +assigns +paranormal +gardening +Tau +calming +##inge +##crow +regimental +Nik +fastened +correlated +##gene +##rieve +Sick +##minster +##politan +hardwood +hurled +##ssler +Cinematography +rhyme +Montenegrin +Packard +debating +##itution +Helens +Trick +Museums +defiance +encompassed +##EE +##TU +##nees +##uben +##ünster +##nosis +435 +Hagen +cinemas +Corbett +commended +##fines +##oman +bosses +ripe +scraping +##loc +filly +Saddam +pointless +Faust +Orléans +Syriac +##♭ +longitude +##ropic +Alfa +bliss +gangster +##ckling +SL +blending +##eptide +##nner +bends +escorting +##bloid +##quis +burials +##sle +##è +Ambulance +insults +##gth +Antrim +unfolded +##missible +splendid +Cure +warily +Saigon +Waste +astonishment +boroughs +##VS +##dalgo +##reshing +##usage +rue +marital +versatile +unpaid +allotted +bacterium +##coil +##cue +Dorothea +IDF +##location +##yke +RPG +##tropical +devotees +liter +##pree +Johnstone +astronaut +attends +pollen +periphery +doctrines +meta +showered +##tyn +GO +Huh +laude +244 +Amar +Christensen +Ping +Pontifical +Austen +raiding +realities +##dric +urges +##dek +Cambridgeshire +##otype +Cascade +Greenberg +Pact +##cognition +##aran +##urion +Riot +mimic +Eastwood +##imating +reversal +##blast +##henian +Pitchfork +##sunderstanding +Staten +WCW +lieu +##bard +##sang +experimenting +Aquino +##lums +TNT +Hannibal +catastrophic +##lsive +272 +308 +##otypic +41st +Highways +aggregator +##fluenza +Featured +Reece +dispatch +simulated +##BE +Communion +Vinnie +hardcover +inexpensive +til +##adores +groundwater +kicker +blogs +frenzy +##wala +dealings +erase +Anglia +##umour +Hapoel +Marquette +##raphic +##tives +consult +atrocities +concussion +##érard +Decree +ethanol +##aen +Rooney +##chemist +##hoot +1620 +menacing +Schuster +##bearable +laborers +sultan +Juliana +erased +onstage +##ync +Eastman +##tick +hushed +##yrinth +Lexie +Wharton +Lev +##PL +Testing +Bangladeshi +##bba +##usions +communicated +integers +internship +societal +##odles +Loki +ET +Ghent +broadcasters +Unix +##auer +Kildare +Yamaha +##quencing +##zman +chilled +##rapped +##uant +Duval +sentiments +Oliveira +packets +Horne +##rient +Harlan +Mirage +invariant +##anger +##tensive +flexed +sweetness +##wson +alleviate +insulting +limo +Hahn +##llars +##hesia +##lapping +buys +##oaming +mocked +pursuits +scooted +##conscious +##ilian +Ballad +jackets +##kra +hilly +##cane +Scenic +McGraw +silhouette +whipping +##roduced +##wark 
+##chess +##rump +Lemon +calculus +demonic +##latine +Bharatiya +Govt +Que +Trilogy +Ducks +Suit +stairway +##ceipt +Isa +regulator +Automobile +flatly +##buster +##lank +Spartans +topography +Tavi +usable +Chartered +Fairchild +##sance +##vyn +Digest +nuclei +typhoon +##llon +Alvarez +DJs +Grimm +authoritative +firearm +##chschule +Origins +lair +unmistakable +##xial +##cribing +Mouth +##genesis +##shū +##gaon +##ulter +Jaya +Neck +##UN +##oing +##static +relativity +##mott +##utive +##esan +##uveau +BT +salts +##roa +Dustin +preoccupied +Novgorod +##asus +Magnum +tempting +##histling +##ilated +Musa +##ghty +Ashland +pubs +routines +##etto +Soto +257 +Featuring +Augsburg +##alaya +Bit +loomed +expects +##abby +##ooby +Auschwitz +Pendleton +vodka +##sent +rescuing +systemic +##inet +##leg +Yun +applicant +revered +##nacht +##ndas +Muller +characterization +##patient +##roft +Carole +##asperated +Amiga +disconnected +gel +##cologist +Patriotic +rallied +assign +veterinary +installing +##cedural +258 +Jang +Parisian +incarcerated +stalk +##iment +Jamal +McPherson +Palma +##oken +##viation +512 +Rourke +irrational +##rippled +Devlin +erratic +##NI +##payers +Ni +engages +Portal +aesthetics +##rrogance +Milne +assassins +##rots +335 +385 +Cambodian +Females +fellows +si +##block +##otes +Jayne +Toro +flutter +##eera +Burr +##lanche +relaxation +##fra +Fitzroy +##undy +1751 +261 +comb +conglomerate +ribbons +veto +##Es +casts +##ege +1748 +Ares +spears +spirituality +comet +##nado +##yeh +Veterinary +aquarium +yer +Councils +##oked +##ynamic +Malmö +remorse +auditions +drilled +Hoffmann +Moe +Nagoya +Yacht +##hakti +##race +##rrick +Talmud +coordinating +##EI +##bul +##his +##itors +##ligent +##uerra +Narayan +goaltender +taxa +##asures +Det +##mage +Infinite +Maid +bean +intriguing +##cription +gasps +socket +##mentary +##reus +sewing +transmitting +##different +##furbishment +##traction +Grimsby +sprawling +Shipyard +##destine +##hropic +##icked +trolley +##agi +##lesh +Josiah +invasions +Content +firefighters +intro +Lucifer +subunit +Sahib +Myrtle +inhibitor +maneuvers +##teca +Wrath +slippery +##versing +Shoes +##dial +##illiers +##luded +##mmal +##pack +handkerchief +##edestal +##stones +Fusion +cumulative +##mell +##cacia +##rudge +##utz +foe +storing +swiped +##meister +##orra +batter +strung +##venting +##kker +Doo +Taste +immensely +Fairbanks +Jarrett +Boogie +1746 +mage +Kick +legislators +medial +##ilon +##logies +##ranton +Hybrid +##uters +Tide +deportation +Metz +##secration +##virus +UFO +##fell +##orage +##raction +##rrigan +1747 +fabricated +##BM +##GR +##rter +muttering +theorist +##tamine +BMG +Kincaid +solvent +##azed +Thin +adorable +Wendell +ta +##viour +pulses +##pologies +counters +exposition +sewer +Luciano +Clancy +##angelo +##riars +Showtime +observes +frankly +##oppy +Bergman +lobes +timetable +##bri +##uest +FX +##dust +##genus +Glad +Helmut +Meridian +##besity +##ontaine +Revue +miracles +##titis +PP +bluff +syrup +307 +Messiah +##erne +interfering +picturesque +unconventional +dipping +hurriedly +Kerman +248 +Ethnic +Toward +acidic +Harrisburg +##65 +intimidating +##aal +Jed +Pontiac +munitions +##nchen +growling +mausoleum +##ération +##wami +Cy +aerospace +caucus +Doing +##around +##miring +Cuthbert +##poradic +##rovisation +##wth +evaluating +##scraper +Belinda +owes +##sitic +##thermal +##fast +economists +##lishing +##uerre +##ân +credible +##koto +Fourteen +cones +##ebrates +bookstore +towels +##phony +Appearance +newscasts +##olin +Karin +Bingham +##elves 
+1680 +306 +disks +##lston +##secutor +Levant +##vout +Micro +snuck +##ogel +##racker +Exploration +drastic +##kening +Elsie +endowment +##utnant +Blaze +##rrosion +leaking +45th +##rug +##uernsey +760 +Shapiro +cakes +##ehan +##mei +##ité +##kla +repetition +successively +Friendly +Île +Koreans +Au +Tirana +flourish +Spirits +Yao +reasoned +##leam +Consort +cater +marred +ordeal +supremacy +##ritable +Paisley +euro +healer +portico +wetland +##kman +restart +##habilitation +##zuka +##Script +emptiness +communion +##CF +##inhabited +##wamy +Casablanca +pulsed +##rrible +##safe +395 +Dual +Terrorism +##urge +##found +##gnolia +Courage +patriarch +segregated +intrinsic +##liography +##phe +PD +convection +##icidal +Dharma +Jimmie +texted +constituents +twitch +##calated +##mitage +##ringing +415 +milling +##geons +Armagh +Geometridae +evergreen +needy +reflex +template +##pina +Schubert +##bruck +##icted +##scher +##wildered +1749 +Joanne +clearer +##narl +278 +Print +automation +consciously +flashback +occupations +##ests +Casimir +differentiated +policing +repay +##aks +##gnesium +Evaluation +commotion +##CM +##smopolitan +Clapton +mitochondrial +Kobe +1752 +Ignoring +Vincenzo +Wet +bandage +##rassed +##unate +Maris +##eted +##hetical +figuring +##eit +##nap +leopard +strategically +##reer +Fen +Iain +##ggins +##pipe +Matteo +McIntyre +##chord +##feng +Romani +asshole +flopped +reassure +Founding +Styles +Torino +patrolling +##erging +##ibrating +##ructural +sincerity +##ät +##teacher +Juliette +##cé +##hog +##idated +##span +Winfield +##fender +##nast +##pliant +1690 +Bai +Je +Saharan +expands +Bolshevik +rotate +##root +Britannia +Severn +##cini +##gering +##say +sly +Steps +insertion +rooftop +Piece +cuffs +plausible +##zai +Provost +semantic +##data +##vade +##cimal +IPA +indictment +Libraries +flaming +highlands +liberties +##pio +Elders +aggressively +##pecific +Decision +pigeon +nominally +descriptive +adjustments +equestrian +heaving +##mour +##dives +##fty +##yton +intermittent +##naming +##sets +Calvert +Casper +Tarzan +##kot +Ramírez +##IB +##erus +Gustavo +Roller +vaulted +##solation +##formatics +##tip +Hunger +colloquially +handwriting +hearth +launcher +##idian +##ilities +##lind +##locating +Magdalena +Soo +clubhouse +##kushima +##ruit +Bogotá +Organic +Worship +##Vs +##wold +upbringing +##kick +groundbreaking +##urable +##ván +repulsed +##dira +##ditional +##ici +melancholy +##bodied +##cchi +404 +concurrency +H₂O +bouts +##gami +288 +Leto +troll +##lak +advising +bundled +##nden +lipstick +littered +##leading +##mogeneous +Experiment +Nikola +grove +##ogram +Mace +##jure +cheat +Annabelle +Tori +lurking +Emery +Walden +##riz +paints +Markets +brutality +overrun +##agu +##sat +din +ostensibly +Fielding +flees +##eron +Pound +ornaments +tornadoes +##nikov +##organisation +##reen +##Works +##ldred +##olten +##stillery +soluble +Mata +Grimes +Léon +##NF +coldly +permitting +##inga +##reaked +Agents +hostess +##dl +Dyke +Kota +avail +orderly +##saur +##sities +Arroyo +##ceps +##egro +Hawke +Noctuidae +html +seminar +##ggles +##wasaki +Clube +recited +##sace +Ascension +Fitness +dough +##ixel +Nationale +##solidate +pulpit +vassal +570 +Annapolis +bladder +phylogenetic +##iname +convertible +##ppan +Comet +paler +##definite +Spot +##dices +frequented +Apostles +slalom +##ivision +##mana +##runcated +Trojan +##agger +##iq +##league +Concept +Controller +##barian +##curate +##spersed +##tring +engulfed +inquired +##hmann +286 +##dict +##osy +##raw +MacKenzie +su +##ienced +##iggs 
+##quitaine +bisexual +##noon +runways +subsp +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##+ +##, +##- +##. +##/ +##: +##; +##< +##= +##> +##? +##@ +##[ +##\ +##] +##^ +##_ +##` +##{ +##| +##} +##~ +##¡ +##¢ +##£ +##¥ +##§ +##¨ +##© +##ª +##« +##¬ +##® +##± +##´ +##µ +##¶ +##· +##¹ +##º +##» +##¼ +##¾ +##¿ +##À +##Á +## +##Ä +##Å +##Æ +##Ç +##È +##É +##Í +##Î +##Ñ +##Ó +##Ö +##× +##Ø +##Ú +##Ü +##Þ +##â +##ã +##æ +##ç +##î +##ï +##ð +##ñ +##ô +##õ +##÷ +##û +##þ +##ÿ +##Ā +##ą +##Ć +##Č +##ď +##Đ +##đ +##ē +##ė +##ę +##ě +##ğ +##ġ +##Ħ +##ħ +##ĩ +##Ī +##İ +##ļ +##Ľ +##ľ +##Ł +##ņ +##ň +##ŋ +##Ō +##ŏ +##ő +##Œ +##œ +##ř +##Ś +##ś +##Ş +##Š +##Ţ +##ţ +##ť +##ũ +##ŭ +##ů +##ű +##ų +##ŵ +##ŷ +##ź +##Ż +##ż +##Ž +##ž +##Ə +##ƒ +##ơ +##ư +##ǎ +##ǐ +##ǒ +##ǔ +##ǫ +##Ș +##Ț +##ț +##ɐ +##ɑ +##ɔ +##ɕ +##ə +##ɛ +##ɡ +##ɣ +##ɨ +##ɪ +##ɲ +##ɾ +##ʀ +##ʁ +##ʂ +##ʃ +##ʊ +##ʋ +##ʌ +##ʐ +##ʑ +##ʒ +##ʔ +##ʰ +##ʲ +##ʳ +##ʷ +##ʻ +##ʼ +##ʾ +##ʿ +##ˈ +##ː +##ˡ +##ˢ +##ˣ +##́ +##̃ +##̍ +##̯ +##͡ +##Α +##Β +##Γ +##Δ +##Ε +##Η +##Θ +##Ι +##Κ +##Λ +##Μ +##Ν +##Ο +##Π +##Σ +##Τ +##Φ +##Χ +##Ψ +##Ω +##ά +##έ +##ή +##ί +##β +##γ +##δ +##ε +##ζ +##η +##θ +##ι +##κ +##λ +##μ +##ξ +##ο +##π +##ρ +##σ +##τ +##υ +##φ +##χ +##ψ +##ω +##ό +##ύ +##ώ +##І +##Ј +##А +##Б +##В +##Г +##Д +##Е +##Ж +##З +##И +##К +##Л +##М +##Н +##О +##П +##Р +##С +##Т +##У +##Ф +##Х +##Ц +##Ч +##Ш +##Э +##Ю +##Я +##б +##в +##г +##д +##ж +##з +##к +##л +##м +##п +##с +##т +##у +##ф +##х +##ц +##ч +##ш +##щ +##ъ +##ы +##ь +##э +##ю +##ё +##і +##ї +##ј +##њ +##ћ +##Ա +##Հ +##ա +##ե +##ի +##կ +##մ +##յ +##ն +##ո +##ս +##տ +##ր +##ւ +##ְ +##ִ +##ֵ +##ֶ +##ַ +##ָ +##ֹ +##ּ +##א +##ב +##ג +##ד +##ה +##ו +##ז +##ח +##ט +##י +##כ +##ל +##ם +##מ +##ן +##נ +##ס +##ע +##פ +##צ +##ק +##ר +##ש +##ת +##، +##ء +##آ +##أ +##إ +##ئ +##ا +##ب +##ت +##ث +##ج +##ح +##خ +##ذ +##ز +##س +##ش +##ص +##ض +##ط +##ظ +##ع +##غ +##ف +##ق +##ك +##ل +##و +##ى +##َ +##ِ +##ٹ +##پ +##چ +##ک +##گ +##ہ +##ی +##ے +##ं +##आ +##क +##ग +##च +##ज +##ण +##त +##द +##ध +##न +##प +##ब +##भ +##म +##य +##र +##ल +##व +##श +##ष +##स +##ह +##ा +##ि +##ी +##ु +##े +##ो +##् +##। +##॥ +##আ +##ই +##এ +##ও +##ক +##খ +##গ +##চ +##ছ +##জ +##ট +##ত +##থ +##দ +##ধ +##ন +##প +##ব +##ম +##য +##র +##ল +##শ +##স +##হ +##় +##া +##ি +##ী +##ু +##ে +##ো +##্ +##য় +##க +##த +##ப +##ம +##ய +##ர +##ல +##வ +##ா +##ி +##ு +##் +##ร +##་ +##ག +##ང +##ད +##ན +##བ +##མ +##ར +##ལ +##ས +##ི +##ུ +##ེ +##ོ +##ა +##ე +##ი +##ლ +##ნ +##ო +##რ +##ს +##ᴬ +##ᴵ +##ᵀ +##ᵃ +##ᵇ +##ᵈ +##ᵉ +##ᵍ +##ᵏ +##ᵐ +##ᵒ +##ᵖ +##ᵗ +##ᵘ +##ᵣ +##ᵤ +##ᵥ +##ᶜ +##ᶠ +##ḍ +##Ḥ +##ḥ +##Ḩ +##ḩ +##ḳ +##ṃ +##ṅ +##ṇ +##ṛ +##ṣ +##ṭ +##ạ +##ả +##ấ +##ầ +##ẩ +##ậ +##ắ +##ế +##ề +##ể +##ễ +##ệ +##ị +##ọ +##ố +##ồ +##ổ +##ộ +##ớ +##ờ +##ợ +##ụ +##ủ +##ứ +##ừ +##ử +##ữ +##ự +##ỳ +##ỹ +##ἀ +##ἐ +##ὁ +##ὐ +##ὰ +##ὶ +##ὸ +##ῆ +##ῖ +##ῦ +##ῶ +##‐ +##‑ +##‒ +##– +##— +##― +##‖ +##‘ +##’ +##‚ +##“ +##” +##„ +##† +##‡ +##• +##… +##‰ +##′ +##″ +##⁄ +##⁰ +##ⁱ +##⁴ +##⁵ +##⁶ +##⁷ +##⁸ +##⁹ +##⁻ +##ⁿ +##₅ +##₆ +##₇ +##₈ +##₉ +##₊ +##₍ +##₎ +##ₐ +##ₑ +##ₒ +##ₓ +##ₕ +##ₖ +##ₘ +##ₚ +##ₛ +##ₜ +##₤ +##€ +##₱ +##₹ +##ℓ +##№ +##ℝ +##⅓ +##← +##↑ +##→ +##↔ +##⇌ +##⇒ +##∂ +##∈ +##∗ +##∘ +##√ +##∞ +##∧ +##∨ +##∩ +##∪ +##≈ +##≠ +##≡ +##≤ +##≥ +##⊂ +##⊆ +##⊕ +##⋅ +##─ +##│ +##■ +##● +##★ +##☆ +##☉ +##♠ +##♣ +##♥ +##♦ +##♯ +##⟨ +##⟩ +##ⱼ +##、 +##。 +##《 +##》 +##「 +##」 +##『 +##』 +##〜 +##い +##う +##え +##お +##か +##き +##く +##け +##こ +##さ +##し +##す +##せ +##そ +##た +##ち +##つ +##て +##と +##な +##に +##の +##は +##ひ +##ま +##み +##む +##め +##も +##や +##ゆ +##よ +##ら +##り +##る +##れ +##ん +##ア +##ィ 
+##イ +##ウ +##エ +##オ +##カ +##ガ +##キ +##ク +##グ +##コ +##サ +##シ +##ジ +##ス +##ズ +##タ +##ダ +##ッ +##テ +##デ +##ト +##ド +##ナ +##ニ +##ハ +##バ +##パ +##フ +##ブ +##プ +##マ +##ミ +##ム +##ャ +##ュ +##ラ +##リ +##ル +##レ +##ロ +##ン +##・ +##ー +##一 +##三 +##上 +##下 +##中 +##事 +##二 +##井 +##京 +##人 +##亻 +##仁 +##佐 +##侍 +##光 +##公 +##力 +##北 +##十 +##南 +##原 +##口 +##史 +##司 +##吉 +##同 +##和 +##囗 +##国 +##國 +##土 +##城 +##士 +##大 +##天 +##太 +##夫 +##女 +##子 +##宀 +##安 +##宮 +##宿 +##小 +##尚 +##山 +##島 +##川 +##州 +##平 +##年 +##心 +##愛 +##戸 +##文 +##新 +##方 +##日 +##明 +##星 +##書 +##月 +##木 +##本 +##李 +##村 +##東 +##松 +##林 +##正 +##武 +##氏 +##水 +##氵 +##江 +##河 +##海 +##版 +##犬 +##王 +##生 +##田 +##白 +##皇 +##省 +##真 +##石 +##社 +##神 +##竹 +##美 +##義 +##花 +##藤 +##西 +##谷 +##車 +##辶 +##道 +##郎 +##郡 +##部 +##野 +##金 +##長 +##門 +##陽 +##青 +##食 +##馬 +##高 +##龍 +##龸 +##사 +##씨 +##의 +##이 +##한 +##fi +##fl +##! +##( +##) +##, +##- +##/ +##: diff --git a/core/models/latent_diffusion/vae/optimus_modules/vocab/bert_vocab_download_info.json b/core/models/latent_diffusion/vae/optimus_modules/vocab/bert_vocab_download_info.json new file mode 100644 index 0000000000000000000000000000000000000000..445a2113a3e4cb557889868fa08c74605932ac79 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/vocab/bert_vocab_download_info.json @@ -0,0 +1,15 @@ +{ + "bert-base-uncased": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt", + "bert-large-uncased": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-vocab.txt", + "bert-base-cased": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-vocab.txt", + "bert-large-cased": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-vocab.txt", + "bert-base-multilingual-uncased": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased-vocab.txt", + "bert-base-multilingual-cased": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased-vocab.txt", + "bert-base-chinese": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese-vocab.txt", + "bert-base-german-cased": "https://int-deepset-models-bert.s3.eu-central-1.amazonaws.com/pytorch/bert-base-german-cased-vocab.txt", + "bert-large-uncased-whole-word-masking": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-vocab.txt", + "bert-large-cased-whole-word-masking": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-vocab.txt", + "bert-large-uncased-whole-word-masking-finetuned-squad": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-whole-word-masking-finetuned-squad-vocab.txt", + "bert-large-cased-whole-word-masking-finetuned-squad": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-whole-word-masking-finetuned-squad-vocab.txt", + "bert-base-cased-finetuned-mrpc": "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-finetuned-mrpc-vocab.txt" +} \ No newline at end of file diff --git a/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2-merges.txt b/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2-merges.txt new file mode 100644 index 0000000000000000000000000000000000000000..226b0752cac7789c48f0cb3ec53eda48b7be36cc --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2-merges.txt @@ -0,0 +1,50001 @@ +#version: 0.2 +Ġ t +Ġ a +h e +i n +r e +o n +Ġt he +e r +Ġ s +a t +Ġ w +Ġ o +e n +Ġ c +i t +i s +a n +o r +e s +Ġ b +e d +Ġ f +in g +Ġ p +o u +Ġa n +a l +a r +Ġt o +Ġ m +Ġo f +Ġ in 
+Ġ d +Ġ h +Ġan d +i c +a s +l e +Ġt h +i on +o m +l l +en t +Ġ n +Ġ l +s t +Ġ re +v e +Ġ e +r o +l y +Ġb e +Ġ g +Ġ T +c t +Ġ S +i d +o t +Ġ I +u t +e t +Ġ A +Ġ is +Ġ on +i m +a m +o w +a y +a d +s e +Ġth at +Ġ C +i g +Ġf or +a c +Ġ y +v er +u r +Ġ u +l d +Ġs t +Ġ M +' s +Ġ he +Ġ it +at ion +it h +i r +c e +Ġy ou +i l +Ġ B +Ġw h +o l +Ġ P +Ġw ith +Ġ 1 +t er +c h +Ġa s +Ġw e +Ġ ( +n d +i ll +Ġ D +i f +Ġ 2 +a g +er s +k e +Ġ " +Ġ H +e m +Ġc on +Ġ W +Ġ R +he r +Ġw as +Ġ r +o d +Ġ F +u l +at e +Ġa t +r i +p p +o re +ĠT he +Ġs e +u s +Ġp ro +Ġh a +u m +Ġa re +Ġd e +a in +an d +Ġo r +ig h +es t +is t +a b +r om +Ġ N +t h +Ġc om +Ġ G +u n +o p +0 0 +Ġ L +Ġn ot +es s +Ġe x +Ġ v +re s +Ġ E +e w +it y +an t +Ġb y +e l +o s +or t +o c +q u +Ġf rom +Ġha ve +Ġs u +i ve +ou ld +Ġs h +Ġth is +n t +r a +p e +igh t +ar t +m ent +Ġa l +u st +en d +- - +al l +Ġ O +ac k +Ġc h +Ġ le +i es +re d +ar d +â Ģ +ou t +Ġ J +Ġa b +e ar +i v +al ly +ou r +o st +g h +p t +Ġp l +as t +Ġc an +a k +om e +u d +T he +Ġh is +Ġd o +Ġg o +Ġh as +g e +' t +Ġ U +r ou +Ġs a +Ġ j +Ġb ut +Ġw or +Ġa ll +e ct +Ġ k +am e +Ġw ill +o k +Ġw he +Ġthe y +id e +0 1 +f f +ic h +p l +t her +Ġt r +. . +Ġin t +i e +u re +ag e +Ġn e +i al +a p +in e +ic e +Ġm e +Ġo ut +an s +on e +on g +ion s +Ġwh o +Ġ K +Ġu p +Ġthe ir +Ġa d +Ġ 3 +Ġu s +at ed +ou s +Ġm ore +u e +o g +ĠS t +in d +i ke +Ġs o +im e +p er +. " +b er +i z +a ct +Ġon e +Ġsa id +Ġ - +a re +Ġyou r +c c +ĠT h +Ġc l +e p +a ke +ab le +i p +Ġcon t +Ġwh ich +i a +Ġ im +Ġab out +Ġwe re +ver y +u b +Ġh ad +Ġ en +Ġcom p +, " +ĠI n +Ġu n +Ġa g +i re +ac e +a u +ar y +Ġw ould +as s +r y +Ġ âĢ +c l +o ok +e re +s o +Ġ V +ig n +i b +Ġof f +Ġt e +v en +Ġ Y +i le +o se +it e +or m +Ġ2 01 +Ġre s +Ġm an +Ġp er +Ġo ther +or d +ul t +Ġbe en +Ġl ike +as e +an ce +k s +ay s +ow n +en ce +Ġd is +ct ion +Ġan y +Ġa pp +Ġs p +in t +res s +ation s +a il +Ġ 4 +ic al +Ġthe m +Ġhe r +ou nt +ĠC h +Ġa r +Ġ if +Ġthe re +Ġp e +Ġy ear +a v +Ġm y +Ġs ome +Ġwhe n +ou gh +ac h +Ġth an +r u +on d +ic k +Ġo ver +ve l +Ġ qu +Ċ Ċ +Ġs c +re at +re e +ĠI t +ou nd +p ort +Ġal so +Ġp art +f ter +Ġk n +Ġbe c +Ġt ime +en s +Ġ 5 +op le +Ġwh at +Ġn o +d u +m er +an g +Ġn ew +-- -- +Ġg et +or y +it ion +ing s +Ġj ust +Ġint o +Ġ 0 +ent s +o ve +t e +Ġpe ople +Ġp re +Ġit s +Ġre c +Ġt w +i an +ir st +ar k +or s +Ġwor k +ad e +o b +Ġs he +Ġo ur +w n +in k +l ic +Ġ1 9 +ĠH e +is h +nd er +au se +Ġh im +on s +Ġ [ +Ġ ro +f orm +i ld +at es +ver s +Ġon ly +o ll +Ġs pe +c k +e ll +am p +Ġa cc +Ġb l +i ous +ur n +f t +o od +Ġh ow +he d +Ġ ' +Ġa fter +a w +Ġat t +o v +n e +Ġpl ay +er v +ic t +Ġc ould +it t +Ġa m +Ġf irst +Ġ 6 +Ġa ct +Ġ $ +e c +h ing +u al +u ll +Ġcom m +o y +o ld +c es +at er +Ġf e +Ġbe t +w e +if f +Ġtw o +oc k +Ġb ack +) . +id ent +Ġu nder +rou gh +se l +x t +Ġm ay +rou nd +Ġp o +p h +is s +Ġd es +Ġm ost +Ġd id +Ġad d +j ect +Ġin c +f ore +Ġp ol +on t +Ġag ain +cl ud +ter n +Ġkn ow +Ġne ed +Ġcon s +Ġc o +Ġ . 
+Ġw ant +Ġse e +Ġ 7 +n ing +i ew +ĠTh is +c ed +Ġe ven +Ġin d +t y +ĠW e +at h +Ġthe se +Ġp r +Ġu se +Ġbec ause +Ġf l +n g +Ġn ow +ĠâĢ ĵ +c om +is e +Ġm ake +Ġthe n +ow er +Ġe very +ĠU n +Ġse c +os s +u ch +Ġe m +Ġ = +ĠR e +i ed +r it +Ġin v +le ct +Ġsu pp +at ing +Ġl ook +m an +pe ct +Ġ 8 +ro w +Ġb u +Ġwhe re +if ic +Ġyear s +i ly +Ġd iff +Ġsh ould +Ġre m +T h +I n +Ġe v +d ay +' re +ri b +Ġre l +s s +Ġde f +Ġr ight +Ġs y +) , +l es +00 0 +he n +Ġth rough +ĠT r +_ _ +Ġw ay +Ġd on +Ġ , +Ġ1 0 +as ed +Ġas s +ub lic +Ġre g +ĠA nd +i x +Ġ very +Ġin clud +ot her +Ġim p +ot h +Ġsu b +ĠâĢ Ķ +Ġbe ing +ar g +ĠW h += = +ib le +Ġdo es +an ge +r am +Ġ 9 +er t +p s +it ed +ation al +Ġb r +Ġd own +Ġman y +ak ing +Ġc all +ur ing +it ies +Ġp h +ic s +al s +Ġde c +at ive +en er +Ġbe fore +il ity +Ġwe ll +Ġm uch +ers on +Ġth ose +Ġsu ch +Ġ ke +Ġ end +ĠB ut +as on +t ing +Ġl ong +e f +Ġth ink +y s +Ġbe l +Ġs m +it s +a x +Ġo wn +Ġpro v +Ġs et +if e +ment s +b le +w ard +Ġsh ow +Ġp res +m s +om et +Ġo b +Ġs ay +ĠS h +t s +f ul +Ġe ff +Ġg u +Ġin st +u nd +re n +c ess +Ġ ent +ĠY ou +Ġgo od +Ġst art +in ce +Ġm ade +t t +st em +ol og +u p +Ġ | +um p +Ġhe l +ver n +ul ar +u ally +Ġa c +Ġm on +Ġl ast +Ġ2 00 +1 0 +Ġst ud +u res +ĠA r +sel f +ar s +mer ic +u es +c y +Ġm in +oll ow +Ġc ol +i o +Ġm od +Ġc ount +ĠC om +he s +Ġf in +a ir +i er +âĢ Ķ +re ad +an k +at ch +e ver +Ġst r +Ġpo int +or k +ĠN ew +Ġs ur +o ol +al k +em ent +Ġus ed +ra ct +we en +Ġs ame +ou n +ĠA l +c i +Ġdiff ere +Ġwh ile +---- ---- +Ġg ame +ce pt +Ġs im +.. . +Ġin ter +e k +Ġre port +Ġpro du +Ġst ill +l ed +a h +Ġhe re +Ġwor ld +Ġth ough +Ġn um +ar ch +im es +al e +ĠS e +ĠI f +/ / +ĠL e +Ġre t +Ġre f +Ġtr ans +n er +ut ion +ter s +Ġt ake +ĠC l +Ġcon f +w ay +a ve +Ġgo ing +Ġs l +u g +ĠA meric +Ġspe c +Ġh and +Ġbet ween +ist s +ĠD e +o ot +I t +Ġe ar +Ġagain st +Ġh igh +g an +a z +at her +Ġex p +Ġo p +Ġin s +Ġg r +Ġhel p +Ġre qu +et s +in s +ĠP ro +is m +Ġf ound +l and +at a +us s +am es +Ġp erson +Ġg reat +p r +Ġs ign +ĠA n +' ve +Ġs omet +Ġs er +h ip +Ġr un +Ġ : +Ġt er +ire ct +Ġf ollow +Ġd et +ic es +Ġf ind +1 2 +Ġm em +Ġc r +e red +e x +Ġex t +ut h +en se +c o +Ġte am +v ing +ou se +as h +at t +v ed +Ġsy stem +ĠA s +d er +iv es +m in +Ġle ad +ĠB l +c ent +Ġa round +Ġgo vern +Ġc ur +vel op +an y +Ġc our +al th +ag es +iz e +Ġc ar +od e +Ġl aw +Ġre ad +' m +c on +Ġre al +Ġsupp ort +Ġ1 2 +.. .. 
+Ġre ally +n ess +Ġf act +Ġd ay +Ġb oth +y ing +Ġs erv +ĠF or +Ġth ree +Ġw om +Ġm ed +od y +ĠThe y +5 0 +Ġex per +t on +Ġe ach +ak es +Ġc he +Ġc re +in es +Ġre p +1 9 +g g +ill ion +Ġg rou +ut e +i k +W e +g et +E R +Ġm et +Ġs ays +o x +Ġd uring +er n +iz ed +a red +Ġf am +ic ally +Ġha pp +ĠI s +Ġch ar +m ed +v ent +Ġg ener +i ent +p le +i et +re nt +1 1 +v es +pt ion +Ġ2 0 +form ation +Ġc or +Ġoff ic +ie ld +Ġto o +is ion +Ġin f +Ġ Z +t he +o ad +Ġp ublic +Ġpro g +r ic +* * +Ġw ar +Ġp ower +v iew +Ġf ew +Ġl oc +Ġdiffere nt +Ġst ate +Ġhe ad +' ll +Ġp oss +Ġst at +re t +ant s +Ġv al +Ġis s +Ġc le +i vers +an c +Ġex pl +Ġan other +Ġ Q +Ġa v +th ing +n ce +W h +Ġch ild +Ġs ince +i red +l ess +Ġl ife +Ġde velop +itt le +Ġde p +Ġp ass +ã ĥ +Ġt urn +or n +Th is +b ers +ro ss +ĠA d +Ġf r +Ġres p +Ġsec ond +o h +Ġ / +Ġdis c +Ġ & +Ġsomet hing +Ġcomp le +Ġ ed +Ġf il +Ġmon th +a j +u c +Ġgovern ment +Ġwith out +Ġle g +Ġd ist +Ġp ut +Ġqu est +an n +Ġpro t +2 0 +Ġne ver +i ence +Ġle vel +Ġar t +Ġth ings +Ġm ight +Ġeff ect +Ġcont ro +Ġc ent +Ġ1 8 +Ġall ow +Ġbel ie +ch ool +ot t +Ġinc re +Ġfe el +Ġres ult +Ġl ot +Ġf un +ot e +Ġt y +ere st +Ġcont in +Ġus ing +Ġb ig +2 01 +Ġas k +Ġb est +Ġ ) +I N +Ġo pp +3 0 +Ġnum ber +in ess +S t +le ase +Ġc a +Ġm ust +Ġd irect +Ġg l +Ġ < +Ġop en +Ġp ost +Ġcom e +Ġse em +ord ing +Ġwe ek +ate ly +it al +Ġe l +ri end +Ġf ar +Ġt ra +in al +Ġp ri +ĠU S +Ġpl ace +Ġfor m +Ġto ld +" : +ain s +at ure +ĠTr ump +Ġst and +Ġ # +id er +ĠF r +Ġne xt +Ġs oc +Ġp ur +Ġle t +Ġl ittle +Ġh um +Ġ i +r on +1 5 +Ġ1 5 +Ġcomm un +Ġm ark +ĠThe re +Ġw r +ĠTh at +Ġin formation +w ays +Ġb us +a pp +Ġinv est +m e +Ġh ard +ain ed +e ad +Ġim port +Ġapp ro +Ġt est +Ġt ri +Ġre st +os ed +Ġf ull +Ġc are +ĠS p +Ġc ase +O N +Ġs k +Ġl ess +Ġ + +Ġpart ic +ĠP l +ab ly +u ck +is hed +ch n +b e +Ġl ist +at or +Ġto p +Ġad v +ĠB e +ru ct +Ġd em +r ation +l ing +g y +re en +g er +Ġh ome +Ġle ft +Ġbet ter +Ġd ata +Ġ1 1 +Ġatt ack +Ġpro ble +l ine +ard s +Ġbe h +r al +ĠH ow +ĠS he +ar ge +Ġ -- +: // +Ġb ro +ĠP h +at s +Ġbu ild +w w +id ed +a im +as es +en cy +Ġm ain +in ed +Ġinclud ing +Ġ { +Ġg ot +Ġint erest +Ġke ep +Ġ X +Ġe as +ain ing +Ġcl ass +âĢ ¦ +ĠN o +Ġv ar +Ġsm all +amp le +A T +Ġ ide +ĠS o +Ġre ce +Ġpol it +Ġm ov +Ġpl an +Ġper cent +iv ing +Ġc amp +Ġp ay +1 4 +s c +is ed +Ġu nt +one y +pl oy +== == +Ġdid n +ĠI nd +el s +ert ain +Ġp os +__ __ +i ver +Ġpro cess +Ġprog ram +if ied +ĠR ep +1 6 +u ro +olog y +at ter +in a +Ġn ame +ĠA ll +Ġf our +Ġret urn +v ious +b s +Ġcall ed +Ġm ove +ĠS c +ir d +Ġgrou p +Ġb re +Ġm en +Ġc ap +t en +e e +Ġd ri +le g +he re +uth or +Ġp at +Ġcur rent +id es +Ġp op +t o +ent ion +Ġal ways +Ġm il +Ġwom en +Ġ1 6 +Ġo ld +iv en +ra ph +ĠO r +r or +ent ly +Ġn ear +ĠE x +re am +s h +Ġ1 4 +Ġf ree +iss ion +st and +ĠC on +al ity +us ed +1 3 +Ġdes ign +Ġch ange +Ġch ang +Ġb o +Ġv is +em ber +Ġb ook +read y +Ġk ill +2 5 +pp ed +Ġa way +Ġab le +Ġcount ry +Ġcon st +ar n +Ġor der +A R +i or +i um +or th +1 8 +ail able +Ġs w +Ġm illion +Ġ1 3 +at ic +t ed +ĠG o +Ġo per +en g +Ġth ing +aj or +con om +ĠCom m +Ġwh y +u red +ur al +Ġs chool +b y +ĠM ar +Ġa ff +Ġd ays +Ġan n +us h +an e +I f +e g +Ġpro f +Ġhe alth +ou th +B ut +ion al +. 
, +Ġs ol +Ġal ready +Ġ3 0 +Ġchar act +H e +Ġf riend +E S +i ans +ic le +' d +ĠO n +Ġle ast +Ġp rom +Ġd r +Ġh ist +it her +Ġ est +i qu +1 7 +s on +Ġte ll +Ġt alk +oh n +o int +le ction +A N +Ġunt il +au gh +Ġl ater +Ġ ve +Ġv iew +end ing +iv ed +Ġwor d +w are +Ġc ost +Ġen ough +Ġg ive +ĠUn ited +Ġte chn +are nt +O R +Ġp ar +ĠD r +Ġ201 6 +r ist +er ing +Ġ  +Ġl arge +s ide +ac y +cc ess +Ġw in +Ġimport ant +Ġ19 9 +Ġdoes n +Ġ1 7 +Ġbus iness +Ġcle ar +Ġre se +" , +ur y +Ġe qu +as ter +al f +ĠAmeric an +n ect +Ġex pect +ivers ity +Ġo cc +ĠF l +Ġk ind +Ġme an +Ġp ast +Ġde v +Ġb as +le t +ra ft +Ġor gan +Ġde l +Ġper form +Ġst ory +Ġse ason +ĠC ol +Ġcl aim +Ġc ame +Ġwith in +Ġl ine +Ġpro ject +ĠA t +Ġcontro l +end ed +ĠS y +Ġa ir +iz ation +Ġ * +le y +Ġm oney +id d +Y ou +f or +Ġfam ily +Ġm aking +Ġb it +Ġpol ice +Ġhapp en +Ġ vers +on y +u ff +ĠW hen +Ġs it +ide o +l f +is on +Ġsu re +g in +Ġapp ear +Ġl ight +Ġ es +o f +Ġw ater +Ġt imes +n ot +Ġg row +Ġcomp any +ĠT e +ow s +Ġm ar +our ce +i ol +ar m +b r +Ġex ample +Ġcon c +Ġf ore +ĠT o +p ro +E N +ri es +Ġ2 5 +ĠC an +ne y +Ġact ually +Ġe ver +ur ity +ak en +ap s +Ġt ax +Ġm ajor +am a +Ġof ten +er al +Ġhum an +Ġj ob +is ter +Ġav ailable +oc r +en n +a id +iv id +Ġrec ord +? " +Ġs ing +ĠA m +id ence +Ġnew s +st er +Ġe conom +Ġfollow ing +ĠB r +is ing +Ġh our +m ost +um ent +Ġse x +Ġdes c +Ġbec ome +ĠE d +Ġto ok +Ġha ving +Ġprodu ct +a ult +A s +ar ing +Ġme ans +Ġh op +un e +Ġch o +Ġc ertain +Ġn on +Ġde al +2 4 +le ment +oc i +en e +Ġs ide +ĠP r +ĠM ay +Ġre ason +u ed +c hed +ul ation +Ġe lect +Ġoffic ial +Ġposs ible +Ġh old +and s +ot s +Ġc ity +or ies +Ġse ver +Ġchild ren +Ġon ce +Ġact iv +l er +Ġn ight +it ions +ĠJ ohn +a pe +pl ay +Ġd one +Ġl im +Ġwork ing +ĠP res +or ld +e b +ĠC o +Ġb ody +ail s +ut es +ĠM r +Ġwhe ther +Ġa uthor +ro p +Ġpro per +Ġse en +) ; +Ġf ac +ĠS u +Ġcon d +it ing +Ġcour se +Ġ } +-------- -------- +a ign +Ġev ent +Ġen g +Ġp ot +Ġin tern +i am +Ġsh ort +em pt +ã Ĥ +ĠG od +il ar +8 0 +Ġor ig +I S +our n +ab ility +it ive +Ġd am +Ġ1 00 +Ġp ress +Ġdo ing +Ġprot ect +r ing +Ġthough t +Ġquest ion +re w +ĠW ar +Ġsever al +ĠSt ate +Ġg iven +Ġf und +ĠT w +Ġw ent +an ces +w ork +p or +m y +4 0 +Ġar g +art ment +ust om +Ġpol ic +Ġme et +Ġc reat +2 2 +ĠSt ates +Ġg ames +ra w +ut ure +Ġunder stand +ur s +ĠO b +l ish +s y +Ġm akes +Ġw on +ag on +Ġh tt +Ġl ove +ent ial +Ġcomple te +p ar +ĠI m +A L +Ġacc ount + ł +ore d +ver t +Ġ ident +Ġ201 5 +Ġother s +ĠM in +i ber +ver age +The re +ition al +d d +Ġpro b +Ġyou ng +Ġal ong +Ġacc ording +Ġy et +Ġmem bers +ĠWh at +o id +ĠM an +A nd +Ġam ong +a i +Ġem ploy +ĠR es +Ġ > +Ġinv ol +Ġl ow +a f +ĠC ar +Ġh ig +ĠO ne +ĠS ec +in ation +Ġlike ly +Ġan t +ag ed +ĠR uss +Ġb en +Ġre le +F or +b ack +ĠN ot +Ġpres ident +b all +Ġacc ess +ivid ual +ĠD em +ĠE uro +6 0 +Ġkn own +ir l +ĠG r +Ġear ly +u se +iet y +âĢ ĵ +Ġf ight +Ġs ent +Ġto day +Ġmark et +" . 
+Ġb ased +Ġstr ong +ur ther +Ġde b +m ber +Ġproble m +Ġde ath +Ġsoc ial +im ate +A S +ort un +Ġcamp aign +er y +C h +Ġe y +i ally +Ġm us +w h +p os +Ġ er +Ġsa f +Ġmonth s +ir on +Ġv iol +Ġf ive +Ġst re +Ġplay ers +in c +al d +y ear +a un +Ġsu ccess +Ġpres ent +ere nce +Ġ201 4 +Ġsu gg +Ġpartic ular +Ġtr y +Ġsugg est +ĠCh rist +on es +Ġpri v +2 3 +Ġc rit +Ġl and +Ġloc al +if y +2 9 +Ġa ut +E D +ĠG u +Ġm ult +Ġpolit ical +Ġask ed +Ġfor mer +it ter +ri pt +Ġcl ose +Ġp ract +ĠY ork +Ġget ting +Ġac ross +Ġcom b +Ġbelie ve +Ġ z +Ġto get +Ġtoget her +ĠC ent +ir c +Ġind ividual +ĠM c +2 7 +is k +ĠE ng +Ġf ace +Ġ2 4 +Ġval ue +Ġare a +e v +Ġw rit +ĠPres ident +Ġv ot +Ġke y +Ġm om +p ut +Ġany thing +Ġexper ience +att le +Ġm ind +a ff +om m +Ġf uture +g ed +Ġc ut +Ġto t +it ch +Ġv ideo +Ġinvest ig +Ġn et +ĠM y +r ict +i en +. ) +Ġimp ro +th ough +ward s +Ġcon nect +ĠM ed +sel ves +ens ive +m b +o ber +at ors +A n +Ġ5 0 +Ġre du +res ent +Ġab ove +Ġf re +ĠEuro pe +s w +Ġam ount +ĠA pp +Ġe ither +Ġmil it +Ġan al +Ġf ail +ĠE n +al es +Ġspec ial +Ġbl ack +I T +c her +Ġlook ing +Ġf ire +y n +Ġal most +o on +Ġstud y +Ġm iss +c hes +ro wn +Ġt re +Ġcommun ity +Ġmed ia +Ġf ood +Ġcom es +ĠUn iversity +Ġsing le +Wh at +u ly +Ġh alf +ag ue +h od +ĠRep ublic +Ġstart ed +Ġqu ick +ot o +b ook +Ġiss ue +it or +Ġel se +Ġcons ider +2 6 +ro du +Ġt aken +2 8 +9 9 +ĠW ith +Ġtr ue +Ġw a +Ġtr ad +Ġag o +Ġm ess +ie f +Ġadd ed +o ke +Ġb ad +Ġf av +3 3 +Ġsim ilar +as k +ĠD on +Ġcharact er +ort s +ĠH ouse +Ġreport ed +Ġty pe +v al +i od +ĠHow ever +Ġt arg +Ġent ire +pp ing +Ġhist ory +Ġl ive +ff ic +.... .... +ed eral +Ġtr ying +Ġdisc uss +ĠH ar +ac es +l ished +Ġse lf +os p +re st +Ġro om +el t +Ġf all +ol ution +Ġe t +Ġ x +Ġis n +Ġide a +b o +Ġs ound +ĠD ep +Ġsome one +ci ally +ull y +Ġf oc +Ġob ject +if t +ap er +Ġplay er +Ġr ather +Ġserv ice +as hing +ĠD o +ĠP art +ru g +m on +p ly +Ġm or +Ġnot hing +Ġprov ide +I C +un g +Ġpart y +Ġex ist +Ġm ag +7 0 +Ġr ul +Ġh ouse +Ġbeh ind +Ġhow ever +ĠW orld +Ġs um +Ġapp lic +Ġ ; +Ġfun ction +g r +ĠP ol +Ġfr ont +2 00 +Ġser ies +Ġt em +Ġty p +ill s +Ġo pt +Ġpoint s +Ġbel ow +itt ed +Ġspec ific +Ġ201 7 +um b +Ġr a +Ġpre vious +Ġpre t +re me +Ġc ustom +Ġcour t +ĠM e +Ġre pl +Ġwho le +g o +c er +Ġt reat +ĠA ct +Ġprob ably +Ġle arn +end er +ĠA ss +Ġvers ion +n ow +Ġche ck +ĠC al +R E +min ist +O n +our ces +Ġben ef +Ġd oc +Ġdet er +Ġen c +Ġsu per +Ġadd ress +Ġv ict +Ġ201 3 +Ġme as +t r +Ġf ield +W hen +Ġsign ific +u ge +Ġfe at +Ġcomm on +l oad +Ġbe gin +Ġbr ing +Ġa ction +er man +Ġdesc rib +Ġind ust +Ġwant ed +ri ed +m ing +Ġatt empt +4 5 +f er +Ġd ue +ress ion +# # +Ġsh all +Ġs ix +o o +Ġst ep +Ġp ub +Ġhim self +Ġ2 3 +Ġc op +Ġd est +Ġst op +A C +ib ility +Ġl ab +ic ult +Ġhour s +Ġcre ate +Ġf urther +ĠAmeric a +ĠC ity +Ġd ou +he ad +S T +ĠN orth +c ing +Ġn ational +u le +ĠIn st +Ġt aking +ĠQ u +ir t +Ġre d +Ġrese arch +v iron +ĠG e +Ġbre ak +an a +Ġsp ace +ater ial +Ġrec ent +ĠA b +Ġgener al +Ġh it +Ġper iod +Ġevery thing +ive ly +Ġph ys +Ġsay ing +an ks +Ġc ou +Ġc ult +ac ed +e al +u ation +Ġc oun +l u +Ġinclud e +Ġpos ition +ĠA fter +ĠCan ad +ĠE m +Ġim m +ĠR ed +Ġp ick +Ġcom pl +Ġm atter +re g +e xt +ang u +is c +o le +a ut +Ġcomp et +e ed +f ect +Ġ2 1 +ĠS en +ĠThe se +as ing +Ġcan not +Ġin it +Ġrel ations +ac hed +Ġb ar +Ġ4 0 +ĠT H +Ġ201 2 +Ġv ol +Ġg round +Ġsec urity +Ġup d +il t +3 5 +Ġconc ern +ĠJ ust +Ġwh ite +Ġseem s +ĠH er +pe cially +i ents +Ġann oun +Ġf ig +ight s +Ġst ri +l ike +id s +Ġs us +Ġw atch +Ġ â +Ġw ind +ĠC ont +Ġit self +Ġm ass +A l +y le +iqu e +ĠN ational +Ġab s +Ġp 
ack +Ġout side +Ġan im +Ġp ain +et er +Ġman ag +du ct +og n +Ġ ] +ĠSe pt +se c +o ff +ĠJ an +Ġf oot +ad es +Ġth ird +Ġm ot +Ġev idence +int on +Ġth reat +a pt +pl es +c le +Ġl o +Ġde cl +Ġit em +med i +Ġrep resent +om b +am er +Ġsignific ant +og raph +s u +Ġc al +i res +00 00 +I D +A M +Ġsim ply +Ġlong er +Ġf ile +O T +c he +S o +ate g +or g +ĠH is +Ġen er +Ġd om +Ġup on +il i +": " +Ġthem selves +Ġcom ing +Ġqu ite +Ġdiff icult +ĠB ar +il ities +re l +end s +c ial +6 4 +Ġwom an +ra p +y r +Ġne cess +ip s +Ġte xt +Ġrequ ire +Ġmilit ary +Ġre view +Ġresp ons +7 5 +Ġsub ject +Ġinst ead +Ġiss ues +Ġg en +" ," +Ġmin utes +Ġwe ap +r ay +am ed +t ime +b l +H ow +Ġc ode +ĠS m +Ġhig her +ĠSt e +r is +Ġp age +Ġstud ents +ĠIn tern +Ġmet hod +ĠA ug +ĠP er +ĠA g +Ġpolic y +ĠS w +Ġex ec +Ġac cept +um e +rib ut +Ġword s +Ġfin al +Ġchang es +ĠDem ocr +Ġfriend s +Ġres pect +Ġe p +Ġcomp an +iv il +Ġdam age +** ** +og le +viron ment +Ġne g +ent al +Ġa p +Ġtot al +iv al +! " +l im +Ġneed s +Ġag re +Ġdevelop ment +Ġa ge +ip le +2 1 +Ġresult s +ĠA f +S h +Ġg un +ĠOb ama +ro ll +Ġ @ +Ġright s +ĠB rit +Ġrun ning +Ġwas n +Ġp ort +Ġr ate +Ġpret ty +Ġtarg et +Ġsa w +Ġc irc +Ġwor ks +ic ro +al t +o ver +ww w +Th at +l ier +Ġevery one +ud e +Ġp ie +idd le +ra el +Ġr ad +Ġbl ock +Ġw alk +T o +ã ģ +n es +ĠA ust +a ul +ro te +ĠS outh +ess ion +op h +Ġshow s +Ġs ite +Ġj o +Ġr isk +cl us +l t +Ġin j +id ing +ĠS pe +Ġch all +ir m +Ġ2 2 +itt ing +st r +Ġh y +L E +ke y +Ġbe gan +at ur +ashing ton +l am +ĠD av +b it +Ġs ize +ĠP ar +3 8 +ourn al +f ace +Ġdec ision +Ġl arg +Ġj ud +re ct +Ġcontin ue +ĠO ct +ove red +ĠI nt +==== ==== +Ġp arent +ĠW ill +Ġeas y +Ġd rug +ang er +Ġs ense +Ġd i +id ay +Ġener gy +ist ic +Ġass oci +ar ter +ob al +e ks +ĠE l +ur ch +Ġg irl +o e +it le +Ġ2 8 +ĠC he +Ġrequ est +Ġso on +Ġh ost +k y +Ġst ates +om es +Ġm aterial +le x +Ġmom ent +Ġan sw +on se +Ġes pecially +Ġn orm +Ġserv ices +p ite +r an +Ġro le +4 4 +) : +Ġc red +C l +____ ____ +Ġm at +Ġl og +ĠCl inton +O U +Ġoff ice +Ġ2 6 +Ġch arg +Ġtr ack +m a +Ġhe art +Ġb all +Ġperson al +Ġbuild ing +n a +s et +b ody +ĠBl ack +Ġincre ase +itt en +Ġneed ed +3 6 +3 2 += " +Ġl ost +Ġbec ame +Ġgrou ps +ĠM us +Ġw rote +ĠP e +Ġpro p +j oy +à © +ĠWh ite +Ġde ad +. ' +Ġhtt p +Ġwe bs +O S +Ġins ide +Ġwr ong +Ġstat ement +Ġ ... 
+y l +Ġfil m +Ġmus ic +Ġsh are +ific ation +Ġre lease +Ġfor ward +Ġst ay +Ġcomp ut +it te +s er +Ġorig inal +Ġc ard +Ġc and +Ġd iv +at ural +Ġfav or +O M +Ġc ases +us es +Ġse ction +Ġle ave +g ing +ov ed +ĠW ashington +3 9 +ĠG l +Ġrequ ired +act ion +ap an +o or +it er +ĠK ing +Ġcount ries +ĠG erman +ll ing +Ġ2 7 +3 4 +Ġquest ions +Ġpr im +Ġc ell +Ġsh oot +Ġany one +ĠW est +Ġaff ect +ep end +Ġon line +ĠIs rael +ĠSept ember +Ġab ility +Ġcont ent +is es +Ġre ve +Ġl aun +Ġind ic +Ġfor ce +c ast +Ġso ld +av ing +f l +Ġso ft +Ġcompan ies +ce ed +Ġart icle +Ġa ud +Ġre v +Ġed uc +Ġplay ing +0 5 +Ġhe ld +ct or +Ġrele ased +Ġf ederal +3 7 +Ġad minist +Ġinter view +Ġinst all +Ġrece ived +Ġs ource +u k +P h +Ġser ious +Ġcre ated +Ġc ause +Ġim medi +Ġdef in +u el +ĠDep artment +ct ions +ĠC our +ĠN ow +z e +it es +it ution +Ġl ate +Ġspe ak +n ers +Ġleg al +ar i +ĠC or +Ġwe eks +Ġmod el +Ġp red +Ġex act +B C +ĠB y +IN G +os ing +Ġt akes +Ġreg ard +Ġopp ortun +Ġpr ice +Ġ19 8 +ĠA pr +f ully +Ġor d +Ġproble ms +ru ction +h am +ĠC ount +le ge +Ġlead ers +E T +le v +Ġde ep +olog ical +es e +h aps +ĠS ome +Ġp ers +Ġcont ract +Ġrelations hip +s p +ou d +Ġb ase +4 8 +m it +A d +anc ial +Ġcons um +Ġpot ential +Ġl angu +re m +et h +Ġrel ig +ress ed +6 6 +Ġl ink +Ġl ower +ay er +ĠJ une +Ġf em +un t +er c +ur d +Ġcont act +Ġ ill +Ġm other +Ġest ab +h tt +ĠM arch +ĠB ro +ĠCh ina +Ġ2 9 +Ġs qu +Ġprov ided +Ġa verage +as ons +Ġ201 1 +Ġex am +l in +5 5 +n ed +Ġper fect +Ġt ou +al se +u x +Ġbu y +Ġsh ot +Ġcol lect +Ġph ot +Ġplay ed +Ġsur pr +Ġofficial s +Ġsim ple +av y +Ġindust ry +Ġhand s +g round +Ġp ull +Ġr ound +Ġus er +Ġr ange +u ary +Ġpriv ate +op s +e es +Ġw ays +ĠM ich +Ġve h +Ġex cept +Ġter ms +im um +pp er +I ON +ore s +ĠDr agon +ou l +Ġd en +Ġperform ance +Ġb ill +c il +4 7 +Ġen vironment +Ġex c +ad d +Ġwor th +Ġp ict +Ġch ance +Ġ201 8 +b or +Ġspe ed +ict ion +Ġal leg +ĠJ apan +at ory +re et +Ġm atch +ĠI I +Ġst ru +ord er +Ġst e +Ġl iving +Ġst ruct +in o +Ġse par +her n +Ġresp onse +Ġen joy +Ġv ia +A D +um ents +ace book +Ġmem ber +ib r +iz ing +Ġto ol +ĠM on +ĠWh ile +h ood +ĠA ng +ĠD ef +Ġoff er +T r +a ur +Ġturn ed +ĠJ uly +d own +an ced +Ġrec ently +ĠE ar +Ġc e +ĠSt ar +ĠC ong +rough t +Ġbl ood +Ġhop e +Ġcom ment +ain t +Ġar ri +il es +Ġpartic ip +ough t +ri ption +0 8 +4 9 +Ġg ave +Ġse lect +Ġkill ed +sy ch +Ġgo es +i j +Ġc oll +Ġimp act +at ives +ĠS er +0 9 +ĠAug ust +Ġb oy +d e +ĠD es +Ġf elt +U S +Ġexpect ed +Ġim age +ĠM ark +cc ording +o ice +E C +ĠM ag +en ed +h old +ĠP ost +Ġpre vent +N o +Ġinvol ved +Ġey es +Ġquick ly +A t +un k +Ġbeh av +Ġ ur +Ġl ed +c ome +e y +Ġcand id +Ġear lier +Ġfoc us +et y +P ro +led ge +ix ed +ill ed +Ġpop ular +A P +Ġset t +l ight +Ġvar ious +in ks +Ġlevel s +Ġro ad +ell ig +ab les +he l +itte e +ĠG ener +y pe +Ġhe ard +ic les +Ġm is +Ġus ers +ĠS an +Ġimpro ve +Ġf ather +Ġse arch +The y +v il +Ġprof ess +Ġkn ew +Ġl oss +Ġev ents +6 5 +Ġb illion +0 7 +0 2 +ĠNew s +ĠA M +Ġco ver +w here +ens ion +Ġb ott +Ġare as +en ces +op e +ĠTw itter +a el +Ġget s +ĠGo ogle +Ġs n +i ant +Ġv ote +Ġnear ly +Ġinclud ed +Ġrec ogn +z z +m m +al ed +Ġhappen ed +0 4 +Ġh ot +Ġwho se +Ġc ivil +Ġsu ff +o es +it iz +ĠSy ri +Ġresp ond +Ġh on +Ġfeat ures +Ġeconom ic +ĠApr il +r im +Ġtechn ology +Ġo ption +ag ing +Ġpur ch +R e +Ġl at +ch ie +is l +Ġrec omm +u f +Ġtr aining +Ġeffect s +Ġf ast +Ġ201 0 +Ġocc ur +Ġwebs ite +Ġem ail +Ġs ens +e ch +Ġo il +Ġinf lu +Ġcurrent ly +ĠS ch +ĠAd d +Ġgo al +Ġsc ient +Ġcon v +1 00 +em y +Ġdec ided +Ġtra vel +Ġm ention +L L +0 3 +Ġe lection +Ġph one +Ġlook s +Ġsit 
uation +Ġc y +Ġh or +b ed +ĠCour t +a ily +av es +Ġqu ality +ĠCom p +w ise +Ġt able +Ġst aff +ĠW ind +et t +Ġtri ed +ide red +Ġadd ition +Ġb ox +Ġl ack +ar ily +Ġw ide +Ġm id +Ġbo ard +ys is +Ġant i +h a +Ġd ig +en ing +Ġd ro +C on +6 8 +Ġsl ow +b ased +se qu +Ġp ath +E x +ak er +Ġwork ed +Ġp en +Ġeng ine +Ġlook ed +ĠSu per +ĠS erv +Ġvict im +U n +Ġproper ty +Ġint rodu +Ġexec ut +ĠP M +L e +Ġcol or +ĠM ore +Ġ6 0 +Ġnet work +Ġd ate +c ul +id ge +Ġext ra +3 1 +Ġs le +6 7 +Ġw ond +Ġreport s +j ust +ĠAust ral +Ġcap ital +Ġen s +Ġcomm and +Ġallow ed +Ġpre p +Ġca pt +h ib +Ġnum bers +ch an +Ġf air +m p +om s +Ġre ach +W ith +t ain +Ġbro ad +Ġcou ple +ec ause +ly ing +ĠF eb +Ġsc reen +Ġl ives +Ġpri or +ĠCong ress +A r +Ġappro ach +Ġe mer +ar ies +ĠD is +s erv +ĠN e +Ġbu ilt +c ies +Ġre pe +Ġrul es +for ce +ĠP al +Ġfin ancial +Ġcons idered +ĠCh ar +n ces +ĠI S +Ġb rought +Ġb i +i ers +ĠS im +O P +Ġproduct s +Ġvis it +Ġdoc ument +Ġcon duct +Ġcomplete ly +in ing +ĠCal if +ib ly +Ġwr itten +ĠT V +em ents +Ġd raw +O ne +Ġpub lished +Ġsec ret +r ain +he t +ĠF acebook +ond ay +ĠU p +Ġsex ual +Ġth ous +ĠP at +Ġ ess +Ġstand ard +Ġar m +g es +ect ion +Ġf ell +Ġfore ign +an i +ĠFr iday +Ġreg ular +in ary +Ġincre ased +Ġus ually +Ġdem on +Ġd ark +Ġadd itional +ro l +ĠO f +Ġprodu ction +! ! +und red +Ġintern ational +id ents +ĠF ree +rou p +Ġr ace +Ġm ach +Ġh uge +A ll +le ar +ove mber +Ġto wn +Ġatt ention +ĠO ff +y ond +ĠThe n +f ield +Ġter ror +ra z +ĠB o +Ġmeet ing +ĠP ark +Ġar rest +Ġf ear +Ġa w +ĠV al +or ing +' , +Ġext reme +ar r +Ġwork ers +A fter +Ġ3 1 +n et +am ent +Ġdirect ly +Ġpop ulation +ub e +ĠOct ober +ĠI N +ĠJan uary +5 9 +ĠDav id +Ġc ross +ce mber +ĠF irst +Ġmess age +ir it +Ġn ation +Ġp oll +is ions +Ġansw er +n y +is ode +Ġcar ry +ĠRuss ia +Ġhe ar +eng th +ro y +Ġn atural +in ally +Ġdo g +m itted +Ġtr ade +Ġsub st +Ġmult iple +ĠAf ric +Ġf ans +Ġs ort +Ġgl obal +ic ation +ĠW ed +ar a +Ġa chie +Ġlangu age +ve y +Ġt al +Ġnecess ary +Ġdet ails +Ġs en +ĠS und +ĠRe g +ĠR ec +0 6 +Ġs il +ress ive +Ġmed ical +un ch +orn ia +Ġu nd +f ort +oc ks +ĠM onday +ues day +c raft +7 7 +ur t +Ġ ver +ĠH ill +Ġrece ive +Ġmor ning +es tern +Ġb ank +Ġs at +ir th +ĠH igh +Ġdev ice +ĠTH E +ĠCent er +Ġsaf e +Ġp le +ĠCanad a +Ġsystem s +Ġass ist +Ġsur v +Ġb attle +ĠS oc +vert is +S he +Ġp aper +Ġgrow th +Ġc ast +S c +Ġpl ans +ll ed +Ġpart s +Ġw all +Ġmove ment +Ġpract ice +im ately +Ġdis play +Ġsomet imes +om p +ĠP aul +ĠY es +k ing +5 8 +o ly +Ġs on +Ġav oid +ok es +ĠJ ew +Ġto wards +as c +Ġ // +ĠK ore +Ġtalk ing +Ġcor rect +Ġsp ent +ic ks +i able +e ared +Ġter m +Ġwant s +om ing +Ġ ut +Ġdou b +Ġfor ces +Ġp lease +6 9 +ĠN ovember +at form +ond on +Ġon es +Ġimmedi ately +ĠRuss ian +ĠM et +Ġde g +Ġparent s +C H +ĠAmeric ans +al y +ĠM od +Ġsh own +Ġcond itions +Ġst uff +Ġre b +ĠY our +Ġinclud es +n own +ĠS am +Ġexper ien +m ission +ĠE ven +augh t +Ġannoun ced +ĠRepublic an +Ġdeter min +Ġdescrib ed +ĠCount y +( ) +Ġdo or +Ġchang ed +Ġne igh +ĠH ere +Ġcle an +Ġp an +ĠDe cember +ĠEurope an +ir ing +ap ter +Ġcl ub +ĠT uesday +Ġp aid +ĠN et +Ġattack s +Ġcharact ers +Ġal one +Ġdirect or +d om +Ġ3 5 +Ġl oad +Ġr out +ĠCalif ornia +Ġfin ally +Ġr ac +Ġcont r +Ġexact ly +res h +p ri +ĠIs lam +Ġn ature +Ġcare er +Ġlat est +Ġcon vers +ĠS l +p ose +ci ent +ĠIn c +iv ity +8 8 +ĠA tt +ĠM or +nes day +Ġwe ight +k en +Ġnot e +Ġteam s +Ġ \ +air s +ĠG reen +Ġh undred +on ent +Ġstre ng +Ġcons ist +ic ated +Ġreg ul +Ġl ic +ast ic +Ġt en +urs day +ellig ence +ous ly +ĠU K +B I +Ġcost s +Ġind epend +ĠA P +Ġnorm al +Ġh om +Ġob vious +Ġs we 
+Ġst ar +Ġread y +ac her +Ġimp lement +g est +Ġs ong +ĠG et +ĠL ab +Ġinterest ing +us ing +Ġg iving +ĠSund ay +Ġet c +Ġm iddle +Ġrem ember +r ight +os ition +ut ions +Ġm ax +4 6 +Ġyour self +Ġdem and +Ġtreat ment +Ġd anger +ĠC ons +Ġgu y +ĠBrit ish +Ġphys ical +Ġrel ated +Ġrem ain +Ġcould n +Ġref er +Ġc itiz +b ox +EN T +bo ard +Ġin n +I G +er o +ĠSt reet +osp ital +ren ch +cher s +Ġst ra +O L +ag er +ĠA N +Ġeas ily +I A +en ge +in y +Ġcl os +ock ed +Ġus es +ĠC oun +I m +u ild +? ? +m ore +Ġan g +Ġwr ite +ol ute +5 7 +Ġlead er +Ġread ing +< / +Ġaut om +est s +4 3 +Ġleg isl +ĠG old +Ġdesign ed +ĠS T +ĠLe g +a res +Ġbe aut +ĠT ex +Ġappear s +Ġstru gg +ĠR om +Ġ 00 +Ġcho ice +Ġparticular ly +ĠF rom +op er +ĠL ondon +ann ed +Ġallow s +ob ile +Ġdiffere nce +âĢ ¢ +ĠV iew +ĠWed nesday +Ġal though +Ġrel ative +Ġapplic ation +ate ver +Ġare n +Ġmy self +Ġim ag +Ġdis e +Ġsoc iety +Ġfre qu +ĠEng lish +Ġpo or +ĠD ay +Ġwrit ing +Ġse ven +Ġstart ing +Ġb ud +Ġpr int +ĠTr ans +uf act +ĠSt ud +n ew +Ġcr im +Ġg ives +Ġco ol +a e +i ance +ĠGener al +Ġthink ing +Ġsa ve +Ġlim ited +ĠPart y +Ġmean ing +p en +ow ers +ĠJ ack +E M +Ġn ice +ru pt +Ġg as +Ġe ight +Ġfe et +Ġeff ort +Ġ ign +ic it +B l +co in +Ġop in +Ġbr ain +Wh ile +he st +ĠTh ursday +Ġwould n +augh ter +Ġtou ch +le ments +Ġstud ies +Ġcent er +c ont +or ge +Ġcomput er +Ġinvestig ation +P l +or ks +Ġ200 8 +Ġincre asing +Ġst ore +Ġcom ments +Ġb al +m en +Ġdo ll +Ġl iber +Ġw ife +Ġlaw s +atur day +it ness +Ġmod ern +ĠS k +Ġadminist ration +Ġopportun ity +Ġs al +Ġpower ful +M y +Ġclaim s +ĠEar th +ord s +Ġt itle +Ġes c +n ame +N ot +om en +Ġbe yond +Ġc amer +Ġse ll +it ute +ear ch +Ġapp l +im ent +4 2 +ĠAr t +Ġun f +Ġviol ence +ur g +ĠE ast +Ġcomp ared +Ġopt ions +Ġthrough out +Ġv s +ig r +. [ +ac hes +7 8 +Ġfil es +F L +E L +ar ian +ĠJ ames +ĠA ir +an ch +Ġdet ail +Ġpie ce +P S +Ġn amed +Ġeduc ation +Ġdri ve +Ġitem s +Ġstud ent +ic ed +: : +ic o +Ġth row +Ġsc ene +Ġcomple x +Ġ200 9 +Ġpre c +ĠB re +7 9 +Ġcon cept +Ġstat us +am ing +Ġd ied +Ġknow ledge +Ġbegin ning +O D +ru ary +Ġcertain ly +Ġgu ys +Ġsl ight +in n +ound s +Ġf ine +Ġf at +ic ations +Ġper haps +ĠA nt +Ġinc ome +Ġhtt ps +Ġmajor ity +port s +st on +Ġgreat er +Ġfe ed +ent ially +Ġsaf ety +Ġun ique +and om +Ġg one +Ġshow ed +Ġhist or +Ġcoun ter +i us +id a +Ġlead ing +i pe +Ġs end +ĠDon ald +er ve +Ġdef ense +ines e +Ġy es +ĠF ire +ĠMus lim +ra q +Ġcontin ued +os h +Ġprov ides +Ġpr ison +ĠP re +Ġhapp y +Ġeconom y +Ġtr ust +ag s +ĠG ame +Ġweap ons +um an +ĠC le +it ation +Ġanal ysis +ĠT imes +Ġsc ience +- > +Ġfig ure +Ġdis app +ent y +Ġsoft ware +Ġu lt +Ġoffic ers +N ew +I s +Ġrem ains +ĠInd ia +Ġp sych +ri ef +Ġc at +es c +Ġob serv +Ġst age +ĠD ark +Ġent er +ch ange +Ġpass ed +Ġdes pite +ĠO ut +Ġmov ie +r s +Ġv oice +m ine +ĠPl ay +Ġto ward +ĠT er +Ġreg ion +Ġval ues +or ters +Ġm ount +Ġoffic er +ĠO ther +b an +Ġh ous +w ood +ro om +I V +ĠS un +se e +ĠO ver +ro g +9 0 +Ġl ay +ĠT ur +a wn +Ġpress ure +ĠS ub +Ġbook s +ed om +ĠS and +A A +ag o +Ġre asons +f ord +Ġactiv ity +U T +N ow +ĠSen ate +ce ll +n ight +Ġcall s +in ter +Ġlet ter +ĠR ob +ĠJ e +Ġcho ose +ĠL aw +G et +B e +Ġro b +Ġtyp es +Ġpl atform +Ġqu arter +R A +ĠT ime +Ġmay be +ĠC r +9 5 +p re +Ġmov ing +Ġl if +Ġgo ld +Ġs om +Ġpat ients +Ġtr uth +ĠK e +ur ance +ant ly +m ar +Ġchar ge +ĠG reat +Ġce le +---------------- ---------------- +Ġro ck +ro id +an cy +Ġcred it +a ud +B y +ĠE very +Ġmov ed +ing er +rib ution +Ġn ames +Ġstra ight +ĠHe alth +ĠW ell +Ġfe ature +Ġr ule +Ġsc he +in ated +ĠMich ael +ber g +4 1 +il ed +b and +Ġcl ick +ĠAng el 
+on ents +Â Ń +ĠI raq +ĠS aturday +Ġa ware +p art +Ġpat tern +O W +ĠL et +Ġgr ad +ign ed +Ġassoci ated +Ġst yle +n o +i ation +a ith +il ies +Ġst ories +ur ation +Ġindividual s +ĠâĢ ¦ +m iss +ĠAss oci +ish ing +ab y +Ġsum mer +ĠB en +Ġ3 2 +Ġar ch +ut y +ĠTex as +h ol +Ġfull y +Ġm ill +Ġfollow ed +ĠB ill +ĠInd ian +ĠSec ret +ĠB el +ĠFeb ruary +Ġjob s +Ġseem ed +ĠGo vern +i pped +Ġreal ity +Ġl ines +Ġp ark +Ġmeas ure +ĠO ur +I M +Ġbro ther +Ġgrow ing +Ġb an +Ġest im +Ġc ry +ĠS chool +Ġme chan +ĠO F +ĠWind ows +Ġr ates +ĠO h +Ġpos itive +Ġcult ure +ist ics +ic a +Ġh ar +y a +ite ly +i pp +Ġm ap +en cies +ĠWill iam +I I +ak ers +5 6 +ĠM art +ĠR em +Ġal tern +it ude +Ġco ach +row d +D on +Ġk ids +Ġj ournal +Ġcor por +Ġf alse +Ġwe b +Ġsle ep +Ġcont ain +Ġst o +Ġb ed +iver se +ĠR ich +ĠCh inese +Ġp un +Ġme ant +k nown +Ġnot ice +Ġfavor ite +a ven +Ġcond ition +Ġpur pose +) ) +Ġorgan ization +Ġchall eng +Ġman ufact +Ġsus p +ĠA c +Ġcrit ic +un es +uc lear +Ġm er +vent ion +Ġ8 0 +Ġm ist +ĠU s +ĠT or +htt p +ol f +Ġlarg er +Ġadv ant +Ġrese ar +Ġact ions +m l +Ġke pt +Ġa im +, ' +c ol +Ġbenef its +if ying +Ġact ual +ĠIntern ational +Ġveh icle +Ġch ief +Ġeff orts +ĠLe ague +ĠM ost +Ġwa it +Ġad ult +Ġover all +Ġspe ech +Ġhigh ly +Ġfem ale +Ġer ror +Ġeffect ive +5 4 +Ġenc our +w ell +Ġfail ed +Ġcons erv +Ġprogram s +Ġt rou +Ġa head +5 00 +vertis ement +I P +ĠF ound +p ir +Ġ % +Ġcr ime +and er +Ġloc ation +ĠI ran +Ġbehav ior +az ing +Ġr are +Ġem b +Ġca used +Ġsh ip +Ġact ive +Ġcont ribut +Ġg reen +Ġac qu +Ġref lect +ven ue +Ġf irm +Ġb irth +] . +Ġclear ly +Ġem ot +Ġag ency +ri age +Ġmem ory +9 8 +S A +ĠSe e +ac ing +C C +Ġbig gest +Ġr ap +Ġbas ic +Ġb and +e at +Ġsus pect +ĠM ac +Ġ9 0 +m ark +ist an +Ġsp read +am s +k i +as y +ra v +ĠR ober +Ġdemon str +r ated +Ġabs olute +Ġpl aces +Ġim pl +ibr ary +Ġc ards +Ġdest roy +Ġv irt +ve re +Ġapp eared +y an +p oint +Ġbe g +Ġtem per +s pe +ant ed +ear s +ĠD irect +Ġl ength +Ġbl og +am b +Ġint eg +Ġres ources +ac c +if ul +Ġsp ot +Ġfor ced +Ġthous ands +ĠMin ister +Ġqu al +ĠF rench +at ically +Ġgener ally +Ġdr ink +Ġth us +I L +od es +Ġappro pri +ĠRe ad +Ġwh om +Ġey e +Ġcol lege +Ġ4 5 +ire ction +Ġens ure +Ġapp arent +id ers +Ġrelig ious +Ġmin or +ol ic +Ġt ro +ĠWh y +rib ute +m et +Ġprim ary +Ġdevelop ed +Ġpe ace +Ġsk in +st e +av a +Ġbl ue +Ġfam ilies +Ġ ir +Ġapp ly +Ġin form +ĠSm ith +C T +i i +Ġlim it +Ġres ist +........ ........ 
+um n +Ġconf lic +Ġtw e +ud d +ĠT om +Ġl iter +qu e +b on +Ġha ir +Ġevent ually +Ġp us +Ġhelp ed +Ġag g +or ney +ĠApp le +Ġf it +ĠS ur +Ġpre m +Ġs ales +Ġsecond s +Ġstreng th +Ġfeel ing +¿ ½ +Ġt our +Ġknow s +o om +Ġex erc +Ġsom ew +ï ¿½ +> > +Ġsp okes +Ġide as +Ġreg ist +so ft +ĠD el +ĠP C +Ġpro pos +Ġlaun ch +Ġbott om +T H +ĠP lease +v est +it z +ĠIn ter +Ġsc ript +Ġr at +ar ning +Ġ il +ĠJ er +ĠA re +Ġwh atever +ok en +ci ence +Ġmod e +Ġag ree +Ġs ources +Ġinit ial +Ġrest rict +Ġwond er +us ion +## ## +ĠS il +vil le +Ġb urn +t w +as ion +Ġ £ +Ġn or +u ing +Ġre ached +Ġs un +Ġc ateg +ig ration +Ġc ook +Ġprom ot +Ġm ale +Ġcl imate +Ġf ix +Ġalleg ed +U R +all ed +Ġim ages +C ont +ot a +Ġschool s +i os +Ġd rop +Ġst ream +ĠM o +Ġprevious ly +al ing +Ġp et +Ġdou ble +Ġ( @ +ann el +Ġdef ault +t ies +Ġr ank +ĠD ec +ĠCoun cil +Ġweap on +Ġst ock +Ġanal y +ĠSt r +Ġpict ure +ĠPol ice +f erence +Ġcent ury +Ġcitiz ens +Ġon to +Ġexp and +Ġhe ro +ĠS ol +Ġw ild +Ġupd ate +Ġcustom ers +r ont +d ef +Ġl ik +Ġcrim inal +ĠChrist ian +S P +7 6 +Ġle aving +Ġother wise +ĠD ist +Ġbas is +5 2 +5 3 +ic ip +ĠB er +Ġrecomm end +Ġfl oor +Ġc rowd +ol es +Ġ7 0 +Ġcent ral +ĠE v +Ġd ream +Ġdown load +Ġconf ir +ĠTh om +Ġwind ow +Ġhapp ens +Ġun it +Ġt end +Ġs pl +Ġbec omes +Ġfight ing +Ġpred ict +ĠP ress +ĠP ower +Ġhe avy +ak ed +Ġf an +or ter +ate gy +B A +iz es +Ġsp end +H ere +Ġ200 7 +Ġad op +ĠH am +Ġfoot ball +ĠP ort +od ay +5 1 +amp ions +Ġtrans fer +h t +Ġ3 8 +ter m +ac ity +Ġb ur +] , +tern al +r ig +b ut +Ġthere fore +ĠB ecause +res p +re y +Ġm ission +S ome +Ġnot ed +Ġass um +Ġdise ase +Ġed it +Ġprog ress +r d +ĠB rown +oc al +Ġadd ing +Ġra ised +ĠAn y +Ġt ick +Ġsee ing +ĠPe ople +Ġagre ement +Ġser ver +Ġw at +Ġdeb ate +Ġsupp osed +il ing +Ġlarg est +Ġsuccess ful +ĠP ri +ĠDemocr atic +Ġj ump +ĠSyri a +Ġown ers +Ġoff ers +Ġshoot ing +Ġeff ic +se y +Ġha ven +ver se +te red +ĠL ight +im al +ĠB ig +Ġdef end +Ġbe at +Ġrecord s +% ) +Ġsc en +Ġemploy ees +Ġdev ices +he m +Ġcom mer +ĠM ex +Ġbenef it +ĠPro f +Ġil leg +Ġsur face +ĠAl so +Ġh arm +ing ly +w ide +ĠA lex +Ġsh ut +ĠC ur +Ġl ose +p m +Ġchall enge +se mb +Ġst ation +Ġint elligence +Ġacc ur +ĠFl or +Ġrequ ires +ĠM al +b um +Ġh ospital +Ġsp irit +Ġoff ered +Ġprodu ce +ĠComm un +Ġcreat ing +Ġcr is +s pect +Ġend ed +Ġd aily +Ġvot ers +land s +i as +i h +on a +Ġsm art +ĠOff ice +ĠL ord +ri al +ĠIntern et +Ġcirc um +Ġextreme ly +' . 
+Ġopin ion +ĠM il +Ġg ain +B S +ĠF in +y p +Ġuse ful +Ġbud get +Ġcom fort +is f +Ġback ground +el ine +Ġep isode +Ġen emy +Ġtri al +Ġestab lish +d ate +ĠC ap +Ġcontin ues +Ġshow ing +ĠUn ion +w ith +Ġpost ed +ĠSy stem +Ġe at +ri an +Ġr ise +ĠGerman y +il s +Ġsign ed +Ġv ill +Ġgr and +m or +ĠEng land +Ġproject s +um ber +Ġconf erence +z a +Ġrespons ible +ĠAr ab +Ġlearn ed +âĢĶ âĢĶ +i pping +ĠGe orge +O C +Ġreturn ed +ĠAustral ia +Ġb rief +Q u +Ġbr and +ill ing +ab led +Ġhig hest +Ġtr ain +ĠComm ission +wh ile +Ġn om +cept ion +Ġm ut +ĠBl ue +Ġinc ident +v ant +8 6 +ĠI D +Ġn uclear +7 4 +ĠL ike +ĠR E +ĠM icro +l i +m ail +Ġcharg es +8 9 +Ġad just +ad o +Ġear th +N A +Ġpr ices +P A +Ġd raft +Ġrun s +Ġcandid ate +ens es +Ġmanag ement +ĠPh il +ĠM iss +Ġte ach +g ram +Ġunderstand ing +a it +ic ago +A dd +ĠE p +sec ut +Ġsepar ate +Ġinst ance +Ġe th +Ġun less +**** **** +ĠF ore +in ate +Ġoper ations +S p +Ġf aith +g ar +ĠCh urch +ron ic +Ġconf ig +os ure +Ġactiv ities +Ġtrad itional +Ġ3 6 +Ġd irection +Ġmach ine +Ġsur round +Ġp ush +un ction +ĠE U +Ġeas ier +Ġarg ument +G B +Ġm icro +Ġsp ending +iz ations +Ġthe ory +ad ow +Ġcall ing +ĠL ast +Ġd er +Ġinflu ence +Ġcomm it +Ġph oto +Ġun c +ist ry +g n +ast e +ack s +Ġdis p +ad y +d o +ĠG ood +Ġ ` +Ġw ish +Ġreve aled +Âł Âł +l ig +Ġen force +ĠComm ittee +Ġche m +Ġmil es +Ġinterest ed +Ġsol ution +ic y +in ct +Ġ- > +ĠD et +Ġrem oved +Ġcomp ar +e ah +Ġpl ant +ĠS ince +Ġachie ve +Ġadvant age +Ġslight ly +b ing +Ġpl aced +u nder +201 5 +ĠM ad +Ġt im +os es +Ġc ru +ĠR ock +Ġmost ly +Ġneg ative +Ġset ting +Ġprodu ced +Ġm ur +Ġconnect ion +ĠM er +Ġdri ver +Ġexecut ive +Ġass ault +Ġb orn +ĠV er +t ained +Ġstruct ure +Ġredu ce +Ġdec ades +Ġd ed +u ke +ĠM any +idd en +Ġle ague +S e +Ġjo in +Ġdis co +Ġd ie +c ks +act ions +Ġass ess +ag n +Ġgo als +our s +I R +Ġsen ior +ill er +m od +ip ment +oc ol +u y +ĠQ ue +Ġpart ies +ir gin +Ġle arning +it able +Ġstre et +Ġcamer a +A pp +Ġsk ills +b re +c ious +Ġcele br +ĠFr anc +Ġexist ing +Ġwill ing +l or +Ġ id +ĠSp ace +Ġcrit ical +ĠL a +ortun ately +Ġser ve +Ġc old +Ġspec ies +T S +Ġanim als +ĠB ay +Ġold er +ĠU nder +est ic +ĠT re +Ġte acher +Ġpre fer +v is +Ġth read +ĠM att +Ġmanag er +ãĥ » +Ġprofess ional +ĠV ol +Ġnot es +The se +ul a +Ġf resh +ent ed +u zz +ed y +clus ion +ĠR el +Ġdoub t +E O +Ġopen ed +ĠB it +Ad vertisement +Ġgu ess +ĠU N +Ġse qu +Ġexpl ain +ott en +Ġatt ract +ak s +Ġstr ing +Ġcont ext +oss ible +ĠRepublic ans +Ġsol id +Ġc ities +Ġask ing +Ġr andom +u ps +ur ies +ar ant +dd en +g l +ĠFlor ida +Ġdep end +ĠSc ott +Ġ3 3 +Ġi T +ic on +Ġmention ed +Ġ2 000 +Ġclaim ed +Ġdefin itely +ul f +Ġc ore +Ġopen ing +ĠCon st +wh ich +ĠT ra +A G +7 2 +Ġbelie ved +ad a +Ġ4 8 +ĠSec urity +yr ight +ĠP et +ĠL ou +Ġhold ing +======== ======== +Ġ ice +Ġb row +Ġauthor ities +h ost +w ord +Ġsc ore +ĠD iv +Ġcell s +Ġtrans l +Ġneigh bor +Ġrem ove +u ct +Ġdist rict +ĠA ccording +Ġwor se +Ġconcern s +Ġpresident ial +Ġpolic ies +ĠH all +7 3 +Ġh us +A Y +Ġ200 6 +ĠJ ud +Ġindepend ent +ĠJust ice +ili ar +pr int +igh ter +Ġprotect ion +z en +Ġsu dden +h ouse +ĠJ es +P R +ĠIn f +Ġb ul +Ġ _ +ĠServ ice +ĠP R +Ġstr ategy +ff ect +Ġgirl s +Ġmiss ing +oy al +ĠTe am +ul ated +Ġd at +Ġpolit ics +ab or +A ccording +Ġspe ll +Ġg raph +ort hern +T C +A b +Ġlab or +is her +Ġk ick +ĠiT unes +Ġstep s +pos es +Ġsmall er +E n +ber t +Ġro ll +Ġresear chers +Ġcl osed +Ġtrans port +Ġlaw y +________ ________ +ĠCh icago +Ġas pect +Ġn one +Ġmar riage +9 6 +Ġe lements +ĠF re +ĠS al +Ġd ram +F C +t op +e qu +Ġhe aring +Ġsupport ed +Ġtest ing +co hol +Ġmass 
ive +Ġst ick +Ġgu ard +is co +ph one +F rom +How ever +Ġb order +Ġcop y +ograph y +l ist +7 1 +Ġown er +cl ass +ru it +r ate +ĠO nce +Ġdig ital +Ġt ask +ER S +Ġinc red +t es ++ + +ĠFr ance +Ġb reat +ow l +Ġiss ued +ĠW estern +Ġdet ect +Ġpart ners +Ġsh ared +ĠC all +Ġcan cer +ac he +rib e +Ġexpl ained +Ġhe at +{ " +Ġinvest ment +ĠB ook +Ġw ood +Ġtool s +ĠAl though +Ġbelie f +Ġcris is +Ġg e +ĠM P +Ġoper ation +ty pe +~ ~ +g a +Ġcont ains +ant a +Ġexp ress +ĠG roup +ĠJ ournal +k a +Ġam b +ĠUS A +Ġfind ing +Ġfund ing +h ow +Ġestab lished +ide os +Ġdeg ree +Ġdanger ous +ang ing +Ġfre edom +pp ort +out hern +Ġch urch +Ġc atch +ĠTw o +Ġpres ence +ĠGu ard +U p +Ġauthor ity +ĠPro ject +Ġbut ton +Ġcon sequ +Ġval id +Ġwe ak +Ġstart s +Ġref erence +ĠM em +" ) +U N +or age +ĠO pen +Ġcol lection +y m +g ency +Ġbeaut iful +ro s +Ġtell s +Ġwa iting +n el +Ġprov iding +ĠDemocr ats +Ġd aughter +Ġm aster +Ġpur poses +ĠJapan ese +Ġequ al +Ġturn s +Ġdoc uments +Ġwatch ing +R es +Ġr an +201 4 +Ġre ject +ĠKore a +Ġvictim s +Le vel +ere nces +Ġw itness +Ġ3 4 +Ġre form +com ing +Ġocc up +Ġc aught +Ġtra ffic +ad ing +Ġmod els +ar io +Ġserv ed +Ġb atter +u ate +ĠSecret ary +Ġagre ed +Ġtr uly +yn am +ĠR et +Ġun its +ĠRes earch +h and +az ine +ĠM ike +Ġvar iety +ot al +Ġam azing +Ġconfir med +Ġentire ly +Ġpurch ase +Ġe lement +Ġc ash +Ġdeter mine +D e +Ġc ars +ĠW all +â ĸ +Ġview s +Ġdrug s +Ġdep artment +ĠSt ep +u it +Ġ3 9 +as ure +ĠCl ass +Ġc overed +ĠB ank +Ġme re +u ana +Ġmult i +Ġm ix +Ġun like +lev ision +Ġsto pped +Ġs em +ĠG al +ul es +Ġwe l +ĠJohn son +l a +Ġsk ill +Ġbec oming +ri e +Ġappropri ate +f e +ell ow +ĠPro t +ul ate +oc ation +Ġweek end +od ies +Ġsit es +Ġanim al +ĠT im +Ġsc ale +Ġcharg ed +Ġinst ruct +ill a +Ġmethod s +Ġc ert +Ġjud ge +ĠH el +Ġdoll ars +Ġstand ing +ĠS qu +Ġdeb t +l iam +Ġdri ving +ĠS um +ĠEd ition +Ġal bum +and on +I F +ĠU k +6 3 +ad er +Ġcommer cial +es h +ĠGovern ment +Ġdisc overed +Ġout put +ĠHill ary +ĠCar ol +Ġ200 5 +Ġab use +anc ing +Ġsw itch +Ġann ual +T w +Ġst ated +ag ement +in ner +Ġdem ocr +Ġres idents +Ġallow ing +Ġfact ors +od d +Ġf uck +em ies +Ġoccur red +ot i +Ġn orth +ĠP ublic +Ġinj ury +Ġins urance +C L +oll y +ã Ģ +Ġrepe ated +Ġar ms +ang ed +Ġconst ruction +Ġf le +P U +ic ians +Ġfor ms +ĠMc C +ant ic +Ġm ental +p ire +Ġequ ipment +Ġf ant +Ġdiscuss ion +Ġregard ing +k in +ar p +Ġch air +og ue +Ġpro ceed +ĠI d +O ur +Ġmur der +M an +Ġ4 9 +as p +Ġsupp ly +Ġin put +Ġwe alth +liam ent +Ġpro ced +or ial +ĠSt at +ĠN FL +hen s +ĠInst itute +Ġput ting +ourn ament +et ic +Ġloc ated +Ġk id +er ia +r un +Ġpr inc +Ġ ! 
+go ing +ĠB et +Ġcl ot +Ġtell ing +Ġprop osed +i ot +or ry +Ġfund s +g ment +ĠL ife +Ġb aby +ĠB ack +Ġsp oke +Im age +Ġear n +ĠA T +g u +Ġex change +ĠL in +ov ing +Ġp air +M ore +az on +Ġarrest ed +Ġkill ing +c an +ĠC ard +y d +Ġident ified +Ġm obile +Ġthan ks +ony m +ĠF orm +Ġhundred s +ĠCh ris +ĠC at +Ġtre nd +h at +ĠA v +om an +Ġelect ric +ĠW il +S E +O f +Ġrest aur +ot ed +Ġtr ig +Ġn ine +Ġb omb +Wh y + ¯ +Ġco verage +Ġapp eal +ĠRober t +ĠS up +Ġfin ished +Ġfl ow +Ġdel iver +Ġcal cul +Ġphot os +Ġph il +Ġpie ces +Ġapp re +k es +Ġr ough +D o +Ġpart ner +Ġconcern ed +Ġ3 7 +ĠG en +C ol +ct ors +Ġ= > +st ate +Ġsuggest ed +ĠFor ce +C E +Ġher self +ĠPl an +w orks +o oth +ren cy +Ġcor ner +Ġhus band +Ġintern et +ĠA ut +em s +os en +ĠAt l +g en +Ġbal ance +6 2 +Ġsound s +te xt +Ġar r +ov es +Ġmill ions +Ġrad io +Ġsat isf +ĠD am +M r +G o +S pe +Ġcomb at +r ant +ĠG ree +Ġf uel +Ġdist ance +Ġtest s +Ġdec re +ĠE r +Ġman aged +D S +Ġt it +Ġmeas ures +ĠL iber +Ġatt end +as hed +ĠJ ose +ĠN ight +d it +ĠN ov +ĠE nd +out s +Ġgener ation +Ġadv oc +y th +Ġconvers ation +ĠS ky +act ive +ce l +ri er +ĠFr ank +Ġg ender +Ġcon cent +Ġcar ried +and a +ĠV irgin +Ġarri ved +ic ide +ad ed +Ġfail ure +Ġmin imum +le ts +Ġwor st +Ġkeep ing +Ġint ended +Ġilleg al +Ġsub sc +Ġdetermin ed +Ġtri p +Y es +Ġra ise +Ġ ~ +Ġfeel s +Ġpack age +ĠJ o +h i +201 6 +re al +Ġf ra +Ġsy mb +M e +uck y +p ret +ĠK h +ĠEd it +ĠWe b +em ic +ĠCol or +Ġjust ice +I nt +Ġfar m +ck now +" > +el ess +Ġredu ced +Ġ5 00 +x x +ĠR ad +ĠW ood +Ġcl in +Ġhy p +il er +ur a +k ins +8 5 +6 1 +ĠThe ir +ĠM ary +Ġs an +Ġno vel +ĠWh o +Ġcap acity +Ġimp ossible +Ġpl ays +Ġmin ister +ij uana +ic ate +ĠS et +Ġf ram +Ġ ing +Ġcommun ities +ĠF BI +it a +Ġb on +Ġstr ateg +Ġinterest s +l ock +g ers +m as +ĠAN D +Ġconflic t +Ġrequire ments +Ġs ac +Ġoper ating +in i +rel ated +Ġcomm itted +Ġrelative ly +Ġs outh +¯ ¯ +Ġaff ord +Ġident ity +Ġdec isions +Ġacc used +pl ace +Ġvict ory +o ch +i at +N ame +C om +t ion +ed s +Ġsee k +Ġt ight +ĠIm ages +Ġinit i +Ġhum ans +Ġfam iliar +Ġaud ience +Ġintern al +vent ure +Ġs ides +ĠT O +Ġd im +Ġcon clud +Ġapp oint +Ġenforce ment +ĠJ im +ĠAssoci ation +Ġcircum st +ĠCanad ian +Ġjo ined +Ġdiffere nces +ĠL os +Ġprot est +Ġtw ice +w in +Ġgl ass +ars h +ĠAr my +Ġexp ression +Ġdec ide +Ġplan ning +an ia +Ġhand le +ĠMicro soft +ĠN or +Ġmax imum +ĠRe v +Ġse a +Ġev al +Ġhel ps +re f +Ġb ound +Ġm outh +Ġstand ards +Ġcl im +ĠC amp +ĠF ox +cl es +Ġar my +ĠTe chn +ack ing +x y +S S +Ġ4 2 +Ġbu g +ĠUk rain +ĠM ax +ĠJ ones +ĠSh ow +l o +Ġplan et +Ġ7 5 +Ġwin ning +Ġf aster +Ġspe ct +Ġbro ken +T R +Ġdef ined +Ġhealth y +Ġcompet ition +htt ps +ĠIs land +ĠF e +Ġannoun ce +ĠC up +ĠInst ead +Ġcl ient +Ġposs ibly +se ction +ock et +l ook +Ġfin ish +Ġcre w +Ġres erv +Ġed itor +Ġh ate +Ġs ale +Ġcontro vers +Ġp ages +w ing +Ġnum er +Ġopp osition +Ġ200 4 +Ġref uge +Ġfl ight +Ġap art +ĠL at +A meric +ĠAfric a +Ġapplic ations +ĠPal est +ĠB ur +Ġg ar +ĠSoc ial +Ġup gr +Ġsh ape +Ġspe aking +ans ion +a o +ĠS n +Ġwor ry +ĠBrit ain +P lease +rou d +Ġh un +Ġintrodu ced +Ġd iet +I nd +ĠSec ond +Ġfun ctions +ut s +ĠE ach +ĠJe ff +Ġst ress +Ġaccount s +Ġgu arant +ĠAn n +ed ia +Ġhon est +Ġt ree +ĠAfric an +ĠB ush +} , +Ġs ch +ĠOn ly +Ġf if +ig an +Ġexerc ise +ĠEx p +Ġscient ists +Ġlegisl ation +ĠW ork +ĠS pr +à Ĥ +ĠH uman +Ġ è +Ġsur vey +Ġr ich +ri p +Ġmain tain +Ġfl o +Ġleaders hip +st ream +ĠIslam ic +Ġ 01 +ĠCol lege +Ġmag ic +ĠPr ime +Ġfig ures +201 7 +ind er +x ual +ĠDe ad +Ġabsolute ly +Ġfour th +Ġpresent ed +resp ond +rib le +Ġal cohol +at o +ĠD E +por ary +Ġgr ab 
+Ġvar i +Ġqu ant +ĠPh oto +Ġpl us +r ick +ar ks +Ġaltern ative +Ġp il +Ġappro x +th at +Ġobject s +ĠR o +ĠAnd roid +Ġsignificant ly +ĠR oad +k ay +R ead +av or +Ġa cknow +ĠH D +ĠS ing +O r +ĠM ont +Ġun s +pro f +Ġneg oti +ĠAr ch +ik i +Ġte levision +ĠJew ish +Ġcomm ittee +Ġmot or +Ġappear ance +Ġs itting +Ġstri ke +ĠD own +com p +ĠH ist +Ġf old +ac ement +ĠLou is +Ġbel ong +ĠâĢ ¢ +Ġm ort +Ġprep ared +Ġ6 4 +ĠM aster +Ġind eed +ĠD en +Ġre nt +T A +our ney +ar c +S u +9 7 +Ġadv ice +Ġchang ing +Ġlist ed +Ġlaun ched +is ation +ĠP eter +is hes +Ġl ived +ĠM el +ĠSup reme +ĠF ederal +Ġ) ; +ruct ure +Ġset s +Ġphil os +u ous +Ġ ł +Ġappl ied +ĠN OT +Ġhous ing +ĠM ount +Ġo dd +Ġsu st +D A +ffic ient +Ġ ? +ol ved +Ġp owers +Ġth r +Ġrem aining +ĠW ater +L C +Ġca uses +ãģ ® +Ġman ner +ad s +Ġsuggest s +Ġend s +stand ing +f ig +ĠD un +id th +Ġg ay +Ġter min +ĠAngel es +M S +Ġscient ific +Ġco al +ap ers +b ar +ĠThom as +Ġsy m +ĠR un +th is +P C +igr ants +Ġmin ute +ĠDist rict +cell ent +Ġle aves +Ġcomple ted +am in +Ġfoc used +Ġmon itor +Ġveh icles +M A +ĠM ass +ĠGr and +Ġaffect ed +itution al +Ġconst ruct +Ġfollow s +Ġt on +re ens +Ġh omes +ĠE xt +ĠLe vel +r ast +ĠI r +Ġel im +Ġlarge ly +ĠJ oe +Ġvot es +all s +Ġbusiness es +ĠFound ation +ĠCent ral +Ġy ards +Ġmaterial s +ul ner +Ġgu ide +Ġclos er +um s +Ġsp orts +ed er +J ust +Ġtax es +8 4 +ĠO ld +Ġdec ade +ol a +Ġv ir +Ġdro pped +Ġdel ay +it ect +Ġsec ure +ste in +le vel +Ġtre ated +Ġfil ed +ain e +Ġv an +Ġm ir +Ġcol umn +ict ed +e per +Ġro t +Ġcons ult +Ġent ry +Ġmar ijuana +ĠD ou +Ġapparent ly +ok ing +clus ive +Ġincre ases +an o +Ġspecific ally +Ġte le +ens ions +Ġrelig ion +ab ilities +Ġfr ame +ĠN ote +ĠLe e +Ġhelp ing +Ġed ge +ost on +Ġorgan izations +à ĥ +ĠB oth +hip s +Ġbig ger +Ġbo ost +ĠSt and +Ġro w +ul s +ab ase +Ġr id +L et +are n +ra ve +Ġst ret +P D +Ġv ision +Ġwe aring +Ġappre ci +Ġa ward +ĠU se +Ġfact or +w ar +ul ations +) ( +Ġg od +Ġter rit +Ġpar am +ast s +8 7 +Ġen emies +ĠG ames +F F +Ġacc ident +W ell +ĠMart in +T ER +Ġat h +ĠHe ll +Ġfor g +Ġve ter +ĠMed ic +f ree +Ġst ars +Ġexp ensive +Ġac ad +ra wn +ĠW he +Ġl ock +Ġform at +Ġsold iers +s m +Ġag ent +Ġrespons ibility +or a +ĠS cience +Ġrap id +Ġt ough +ĠJes us +Ġbelie ves +M L +Ġwe ar +le te +Ãĥ ÃĤ +ĠD ri +Ġcomm ission +ĠB ob +O h +ap ed +Ġwar m +ÃĥÃĤ ÃĥÃĤ +Ġ200 3 +ort ion +Ġhas n +ust er +Ġun ivers +ĠI ll +Ġk ing +olog ies +9 4 +ĠT em +ĠM os +Ġpat ient +ĠMex ico +ce an +ĠDe ath +ĠSand ers +y ou +ĠC ast +ĠComp any +pt y +Ġhappen ing +F P +ĠB attle +Ġb ought +A m +M od +U s +ut ers +ĠC re +ĠTh ose +Ġ4 4 +is er +Ġs oul +ĠT op +ĠHar ry +ĠA w +Ġse at +ff ee +Ġrev olution +Ġ( " +ĠD uring +et te +Ġr ing +Ġoff ensive +Ġreturn s +Ġv ideos +Ġdis cl +Ġfam ous +en ced +ĠS ign +ĠR iver +Ġ3 00 +P M +ĠB us +ĠC H +Ġcandid ates +ard en +Ġpercent age +Ġvis ual +Ġthan k +Ġtrou ble +ner gy +Ġ200 1 +Ġpro ve +ash ion +Ġen h +ĠL ong +U M +Ġconnect ed +Ġposs ibility +O ver +Ġexper t +Ġl ibrary +art s +ĠDirect or +Ġfell ow +9 2 +ir ty +Ġd ry +Ġsign s +ĠL ove +Ġqu iet +f oot +Ġp ure +ĠH un +Ġf illed +ph as +ĠE lect +end ment +ĠEx pl +Ġun able +n s +m o +Ġv ast +ob e +Ġident ify +app ing +ĠCarol ina +g ress +Ġpro te +Ġf ish +Ġcircumst ances +raz y +ĠPh ot +Ġb odies +ĠM ur +Ġdevelop ing +ĠA R +Ġexperien ced +Ġsubst ant +ĠBo ard +es ome +Ġdom estic +Ġcomb ined +ĠP ut +Ġchem ical +ĠCh ild +Ġpo ol +ĠC y +Ġe gg +c ons +st ers +Ġh urt +Ġmark ets +Ġconserv ative +Ġsupp orters +Ġag encies +id el +O b +ur b +Ġ4 3 +ĠDef ense +y e +ĠA p +du le +Ġtemper ature +Ġconduct ed +ĠCh ief +Ġpull ed +Ġf ol +L ast +ont o +os is 
+V ER +D es +ĠP an +F irst +Ġadv ance +Ġlic ense +r ors +ĠJ on +Ġimag ine +Ġhe ll +Ġf ixed +Ġinc or +os ite +ĠL og +ick en +] : +Ġsurpr ise +h ab +Ġc raft +ol t +ĠJ ul +Ġd ial +Ġrele vant +Ġent ered +Ġlead s +ĠA D +ĠCle an +Ġpict ures +ess or +Ġal t +Ġpay ing +P er +ĠMark et +Ġupd ates +am ily +ĠT ype +ĠH ome +Ġ5 5 +semb ly +rom e +8 3 +Ġgreat est +Ġhe ight +Ġhe av +ain ts +Ġlist en +as er +ĠS H +Ġcap able +ac le +Ġpers pect +in ating +Ġoff ering +ry pt +ĠDe velop +ab in +r c +Ġbr ight +al ty +ar row +Ġsupp l +ind ing +ack ed +gy pt +ĠAn other +p g +ĠVirgin ia +ĠL u +Ġpl anned +Ġp it +Ġswe et +T ype +ĠD i +Ġtyp ically +ĠFranc isco +Ġpro spect +ĠD an +Ġte en +re es +Ġsc hed +Ġh ol +Ġsc r +Ġlot s +l ife +Ġnews p +Ġfor get +ĠN one +ĠM iddle +ĠR yan +ed d +Ġse vere +Ġsu it +ll er +9 3 +Ġcor respond +Ġexpl os +u ations +Ġfl ag +g ame +r id +Ġpr in +ĠD ata +Ġde ploy +ĠEn ter +su it +gh an +ĠM en +Ġthough ts +Ġmat ters +Ġad apt +ĠA ri +Ġf ill +Ġfor th +Ġs am +Ġ4 1 +Ġpay ment +ĠH or +Ġsp ring +du c +Ġl osing +Ġbring ing +F O +al a +Ġdist ribution +he red +b our +ĠIsrael i +om a +Ġcomb ination +Ġpl enty +V E +C an +ĠH aw +Ġper man +ĠSpe cial +Ġto w +Ġsee king +Ġexam ples +Ġclass es +c r +Ġbe er +Ġmov es +ĠI P +ĠK n +Ġpan el +E ven +Ġproper ly +Ġr is +Ġpl ug +Ġestim ated +E very +Ġdef ensive +ag raph +Ġpre gn +Ġinst it +ĠV ict +Ġvol ume +Ġpos itions +Ġl inks +ĠPro gram +ĠWe ek +ag ues +Ġtrans form +k er +ĠC EO +Ġc as +Ġopp onent +Ġtwe et +ĠC ode +Ġsh op +Ġf ly +Ġtal ks +Ġb ag +Ph one +Ġa id +Ġpl ants +Ġ6 5 +Ġatt orney +ar ters +qu est +ĠMag ic +Ġbeg ins +Ġmy ster +Ġenvironment al +Ġst orage +N N +Ġm arg +Ġs ke +Ġmet al +ell y +Ġord ered +Ġrem ained +Ġl oved +Ġprom pt +Ġupd ated +Ġexper ts +Ġwalk ing +Ġan cient +Ġperform ed +AT E +Ġne ither +i ency +Ġmanufact ure +ĠP ak +Ġselect ed +Ġm ine +Ġult imately +Ġexpl an +Ġlab el +ĠServ ices +ribut ed +Tr ump +Ġsy n +ĠU lt +S C +Ġme at +Ġg iant +ĠW ars +ĠO N +Ġad m +Ġinter pret +Ġeven ing +Ġev il +ĠB oston +ĠW ild +Ġ à +ĠBit coin +ĠAm azon +D r +ĠIn formation +Ġobvious ly +Ġadv anced +Ph oto +ol ar +Ġwe ather +Ġsymb ol +Ġso le +Ġpot entially +ost er +Ġorig inally +m un +3 00 +az e +ess ions +Ġde ck +Ġst ood +Ġyou th +ĠB ern +R ep +ĠT est +Ġbas ically +ot ic +Ġinvol ve +ol it +ly n +S ee +Ġair craft +Ġconf irm +E W +Ġmess ages +ĠRich ard +Ġk it +Ġpro hib +Ġv ulner +is ters +Ġexist ence +Ġturn ing +ĠS P +Ġdes ire +Ġfl at +Ġm ent +se ason +ang es +Ġneighbor hood +ĠL ake +AT ION +Ġpoint ed +b ur +Ġinn ov +uc ks +U L +Ġprofess or +Ġexp ressed +A B +ic ious +Ġ200 2 +ĠDe v +Ġs ession +Ġb are +s en +Ġdis s +ĠC ath +ĠP ass +ĠP oint +Ġdo ctor +or row +ail ed +ĠR ub +ĠD C +ĠChar l +p erson +Ġwrit er +igh ters +ure au +Ġob lig +Ġrecord ed +Ġbro ke +Ġord ers +il ty +Ġmot ion +in ity +l aw +ad ium +Ġimm igration +Ġcontr ast +Ġb att +Ġex cellent +Ġtechn ical +am i +Ġt un +Ġcl oud +ĠY ear +ge on +Ġcre ation +Ġstr ange +Ġa uth +Ġfor t +b orn +Ġext ent +ĠT oday +ĠCl ub +Ġr ain +Ġs ample +Ġaccept ed +Ġt act +Ġf ired +ĠS on +Ġstand s +Ġb oot +Ġ4 7 +Ġstat ements +Ġvers ions +Ġse lling +ound ed +Ġ199 0 +Ġwere n +ĠW atch +Ġexper iment +P ost +Ġret ail +ul ed +In st +un te +ãĥ ¼ +Ġdep art +Ġb ond +i very +om pl +Ġre action +ĠSyri an +ĠP ac +app ed +ani el +D P +Ġres olution +Ġre act +Ġappro ved +on om +m ond +ĠO ffic +-- - +Ġrepl ace +Ġt ack +Ġsp ort +Ġch ain +Ġemer gency +r ad +ĠPalest in +Ġ4 6 +Ġautom atically +Ġrout e +Ġp al +Ġb anks +ĠPar is +ĠMed ia +ro ad +ic ing +i xt +ist ed +Ġg rew +Ġco ord +ĠW here +om in +Ġsub s +� � +Ġ ± +Ġcorpor ate +Ġse lection +n oon +ĠRep ort +c s 
+clud ing +ord ers +anc he +ĠIt s +Ġslow ly +ĠE gypt +ĠA cc +Ġcol le +iqu es +E X +Ġattempt s +ur l +ĠC ross +Ġfind ings +ĠS C +ĠO R +Ġind ex +ens ity +ĠW ay +ĠL and +Ġsh ock +d is +Ġd ynam +Ġc art +m osp +S ince +i est +ĠB oy +Ġst orm +ĠCont in +201 3 +he w +il it +Ġess ential +iqu id +O ther +ive red +Ġreason able +A ct +Ġsub sequ +ĠP ack +ĠF ort +Ġconsider ing +Ġun iversity +l og +Ġmar ried +Ġill ust +ĠTr ue +£ ı +Ġnumer ous +rast ructure +Ġserious ly +Ġrefer red +u a +Ġconsist ent +on na +ĠRe al +ru ption +ci ples +Ġfact s +9 1 +ot es +er g +The n +Ġacc ompl +N ote +Ġre venue +Ġpass ing +Ġm al +e en +ĠY et +Ġg ather +ter day +ew ork +ĠA uthor +P e +Ġopt im +Ġr ub +Ġè £ı +Ġun known +st one +Ġun ion +ol ve +Ġopportun ities +Ġbrow ser +ĠW al +ĠC ost +Ġreport ing +st s +p et +Ġs and +Ġsudden ly +Ġsurpr ising +ĠV R +Ġsomew hat +ĠB as +ult ure +iz z +ĠC D +Ġchalleng es +Ġsett ings +Ġexperien ces +ĠF ull +Ġcan n +Ġrece iving +ES T +Ġj oint +Ġcult ural +Ġa st +8 2 +as tern +ce ived +ĠC ru +Ġb ull +p ired +am m +Ġfac ing +p ower +Ġb oss +ĠH ol +Ġinst r +Ġincreasing ly +Ġsh ift +Ġstre ets +ĠWilliam s +ab b +Ġl ie +Ġl augh +ĠC a +P L +Ġadult s +Ġcustom er +Ġob tained +Ġsupport ing +ht ml +f ire +Ġdetail ed +Ġpick ed +ĠR ight +ld er +E E +st ood +ĠK im +Ġw ire +Ġs ight +Ġdevelop ers +Ġpers ons +Ġs ad +Ġc up +Ġwar ning +Ġboy s +l ong +Ġb ird +f o +Ġw al +Ġobserv ed +Ġz one +iven ess +Ġch annel +c ript +Ġref used +ĠAg ain +Ġsu c +Ġspokes man +ĠRe f +r ite +ou ston +ãĥ ³ +ĠS her +Ġact s +ĠN ame +Ġstrugg le +ar ry +omet imes +Ġdisc rim +H T +Ġcateg ory +Ġreal ize +Ġemploy ee +ĠAf ghan +en ger +Ġgun s +ĠSte ve +ĠM ot +ĠO l +ok ed +Ġth ick +Ġfair ly +ill y +Ġsur ve +ĠM at +we ight +â Ķ +Ġtro ops +Ġag ents +Ġbatter y +Ġmot iv +à ¡ +S ec +d en +o very +L S +Ġfl u +Ġconf ident +ĠO per +Ġem pty +Ġp hen +Ġse ctor +Ġexc ited +Ġrem ote +ap h +o en +Ġdestroy ed +Ġmor al +ĠH P +ĠR on +Ġd ress +ĠB at +Ġl it +ĠM S +Ġa f +H L +r um +is ms +Ġshould n +Ġsym pt +ĠTor onto +het ic +Ġcar bon +Ġinstall ed +Ġviol ent +Ġsol ar +j a +Ġpract ices +Ġr ide +ĠP enn +Ġimpro ved +Ġaud io +Ġbehav i +ĠP S +Ġe ating +D ata +ĠRe view +p ass +cl aim +u ated +ang ers +c hen +Ġproper ties +Ġany where +An other +Ġbl ow +ĠJack son +Ġp roud +Ġplan e +l ines +Ġsqu are +Ġpro of +ans as +Ġtalk ed +m akers +Ġs ister +Ġhold s +Ġres ident +Ġ= = +Ġresist ance +Ġspl it +Ġpro secut +Ġconf idence +res ents +Ġcut s +Ġexcept ion +Ġz ero +Get ty +Ġcop yright +Ġtot ally +orm al +ific ations +ĠAustral ian +Ġs ick +Ġ1 50 +Ġhouse hold +Ġfe es +Ġdri vers +og en +ĠN Y +Ġnecess arily +Ġregul ations +ear ing +s l +Ġperspect ive +c are +ic ial +H is +Ġesc ape +Ġsurpr ised +ĠV an +ur rent +Ġv ac +8 1 +ĠTh us +Ġem phas +ĠCh ampions +ĠI ce +Ġn arr +Ġhead s +Ġca using +b el +f ortunately +ĠM a +Ġtarg ets +ci pl +Ġafter noon +Ġadd s +ĠMay be +ĠF our +ess ed +ple te +Ġus ual +ch o +ing u +Ġwith d +ĠE nergy +ĠE conom +O O +Ġart icles +Ġinj ured +Ġman age +Ġexpl ains +Ġdi agn +R ec +at ures +Ġlink ed +Ġdiscuss ed +Ġexpl o +Ġocc asion +ath an +Ġopp osite +Ġfac es +Ġden ied +ĠK night +Ġn ut +Ġapprox imately +Ġdisapp oint +onym ous +ĠB est +ĠL o +ĠH y +ĠA ff +Ġvot ing +an while +ĠII I +Ġinstit utions +ag ram +ĠD aily +Ġdr ag +Ġnear by +Ġgu ilty +Ġcon ver +P re +s hip +Ġre ward +Ġphilos oph +ĠS S +u gh +Ġapp s +f riend +Ġu pper +Ġad vert +Ġs now +Ġfr ust +Ġour selves +F r +ĠD ie +amp ion +Ġdis miss +Ġc ere +Ġsign al +f rom +Ġ ). 
+Ġ5 2 +Ġcr imes +it ors +est ival +use um +Ġcoun cil +ĠS aud +M ay +ĠG un +ic ian +et her +Ġsu fficient +ĠH en +so le +Ġhistor ical +ĠF ar +ĠT urn +Ġp in +Ġsuc ceed +m at +ly mp +Ġtrad ition +ĠO k +Ġc ro +Ġdesc ription +al le +Ġsk y +T e +Ġwide ly +Ġw ave +Ġdefin ition +ĠJew s +Ġcy cle +Ġref ere +Ġbr ings +us al +Ġal ive +Ġfrequ ently +Ġint ention +ĠCont rol +l v +y stem +Ġpriv acy +g ent +ren ce +ĠQu est +ĠChrist mas +Ġr ail +Ġco oper +Ġtest ed +ĠC apt +as ks +Ġcomfort able +Ġdel ivered +sc ape +Ġdep th +ĠG OP +Ġwrit es +Ġass ets +Ġsa v +im ents +Ġtrans ition +Ġart ist +ĠL ook +Ġl ob +Ġcomp onents +ar ity +Ġwalk ed +Ġro ot +Ġparticip ants +Ġnot iced +Ġres c +Ġn av +ĠAd minist +d a +ut ral +pl ate +Ġimport ance +Ġass ert +ious ly +c ription +Ġinj uries +ĠChe ck +Ġregist ered +Ġint ent +Ġmiss ed +ograph ic +Ġsent ence +oun ter +Ġassist ance +ev in +Ġdat abase +Ġbuild ings +Ġclass ic +Ġth inks +ĠOh io +P r +ug g +Ġfe e +p an +Ġeffect ively +Ġfac ility +Ġbe ar +Ġch apter +Ġdog s +ĠCol umb +Ġl atter +it ial +Ġad mitted +T V +ĠGe org +Ġpost s +\ \ +Ġlawy er +Ġequ ival +Ġm and +Ġcontro lled +ĠW alk +ĠAnd rew +Ġmen u +am ental +Ġprotect ed +v a +Ġadminist r +or al +Ġre in +ĠS ar +Ġamount s +Ġn ative +ĠM oon +Ġrep resents +Ġab andon +Ġcarry ing +Ġt ank +m ary +Ġdecl ared +T ube +Ġh at +Ġpun ish +el lect +m es +Ġun iverse +ĠR od +ph y +Ġinf rastructure +Ġ5 1 +Ġopp osed +ow nt +c a +ĠM ake +Ġhard ware +Ġco ffee +R el +b al +w orld +ĠS af +ĠSe a +in als +Ġown ed +Ġh all +ers ion +Ġdescrib e +ĠP ot +Ġport ion +Ġat mosp +Ġgovern ments +Ġdep ending +Ġoff ense +Ġtr ick +aw a +ĠL ine +ĠV is +ĠH ard +ĠOr ig +ĠCl ick +Ġdes k +ĠVal ley +ĠS ov +Ġmov ies +Ġrem ark +Ġm ail +Ġcons cious +Ġrul ing +ĠR ights +Ġmed ic +he nt +ĠW omen +> < +Ġrepl aced +ĠP rem +ĠTh anks +Ġre new +ĠB all +if orm +Ġsh ots +C omm +Ġar med +Ġconst ant +Ġt aste +Ġreal ized +Ġbu ff +Ġm o +Ġeffic ient +M ost +or ation +if ies +Ġcommun ication +Ġfl ood +Ġconsequ ences +Ġany way +ig g +ĠG M +ĠTh ank +Ġ iron +Ġev olution +ĠC op +tw itter +Ġ9 5 +Ġrelationship s +ad el +ĠYou ng +Ġpropos al +ay ers +uild ing +ĠH ot +OR E +c os +Ġcoll abor +P G +ax y +Ġknow ing +Ġsupport s +ow ed +Ġcontrol s +Ġmere ly +um er +Ġath let +Ġf ashion +p ath +Ġg ift +Ġer a +AN D +Ġkind s +ĠKore an +Ġleg it +ul ous +Ġess entially +Ġthe rap +n ic +Ġsuff ered +Ġh ur +Ġprom ise +Ġex cess +Ġover w +Ġpr ime +ĠH ouston +er ry +ĠM s +R S +201 2 +Ġst ores +ĠO lymp +Ġj ourney +Al though +S ub +ĠE duc +ĠCh apter +Ġrequest s +Ġconsum ers +Ġt iny +Ġis ol +ĠF air +b a +ĠY OU +Ġcr ash +ce ler +Ġemot ional +Ġgood s +Ġelect ed +Ġmod er +ĠLin ux +Ġbl ocks +Ġis land +ĠSoc iety +Ġelect ions +Ġbroad cast +Ġche ap +Ġn ations +Ġse asons +4 00 +Ġwas te +ĠS at +Ġfield s +em ploy +Ġprof ile +Ġauth ors +AL L +ĠG ra +w est +ĠT y +Ġdeath s +Ġv acc +Ġfor med +Ġd u +Ġon going +ĠMuslim s +el f +ig ure +Ġass ume +ĠUkrain e +w ater +Ġco ast +Ġvot ed +g or +ĠA S +ĠMich igan +az a +ĠAr m +i ro +Ġf lex +as ters +' ' +Ġwel come +ar l +Ġloc ations +ig ation +ĠF il +Ġbu ying +Ġarch itect +Ġhard er +ĠC ub +Ġinter face +Ġrestaur ant +Ġdisco ver +Ġex ceed +Ġfav our +ger y +Ġd uty +Ġp itch +ad or +ĠM ach +b oy +Ġrespond ed +Ġext ended +her s +M any +ra id +if er +ĠIn s +S er +Ġmed ium +s he +ĠS ports +Ġmag azine +ut ation +Ġlim its +ĠG all +Ġex ternal +raz il +Ġyoung er +t le +Ġrem ind +ĠC ON +Ġimmedi ate +Ġh idden +Ġvol unte +Ġsim pl +od cast +Ġph ase +d r +Ġpl ot +Ġexp osure +R I +og rap +v in +an ish +ĠAc ad +ĠEng ine +Ġexp ansion +ĠP ay +Y our +Ġpus hed +ĠE ll +ĠHe ad +Ġmarket ing +ĠA C +k et +Ġh its +Ġg ro 
+ĠA ge +ĠSc ot +] [ +Ġst im +Ġi Phone +Ī Ĵ +Ġn arrow +ĠGet ty +ĠTur key +Ġperfect ly +Ġen able +ut ch +Ġprec ise +Ġreg ime +Ġsh if +Ġcomp ens +g un +d iv +Ġch osen +ĠK en +An y +Ġtre es +Ġrecomm ended +ĠR en +u able +ĠH T +F ollow +E G +ĠH and +ĠK enn +Ġarg uments +Ġex ists +Ġb ike +ĠCons erv +Ġbre aking +ĠG ar +Ġc razy +Ġvirt ual +ay lor +ix el +Ġ19 80 +Ġper mission +ĠSer ies +Ġconsum er +Ġclose ly +c alled +Ġ5 4 +Ġhop es +Ġar ray +ĠW in +ĠLab our +Ġsp ons +ĠI re +Ġp ow +Ġread ers +Ġemploy ment +Ġcreat ure +Ġresult ing +Ġaccur ate +Ġmom ents +Ġarg ued +Ġp ed +D uring +Ġ5 3 +ĠT al +Ġs ought +Ġsuff ering +Ġ icon +le e +Ġ( $ +al ian + ° +Ġp ra +Ġbon us +( " +k o +Ġact ing +D E +f all +Ġcompar ison +Ġsm ooth +ĠN AS +u pp +ĠJose ph +ep ing +ĠT ake +ĠM id +Ġs ending +f ast +ĠF all +Ġdeal ing +us er +ĠOr gan +C o +Ġatt ached +Ġse es +% . +Ġtyp ical +AR T +Ġfind s +ĠAs ia +um in +ĠC ore +ĠE nt +in ent +u ce +ĠBl ood +ĠN ever +Ġem ails +Ġhigh light +Ġconf ront +at us +ut ed +Ġun us +Ġtop ic +ĠAd am +Ġb le +at i +Ġunder stood +S et +st ruct +T P +Ġm ob +a a +ĠSt art +pect ed +se ll +Ġded icated +ĠC A +u an +Ġsong s +esc ription +Ġte ch +Ġr ape +Ġas ide +Ġgr ant +Ġ5 6 +s ub +Ġarg ue +Ġcont aining +Ġsche dule +Ġliber al +Ġpublic ly +Ġheav ily +ĠU t +in er +ĠS ection +ĠC are +we et +l s +D is +âĶ Ģ +ĠF ollow +B ack +ĠI T +Ġb es +j i +ĠH it +est ed +Ġevery body +ĠSw ed +Ġfem in +Ġfac ilities +Ġcon ven +C omp +ĠO S +c ore +Ġan x +Ġdiv ision +ĠC am +ĠSt an +m ates +Ġexpl ore +pl om +Ġsh ares +pl oad +an es +Ġide al +et ers +ĠB ase +Ġpl astic +Ġdist inct +ĠNet work +ĠSe attle +Ġtrad ing +ens us +int end +Ġex hib +Ġinit ially +ĠF ood +Ġthous and +ĠBus iness +act er +Ġpar agraph +Ġrough ly +Ġw ww +Ġcreat ive +ĠCon f +Ġconsum ption +Ġfil ms +ag an +Ġob tain +Ġt all +Ġt or +Ġacknow led +Ġg rown +al o +K E +Ġ4 00 +end ers +t aining +U G +Ġsu icide +Ġwat ched +ĠL ist +al i +re hens +Ġsurround ing +Ġp ip +Ġf lying +ĠJ ava +ord an +Ġserv ing +in ations +p ost +Ġsh o +A v +Ġj ail +z y +Ġ199 9 +Ġ< / +Ġliter ally +ĠS ir +Ġexp osed +Ġl ies +st ar +Ġb at +Ġear ned +ĠD ig +Ġspec ified +ĠSe ason +Ġdeg rees +Don ald +Ġcent re +Ġsh aring +Ġwin ter +ĠC O +C he +Ġ Î +M P +Ġun w +Ġfew er +ĠM ir +Ġsomew here +ĠK ey +Ġattack ed +ĠK ir +Ġdom ain +Ġstrong er +Ġ9 9 +Ġpen alty +I d +Sc ript +Ġdecl ined +Ġne ck +Ġfra ud +Ġcur rency +Ġr ising +R C +âĢ¦ âĢ¦ +H z +Ġt ab +Ġtal ent +n am +ĠN BA +Ġvill age +Ġleg s +ĠN ext +E d +Ġac id +Ġhy d +8 00 +Ġinvol ving +ĠIm age +ĠBe fore +F l +Ġyes terday +S ource +Ġterror ist +Ġsu p +Ġsy nt +ĠSaud i +Ġw est +Ġr u +b urg +Ġvis ible +Ġstru ck +r ison +Ġaw esome +Ġd rawn +Ġansw ers +ĠG irl +ĠR am +Ġthreat s +Ġdef eat +os it +Ġv ent +atur ally +Americ an +end a +ĠH oly +Ġr um +% , +c ase +ĠHist ory +ĠYou Tube +Ġsit uations +ĠD NA +S te +Ġsa ved +It em +Ġrec ip +olog ist +Ġfac ed +Ġel ig +O nce +ĠL i +u h +Ġmist ake +ĠDiv ision +ĠB ell +Ġsympt oms + ® +Ġdom in +Ġfall ing +Ġend ing +as hes +Ġmat ches +ĠOn line +Ġexplan ation +D ef +red it +Ġany more +ĠT otal +ĠF OR +us hed +Ġlet ters +Ġris ks +ĠO K +Ġreported ly +: \ +Ġpl ate +Ġsubject s +Ġattempt ed +if ier +ian a +Ġunlike ly +ĠTh ough +um a +ĠIn vest +ĠPr in +ic an +ĠD ar +ĠColor ado +au g +Ġve get +a os +ri a +Ġshe l +Ġmark ed +Ġ( ) +Ġsp r +p o +ĠL ink +Ġdef e +ĠJ r +Ġthem e +Ġpass ion +ĠP en +Ġinf o +iz er +Ġsh it +ĠC ivil +ap se +c re +Ġpo ly +Ġcomp onent +ĠChar les +ĠIre land +ĠPro v +Ġdo ctors +Ġgr anted +Ġpain t +Ġhon or +Ġsm oke +Ġpay ments +Ġprim arily +ĠKing dom +r ich +ate ll +Ġde als +Ġsched uled +Ġfund amental +Ġprote in +Ġnewsp aper 
+Ġcl ients +yth on +ĠD ate +h us +Ġfeed back +Ġstret ch +Ġc ock +Ġhot el +ĠQue en +Ġsu gar +Ġj u +Ġmil k +Ġappro val +ĠL ive +Ġequival ent +ef ully +Ġins ert +z ona +Ġext ension +d ri +J ohn +Ġacc omp +S m +ĠF und +Ġconst antly +Ġ` ` +Ġgener ated +ĠA ction +ĠP sych +ĠT ri +Ġrecogn ize +Ġv ary +ph a +ĠR a +d f +et ch +ĠSov iet +Tw o +Ġpattern s +Ġprof ession +an ing +T ime +ĠL im +Ġcol ors +ĠA z +ĠT R +Ġinf ect +Ġphen omen +Ġshe ll +Al so +Ġput s +Ġdel ivery +Ġbro wn +Ġprocess ing +Ġlight s +ess age +ĠBro ok +ĠA ud +l ation +Ġindust rial +L ike +ĠB razil +rou s +ES S +ĠL uc +Ġsome how +Ġ8 5 +Ġpro port +Ġpolit icians +Ġindic ate +Ġh ole +Ġtechn iques +Ġcompet itive +Ġph r +Ġv o +ist ent +ĠD ream +Ġcamp us +Ġaspect s +Ġhelp ful +Ġsh ield +or se +Ġtrig ger +m al +Ġ5 8 +Ġt ort +Ġperson ally +Ġt ag +Ġkeep s +ĠV ideo +Ġben ch +Ġg ap +a ire +Ġe ast +Ġrec overy +per ial +Ġprof it +ĠM ic +Ġ5 7 +Ġcol on +Ġstrong ly +st yle +Ġalleg ations +h an +Ġrep orters +j o +r ine +arg et +and al +Ġ0 3 +Ġfl ash +tr ans +Ġstr ict +Ġpark ing +ĠPak istan +Ġl i +Ġwe ird +ĠE ric +Ġreg ions +ĠJ un +Ġint ellect +ĠW H +od ing +rib utes +up id +ĠT it +Ġf inger +or ia +Ġe lev +ĠF ield +Ġcon clusion +; ; +Ġfeel ings +Ġext ensive +Ġm ixed +Ġne uro +v y +Ġhar ass +ĠC irc +ou ch +Ġterrit ory +Ġsuccess fully +M ar +Ġing red +Ġoverw hel +Ġl ayer +V iew +Ġall ies +ill ance +ĠTh ree +Ġb unch +Ġnorm ally +Ġnet works +Ġsac r +ĠC IA +b les +Ġch ose +Ġopp onents +Ġregard less +Ġfr anch +Ġpre f +ĠP o +Ġbr idge +ann a +ĠSil ver +Ġw age +p age +ri or +Ġrad ical +ĠL ittle +Ġman ip +Ġsecret ary +Ġg ang +D R +F A +Ġdec ent +ĠSp irit +Ġun cle +ĠDevelop ment +Ġinvest ors +Ġwall s +Ġpub lish +Ġgener ate +iss ions +c ar +Ġprom ote +Ġcut ting +Ġche st +Ġdrink ing +Ġcollect ed +Ġ7 2 +Ġhop ing +Ġem br +gor ith +Ġwar ned +Ġinstruct ions +O G +ĠD id +ĠAg ency +Ġg ear +Ġcritic ism +ĠF urther +Ġut il +ann y +R ed +Ġcoun sel +ĠAs ian +Ġredu ction +p ool +Ġteach ing +Ġdeep ly +i y +Ġestim ates +Ġcho ices +Ġperman ent +in em +ke l +Ġf asc +p se +f ile +ĠL ow +ĠP erson +Ġt ournament +st al +Ġm el +U ST +ĠR ay +az i +V al +Ġcont ained +ĠH olly +Ġw ake +Ġreve al +Ġprocess es +ĠIS IS +Ġ0 9 +Ġbl ind +Ġste el +ĠB ad +Ġcare fully +app y +ro it +Ġg aming +Ġhous es +ĠC oll +Ġtr uck +er m +Ġsc ored +Ġocc as +ret urn +b ound +v ar +Ġsh arp +Ġaf raid +ĠE X +am ber +c ific +Ġsche me +N C +ĠPol it +Ġdecl ine +Ġ199 8 +Ġpus hing +Ġposs ession +Ġpriv ile +Ġteacher s +Ġy ield +H A +ĠDav is +it led +#### #### +Ġr ig +ĠD aniel +ac on +Ġh ide +ut en +Ġcolle agues +Ġprin ciples +Ġl oud +Ġs in +ĠDem on +Ġst one +Ġ0 2 +Ġt aught +Ġter rible +Ġst uck +ĠPol icy +te en +Ġimplement ation +ĠB BC +ĠAP I +Ġwhe el +all as +Ġch ampions +ol ars +play er +Ġrepeated ly +ĠSt ill +Ġlik es +ast y +es ter +ĠCath olic +R L +Ġb ath +Ġno ise +t itle +Ġn orthern +P art +Ġmag n +Ġf ab +ĠAs h +Ġdis pl +Ġtick et +Ġm urd +Ġalong side +ĠMus ic +Ġr iver +ĠSte el +ĠC L +ĠPl ayer +ĠM ult +ow ing +re p +s ize +Ġt ur +ĠGeorg ia +isc al +ra ction +Ġc able +Ġ5 9 +Ġw ins +Ġup coming +Ġsurv ive +Ġins pired +ĠEduc ation +Ġstat istics +ĠF oot +iam i +Ġy ellow +ĠP age +. 
- +ĠH as +Ġur ban +Ġa x +es sel +\ " +Ġquarter back +Ġreg ister +ĠLab or +Ġab ilities +ĠF amily +Ġvar iable +ĠPr ice +Ġcont em +Ġth in +ĠE qu +d ata +Ġg otten +Ġconst it +Ġas ks +Ġt ail +Ġexc iting +ĠE ffect +ĠSp anish +Ġencour age +ins on +ĠA h +Ġcommit ment +C S +Ġr ally +Ġ: : +Ġsubs id +Ġsp in +Ġcapt ured +201 8 +Ġinn oc +Ġalleged ly +ĠC ome +Ġart ists +ĠN umber +Ġelect ronic +Ġreg ional +ap es +Ġw ra +Ġmy th +pr ise +ĠM iller +ĠC reat +ĠEp isode +b ell +Ġdirect ed +Ġext ract +Ġs orry +Ġv ice +ag ger +ĠSu pport +Ġ6 6 +ĠI ron +Ġwonder ful +Ġg ra +N et +ion e +E ng +Ġsh ips +ik es +ĠK evin +it ar +Ġactiv ists +tr ue +ĠAri zona +ent h +ĠDes pite +ĠS E +Ġha bit +ern el +Ġin qu +Ġab ortion +Ġv oid +Ġexpl icit +Ġeng aged +Ġang ry +Ġr ating +Ġfr ag +b ro +ick ing +d ev +Ġwor ried +Ġob ser +Ġap artment +ĠG T +Ġest ate +ĠConst itution +em on +ĠS now +Ġcount y +Ġdis ag +ĠStep hen +Ġimm igrants +w ind +ĠN ations +Ġfol ks +O ut +Ġg all +Ġtarget ed +Ġst ead +ĠB on +ĠL ib +Ġinform ed +Ġ12 0 +ch ain +idel ines +or ough +Ġdri ven +Ġregular ly +Ġbas ket +Ġprinc iple +oc ument +Ġst un +ib ilities +ĠRom an +ĠAb out +Ġal ert +Ġdemocr acy +Ġrepresent ed +H S +c ers +p arent +Ar t +p ack +Ġdi plom +re ts +ĠN O +Ġcapt ure +ĠAd v +Ħ ¢ +Ġannounce ment +ĠL ear +Ġh ook +Ġpur s +ĠS uch +ĠC amer +Ġrefuge es +ĠV e +P ol +Ġrecogn ized +l ib +Ġhad n +A ss +Ġpil ot +us hing +Ġreturn ing +Ġtra il +ĠSt one +Ġrout ine +Ġcour ts +Ġdes per +Ġfriend ly +ĠIt aly +Ġpl ed +Ġbreat h +Ġstud io +N S +Ġimp ressive +ĠAfghan istan +Ġf ing +Ġd ownt +ink ing +ĠR og +i ary +col or +se x +ar on +Ġf ault +ĠN ick +D own +ĠR ose +ĠS outhern +X X +is odes +L ist +6 00 +Ġout come +er r +Ġelse where +Ġret ire +Ġp ounds +ĠGl obal +Pe ople +Ġcommun ications +Ġlo an +Ġrat io +ĠEm pire +Ġg onna +Ġinv ent +D F +Ġ19 70 +ĠComm on +p at +Ġprom ised +Ġd inner +ĠH om +Ġcreat es +Ġoper ate +ver ty +ĠJ ordan +et ime +Ġsust ain +R eg +Ġincred ible +im a +Ġwar rant +Ġm m +A tt +Ġlaw suit +Ġreview s +it ure +ĠS ource +l ights +ĠF ord +Ġ6 3 +g roup +st ore +Ġfeat ured +Ġfore ver +Ġpo verty +ĠP op +ĠC NN +az z +ab is +ach ing +Ġl aid +ĠSu pp +Ġfil ter +en a +ĠCommun ity +Ġcreat ures +u ction +ĠR oyal +Ġassoci ation +ĠCon nect +ĠBr ad +âĸ Ī +l ers +the re +ĠG i +Ġval uable +AC K +ĠT aylor +Ġl iquid +ĠAtt orney +ĠCar l +ĠF inal +ag a +ĠWil son +B ecause +ĠProf essor +ak a +Ġincred ibly +r ance +! ) +R ef +s k +Ġsol utions +Ġatmosp here +Ġbl ame +um es +ĠN ob +C A +um ps +r ical +ĠPut in +ĠD est +or ic +ĠP A +Ġrespect ively +w an +Ġfif th +â Ħ¢ +ĠC ry +Ġgovern or +res ident +Ġpurch ased +Ġh ack +Ġint ense +ob s +Ġorig in +Ġdef ine +Ġcare ful +** * +Ġshould er +Cl ick +Ġt ied +Ġdest ruction +ou red +Ġno body +Ġh o +ĠEx per +Ġt ip +" ; +Ġtechn ique +Ġj ur +ĠP ok +b ow +Ġleg end +Ġacc ord +Ġbus y +ĠInt el +Ġh ang +ak i +. 
] +âĢĶâĢĶ âĢĶâĢĶ +Ġsur gery +Ġrep rodu +Ġun iform +Ġscen es +c ode +Ġ6 2 +l isher +ĠH ave +ph ia +Ġcry pt +Ġrec on +Ġsc ream +Ġadop ted +Ġsc ores +N e +ĠIt alian +in cluding +B O +Ġindic ated +Ġent ertain +G u +T ext +i el +Ġtw enty +Ġeng age +off s +ĠPac ific +Ġsm ile +Ġperson nel +Ġto ler +Ġdo ors +Ġt one +Ġmach ines +Ġent ering +ten ance +C O +ĠJer sey +Ġfore st +Ġhor se +Ġcompl aint +ĠSpr ing +y o +ĠPl us +ed ing +ĠRet urn +qu arters +ial s +c ow +Ġacad emic +Ġf ruit +Ġ199 6 +og ether +Ġw ine +Ġpur su +ĠSte ven +Ġlic ens +Wh o +Ġclot hes +re ction +Ġsqu ad +Ġst able +Ġr aw +z ens +St ar +ut ies +anc er +Ġke ys +ĠM u +Ġcompl icated +ig er +ĠTe xt +Ġabs or +Ġ6 8 +Ġfun ny +Ġrel ief +ĠL ew +ĠC ook +Ġch art +Ġdraw ing +G E +Ġmod ule +ĠB ull +I LL +Ġs alt +0000 0000 +il le +Ġres ource +aw ay +adel phia +ĠB ru +Ġ6 7 +Ġsome body +Ġparticip ate +Ġro se +we red +Ġmus cle +Ġcons ent +Ġcontin uing +ĠGuard ian +ĠOr der +reg on +Ġre ar +Ġprov ision +Ġlik ed +ri ent +Ġb ra +Tr ans +Ġmeet ings +Ġto x +Ġcon vent +Ġaut o +Ġrec ording +ĠSo ft +00 1 +ĠR oll +Ġprogram ming +Ġp ic +Ġprov ed +Ġst ab +ĠA st +Ġca ption +ul ating +ĠAtt ack +Ġnew ly +Ġ199 7 +f r +Ġdis cipl +ĠGree k +Ġed ition +ĠDo es +ĠB ox +if le +ack et +Ġpass es +Ġgu est +Ġac celer +it als +U D +Ġaut hent +ĠR est +ov al +t a +u ine +Ġarm or +ĠT own +Ġcomp at +Ġinc hes +Des pite +Ġass ign +he rent +Ġprep are +ĠM eg +oc key +Ġdep ends +Ġtrack s +w atch +Ġl ists +ĠN orthern +Ġal ter +re c +ĠE astern +Ġcond em +Ġevery where +? ' +Ġaff ili +Ġf ought +": {" +Ġm ac +it arian +Ġsc ope +ĠA L +aw s +ar ms +Ġqu e +Ġenjoy ed +nes ota +Ġagg ressive +ĠSt ory +ĠI V +Ġrec ipe +Ġrare ly +ĠMed ical +val ue +ang el +ay ing +omet hing +Ġsub section +Ġs outhern +Ġfrequ ency +re te +roll ed +ult s +ĠN ic +Ġbeh alf +Ġsequ ence +ab et +Ġcontrovers ial +Ġcomp rom +Ġwork er +Ġmain ly +Ġal gorith +ĠM ajor +or ce +g ender +Ġorgan ized +Ġf ake +Ġconclud ed +ĠE D +ĠEx ec +r age +Ġch ances +ber ry +ĠTr ad +Ġconfig uration +Ġwithd raw +Ġf ro +ud es +ĠBro ther +ĠB rian +Ġtri es +Ġsam ples +Ġb id +ĠGold en +Ġphot ograph +if est +ĠD O +ĠPar liament +******** ******** +R em +Ġcont est +Ġsign ing +p x +ĠZ eal +âĶĢ âĶĢ +E ar +Ġex it +Be fore +ĠCor por +n ull +mon th +Ġrac ial +ott ed +ĠV eg +ĠRe uters +Ġsw ord +ps on +ĠRom ney +a ed +Ġt rib +Ġin ner +Ġprot ocol +ĠB i +ĠM iami +ever al +p ress +Ġsh ipping +ĠAm endment +ĠHow ard +con nect +ĠD isc +ĠJ ac +iam ond +ĠThere fore +s es +ĠPrin cess +ĠUS B +ĠAn th +Ġsurve illance +Ġap olog +Ġ6 1 +ow a +Ġf ulf +j s +Ġl uck +ust ed +Ġ § +n i +Ġant icip +em an +Ġwin ner +Ġsil ver +ll a +ic ity +Ġunus ual +Ġcr ack +Ġt ies +e z +Ġpract ical +Ġprov ince +ĠPl ace +Ġprior ity +IC E +Ġdescrib es +Ġbr anch +F orm +ask a +miss ions +b i +Ġp orn +ĠTur k +Ġent hus +Ġf ighters +Ġ0 8 +ĠDet roit +Ġfound ation +av id +A re +Ġjud gment +cl ing +Ġsol ve +ĠDes ign +W here +hes is +ĠT ro +a fter +Ġne utral +ĠPalestin ian +ĠHolly wood +Ġadv is +ĠN on +y es +ol is +Ġrep utation +Ġsm ell +Ġb read +ĠB ul +ĠBe ach +Ġclaim ing +Ġgen etic +Ġtechn ologies +Ġupgr ade +row s +Ġdevelop er +ĠJ osh +ĠDis ney +erv ed +ip al +Ġun ex +Ġbare ly +t hen +ĠP ub +Ġill ness +et ary +ĠB al +Ġp atch +Ġbut t +Ġst upid +ĠD og +ĠD allas +f ront +ie ce +Ġprot ests +Ġch at +oen ix +Ġw ing +Ġpar liament +Ġ7 7 +ose xual +Ġre nder +pt ions +ĠCo ast +os a +ĠG reg +h op +ĠMan agement +Ġbit coin +Ġrec over +Ġincor por +or ne +ĠUs ing +Ġpre ced +Ġthreat ened +Ġspirit ual +ĠE vent +ĠF red +Ġadvert ising +Ġimprove ments +ĠC ustom +Ġer rors +Ġsens itive +ĠN avy +Ġcre am +L ook +Ġex clusive +Ġcomp 
rehens +Ġde leg +Ġcon ce +Ġrem em +Ġstruct ures +Ġst ored +N D +Ġ1 000 +U P +ĠB udd +A F +w oman +ĠAcad emy +ð Ł +se a +Ġtem porary +Ab out +es ters +Ġtick ets +Ġposs ess +in ch +o z +Ġl a +Ġcontract s +Ġun p +Ġc ig +ĠK at +ult ural +as m +Ġmount ain +ĠCapt ain +St ep +m aking +ĠSp ain +Ġequ ally +Ġl ands +at ers +Ġreject ed +er a +im m +ri x +C D +Ġtrans action +g ener +less ly +Ġ| | +Ġc os +ĠHen ry +Ġprov isions +Ġg ained +Ġdirect ory +Ġra ising +ĠS ep +ol en +ond er +Ġcon sole +in st +Ġb om +Ġunc ertain +1 50 +ock ing +Ġmeas ured +Ġpl ain +Ġse ats +Ġd ict +S L +af e +Ġest imate +iz on +at hered +Ġcontribut ed +Ġep isodes +omm od +G r +AN T +Ġ6 9 +G ener +Ġ2 50 +vious ly +rog en +Ġterror ism +Ġmove ments +ent le +oun ce +ĠS oul +Ġpre v +ĠT able +act s +ri ors +t ab +Ġsuff er +Ġn erv +Ġmain stream +ĠW olf +Ġfranch ise +b at +Ġdem ands +Ġag enda +Ġdo zen +Ġclin ical +iz ard +ĠO p +t d +Ġvis ited +ĠPer haps +Ġact or +Ġde lic +Ġcont ribute +Ġin ject +ĠE s +ac co +Ġlist ening +Ġcon gress +epend ent +Ġprem ium +Ġ7 6 +ĠIr ish +Ġass igned +ĠPh ys +Ġworld wide +Ġnarr ative +ot ype +m ont +b ase +ĠB owl +ĠAdminist ration +Ġrel ation +ĠE V +C P +Ġco vers +Ġ7 8 +Ġcert ific +Ġgr ass +Ġ0 4 +pir acy +ir a +Ġengine ering +ĠM ars +Ġun employ +ĠFore ign +st ract +Ġv en +Ġst eal +Ġrepl ied +Ġult imate +Ġtit les +d ated +Ġj oy +a us +Ġhy per +ak u +Ġoffic ially +ĠPro duct +Ġdifficult y +per or +Ġresult ed +rib ed +l ink +wh o +~~ ~~ +ĠSpe ed +ĠV iet +W ind +ĠBar ack +Ġrestrict ions +ĠSh are +Ġ199 5 +ition ally +Ġbeaut y +op t +Ġm aps +ĠC R +ĠN ation +ĠCru z +W ill +Ġelectric ity +Ġor g +Ġb urd +Ġviol ation +Ġus age +Ġper mit +ĠCh ron +ĠF ant +Ġn aturally +Ġ0 7 +Ġth rown +ĠAw oken +Ġal ien +ĠHer o +ĠK ent +ĠR ick +ri ke +Ġp ace +}, {" +G L +Ġpo ison +ĠT ower +Ġform al +al ysis +Ġgen uine +Ġk il +a ver +Ġproced ure +ĠPro p +intend o +ĠM ain +as ant +Ġtr ained +G ame +ĠL oad +ĠM A +Ġcru cial +Ġle ts +ĠF R +Ġch ampion +1 01 +ĠCon ference +Ġwrit ers +Ġconnect ions +Ġo kay +ir ms +ĠR and +Ġenc ounter +ĠB uff +Ġachie ved +Ġche cks +isc ons +Ġassist ant +Ġwhen ever +ĠA ccess +ĠU r +b in +Ġcl ock +is p +op her +Ġb orrow +Ġm ad +Ġperson ality +on ly +IS T +ab ama +Ġg ains +Ġcommon ly +Ġter r +Ġhyp ot +Ġre ly +Ġt iss +iscons in +Ġrid ic +f unction +ĠO regon +Ġun com +r ating +el and +ĠN C +Ġm oon +ann on +Ġvulner able +ut ive +³³ ³³ +ĠRad io +Ġw estern +se ct +ĠT ony +Ġocc urs +ĠO s +ĠH on +Ã Ń +Ġv essel +ĠScot land +Ġdiscrim ination +Ġsubsequ ent +st ring +Ġfant asy +ĠSh adow +Ġtest im +W E +it i +r as +Ġbo at +Ġmar ks +Ġord inary +Ġre n +Ġrepresent ative +Ġpet ition +Ġ7 3 +Ġad venture +Ġign ore +ĠPhil adelphia +ĠS av +V P +Ġfact ory +Ġt asks +Ġdep ression +z ed +................ ................ 
+ĠSt orm +Ġc ogn +Ġelig ible +Ġredu cing +v ia +Ġ0 5 +Ġstri king +Ġdoll ar +h o +O V +Ġinstr ument +Ġphilosoph y +ĠMo ore +ĠA venue +Ġrul ed +ĠFr ont +IN E +ĠM ah +Ġscen ario +ĠNAS A +Ġen orm +Ġdeb ut +Ġte a +T oday +Ġabs ence +S im +Ġh am +le ep +Ġt ables +ĠHe art +M I +K e +re qu +V D +m ap +Ġchair man +Ġp ump +Ġrapid ly +v i +Ġsubstant ial +E P +d es +ch ant +ili pp +ĠS anta +ri ers +anche ster +L oad +ĠC ase +Ġsa ving +Ġ7 4 +ĠA FP +er ning +oun ced +ĠMin nesota +ĠW as +Ġrec ru +Ġassess ment +ĠB ron +U E +Ġdynam ic +Ġf urn +ul ator +Ġprop ag +h igh +Ġacc ommod +Ġst ack +ĠS us +w rit +Ġre ven +ĠGod d +ĠZeal and +ab s +Ġbr ut +Ġper pet +h ot +Ġhard ly +ĠB urn +ãĤ ¹ +Ġst y +Ġtrans actions +Ġg ate +Ġsc reens +Ġsub mitted +Ġ1 01 +Ġlangu ages +ugh t +em en +Ġfall s +Ġc oc +Ĥ ¬ +Ġstri kes +p a +Ġdel iber +ĠI M +Ġrel ax +ann els +ĠSen ator +Ġext rem +Ġ} , +ĠDe b +Ġbe ll +Ġdis order +c ut +Ġi OS +Ġl ocked +Ġem issions +Ġshort ly +" ] +ĠJud ge +ĠS ometimes +Ġr ival +Ġd ust +Ġreach ing +F ile +¯¯ ¯¯ +ino is +ĠJ ason +Ġs atell +are t +Ġst ations +Ġag ric +ĠTechn ology +com es +ĠUn fortunately +ĠChild ren +Ġappl ies +ast ed +Ġan ger +ail ability +ĠDam age +Ġcomp are +ĠStand ard +Ġaim ed +ĠB a +angu age +Ġreg ulation +Ġj ury +Ġair port +Ġse ctions +ĠPr ince +em ed +Ġmedic ine +Ġh itting +Ġsp ark +ol ves +Ġad s +St ate +Ġfood s +Ġrepl acement +Ġch icken +Ġlow est +Ġmind s +Ġinvol ves +u i +Ġarr ang +Ġproced ures +ĠWh ich +ivers ary +Ġb ills +Ġimprove ment +Ġin ev +Ġexpect ations +Ġintellect ual +Ġsp aces +Ġmechan ism +2 50 +bre ak +ĠZ e +ĠT enn +ĠB alt +Ġbar rel +Ġstat ic +man n +Pol ice +Ġt ips +Ġhand ling +c us +od ed +il ton +ir y +Ġjournal ists +our se +Ġcom ic +Ġnom ine +IT Y +Ġvers us +Ġlo op +Ġsur f +ĠInd ust +ĠHun ter +Ġbelief s +is an +Ġset up +Ġbre w +im age +Ġcomput ers +f ol +} ," +ĠMed al +Ġtax p +Ġdisplay ed +Ġg rav +Ġf iscal +M on +ĠMos cow +ĠK ong +ĠCent re +Ġcamer as +ĠMr s +ĠH ay +Ġa ver +ĠK elly +p y +Ġrequire ment +Ġent itled +omb ie +Ġsh adow +ag ic +ĠA k +Ġel ite +Ġdiv ided +Ġhead ing +Ġcop ies +Ġloss es +Ġv it +k ed +ĠB ry +Ġan s +ĠSte am +Ġrep orter +he im +ĠIt em +Ġsuper ior +d on +ere nt +à ¶ +Ġtherap y +Ġpe ak +ĠMod el +Ġl ying +Ġg am +z er +r itten +Ġrespons es +Ġconsider ation +ĠB ible +Ġl oyal +Ġinst ant +Ġp m +ĠFore st +à ¼ +Ġext end +Ġconv icted +Ġfound er +Ġconv in +ĠO ak +che ck +Ġsch olars +p ed +Ġover se +T op +c ount +ĠAr k + · +Ġ0 6 +ĠL A +m d +ĠLat in +im ental +ĠC PU +Ġsubst ance +Ġminor ity +Ġmanufact uring +E r +ocol ate +Ġatt ended +ĠMan ager +r ations +Ġappreci ate +om y +GB T +id ency +B L +Ġguarant ee +pos ition +Ġo cean +clud e +Ġhead ed +Ġt ape +Ġlo ose +Ġlog ic +Ġpro ven +Ġsp ir +Ġad mit +is a +Ġinvestig ate +Ġ199 4 +sy lv +ĠL ost +c est +Ġ7 1 +Ġrequest ed +Ġwind ows +ĠPok é +ĠWith out +M et +Ġbehavi our +Ġread er +Ġh ung +ĠKe ep +Ġro les +Ġimplement ed +Ġbl ank +Ġserv es +ĠJ ay +Ġc ited +ĠF riend +prof it +ap on +Ġrep air +it em +arr ass +Ġcrit ics +ad i +ĠF ather +Ġsh out +Ġf ool +Ġ8 8 +Ġprodu cing +Ġl ib +Ġround s +Ġcirc le +Ġpre par +Ġsub mit +Ġn ic +mor row +ãĥ « +U nder +Ġv ital +ater n +Ġpass word +Ġpublic ation +Ġprom inent +Ġspeak s +Ġb ars +Ġde eper +ĠM ill +port ed +Ġw id +Ġbut ter +Ġsm oking +Ġindic ates +K ey +rop ri +ĠF ile +all ing +ast ing +ĠR us +Ġad j +Ġ7 9 +av al +Ġpres um +bur gh +on ic +Ġf ur +Ġpoll s +ik a +Ġsecond ary +Ġmon ster +ig s +ĠCur rent +E vent +Ġowners hip +end ar +Ġarri ve +ĠT ax +Ġn ull +ĠPri v +Ġth ro +Ġk iss +c at +Ġup set +ang le +it ches +ect or +olog ists +ĠGal axy +Ġcor ruption +Ġh int +ent er +ĠH ospital +Ġgreat 
ly +Ġbeg un +es y +Ġso il +ĠAnt on +Ġmain tenance +ãĥ © +Ġdo zens +Ġhuman ity +ĠAl abama +Ġr om +w orth +ap ing +sylv ania +l ah +Ġg athered +G A +Ġattack ing +f ound +ĠSqu are +Ġar bit +ict ions +ĠW isconsin +Ġd ance +ĠS aint +arch y +Ġbase ball +Ġcontribut ions +Ġliter ature +Ġex ha +per ty +t est +Ġb ab +Ġcontain er +let ter +Ġfall en +Ġwebs ites +Ġbott le +ĠS ac +Ġbre ast +ĠP L +Ġveter an +Ġinterview s +ĠA le +Ġb anned +eng ers +ĠRev olution +in th +Ġconc erning +IV E +Ġexp enses +ĠMatt hew +ĠColumb ia +d s +ist ance +Ġent ity +.. ." +Ġrel iable +Ġpar alle +ĠChrist ians +Ġopin ions +Ġin du +l ow +Ġcompet e +Ġth orough +Ġemploy ed +Ġestablish ment +ig en +ĠC ro +Ġlawy ers +ĠSt ation +T E +ĠL ind +ĠP ur +it ary +Ġeffic iency +âĢ IJ +ĠL y +Ġm ask +Ġdis aster +Ġag es +ER E +es is +ĠH old +Ġcas ual +b led +Ġen abled +ĠEn vironment +ĠInt elligence +i per +ĠM ap +ĠB E +Ġemer ged +is dom +Ġc abin +Ġregist ration +Ġfing ers +Ġro ster +Ġfram ework +ĠDo ctor +et ts +Ġtransport ation +Ġaware ness +H er +Ġattempt ing +O ff +ĠSt ore +ÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤ +ĠK now +Ġdef ence +Ġsc an +ĠT en +ĠCh air +ĠP H +ĠAtl anta +Ġfuck ing +Ġans wered +b n +ĠK ar +Ġcateg ories +Ġr ational +Ġc ust +Ġrob ot +Ġcorrect ly +Ġg if +Ġgraph ics +m ic +Ġground s +ĠO pp +i ate +Ġdist ributed +Ġsan ctions +Ġchalleng ing +ut o +Ġingred ients +Ġinv ited +Ġfound ed +ĠRe qu +d ed +Ġb owl +Ġbrother s +ĠH a +I O +Ġw ages +im ore +oc ial +Ġse ed +ative ly +Ġaddress es +ĠI owa +ab eth +Ġatt itude +is d +ch ild +Ġm ole +Ġdisco very +y ard +B r +Ġ8 2 +Ġsuppl ies +ell ing +Ġdist ingu +C R +Ġre cept +Ġ vert +Ġsw im +b ec +d oor +ĠY eah +Ġg al +Ġinter act +ĠE SP +ĠC S +amp s +Ġconvin ced +Ġobject ive +Ġdis h +ĠPhot os +l ad +Ġdownt own +o il +in ction +Ġto morrow +ĠC OM +Ġsurv ival +sh ot +Ġsett lement +C ons +ĠX box +int erest +ĠS M +arg o +en ess +Ġeth nic +b ered +M in +ĠT ok +Ġinc ent +ĠComm and +Ġmain tained +Ġbreak s +br idge +at ar +ag g +ĠF inally +un icip +ĠO nt +le ft +Ġrecogn ition +Ġ* / +ĠP ers +Ġwe lf +Ġaddress ed +ĠK ansas +Ġvir us +Ġwhere as +Ġp apers +ram s +ĠMin istry +Ġple asure +Ġacqu ired +Ġd uration +j pg +Ġcal m +ĠN HL +Ġburn ing +Ġfold er +ick ed +ĠP y +ĠIll inois +Cl ass +ĠGodd ess +Ġperform ing +Ġwelf are +j ar +In ter +Ġl in +Ġenh ance +Ġnot ion +f are +yp es +ĠAre a +Ġcann abis +ĠDie go +f s +ĠM anchester +com m +in ite +Ġcover ing +ĠS ound +Ġ19 60 +Ġ8 4 +e lect +z ing +Ġcitiz en +Ġph ones +Ġr aid +Ġign ored +ĠOb ject +Ġu pload +c ard +Ġmod ified +Ġroom s +ia h +r ange +he ast +ach us +Ġsuggest ing +âĢ ĭ +gr ade +E l +Ġclot hing +Ġr h +ĠH an +un ity +en cing +ĠAust in +sec ution +t ra +d em +ĠQ ual +Ġhe aven +Ġst ages +Ġw edd +pl us +ific ial +ĠIm m +ĠH o +iet ies +Ġphr ase +Ġbr ill +act ory +Ġprov iders +Ġsil ence +Ġa er +ĠA I +ĠAd venture +Ġplatform s +Ġdemonstr ated +Ġinter f +ing ton +Ġr aces +Ġgr ade +ult ane +ĠTh rough +f alse +Ġb ow +ĠA B +Ġfl avor +Ġhistor ic +g ov +Ġcol our +Ġview ed +ĠEm ail +el come +Ġinter vention +Ġd iversity +Ġperiod s +Ġre verse +ĠV ery +Ġqu ote +ĠLe ft +th rough +Ġsc rew +Ġland ing +Ġp ill +Ġw et +Ġprot esters +Ġrepe at +av ed +er k +Ġsal ary +ĠPenn sylvania +St ill +Ġmay or +Ġkit chen +Ġfeat uring +ĠM useum +ĠT ournament +ĠF al +Ġser vers +U C +Ġany body +im g +ĠTr ade +ixt ure +the less +Ġfin ance +Ġcl osing +ĠPat ri +i ac +ab el +Ġ> > +or ous +Ġf irms +sc reen +un a +Ġemb arrass +ul se +Ġlet ting +Ġth rew +ile y +Ġch annels +l an +ĠVeg as +Ġse ar +Ġfant astic +ar re +uzz le +ĠD er +Th ose +Ġsw ing +Ġshe et +ind ex +co ver +og an +Ġvari ables +ĠTe ch +Ġsp oken +ac hel +ĠD a 
+ĠMount ain +Ġload ed +Ġfoot age +vers ion +Ġun l +ĠPh oenix +Ġthrow ing +Ġf iring +Ġtrack ing +Ġw idth +Ġstrugg ling +ro oms +ot ion +Ġmonth ly +ĠSer ver +Ġegg s +op en +M C +Ġ199 3 +Ġh ired +Ġstay ed +ĠAll en +Ġst ro +Ġ9 8 +st ep +ĠTurk ish +Ġfab ric +ist ing +ĠD om +Ġd ates +Ġpr on +Ġbasket ball +Ġl ucky +ĠArab ia +Ġassum ed +est y +Ġaff airs +Ġgl ad +ĠInd eed +ĠF A +ĠW ord +Ġjo ining +if ice +p read +ir ts +ĠSe lect +Ġpop ulations +aw are +Ġn ose +Ġcompl aints +st art +Ġsc oring +Th anks +Ġmin ing +Ġvisit ors +S H +Ġdam aged +Ġcharacter istics +ĠP ent +D C +Ġ8 3 +ĠS ix +r ates +Ġfl ags +ĠB rew +d og +M ark +// // +Ġexec ution +Ġj oke +ph ones +Ġtestim ony +Ġob st +Q L +ĠC ut +Ġstud ied +ĠN intendo +ick et +ĠN BC +Ġl ad +ĠB ra +ĠM oh +Ġk ernel +Ġoverwhel ming +Ġag ed +Ġapplic able +ĠC ond +Ġroad s +ĠBl ock +m ade +od ge +Ġcomm ands +Ġoff ices +vel and +Ġt ut +Ġrece iver +ĠF ro +Ġsho pping +Ġi P +ĠSt re +ĠA BC +Ġentertain ment +ĠB ow +ort ed +M c +Ġread s +gr ad +ĠCol lect +Ġâ ĪĴ +ĠCap ital +eder ation +Ġemploy er +Ġinvolve ment +Ġanx iety +al ia +Ġro of +ĠAm ong +ĠDemocr at +Ġstat s +ĠV ill +Ġconst itutional +Ġrefer ring +itt y +Ġtack le +out ube +Ġback ed +ĠH ong +ĠBro ad +Ġe le +ĠO tt +Ġ199 2 +h our +achus etts +C al +Ġdefe ated +Ġ8 1 +es p +Ġseem ingly +w as +ĠJ enn +ĠK urd +Ġg ene +Ġdisc ount +R et +EC T +( ); +Ġclub s +Ġs id +ĠM arsh +Che ck +Ġp p +ĠE ag +ides pread +Ġbe ings +F T +Ġintrodu ction +ĠCh ange +AR D +Ġ1 10 +ad ows +ier ce +Ġme al +a uthor +ĠB ang +lah oma +Ġr anks +201 1 +?? ?? +m ax +Ġcoll apse +Ġop ens +Ġe cho +Ġs oph +Ġrac ist +Ġenorm ous +Ġw aves +Ġt ap +Ġcomprehens ive +. -- +ĠR oy +Ġfarm ers +Rel ated +a ired +ron es +ĠC rim +Ġproport ion +Ġdesign s +Ġnegoti ations +Ġvirt ually +ĠBat man +Ġwar n +Ġlegit imate +m ate +Ġcon vention +, , +net ic +ĠS D +Ġconsist ently +Ġcompens ation +Ġpunish ment +Ġy e +Ġt ie +ĠB ureau +ir lf +ĠB u +ĠA ren +ĠPh ilipp +Ġkn ife +Ġmem ories +ĠR oss +Ġang le +Ġ8 6 +ĠTh under +Ġre nd +ĠT our +Ġcount s +s ung +ĠIm p +Ġeduc ational +Ġaccess ible +C OM +Ġd rew +y er +G l +am ine +OR T +O B +I B +m aster +Ġtri als +og y +h ar +ĠTr ust +Ġprefer red +irlf riend +ĠN ev +Ġb in +Ġc ow +P age +Ġsign ature +ĠB L +7 00 +Ġret ired +Ġby tes +Ġneigh b +ĠLeg end +Ġdev ast +Ġsuspect ed +is ons +ĠPoké mon +sc ale +Ġcap abilities +Ġre vel +Ġche ese +d y +igr ant +Ġfail ing +b its +ĠHer oes +ĠG host +ĠS cient +Ġappoint ed +ur i +Ġinst itution +Ġexpand ed +g reg +Ġmonitor ing +Ġp odcast +Ġcoal ition +Ġ9 6 +J o +Ġst olen +ĠS ab +Ġstop s +Ġhol iday +Ġint r +C ar +Bl ack +ĠL GBT +Ġwar ming +ĠAnd erson +Ġ8 9 +Ġprodu cer +M ed +Ġaccur acy +ĠMar vel +iz abeth +ĠPat rick +m ony +Ġmin i +ac les +Ġover t +the y +Ġmembers hip +ĠV en +Ġex ch +Ġrem oval +ĠD ave +T Y +m ad +ĠF ind +Ġad equ +Ġe c +Ġte eth +Ġemot ion +Ġper m +Ġsole ly +d b +Ġextra ord +IG HT +c al +Ġgu idelines +Ġd ying +Ġsusp ended +ĠPrem ier +ĠAnth ony +el ve +Ġd ad +ĠE th +ĠFoot ball +Ġabandon ed +Ġ< < +Ġm arch +Ġhor ror +âĢ¦ " +Ġchild hood +Ġcampaign s +Ġl unch +ĠAl bert +bl ock +âĸĪ âĸĪ +ound ing +Ġb one +or gan +ad ers +ĠFl ash +ĠDri ve +Ġton ight +Ġw ars +ĠF L +Ġform ation +con st +New s +Ġcom pe +or ious +ĠSt aff +Ġdiscuss ions +ĠProt ection +ĠJ am +Ġcrit eria +Ġinstall ation +Ġaccompl ish +iz za +Ġpub lisher +Ġresc ue +ĠT ry +U LL +ĠS om +ĠH op +ore t +th s +ord on +Ġp ocket +ĠIn v +Down load +ĠCr ime +Ġb ene +ĠGu ide +ĠAs sembly +Ġparam eters +I E +ĠAlex ander +Ġconc ert +ĠSc he +Ġsh oes +Ġvis iting +Ġrec all +Ġb ub +Ġr ural +Ġconc rete +ĠR os +N ext +R uss +Ġlo ans +ĠSh ield +Ġtre m +hem at +k g 
+ĠHar ris +is ition +ĠM ove +ĠF C +Ġf ate +ĠCh o +Ġt ired +Ġprinc ipal +h ist +ien ces +ath y +Ġse vent +Ġm ood +Ġstrateg ic +Ġdise ases +Ġfor um +Ġtem por +Ġhead quarters +P ar +ig e +fl ix +Ġgu itar +Ġ9 4 +On ly +Ġrele ases +ro ph +================ ================ +Ġ6 00 +ĠContin ue +ig ate +ĠC rit +sy stem +Ġdis abled +Ġunex pected +ith ub +Ġuncle ar +ĠE st +Ġcontr ad +Ġstrateg ies +vent ures +Ġpass age +AM E +Ġimpro ving +Ġreve als +Ġdecre ase +ov a +Ġann oy +ĠSh ort +ĠL ibrary +Ġcy ber +n ell +ĠH ur +ĠC B +Ġphot ograp +U I +Ġs ed +G e +Ġ8 7 +Ġd iverse +Ġencour aged +Ġcons piracy +Ġbird s +Ġoper ator +Ġhand ful +Ġclass ified +? ) +Ġdram atic +Ġinvestig ators +it o +Ġw idespread +ĠR oom +-------------------------------- -------------------------------- +Ġcollect ive +Ġjournal ist +St ring +Ġtemper atures +il a +Ġgu id +Ġins pect +Ġmiss ile +ĠMay or +Ġman ual +Ġsim ultane +Ġrat ings +Ġsu ck +Ġ9 7 +Ġunivers al +Ġph arm +Ġdis rupt +ian o +A V +Ġf t +Ġstat ist +old s +ĠWalk er +ph p +Ġunder t +ĠL as +ish op +nt il +res hold +ĠWhe ther +M s +Ġden y +ĠCl oud +Ġprov ider +Ġsurv iv +ĠUp date +h as +Ġmist akes +ch arge +pl ed +r ity +Ġn ode +ĠMass achusetts +ool s +lic ation +Ġf ails +em ale +or i +back s +Ġsh irt +Ġ' ' +ĠN AT +Ġwat ers +els on +Ġe ase +Ġsc ar +Ġcont ents +m ind +Ġcont ribution +Ġsh r +Ġhand ed +Ġst ability +Ġtra ve +E m +Ġmir ror +12 3 +Ġwe igh +Ġf iction +ou ver +ist ant +r ition +ĠF ed +Ġphys ically +Ġst ake +ĠArt icle +ĠAr c +ĠLew is +ĠM ind +Ġdemonstr ate +Ġprof its +v ision +om ic +ol id +Ġbatt les +Ġdri ves +Ġeas tern +ĠS ony +!! ! +ar ation +v ard +ĠG L +port ation +Ġ9 2 +Ġlaw makers +Ġprotect ing +ĠE PA +Ġy eah +Ġsh ame +ol ph +e ven +x it +Ġatt ach +Ġrepresent ing +Ġob s +ĠUt ah +iff s +ĠFre edom +à ³ +A K +Ġinc idents +it age +Ġview ers +c d +Ġm ouse +Ġcl ar +Ġaccord ance +Ġb ot +c or +ĠSum mer +he ld +Ġinnoc ent +Ġiniti ative +ol s +________________ ________________ +Ġsp ots +p ace +Ġconvent ional +Ġcorpor ations +Ġblock ed +H D +at tered +Ġref ers +Ġbu ck +ĠDig ital +12 0 +Ġtop ics +T F +Ä ģ +br id +re ement +Ġunder lying +ĠM ember +Ġinvestig ating +Ġpregn ancy +Ġtouch down +ĠB and +ĠCall er +Ġinst ances +P P +w a +G ood +Ġ199 1 +ĠC old +Ġfear s +Ġrem arks +Ĩ Ĵ +at al +Ġm it +Ġexper iments +i pt +Col or +ind u +Up date +Ġ9 3 +A g +Ġ å +anc ouver +B oth +Ġjud ges +Ob ject +Ġst ere +umb n +Ġparticip ation +ĠSt ars +ĠJ ere +Ġweek ly +ĠB an +Ġconvers ations +ĠP itt +u z +ĠIndian a +ĠK ick +Ġinf ection +Ġhero es +Ġsett led +Ġstri p +Ġh al +Ġd ump +ĠS ci +Ġl es +Ġref erences +ĠU RL +ĠBr idge +Ġwant ing +For ce +Ġex clus +Me anwhile +m n +Ġg entle +m aker +sen al +ĠG ro +ou ri +ĠR ain +ĠAll iance +Ġl ift +el a +S D +ĠCle veland +Ġrank ed +Ġst adium +Ġdead ly +ä ¸ +Ġr iding +ar ia +ĠAr mor +Ġdocument ation +ĠGree ce +ree k +Ġl ens +ĠS a +Ġg ross +ĠE mer +ag ers +ĠD ub +ĠR h +ĠAM D +Ġarri val +Ġdes ert +Ġsupp lement +ĠRes p +Ġkn ee +Ġmarg in +f ont +og g +201 0 +ĠP ir +ĠP rom +iv als +Ġint ake +Ġdifferent ly +ug s +Ġb its +clud ed +Ġsearch ing +ĠD u +um ble +Ġfunction al +ĠBalt imore +ĠC ould +Ġdes ired +Ġcirc uit +ĠL yn +ĠG O +ĠF alse +re pre +' : +alt ies +Ġmin im +Ġdro ve +ĠSh ould +Ġh ip +Ġpro s +Ġut ility +ĠN ature +ĠM ode +P resident +o pp +r at +form ance +Ġconcent ration +Ġf ont +ĠB ud +Ġam id +Ġre vers +ĠM L +B ar +Ġinter action +Ġjur isd +Ġspell s +d ep +f il +Ġcivil ians +ut ter +ĠCo oper +ĠBel ow +Ġent rance +Ġcon vert +Ġcontrovers y +ow ered +Ġcontr ary +Ġar c +ĠExec utive +ĠOffic er +Ġpack ages +Ġprog ressive +w idth +Ġreserv ed +v ol +ĠSam sung +Ġprint ed 
+Ġcent ers +Ġintrodu ce +ĠKenn edy +Ġodd s +Ġsure ly +Ġindepend ence +Ġpass engers +repre ne +ĠBe h +Ġl oves +ĠESP N +Ġfac ilit +Ġident ical +Ġdo ct +Ġpartners hip +con f +ĠH ide +Ġconf used +ĠC ow +M en +Ġw rest +ĠIraq i +Ġh oles +ĠStud ies +Ġpregn ant +h ard +Ġsign als +I X +Ġpull ing +Ġgrad uate +Ġnomine e +D ate +Ġper mitted +Ġâ Ĥ¬ +ĠOk lahoma +St art +Ġauthor ized +Ġal arm +ĠC os +v an +Ġgener ations +c ular +Ġdr agon +ĠSoft ware +ĠEd ward +Ġcontro ller +S en +ge red +ĠV ik +Ġappro ached +Th ank +Ġcan ce +Ġform ula +ĠSm all +Ġweak ness +Ġr amp +it udes +j ud +Ġbrill iant +Ġacc us +s ource +Ġ8 00 +ĠE vil +S w +Ġhom eless +we ek +i ens +r ics +ĠTh ird +T O +Ġorgan ic +Ġpresent ation +ag h +ĠDown load +v ation +Ġas sembly +or able +hold ers +ĠBern ie +ĠHel p +Ġt ong +ĠF ight +Ġbe ach +B ook +ĠL ic +Ġr ush +ĠR ound +ou p +ĠMar x +Ġcalcul ated +ĠDe vil +ĠSar ah +Ġoccasion ally +Ġbul let +Av ailable +g ate +Ġ9 1 +Ġh osp +Ġprom ises +ĠH IV +ĠSt adium +ĠSt ock +ĠCorpor ation +g age +N G +ĠC redit +Ġs ne +ib l +Ġacc um +s uch +Ġterror ists +Ġconscious ness +ĠZ h +Ġdram a +ool a +pir ation +Ġlab our +ĠN in +Ġut ter +Ġdemocr atic +Ġass ass +il ation +Ġg est +Ġab road +Ġmet ab +Ġs orts +Ġfl av +U B +Ġm g +ĠNot hing +ĠO d +Ġmus ical +200 9 +Ġdro ps +oc ated +ater al +0000 00 +Ġg re +Ġequ ality +Ġburd en +Ġv ig +ĠLe ader +-------- ---- +Ġcere mony +Ġf ighter +Ġact ors +Ġ æ +am an +F i +Ġal ign +put er +Ġe lder +ĠN SA +Ġrepresent ation +ĠOnt ario +IT H +usal em +Ġharass ment +itz er +Ġsy mp +Ġbox es +ĠD R +Ġman ifest +at re +Ġ ^ +Ġd ies +le ton +Ġmiss ions +et he +Ġres olve +Ġfollow ers +Ġas c +Ġk m +l ord +am med +Ġsil ent +ĠAssoci ated +Ġtim ing +Ġprison ers +ĠK ings +ĠF ive +Ġtow er +Ġappro aches +Ġprecise ly +Ġb ureau +ĠM other +ĠI ss +Ġkey board +it ual +Ġfund ed +Ġstay ing +Ġpsych ological +Ġm ile +ĠLe on +ĠBar b +w ill +Ġw ider +ĠAtl antic +Ġt ill +ĠR ome +ro t +Ġaccomp an +Ġfl our +ac o +W orld +ĠExp ress +ĠY u +C or +Ġple ased +part y +Ġpoint ing +Ġinf lation +Ġro y +Ġ ), +ain er +Ġwedd ing +orm on +Ġrequ iring +Ġqual ified +Ġse gment +EN D +Ġs izes +e als +Ġcor rupt +ass ador +Ġcele b +Ġdream s +ĠM ess +Ġcheck ing +ĠV ersion +Ġprep aring +Ġact ively +ĠD iff +Ġl ux +ĠW inter +act eria +ĠN E +Ġdep uty +Ġtrans gender +Ġsum mary +Ġin her +er ies +ch ar +ĠY an +Ġkn ock +ĠP ath +Ġl ip +roll er +Ġimp ression +Ġcelebr ate +Ġsl ide +Ġgu ests +Ġcl ip +F S +Ġsav ings +Ġcapt ain +Ġleg acy +ĠDen ver +Ġw ounded +tab oola +AC T +Ġpurs ue +Ġo xy +Ġ q +Ġsem i +ĠN eed +ĠAff airs +Ġob sc +Ġcheck ed +Ġd ual +C ode +ĠM D +le m +ult y +Ġ © +ĠEl izabeth +Ġcent uries +ard ed +s rc +Ġev ident +enn is +at in +Ġunemploy ment +ĠMar io +Ġint im +Ch rist +Ġbi ological +Ġsold ier +ĠAdd ed +Ġm ath +ĠG il +Ġbi as +Ġd ating +ĠO cean +Ġm ice +M us +h ire +ĠT es +Ser ver +lim ited +S ize +Ġmet ers +Ġrock et +es see +Ġcertific ate +ĠIran ian +AS S +Ġgr id +D ec +Ġro lling +com mun +ĠSwed en +b ury +Ġtiss ue +Ġrac ism +ĠL ocal +Ġmyster y +Ġexam ine +Ġst em +Ġs its +Ġhop ed +ot ing +Ġdial ogue +Ġpers u +W atch +l ay +M AN +Ġch ronic +ĠPort land +mark et +ĠS EC +Ġparalle l +Ġsc andal +Ġcar ries +Ġphenomen on +h uman +ack er +ĠO x +Ġretire ment +tain ment +ov ie +ĠG ear +Ġd uties +Ġdo se +Ġsc roll +M B +in f +Ġsa uce +Ġland scape +red dit +ĠChampions hip +ĠRed dit +al id +Ġco in +Ġover s +Ġpost ing +ab out +Ġf el +and y +Ġb old +Ġfocus ing +e ffect +G R +Ġde emed +Ġrecommend ations +Ġste pped +Ġvot er +ĠDe ep +ĠInst agram +Ġmoder ate +ĠMary land +Ġrestrict ed +ĠM B +ĠCh all +Ġto b +Ġc ir +ĠO cc +ĠE ver +Ġcoll aps +IN FO += - +ĠP 
ict +ĠAcc ount +n c +Ġo ught +Ġex port +Ġdr unk +( ' +Ġw ise +ĠM ort +ne cess +Ġan cest +ĠInc re +Ġfrequ ent +m ir +Ġinterpret ation +Ġdepend ent +Ġco ins +ĠB ol +V ideo +ĠJust in +Ġfat al +Ġcook ing +Ġconf usion +ip her +Ġcust ody +ĠMor gan +om ach +ĠGovern or +Ġrestaur ants +el ing +Ġacknowled ged +Ġthe r +Ġgen es +ch ing +He y +Ġtact ics +ĠMex ican +Ġv end +Ġhe s +qu er +Ġnot ing +ĠCamer on +Ġtarget ing +ro ck +Ġcred its +Ġemot ions +Ġrepresent atives +new s +Ġlegisl ative +Ġrem oving +Ġtweet ed +ĠCar ter +ĠF ixed +Ġfor cing +Ġspeak er +Ġm ales +ĠViet nam +l ined +Ġconcept s +Ġvo ices +o ir +ĠT rib +W he +ĠJer usalem +ĠS ant +Ġc ul +Ġl ady +ĠHaw ai +Ġar ts +ĠIn n +ĠMach ine +ĠEm peror +Ġsl ot +g ly +ĠPro cess +II I +Ġathlet es +ĠTem ple +ĠRep resent +Ġpres c +Ġt ons +Ġgold en +Ġp unch +ĠG R +iver pool +Ġen act +Ġlob by +Ġm os +Ġpick ing +Ġlif etime +Ġcogn itive +E ach +z o +Ġd ub +Ġcons ists +ol n +Ġf estival +am ous +Ġint ellig +w ords +ĠSm art +Ġde le +Ġl apt +Ġmag ical +ĠS in +b us +ur ities +igh th +ĠRub y +ĠS ure +ol ving +Ġj un +O ST +Ġimp osed +Ġast ron +Ġcor rel +ĠN S +ĠK it +ĠF uture +b urn +Ġimm une +oc us +Ġcour ses +ĠSt ring +Ġle an +Ġg host +Ġout comes +Ġexp ense +Ġevery day +Ġaccept able +A h +Ġequ ipped +Ġor ange +F R +ĠD utch +Th ough +ĠR ank +Q U +ĠRober ts +wh at +re nd +Ġdisapp ear +Ġsp awn +ĠL am +o is +Ġdes erve +Ġmin imal +Ġnerv ous +ĠW ould +Ġro ok +ĠV ancouver +Ġres ign +sh ire +ĠW orks +ĠB uild +Ġafford able +ĠG ary +ĠAren a +Ġh anging +Ġimpl ications +ĠS ong +Ġmain taining +Ġgu ards +C ON +Ġder ived +Ġexecut ed +Ġthe ories +Ġqu oted +ĠAnd re +og a +sel ess +in fo +ĠBel g +Ġt ears +ĠSur v +Ġbirth day +ig ious +im mer +Ġspect rum +Ġarchitect ure +Ġrec ruit +arm a +T able +Ġmon sters +ĠG ov +Ġdest ination +Ġattract ive +Ġf oss +ĠMore over +Ġpres ents +TH E +Ġrep ly +pt on +Ġc um +Ġdel ight +Ġaffect s +Ġdon ations +ĠT oy +ĠH im +M ENT +Ġover come +it ched +ĠFant asy +ĠH at +ĠBe ast +b ott +Ġinvestig ations +R un +Ġhun ting +d i +f und +Ġs essions +est yle +Ġport ray +oid s +Y eah +Ġcommun icate +Ġcom edy +ĠY ang +Ġbel t +ĠMar ine +Ġpredict ed +Pl ay +Ġimportant ly +Ġremark able +Ġelim inate +D avid +Ġb ind +V ID +Ġadvoc ates +ĠG aza +im p +D B +ĠN a +ĠSim ilar +I ES +Ġchar ity +v as +m ath +Ġâ ĸ +ok er +nd um +Ġcap s +ĠH al +2 000 +e an +Ġfle et +Ġrec re +R ight +Ġsleep ing +ij ing +k ind +Ġdesign ated +à ¤ +Ġanim ation +ke e +ĠInt rodu +Ġ/ > +Ġdelay ed +Ġtrem end +Ġcur ious +U se +Ġle ct +d am +Ġinnov ation +ĠPoint s +Ġload ing +Ġdisp ute +ct ic +ird s +ĠB Y +Ġn urs +ĠVal ue +ION S +ĠH um +Ġtem plate +m ers +Ġappear ances +ĠEnter tainment +Ġtransl ation +Ġsa ke +Ġbene ath +Ġin hib +Ġe uro +abet es +Ġstud ying +ĠM as +Ġper ceived +Ġexam ined +Ġe ager +Ġco aches +Ġim per +ch i +Ġprodu ces +" ). 
+ĠEvery one +Ġm unicip +Ġg irlfriend +Ġh ire +ĠV ice +Ġsu itable +op y +Ġin equ +ĠD uke +f ish +f irst +ĠO bs +Ġinter ior +ĠBru ce +ĠR y +Ġanal ys +Ġconsider able +Ġfore cast +Ġf ert +ors hip +ĠD rug +ĠA LL +: " +th ur +ĠM ail +Ġball ot +Ġinst antly +ĠCh annel +Ġp icks +Ġ198 9 +Ġt ent +ol i +Ġcivil ian +b ling +ell o +b u +Ġin ch +Ġlog o +Ġcooper ation +Ġwal ks +Ġinvest ments +Ġimp rison +ĠF estival +ĠK y +Ġleg ally +Ġg ri +ch arg +S l +Ġthreat ening +du ction +fl ow +Ġdismiss ed +ibr aries +c ap +e le +ĠMc G +ĠHar vard +ĠConserv ative +ĠC BS +p ng +Ġro ots +ĠH aving +umb led +ĠF un +\ / +ĠS earch +ple x +Ġdiscuss ing +Ġcontin u +ĠT ai +ĠW ik +F ree +f it +Ġref use +Ġmanag ing +Ġsy nd +ip edia +w alk +Ġprofession als +Ġguid ance +Ġunivers ities +Ġas semb +unt u +F inally +AS E +ĠAut o +ĠH ad +Ġann iversary +L D +ĠD ur +ĠUlt imate +ih ad +pro duct +Ġtrans it +Ġrest ore +Ġexpl aining +Ġass et +Ġtransfer red +Ġbur st +ap olis +ĠMag azine +ĠC ra +ĠB R +gg ed +ĠH E +M ich +b et +ĠL ady +yl um +erv es +Ġme ets +wh ite +L og +Ġcorrespond ing +Ġins isted +G G +Ġsurround ed +Ġt ens +Ġl ane +Ġco inc +h ome +Ġexist ed +ect ed +ĠDou ble +lam m +Ġske pt +ex p +Ġper ception +ie v +ĠBe ing +o ft +Ġadop t +. : +] ; +Wind ows +Ġsatell ite +AS H +Ġinf ant +d escription +ĠMe anwhile +c m +oc a +ĠT reat +act or +Ġtob acco +ĠN orm +em ption +Ġfl esh +Ġj e +o op +ĠHe aven +Ġbe ating +an im +Ġgather ing +Ġcult iv +G O +ab e +ĠJon athan +ĠSaf ety +Ġbad ly +pro t +Ġcho osing +Ġcontact ed +Ġqu it +Ġdist ur +Ġst ir +Ġto ken +D et +ĠP a +Ġfunction ality +00 3 +s ome +Ġlimit ations +Ġmet h +b uild +con fig +N T +re ll +ble m +ĠM om +Ġveter ans +ĠH u +Ġtrend s +are r +ĠG iven +ĠCa ption +m ay +AS T +Ġwond ering +ĠCl ark +n ormal +Ġsepar ated +Ġdes p +st ic +b rew +Ġrel ating +ĠN ik +ĠF arm +Ġenthus i +g ood +d eb +Ġactiv ist +Ġm art +Ġexplos ion +ĠEconom ic +L ink +Ġins ight +Ġconven ient +Ġcounter part +su pport +ĠV irt +ag en +ĠTenn essee +ĠSim on +ĠA ward +OC K +ĠF igure +Ġoverse as +Ġpr ide +ĠC as +n ote +m g +C urrent +Ġdispl ays +cont ent +Ġtravel ing +Ġhosp itals +ĠFin ancial +ĠP ast +Ġdefend ant +Ġstream ing +m ble +ĠBer lin +uk i +Ġdist ribut +Ġant ib +Ġch ocolate +ĠCast le +Ġinter rupt +ĠR ow +Ġconvers ion +Ġbug s +ĠR ather +li est +L Y +ĠJe an +com mon +ak h +Ġ1 30 +ot ton +ĠDe an +Ġam endment +Ġgame play +ĠWar ren +od a +Ġhigh lights +Ġir re +ĠNAT O +Ġball s +Ġdemand ing +U RE +ĠL uke +F igure +st op +on ia +z one +iz ers +ĠW R +Ġaward ed +Ġregul atory +ĠH art +ĠS N +pl ing +Ġs our +ĠP ixel +us ive +Ġf et +ĠS ent +Ġautom atic +Ġf er +vern ment +ĠKh an +T ON +f ather +Ġextraord inary +th rop +ĠP ython +ĠG PU +Ġsex ually +Ġdesk top +it ivity +ĠAnton io +Ġo rient +Ġe ars +ob by +ous es +vertis ements +Ġmanufacture rs +ic ient +min ute +Ġconv iction +Ġg arden +p ublic +Ġsatisf ied +f old +O K +Ġin hab +ĠTh ink +Ġprogram me +Ġst omach +Ġcoord in +Ġh oly +Ġth reshold +Ġr het +Ġser ial +Ġemploy ers +ĠEvery thing +ra h +Ġb other +Ġbr ands +Val ue +ĠT ed +ĠPlan et +Ġp ink +ĠFurther more +s a +P E +re ck +ĠUS D +ot te +Ġ& & +Ġland ed +g ets +Ġprodu cers +Ġhealth care +Ġdomin ant +Ġdest ro +Ġam ended +ch ron +Ġf its +ĠSy d +ĠAuthor ity +AT CH +Ġfight s +ĠL LC +Ġ-- - +ĠCor p +Ġtox ic +spe cific +ĠC orn +ĠChe l +Ġtele phone +ĠP ant +Ġmyster ious +aun ch +od ox +med ia +Ġwitness es +ag u +Ġquestion ed +ĠBre xit +ĠRem ember +ene z +Ġend orse +iat ric +ĠId ent +Ġridic ulous +1 10 +Ġpr ayer +Ġscient ist +Ġ19 50 +ĠA qu +Ġunder ground +ĠU FC +m are +ĠL ater +w ich +Ġsubsc rib +Ġhost s +Ġer r +Ġgr ants +ant om +Ġsum mon +ear 
ly +ĠC lear +ĠPr im +Ġsusp ension +Ġguarant eed +app er +Ġr ice +ĠSe an +ĠSh in +Ġrefere ndum +Ġfl ed +r ust +Ġ3 60 +ter y +Ġsh ocked +B R +ĠO il +ĠAll ah +Ġpart ly +Ġign or +Ġtrans mission +Ġhom osexual +ivers al +Ġhop efully +ãĤ ¤ +Ġless on +L eg +Ġ .. +Y et +t able +app ropri +re tt +Ġbo ards +Ġincor rect +Ġb acteria +ar u +am ac +Ġsn ap +.' " +Ġpar ad +t em +he art +Ġav ailability +Ġw isdom +Ġ( + +Ġpri est +ĠÂł ĠÂł +O pen +Ġsp an +Ġparam eter +Ġconv ince +Ġ( %) +r ac +Ġf o +Ġsafe ly +Ġconver ted +ĠOlymp ic +Ġres erve +Ġhe aling +ĠM ine +M ax +Ġin herent +ĠGra ham +Ġinteg rated +D em +Ġpip eline +Ġapp lying +Ġem bed +ĠCharl ie +Ġc ave +200 8 +Ġcons ensus +Ġre wards +P al +ĠHT ML +Ġpopular ity +look ing +ĠSw ord +ĠAr ts +' ) +Ġelect ron +clus ions +Ġinteg rity +Ġexclus ively +Ġgr ace +Ġtort ure +Ġburn ed +tw o +Ġ18 0 +P rodu +Ġent reprene +raph ics +Ġg ym +ric ane +ĠT am +Ġadministr ative +Ġmanufacture r +Ġ vel +ĠN i +Ġisol ated +ĠMedic ine +Ġback up +Ġpromot ing +Ġcommand er +Ġfle e +ĠRus sell +Ġforg otten +ĠMiss ouri +Ġres idence +m ons +Ġrese mb +Ġw and +Ġmeaning ful +P T +Ġb ol +Ġhe lic +Ġwealth y +Ġr ifle +str ong +row ing +pl an +as ury +âĢ¦ . +Ġexpand ing +ĠHam ilton +Ġrece ives +S I +eat ures +ĠAn im +RE E +P ut +Ġbrief ly +ri ve +Ġstim ul +Ġ`` ( +Ġ __ +Ġch ip +Ġha z +Ġpri ze +ĠTh ings +AC E +ul in +d ict +ok u +Ġassoci ate +ock ets +y outube +St ory +ateg ory +Ġm ild +ail ing +ĠY e +O rig +ĠK a +or ig +Ġpropag anda +Ġan onymous +Ġstrugg led +Ġout rage +AT ED +ĠBe ijing +r ary +Ġle ather +Ġworld s +Ġbroad er +12 5 +id al +ĠBet ter +Ġt ear +E xt +Ġpropos als +Ġit er +ĠSqu ad +Ġvol unt +m i +D id +ĠP u +p in +Ġspeak ers +Ġb orders +Ġfig ured += ' +Ġsimultane ously +aed a +Ġcharg ing +Ġur ged +Ġcon j +25 6 +ĠG ordon +mer ce +Ġdocument ary +Sh are +it ol +ON E +ĠG arden +h att +ĠThom pson +ane ous +ap ore +Ġt anks +Ġless ons +tr ack +Ġout standing +Ġvolunte ers +Ġsp ray +Ġmanag ers +l arge +Ġcamp s +Ġart ificial +ĠR u +Ġb ags +th al +Ġcompat ible +ĠBl ade +Ġf ed +Ġarg ues +F I +Ġunf air +Ġcor n +Ġoff set +Ġdirect ions +Ġdisappoint ed +ĠCon vention +Ġview ing +M E +oc ity +Ġtown s +Ġlay ers +Ġro lled +Ġjump ed +Ġatt ribute +Ġun necess +inc oln +Ġsupp ose +ĠNet her +ch a +Ġbur ied +Ġsix th +B en +ress ing +OU R +Ġw ound +Ġcy cl +Ġmechan isms +Ġcongress ional +ĠE lement +Ġagre ements +Ġdec or +Ġclos est +ĠM it +Go ogle +} } +Ġm ixture +Ġflu id +S ign +ĠSch olar +Ġp ist +ask et +ab ling +Ġrac ing +he ro +ri el +ass y +Ġche aper +b en +Ġvert ical +amac are +ĠRead ing +g ments +Ġhelic op +Ġsacr ifice +ay a +p aren +V A +ĠL es +ĠStud io +Ġviol ations +ĠAn na +ac er +é ¾ +ĠR at +ĠBe ck +ĠD ick +ĠA CT +Ġcomp osition +Ġtext ure +ĠO wn +Ġsmart phone +ĠN A +Ġfor b +im port +Ġdef ending +il st +re r +Ġo h +ĠJere my +Ġbank ing +cept ions +Ġrespect ive +/ . 
+Ġdr inks +ĠW i +Ġb ands +ĠL iverpool +Ġg rip +ĠB uy +Ġopen ly +Ġreview ed +per t +Ġver ify +ĠCo le +ĠW ales +M O +Ġun pre +Ġshel ter +ĠIm perial +Ġgu i +ĠD ak +Ġsuggest ions +Ġexplicit ly +Ġsl ave +Ġblock chain +Ġcompet ing +Ġprom ising +S ON +Ġsoc cer +Ġconst itution +4 29 +Ġdist ract +ĠU ser +es ides +ĠMet hod +ĠTok yo +Ġaccompan ied +Cl ient +s ur +al og +Ġident ification +Ġinv asion +as ma +Ġindust ries +pp ers +Ġsub tle +ĠUn it +n atural +Ġsurv ived +Ġfl aw +ĺ ħ +ĠH oll +Ġdef icit +Ġtut orial +ĠCh ance +Ġarg uing +Ġcontem porary +Ġinteg ration +for ward +Ġt um +it is +Ġh iding +ĠD omin +ĠT an +ĠB uilding +ĠV in +Ġspokes person +ĠNot es +Ġemer ging +Ġprepar ation +Ġpro st +Ġsuspect s +Ġaut onom +D escription +Ġdeal t +ĠP ear +Ġstead y +Ġdecre ased +Ġso vere +ĠCl in +Ġgrad ually +ors es +ĠW AR +S erv +ãĤ ¢ +h r +Ġd irty +ĠB arn +ĠB C +Ġd il +Ġcal endar +Ġcompl iance +Ġch amber +b b +Ġpass enger +ate ful +ĠT itle +ĠSyd ney +ĠG ot +Ġdark ness +Ġdef ect +Ġpack ed +ass ion +Ġgod s +Ġh arsh +IC K +le ans +Ġalgorith m +Ġoxy gen +Ġvis its +Ġbl ade +Ġkil omet +ĠKent ucky +Ġkill er +P ack +enn y +Ġdiv ine +Ġnom ination +be ing +Ġeng ines +Ġc ats +Ġbuff er +ĠPh ill +Ġtra ff +AG E +Ġtong ue +Ġrad iation +ere r +m em +ĠExpl icit +é¾ į +Ġcou ples +Ġphys ics +ĠMc K +Ġpolit ically +aw ks +ĠBl oom +Ġwor ship +e ger +ut er +ĠF O +Ġmat hemat +Ġsent enced +Ġdis k +ĠM arg +Ġ/ * +P I +Ġoption al +Ġbab ies +Ġse eds +ĠScott ish +Ġth y +] ] +ĠHit ler +P H +ng th +Ġrec overed +ing e +Ġpow der +Ġl ips +Ġdesign er +Ġdis orders +Ġcour age +Ġch aos +" },{" +Ġcar rier +b ably +H igh +ĠR T +es ity +l en +Ġrout es +u ating +F il +N OT +w all +s burgh +Ġeng aging +ĠJava Script +ore r +li hood +Ġun ions +ĠF ederation +ĠTes la +Ġcomple tion +ĠT a +Ġprivile ge +ĠOr ange +Ġne ur +paren cy +Ġb ones +Ġtit led +Ġprosecut ors +ĠM E +Ġengine er +ĠUn iverse +ĠH ig +n ie +o ard +Ġheart s +ĠG re +uss ion +Ġmin istry +Ġpen et +ĠN ut +ĠO w +ĠX P +in stein +Ġbul k +S ystem +ic ism +ĠMarket able +Ġpre val +Ġpost er +Ġatt ending +ur able +Ġlicens ed +ĠG h +et ry +ĠTrad able +Ġbl ast +à ¤ +ĠTit an +ell ed +d ie +H ave +ĠFl ame +Ġprof ound +Ġparticip ating +Ġan ime +ĠE ss +Ġspec ify +Ġregard ed +ĠSpe ll +Ġs ons +own ed +Ġm erc +Ġexper imental +land o +h s +ĠDun geon +in os +Ġcomp ly +ĠSystem s +ar th +Ġse ized +l ocal +ĠGirl s +ud o +on ed +ĠF le +Ġconstruct ed +Ġhost ed +Ġsc ared +act ic +ĠIs lands +ĠM ORE +Ġbl ess +Ġblock ing +Ġch ips +Ġev ac +P s +Ġcorpor ation +Ġo x +Ġlight ing +Ġneighb ors +ĠU b +ar o +Ġbe ef +ĠU ber +F acebook +ar med +it ate +ĠR ating +ĠQu ick +Ġoccup ied +Ġaim s +ĠAdd itionally +ĠInt erest +Ġdram atically +Ġhe al +Ġpain ting +Ġengine ers +M M +ĠM ust +Ġquant ity +P aul +Ġearn ings +ĠPost s +st ra +ãĥ¼ ãĥ +Ġst ance +Ġdro pping +sc ript +Ġd ressed +M ake +Ġjust ify +ĠL td +Ġprompt ed +Ġscr ut +Ġspeed s +ĠGi ants +om er +ĠEd itor +Ġdescrib ing +ĠL ie +ment ed +Ġnow here +oc aly +Ġinst ruction +fort able +Ġent ities +Ġc m +ĠN atural +Ġinqu iry +Ġpress ed +iz ont +for ced +Ġra ises +ĠNet flix +ĠS ide +Ġout er +Ġamong st +im s +ows ki +Ġclim b +ne ver +Ġcomb ine +d ing +Ġcomp r +Ġsignific ance +Ġremem bered +ĠNev ada +ĠT el +ĠSc ar +ĠWar riors +ĠJ ane +Ġcou p +b as +Ġtermin al +, - +O H +Ġt ension +Ġw ings +ĠMy ster +�� �� +ĠUn like +val id +viron ments +ĠAl i +Ġn aked +book s +ĠM un +ĠG ulf +Ġd ensity +Ġdim in +Ġdesper ate +Ġpres idency +Ġ198 6 +h y +IN D +Ġun lock +im ens +Ġhand led +ĠE b +Ġdisapp eared +Ġgen re +Ġ198 8 +Ġdetermin ation +St ream +ik o +ap ters +Ġacknow ledge +J an +Ġcapital ism +P at +Ġ20 20 +Ġpain 
ful +Ġcur ve +Ġbom bs +st orm +ĠMet al +en cer +ĠF ig +ĠA aron +anc hes +Ġins piration +Ġexha ust +t ains +ash i +Ġdesc ript +Ġr itual +ĠChel sea +Ġpromot ion +ĠH ung +ĠW ard +iv a +ĠE T +Ġto ss +all ow +ĠFranc is +D ep +Ġhapp iness +ĠGl ass +Ġbet a +Ġstreng then +N E +o a +Ġbutt ons +ĠMur ray +Ġkick ed +Qu est +ĠT alk +ĠS everal +ĠZ ero +Ġdr one +ul k +Ġc am +ĠM obile +Ġprevent ing +Ġret ro +ĠA x +Ġcru el +Ġflo at +. ), +Ġfil ing +ĠGr ant +ĠB or +Ġr ib +Ġchampions hip +ĠM erc +Ġsty les +Ġc ake +Ġbuild s +ĠS elf +io x +Ġep ic +oy d +B el +ĠSt ew +. ( +ah u +ĠBe yond +Ġout s +Ġsol o +ĠT ree +Ġpres erve +Ġt ub +AR E +ro c +ĠIm pro +ĠW right +Ġbu nd +Ġtr aged +Ġoccas ional +b ian +Sec ond +r ons +Ġinter actions +form ed +s ing +Ġown s +Ġh ockey +Gener al +Ġlog ical +Ġexp end +Ġesc al +ĠGr iff +ĠC rown +ĠRes erve +Ġsto pping +Ġexc use +sec ond +Ġoper ated +Ġre aches +ĠMal ays +Ġpoll ution +ĠBrook lyn +Ġde lete +Ġhas h +Bl ock +ah a +âĢ ³ +Ġsh orter +p iece +> >> +ĠM ormon +t or +Ġpartic les +ĠB art +ry ption +Ġad min +Ġsqu ee +VID IA +Ġcreat or +iam eter +ic ular +N BC +Ġgrab bed +Ġn odd +Ġr ated +Ġrot ation +Ġgr asp +Ġexcess ive +ĠE C +ĠWh it +Ġinvent ory +ault s +ĠF B +Ġe cosystem +Ġbill ions +Ġvent ure +n amed +Ġdef ender +out e +Inst ead +ir able +W ar +Ġassum ption +Ġb ite +Ġearth qu +t ail +sp ace +Ġgif ts +boy s +Ġinev itable +Ġstruct ural +Ġbenef icial +Ġcompe lling +h ole +erv ation +Ġco at +o j +inc arn +ĠY ears +Ġdetermin ing +Ġrhet oric +Ġbound aries +Ġwh ites +A nt +add y +) - +ra ham +eter min +Ġhar vest +ĠCon c +Ġlapt op +ĠM atch +Ġenjoy ing +cc a +oll ar +Ġtri ps +Ġadd iction +ĠS ak +Ġpow ered +Ġc ous +ĠRuss ians +ie re +Ġret rie +qu ality +Ġdiff er +Ġking dom +ĠL aur +ĠCap itol +Ġcon clusions +ĠAl tern +ĠN av +Ġtrans parent +B ER +G roup +ĠCom plete +Ġinf er +Ġint rig +Ġins ane +R O +oph ob +is en +qu al +Mich ael +Ġm useum +ĠP ope +Ġres et +r ative +f ive +Ġagg reg +itte es +osit ory +Ġcar b +ĠRec ord +Ġdec ides +ĠF ix +Ġexcept ions +ĠCommission er +un s +ĠEnvironment al +Ġlegend ary +ist ence +Ġtun nel +k m +Ġins ult +Ġt roll +Ġsh ake +Ġdet ention +qu es +ĠCh rome +ĠF iles +Ġsub t +Ġprospect s +Ġpro l +re nder +pro of +Ġperform ances +St r +Ġh ref +ern ame +Ġachieve ment +Ġf ut +F ull +ĠLe ban +go ogle +ãĥ Ī +amp a +May be +Ġproject ed +ĠE mb +Ġcol leg +Ġa wards +Ġâ Ķ +G old +ĠBl ake +ĠR aj +if ting +Ġp ending +Ġinst inct +Ġdevelop ments +Con nect +ĠM and +ĠW ITH +ĠPhilipp ines +prof ile +Ġalt ogether +ĠB und +ĠT D +oo oo +amp ed +ip h +Ġste am +Ġold est +Ġdet ection +ul pt +Ġ ç +ĠWay ne +200 6 +f a +Ġcir cles +ĠF u +Ġdon ors +appropri ate +ĠDak ota +j amin +Ġmotiv ated +Ġpurch ases +ĠLouis iana +ĠS pl +Ġgl obe +Ġ10 5 +z ip +c all +Ġdepart ments +Ġsustain able +10 5 +ĠO P +if iers +Ġprevent ed +Ġinc omp +ĠComm ander +Ġdom inated +Ġ » +Ġinvest ed +Ġcomplex ity +Ġin cl +Ġens uring +Ġreal m +yn c +ĠInd ependent +r ained +ĠJ en +ĠFl ight +Ġat he +Ġspec ulation +ĠT E +oc ate +t ic +Ġpl aint +her ry +Ġto y +Ġ1 11 +Ġpl ates +st atus +ĠIs a +Ġdev oted +C op +ĠE S +25 5 +ur rency +M ain +Ġsl aves +Ġpe pper +Ġqu otes +Ġce iling +ĠF ish +Ġtrans formation +Ġfra ction +Ġadvant ages +Ġto ile +Ġstun ning +Ġmo ist +bre aking +s i +ĠL ocation +ĠMed ium +Ġtext s +Ġu gly +Ġb io +. 
âĢĶ +ĠB ased +Ġtr ains +ĠW ing +ĠAn cient +ĠRec ords +ĠH ope +Spe cial +ades h +ob i +[ / +Ġtempor arily +V er +h u +os er +Ġover night +Ġm amm +ĠTre asury +ĠV enezuel +ĠMeg a +Ġt ar +Ġexpect s +bl ack +or ph +\\ \\ +Ġaccept ance +Ġrad ar +s is +Ġjun ior +Ġfram es +Ġobserv ation +ac ies +P ower +ĠAdv anced +M ag +olog ically +ĠMe chan +Ġsent ences +Ġanaly sts +augh ters +force ment +Ġv ague +Ġcl ause +Ġdirect ors +Ġeval uate +Ġcabin et +M att +ĠClass ic +A ng +Ġcl er +ĠB uck +Ġresear cher +Ġ16 0 +Ġpoor ly +Ġexperien cing +ĠP ed +ĠMan hattan +Ġfre ed +Ġthem es +ad vant +Ġn in +Ġpra ise +10 4 +ĠLib ya +b est +Ġtrust ed +Ġce ase +Ġd ign +D irect +Ġbomb ing +Ġm igration +ĠSci ences +Ġmunicip al +ĠA verage +Ġgl ory +Ġreve aling +Ġare na +Ġuncertain ty +Ġbattle field +ia o +G od +Ġc inem +ra pe +el le +ap ons +Ġlist ing +Ġwa ited +Ġsp otted +ke ley +ĠAud io +e or +ard ing +idd ing +ig ma +ĠN eg +Ġl one +Ġ ---- +ex e +d eg +Ġtrans f +Ġwas h +Ġsl avery +Ġexpl oring +ĠW W +ats on +Ġen cl +l ies +ĠC reek +Ġwood en +Man ager +ĠBr and +um my +ĠAr thur +Ġbureau cr +Ġbl end +ar ians +F urther +Ġsupposed ly +Ġwind s +Ġ19 79 +Ġgrav ity +Ġanalys es +ĠTra vel +ĠV eter +Ġd umb +Ġaltern ate +g al +Ġconsum ed +Ġeffect iveness +.' ' +Ġpath s +ond a +L A +ĠStr ong +Ġen ables +Ġesc aped +Ġ" " +Ġ1 12 +Ġ198 3 +Ġsm iled +Ġtend ency +F ire +Ġp ars +ĠR oc +Ġl ake +Ġf itness +ĠA th +ĠH orn +Ġh ier +Ġimp ose +m other +Ġp ension +ic ut +bor ne +ic iary +. _ +ĠS U +Ġpol ar +is y +eng u +itial ized +AT A +w rite +Ġexerc ises +ĠD iamond +ot ypes +Ġharm ful +on z +Ġprint ing +st ory +Ġexpert ise +ĠG er +Ġtraged y +ĠF ly +Ġd ivid +amp ire +st ock +M em +Ġre ign +Ġun ve +Ġam end +ĠProp het +Ġmut ual +ĠF ac +Ġrepl acing +H ar +ĠCirc uit +Ġthro at +ĠSh ot +Ġbatter ies +Ġto ll +Ġaddress ing +ĠMedic aid +Ġp upp +ĠN ar +ol k +Ġequ ity +M R +ĠHis pan +ĠL arge +m id +D ev +Ġexp ed +Ġdem o +ĠMarsh all +erg us +Ġf iber +Ġdiv orce +ĠCre ate +Ġsl ower +ĠPark er +ĠStud ent +ĠTr aining +Ret urn +ĠT ru +Ġc ub +ĠRe ached +Ġpan ic +Ġqu arters +Ġre ct +Ġtreat ing +Ġr ats +ĠChristian ity +ol er +Ġsac red +Ġdecl are +ul ative +et ing +Ġdeliver ing +est one +Ġt el +ĠL arry +Ġmet a +ac cept +art z +ĠRog er +hand ed +Ġhead er +Ġtra pped +ĠCent ury +Ġkn ocked +ĠOx ford +Ġsurviv ors +b ot +Ġdemon stration +Ġd irt +Ġass ists +OM E +ĠD raft +ortun ate +fol io +pe red +ust ers +g t +ĠL ock +Ġjud icial +ver ted +Ġsec ured +out ing +ĠBook s +Ġhost ing +Ġlif ted +l ength +Ġj er +Ġwhe els +ĠR ange +umbn ails +Ġdiagn osis +te ch +ĠStew art +ĠP ract +Ġnation wide +Ġde ar +Ġoblig ations +Ġgrow s +Ġmand atory +Ġsusp icious +! 
' +A pr +G reat +Ġmort gage +Ġprosecut or +Ġeditor ial +ĠK r +Ġprocess ed +ung le +Ġflex ibility +Ear lier +ĠC art +ĠS ug +Ġfoc uses +Ġstart up +Ġbre ach +ĠT ob +cy cle +ãĢ Į +ro se +Ġb izarre +ãĢ į +Ġveget ables +$ $ +Ġret reat +osh i +ĠSh op +ĠG round +ĠSt op +ĠHawai i +ĠA y +Per haps +ĠBe aut +uff er +enn a +Ġproduct ivity +F ixed +cont rol +Ġabs ent +ĠCamp aign +G reen +Ġident ifying +Ġreg ret +Ġpromot ed +ĠSe ven +Ġer u +ne ath +aug hed +ĠP in +ĠL iving +C ost +om atic +me ga +ĠN ig +oc y +Ġin box +Ġem pire +Ġhor izont +Ġbr anches +Ġmet aph +Act ive +ed i +ĠFil m +ĠS omething +Ġmod s +inc ial +ĠOrig inal +G en +Ġspir its +Ġear ning +H ist +Ġr iders +Ġsacr ific +M T +ĠV A +ĠS alt +Ġoccup ation +ĠM i +Ġdis g +lic t +Ġn it +Ġn odes +e em +ĠP ier +Ġhat red +ps y +ãĥ ī +Ġthe ater +Ġsophistic ated +Ġdef ended +Ġbes ides +Ġthorough ly +ĠMedic are +Ġbl amed +arent ly +Ġcry ing +F OR +pri v +Ġsing ing +ĠI l +Ġc ute +o ided +olit ical +ĠNe uro +å ¤ +Ġdon ation +ĠEag les +ĠG ive +T om +Ġsubstant ially +ĠLic ense +ĠJ a +Ġg rey +ĠAn imal +ĠE R +ĠU nd +Ġke en +Ġconclud e +ĠMississ ippi +Eng ine +ĠStud ios +P ress +o vers +ll ers +Ġ3 50 +ĠR angers +Ġr ou +ert o +E p +iss a +iv an +Ġse al +ĠReg ist +dis play +Ġwe aken +u um +ĠComm ons +ĠS ay +Ġcult ures +Ġl aughed +Ġsl ip +Ġtreat ments +iz able +m art +ĠR ice +Ġbe ast +Ġob esity +ĠLa ure +ig a +Wh ich +hold er +Ġelder ly +Ġp ays +Ġcompl ained +Ġc rop +Ġpro c +Ġexplos ive +ĠF an +ĠAr senal +A uthor +ef ul +Ġme als +Ġ( - +id ays +Ġimag ination +Ġann ually +Ġm s +as ures +H ead +ik h +m atic +Ġboy friend +ĠCom puter +Ġb ump +Ġsur ge +ĠCra ig +ĠKir k +D el +medi ate +Ġscen arios +ĠM ut +ĠSt ream +Ġcompet itors +Ù Ħ +ĠStan ford +ĠRes ources +az ed +b age +Ġorgan is +ĠRe lease +Ġsepar ately +Ġha bits +Ġmeasure ments +ĠCl ose +Ġaccomp any +Ġg ly +Ġt ang +ĠR ou +Ġplug in +Ġcon vey +ĠChall enge +oot s +j an +Ġcur s +ĠRel ations +ke eper +Ġapproach ing +p ing +Spe aking +Ġarrang ement +ĠV I +are ttes +Ġaffect ing +Ġperm its +b ecause +Ġu seless +ĠH us +!! !! +Ġdestro ying +Un fortunately +Ġfasc inating +S em +Ġelect oral +Ġtrans parency +ĠCh aos +Ġvolunte er +Ġstatist ical +Ġactiv ated +ro x +We b +H E +ĠHamp shire +is ive +M ap +Ġtr ash +ĠLaw rence +st ick +C r +Ġr ings +EX T +Ġoper ational +op es +D oes +ĠEv ans +Ġwitness ed +P ort +Ġlaunch ing +ec onom +w ear +ĠPart icip +um m +cul es +ĠR AM +ĠT un +Ġass ured +Ġb inary +Ġbet ray +Ġexpl oration +ĠF el +Ġad mission +it ated +S y +Ġav oided +ĠSim ulator +Ġcelebr ated +ĠElect ric +¥ ŀ +Ġcl uster +itzer land +he alth +L ine +ĠN ash +at on +Ġsp are +Ġenter prise +ĠD IS +clud es +Ġfl ights +Ġreg ards +ĠÃ Ĺ +h alf +Ġtr ucks +Ġcontact s +Ġunc ons +ĠCl imate +Ġimm ense +N EW +oc c +ect ive +Ġemb od +Ġpat rol +Ġbes ide +Ġv iable +Ġcre ep +Ġtrig gered +ver ning +Ġcompar able +q l +Ġg aining +ass es +Ġ( ); +ĠG rey +ĠM LS +s ized +Ġpros per +" ? 
+Ġpoll ing +Ġsh ar +ĠR C +Ġfire arm +or ient +Ġf ence +Ġvari ations +g iving +ĠP i +osp el +Ġpled ge +Ġc ure +Ġsp y +Ġviol ated +Ġr ushed +Ġstro ke +ĠBl og +sel s +ĠE c +,' ' +Ġp ale +ĠColl ins +ter ror +ĠCanad ians +Ġt une +Ġlabor atory +Ġn ons +t arian +Ġdis ability +ĠG am +Ġsing er +al g +ĠSen ior +Ġtrad ed +ĠWar rior +Ġinf ring +ĠFrank lin +Ġstr ain +ĠSwed ish +Ġsevent h +ĠB enn +ĠT ell +Ġsynd rome +Ġwond ered +id en +++ ++ +ig o +Ġpur ple +Ġjournal ism +Ġreb el +Ġf u +bl og +Ġinv ite +ren cies +ĠCont act +Is rael +ĠCont ent +Ġche er +Ġbed room +ĠEngine ering +ĠQue ens +Ġd well +ĠPlay Station +ĠD im +ĠCol on +l r +Ġoper ates +Ġmotiv ation +US A +ast ered +C ore +ĠTr uth +ol o +OS E +ĠMem ory +Ġpred ec +Ġan arch +Ġ19 20 +ĠY am +à ¨ +b id +Ġgr ateful +Ġexc itement +Ġtre asure +Ġlong est +ct ive +Ġdes erves +Ġreserv es +Ġcop s +ĠOtt awa +ĠEgypt ian +ank ed +Ġart if +Ġhypot hesis +: / +Ġpurch asing +Ġlove ly +H P +Ġdiv ide +Ġstrict ly +Ġquestion ing +Ġtaxp ayers +ĠJ oy +Ġroll s +ĠHe avy +Ġp orts +Ġmag netic +Ġinf lamm +Ġbr ush +t ics +â ĪĴ +Ġbott les +pp y +Ġp add +ãĤ ¯ +m illion +Ġdevast ating +Ġcomp iled +Ġmed ication +Ġtw elve +ĠPer ry +Sp ace +im b +y our +Ġle aked +ĠT ar +Ġun ity +Ġinfect ed +Ġtravel ed +ID E +ĠMc Donald +t xt +ĠPr inc +Ġinter ven +ĠTai wan +ĠP ow +Ġbe aring +ĠTh read +Ġz ones +iz ards +un ks +Ch apter +ll or +Ġ · +Ġw ounds +Ġdisc retion +Ġsucceed ed +ik ing +Ġicon ic +C all +Ġscreen ing +ĠM is +ict s +Ġmin isters +Ġsepar ation +Pl ayer +Ġb ip +Ġbel oved +Ġcount ing +ĠE ye +ar ound +ing ing +Ġtable t +Ġoff ence +in ance +h ave +ĠInf o +ĠNin ja +Ġprotect ive +ĠC ass +M ac +ĠQual ity +N orth +Ġ ic +ĠCub a +ĠChron icle +ĠPro perty +Ġfast est +ot os +ĠG erm +OW N +Ġbo om +ĠStan ley +ergus on +Ġcle ver +Ġent ers +m ode +ter ior +ĠS ens +Ġlin ear +AR K +Ġcomp aring +Ġpure ly +Ġsaf er +ĠPot ter +Ġc ups +R T +Ġgl uc +Ġatt ributed +Ġdu pl +ĠP ap +Ġprec ious +Ġp a +iction ary +ĠT ig +ĠTo o +ol utions +st an +Ġrob ots +Ġlob b +Ġstat ute +Ġprevent ion +w estern +16 0 +ĠAct ive +ĠMar ia +h al +N one +ell ar +ĠK B +ĠPart ners +ĠSing le +ĠFollow ing +ang o +ac ious +Ġth ou +Ġk g +Ġinflu ential +ĠFriend s +S ur +ain ted +Ġfor ums +Ġst arter +Ġcitizens hip +ĠE lection +on ge +ot ation +os ph +;; ;; +ut ical +p ur +ere n +Ġaccus ations +bit ious +ab bit +ĠOr d +Post ed +ir k +Ġsens itivity +ic he +ĠAm y +ĠF ab +Ġsum mit +Ġped est +Ġrub ber +Ġagric ultural +Ġcan cel +A E +Ġin aug +Ġcont am +Ġfirm ly +i w +st age +ĠK an +Ġt ier +Ġinv ention +Ġtransl ated +ĠR ules +B ox +Tw itter +ID S +Ġp izza +Ġdeb ug +ĠD rop +v s +Ġh orses +b ig +Ġb oring +Ġh ood +ĠMcC ain +at ched +ĠBro s +Ġsk ip +Ġess ay +st at +ĠLeg ends +Ġam munition +au c +Ġshoot er +Ġun h +Ġsuppl ied +Ġgener ic +ĠS K +ib an +yr ics +Ġ25 5 +Ġclim bing +Form er +Ġfl ip +Ġjump ing +Ġfrust ration +ĠTer ry +Ġneighborhood s +Ġmed ian +be an +Ġbr ains +Follow ing +Ġsh aped +Ġdraw s +Ġal tered +J ack +Ġrecip es +Ġsk illed +we alth +ach i +e lection +Ġbehavi ors +de als +ĠU ntil +F e +Ġdecl aration +mar ks +ĠBet ween +cel ona +Ġres on +Ġbub ble +Am ong +Ġim perial +G S +Ġfemin ist +200 5 +ĠK yle +Ġaccount ing +ĠTe le +ĠT yr +Ġconnect ing +Ġre hab +ĠP red +s im +Ġmeant ime +Ġphys ician +M W +ĠCamp bell +ĠBr andon +Ġcontribut ing +ĠR ule +ĠWe ight +ĠN ap +Ġinter active +Ġv ag +Ġhel met +ĠCom b +f our +Ġsh ipped +Ġcomple ting +ĠP D +PD ATE +Ġspread ing +Ġsc ary +erv ing +ĠG as +Ġfr ank +s chool +Ġrom antic +Ġstab il +R ob +Ġaccur ately +Ġac ute +ĠH ann +Ġsymbol s +Ġcivil ization +ĠA W +Ġlight ning +Ġcons iders +Ġven ue +Ġ × +Ġo ven +ĠS F +h 
is +Ġn u +ĠLear n +Ġpe oples +Ġst d +Ġsle e +Ġs lic +ĠStat istics +Ġcor ners +ĠB aker +Ġ: ) +ment ation +ol ver +Ġlaugh ing +ĠT odd +ond e +ĠH ills +Ġn uts +ĠW oman +pl ane +Ġl iver +ĠIn side +S orry +Ġagre es +Ġfund ament +ĠF isher +Ġa uction +Ġthread s +gl as +ĠBas ic +ĠN at +Ġlack ing +Ġceleb ration +j u +Ġs illy +E uro +Ġt att +ight y +cont rolled +T est +ĠSing h +Ġr age +Ġrh yth +o ffic +ĠPh antom +Ġhead lines +Ġrespond ing +ĠMor ning +Ġvit amin +Ġboot s +ĠS ite +al in +p i +Ġvir al +ĠU C +D ER +ĠSe x +Ġst ocks +c urrent +Ġch urches +ĠR are +ĠMur phy +Ġden ial +ĠG aming +Ġtou g +Ġn ick +Ġm akers +ĠRon ald +Ġgener ous +ĠD oc +ĠMor ris +Ġtransform ed +ĠN ormal +Ġ10 4 +ĠKick starter +ĠUp on +On line +ĠI RS +Ġw rap +Ġl oving +Ġarri ves +ĠD ue +Ġhe ter +ĠM ade +Ġrent al +Ġbelong s +Ġatt orneys +Ġcro ps +Ġmat ched +ul um +ol ine +10 9 +Ġdis par +Ġbuy ers +ĠCam bridge +Ġeth ics +rou ps +Ġjust ified +Ġmarg inal +Ġrespect ed +win ning +Ġnodd ed +ĠSer ge +ĠForm er +C raft +######## ######## +ĠWar ner +Ġd ash +et e +Ġent ert +ĠE scape +out heast +Ġkn ees +ĠB omb +Ġr ug +P ass +Ġatt itudes +go vernment +ĠPri or +Ġqual ities +Ġnot ification +ĠPh one +l ie +Ġanticip ated +ĠCom bat +ĠBar ry +Ġ198 2 +Us ers +on er +Ġcomput ing +ĠConnect icut +Ġless er +Ġpe ers +ĠC u +Ġtechn ically +Ġsub mission +ĠUn iversal +Ġman ually +our ge +Ġrespond ents +ĠB TC +ĠH ost +Ġf are +ĠB ird +Ġrece ipt +al so +Ġj ack +Ġagric ulture +Ġsk ull +Ġ! = +Ġpass ive +ĠC I +Ġsoc ieties +Ġremind ed +Ġinter ference +B uy +Ġâ ľ +g on +Ġscrut iny +ĠW itch +Ġconduct ing +Ġ ãĥ +Ġexch anges +ĠMit chell +Ġinhab it +Ġtw ist +B D +Ġwhere ver +group on +Ġj okes +ĠBen jamin +ĠR andom +fr ame +ĠL ions +Ġhighlight ed +ĠArk ansas +E nt +Ġp ile +Ġpre lim +g s +mind ed +Ġfel ony +ĠG A +ĠL uck +Ġpract ically +ĠB os +Ġact ress +D am +ĠB ou +Ġvis a +Ġembed ded +Ġhy brid +Ġear liest +Ġsoon er +s ocial +ĠH A +Ġste ep +Ġdis advant +Ġexplo it +ĠE gg +ĠUlt ra +Ġnecess ity +L ocal +ie ge +Ġd ated +Ġmass es +Ġsubsc ription +pl ess +Ġan onym +Ġpresum ably +Bl ue +The ir +asket ball +ĠPhil ip +Ġcom ed +load ed +r ane +Ġref lection +Ch ina +Ġext ends +Ġform ing +Ġund ers +200 1 +Ġgr at +Ġconcent rations +Ġins ulin +Ġsec ular +Ġwh ilst +Ġwin ners +Ad vertisements +Ġdeliber ately +ĠWork ing +Ġs ink +et ics +d ale +Ġmand ate +Ġg ram +Ġvac ation +Ġwarn ings +ri pp +ĠTH AT +Ġcomment ary +Ġint u +Ġa est +Ġreason ing +Ġbreak down +ĠZ ombie +Ġ-- > +ĠPolit ical +c ott +Ġthr ust +Ġtechn ological +Ġdec iding +Ġtraff icking +L ong +W elcome +pr ising +ĠCommun ications +Ġend ors +Ġsw ift +Ġmetab ol +co ins +res a +ĠHT TP +Ġen roll +ĠH appy +us r +int age +Ġ[ " +u ably +ĠM aterial +Ġrepe al +Se pt +k h +ĠMod i +Ġunder neath +ĠI L +sh ore +Ġdiagn osed +ace utical +Ġsh ower +au x +ĠSw itch +ĠStre ngth +Ġj ihad +n ational +Ġtra uma +uss y +on i +Ġcons olid +Ġcal ories +ĠF lynn +ag ged +16 8 +ĠP ink +Ġfulf ill +Ġch ains +Ġnot ably +ĠA V +L ife +ĠCh uck +m us +ĠUr ban +ĠH end +Ġdep osit +ĠS ad +Ġaff air +OR K +ie val +ĠF DA +Ġt rop +ĠOver all +Ġvirt ue +Ġsatisf action +au nd +Ġl un +ĠSw itzerland +ĠOper ation +pro cess +Ġsh ook +Ġcount ies +le ased +ĠCharl otte +1 12 +Ġtrans cript +Ġre dd +p ush +ĠHe y +ĠAn alysis +[ " +Ġaltern atives +ard less +Ġele ph +Ġpre jud +ĠLe af +H aving +ĠH ub +Ġexpress ions +ĠVol ume +Ġshock ing +ĠRed s +Ġread ily +Ġplan ets +ad ata +Ġcollaps ed +ĠMad rid +Ġir rit +i pper +ĠEn c +ĠW ire +Ġbu zz +ĠG P +ash a +Ġaccident ally +ur u +Ġfrust rated +ĠS A +Ġhung ry +ĠH uff +Ġlab els +ant o +ĠE P +Ġbar riers +) | +ĠBer keley +ĠJ ets +Ġp airs +ĠL an +J ames 
+ĠB ear +Ġhum or +ĠLiber ty +Ġmagn itude +Ġag ing +ĠM ason +Ġfriends hip +umb ling +Ġemer ge +Ġnewsp apers +Ġam bitious +ĠRich ards +atern al +Ġ198 1 +Ġcook ies +Ġsc ulpt +Ġpur suit +L ocation +Ġscript s +p c +Ġarrang ements +Ġd iameter +Ġl oses +am ation +Ġl iqu +ĠJ ake +aret te +Ġunderstand s +ĠZ en +v m +Ġappro ve +Ġw ip +Ġult ra +Ġint end +ĠD I +asc ular +Ġst ays +ĠK or +ĠK l +Ġinvest ing +L a +Ġbelie ving +b ad +m outh +Ġtaxp ayer +ãĥ ĥ +ĠQue bec +Ġl ap +ĠSw iss +d rop +Ġdr ain +ir i +et c +ft en +ĠN ex +Ġst raw +Ġscream ing +Ġcount ed +Ġdam aging +Ġamb assador +cent ury +Ġpro x +Ġarrest s +u v +il ateral +ĠCh arg +Ġpresc ribed +Ġindepend ently +Ġf ierce +ĠB aby +Ġb rave +Ġsu its += > +Ġbas eline +ĠR ate +Ġis lands +Ġ( ( +g reen +ix els +Ġname ly +ĠVill age +th an +am y +V ersion +g mail +ential s +ĠS ud +ĠMel bourne +Ġarri ving +Ġquant um +e ff +rop olitan +T ri +Ġfun eral +ĠI R +ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ +ĠC ob +it ably +Ġt urb +Ġcomb o +Re view +Ġdeploy ment +u ity +ĠB ott +Ġinv isible +Ġrender ing +Ġunl ocked +Ġa qu +ĠVlad imir +Ġp ad +ĠBr ain +ĠLeg acy +dr agon +ĠKurd ish +Ġsound ed +Ġdet ained +ĠD M +g ary +Ġd aughters +Ġdistur bing +uk a +ĠPar ad +Ġt ast +Ġunf ortunate +Ġu l +em in +Ġattend ance +tr l +Ġpar ks +ĠMem orial +ĠAl ice +oth y +gu ard +ĠD ise +ĠSh an +ĠFor um +R ich +Ġshif ted +ue z +Ġl ighter +ĠMag n +Ġc od +S ch +ham mad +P ub +3 50 +ĠP okemon +Ġprot otype +Ġun re +B ase +ĠStud ents +ĠRep ly +ĠCommun ist +Ġg au +ĠTy ler +I Z +Ġparticip ated +Ġsup rem +ĠDet ails +Ġvessel s +ro d +Ġt ribe +ke ep +Ġassum ptions +Ġp ound +Ġcr ude +ĠAv ailable +Ġswim ming +Ġin clusion +Ġadv ances +c ulation +Ġconserv ation +Ġover d +ĠBuff alo +Art icle +ed ge +Ġaw a +ĠMad ison +Ġsid ew +Ġcat ast +ĠK rist +uc le +ĠHigh way +ĠTer ror +Ġactiv ation +Ġuncons cious +ĠSat an +ĠSus an +ill ery +Ġarr anged +i op +Ġrum ors +ur ring +th ink +ĠKe ith +ĠK ind +Ġavoid ing +by n +n ut +ĠSpe aker +r us +n ames +Ġgu ilt +ĠOlymp ics +Ġsa il +ĠM es +lev ant +ĠColumb us +a ft +C ity +S outh +ĠHar vey +ĠP un +S everal +Ġment ally +Ġimp ress +m ount +ĠUb untu +âĢĶâĢĶâĢĶâĢĶ âĢĶâĢĶâĢĶâĢĶ +ĠSuper man +ĠMP s +Ġintent ions +ĠR acing +Ġlike lihood +Ġ2 40 +T otal +Ġto ys +ĠW atson +Ġur ge +L ear +ĠP aper +Ġoccur ring +ĠB eng +ĠC ert +Ġst ones +T im +ĠTw in +z b +ĠD ynam +Ġpolit ician +k ens +ĠEnter prise +UT ERS +Ġab ol +Ġref resh +Ġarbit rary +pe ction +Ġtrou bles +Ġ} ); +t v +Ġpil ots +Ġdist ribute +Ġaud it +Ġp ause +orig inal +Ġr ivals + £ +F ig +T L +ab il +ry ing +L in +ion ed +l on +Ġf ancy +Ġcr ashed +Ġt ract +Ġshe d +Ġcons ume +B ased +down load +in it +Ġvolt age +Int rodu +Ġcondem ned +ĠFin ance +res pect +Ġex cluded +Ġestablish ing +her ic +Ġher itage +Ġspect acular +Ġun st +ĠSnow den +ĠL ane +S an +Ġprotect ions +st ruction +inc inn +Ġmac ro +C ustom +ios ity +Ġes p +Ġfunction ing +Ġm ush +Ġp uzzle +Ġeth ical +M al +Ġgo verning +ĠF erguson +Ġrest ored +Ġst ressed +ĠCoun ter +ĠK as +cl ip +AN S +Ġse iz +U K +by ss +old own +ap i +Ġperman ently +oun ters +W est +Th rough +L ight +at oes +Ġne at +Ġc ord +ure r +Ġsevere ly +ĠA ven +Ġinter rog +Ġtri ple +G iven +N umber +Ġar ise +Ġs her +pl ant +Ġfl ower +ĠC ou +Ġat e +Ġnew er +b ul +Ġmean while +ĠL air +Ġadjust ment +ĠCop yright +Ġd ivers +i ological +Ġgam ers +o at +Ġhistor ically +Ġanal og +Ġlong time +Ġpres cription +ĠM ist +ĠHy per +ĠM aine +ĠDe ity +Ġmulti pl +ĠRe incarn +ĠH yd +ĠP ic +S il +r ants +ĠC ris +. 
; +( { +epend ence +Ġrec y +ate ur +Ġqu ad +Ġgl ob +Ġcon ced +te am +Ġcapital ist +ĠL ot +Ġroy al +ĠCy ber +Ġblack s +met ic +ri v +ĠD anny +Ġsp o +ĠR O +Ġanim ated +rypt ed +ĠDep uty +Ġrend ered +F E +Ġstre ak +Ġcloud s +ĠDou g +~~~~ ~~~~ +Ġdisc our +ĠVe h +Ġpsych ology +ĠJ ourney +Ġcry stal +ĠFro st +Ġsuspic ion +Ġrel ate +or us +ĠC rypt +ĠN VIDIA +com ed +ut ing +incinn ati +Ġvulner ability +ost ic +Ġisol ation +Ġcool ing +ĠCoal ition +Ġ1 19 +F our +ĠDe al +Ġâ ī +se mble +ram ent +ĠBar celona +Ġ10 2 +Ġcoc aine +ocaly pse +F eb +ogen ic +Ġmut ation +Ġcrypt oc +ĠK el +ĠG it +a is +Ġs isters +AN K +Ġactiv ate +T er +Ġd read +yl on +Ġprop ri +A ust +ĠDef ault +Ġout door +Ġshe er +ce ive +Ġg ently +Ð ¾ +Pro gram +Ġâ ĨĴ +Ġve gan +ĠCr us +Ġrespons ibilities +ĠH R +OL D +Ġprev ents +Ġst iff +ĠW ere +Ġathlet ic +ĠSc ore +Ġ) : +Ġcolumn s +ĠL oc +av ailable +ĠF ram +ĠS essions +Ġcompan ion +Ġpack s +14 0 +ĠKn ights +Ġf art +Ġstream s +Ġsh ore +Ġapp eals +ĠPer formance +h aul +ĠSt ra +ĠN ag +10 3 +ĠTrans portation +B B +E v +z an +P ublic +Ġtw in +uls ion +M ult +Ġelect ro +Ġstat ue +ation ally +ĠN ort +Ġins pection +/ * +ig ue +Ġcomp assion +ĠT ales +ĠSte in +ĠSc reen +ĠB ug +ĠL ion +g irl +Ġwithdraw al +Ġobject ives +Ġblood y +Ġprelim inary +Ġj acket +Ġdim ensions +ĠC ool +ĠOcc up +Ġw reck +Ġdoub led +ank ing +Ġ19 75 +Ġglass es +ĠW ang +pro v +P ath +connect ed +ĠMult i +ĠNor way +agon ist +Ġfe ared +Ġtouch ing +Ġarg uably +¯¯¯¯ ¯¯¯¯ +ĠNC AA +che m +Ġsp at +ĠW WE +ĠC el +ig ger +Ġattack er +ĠJo in +ob ject +ett a +Ġelim inated +d et +Ġdest ruct +ĠLuc as +ct uary +18 0 +ĠBr ady +ĠBl ues +B ay +au kee +Ġtim eline +Ġdeleg ates +w ritten +uff icient +Ġsh apes +Cop yright +ou ble +serv ice +Ġp ione +Ġcolleg es +Ġrow s +Ġsp ite +Ġassess ed +3 60 +Ġle ase +Ġconfident ial +ck er +ĠMan ning +ĠV oice +Ġse aled +Ġcalcul ate +N O +ĠAss istant +Ġteen ager +ul ent +ather ine +Ġm ock +Ġd iamond +Ġf est +Ġsw itched +Ġres ume +ĠPu erto +Ġl anes +ir ation +ĠSimilar ly +Ġro d +ĠS el +ĠPal ace +ĠLim ited +e ous +Ġvar iant +Ġw ard +Ġ) ) +Sh ow +OO K +A lex +ĠN ep +br is +ĠWik ipedia +Ġexcept ional +Ġman ages +ĠD raw +Ag ain +Ġco pper +ut t +Ġex ports +Ġport folio +Ġelev ated +R ated +ĠOther wise +ĠT act +ĠShe l +ĠT X +" âĢĶ +Ġres ur +ĠW a +ven ant +Ġmon etary +pe ople +E mail +Ġfif ty +ĠS weet +ĠMalays ia +Ġconf using +ĠR io +ud a +uten ant +" ); +Ġpra ised +Ġvol umes +t urn +Ġm ature +Ġnon profit +Ġpassion ate +ĠPriv ate +Ġ10 3 +Ġdesc end +ç ¥ŀ +uff y +head ed +Whe ther +ri en +ze ch +be it +Ġch rom +ĠMc M +Ġd ancing +Ġe leg +ĠNot iced +11 5 +Ġadvoc acy +ENT S +amb ling +ĠMin or +ĠF inn +Ġprior ities +Ġthere of +ĠSt age +ĠRog ers +Ġsubst itute +ĠJ ar +ĠJeff erson +Ġlight ly +10 2 +ĠL isa +u its +ys ical +Ġshif ts +Ġd rones +Ġwork place +Ġres id +ens ed +ah n +Ġpref erences +ser ver +Ġdeb ates +d oc +ĠGod s +Ġhelicop ter +Ġhon our +Ġconsider ably +ed ed +ĠF emale +ĠAn ne +Ġre un +ĠF ace +ĠHall ow +ĠBud get +Ġcondem n +Ġt ender +Pro f +ocr atic +ĠTurn er +ĠAg ric +Ġ19 76 +Ġa pt +d isc +ĠF ighter +ĠA ur +Ġgar bage +in put +ĠK arl +ĠOl iver +ĠL anguage +k n +N on +ĠCl ar +Ġtrad itions +Ġad vertisement +ĠS or +Ġarch ive +Ġvill ages +7 50 +Ġimplement ing +w aukee +Ġdiet ary +Ġswitch ing +Rep ublic +Ġvel ocity +Ġc it +ĠA wards +Ġfin ancing +Ġlast ed +) ] +Ġrem inder +P erson +Ġprec ision +Ġdesign ers +ĠF ried +ĠB order +Ġtr agic +Ġw ield +Ġiniti atives +ĠT ank +w er +Ġjo ins +R o +in ery +Ġar row +Ġgener ating +found er +Ġsear ches +Ġrandom ly +A ccess +Ġb atch +Ġp osed +l at +Ġpursu ing +as a +Ġtest ified +form ing +ĠSh 
ar +w iki +ĠE ither +S ometimes +Ġsen ators +ĠJohn ny +ĠTal iban +ĠG PS +":" / +ãģ® å +Ġanaly zed +ĠRub io +ĠMove ment +op ard +ii i +St and +f ight +Ġign oring +i ang +ĠG N +so ever +ĠST AT +Ġref using +Ġswe at +Ġb ay +P ORT +ir med +ak y +Ġdis pro +Ġlabel ed +Ġ10 8 +H ello +Ġple asant +ab a +Ġtri umph +Ġab oard +Ġinc om +ĠC row +le tt +Ġfol k +Ġch ase +` ` +ĠBr us +Ġte ens +c ue +Ġter rain +h yd +il ight +OR Y +Su pport +ew s +ll i +rain ts +ĠC and +Ġab used +ach ment +l arg +B as +ĠC ancer +Ġ19 78 +Ġsupp orter +ac cess +ĠTer min +ĠT ampa +ĠAN Y +Ġnew est +ĠCrim inal +ed u +Ġ19 30 +Ġadm its +Ġend e +Ġfail ures +ur ate +ful ness +cy cl +ĠSub ject +Ġinf inite +th ree +W A +p it +ĠInst all +R ad +ili ation +G M +Ġcontin ent +Ġaccommod ate +ĠCl ay +Ġp up +ĠF unction +Ġham mer +ĠAlbert a +Ġrev ised +Ġminor ities +Ġmeasure ment +Con nell +Ġdis able +ĠM ix +In cre +Ġfor k +ĠR osen +Ġimpl ies +umb lr +AN G +Ġprote ins +Ġagg ression +Ġfacilit ate +S N +Ġilleg ally +u er +Ġacad em +Ġp uzz +ĠSh ift +p ay +oll o +Ġaud iences +B uild +Ġno ble +Ġsynt ax +â ĺħ +Ġbe am +ĠB ed +ĠA ld +Ġorig ins +v ideo +Ġ19 77 +ĠAss ault +Ġgar age +Te am +Ġver dict +Ġd war +ĠVirt ual +e vent +Ke ep +Ġsent iment +Ġwild life +sh irt +Ġb urg +Ġrecommend ation +rep resent +Ġgall ery +own ers +Ġsch olar +Ġconven ience +ĠSw ift +Ġconv inc +C ap +Ġwar fare +ĠVis ual +Ġconst itute +Ġab ort +ĠWe ather +ĠLook ing +ĠH em +Ġmart ial +Ġinc oming +et ition +Ġtoler ance +ĠCre ated +Ġfl ows +ĠE lder +Ġsoul s +Ġf oul +ĠP ain +ĠC AN +Ġ2 20 +b c +he nd +Ġgen ius +R eal +ĠW r +omet er +p ad +Ġlim iting +ĠS i +ĠL ore +ĠAd ventures +Ġvar ied +D isc +f in +ĠPerson al +Ch ris +Ġinv ented +Ġd ive +ĠR ise +Ġo z +ĠCom ics +Ġexp ose +ĠRe b +let ters +s ite +im ated +Ġh acking +Ġeduc ated +ĠNob ody +Ġdep ri +Ġincent ive +ãĤ · +Ġovers ight +Ġtrib es +ĠBelg ium +Ġlicens ing +our t +Produ ct +ah l +ĠG em +Ġspecial ist +Ġc ra +ann ers +ĠCor byn +Ġ19 73 +RE AD +Ġsum mar +Ġover look +ĠApp lication +Ġin appropriate +Ġdownload ed +Q ue +ĠB ears +Ġth umb +ĠChar acter +ĠReincarn ated +ĠS id +Ġdemonstr ates +s ky +ĠBloom berg +ĠAr ray +ĠRes ults +ĠFour th +ĠED T +ĠO scar +c end +Ġ10 6 +ĠN ULL +ĠH ERE +m atch +ĠBr un +Ġgluc ose +ie g +eg u +Ġcert ified +Ġrel ie +Ġhuman itarian +Ġpr ayers +K ing +Ġn an +h ou +10 8 +ul u +Ġrenew able +Ġdistingu ish +Ġd ense +ĠV ent +ĠPack age +ĠB oss +Ġedit ors +Ġm igr +T ra +ĠPet ers +ĠAr ctic +200 4 +ĠC ape +Ġloc ally +Ġlast ing +Ġhand y +. ). 
+P an +ĠR ES +Ind ex +Ġt ensions +Ġformer ly +Ġide ological +Ġsens ors +Ġdeal ers +Ġdef ines +S k +Ġproceed s +Ġpro xy +az ines +ĠB ash +ĠP ad +ĠC raft +eal ous +Ġshe ets +omet ry +J une +cl ock +T T +ĠThe atre +ĠB uzz +Ġch apters +Ġmill enn +Ġd ough +ĠCongress ional +Ġimag ined +av ior +Ġclin ic +Ġ19 45 +Ġhold er +ro ot +oles ter +Ġrest art +B N +ĠHam as +ĠJ ob +Ġor b +Ġr am +Ġdiscl ose +Ġtransl ate +Ġimm igrant +Ġannoy ing +Ġtreat y +an ium +ĠTe a +ĠLeg ion +Ġcrowd s +ĠB ec +ĠA er +oh yd +B ro +Look ing +Ġl bs +Ġagg ress +Ġse am +Ġinter cept +ĠM I +mer cial +act iv +ĠC it +Ġdim ension +Ġconsist ency +Ġr ushing +ĠDou glas +Ġtr im +Inst all +ick er +Ġsh y +10 6 +Ġment ions +pe lled +ĠT ak +c ost +Ġclass room +Ġfort une +dri ven +Ġun le +ĠWhe el +Ġinvest or +ĠM asters +k it +Ġassoci ations +ĠEv olution +op ing +us cript +Ġprov incial +ĠWal ter +av i +S O +Ġun limited +Eng lish +ĠC ards +ĠEb ola +ne red +Ġreven ge +Ġout right +um per +Ġf itting +ĠSol id +Ġform ally +Ġproblem atic +Ġhaz ard +Ġenc ryption +Ġstraight forward +ĠA K +Ġp se +ĠOr b +ĠCh amber +ĠM ak +Cont ents +Ġloyal ty +Ġl yrics +ĠSy m +Ġwel comed +Ġcook ed +Ġmon op +Ġn urse +Ġmis leading +Ġe ternal +Ġshif ting +Ġ+ = +V is +Ġinst itutional +ill ary +Ġp ant +VER T +ĠA CC +ĠEn h +Ġinc on +ĠRE UTERS +Ġdon ated +âĢ¦âĢ¦ âĢ¦âĢ¦ +In tern +Ġexhib it +Ġt ire +ĠR ic +ĠCh ampion +ĠMu hammad +N ING +ĠSoc cer +Ġmob ility +Ġvary ing +ĠM ovie +Ġl ord +o ak +F ield +Ġve ctor +us ions +Ġsc rap +Ġen abling +m ake +T or +. * +| | +ĠWe bsite +ĠN PC +Ġsocial ist +ĠBill y +ĠAdd itional +Ġc argo +Ġfar ms +ĠSo on +ĠPri ze +Ġmid night +Ġ9 00 +se en +ĠSp ot +Ġshe ep +Ġspons ored +ĠH i +ĠJ ump +Ġ19 67 +Micro soft +ĠAg ent +Ġch arts +d ir +Ġadj acent +Ġtr icks +Ġman ga +Ġex agger +/ > +foot ball +ĠF CC +G C +ĠT ier +and ra +OU ND +% ), +Ġfru its +V C +ĠA A +R ober +Ġmid st +â Ĺ +ank a +Ġlegisl ature +ĠNe il +Ġtour ists +" " +ĠWar ning +ĠNever theless +ĠOffic ial +ĠWh atever +Ġm old +Ġdraft ed +Ġsubst ances +Ġbre ed +Ġt ags +ĠT ask +Ġver b +Ġmanufact ured +com ments +ĠPol ish +Pro v +Ġdetermin es +Ob ama +k ers +Ġutter ly +Ġse ct +sc he +ĠG ates +ĠCh ap +Ġal uminum +Ġz ombie +ĠT ouch +ĠU P +Ġsatisf y +Ġpred omin +asc ript +Ġelabor ate +Ġ19 68 +Ġmeas uring +ĠV ari +any ahu +Ġs ir +ul ates +id ges +ick ets +ĠSp encer +T M +oub ted +Ġpre y +Ġinstall ing +ĠC ab +re ed +re ated +Su pp +Ġwr ist +ĠK erry +10 7 +ĠK le +ĠR achel +Ġc otton +ĠA RE +ĠE le +Cont rol +Ġload s +ĠD od +an as +b one +Ġclass ical +ĠReg ional +ĠInt eg +V M +Ġdes ires +Ġaut ism +support ed +ĠM essage +Ġcomp act +writ er +Ġ10 9 +ĠHur ricane +c ision +Ġcy cles +Ġdr ill +Ġcolle ague +Ġm aker +G erman +Ġmist aken +S un +ĠG ay +Ġwhat soever +Ġsell s +ĠA irl +l iv +ĠO ption +Ġsol ved +Ġse ctors +Ġhorizont al +Ġequ ation +ĠSk ill +ĠB io +g ement +ĠSn ap +ĠLeg al +Ġtradem ark +Ġmake up +Ġassemb led +Ġsa ves +ĠHallow een +ĠVer mont +ĠFR OM +Ġfar ming +ĠP odcast +accept able +ĠHig her +Ġas leep +ull ivan +Ġrefere n +ĠLe v +Ġbul lets +ok o +H C +Ġst airs +Ġmain tains +ĠL ower +ĠV i +Ġmar ine +Ġac res +Ġcoordin ator +ĠJ oh +Ġcounterpart s +ĠBrother s +Ġind ict +b ra +Ġch unk +Ġc ents +H ome +ĠMon th +Ġaccording ly +if les +ĠGerm ans +ĠSy n +H ub +Ġey eb +âĶĢâĶĢ âĶĢâĶĢ +Ġr anges +ĠHoll and +ĠRob ot +f c +M ike +Ġpl asma +Ġsw ap +Ġath lete +ĠR ams +,' " +Ġinfect ions +Ġcor rid +Ġv ib +Ġpat ches +Ġtradition ally +Ġrevel ation +Ġswe ep +Ġgl ance +Ġin ex +200 3 +ĠR aw +work ing +os ures +ĠD at +ĠLyn ch +Ġle verage +ĠRe id +Ġcorrel ation +ian ces +av ascript +Ġrep ository +ret ty +Ġ19 72 +24 0 +Ġo un +p ol +ĠRe 
ed +Ġtact ical +is ite +App le +ĠQu inn +Ġrap ed +ill o +Euro pe +Ġalgorith ms +ĠRod rig +i u +Ġill um +Ġf ame +Ġintrodu cing +Ġdel ays +ĠRaid ers +Ġwh istle +Ġnovel s +ĠRe ally +Ġder iv +Ġpublic ations +ĠNe ither +ĠCom merce +Ġa ston +l anguage +Not es +ĠR oth +ĠF ear +Ġm ate +Ġpar ade +ĠQ B +Ġman eu +ĠC incinnati +m itting +Ġwa ist +ĠR ew +Ġdisc ont +Ð ° +Ġst aring +Ġal ias +Ġsec urities +Ġtoile t +ĠJ edi +Ġun law +v ised +//// //// +] ( +ĠWe iss +Ġpre st +ĠComp an +Ġmem o +ĠGr ace +J uly +ĠEl ite +cent er +ĠSt ay +Ġgal axy +Ġto oth +ĠS ettings +Ġsubject ed +ãĤ ¦ +Ġline back +Ġretail ers +ĠW ant +Ġd angers +A ir +Ġvolunt ary +ew ay +Ġinterpret ed +ot ine +à § +Ġp el +Serv ice +ĠEvent ually +Ġcare ers +Ġthreat en +Ġmem or +ĠBrad ley +anc ies +s n +ĠUn known +N ational +Ġsh adows +ail and +ĠD ash +Every one +izz ard +M arch += ( +Ġpull s +Ġstr anger +Ġback wards +ĠBern ard +imens ional +Ġch ron +Ġtheoret ical +k top +Ġw are +ĠInvest ig +ĠIn iti +ĠOper ations +o ven +oc ide +* / +Ġfl ames +ĠC ash +sh it +Ġc ab +ĠAn aly +ĠSe ah +Ġdefin ing +Ġorder ing +Ġimm un +Ġpers istent +AC H +Russ ian +m ans +Ġh ind +Ġphot ography + © +Ġh ug +Ġ10 7 +ĠH ence +i ots +ude au +Ġsubsid ies +Ġroutine ly +ĠDev ice +it ic +Ġdisg ust +land er +Ġ19 40 +Ġassign ment +ĠB esides +w ick +ĠD ust +us c +struct ed +11 1 +de velop +Ġf ond +Ġinter section +Ġdign ity +Ġcommission er +With out +re ach +Ġcart oon +Ġsc ales +ãĥ Ń +F IG +Ġsurve ys +ĠIndones ia +Ġart work +Ġun ch +Ġcy cling +un ct +au er +or ate +ĠOb viously +Ġcharacter ized +fe ld +Ġaff irm +Ġinn ings +Ġ é +Ġal iens +Ġcl oth +et ooth +ĠC ertain + § +Ġdig est +k now +ĠX L +Ġpredict ions +Ġd in +W AR +Ġafter math +Ex ample +ĠSu ccess +ĠTh r +IG N +Ġmin er +B us +Ġcl arity +heim er +ĠO UT +ĠS end +ĠCirc le +ĠD iet +Ġpron ounced +Ġcreat ors +Ġearthqu ake +atter y +ge ons +Ġo d +Ġlay ing +or p +U lt +pro ject +Ġunder min +Ġsequ el +S am +ĠDark ness +Ġre ception +b ull +Y S +ĠV ir +Ġsequ ences +ĠCo in +Ġout fit +ĠW ait +1 19 +Ġdel ivers +.... .. 
+Ġbl own +ĠE sc +ĠM ath +per m +ĠU l +Ġgl im +Ġfac ial +Ġgreen house +Ġto kens +/ - +ĠAnn ual +ĠON E +Ġteen age +ĠPhys ical +ĠL ang +ĠC elt +Ġsu ed +ivid ually +Ġpat ience +ch air +reg ular +Ġa ug +in v +ex cept +ĠL il +Ġn est +f d +s um +ĠCh ase +Russ ia +ĠJenn ifer +Ġoff season +Over all +F ore +Ġr iot +A ud +form er +Ġdefend ers +ĠC T +iot ic +rib ly +Ġautom ated +Ġpen is +Ġins ist +Ġdi agram +ĠS QL +ĠG arc +Ġw itch +cl ient +ier ra +am bers +Ġrec ount +f ar +V ery +oster one +Ġappreci ated +ĠPer fect +S ection +Ġd oses +oca ust +Ġcost ly +Ġg rams +ĠSh i +Ġwrest ling +Ġ19 71 +Ġtro phy +Ġn erve +ĠK az +ĠExper ience +Ġpled ged +Ġplay back +Ġcreat ivity +by e +Ġattack ers +Ġhold ers +ĠCo ach +ĠPh D +Ġtransf ers +Ġcol ored +ĠH indu +Ġd rown +Ġlist ened +ĠW A +ias m +P O +Ġappeal ing +Ġdiscl osed +ĠCh icken +ag ging +Ġple aded +Ġnav igation +ĠReturn s +Ġ[ [ +R OR +E A +Ġphotograp her +ĠR ider +ipp ers +Ġsl ice +Ġe rect +Ġhe d +iss ance +ĠVik ings +ur ious +Ġapp et +oubted ly +Ch ild +Ġauthent ic +o os +ĠM aking +Ġannoun cing +Ġb od +Ġmet er +ĠN ine +ĠR ogue +Ġwork force +Ġrenew ed +Ġorganis ations +ac s +P LE +Sh ort +Ġcomp ounds +ĠVis it +Ġen velop +ear th +Ġsupport ive +gg le +ĠBrus sels +ĠGu ild +Cre ate +RE L +Ġaver aged +Ġ19 69 +ri ages +Ġlength y +Ġforg ot +O kay +ĠE rd +Ġdeal er +Ġrec ession +D D +Ġdesper ately +Ġhun ger +Ġst icks +Ġm ph +ĠF aith +Ġintention ally +Ġdem ol +ue ller +ĠS ale +Ġde bris +s pring +Ġle ap +>> >> +Ġcontain ers +se lling +rane an +atter ing +Ġcomment ed +ĠC M +on ut +Ġwood s +es pecially +Ġorgan ize +iv ic +ĠWood s +ang a +s qu +Ġm aj +am on +Ġax is +Ġ19 74 +ĠDen mark +Ġwar rior +ĠP and +Ġout lined +ĠB O +ins ula +z illa +eb ook +Ġd are +Ġsear ched +Ġnav igate +S n +writ ing +Ġun ited +J apan +ĠHe brew +Ġfl ame +Ġrel ies +Ġcatch ing +ĠSh o +Ġimprison ment +Ġp ockets +Ġclos ure +ĠF am +t im +ade qu +Act ivity +Ġrecru iting +ĠW ATCH +ĠArgent ina +d est +Ġapolog ize +or o +Ġlack s +Ġtun ed +ĠGriff in +Ġinf amous +Ġcelebr ity +ss on +Ġ ---------------------------------------------------------------- +ĠIs is +ĠDis play +Ġcred ibility +Ġeconom ies +Ġhead line +ĠCow boys +Ġind ef +Ġl ately +Ġincent ives +but ton +ĠM ob +A ut +Ġres igned +ĠO m +c amp +Ġprof iles +Ġsche mes +olph ins +ay ed +Cl inton +en h +ĠY ahoo +Ġab st +Ġan k +su its +Ġw ished +ĠMar co +udd en +Ġsp here +ĠB ishop +Ġincorpor ated +ĠPl ant +11 4 +Ġh ated +p ic +Ġdon ate +Ġl ined +Ġbe ans +Ġsteal ing +Ġcost ume +Ġsher iff +Ġfor ty +Ġint act +Ġadapt ed +Ġtrave lling +b art +Ġnice ly +Ġdri ed +Ġsc al +os ity +NOT E +ĠB h +ĠBron cos +ĠI gn +Ġint imate +Ġchem istry +Ġopt imal +D eb +ĠGener ation +Ġ] , +ich i +ĠW ii +ĠYOU R +vent ions +W rite +Ġpop ul +un ning +ĠW or +V ol +Ġqu een +head s +K K +Ġanaly ze +op ic +ear chers +Ġd ot +leg raph +ast ically +Ġupgr ades +Ġca res +Ġext ending +Ġfree ze +Ġin ability +Ġorg ans +Ġpret end +Ġout let +11 3 +ol an +ĠM all +ul ing +t alk +Ġexpress ing +ĠAl ways +ĠBe gin +f iles +Ġlic enses +% % +ĠM itt +Ġfil ters +ĠMil waukee +G N +Ġunf old +M o +Ġnut rition +pp o +B o +Ġfound ing +Ġunder mine +Ġeas iest +ĠC zech +ĠM ack +Ġsexual ity +ĠN ixon +W in +ĠAr n +ĠK in +ãĤ £ +ic er +Ġfort un +Ġsurf aces +agh d +Ġcar riers +ĠP ART +ĠT ib +Ġinter val +Ġfrust rating +ĠSh ip +ĠAr med +ff e +Ġbo ats +ĠAb raham +in is +Ġsu ited +th read +i ov +ab ul +ĠVenezuel a +Ġto m +su per +Ġcast le +alth ough +iox ide +ec hes +Ġevolution ary +Ġnegoti ate +Ġconfront ed +Rem ember +Ġ17 0 +S uch +Ġ9 11 +m ult +ĠA byss +ur ry +ke es +spe c +ĠBarb ara +Ġbelong ing +Ġvill ain +ist ani +Ġaccount able 
+Ġport ions +ĠDe cl +U r +ĠK ate +g re +Ġmag azines +UC K +Ġregul ate +om on +ĠAl most +Ġover view +Ġsc ram +Ġl oot +ĠF itz +Ġcharacter istic +ĠSn ake +s ay +ĠR ico +Ġtra it +ĠJo ined +au cus +Ġadapt ation +ĠAirl ines +Ġarch ae +ĠI de +Ġb ikes +Ġliter ary +Ġinflu ences +ĠUs ed +C reat +Ġple a +ĠDef ence +ĠAss ass +Ġp ond +UL T +) " +Ġeval uated +Ġob taining +Ġdem ographic +Ġvig il +ale y +Ġsp ouse +ĠSeah awks +resp ons +ĠB elt +um atic +Ġr ises +run ner +ĠMichel le +Ġpot ent +r ace +ĠP AC +F ind +olester ol +IS S +ĠIntrodu ced +ress es +ign ment +O s +ĠT u +ĠDe x +ic ides +Ġspark ed +ĠLaur a +ĠBry ant +Ġsm iling +ĠNex us +Ġdefend ants +ĠCat al +Ġdis hes +sh aped +Ġpro long +m t +( $ +ãĢ Ĥ +Ġcalcul ations +ĠS ame +Ġp iv +H H +Ġcance lled +Ġgr in +Ġterrit ories +ist ically +C ome +ĠP arent +Pro ject +Ġneg lig +ĠPriv acy +Ġam mo +LE CT +olute ly +ĠEp ic +Ġmis under +w al +Apr il +m os +path y +ĠC arson +Ġalbum s +ĠE asy +Ġpist ol +< < +Ġ\ ( +t arget +hel p +Ġinter pre +cons cious +ĠH ousing +ĠJ oint +12 7 +Ġbe ers +s cience +ĠFire fox +effect ive +ĠC abin +ĠO kay +ĠApp lic +Ġspace craft +ĠS R +ve t +ĠStr ange +S B +Ġcor ps +iber al +e fficient +Ġpreval ence +Ġeconom ists +11 8 +Th read +ord able +OD E +ĠC ant +=- =- +if iable +ĠA round +Ġpo le +Ġwilling ness +CL A +ĠK id +Ġcomple ment +Ġsc attered +Ġin mates +Ġble eding +e very +Ġque ue +ĠTr ain +Ġh ij +Ġme lee +ple ted +Ġdig it +Ġg em +offic ial +Ġlif ting +Ð µ +Re qu +it utes +Ġpack aging +ĠWork ers +h ran +ĠLeban on +ol esc +Ġpun ished +ĠJ uan +Ġj am +ĠD ocument +Ġm apping +ic ates +Ġinev itably +Ġvan illa +ĠT on +Ġwat ches +Ġle agues +Ġiniti ated +deg ree +port ion +Ġrec alls +Ġru in +Ġm elt +I AN +Ġhe m +Ex p +Ġb aking +ĠCol omb +at ible +Ġrad ius +pl ug +ĠI F +et ically +Ġf ict +H ER +ĠT ap +atin um +Ġin k +Ġco h +ĠW izard +b oth +te x +Ġsp ends +ĠCurrent ly +ĠP it +Ġneur ons +ig nt +Ġr all +Ġbus es +b uilding +Ġadjust ments +Ġc ried +ibl ical +att ed +ĠZ ion +ĠM atter +Ġmed itation +ĠD ennis +Ġour s +ĠT ab +Ġrank ings +ort al +Ġad vers +Ġsur render +ĠG ob +ci um +om as +im eter +Ġmulti player +Ġhero in +Ġoptim istic +Ġindic ator +ĠBr ig +Ġgro cery +Ġapplic ant +ĠRock et +v id +Ex ception +p ent +Ġorgan izing +Ġenc ounters +ĠT OD +Ġjew el +S ave +ĠChrist ie +Ġhe ating +Ġl azy +ĠC P +Ġcous in +Con fig +Ġreg ener +Ġne arest +Ġachie ving +EN S +th row +ĠRich mond +ant le +200 2 +Ġan ten +b ird +13 3 +Ġn arc +r aint +un ny +ĠHispan ic +ourn aments +Ġprop he +ĠTh ailand +ĠT i +Ġinject ion +Ġinher it +rav is +Ġmed i +Ġwho ever +ĠDE BUG +G P +ĠH ud +C ard +p rom +Ġp or +Ġover head +L aw +Ġviol ate +Ġhe ated +Ġdescript ions +Ġachieve ments +ĠBe er +ĠQu ant +W as +Ġe ighth +ĠI v +Ġspecial ized +U PDATE +ĠD elta +P op +J ul +ĠAs k +oph y +Ġnews letters +ĠT ool +Ġg ard +ĠConf eder +ĠGM T +ĠAb bott +Ġimm unity +ĠV M +Is lam +Ġimpl icit +w d +Ġ19 44 +rav ity +omet ric +Ġsurv iving +ur ai +ĠPr ison +Ġr ust +ĠSk etch +Ġbe es +ĠThe ory +Ġmer it +T ex +ch at +Ġm im +Ġpast e +ĠK och +Ġignor ance +ĠSh oot +Ġbas ement +Un ited +ĠAd vis +he ight +Ġf oster +Ġdet ain +in formation +Ġne ural +' ; +Ġprov es +all ery +Ġinv itation +um bers +Ġc attle +Ġbicy cle +z i +Ġconsult ant +Ġap ology +ĠT iger +Ġ12 3 +99 9 +Ġind ividually +r t +ig ion +ĠBrazil ian +Ġdist urb +Ġentreprene urs +Ġfore sts +cer pt +pl ates +p her +clip se +Ġtw itter +Ġac ids +ograph ical +h um +ĠB ald +if ully +Ġcomp iler +ĠD A +Ġdon or +as i +Ġtrib al +l ash +ĠCon fig +Ġapplic ants +Ġsal aries +13 5 +Put in +ĠF ocus +ir s +Ġmisc onduct +ĠH az +Ġeat en +M obile +Mus lim +ĠMar cus +v iol +Ġfavor 
able +Ġst ub +ad in +ĠH ob +Ġfaith ful +Ġelectron ics +Ġvac uum +w ait +back ed +econom ic +d ist +Ġten ure +Ġsince re +ĠT ogether +ĠW ave +Ġprog ression +Ġden ying +Ġdist ress +br aska +th ird +Ġmix ing +Ġcolon ial +Ġpriv ately +Ġun rest +atern ity +Ġprem ises +ant i +greg ation +Ġlic ence +ĠH ind +ĠSam uel +Ġconvinc ing +ĠA ce +ĠR ust +ĠNet anyahu +Ġhand les +ĠP atch +orient ed +ah o +ĠG onz +Ġhack ers +claim er +Ġcustom s +ĠGr an +f ighters +Ġl uc +Ġman uscript +aren thood +Ġdev il +Ġwar riors +Ġoff enders +Will iam +Ġhol idays +Ġnight mare +Ġle ver +iff erent +St at +Ġexhib ition +put ed +ĠP ure +Ġal pha +Ġenthus iasm +ĠRepresent atives +E AR +ĠT yp +Ġwhe at +ĠAl f +Ġcor rection +Ġev angel +AT T +M iss +Ġs oup +Ġimpl ied +par am +Ġsex y +ĠL ux +Ġrep ublic +p atch +ab lish +Ġic ons +Ġfather s +ĠG ET +ĠCar ib +Ġregul ated +ĠCo hen +ĠBob by +Ġn er +Ġb ent +vent ory +ĠAl ong +ĠE ST +ĠWall ace +Ġmurd ers +r ise +ke ll +ĠCommon wealth +Ġn asty +et a +ĠM IT +Ġadminist ered +Ġgenuine ly +Ed itor +n ick +Ġhyd ro +**************** **************** +ĠB le +Ġfin es +Ġg orge +aus ible +r h +Ġapp le +ment ioned +Ġro pe +ot yp +H R +Ġdisappoint ing +Ġc age +n ik +Ġdoub ts +ĠF REE +print s +ĠM UST +Ġvend ors +ĠIn qu +Ġliber als +Ġcontract or +Ġup side +child ren +Ġtrick y +Ġregul ators +charg ed +l iter +Ġ *** +Ġreb ell +l ang +Ġloc als +Ġphys icians +Ġhe y +ar se +t m +ĠLe x +Ġbehavior al +success ful +F X +Ġbr ick +ov ic +Ġcon form +Ġreview ing +Ġins ights +Ġbi ology +ĠRem ove +ĠExt ra +Ġcomm itting +indu ced +ignt y +ig m +Ġat omic +Comm on +ĠE M +ĠP ere +ĠIt ems +e h +Ġpres erved +ĠH ood +Ġprison er +Ġbankrupt cy +Ġg ren +us hes +Ġexplo itation +Ġsign atures +Ġfin an +] ," +ĠM R +Ġme g +rem lin +Ġmusic ians +Ġselect ing +Ġexam ining +IN K +l ated +H i +Ġart ic +Ġp ets +Ġimp air +ĠM AN +Ġtable ts +in clude +R ange +Ġca ut +Ġlog s +Ġmount ing +Ġun aware +Ġdynam ics +ĠPalest ine +ĠQu arter +ĠPur ple +Ġm a +ĠIm port +Ġcollect ions +ci ation +Ġsuccess or +Ġcl one +Ġaim ing +Ġposs essed +Ġstick ing +Ġsh aking +Ġloc ate +ĠH ockey +T urn +17 0 +Ġfif teen +ĠHar rison +Ġcontinu ously +ĠT C +ĠVal ent +ĠRes cue +Ġby pass +am ount +Ġm ast +Ġprotect s +Ġart istic +Ġsomet ime +Ġsh oe +Ġshout ed +ific ant +et itive +ĠReg ister +ĠJ in +Ġconcent rated +ling ton +on ies +Ġgener ator +yr im +ĠAr men +Ġclear ing +id o +ĠT W +al ph +Ġlad ies +H ard +Ġdial og +Ġinput s +æ ľ +Ġpos es +Ġsl ots +ĠPrem ium +Ġle aks +Ġboss es +Ġ11 3 +c ourse +A cc +ĠNew ton +ĠAust ria +ĠM age +Ġte aches +ab ad +Ġwe ars +Ġc yl +Ġcur se +ĠS ales +ĠW ings +Ġp sy +Ġg aps +ĠIce land +ĠP interest +Ġland lord +Ġdefin itions +ĠK er +Ġsufficient ly +ĠP ence +ĠArch itect +Ġsur pass +Ġ11 4 +Ġsuper hero +ĠDise ase +Ġpri ests +ĠC ulture +Ġdefin itive +Ġsecret ly +ĠD ance +inst all +ch ief +ĠJess ica +W ould +Up dated +Ġlock er +ĠK ay +Ġmem orial +è ¦ +f at +Ġdis gu +Ġflav ors +ĠBase ball +ĠRes istance +Ġk icks +Ġen v +Ġteen agers +D ark +ĠC AR +Ġh alt +ĠL G +ĠGab riel +Ġfe ver +Ġs atur +Ġm all +Ġaffili ate +ĠS leep +ĠSpe cific +ĠV el +Ġj ar +ĠSac red +ĠEd wards +ĠA CL +Ġret ained +ĠG iant +Ġlim itation +in ces +Ġref usal +ĠT ale +ĠBut ler +Ġacc idents +ĠC SS +Ġimport ed +ĠCop y +Î ± +ER T +z el +Ġdiv isions +h ots +ĠAl b +ĠD S +Load er +W ashington +at isf +ĠCreat ive +\ . 
+ĠAut om +red ict +Ġrecept or +ĠCarl os +Met hod +ok a +Ġmal icious +Ġste pping +, [ +ĠD ad +Ġatt raction +ĠEffect s +ĠPir ate +ĠC er +ĠIndust ry +ĠR ud +Ġchar ter +Ġd ining +Ġins ists +Ġconfig ure +Ġ( # +ĠSim ple +ĠSc roll +UT C +17 5 +ĠK on +Ġmarket place +Ġ ãĤ +Ġref res +Ġg ates +er red +ĠP od +Ġbeh ave +Fr ank +n ode +Ġendors ed +he tt +as ive +ĠHom eland +Ġr ides +ĠLe ave +er ness +Ġflood ing +A FP +Ġris en +Ġcontin ually +Ġun anim +ĠCont ract +ĠP as +Ġgu ided +ĠCh ile +b d +Ġsu cc +pt ic +Ġcomm ittees +ĠL uther +ĠAny one +Ġs ab +12 4 +Ġp ixel +ĠB ak +ĠT ag +ĠBenn ett +En ter +sm all +ĠPresident ial +Ġp ul +Ġcontr ace +arch ive +Ġcoast al +ĠK ids +19 2 +âĢ ² +ick y +ING TON +Ġw olf +ĠSt alin +T ur +id get +am as +ĠUn less +Ġspons or +Ġmor ph +ĠCho ose +Ġrun ner +Ġun bel +Ġm ud +ĠMan a +Ġdub bed +Ġg odd +ure rs +wind ow +Ġrel ied +Ġcelebr ating +os c +Ġ13 5 +Ġlobb ying +Ġincom plete +Ġrestrict ion +Ġinc ap +it us +Ġexpect ation +ĠAp ollo +Ġint ens +Ġsyn c +G H +Ġmanip ulation +B Y +Ġspe ar +Ġbre asts +Ġvol can +il ia +M aterial +Ġform ats +ĠB ast +Ġparliament ary +Ġsn ake +Ġserv ants +ĠTr udeau +ĠGr im +ĠArab ic +ĠSC P +ĠBoy s +st ation +Ġprospect ive +ord e +in itialized +Ġb ored +AB LE +Ġaccess ed +Ġtax i +ĠShe ll +aid en +urs ed +in ates +ĠIns urance +ĠPet e +Sept ember +6 50 +Ġad ventures +ĠCo ver +Ġt ribute +Ġsk etch +Ġem power +Ġ Ø +ĠGl enn +ĠD aw += \" +ĠPolit ics +Ġgu ides +Ġd ioxide +ĠG ore +ĠBr ight +ĠS ierra +Ġval ued +c ond +Ġpo inter +Se lect +Ġrisk y +Ġabsor b +im ages +Ġref uses +Ġbon uses +__ _ +Ġh ilar +ĠF eatures +2 20 +ĠCollect or +F oot +Ġ19 64 +cul us +Ġd awn +Ġwork out +ĠL O +Ġphilosoph ical +ĠSand y +ĠYou th +Ġl iable +A f +bl ue +Ġovert urn +less ness +ĠTrib une +ĠIn g +Ġfact ories +Ġcat ches +Ġpr one +Ġmat rix +Ġlog in +Ġin acc +Ġex ert +s ys +Ġneed le +ĠQ ur +Ġnot ified +ould er +t x +Ġremind s +Ġpublisher s +Ġn ort +Ġg it +Ġfl ies +ĠEm ily +Ġflow ing +ĠAl ien +ĠStr ateg +Ġhard est +Ġmod ification +AP I +ĠM Y +Ġcr ashes +st airs +n umber +Ġur ging +ch annel +ĠFal con +Ġinhabit ants +Ġterr ifying +Ġutil ize +Ġban ner +Ġcig arettes +Ġsens es +ĠHol mes +Ġpract ition +ĠPhill ips +ott o +Ġcomp ile +Mod el +ĠK o +Ġ[ ] +Americ ans +ĠTer ms +Ġmed ications +ĠAn a +Ġfundament ally +ĠNot ice +Ġwe aker +Ġ 0000 +Ġgar lic +Ġout break +Ġeconom ist +ĠB irth +Ġobst acles +ar cer +ĠOr thodox +Ġplace bo +ĠC rew +asp berry +ĠAng els +Ġdis charge +Ġdestruct ive +11 7 +ĠR ising +Ġd airy +l ate +Ġcoll ision +ĠTig ers +ean or +ocument ed +ĠIn valid +Ġd ont +ĠL iter +ĠV a +Ġhyd rogen +Ġvari ants +ĠBrown s +Ġ19 65 +Ġind igenous +Ġtrad es +Ġremain der +Ġswe pt +ĠImp act +Ġred ist +Ġun int +grad uate +ãĥ ķ +ĠW ILL +ãģ® ç +ĠCrit ical +Ġf isher +Ġv icious +Ġrevers ed +Y ear +ĠS ox +Ġshoot ings +Ġfil ming +Ġtouchdown s +ai res +m el +Ġgrand father +Ġaffect ion +ing le +Ġover ly +Add itional +Ġsup reme +ĠGr ad +Ġsport ing +Ġmer cy +ĠBrook s +ount y +Ġperform s +Ġtight ly +Ġdem ons +Ġkill ings +Ġfact ion +ĠNov a +aut s +Ġund oubtedly +ar in +Ġunder way +ra k +Ġl iv +ĠReg ion +Ġbrief ing +s ers +cl oud +ĠM ik +us p +Ġpred iction +az or +Ġport able +ĠG and +Ġpresent ing +Ġ10 80 + » +ush i +ĠSp ark +there um +Ġjust ification +ĠN y +Ġcontract ors +ming ham +ĠSt yle +å ħ +ĠChron icles +ĠPict ure +Ġprov ing +Ġw ives +set t +Ġmole cules +ĠFair y +Ġconsist ing +Ġp ier +al one +in ition +Ġn ucle +j son +Ġg otta +Ġmob il +Ġver bal +ar ium +Ġmon ument +uck ed +Ġ25 6 +T ech +mine craft +ĠTr ack +Ġt ile +Ġcompat ibility +as is +Ġs add +Ġinstruct ed +ĠM ueller +Ġle thal +Ġhorm one +Ġor che +el se +Ġske let 
+Ġentert aining +Ġminim ize +ag ain +Ġunder go +Ġconst raints +Ġcig arette +ĠIslam ist +Ġtravel s +ĠPant hers +l ings +C are +Ġlaw suits +ur as +Ġcry st +Ġlow ered +Ġaer ial +Ġcomb inations +Ġha un +Ġch a +Ġv ine +Ġquant ities +Ġlink ing +b ank +Ġso y +B ill +ĠAngel a +Ġrecip ient +ĠProt est +Ġs ocket +Ġsolid arity +Ġâ Ĩ +m ill +Ġvar ies +ĠPak istani +Dr agon +Ġun e +Ġhor izon +³³³³ ³³³³ +Ġprov inces +Ġfrank ly +Ġenact ed +not es +[ ' +Ġ19 2 +ocr acy +Ġendorse ment +Ġover time +Tr ue +L ab +lic ted +ĠD NC +Ġbe ats +ĠJam ie +15 2 +ĠIN T +Cont act +Ġaccount ed +h ash +ĠPack ers +p ires +Ġles bian +Ġamend ments +Ġhop eful +ĠFin land +Ġspot light +Ġconfig ured +Ġtrou bled +Ġg aze +ĠCal gary +Ġrel iability +Ġins urg +sw er +b uy +ĠSk in +Ġp ixels +Ġhand gun +Ġpar as +Ġcateg or +ĠE L +ĠRe x +Ind eed +Ġkind a +Ġconj unction +ĠBry an +ĠMan ufact +y ang +Pl us +S QL +ish ment +Ġdom inate +Ġn ail +Ġo ath +Ġeru pt +ĠF ine +it bart +ĠCh ip +ĠAb d +ĠN am +Ġbuy er +Ġdiss ent +Le aks +Cont in +Ġr ider +ĠSome one +Ġill usion +c in +ĠBoe ing +Ġin adequ +ov ation +i ants +Ġreb uild +4 50 +ĠDest iny +S W +ĠT ill +H it +ia z +ĠBang l +acher s +ĠRe form +Ġse gments +Ġsystem atic +d c +ĠConserv atives +Ġport al +h or +ĠDragon bound +Ġdrag ged +om o +Ġthe e +ad vert +ĠRep orts +ĠE t +Ġbarrel s +Aug ust +Ġcompar isons +Ġhe x +Ġan throp +" [ +bor ough +ab i +Ġpict ured +play ing +ĠAdd ress +ĠMir ror +Sm ith +Ġt ires +ĠN PR +AA AA +Ġclass ification +ĠTh an +ĠH arm +ĠR A +Ġreject ion +min ation +Ġr anged +ĠF alls +D I +H ost +ãĤ ´ +ĠEx ample +list ed +th irds +Ġsaf egu +br and +Ġprob able +Can ada +IT ION +ĠQ aeda +Ġch ick +Ġimport s +h it +l oc +W W +Ġble w +Ġany time +Ġwh oles +ik ed +Ġcal culation +cre ate +ĠO ri +Ġupgr aded +Ġapp ar +ut ory +ĠM ol +B rit +ĠJ ong +IN AL +ĠStart ing +Ġd ice +urt le +Ġre lying +cl osure +Ġprof itable +Ġsl aughter +ĠMan ual +c aster +Ġ" $ +Ġfe ather +ĠSim ply +ie ves +Ġdeter ior +ĠPC I +Ġst amp +Ġfl aws +Ġsh ade +ham mer +Ġpass port +Ġcont ing +am el +Ġobser vers +Ġneg lect +ĠR B +ĠBrother hood +Ġskept ical +f amily +us k +Ġemotion ally +â Ļ +ĠBet a +ason able +id ity +ĠM ul +Ġkick ing +ĠC arm +oll ah +VERT IS +ĠAt hen +Ġlad der +ĠBul let +å £ +00 01 +ĠWild life +ĠM ask +ĠN an +R ev +Ġun acceptable +leg al +Ġcrowd ed +ag i +ĠC ox +j e +Ġmor ality +Ġfu els +Ġc ables +Ġman kind +ĠCarib bean +Ġanch or +Ġby te +ĠO ften +ĠO z +Ġcraft ed +Ġhistor ian +ĠW u +Ġtow ers +ĠCitiz ens +Ġhel m +Ġcred entials +Ġsing ular +ĠJes se +Ġtack les +Ġcont empt +Ġa fore +ĠSh adows +Ġn il +Ġur gent +app le +bl ood +Ġv on +Ġoff line +Ġbreat he +Ġj umps +Ġirre levant +ox ic +om al +import ant +J im +Ġgl oves +arm ing +dep th +Ġtal ents +ook ie +ĠS B +Ġpal m +uff s +est a +IG H +Ġcan on +ĠVer izon +ĠP le +Ġcou pled +vel t +Ġfundra ising +ĠGet ting +ĠD LC +Ġmathemat ical +ĠH S +ĠCard inals +te lling +Ġspons ors +Ġ Ï +ĠBull s +op tion +Ġprop ose +Ġmem orable +Ġembr aced +Ġdecl ining +He alth +ed a +Ġ} ; +Ġsp am +m ile +Ġpit cher +ĠE ight +Ġcar ing +ut ic +ro le +Ġair line +ernand ez +ĠAth let +Ġcert ification +ux e +rig er +Ġem pir +Ġsens ation +Ġdis m +Ġb olt +Ġev olve +H ouse +Ġconsult ation +ĠD uty +Ġtou ches +ĠN athan +Ġf aint +h ad +" ( +ĠCons umer +ĠExt reme +Ġ12 7 +ĠHer m +ĠSac rament +iz oph +Ġanx ious +ul ously +Ġsoc ially +ĠU TC +Ġsol ving +ĠLet ter +Hist ory +ed uc +Pr ice +) ); +Ġrel oad +am ic +Ġp ork +Ġdisc ourse +Ġt ournaments +ai ro +ĠK ur +ĠCost a +Ġviol ating +Ġinterf ere +Ġrecre ational +uff le +Ġspe eches +Ġneed ing +Ġremem bers +Ġcred ited +n ia +f ocused +amer a +Ġb ru +um bs +ĠCub an 
+Ġpreced ing +Ġnons ense +ac ial +Ġsmart phones +ĠSt ories +S ports +ĠEmer gency +oun cing +ef ined +Ġb er +Ġconsult ing +Ġm asters +he astern +." [ +ĠRun ning +Ġsus cept +ĠF eng +Americ a +pr ises +st itial +ĠWeek ly +ĠGreat er +mod ules +if ter +G raphics +ul er +Ġwho lly +Ġsupp ress +Ġconce aled +Ġhapp ily +Ġaccept s +ĠEn joy +Ġr ivers +ĠEx cept +2 25 +ĠN HS +ĠMc Connell +Ġp ussy +fer red +ut able +Ġatt ain +Ġ> = +Ġdepos its +roph ic +Ġnot orious +ĠSh aw +il itation +Ġepid emic +all ic +Ġsmall est +ov ich +Ġaccess ories +per ties +Ġsur plus +ĠMe ch +Ġamb ig +ĠImm igration +Ġch im +ev al +Ġpract icing +ĠMyster y +Ġdom ains +ĠSil icon +app s +Ġkilomet ers +e a +ĠSm ash +Ġwarrant y +Ġn ost +s il +re v +J on +ĠDub lin +Ġtast es +Ġb out +g reat +er ror +Ġsw itches +ĠB apt +D O +ok i +Ġsour ced +pro du +Ġattach ment +ĠIss ue +ĠQuest ion +Jo in +Ġf itted +Ġunlaw ful +^ ^ +ere k +Ġauthent ication +Ġst ole +Ġaccount ability +l abel +S earch +Ġal beit +atic an +fund ed +ĠAdd ing +ĠI Q +Ġsub mar +l it +a que +ĠLear ning +Ġint eger +M aster +ĠCh rom +Ġprem ier +O p +ĠLi u +Ġbl essed +ĠGl obe +ĠResp onse +Ġlegit im +ĠMer kel +Ġdispos al + ´ +Ġgau ge +pe at +Ġindu ced +Ġquestion able +arth y +ĠV it +ĠF eed +U ntil +U t +worth y +R Y +ĠH erald +ĠHam mer +Ġmed al +ĠR ivers +ĠH ack +Ġclar ify +Ġtrack ed +Ġautonom ous +Ġten ant +ĠQ atar +er ie +Ġgr im +ĠMon itor +Ġresist ant +ĠSpe c +ĠWell s +N AS +14 8 +Ġmin ers +iot ics +Ġmiss es +11 6 +g ian +g it +ĠE yes +p res +Ġgrad uated +Ġang el +Ġsyn chron +Ġefficient ly +Ġtrans mitted +H arry +Ġglob ally +EN CE +ĠMont ana +r aged +ĠPre vention +Ġp iss +ĠL l +Ġshe lf +ĠB JP +ĠTest ament +ĠL ate +ik er +ĠH app +ĠJul ian +h all +Ġsp ont +Ġshut down +Ġincons istent +Ġsubscrib ers +Ġske leton +ĠNe braska +Ġins pire +ĠV oid +F eed +Ġang les +ĠSpr ings +Ġbench mark +Ġvacc ines +izoph ren +se xual +uff ed +Ġsh ine +ĠK ath +Ġgest ure +ine a +Ġr ip +Ġopp ression +Ġcons cience +b t +ĠL um +Ġinc idence +ĠF a +w r +Ġmin eral +ĠSp urs +alk y +Ġth under +Ġop io +Be ing +ĠPal m +Ġwas ted +Ġl b +i aries +ĠIniti ative +Ġcur ric +Ġmark er +ĠMc L +Ġext ensions +ĠP v +ĠAr ms +Ġoffer ings +Ġdef enses +Ġvend or +Ġcontrad ict +ĠCol in +Ġredd it +Ġper ipher +12 2 +Ġs ins +E dit +IC T +So ft +ĠSh ah +Ġadministr ator +ĠT rip +Ġporn ography +Ġtu ition +in ence +ĠPro gress +Ġcat alog +Ġsu ite +Ġh ike +Ġreprodu ctive +eng ine +Ġd rought +ĠNo ah +Ġ2 30 +Ġd ude +Ġrelax ed +Ġpart ition +Ġparticip ant +Ġtel esc +Ġfe as +ĠF F +own er +Ġswe eping +Ġl enses +Ġmatch up +ĠRe pl +ourn als +Ġcred ible +Ġgrand mother +Ġther mal +Ġsubscrib ing +Ġident ities +col m +U CT +Ġreluct ant +us ers +ĠC ort +Ġassist ed +OS S +ATION S +IS H +Ġpharm aceutical +ic able +ad ian +ĠSon ic +ĠF ury +ĠM ong +A H +ĠPsych ology +Ġph osph +Ġtreat s +Ń Ķ +Ġstead ily +ĠHell o +Ġrel ates +Ġcl ue +Ex pl +a uth +Ġrev ision +Ġe ld +os ion +Ġbr on +14 4 +ri kes +Ġmin es +Ġblank et +ĠF ail +el ed +ĠIm agine +ĠPl anned +a ic +Re quest +M ad +ĠHor se +ĠEag le +Ġcap ac +15 7 +Ġl ing +ĠN ice +ĠP arenthood +min ster +og s +ens itive +Not hing +Ġcar n +F in +ĠP E +Ġr ifles +ĠL P +S and +Ġgui Active +Ġtour ist +C NN +Ġunve iled +Ġpredec essor +} { +u ber +Ġoff shore +Ġopt ical +ĠR ot +ĠPear l +et on +Ġst ared +Ġfart her +at ility +cont in +ĠG y +ĠF oster +ĠC oc +ri ents +Ġdesign ing +ĠEconom y +ON G +W omen +ĠN ancy +er ver +Ġmas cul +Ġcasual ties +Ġ2 25 +ĠS ullivan +ĠCh oice +Ġa ster +w s +Ġhot els +Ġconsider ations +Ġcou ch +ĠSt rip +ĠG n +Ġmanip ulate +l ied +Ġsynt hetic +Ġassault ed +Ġoff enses +ĠDra ke +Ġim pe +Oct ober +ĠHer itage 
+h l +ĠBl air +Un like +Ġg rief +Ġ4 50 +Ġopt ed +Ġresign ation +il o +Ġver se +ĠT omb +Ġu pt +Ġa ired +ĠH ook +ĠML B +Ġassum es +out ed +ĠV ers +Ġinfer ior +Ġbund le +ĠD NS +ograp her +Ġmult ip +ĠSoul s +Ġillust rated +Ġtact ic +Ġdress ing +Ġdu o +Con f +Ġrel ent +Ġc ant +Ġscar ce +Ġcand y +ĠC F +Ġaffili ated +Ġspr int +yl an +ĠGarc ia +Ġj unk +Pr int +ex ec +C rit +Ġport rait +ir ies +ĠOF F +Ġdisp utes +W R +L ove +ãģ Ħ +ĠRe yn +Ġh ipp +op ath +Ġflo ors +ĠFe el +Ġwor ries +Ġsett lements +ĠP os +Ġmos que +Ġfin als +Ġcr ushed +ĠPro bably +ĠB ot +ĠM ans +ĠPer iod +Ġsovere ignty +Ġsell er +Ġap ost +Ġam ateur +Ġd orm +Ġconsum ing +Ġarm our +ĠRo ose +Ġint ensive +Ġelim inating +ĠSun ni +ĠAle ppo +j in +Ġadv ise +p al +ĠH alo +Ġdes cent +Ġsimpl er +Ġbo oth +ST R +L ater +ĠC ave +== = +Ġm ol +Ġf ist +Ġshot gun +su pp +Ġrob bery +E ffect +Ġobsc ure +ĠProf essional +Ġemb assy +Ġmilit ant +Ġinc arcer +Ġgener ates +Ġlaun ches +Ġadministr ators +Ġsh aft +Ġcirc ular +Ġfresh man +ĠW es +ĠJo el +ĠD rew +ĠDun can +ĠApp arently +s ight +ĠIntern al +ĠInd ividual +ĠF E +Ġb ore +ĠM t +Ġbroad ly +ĠO ptions +ount ain +ip es +ĠV ideos +20 4 +Ġh ills +Ġsim ulation +Ġdisappoint ment +it an +ĠLabor atory +Ġup ward +Ġbound ary +Ġdark er +h art +Ġdomin ance +C ong +ĠOr acle +ĠL ords +Ġscholars hip +ĠVin cent +ed e +ĠR ah +Ġencour ages +ro v +Ġqu o +Ġprem ise +ĠCris is +ĠHol ocaust +Ġrhyth m +Ġmet ric +cl ub +Ġtransport ed +Ġn od +ĠP ist +Ġancest ors +ĠFred er +th umbnails +ĠC E +ON D +Ph il +ven ge +ĠProduct s +cast le +Ġqual ifying +ĠK aren +VERTIS EMENT +Ġmight y +Ġexplan ations +Ġfix ing +D i +Ġdecl aring +Ġanonym ity +Ġju ven +ĠN ord +ĠDo om +ĠAct ually +O k +ph is +ĠDes ert +Ġ11 6 +I K +ĠF M +Ġinc omes +V EL +ok ers +Ġpe cul +Ġlight weight +g ue +Ġacc ent +Ġincre ment +ĠCh an +Ġcompl aining +ĠB aghd +Ġmidfield er +Ġover haul +Pro cess +ĠH ollow +ĠTit ans +Sm all +man uel +ĠUn ity +ĠEv ents +S ty +Ġdispro portion +n esty +en es +ĠC od +Ġdemonstr ations +ĠCrim son +ĠO H +Ġen rolled +Ġc el +ĠBre tt +Ġa ide +Ġhe els +Ġbroad band +Ġmark ing +Ġw izard +ĠN J +ĠChief s +Ġingred ient +Ġd ug +ĠSh ut +urch ase +end or +Ġfar mer +ĠGold man +12 9 +15 5 +Or der +Ġl ion +i ably +Ġst ain +ar ray +ilit ary +ĠFA Q +Ġexpl oded +ĠMcC arthy +ĠT weet +ĠG reens +ek ing +l n +ens en +Ġmotor cycle +Ġpartic le +Ġch olesterol +B ron +Ġst air +Ġox id +Ġdes irable +ib les +Ġthe or +for cing +Ġpromot ional +ov o +b oot +ĠBon us +raw ling +Ġshort age +ĠP sy +Ġrecru ited +Ġinf ants +Ġtest osterone +Ġded uct +Ġdistinct ive +Ġfirm ware +bu ilt +14 5 +Ġexpl ored +Ġfact ions +Ġv ide +Ġtatt oo +Ġfinan cially +Ġfat igue +Ġproceed ing +const itutional +Ġmis er +Ġch airs +gg ing +ipp le +Ġd ent +Ġdis reg +ç Ķ +st ant +ll o +b ps +aken ing +Ġab normal +ĠE RA +å£ « +ĠH BO +ĠM AR +Ġcon cess +Ġserv ant +Ġas pir +l av +ĠPan el +am o +Ġprec ip +Ġrecord ings +Ġproceed ed +Ġcol ony +ĠT ang +ab lo +Ġstri pped +Le ft +to o +Ġpot atoes +Ġfin est +% ). 
+Ġc rap +ĠZ ach +ab ases +ĠG oth +Ġbillion aire +w olf +Ġsan ction +S K +Ġlog ged +P o +ey ed +un al +Ġcr icket +Ġarm ies +Ġunc overed +Cl oud +ó n +Ġreb ounds +Ġm es +O per +P ac +Ġnation ally +Ġinsert ed +p ict +Ġgovern ance +Ð ¸ +Ġprivile ges +G ET +Ġfavor ites +im ity +Ġlo ver +the m +em pl +Ġgorge ous +An n +Ġsl ipped +Ġve to +B ob +Ġsl im +u cc +ĠF ame +udden ly +Ġden ies +ĠM aur +Ġdist ances +Ġw anna +t ar +ĠS ER +Ġâ Ī +Ġle mon +at hetic +Ġlit eral +Ġdistingu ished +Ġansw ering +G I +Ġrelig ions +ĠPhil os +ĠL ay +Ġcomp os +ire ments +ĠK os +ine z +roll ing +Ġyoung est +and ise +ĠB orn +Ġalt ar +am ina +ĠB oot +v oc +Ġdig ging +Ġpress ures +Ġl en +26 4 +Ġassass ination +ĠBir mingham +ĠMy th +Ġsovere ign +ĠArt ist +ĠPhot ograph +Ġdep icted +Ġdisp ens +orth y +Ġamb ul +int eg +ĠC ele +ĠTib et +Ġhier archy +Ġc u +Ġpre season +ĠPet erson +Ġcol ours +Ġworry ing +Ġback ers +ĠPal mer +ĠÎ ¼ +Ġcontribut or +Ġhear ings +Ġur ine +Ġ Ù +ourge ois +Sim ilar +ĠZ immer +s omething +ĠUS C +Ġstrength s +ĠF I +Ġlog ging +As ked +ĠTh ai +in qu +ĠW alt +Ġcrew s +it ism +3 01 +Ġshar ply +um ed +Ġred irect +r ators +In f +ĠWe apons +Ġte asp +19 99 +L ive +ĠEs pecially +ĠS ter +ĠVeter ans +Ġint ro +other apy +Ġmal ware +Ġbre eding +Ġmole cular +ĠR oute +ĠCom ment +oc hem +Ġa in +Se ason +Ġlineback er +Ä « +ĠEconom ics +es ar +ĠL ives +ĠEm ma +Ġk in +ĠTer rit +Ġpl anted +ot on +ĠBut ter +ĠSp ons +P ER +Ġdun geon +Ġsymb olic +Ġfil med +Ġdi ets +Ġconclud es +Ġcertain ty +ĠForm at +Ġstr angers +form at +ĠPh ase +Ġcop ied +Ġmet res +ld a +ĠUs ers +Ġdeliber ate +Ġwas hed +ĠL ance +im ation +Ġimpro per +ĠGen esis +ick r +ĠK ush +Ġreal ise +Ġembarrass ing +alk ing +b ucks +Ġver ified +Ġout line +year s +ĠIn come +20 2 +Ġz ombies +F inal +ĠMill enn +Ġmod ifications +ĠV ision +ĠM oses +ver b +iter ranean +ĠJ et +Ġnav al +ĠA gg +Ġur l +Ġvict ories +Ġnon etheless +Ġinj ust +ĠF act +ç ļ +Ġins ufficient +re view +face book +Ġnegoti ating +Ġguarant ees +im en +uten berg +Ġg ambling +Ġcon gr +Load ing +Ġnever theless +Ġpres idents +ĠIndust rial +Ġ11 8 +Ġp oured +ĠT ory +Ġ17 5 +Ġ: = +Sc ott +ange red +T ok +Ġorgan izers +M at +ĠG rowth +Ġad ul +Ġens ures +Ġ11 7 +é¾į å +Ġmass acre +Ġgr ades +be fore +AD VERTISEMENT +ĠSl ow +ĠM MA +âĢĶ " +ĠV atican +Q aeda +Ġo we +66 66 +ĠS orry +ĠGr ass +Ġbackground s +Ġexha usted +Ġcl an +Ġcomprom ised +ĠE lf +ĠIsa ac +ens on +In vest +IF A +Ġinterrupt ed +ãĥī ãĥ© +Ġtw isted +ĠDrag ons +M ode +ĠK remlin +Ġfert il +he res +ph an +ĠN ode +f ed +ĠOr c +Ġunw illing +C ent +Ġprior it +Ġgrad uates +Ġsubject ive +Ġiss uing +ĠL t +Ġview er +Ġw oke +Th us +bro ok +Ġdep ressed +Ġbr acket +ĠG or +ĠFight ing +Ġstri ker +Rep ort +ĠPortug al +Ġne o +w ed +19 9 +Ġflee ing +sh adow +ident ified +US E +Ste am +Ġstret ched +Ġrevel ations +art ed +ĠD w +Ġalign ment +est on +ĠJ ared +S ep +Ġblog s +up date +g om +r isk +Ġcl ash +ĠH our +Ġrun time +Ġunw anted +Ġsc am +Ġr ack +Ġen light +on est +ĠF err +Ġconv ictions +Ġp iano +Ġcirc ulation +ĠW elcome +Ġback lash +ĠW ade +Ġrece ivers +ot ive +J eff +Ġnetwork ing +ĠPre p +ĠExpl orer +Ġlect ure +Ġupload ed +ĠMe at +B LE +ĠNaz is +ĠSy nd +st ud +ro ots +ri ans +Ġportray ed +Ġ ?? 
+ĠBudd ha +s un +Rober t +ĠCom plex +Ġover see +Ġste alth +T itle +ĠJ obs +ĠK um +Ġappreci ation +ĠM OD +Ġbas ics +Ġcl ips +Ġnurs ing +Ġpropos ition +Ġreal ised +ĠNY C +Ġall ocated +ri um +ar an +ĠPro duction +ĠV ote +Ġsm ugg +Ġhun ter +az er +ĠCh anges +Ġfl uct +y on +Ar ray +Ġk its +W ater +Ġuncom mon +Ġrest ing +ell s +w ould +Ġpurs ued +Ġassert ion +omet own +ĠMos ul +ĠPl atform +io let +Ġshare holders +Ġtra ils +P ay +ĠEn forcement +ty pes +ĠAn onymous +Ġsatisf ying +il ogy +Ġ( ' +w ave +c ity +Ste ve +Ġconfront ation +ĠE ld +C apt +ah an +ht m +ĠC trl +ON S +2 30 +if a +hold ing +Ġdelic ate +Ġj aw +ĠGo ing +or um +S al +Ġd ull +ĠB eth +Ġpr isons +Ġe go +ĠEl sa +avor ite +ĠG ang +ĠN uclear +Ġsp ider +ats u +Ġsam pling +Ġabsor bed +ĠPh arm +iet h +Ġbuck et +ĠRec omm +O F +ĠF actory +AN CE +Ġb acter +H as +ĠObs erv +12 1 +Ġprem iere +De velop +Ġcur rencies +C ast +Ġaccompany ing +ĠNash ville +Ġfat ty +ĠBre nd +Ġloc ks +Ġcent ered +ĠU T +augh s +or ie +ĠAff ordable +v ance +D L +em et +Ġthr one +ĠBlu etooth +Ġn aming +if ts +AD E +Ġcorrect ed +Ġprompt ly +ĠST R +Ġgen ome +Ġcop e +Ġval ley +Ġround ed +ĠK end +al ion +p ers +Ġtour ism +Ġst ark +v l +Ġblow ing +ĠSche dule +st d +Ġunh appy +Ġlit igation +ced es +Ġand roid +Ġinteg ral +ere rs +ud ed +t ax +Ġre iter +ĠMot ors +oci ated +Ġwond ers +ĠAp ost +uck ing +ĠRoose velt +f ram +Ġyield s +Ġconstit utes +aw k +Int erest +Ġinter im +Ġbreak through +ĠC her +Ġpro sec +ĠD j +ĠM T +Res p +ĠP T +Ġs perm +ed it +B T +Lin ux +count ry +le ague +Ġd ick +Ġo ct +Ġinsert ing +Ġsc ra +ĠBrew ing +Ġ19 66 +Ġrun ners +Ġpl un +id y +ĠD ian +Ġdys function +Ġex clusion +Ġdis gr +Ġincorpor ate +Ġrecon c +Ġnom inated +ĠAr cher +d raw +achel or +Ġwrit ings +Ġshall ow +Ġh ast +ĠB MW +ĠR S +Ġth igh +Ġ19 63 +Ġl amb +Ġfav ored +ag le +Ġcool er +ĠH ours +ĠG U +ĠOrig in +Ġglim pse +---------------- ---- +L im +Ġche ek +Ġj ealous +- ' +Ġhar ness +ĠPo ison +Ġdis abilities +ne apolis +Ġout look +Ġnot ify +ĠIndian apolis +Ġab rupt +ns ic +Ġenc rypted +Ġfor fe +reat h +Ġr abb +Ġfound ations +Ġcompl iment +ĠInter view +ĠS we +Ġad olesc +Ġmon itors +ĠSacrament o +Ġtime ly +Ġcontem pl +Ġposition ed +Ġpost ers +ph ies +iov ascular +v oid +ĠFif th +Ġinvestig ative +OU N +Ġinteg rate +ĠIN C +ish a +ibl ings +ĠRe quest +ĠRodrig uez +Ġsl ides +ĠD X +Ġfemin ism +Ġdat as +Ġb end +ir us +ĠNig eria +F ox +Ch ange +Ġair plane +ĠLad en +Ġpublic ity +ixt y +Ġcommit ments +Ġaggreg ate +Ġdisplay ing +ĠAr row +Ġ12 2 +Ġrespect s +and roid +s ix +ĠSh a +Ġrest oration +) \ +W S +oy s +Ġillust rate +with out +12 6 +ĠâĶ Ĥ +Ġpick up +n els +Ġ .... +f ood +ĠF en +) ? 
+Ġphenomen a +Ġcompan ions +ĠW rite +Ġsp ill +Ġbr idges +ĠUp dated +ĠF o +Ġinsect s +ASH INGTON +Ġsc are +il tr +ĠZh ang +Ġsever ity +Ġind ul +14 9 +ĠCo ffee +Ġnorm s +Ġp ulse +ĠF T +Ġhorr ific +ĠDest roy +ĠJ SON +Ġo live +Ġdiscuss es +R est +E lect +ĠW inn +ĠSurv iv +ĠH ait +S ure +op ed +Ġro oted +ĠS ke +ĠBron ze +Ġl ol +Def ault +Ġcommod ity +red ited +Ġliber tarian +Ġforb idden +Ġgr an +à ¨ +Ġl ag +en z +dri ve +Ġmathemat ics +Ġw ires +Ġcrit ically +Ġcarb ohyd +ĠChance llor +ĠEd die +Ġban ning +ĠF ri +Ġcompl ications +et ric +ĠBangl adesh +Ġband width +St op +ĠOrig inally +Ġhalf way +yn asty +sh ine +Ġt ales +rit ies +av ier +Ġspin ning +ĠWH O +Ġneighbour hood +b ach +Ġcommer ce +ĠS le +B U +Ġentreprene ur +Ġpecul iar +ĠCom ments +f re +3 20 +IC S +Ġimag ery +ĠCan on +ĠElect ronic +sh ort +( ( +D ig +Ġcomm em +u ced +Ġincl ined +ĠSum mon +Ġcl iff +ĠMed iterranean +Ġpo etry +Ġprosper ity +ĠRe ce +Ġp ills +m ember +Ġfin ale +un c +ĠG ig +ä ½ +Ġl od +Ġback ward +- + +ĠFor ward +Ġth ri +s ure +Ġso ap +ĠF X +R ES +ĠSe xual +oul os +Ġfool ish +Ġright eous +Ġco ff +terror ism +ust ain +ot er +Ġab uses +ne xt +Ġab usive +Ġthere after +Ġprohib ition +ĠS UP +Ġd ip +Ġr ipped +Ġinher ited +Ġb ats +st ru +G T +Ġflaw ed +ph abet +Ġf og +do ors +Ġim aging +Ġdig its +ĠHung ary +Ġar rog +Ġteach ings +Ġprotocol s +ĠB anks +à ¸ +p ound +ĠC urt +." ) +. / +Ġex emption +end ix +ĠM ull +Ġimpro ves +ĠG amer +d imensional +I con +ĠMarg aret +St atus +d ates +Ġint ends +Ġdep ict +Ġpark ed +J oe +ĠMar ines +chn ology +! ). +Ġjud ged +Ġwe ights +R ay +Ġapart ments +he ster +Ġrein force +Ġoff ender +occ up +Ġs ore +e pt +ĠPH P +ĠB row +Ġauthor ization +ĠR isk +ĠDel aware +ĠQ U +Ġnot ifications +Ġsun light +Ġex clude +d at +Ġm esh +ĠSud an +Ġbelong ed +Ġsub way +Ġno on +ĠInter ior +ol ics +ĠL akers +Ġc oding +Dis claimer +Cal if +O ld +Ġdis l +???? ? 
+Ġconfir ms +Ġrecruit ment +Ġhom icide +Cons ider +ĠJeff rey +ft y +} ; +Ġobject ion +do ing +ĠLe o +W ant +Ġgl ow +ĠClar ke +ĠNorm an +Ġver ification +Ġpack et +ĠForm ula +Ġpl ag +es ville +Ġshout ing +Ġo v +ĠR EC +ĠB ub +Ġn inth +Ġener g +Ġvalid ity +Ġup s +j ack +Ġneighbor ing +ĠN ec +ew orks +ĠH ab +are z +Ġsp ine +Ġevent ual +ĠLe aders +ĠC arn +Ġprob ation +Ġrom ance +ms g +ĠMechan ical +ER Y +R ock +Ġpart isan +N ode +ass ets +min ent +Ġforeign ers +Ġtest ify +ĠUs ually +l ords +ĠG ren +ĠPow ell +BI L +Ġs r +Ġadd ict +Ġshell s +Ġs igh +ĠY ale +tern ity +Ġ7 50 +E U +ĠR ifle +Ġpat ron +em a +ĠB annon +an ity +Ġtrop ical +ĠV II +c ross +Every thing +ĠIS O +Ġhum ble +ass ing +ĠF IG +Ġupd ating +ys on +Ġcal cium +Ġcompet ent +Ġste ering +Pro t +ĠS Y +ĠFin als +ĠR ug +15 9 +13 7 +ĠG olf +Ġ12 6 +Ġaccommod ation +ĠHug hes +Ġaest hetic +art isan +ĠTw ilight +Ġpr ince +ĠAgric ulture +ĠDis co +Ġpreced ent +Ġtyp ing +author ized +O ption +ĠA ub +l ishes +ach t +m ag +P eter +ĠU FO +mont on +ĠL ith +Ġa rom +Ġsec uring +Ġconf ined +priv ate +Ġsw ords +Ġmark ers +Ġmetab olic +se lect +ĠCur se +ĠO t +g ressive +Ġinc umb +ĠS aga +Ġpr iced +Ġclear ance +Cont ent +Ġdr illing +Ġnot ices +Ġb ourgeois +Ġv est +Ġcook ie +ĠGuard ians +ry s +in yl +Ġ12 4 +Ġpl ausible +on gh +ĠOd in +Ġconcept ion +ĠY uk +ĠBaghd ad +ĠFl ag +Aust ral +ĠI BM +Ġintern ationally +ĠWiki Leaks +I ED +Ġc yn +Ġcho oses +ĠP ill +Ġcomb ining +Ġrad i +ĠMoh ammed +def ense +atch ing +Sub ject +ic iency +Fr ame +Ġ{ " +Ġche ss +Ġtim er +19 0 +Ġt in +Ġord inance +emet ery +Ġacc using +Ġnotice able +Ġcent res +Ġl id +ĠM ills +img ur +Ġz oom +erg ic +Ġcomp ression +pr im +f ind +Ġsur g +Ġp and +ĠK ee +ĠCh ad +cell ence +oy le +Ġsocial ism +ĠT ravis +ĠM Hz +Ġgu ild +ALL Y +ĠSub scribe +ĠRel ated +Ġoccur rence +itch ing +Ġfict ional +Ġcr ush +ĠE A +c od +m ix +ĠTri ple +Ġretrie ve +Ġstimul us +Ġpsych iat +ĠDo or +Ġhomosexual ity +Ġelement ary +Ġcell ular +id ian +ĠL aun +Ġintrig uing +Ġfo am +ĠB ass +id i +its u +Ġass ure +Ġcongr at +Ġbusiness man +ĠBo ost +cl ose +Ġl ied +Ġsc iences +ĠO mega +ĠG raphics +Ġ< = +sp oken +Ġconnect ivity +S aturday +ĠAven gers +Ġto ggle +Ġank le +Ġnational ist +mod el +ĠP ool +ophob ia +V ar +ĠM ons +ator ies +Ġaggress ively +C lear +For ge +act ers +Ġhed ge +Ġpip es +Ġbl unt +Ġs q +Ġremote ly +W ed +as ers +Ġref riger +Ġt iles +Ġresc ued +Ġcompr ised +ins ky +Ġman if +avan augh +Ġprol ifer +Ġal igned +x ml +Ġtri v +Ġcoord ination +ĠP ER +ĠQu ote +13 4 +b f +ĠS aw +Ġtermin ation +Ġ19 0 +Ġadd itions +Ġtri o +Ġproject ions +Ġpositive ly +Ġin clusive +Ġmem br +19 90 +old er +Ġpract iced +ink le +Ar ch +Ġstar ters +ari us +Ġinter mediate +ĠBen ef +ĠK iller +Ġinter ventions +ĠK il +ĠF lying +In v +Ġprem ature +Ġpsych iatric +Ġind ie +Ġcoll ar +ĠRain bow +af i +Ġdis ruption +ĠFO X +cast ing +Ġmis dem +c ro +Ġw ipe +ard on +Ġb ast +ĠTom my +ĠRepresent ative +Ġbell y +ĠP O +ĠBre itbart +13 2 +Ġmess aging +Sh ould +Ref erences +ĠG RE +ist ical +L P +ĠC av +ĠC razy +Ġintu itive +ke eping +ĠM oss +Ġdiscont in +ĠMod ule +Ġun related +ĠPract ice +ĠTrans port +Ġstatist ically +orn s +Ġs ized +p u +Ġca f +ĠWorld s +ĠRod gers +ĠL un +ĠCom ic +l iving +Ġc ared +Ġclim bed +) { +Ġconsist ed +Ġmed ieval +fol k +Ġh acked +Ġd ire +ĠHerm ione +Ġt ended +ce ans +D aniel +w ent +Ġlegisl ators +Ġred es +g ames +Ġg n +am iliar +Ġ+ + +gg y +th reat +Ġmag net +Ġper ceive +Ġz ip +Ġindict ment +Ġcrit ique +g ard +ĠSaf e +ĠC ream +Ġad vent +ob a +Ġv owed +ous ands +Ġsk i +Ġabort ions +u art +Ġstun ned +Ġadv ancing +Ġlack ed +Ġ\ " +Ġsch izophren 
+Ġeleg ant +Ġconf erences +Ġcance led +ĠHud son +ĠHop efully +Ġtr ump +Ġfrequ encies +Ġmet eor +ĠJun ior +ĠFle et +ĠMal colm +ĠT ools +Ġ ........ +Ġh obby +ĠEurope ans +Ġ15 00 +ĠInt o +Ġs way +ĠApp ro +ĠCom pl +Comm unity +Ġt ide +ĠSum mit +ä » +Ġinter vals +ĠE ther +Ġhabit at +ĠSteven s +lish ing +ĠDom ain +Ġtrig gers +Ġch asing +Ġchar m +ĠFl ower +it ored +Ġbless ing +Ġtext ures +F ive +Ġliqu or +R P +F IN +Ġ19 62 +C AR +Un known +Ġres il +ĠL ily +Ġabund ance +Ġpredict able +r ar +Ġbull shit +le en +che t +M or +M uch +ä ¹ +Ġemphas ized +Ġcr ust +Ġprim itive +Ġenjoy able +ĠPict ures +Ġteam mate +pl er +ĠT ol +ĠK ane +Ġsummon ed +th y +ram a +ĠH onda +Ġreal izing +Ġquick er +Ġconcent rate +cle ar +Ġ2 10 +ĠErd ogan +ar is +Ġrespond s +ĠB I +Ġelig ibility +Ġpus hes +ĠId aho +Ġagg rav +Ġru ins +ur ations +Ġb ans +Ġan at +sh are +Ġgr ind +h in +um en +Ġut ilities +ĠYan kees +Ġdat abases +ĠD D +Ġdispl aced +Ġdepend encies +Ġstim ulation +h un +h ouses +ĠP retty +ĠRaven s +ĠTOD AY +Ġassoci ates +Ġthe rape +cl ed +Ġde er +Ġrep airs +rent ice +Ġrecept ors +Ġrem ed +ĠC e +Ġmar riages +Ġball ots +ĠSold ier +Ġhilar ious +op l +13 8 +Ġinherent ly +Ġignor ant +Ġb ounce +ĠE aster +REL ATED +ĠCur rency +E V +ãĥ ŀ +ĠLe ad +Ġdece ased +B rien +ĠMus k +J S +Ġmer ge +heart ed +c reat +m itt +m und +ĠâĢ ĭ +ĠB ag +Ġproject ion +Ġj ava +ĠStand ards +ĠLeon ard +Ġcoc onut +ĠPop ulation +Ġtra ject +Ġimp ly +Ġcur iosity +ĠD B +ĠF resh +ĠP or +Ġheav ier +ne ys +gom ery +Ġdes erved +Ġphr ases +ĠG C +Ġye ast +d esc +De ath +Ġreb oot +Ġmet adata +IC AL +Ġrep ay +ĠInd ependence +Ġsubur ban +ical s +Ġat op +Ġall ocation +gener ation +ĠG ram +Ġmoist ure +Ġp ine +ĠLiber als +Ġa ides +Ġund erest +ĠBer ry +Ġcere mon +3 70 +ast rous +ĠPir ates +Ġt ense +ĠIndust ries +ĠApp eals +ĠN ear +Ġè£ı ç +Ġlo vers +ĠC AP +ĠC raw +Ġg iants +Ġeffic acy +E lement +ĠBeh avior +ĠToy ota +Ġint est +P riv +A I +Ġmaneu ver +Ġperfect ion +Ġb ang +p aper +r ill +Ge orge +b order +in ters +ĠS eth +Ġcl ues +ĠLe vi +ĠRe venue +14 7 +Ġv apor +Ġfortun ate +Ġthreat ens +Ġve t +Ġdepend ency +ers ed +art icle +ĠBl izzard +Ġch lor +Ġmin us +ĠB ills +Ġcryptoc urrency +Ġmetabol ism +ter ing +Ġp estic +step s +ĠTre asure +ract ed +ĠConst ant +Ġtem p +13 9 +ĠDet ective +ur ally +Ġrecover ing +Ġcort ex +Ġ14 4 +cl osed +Ġprejud ice +aun ted +Ġstorm s +ĠN OW +Ġmach inery +Add ress +Ġcompe lled +27 0 +Ġdesp air +b ane +Ġveget able +Ġbed s +Lear n +Ġcolor ful +Ġsp ike +Ġmarg ins +Ġsymp athy +Ġworks hop +ĠC BC +S at +Ġburn s +ĠG ender +Ġ12 9 +ĠC able +Ġdeb ts +ĠThe resa +Ġreflect ing +Ġa irst +Ġr im +ram id +Ġweakness es +W rit +ogg le +t i +ĠCh arge +Ġwe ighed +Ġ( . 
+Ġl aughter +Ġrou ter +ĠDemocr acy +D ear +Ġhas ht +Ġd y +Ġhint s +run ning +Ġfin ishes +ar us +M ass +res ult +asc us +Ġv intage +Ġcon qu +Ġwild ly +ac ist +Ġl ingu +Ġprot agonist +st rom +te enth +ĠSol o +m ac +f illed +Ġre nown +it ives +Ġmot ive +ĠAnt ar +ĠM ann +ĠAd just +Ġrock ets +Ġtrou bling +e i +Ġorgan isms +ass is +Christ ian +Ġ14 5 +ĠH ass +Ġsw all +Ġw ax +ĠSurv ival +V S +ĠM urd +v d +stand ard +Ġdrag ons +Ġacceler ation +r ational +f inal +Ġp aired +ĠE thereum +Ġinterf aces +Ġres ent +Ġartif acts +Å « +are l +Ġcompet itor +ĠNich olas +ĠSur face +c pp +ĠT ot +Ġeconom ically +Ġorgan ised +Ġen forced +in ho +Ġvar ieties +Ġab dom +ĠBa iley +id av +ĠSal v +p aid +Ġalt itude +ess ert +ĠG utenberg +are a +op oulos +Ġprofess ors +igg s +ĠF ate +he y +Ġ3 000 +D ist +Ġtw ins +c ill +ĠM aps +Ġtra ps +Ġwe ed +ĠK iss +Ġy oga +Ġrecip ients +ĠWest minster +Ġpool s +ĠWal mart +18 8 +ĠSchool s +att ack +ĠAR M +par agraph +W arning +j l +Ġself ish +anche z +ĠHe ights +F re +ĠS oph +Ġ -------------------------------- +t ml +33 3 +Ġraid s +Ġsatell ites +KE Y +Ġlast s +Ñ Ĥ +In s +ĠD ame +Ġunp redict +// / +gh ai +Ġart illery +Ġcru ise +Ġg el +ĠCabin et +Ġbl ows +ĠE sp +Ġprox imity +ot he +ĠSk ills +ĠU pper +ob o +ĠN DP +Ġenjoy s +Ġrepe ating +ĠConst ruction +ĠQuest ions +H illary +Ġu int +Ġprocess ors +ĠGib son +ĠMult iple +q a +ĠB om +ĠM iles +vent ional +Ġhur ts +s kin +ĠA IDS +Ġadvis ers +ĠR oot +Ġmethod ology +ĠD ale +Ġdet on +ĠKnow ledge +sequ ently +Ġ12 1 +Ġconnect s +C y +ĠD anger +Ġcontribut ors +ĠB ent +Ġbr ass +ĠGun s +int o +ĠFort une +Ġbro ker +bal ance +Ġlength s +Ġv ic +Ġaver aging +Ġappropri ately +ĠCamer a +Ġsand wich +ĠCD C +Ġcoord inate +Ġnav ig +Ġgood ness +l aim +Ġbra ke +Ġextrem ist +ĠW ake +ĠM end +ĠT iny +ĠC OL +ĠR F +ĠD ual +ĠW ine +C ase +Ġref ined +Ġl amp +L ead +Ġb apt +ĠCar b +ĠS add +ĠMin neapolis +PD F +Ear ly +ĠH idden +I ts +ĠT IME +Ġp ap +Ġcommission ed +ĠF ew +ĠCol ts +ĠB ren +Ġbot hered +Ġlike wise +Ex per +ĠSch w +c ry +n n +ĠM itch +im on +M G +b m +UM P +r ays +Ġregist ry +Ġ2 70 +ach ine +re lla +ant ing +00 000 +Ġru ined +sp ot +Ġt a +Ġmaxim ize +Ġincon ven +D ead +H uman +En abled +ĠMar ie +Ġch ill +ĠParad ise +Ġstar ring +ĠLat ino +ĠProt ocol +ĠE VER +Ġsuppl iers +m essage +ĠBro ck +Ġser um +âĸĪâĸĪ âĸĪâĸĪ +Ġen comp +Ġamb ition +ues e +Ġar rows +And rew +Ġanten na +Ġ19 61 +ĠB ark +Ġb ool +ãĤ ª +ĠSt orage +Ġrail way +Ġtoug her +ĠC ad +Ġwas hing +P y +' ] +em bed +ĠMem phis +ack le +Ġfam ously +ĠF ortunately +ov ies +Ġmind set +Ġsne ak +ĠD h +RA W +ĠSim pson +Ġliv est +Ġland mark +Ġc ement +L ow +Ġthr illed +ĠCour se +in el +Ġch uck +id ate +gl obal +Ġwh it +Ġ � +ad ays +s ki +ĠS V +Ġvir uses +30 6 +ĠResp ons +Ġthe aters +ĠBr anch +ĠGene va +ĠM K +Ġunbel iev +Ġcommun ist +Orig inal +ĠRe ceived +ĠTrans fer +ĠAr g +In put +ĠStr ategy +Ġpal ace +the ning +D ri +Ġsent encing +umbn ail +Ġp ins +re cy +Ġs iblings +Get ting +ĠB U +ĠNorth west +Ġprolong ed +ĠSak ura +C omb +ĠB our +Ġinadequ ate +ĠK ash +Ġus ername +ĠImpro ve +Ġbatt ling +ĠM AC +Ġcurric ulum +Ġs oda +ĠC annon +Ġsens ible +sp ons +De cember +Ġw icked +ĠP engu +Ġdict ators +ĠHe arts +og yn +Ġsimilar ities +ĠSt ats +Ġh ollow +it ations +": [ +Ġh over +ĠList en +s ch +S und +Ġc ad +ĠPar ks +Ġl ur +Ġhy pe +ĠL em +N AME +is ure +Fr iday +Ġshoot s +Ġclos es +Ġd b +ĠR idge +ĠDiff erent +Ġrepl ies +ĠBroad way +op ers +Ġint oler +ĠZe us +akes pe +Ġpropri etary +Ġrequest ing +Ġcontro llers +ĠM IN +im edia +be cca +Ġexp ans +Ġoil s +B ot +ĠCh and +Ġpr inter +Ġto pped +ĠP OL +ĠEar lier +S ocial +av in +Ġdecre ases 
+ĠSe b +Ġspecific ations +ĠBl ast +ĠK urt +Ġfre el +B rown +Ġdil ig +ro e +ĠPro blem +ĠQu ad +Ġdecent ral +ĠV ector +an ut +Ġplug ins +ĠGreg ory +Ġfuck ed +el ines +ĠAmb assador +t ake +Ġcle ans +ong yang +An onymous +st ro +" } +al ine +ĠO dd +ĠE ug +2 16 +Ġbo il +ĠP owers +Ġnurs es +Ob viously +ĠTechn ical +Ġexceed ed +OR S +Ġextrem ists +Ġtr aces +ex pl +Ġcom r +ĠS ach +) / +Ġm asks +Ġsc i +B on +Ġreg ression +we gian +Ġadvis or +it ures +ĠV o +ex ample +ĠInst ruct +Ġs iege +Ġredu ctions +pt r +Ġstat utory +Ġrem oves +Ġp uck +red its +Ġbe e +Ġsal ad +Ġpromot ions +ĠJosh ua +with standing +ET H +ĠCh a +im us +Ġexpend iture +aun ting +Ġdelight ed +Ġ15 5 +be h +Ġcar pet +ĠSp art +Ġj ungle +l ists +Ġbull ying +ĠNob el +ĠGl en +Ġreferen ced +Ġintrodu ces +se in +Ġcho pped +gl ass +ĠW rest +Ġneutral ity +Ġâ Ļ +Ġinvestig ator +Ġshel ves +Ġun constitutional +Ġreprodu ction +Ġmer chant +m ia +Ġmet rics +Ġexplos ives +ĠSon ia +Ġbod ily +Ġthick ness +Ġpredomin antly +ĠAb ility +Ġmon itored +IC H +Ġ] . +ĠMart inez +Ġvis ibility +Ġqu eries +Ġgen ocide +ĠWar fare +Qu ery +Ġstud ios +Ġemb ry +Ġcorrid or +Ġclean ed +com plete +ĠM H +Ġenroll ment +ING S +Ġimpact ed +Ġdis astrous +ĠY un +ĠCl aire +ĠBas ically +y t +uster ity +Ġindirect ly +w ik +Ġd od +ĠCar r +Ġam p +Ġprohib it +ĠIn itial +ĠR d +ij i +Ġeduc ate +c orn +i ott +ĠBeaut y +Ġdetect ive +ĠCon n +s ince +Ġst agger +Ġob ese +Ġb ree +olog ic +is se +walk er +Ġbl ades +Ġlaw ful +fun c +ĠBeh ind +Ġappet ite +Ġ( * +Ġt ennis +Ġoff spring +Ġj ets +Ġstruct ured +Ġafore mentioned +N ov +Ġsc aling +f ill +Ġst ew +Ġcur b +ĠStep han +ed In +S F +ob ic +é ŃĶ +ou g +ĠM M +Ġgen etically +ope z +13 6 +Ġu mb +anc ers +Ġcoh ort +Ġmerch andise +Ġimp osing +ĠLegisl ature +ĠArch ive +iv ia +ĠN aval +Ġoff ences +Ġmir acle +Ġsn apped +Ġf oes +Ġextensive ly +ĠR af +Ġc ater +ed ience +K it +ĠB in +Ġrecomm ends +ĠC ities +Ġrig id +ĠRE AD +ĠNob le +ĠT ian +Ġcertific ates +ant is +o iler +ĠBudd hist +d id +Ġsurvey ed +Ġdown ward +Ġprint s +ĠMot ion +ron ics +ĠS ans +oss ibly +u ctions +Ġcolon ies +ĠDan ish +un it +Ġsp oil +Ġadvis ory +ber ries +Pl an +Ġspecific ation +op hers +ĠRes ource +Ġsh irts +prising ly +commun ications +Ġtriv ial +Ġmention ing +ise xual +Ġsupp lements +Ġsuper vision +B P +v or +Ġw it +Ġco oldown +Ġplaint iff +ĠReview s +ĠS ri +ĠM int +ĠSug ar +Ġafter ward +ĠPri est +ĠInvest ment +og ene +ĠT aking +Ġstretch ing +Ġinflamm ation +ĠTe hran +Ġl ining +Ġfree zing +ĠEnt ity +Ġins piring +spe cial +pr ice +Ġsu e +ĠP orter +oun ge +ET A +ĠD erek +ĠLu is +u o +ym ph +Ġex terior +ih il +ĠAsh ley +in ator +Ġnut rients +ĠTh rones +Ġfin ances +ĠIn spect +Ġspe cially +ĠRequ ired +ĠP TS +ĠViol ence +oint ed +sh ots +Ġex cerpt +co on +IN S +ĠG ri +Ġrecogn ised +We ek +You ng +Ġv om +is le +ĠCur ry +ĠBudd h +Ġnot ebook +Ġd urable +/ ? 
+ĠG ad +ĠP upp +Ġforg ive +p ark +Ġpersonal ities +an alysis +cl amation +Ġelev ator +Ġware house +ĠR ole +un n +Ġillust ration +ĠSc an +Ġatmosp heric +Im port +AN C +rict ed +f u +01 0 +Ġar che +Ġreward ed +akespe are +Ġintern ally +ĠR BI +alk er +Ġeleph ant +ow itz +ĠP izza +Ġbip artisan +é s +Ġslow ed +ĠSt ark +Ġover ride +OU S +Ġ3 20 +undred s +ĠDe ck +ĠC ensus +be e +14 6 +ot or +Ġ ip +Ġu b +oc ations +ĠBut ton +r ice +Ġc ripp +ff f +Ġorig inated +Ġoverwhel med +app a +Ġfore most +âĢ ij +ĠL EG +re lease +eat ured +at ches +Ġre ps +Ġl ending +ĠRe ference +ĠCl ient +16 5 +vent h +Com plete +ĠPat rol +Ġsw orn +c am +Ġshut tle +ĠR alph +Ġh ometown +- , +on al +ĠB P +å ı +Ġpersu ade +ĠAlex and +Ġcomb ines +Ġv ivid +ĠL ag +Ġenc oding +Ġsal vation +w en +ĠRec overy +i ya +Un iversity +ĠB iden +Ġbud gets +ĠTex ans +f its +Ġhon ored +Ġp ython +T D +## # +cl one +Ġbl ink +ĠL iquid +Ġunemploy ed +Ġcl ashes +ĠCoun sel +Ġdirect ing +Ġpun ct +ĠFal cons +Ġsh ark +ĠDam ascus +Ġje ans +Ġemb ark +Ġse ize +Ġup wards +2 80 +ĠE z +ĠAny thing +Ġex otic +l ower +ĠCreat or +ĠU m +Ġsubur bs +ber ger +ĠW end +Ġm int +ĠX X +ĠD ro +Ġsuff ers +Ġher b +t ree +Ġfrag ile +Ġflood ed +ĠAl cohol +ole an +ny der +ĠK O +F ram +Ġ13 6 +Ġow ed +ĠMe lee +ĠH ash +Ġwh isk +Ġsu do +r r +Qu ick +app ro +Ġi i +ĠEx amples +he e +Ġpromot es +per ature +k ar +ĠHon or +Ġs odium +ĠL if +ros so +intend ent +Ġcorrespond ent +F ound +sec ret +Ġident ifies +ag ne +Ġl ou +ĠP P +Ġcoinc idence +m ove +Ġmilit ia +Ġinf iltr +ĠPrim ary +Ġpitch ing +ĠI b +ĠGO OD +ãĤ ¸ +ĠW izards +ir al +ĠVen us +R R +ĠâĢ ķ +ĠCase y +Ġsad ly +Ġadm ire +Ġembarrass ed +c b +M el +Ġtub es +Ġbeaut ifully +ĠQueens land +Bel ow +re z +qu et +ple asant +Ġ « +C amp +Ġdec isive +19 98 +ĠL amb +ut ton +h n +ĠJ agu +au nder +ĠC ord +Ġcl erk +Ġca ffe +Ġwip ed +Ġre im +ĠMount ains +Ġimprison ed +Ġdevelop s +ĠP ra +Ġmodel ing +Any one +ance l +ĠS it +Ġshield s +Ġl awn +Ġcard iovascular +Ġdemonstr ating +Ġpar se +ĠIsrael is +Ġeuro s +14 3 +Ġgl orious +ins ki +ec d +Ġcondition ing +Ġhel pless +Ġmicro sc +ĠHar bor +Ġst akes +Ġ2 60 +Ġun equ +ĠFl oyd +Ġd amp +Ġappar atus +ĠLaw s +Ġcoun ters +Ġindu ce +at able +ĠAh med +Ġsl am +N ovember +Ġpers ist +Ġim minent +á n +Ġsh red +Ġph ases +ĠEd monton +ĠArm strong +ĠMe et +ĠK itty +Ñ Ģ +c irc +ĠAd ult +Ġa rose +ĠX en +D an +g ow +Ġsuper f +ĠAd mir +Ġend ure +Ġkey word +yr us +Ġy arn +Ġpath way +ĠHop kins +mid t +Ġcens orship +d ependent +Ġinstruct or +S ources +Ġto e +Ġball oon +N ob +Ġsw ear +ĠCast ro +Ġgl oss +ĠK avanaugh +Ġremark ably +Ph otos +ĠN om +ĠS outheast +y ers +Ġvalid ation +Ġcann on +ĠVict ory +ĠPier re +Ġcaut ious +Aud io +Ġf etch +ĠG ift +ĠH yp +Ġrem edy +Z E +Ġsc ent +Ġbe ard +ĠR ut +- " +Ġpat ents +H y +Ġun just +Ġpot ato +Ġforth coming +Ġche f +ĠR ift +aff e +ĠR OM +ĠL aunch +Ġp ads +ĠNe o +Ġon set +Ġsquee ze +s afe +Ġpref ix +ĠT M +ĠN early +ĠClin ical +ĠM ental +ot iation +ĠUn ic +ant ry +ĠC ir +Ġep it +à ¦ +Ġextract ed +verse ly +ri ad +Ġstr ains +Ġto ps +Ġpo em +ĠRand y +ĠMap le +TH ER +up iter +ĠSS D +ļ é +Ġun con +per ing +Ġsle pt +in ers +Ġunder water +ĠEv idence +g one +20 5 +Ġhistor ians +Ġsynt hesis +Ġf rog +b asketball +Ġvibr ant +Ġsub ord +Ġ3 65 +ĠD ial +Ġcooper ate +HA HA +Ġgreet ed +15 8 +Ġj azz +Ġinto x +ĠWalk ing +Ġsuper visor +ĠF usion +ĠMer cedes +s end +H am +s d +n l +Ġtour s +ĠF IFA +Ġcul p +g d +30 4 +Ġple as +Ġillust rates +ĠColomb ia +Ġhighlight ing +ĠSum mary +Ġexp osing +ĠD ru +Ġir ony +r itional +ĠCar roll +ĠEll is +P ict +ĠR apt +Ġad apter +Ġun m +Ġcor pse +Ġceleb rities +D en +at um +ĠAp 
ocalypse +ĠW ag +lin ing +Ġhorm ones +R ub +ĠX i +ĠV aults +20 8 +alky rie +inos aur +Ġfeed s +v ity +Ġdefe ating +W ait +Ġemphas ize +ĠSteel ers +yr inth +le ys +ĠWhe never +Current ly +ĠCl ock +Ġcollect ively +any on +ĠJ P +Ġment ality +Ġdownload s +Ġsurround ings +ĠBarn es +Ġflags hip +Ġindic ators +Ġgra pp +Jan uary +ĠElement al +ĠAthen a +ib al +Ġs ights +Ġcap ita +ĠTreat y +Ġvo iced +ĠG az +let te +Ġy a +Ġexp ired +Leg end +H ot +n ature +Ġunst able +Ġ2 80 +à º +Com ment +AL E +Ġquest s +Ġhand ler +n is +Ġvers atile +Ġconce al +enge ance +ĠInter active +Ġobs essed +ĠDog s +Ġcr acked +S ound +s v +ĠD ylan +ro ads +f x +ĠCath olics +ĠH ag +Ġsl ammed +Ġgl owing +s ale +Ġtiss ues +ĠCh i +ne e +Ġc her +s ic +ur rection +Ġb acon +ul atory +) ." +Ġir regular +FOR M +ass ed +Ġintention al +Ġcompens ate +ĠSpe aking +ĠS ets +15 3 +Ġconvent ions +b ands +em ade +Ġe cc +ĠWin ston +ĠAssass in +ĠBelg ian +Ġdepend ence +Ġnic he +Ġb ark +ĠJ azz +Ġdisadvant age +Ġgas oline +Ġ16 5 +çļ Ħ +ess a +mod ule +ang ular +O Y +ĠTreat ment +it as +ol ation +ĠArn old +Ġfe ud +ĠN est +Ġthe atre +ew ater +Ġmin ors +olic y +ĠH aven +div ision +Ġtr unk +F ar +ĠP ull +Ġcapt uring +Ġ18 00 +ĠTe en +Ġex empl +Ġclin ics +ĠB urg +Ġsubst it +Ġpay load +ĠL av +ĠT roy +ĠW itness +Ġfrag ments +Ġpass words +Ġg ospel +ĠG in +Ġten ants +ol ith +S ix +Pre vious +ĠAg es +ĠDar win +Ġbl at +Ġem pathy +sm ith +b ag +ĠE cho +ĠC amb +ĠM add +ĠB oo +Ġred e +ĠBurn ing +Ġsmooth ly +ĠAd rian +ĠV ampire +ĠMon sters +ste am +Sty le +M a +re a +ĠD war +aly st +urs or +Ġelim ination +Ġcrypt o +ch t +ĠE ternal +âĢ¦ ] +ĠS orce +I ll +N ER +Ġu h +Con clusion +w age +Ġresp ir +Ġrem inis +het ical +Ġg y +Ġutil ized +ic idal +Ġ19 00 +Ġhun ters +ĠSw an +ĠRe act +Ġvis itor +ĠThanks giving +30 8 +Post s +Ġh ips +19 97 +om ers +Ġkn ocking +ĠVeh icle +Ġt il +Ġ13 8 +Ġm i +ĠInvest igation +ĠKen ya +Ġcas ino +Ġmot ives +Ġreg ain +re x +Ġweek ends +Ġstab bed +bor o +Ġexplo ited +ĠHA VE +ĠTe levision +c ock +Ġprepar ations +Ġende av +ĠRem ote +ĠM aker +ĠPro du +ĠEv an +Ġinform ational +ĠLouis ville +15 4 +ĠDream s +Ġpl ots +ĠRun ner +Ġhur ting +Ġacad emy +ĠMont gomery +n m +ĠL anc +ĠAl z +2 10 +el ong +Ġretail er +Ġar ising +Ġrebell ion +Ġbl onde +play ed +Ġinstrument al +C ross +Ġret ention +Ġtherape utic +Ġse as +Ġinfant ry +ĠCl int +Ġprompt ing +Ġbit ch +Ġst ems +ĠK ra +Ġthe sis +ĠB og +ru ed +Ġk ings +Ġcl ay +ific ent +ĠY ES +ĠTh ing +ĠCub s +vey ard +els h +in arily +ĠE y +ĠRoll ing +Ġev olving +Ind ia +Ġrecogn izes +Ġgrad uation +is ers +Ġfert ility +ĠMil an +Comm and +Ġbox ing +Ġ19 43 +Ġgl uten +ĠEm ir +Ġid ol +Ġcon ceived +ĠCre ation +Mer it +udd y +uss ions +ĠLie utenant +iet al +Ġunch anged +ĠSc ale +ĠCrime a +ball s +ator ial +Ġdepth s +Ġempir ical +Ġtrans m +Ġuns afe +miss ible +com fort +15 6 +Ġmechan ic +00 2 +l ins +Ġsm oked +P os +Ġslow ing +Ġl av +Tex as +Ġche ating +ĠMet ropolitan +eth yl +Ġdiscover ing +as se +Ġpen cil +ĠPy ongyang +Ġclos et +ĠShe et +ĠEnt ry +ou stic +Ġmy st +er ate +ari at +Ġminer als +Ġmusic ian +ĠP ul +ĠM az +24 9 +Ġper missions +Ġ iv +en ary +ick ers +ĠB ing +he a +en able +Ġgri ev +Ġassert ed +ĠColon el +Ġaff idav +w o +Ġse ated +ĠR ide +Ġpaint ings +ĠP ix +Ġ13 7 +ish i +umb ai +g otten +ĠEar l +Ġin ning +Ġc ensus +Ġtrave lled +ĠCons ult +18 5 +b ind +Ġsimpl icity +Ġoverlook ed +ĠHelp ful +Ġmon key +Ġoverwhelming ly +Bl ood +ĠFl int +ĠJ ama +ĠPres ent +ĠR age +ĠT A +pt ive +Ġturn out +w ald +ĠD olphins +ĠV PN +Ġon ion +Ġcraft ing +m ma +ĠMerc ury +Ġarr ange +Ġalert s +ĠO T +zb ollah +Ġg ases +ĠRichards on +s al +l ar 
+Ġfro st +Ġlower ing +Ġacc laim +Ġstart ups +ĠG ain +ess ment +Ġguard ian +äº º +ĠP ie +ĠL inks +Ġmer its +Ġaw ake +Ġparent al +Ġexceed s +Ġid le +ĠPil ot +Ġe Bay +ĠAc cept +ipe g +C am +ĠK ot +Ġtrad ers +olit ics +unk er +ĠP ale +os i +an mar +Ġ19 47 +ĠF ell +est ial +it ating +G F +ĠS r +if ted +Ġconnect or +ĠB one +ill es +2 60 +h ma +Ġoverl ap +ĠGit Hub +Ġclean er +ĠBapt ist +ĠW AS +Ġlung s +Ñ ģ +ĠB UT +Ġc ite +Ġpit ched +reat ment +Ġtro phies +ĠN u +38 6 +ĠPr ide +Ġattend ees +[ ] +17 9 +Ġspat ial +Ġpri zes +ĠRel igion +Ġshow case +ĠC ategory +vid ia +T arget +Pro perty +? , +Ġf usion +p ie +ĠU CLA +Ġsound track +Ġprin cess +ĠC aval +sh ould +Ġlim bs +Back ground +Ġlone ly +Ġc ores +ĠT ail +she et +Ġ13 2 +R a +ãĤ « +ĠB olt +Ġbook ed +Ġadmin ister +Ġequ als +w y +Ġobserv ing +ĠBar on +ĠAd obe +Ġv irgin +ĠSocial ist +M ove +gh azi +ĠLind a +2 12 +Ġbre wing +Ġmerch ants +bur se +Ġdiv or +Ġmet als +ĠN er +Ġsum s +ĠEn emy +Ġen vision +Ġgrant ing +ĠH oney +ĠSk yrim +Ġsoc io +gr aded +Ġselect ive +W ASHINGTON +Ġ19 48 +ĠSir ius +ĠG ross +act ivity +ĠI van +Ġfur ious +BS D +ĠPre vious +Ġrespons ive +Ġchar itable +Ġle aning +ĠP ew +Ġviol ates +\\\\ \\\\ +ĠCom ing +w ire +Ġpo et +Ġres olutions +comm and +ĠPortug uese +Ġnick name +Ġde af +Feb ruary +Ġrecogn ise +Ġentire ty +Ġseason al +pl aced +ĠTe legraph +Ġmicro phone +our ing +Ġgr ains +Ġgovern ed +Ġpost p +ĠW aters +in ement +Ġund ocumented +ĠCom cast +Ġf ox +Ġassault s +re on +man y +ĠJen kins +ĠAny way +Ġassess ments +Ġdown s +ĠM ouse +Ġsuper b +k t +ĠD ow +Ġtax ation +4 01 +Ġsm iles +Ġundert aken +Ġex h +Ġenthusi astic +Ġtw ent +Ġgovernment al +Ġautonom y +ĠTechn ologies +ĠCh ain +Ġpreval ent +f b +Ġnic otine +og ram +j ob +Ġawa iting +ĠMen u +Ġdep uties +k ov +ish ops +But ton +ĠShan ghai +Ġdies el +ĠD uck +R yan +ĠPC s +N F +j ury +ent e +Ġinacc urate +edd y +Wh atever +Ġshow c +ĠN ad +od us +et r +Ġplaint iffs +ĠW OR +ĠAss ange +Ġpriv at +Ġpremium s +Ġt am +UR L +Ġel ites +ĠR anger +otten ham +ĠH off +ĠAt hens +Ġdefin ite +Ġs ighed +Ġeven ly +2 11 +ĠAm ber +ak ia +Ġmail ing +Ġcr ashing +ĠConfeder ate +ru gged +W al +ĠDep ths +Ġjuven ile +Ġreact or +Introdu ction +ĠDel uxe +19 95 +ĠS anchez +ĠM ead +iv able +: - +ĠPlan ning +ĠT rap +qu in +ĠProt ect +ve red +In formation +Ġkid ney +inn amon +l as +Ġpolic ing +Ġtoler ate +ĠQ i +Ġbi ased +F ort +ĠK i +s ave +Ġprivile ged +Ġbe asts +ĠGl as +ĠC inem +Ġcome back +Sund ay +Ġext inction +h ops +Ġtrans mit +Ġdoub les +ĠFl at +16 7 +Ġdis puted +Ġinjust ice +f oo +V ict +role um +ĠJul ie +Con text +ĠR arity +iss ue +Comp onent +Ġcounsel ing +an ne +d ark +Ġobject ions +u ilt +Ġg ast +Ġpl ac +Ġun used +ãĥ ĩ +ĠT rial +ĠJ as +hed ral +ob b +Ġtempor al +ĠPR O +ĠN W +ĠAnn iversary +L arge +Ġther m +Ġd avid +Ġsystem ic +ĠSh ir +m ut +ĠNe pt +add ress +Ġscan ning +Ġunderstand able +Ġcan vas +C at +ĠZ oo +Ġang els +L O +ĠStat ement +ĠS ig +ov able +ĠA way +sh aring +ocr ats +st ated +Ġweigh ing +N or +w ild +B ey +Ġaston ishing +ĠReyn olds +Ġop ener +Ġtrain er +Ġsurg ical +p n +Ġadjust ing +whe el +Ġf rown +erv ative +Ġsusp end +With in +te in +Ġobst acle +Ġliber ties +ym es +Ġur anium +ans om +an ol +ub a +ĠL oss +Ġa rous +ĠHend erson +W ow +s pl +c ur +ĠÂ Ń +Ġtheir s +Dam age +Ġdownload ing +Ġdisc ern +ĠSt o +ĠFl a +Ġh ath +ĠA j +Ġun pleasant +Europe an +exp ensive +Ġscreens hot +ĠU V +Ġall ied +ĠPers ian +Ġmonop oly +Ġat om +ĠReds kins +"> < +Ġcan cell +Ġcinem a +13 1 +f air +ĠAlf red +Ġd uck +arg s +22 3 +ĠIS I +Ġsign aling +in ar +Ġlaugh s +Ġfor wards +Ġreck less +Ġlisten ers +at ivity +Ġvast ly +n 
ant +L ess +ĠHun ting +ĠScient ific +IT ED +Ġkn ight +ĠH TC +us a +t mp +Ġr ude +ĠLegend ary +Ġar ises +B ad +ĠCl aim +pe g +Ġreal ities +Th ink +Ġ ° +Ġro de +Ġstri ve +Ġan ecd +Ġshort s +Ġhypot hes +Ġcoord inated +ĠGand hi +ĠF PS +R ED +Ġsuscept ible +Ġshr ink +ĠCh art +Hel p +Ġ ion +de ep +rib es +ĠK ai +ĠCustom er +Sum mary +Ġc ough +w ife +Ġl end +Ġposition ing +Ġlot tery +ĠC anyon +Ġf ade +Ġbron ze +ĠKenn y +Ġbo asts +ĠEnh anced +rec ord +Ġemer gence +Ġa kin +ĠB ert +it ous +âĸ ij +Ġst ip +Ġexch anged +om ore +als h +Ġreserv oir +Ġstand point +W M +Ġiniti ate +Ġdec ay +Ġbrew ery +Ġter ribly +Ġmort al +lev ard +Ġrev is +N I +el o +Ġconf ess +ĠMS NBC +Ġsub missions +Cont roller +Ġ20 2 +ĠR uth +} ); +ĠAz ure +Ġ ." +20 6 +ĠMarket ing +Ġl aund +ien cies +Ġrenown ed +ĠT rou +ĠN GO +ble ms +Ġterr ified +Ġwar ns +Ġper t +Ġuns ure +4 80 +ale z +ult z +ĠOut side +Ġst yl +ĠUnder ground +Ġp anc +Ġd ictionary +Ġf oe +rim inal +ĠNor wegian +Ġj ailed +Ġm aternal +é e +ĠLu cy +c op +Ch o +Ġuns igned +ĠZe lda +ĠIns ider +ĠContin ued +Ġ13 3 +ĠNar uto +ĠMajor ity +16 9 +ĠW o +ãĤ ĵ +Ġpast or +Ġinform al +Ð ½ +an throp +jo in +ãģ Ĺ +it ational +N P +ĠWrit ing +f n +ĠB ever +19 5 +Ġy elling +Ġdr astically +Ġe ject +Ġne ut +Ġth rive +ĠFre qu +ou x +Ġpossess es +ĠSen ators +ĠD ES +ĠSh akespeare +ĠFran co +ĠL B +uch i +Ġinc arn +Ġfound ers +F unction +Ġbright ness +ĠB T +Ġwh ale +ĠThe ater +m ass +ĠD oll +S omething +Ġecho ed +ĠHe x +c rit +af ia +Ġgodd ess +Ġele ven +ĠPre view +ĠAur ora +Ġ4 01 +uls ive +ĠLog an +in burgh +ĠCent ers +ĠON LY +ĠA id +Ġparad ox +Ġh urd +ĠL C +D ue +c ourt +Ġoff ended +Ġeval uating +ĠMatthew s +Ġto mb +Ġpay roll +Ġextra ction +ĠH ands +if i +Ġsuper natural +ĠCOM M +] = +dog s +Ġ5 12 +ĠMe eting +Rich ard +ĠMax imum +Ġide als +Th ings +m and +ĠReg ardless +Ġhum ili +b uffer +L ittle +ĠD ani +ĠN ak +Ġliber ation +ĠA be +ĠO L +Ġstuff ed +ac a +ind a +raph ic +Ġmos qu +Ġcampaign ing +Ġoccup y +S qu +r ina +ĠW el +ĠV S +Ġphys ic +Ġp uls +r int +oad ed +ET F +ĠArch ives +Ġven ues +h ner +ĠTur bo +Ġl ust +Ġappeal ed +que z +il ib +ĠTim othy +Ġo mn +d ro +Ġobs ession +ĠSav age +19 96 +Gl obal +J es +2 14 +Ġsl iding +Ġdisapp ro +ĠMag ical +Ġvolunt arily +g b +ane y +Ġprop het +ĠRe in +ĠJul ia +ĠW orth +aur us +Ġb ounds +ie u +)) ) +Ġcro re +ĠCitiz en +S ky +Ġcolumn ist +Ġseek ers +ond o +IS A +ĠL ength +Ġnost alg +Ġnew com +Ġdet rim +ent ric +3 75 +ĠG E +Ġaut op +Ġacadem ics +App Data +ĠS hen +Ġid iot +ĠTrans it +Ġteasp oon +W il +K O +ĠCom edy +> , +Ġpop ulated +W D +Ġp igs +ĠO culus +Ġsymp athetic +Ġmar athon +19 8 +Ġseiz ure +s ided +Ġd op +irt ual +L and +ĠFl oor +osa urs +... 
] +Ġl os +Ġsubsid iary +E Y +ĠPart s +ĠSt ef +ĠJud iciary +Ġ13 4 +Ġmir rors +Ġk et +t imes +Ġneuro log +Ġc av +ĠGu est +Ġtum or +sc ill +ĠLl oyd +E st +Ġcle arer +Ġstere otypes +Ġd ur +not hing +Red dit +Ġnegoti ated +---------------- -------- +23 5 +Ġfl own +ĠSe oul +ĠRes ident +ĠS CH +Ġdisappear ance +ĠV ince +g rown +Ġgrab s +r il +ĠInf inite +ĠTw enty +Ġpedest rian +Ġjer sey +ĠF ur +ĠInf inity +ĠEll iott +Ġment or +Ġmor ally +Ġob ey +sec ure +iff e +Ġantib iotics +ang led +ĠFre eman +ĠIntrodu ction +J un +Ġm arsh +ic ans +ĠEV ENTS +och ond +W all +icult y +Ġmisdem eanor +Ġl y +Th omas +ĠRes olution +Ġanim ations +ĠD ry +Ġinter course +ĠNew castle +ĠH og +ĠEqu ipment +17 7 +Ġterrit orial +Ġarch ives +20 3 +Fil ter +ĠMun ich +Ġcommand ed +ĠW and +Ġpit ches +ĠCro at +Ġrat ios +ĠM its +Ġaccum ulated +ĠSpecific ally +Ġgentle man +acer b +Ġp enn +Ġa ka +ĠF uk +Ġinterven e +ĠRef uge +ĠAlz heimer +Ġsuccess ion +oh an +d oes +L ord +Ġsepar at +Ġcorrespond ence +Ġsh iny +P rior +Ġs ulf +Ġmiser able +Ġded ication +( ). +Ġspecial ists +Ġdefect s +ĠC ult +ĠX ia +Ġje opard +ĠO re +Ab ility +Ġle ar +Ġamb itions +ĠB MI +ĠArab s +Ġ19 42 +Ġpres ervation +ific ate +Ġash amed +l oss +ĠRest aur +Ġrese mble +Ġen rich +ĠK N +ĠCl an +fl oat +Ġplay able +IT T +Ġharm ony +arr ison +ĠWe instein +w ere +Ġpoison ing +ĠCom put +ĠWord Press +m ajor +ĠVal ve +F an +ĠTh row +ĠRom ans +ĠDep ression +ad os +Ġtort ured +Ġbal ancing +bott om +Ġacqu iring +ĠMon te +ard i +Ġa ura +Ġ# # +ĠStand ing +ĠAtl as +C F +Ġintr ins +ĠBen ghazi +Ġcamp ing +Ġt apped +bl ade +st rous +ĠR abb +ĠW ritten +t ip +ĠNe igh +ster dam +ĠAll ow +ĠHe aling +ĠR hod +n um +Ġcaffe ine +ĠPer cent +Ġbo o +Ġapp les +30 5 +Ġwel coming +Ġappl aud +Ġa usterity + ± +ĠRe ality +ef e +å ® +Ġsu cks +Ġtab s +ĠPay Pal +Ġback pack +Ġgif ted +abul ary +ĠSc out +ir teen +Ġch in +Ġo mitted +Ġnegative ly +Ġaccess ing +ĠE arn +Ġambul ance +Ġhead phones +Ġ20 5 +ĠRef resh +p resident +ĠKit chen +ĠEnt ered +ĠS nyder +00 5 +om ical +Ġborrow ed +ĠN em +Ġav iation +Ġst all +rim ination +Ġuniform s +it ime +ĠSim mons +ener gy +ab lished +y y +qual ified +Ġrall ies +ĠSt uart +fl ight +Ġgang s +r ag +Ġv ault +lu x +ĠCom par +Ġdesign ation +20 9 +ĠJ os +d ollar +z ero +Ġwell s +30 3 +Ġconstitu ents +Ġhe ck +Ġc ows +Ġcommand ers +Ġdifferent ial +ĠC atherine +29 9 +Ġval ve +Ġbr ace +Ġperspect ives +c ert +f act +icular ly +ĠMc N +pl anes +Ġint ric +Ġpe as +ov an +Ġtoss ed +ret ch +ĠL opez +Ġunf amiliar +de ath +ĠA part +ĠCh ang +Ġrelie ved +rop he +Ġair ports +Ġfre ak +ut il +M ill +ĠCh in +ĠOw en +m ale +ĠBro ken +ĠWind s +ro b +r ising +Ġfire fighters +Ġauthor itarian +Ġ14 8 +Bit coin +ex ternal +Ġbrow sers +iche ver +or ian +Ġun b +Ġpo ke +ĠZ ot +M id +ĠPop ular +Ġco vert +Ġcont ributes +Ġ6 50 +Ġcont ention +G ate +Ġcons oles +Ġchrom os +ĠI X +Ġvis ually +ĠE isen +Ġjewel ry +Ġdeleg ation +Ġacceler ate +ĠR iley +Ġsl ope +Ġind oor +it ially +Ġhuge ly +Ġtun nels +Ġfin ed +Ġdirect ive +Ġfore head +ustom ed +Ġsk ate +Mus ic +g as +Ġrecogn izing +am bo +Ġover weight +ĠGr ade +Ù Ĭ +Ġsound ing +Ġlock ing +ĠR EM +St ore +Ġexc av +ĠLike wise +ĠL ights +Ġel bow +ĠSupp ly +w ic +Ġhands ome +19 94 +C oll +Ġadequ ately +ĠAssoci ate +Ġstri ps +Ġcrack down +Ġmar vel +ĠK un +Ġpass ages +@@ @@ +ĠT all +Ġthought ful +names e +Ġprost itution +bus iness +Ġball istic +person al +c ig +iz ational +R ound +ĠÂłĠÂł ĠÂłĠÂł +ĠCole man +Ġadm itting +ĠPl ug +Ġbit coins +ĠSu z +Ġfair ness +Ġsupp lier +Ġcatast rophic +ĠHel en +o qu +M arc +ĠArt icles +g ie +Ġend angered +Ġdest iny +ĠVol t +ol ia +ax is 
+Ġche at +Ġun ified +IC O +qu ote +30 2 +ĠS ed +Ġsupp ression +Ġanaly zing +Ġsqu at +Ġfig uring +Ġcoordin ates +Ġch unks +Ġ19 46 +Ġsub p +Ġw iki +ĠFor bes +ĠJ upiter +ĠE rik +im er +ĠCom mercial +\ ) +Ġlegitim acy +Ġd ental +ĠMe an +Ġdefic its +5 50 +Orig inally +ĠHor ror +Ġcontam ination +ll ah +Ġconf isc +ĠCl are +T B +ĠF ailed +an ed +Ġrul er +ĠCont roller +Ġfemin ists +F ix +g ay +20 7 +Ġr abbit +Th ird +ownt own +Ġgl ue +Ġvol atile +Ġsh ining +Ġf oll +Ġimp aired +Ġsup ers +æ Ī +Ġcl utch +ļé ĨĴ +Ġpro let +Ġ( ! +Ġy elled +ĠK iev +ĠEr n +ĠSh ock +K B +Ġsit uated +qu ery +ĠN as +Ġan nex +char acter +ĠHol iday +Ġautom ation +ĠJ ill +ĠRem astered +Ġl inem +Ġwild erness +ĠHor izon +ĠGu inea +A Z +Ġmain land +Ġsec recy +LE ASE +Ġp unk +ĠProv ince +( ), +Spe ed +Ġhand ing +ĠSeb ast +S ir +r ase +Ġj ournals +Ġcon gest +ĠT ut +ir rel +Ġschizophren ia +Ġmis ogyn +health y +I ron +Ġreact ed +- $ +25 2 +Ġpl ural +Ġpl um +Ġbarg ain +Ġground ed +f inder +Ġdis se +ĠL az +O OD +Ġat roc +F actory +Ġmin ions +Ġo ri +ĠB rave +ĠP RE +ĠMy anmar +ĠH od +Ġexped ition +Ġexpl ode +ĠCo ord +Ġext r +ĠB rief +ĠAD HD +Ġhard core +feed ing +Ġd ile +ĠF ruit +Ġvacc ination +ĠM ao +osp here +Ġcont ests +- | +Ġf ren +isp here +R om +ĠSh arp +ĠTre nd +Ġdis connect +âĢ¢ âĢ¢ +Ġper secution +Ear th +Ġhealth ier +38 4 +Ġc ob +ĠTr inity +OW S +AN N +Ġspecial ty +Ġg ru +Ġcooper ative +wh y +Start ing +ĠIss ues +st re +ens or +Ġ18 5 +Ad v +! ? +ĠRe vel +em ia +ĠH ulk +Ġcelebr ations +ĠS ou +ra ud +ĠKle in +Ġun real +con text +Ġpartners hips +Ġadop ting +t ical +Ġspl ash +ĠHe zbollah +c ategory +cycl op +xt on +ĠD ot +urd y +t z +Ġenvelop e +ĠN L +â ķ +Ġwhere in +Spe c +18 4 +Ġte lev +al iation +Ġmyth s +å ° +Ġrig orous +Ġcommun icating +Ġobser ver +Ġre he +ĠW ash +Ġapolog ized +ĠT in +Ġexpend itures +work ers +d ocument +Ġhes itate +ĠLen in +Ġunpredict able +Ġrenew al +cl er +ok ia +ĠCON T +Ġpost season +Tok ens +Ġex acerb +Ġbet ting +Ġ14 7 +Ġelev ation +W ood +ĠSol omon +19 4 +00 4 +out put +Ġredu nd +ĠM umbai +Ġp H +Ġreprodu ce +ĠD uration +MA X +Ġb og +C BS +ĠBal ance +ĠS gt +ĠRec ent +Ġc d +Ġpo pped +Ġincomp et +pro p +ay an +g uy +Pac ific +Ġty r +Ġ{ { +ĠMy stic +ĠD ana +Ġmast urb +Ġge ometry +à ¢ +ĠCor rect +Ġtraject ory +Ġdistract ed +Ġf oo +ĠW elsh +L uc +m ith +Ġrug by +Ġrespir atory +Ġtri angle +Ġ2 15 +Ġunder graduate +ĠSuper ior +ch anging +_ - +Ġright ly +Ġrefere e +Ġluc rative +Ġun authorized +Ġresemb les +ĠGN U +ĠDer by +Ġpath ways +ĠL ed +Ġend urance +Ġst int +Ġcollect or +F ast +Ġd ots +Ġnational s +ĠSec urities +Ġwh ip +Par am +Ġlearn s +M agic +Ġdetail ing +m oon +Ġbroadcast ing +Ġb aked +26 5 +hol m +ĠS ah +ĠHus sein +ĠCourt esy +17 4 +Ġ14 6 +Ġge ographic +pe ace +Ġjud ging +ĠS tern +B ur +Ġstory line +G un +ĠSt ick +24 5 +30 7 +ãĤ´ ãĥ³ +ĠAdminist rator +Ġbur nt +Ġp ave +ch oes +Ex ec +Ġcamp uses +Res ult +Ġmut ations +ĠCh arter +Ġcapt ures +Ġcomp ares +Ġbad ge +S cient +Ġer ad +ier y +o i +ett es +ĠE state +Ġst rap +Ġproud ly +Ġf ried +Ġwithd rawn +ĠV oy +ph ony +It ems +ĠP ierce +b ard +Ġann otation +ant on +ill on +Im pro +... 
) +Ġhapp ier +---- -- +ad just +Ġstaff ers +Ġactiv ism +Ġper f +Ġal right +N eed +Ġcomm ence +Ġopio id +ĠAm anda +E s +ĠP ars +ĠK aw +W orks +24 8 +Ġind o +t c +end ant +ĠM oto +Ġlegal ization +OT E +Ġtask ed +Ġt sp +ĠACT IONS +16 6 +Ġrefres hing +ĠN R +ĠPere z +Ġinfring ement +S Y +List en +in ning +k u +Ġrot ate +pro gram +ar ah +Des ign +Ġ( £ +Ġst oring +Ġwar rants +Ġjud gement +ĠB rist +us ually +ph oto +ĠR an +ĠP ine +Ġoutrage ous +ĠValent ine +lu ence +ĠEvery body +Al tern +Ġrele vance +Ġtermin ated +Ġd essert +Ġfulf illed +Ġprosecut ed +ĠW ords +Ġm igrant +Ġcultiv ation +ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ +idel ity +ĠV ern +ĠLog in +Ġmetaph or +ĠT ip +Ġrecru its +ĠP ig +rib ing +Ġenthusi asts +ex per +Ġfright ening +ĠH air +ans on +str ate +Ġh i +He ight +Ġown ing +n one +Ġdis like +Ġkn ives +pher d +Ġloud ly +ĠAP Is +Dis play +ĠL ac +ĠUS S +ab l +ver ages +J ew +Ġ17 2 +ĠHist orical +at oon +ĠPhys ics +in tern +Ġwarm th +Ġto pp +D M +Ġgun man +Ġem peror +od i +ãĥ £ +in atory +ĠR ib +Ġ13 1 +ĠSat urn +ĠSh ining +Ġw aking +Qu otes +Ġcomed ian +en berg + ½ +Ġbelie vers +Ġpaper work +c ustom +Ġle v +Ġl ament +Ġpour ing +22 2 +p olitical +ĠSupp lement +m aid +Ġcruel ty +Ġt read +ys ics +A w +rit es +Ġmod ifier +ĠP osition +Ad am +l b +ub s +Ġimper fect +Ġcl usters +ĠEngine er +ĠC herry +Ġinaug uration +ĠS au +Ġembod iment +ĠUn cle +Ġover r +Ġexplos ions +c ule +ĠPrinc eton +ĠAndre a +Ġincorrect ly +Ġearn est +Ġpil gr +ĠS print +Ġslee ve +Ġhe ars +ĠAm azing +Ġbrow sing +ag in +Ġhom eland +Ġha w +Ġd iving +ist ered +17 8 +Ġbarg aining +ĠArc ade +Ġdeleg ate +ters on +................................ ................................ +ĠJackson ville +27 5 +Ġst agn +Ġad am +ĠSher man +C B +Ġsub urb +ĠFood s +Ġconver ting +ĠAr ist +Ġch ambers +l ove +Ġam ino +ĠG an +Ġmad ness +m c +ĠUS E +def ined +Ġul tr +ind ust +Ġw olves +l ance +Add itionally +Ġcr acks +as ia +ĠRe ason +ĠP ump +Ġaccident al +ĠL aser +ĠR id +Ġinitial ized +ell i +Ġun named +Ġn oun +ĠPass ed +Ġhost age +ĠEth iop +sh irts +Ġun rel +ĠEmb assy +Ġ19 41 +Ġat oms +Ġpur ported +16 4 +ĠF i +Ġgall ons +ĠMon ica +Ġp g +en ment +Ġsort ed +ĠG ospel +Ġhe ights +Ġtr aced +Ġunder going +She ll +Ġs acks +Ġproport ions +Ġhall uc +F ont +ac et +Ġwar mer +ĠIN TER +Ġgrab bing +Pl ug +Ġreal ization +ĠBur ke +Ġen chant +AT ER +ĠSe ed +Ġabund ant +F M +Ġc ivic +V s +is i +Ġv ow +Ġre per +ĠPartners hip +Ġpenet ration +Ġax e +Ġsh attered +ĠZ ombies +Ġv inyl +ĠAl ert +e on +Ġoblig ed +ĠIll ust +ĠPl aza +ĠFront ier +Ġdavid jl +ĠSer ial +ĠH av +ĠNut rition +B i +Ġâĸ Ī +ĠJ ays +lin ux +Ġhur ry +Ġv oy +Ġhop eless +ĠSte alth +Ġ ãģ +ess ors +tt le +b org +ĠSaf ari +f ell +Ġw ary +d ue +ĠAb ove +H a +E LL +Ġnot or +ĠW on +T oo +Ġoccup ations +Ġposs essions +Ġinv iting +Ġpred ators +Ġacceler ated +Ġ15 7 +uter te +ĠC ube +e ast +acc ount +G ive +Ġtrans plant +red ients +id able +Ġscreens hots +ĠG und +ĠF S +Ġtravel ers +Ġsens ory +ĠF iat +ĠRock ets +İ ĭ +_ { +F riend +Ġchar ming +AL S +Ġenjoy ment +m ph +Ġ5 000 +ĠRE G +Ù Ĩ +b ia +Ġcomp ilation +ro st +ĠV P +ĠSch ne +201 9 +Ġcop ying +M ORE +ĠFl ore +f alls +2 15 +t otal +Ġdis ciples +d ouble +Ġexceed ing +Ġsm ashed +Ġconcept ual +ĠRom ania +ĠB rent +ĠI CE +ĠT ou +Ġg rap +Ġn ails +18 9 +ãĥ ĺ +Ġproc ure +e ur +Ġconfir ming +ĠC ec +aw i +ĠEd en +Ġn g +Ġengine ered +at ics +Ġhook ed +Ġdisgust ing +ĠMur der +ãĤ ¿ +L ibrary +Ġ16 8 +Al most +hem atic +Men u +ĠNot re +ĠJ ur +Ġkidn apped +Ġhack er +ĠJ ade +Ġcreep y +Ġdraw ings +ĠSpons or +Ġcycl ists +ĠGob lin +Ġoptim ized +Ġst aged +ĠMc D 
+bet ween +A ge +en o +S ex +ĠW ide +n ings +av is +Ġincap able +ĠK ob +Ġreward ing +ĠL one +oles cent +Ġcontract ed +Ġstick y +J ose +B all +f est +ĠIn put +ĠRec ently +Ġto mat +squ are +App lication +Ġnit rogen +Ġdupl icate +ĠRec on +ĠD ear +L ondon +Ġint ra +Ġd ock +Ġout reach +ĠM illion +Ġmamm als +am pton +V AL +Ġsn aps +Ġd os +ĠWh ole +ĠRead y +T ry +ĠWinn ipeg +ear ance +Ġinc urred +ren ched +ĠNS W +il ot +rain e +Ġc ube +g ot +Ġrun way +etermin ed +ĠHaw ks +Ġsurviv or +ĠW ish +ĠD in +ĠDE F +ĠV ault +18 7 +Ġmush rooms +Ġcris p +be y +ĠDisco very +Ġdevelopment al +Ġparad igm +Ġcha otic +ĠT su +Ġ3 33 +b ons +Ġbacter ial +Ġcomm its +Ġcos mic +Ġme ga +oc ative +ĠP aint +ophob ic +Ġv ain +Ġcar ved +ĠTh ief +ĠG ul +ows hip +Ġc ites +ĠEd inburgh +Ġdimin ished +Ġacknowled ges +ĠK ills +Ġmic row +ĠHer a +Ġsen iors +Ġwhere by +H op +at ron +Ġun available +ĠN ate +Ġ4 80 +Ġsl ated +ĠRe becca +ĠB attery +Ġgram mar +Ġhead set +Ġcurs or +Ġex cluding +any e +aunder ing +eb in +Ġfeas ible +ĠPub lishing +ĠLab s +ĠCl iff +ĠFerr ari +Ġp ac +vis ible +mark ed +pe ll +Ġpol ite +Ġstagger ing +ĠGal actic +Ġsuper st +Ġpar an +ĠOffic ers +ãĢ ģ +Ġspecific s +ul us +23 9 +ĠP aste +AM P +ĠPan ama +ĠDe lete +angu ard +rest rial +Ġhero ic +ĠD y +ا ÙĦ +Ġincumb ent +Ġcr unch +t ro +Ġsc oop +Ġblog ger +Ġsell ers +ure n +Ġmedic ines +ĠC aps +ĠAnim ation +ox y +Ġout ward +Ġinqu iries +22 9 +Ġpsych ologist +ĠS ask +ev il +Ġcontam inated +ãĤ ¨ +he rence +Ġbrand ed +ĠAbd ul +z h +Ġparagraph s +Ġmin s +Ġcor related +er b +Ġimp art +Ġmil estone +ĠSol utions +ot le +Ġunder cover +Ġmar ched +ĠCharg ers +f ax +ĠSec rets +Ġr uth +we ather +Ġfemin ine +Ġsh am +Ġprest igious +igg ins +Ġs ung +hist ory +ett le +gg ie +Ġout dated +ol and +Ġper ceptions +ĠS ession +ĠDod gers +u j +ĠE ND +D oc +Ġdefic iency +Gr and +ĠJ oker +Ġretro spect +Ġdiagn ostic +Ġharm less +Ġro gue +ĠA val +E qu +Ġtrans c +ĠRoberts on +ĠDep ending +ĠBurn s +iv o +Ġhost ility +F eatures +ĵ ĺ +Ġdis comfort +ĠL CD +spec ified +ĠEx pect +3 40 +Ġimper ative +ĠReg ular +Ch inese +Ġstate wide +Ġsy mm +Ġlo ops +Ġaut umn +N ick +Ġsh aping +Ġqu ot +Ġc herry +ĠCross ref +è¦ ļéĨĴ +Stand ard +he ed +ĠD ell +ĠViet namese +Ġo st +ĠV alkyrie +O A +Ass ad +Ġreb ound +ĠTra ffic +pl aces +æ ĺ +ĠB uc +17 2 +Ġshel ters +Ġins isting +ĠCertain ly +ĠKenn eth +ĠT CP +Ġpen al +ĠRe play +he ard +Ġdial ect +iz a +ĠF Y +it cher +ĠD L +Ġspir al +Ġquarterback s +Ġh ull +Ġgo ogle +Ġto dd +ĠSter ling +ĠPl ate +Ġsp ying +mb ol +ĠReal m +ĠPro ced +ĠCr ash +Ġtermin ate +Ġprotest ing +C enter +gu ided +Ġun cover +Ġboy cott +Ġreal izes +s ound +Ġpret ending +ĠV as +19 80 +Ġfram ed +Ġ13 9 +Ġdesc ended +Ġrehab ilitation +Ġborrow ing +ĠB uch +Ġbl ur +R on +ĠFro zen +en za +Ch ief +ĠP oor +Ġtransl ates +M IN +Ġ2 12 +J ECT +Ġerupt ed +Ġsuccess es +S EC +Ġpl ague +Ġg ems +d oms +Ġstret ches +ĠSp y +Ġstory telling +C redit +ĠP ush +Ġtra ction +Ġin effective +ĠL una +Ġt apes +Ġanaly tics +erc ise +Ġprogram mes +ĠCar bon +Ġbeh old +he avy +ĠConserv ation +ĠF IR +Ġs ack +ter min +ric ks +Ġhous ed +Ġunus ually +I ce +Ġexecut ing +ĠMor oc +ed ay +Ġed itions +Ġsm arter +ĠB A +Ġout law +Ġvan ished +ib a +AL SE +ĠSil va +23 8 +C ould +Ġphilos opher +Ġevac uated +Sec ret +14 2 +Ġvis as +ãĤ ¬ +ĠM alt +ĠClear ly +ĠN iger +ĠC airo +ĠF ist +3 80 +ĠX ML +aut o +it ant +Ġrein forced +Rec ord +ĠSurviv or +G Hz +Ġscrew s +parent s +Ġo ceans +ma res +Ġbra kes +vas ive +Ġhell o +ĠS IM +rim p +Ġo re +ĠArm our +24 7 +Ġterr ific +Ġt ones +14 1 +ĠMin utes +Ep isode +Ġcur ves +Ġinflamm atory +Ġbat ting +ĠBeaut iful +L ay +Ġunp 
op +v able +Ġr iots +ĠTact ics +b augh +ĠC ock +Ġorg asm +ĠS as +Ġconstruct or +et z +G ov +Ġant agon +Ġthe at +Ġde eds +ha o +c uts +ĠMc Cl +Ġu m +ĠScient ists +Ġgrass roots +ys sey +"] => +Ġsurf aced +Ġsh ades +Ġneighb ours +Ġad vertis +oy a +Ġmer ged +Up on +Ġg ad +Ġanticip ate +Any way +Ġsl ogan +Ġdis respect +I ran +ĠT B +act ed +Ġsubp oen +medi ately +OO OO +Ġwa iver +Ġvulner abilities +ott esville +ĠHuff ington +J osh +ĠD H +M onday +ĠEll en +K now +x on +it ems +22 8 +Ġf ills +ĠN ike +Ġcum ulative +and als +I r +Ġ ì +Ġfr iction +ig ator +Ġsc ans +ĠVi enna +ld om +Ġperform ers +P rim +Ġb idding +M ur +Ġlean ed +ĠPri x +al ks +Ġ[ âĢ¦] +ĠTw itch +ĠDevelop er +ĠG ir +Ġcall back +Ab stract +Ġacc ustomed +Ġfreed oms +ĠP G +ur acy +Ġl ump +is man +,, ,, +19 92 +ĠR ED +Ġwor m +M atch +ĠPl atinum +I J +ĠOwn er +Tri via +com pl +Ġnew born +Ġfant as +O wn +Ġ19 59 +Ġsymp ath +Ġub iqu +Ġoutput s +Ġal lev +Ġpr ag +K evin +Ġfav ors +Ġbur ial +Ġn urt +so lete +c ache +Ġ15 6 +Ġunl ocks +te chn +M aking +Ġcon quer +ad ic +æ ĸ +Ġel f +Ġelect orate +ĠKurd s +ĠSt ack +ĠSam urai +Ġâ ĺħ +Ġ{ } +ĠS aid +ĠFall out +Ġkind ness +ĠCustom s +ĠBou levard +Ġhelicop ters +ot ics +ĠVe get +com ment +Ġcritic ised +Ġpol ished +ĠRem ix +ĠC ultural +Ġrec ons +Ġdo i +at em +Sc reen +Ġbar red +Com ments +ĠGener ally +Ġsl ap +7 20 +V ari +p ine +Ġem pt +Ġh ats +ĠPlay ing +l ab +a verage +form s +ĠC otton +Ġcan s +ĠD ON +ĠSom alia +C rypt +ĠIncre ases +E ver +mod ern +Ġsur geon +3 000 +Ġrandom ized +================================ ================================ +B ern +im pl +ĠC OR +Ġpro claim +th ouse +Ġto es +Ġam ple +Ġpres erving +Ġdis bel +gr and +B esides +Ġsil k +ĠPat tern +h m +Ġenter prises +Ġaffidav it +ĠAdvis ory +Ġadvert ised +ĠRel igious +se ctions +psy ch +ĠField s +aw ays +Ġhasht ag +ĠNight mare +Ġv ampire +Ġfore nsic +rosso ver +n ar +Ġn avy +Ġvac ant +ĠD uel +Ġhall way +Ġface book +ident ally +ĠN RA +Ġm att +Ġhur ricane +ĠKir by +ĠP uzzle +Ġsk irt +ou st +du llah +Ġanal ogy +in ion +Ġtomat oes +ĠN V +ĠPe ak +ĠMe yer +Ġappoint ments +Ġm asc +Ġal ley +re hend +Ġchar ities +Ġund o +Ġdest inations +ĠTest ing +"> " +c ats +* . 
+Ġgest ures +gener al +Le ague +Ġpack ets +ĠInspect or +ĠBer g +Ġfraud ulent +Ġcritic ize +F un +Ġbl aming +nd ra +Ġsl ash +ĠE ston +Ġpropos ing +Ġwh ales +Ġtherap ist +Ġsub set +Ġle isure +EL D +ĠC VE +ĠAct ivity +Ġcul min +sh op +ĠD AY +is cher +ĠAdmir al +ĠAtt acks +Ġ19 58 +Ġmem oir +Ġfold ed +Ġsex ist +Ġ15 3 +ĠL I +Ġread ings +Ġembarrass ment +ĠEmploy ment +w art +ch in +Ġcontin uation +l ia +Rec ently +Ġd uel +Ġevac uation +ĠKash mir +Ġdis position +ĠR ig +Ġbol ts +Ġins urers +4 67 +M ex +Ġret aliation +Ġmis ery +Ġunre asonable +r aining +I mm +ĠP U +em er +Ġgen ital +ãĤ ³ +ĠC andy +Ġon ions +ĠP att +lin er +Ġconced ed +Ġf a +Ġfor c +ĠH ernandez +ĠGe off +deb ian +ĠTe ams +Ġc ries +Ġhome owners +23 7 +A BC +Ġst itch +Ġstat istic +Ġhead ers +ĠBi ology +Ġmot ors +ĠG EN +ĠL ip +Ġh ates +Ġhe el +S elf +i pl +ED IT +ort ing +Ġann ot +ĠSpe ech +old emort +ĠJ avascript +ĠLe Bron +Ġfoot print +Ġf n +Ġseiz ures +n as +h ide +Ġ19 54 +ĠBe e +ĠDecl aration +ĠKat ie +Ġreserv ations +N R +f emale +Ġsatur ated +Ġb iblical +Ġtroll s +Dev ice +ph otos +Ġdr ums +ãĥīãĥ© ãĤ´ãĥ³ +N ight +f ighter +ĠH ak +ri ber +Ġc ush +Ġdiscipl inary +ba um +ĠG H +ĠSch midt +ilib rium +Ġs ixty +ĠKush ner +ro ts +Ġp und +ĠR ac +Ġspr ings +Ġcon ve +Bus iness +F all +Ġqual ifications +Ġvers es +Ġnarc iss +ĠK oh +ĠW ow +ĠCharl ottesville +ed o +Ġinterrog ation +ĠW ool +36 5 +B rian +Ġâľ ĵ +Ġalleg es +ond s +id ation +ĠJack ie +y u +Ġl akes +Ġworth while +Ġcryst als +ĠJud a +Ġcomp rehend +Ġfl ush +Ġabsor ption +ĠO C +Ġfright ened +ĠCh ocolate +Mart in +Ġbu ys +Ġbu cks +Ġapp ell +ĠChampions hips +Ġlist ener +ĠDef ensive +Ġc z +ud s +ĠM ate +Ġre play +Ġdecor ated +Ġs unk +ĠV IP +ĠAn k +Ġ19 5 +aa aa +Nob ody +ĠMil k +ĠG ur +ĠM k +ĠS ara +Ġse ating +ĠW id +Tr ack +Ġemploy s +Ġgig antic +AP P +ãĤ § +in ventory +Ġtow el +at che +l asting +ĠT L +Ġlat ency +Ġkn e +B er +me aning +Ġup held +Ġplay ground +Ġm ant +S ide +Ġstere o +Ġnorth west +Ġexception ally +Ġr ays +Ġrec urring +D rive +Ġup right +Ġab duct +ĠMar athon +Ġgood bye +Ġal phabet +h p +Ġcourt room +ring ton +ot hing +T ag +Ġdiplom ats +Ġbar bar +ĠAqu a +18 3 +33 33 +Ġmat urity +Ġinst ability +ĠAp ache +Ġ= == +Ġfast ing +ĠGr id +Mod Loader +Ġ15 2 +A bs +ĠOper ating +ett i +Ġacqu aint +Don nell +ĠK em +ĠFor ge +Ġarm ored +M il +Ġphilos ophers +in vest +Pl ayers +â Ī +Ġmy riad +Ġcomr ades +R ot +Ġremember ing +Ġcorrespond s +Ġprogram mers +ĠLyn n +Ġo lig +Ġco herent +yn chron +ĠChem ical +Ġj ugg +p air +post s +E ye +ĠIn ner +Ġsem ester +ott est +ĠEmir ates +ric anes +or ously +m its +ĠW is +Ġd odge +l ocation +Ġf aded +Am azon +ĠPro ceed +ĠIN FO +j ournal +ĠTru ck +T en +Ġ2 17 +Ġstat utes +m obile +ĠT ypes +Rec omm +b uster +pe x +Ġleg ends +Ġhead ache +f aced +ĠWi Fi +if ty +ĠH ER +Ġcirc uits +ER ROR +22 6 +ol in +Ġcyl inder +osp ace +ik ers +P rem +Qu ant +Ġconflic ting +Ġslight est +Ġfor ged +ion age +Step hen +ĠK ub +ĠOpp ortun +ĠHe al +Ġbl o +Ġrul ers +Ġh uh +Ġsubmar ine +f y +ass er +Ġallow ance +ĠKas ich +ĠT as +ĠAustral ians +Forge ModLoader +ĠâĨ ij +ĠMat rix +am ins +Ġ12 00 +ĠAc qu +23 6 +D ocument +ĠBre aking +19 3 +ĠSub st +ĠRoll er +ĠPro perties +ĠN I +t ier +Ġcr ushing +Ġadvoc ating +Further more +keep ers +Ġsex ism +x d +Ġcall er +ĠS ense +chie ve +ĠT F +Ġfuel ed +Ġreminis cent +Ġobs ess +ur st +Ġup hold +ĠF ans +het ics +Ġâ Ĺ +ĠB ath +Ġbe verage +Ġo scill +25 4 +Ġpol es +Ġgrad ual +Ġex ting +ĠS uff +ĠS uddenly +Ġlik ing +Ġ19 49 +un ciation +am ination +ĠO mar +ĠL V +ĠCon sequently +Ġsynt hes +ĠG IF +Ġp ains +Ġinteract ing +u ously +inc re +Ġrum or +ĠScient 
ology +19 7 +ĠZ ig +Ġspe lling +ĠA SS +Ġexting u +ms on +Ġg h +Ġremark ed +ĠStrateg ic +ĠM ON +å ¥ +g ae +ĠWH AT +E ric +ĠCamp us +Ġmeth ane +Ġimag in +J UST +ĠAl m +X T +i q +ĠR SS +Ġwrong doing +att a +Ġbig ot +Ġdemonstr ators +ĠCal vin +ĠV illa +Ġmembr ane +ĠAw esome +Ġbenef ic +26 8 +Ġmagn ificent +ĠL ots +G reg +ĠBor is +Ġdetain ees +ĠH erman +Ġwhis pered +Ġa we +Prof essor +fund ing +Ġphys iological +ĠDest ruction +Ġlim b +Ġmanip ulated +Ġbub bles +Ġpse ud +Ġhyd ra +ĠBrist ol +Ġst ellar +ĠExp ansion +ĠK ell +ĠInterest ingly +Ġm ans +Ġdrag ging +Ġec ological +ĠF it +Ġg ent +Ġbenef ited +ĠHait i +Ġpoly g +ãĥ İ +Ġ20 30 +Ġpro w +Ġrecon struction +Ġwas t +Ġpsych ic +ĠGree ks +Hand ler +16 2 +ĠP ulse +Ġsol icit +Ġsy s +Ġinflu x +ĠG entle +per cent +Ġprolifer ation +Ġtax able +Ġdisreg ard +Ġesc aping +Ġg inger +Ġwith stand +Ġdevast ated +ĠD ew +ser ies +Ġinject ed +ela ide +Ġturn over +he at +Ļ Ĥ +H appy +ĠSil ent +ãĤ Ń +iv ism +Ġir rational +AM A +Ġre ef +r ub +Ġ16 2 +Ġbank ers +ĠEth ics +v v +Ġcritic isms +K n +18 6 +M ovie +ĠT ories +Ġno od +Ġdist ortion +F alse +od ore +Ġt asty +Res earch +ĠU ID +- ) +Ġdivor ced +ĠM U +ĠHay es +ĠIs n +ian i +ĠH Q +Ġ" # +ign ant +Ġtra umatic +ĠL ing +H un +Ġsab ot +on line +r andom +Ġren amed +ra red +K A +d ead +é t +ĠAss istance +Ġse af +++++ ++++ +Ġse ldom +ĠWeb b +Ġbo olean +u let +Ġref rain +ĠDI Y +ru le +Ġshut ting +Ġutil izing +load ing +ĠPar am +co al +oot er +Ġattract ing +ĠD ol +Ġher s +ag netic +ĠRe ach +im o +Ġdisc arded +ĠP ip +01 5 +ü r +Ġm ug +Im agine +C OL +Ġcurs ed +ĠSh ows +ĠCurt is +ĠSach s +spe aking +ĠV ista +ĠFram ework +ong o +Ġsub reddit +Ġcr us +ĠO val +R ow +g rowing +Ġinstall ment +Ġgl ac +ĠAdv ance +EC K +ĠLGBT Q +LE Y +Ġac et +Ġsuccess ive +ĠNic ole +Ġ19 57 +Qu ote +Ġcircumst ance +ack ets +Ġ14 2 +ort ium +Ġguess ed +ĠFr ame +Ġperpet rators +ĠAv iation +ĠBen ch +Ġhand c +A p +Ġ19 56 +25 9 +r and +Net Message +d in +urt les +h ig +ĠV III +ff iti +ĠSw ords +b ial +Ġkidn apping +dev ice +Ġb arn +ĠEl i +auc as +S end +Con structed +Ġ ½ +Ġneed les +Ġad vertisements +Ġv ou +Ġexhib ited +ĠFort ress +As k +B erry +TY PE +Ġcan cers +ump ing +ĠTerrit ory +Ġpr ud +Ġn as +Ġathe ist +Ġbal ances +ãģ Ł +ĠSh awn +& & +Ġland sc +ĠR GB +Ġpet ty +Ġex cellence +Ġtransl ations +Ġpar cel +ĠChe v +E ast +ĠOut put +im i +Ġamb ient +ĠTh reat +Ġvill ains +Ġ5 50 +IC A +Ġtall er +Ġle aking +c up +Ġpol ish +Ġinfect ious +ĠK C +Ġ@ @ +back ground +Ġbureaucr acy +ĠS ai +un less +it ious +ĠSky pe +At l +ID ENT +00 8 +Ġhyp ocr +Ġpit chers +Ġguess ing +ĠF INAL +Bet ween +Ġvill agers +Ġ25 2 +f ashion +ĠTun is +Be h +ĠEx c +ĠM ID +28 8 +ĠHas kell +19 6 +ĠN OR +Ġspec s +Ġinv ari +Ġgl ut +ĠC ars +Ġimp ulse +Ġhon ors +g el +Ġjurisd ictions +ĠBund le +ul as +Calif ornia +ĠIncre ase +Ġp ear +Ġsing les +Ġc ues +Ġunder went +ĠW S +Ġexagger ated +Ġdub ious +Ġfl ashing +L OG +) ]. 
+J ournal +t g +V an +ĠI stanbul +ĠIn sp +ĠFrank en +D raw +Ġsad ness +Ġiron ic +ĠF ry +x c +Ġ16 4 +is ch +W ay +ĠProtest ant +h orn +Ġun aff +ĠV iv +ill as +ĠProduct ions +ĠH ogan +Ġper imeter +ĠS isters +Ġspont aneous +Ġdown side +Ġdescend ants +Ġor n +w orm +Japan ese +Ġ19 55 +Ġ15 1 +ĠDo ing +els en +umb les +Ġrad ically +ĠDr um +ĠB ach +Ġli abilities +ĠO B +ĠElement ary +Ġmem e +yn es +Ġfinger print +ĠGr ab +Ġundert ake +Mem bers +ĠRead er +ĠSim s +g od +Ġhypot hetical +s cient +ĠA J +Ġchar ism +Ġad missions +ĠMiss ile +tr ade +Ġexerc ising +ĠBack ground +W ritten +Ġvoc als +whe ther +Ġv i +ĠW inner +Ġl itter +ĠSh ooting +ST EM +ãĤ ¡ +ĠA FL +Ġvari ability +Ġe ats +ĠD PS +b row +Ġeleph ants +Ġstr at +Ġ Å +Ġsett lers +Matt hew +Ġin advert +H I +ĠIM F +ĠGo al +Ġnerv es +John son +ey e +ablish ment +Th ursday +BIL ITY +H ad +am oto +het amine +ep s +Ġmit ochond +Ġcomp ressed +ĠTre vor +ĠAnim als +T ool +L ock +Ġtwe ak +Ġpin ch +Ġcancell ation +P ot +Ġfoc al +ĠAst ron +17 3 +ĠA SC +ĠO THER +umn i +Ġdem ise +d l +Ù ħ +Sem itism +Ġcr acking +Ġcollabor ative +Ġexpl ores +s ql +Ġher bs +Ġconfig urations +m is +ĠRes ult +ace y +ĠSm oke +Ġsan ct +el ia +Ġdeg ener +Ġdeep est +Ġscream ed +Ġn ap +Soft ware +ĠST AR +E F +ĠX in +spons ored +mans hip +23 3 +Ġprim aries +Ġfilter ing +Ġas semble +m il +ĠMy ers +b ows +Ġpun ched +M ic +Ġinnov ations +Ġfun c +and o +Ġfr acking +ĠV ul +о Ð +osh op +ĠIm mun +Ġsett ling +Ġadolesc ents +Ġreb uilding +Ġtransform ing +Ġpar ole +Ġhar bor +Ġbook ing +ot ional +onge vity +ĠY o +b ug +Ġemer ges +ĠMethod s +ĠCh u +P res +ĠDun geons +Ġtra iling +ĠR um +ĠH ugh +å¤ © +ĠE ra +ĠBatt les +Res ults +ĠTr ading +Ġvers a +c ss +ax ies +he et +Ġgre ed +19 89 +Ġgard ens +Ġconting ent +P ark +ĠLeaf s +h ook +ro be +Ġdiplom acy +ĠF uel +ĠInv asion +Ġupgr ading +M ale +Ġe lic +Ġrelent less +ĠCo venant +ap esh +ĠT rop +T y +pro duction +art y +Ġpun ches +ak o +cyclop edia +ĠR abbit +ĠHD MI +Ġ14 1 +Ġf oil +Item Image +ĠF G +Ġimplement ations +ĠP om +ixt ures +Ġaw ait +Ġ3 30 +am us +Ġumb rella +Ġfore see +se par +Ġcircum cision +Ġperipher al +S ay +ĠExper t +In c +Ġwithd rew +ĠAnd ers +f ried +Ġradio active +ĠOp ening +Ġboard ing +ĠN D +Ġover throw +Act iv +W P +ĠAct s +× Ļ +Ġmot ions +v ic +ĠM ighty +ĠDef ender +a er +Ġthank ful +ĠK illing +ĠBr is +mo il +Ġpredict ing +26 6 +ch oice +Ġkill ers +Ġinc ub +ĠChe st +ather ing +Ġpro claimed +fl ower +oss om +umbled ore +ĠCy cling +ĠOccup y +AG ES +P en +ĠY ug +Ġpack aged +Ġheight ened +c ot +st ack +C ond +Ġst amps +m age +Ġpersu aded +Ġens l +ĠCard inal +Ġsol itary +Ġpossess ing +ĠC ork +Ġev id +ĠT ay +Ġbl ues +Ġextrem ism +Ġlun ar +Ġcl own +Te chn +Ġfest ivals +ĠPv P +ĠL ar +Ġconsequ ently +p resent +Ġsom eday +ç İĭ +ĠMet eor +Ġtour ing +c ulture +Ġbe aches +S hip +c ause +ĠFl ood +ãĥ ¯ +Ġpur ity +th ose +Ġem ission +b olt +Ġch ord +ĠScript ure +L u +Ġ$ { +cre ated +Other s +25 8 +Ġelement al +Ġannoy ed +ĠA E +d an +ĠS ag +Res earchers +Ġfair y +âĢĵ âĢĵ +======== ==== +Sm art +GG GG +Ġskelet ons +Ġpup ils +link ed +Ġur gency +en abled +ĠF uck +Ġcoun cill +r ab +U AL +T I +Ġlif es +Ġconf essed +B ug +Ġharm on +ĠCON FIG +ĠNe utral +D ouble +Ġst aple +ĠSH A +Brit ish +ĠSN P +AT OR +oc o +Ġswing ing +ge x +ole on +pl ain +ĠMiss ing +ĠTro phy +v ari +ran ch +Ġ3 01 +4 40 +00000000 00000000 +Ġrest oring +Ġha ul +uc ing +ner g +Ġfut ures +Ġstrateg ist +quest ion +Ġlater al +ĠB ard +Ġs or +ĠRhod es +ĠD owntown +????? 
- +ĠL it +ĠB ened +Ġco il +st reet +ĠPort al +FI LE +ĠG ru +* , +23 1 +ne um +Ġsuck ed +Ġr apper +Ġtend encies +ĠLaure n +cell aneous +26 7 +Ġbrow se +Ġover c +head er +o ise +Ġbe et +ĠG le +St ay +Ġm um +Ġtyp ed +Ġdiscount s +T alk +ĠO g +ex isting +ĠS ell +u ph +C I +ĠAust rian +ĠW arm +Ġdismiss al +Ġaver ages +c amera +Ġalleg iance +L AN +=" # +Ġcomment ators +ĠSet ting +ĠMid west +Ġpharm ac +ĠEX P +Ġstain less +Ch icago +Ġt an +24 4 +Ġcountry side +ĠV ac +29 5 +Ġpin ned +Ġcr ises +Ġstandard ized +T ask +ĠJ ail +ĠD ocker +col ored +f orth +" }, +Ġpat rons +Ġsp ice +Ġm ourn +ĠM ood +Ġlaund ry +Ġequ ip +ĠM ole +y ll +ĠTH C +n ation +ĠSher lock +Ġiss u +ĠK re +ĠAmeric as +ĠA AA +Ġsystem atically +Ġcont ra +ĠS ally +Ġrational e +Ġcar riage +Ġpe aks +Ġcontrad iction +ens ation +ĠFail ure +Ġpro ps +Ġnames pace +Ġc ove +field s +ãĤ ĭ +Ġw ool +ĠC atch +Ġpresum ed +ĠD iana +r agon +ig i +Ġh amm +Ġst unt +ĠG UI +ĠObserv atory +ĠSh ore +Ġsmell s +ann ah +Ġcock pit +ĠD uterte +8 50 +Ġopp ressed +bre aker +ĠCont ribut +ĠPer u +ĠMons anto +ĠAtt empt +Ġcommand ing +Ġfr idge +ĠR in +ĠChe ss +ual ity +Ġo l +Republic an +ĠGl ory +ĠW IN +.... ... +ag ent +read ing +Ġin h +J ones +Ġcl icks +al an +Ġ[ ]; +ĠMaj esty +ĠC ed +op us +ate l +à ª +AR C +ĠEc uador +ãĥ ł +ĠK uro +Ġritual s +Ġcapt ive +Ġoun ce +Ġdisag reement +Ġsl og +f uel +P et +M ail +Ġexerc ised +Ġsol ic +Ġrain fall +Ġdev otion +ĠAss essment +Ġrob otic +opt ions +ĠR P +ĠFam ilies +ĠFl ames +Ġassign ments +00 7 +aked own +Ġvoc abulary +Re illy +Ġc aval +g ars +Ġsupp ressed +ĠS ET +ĠJohn s +Ġwar p +bro ken +Ġstat ues +Ġadvoc ated +Ġ2 75 +Ġper il +om orph +ĠF emin +per fect +Ġh atch +L ib +5 12 +Ġlif elong +3 13 +Ġche eks +Ġnum bered +ĠM ug +B ody +ra vel +We ight +ĠJ ak +ĠHe ath +Ġkiss ing +ĠJ UST +Ġw aving +u pload +Ġins ider +ĠPro gressive +ĠFil ter +tt a +ĠBe am +Ġviol ently +ip ation +Ġskept icism +Ġ19 18 +ĠAnn ie +ĠS I +Ġgen etics +Ġon board +at l +ĠFried man +ĠB ri +cept ive +Ġpir ate +ĠRep orter +27 8 +Ġmyth ology +Ġe clipse +Ġsk ins +Ġgly ph +ing ham +F iles +C our +w omen +Ġreg imes +Ġphotograp hed +K at +ĠMA X +Offic ials +Ġunexpected ly +Ġimpress ions +F ront +;;;; ;;;; +Ġsuprem acy +Ġs ang +Ġaggrav ated +Ġabrupt ly +ĠS ector +Ġexc uses +Ġcost ing +ide press +St ack +ĠR NA +ob il +Ġghost s +ld on +at ibility +Top ics +Ġreim burse +ĠH M +ĠDe g +Ġth ief +y et +ogen esis +le aning +ĠK ol +ĠB asketball +Ġf i +ĠSee ing +Ġrecy cling +Ġ[ - +Cong ress +Ġlect ures +P sy +Ġne p +Ġm aid +Ġori ented +A X +Ġrespect ful +re ne +fl ush +ĠUn loaded +re quest +gr id +ĠAltern atively +ĠHug o +Ġdec ree +ĠBuddh ism +and um +And roid +ĠCong o +ĠJoy ce +Ġacknowled ging +hes ive +ĠTom orrow +ĠH iro +th ren +ĠM aced +Ġho ax +ĠIncre ased +ĠPr adesh +W ild +____ __ +16 1 +Ġa unt +Ġdistribut ing +ĠT ucker +ĠSS L +ĠW olves +B uilding +ou lt +ĠLu o +ĠY as +ĠSp ir +ĠSh ape +ĠCamb od +ĠIP v +Ġm l +Ġext rad +39 0 +ĠPenn y +d ream +Ġstation ed +opt ional +ew orthy +. 
+ĠWorks hop +ĠRet ail +ĠAv atar +6 25 +N a +ĠV C +ĠSec ure +M Y +19 88 +oss ip +Ġpro state +Ġund en +Ġg amer +ĠCont ents +ĠWar hammer +ĠSent inel +3 10 +Ġse gregation +ĠF lex +ĠM AY +Ġdr ills +ĠDrug s +Islam ic +Ġsp ur +Ġca fe +Ġimag inary +Ġgu iding +Ġsw ings +ĠThe me +ob y +Ġn ud +Ġbe gging +Ġstr ongh +Ġreject ing +Ġpedest rians +ĠPro spect +R are +s le +Ġconcess ions +ĠConst itutional +Ġbe ams +Ġfib ers +p oon +Ġinstinct s +pro perty +ĠB IG +Sand ers +im ates +Ġco ating +Ġcorps es +ĠTR UE +check ed +Ġ16 6 +A sh +ĠJ S +ĠF iction +Ġcommun al +Ġener getic +oooo oooo +Ġnow adays +IL D +ib o +ĠSU V +R en +Ġdwell ing +Sil ver +Ġt ally +ĠM oving +Ġcow ard +Ġgener als +Ġhorn s +Ġcirc ulated +Ġrob bed +ĠUn limited +Ġharass ed +Ġinhib it +Ġcomp oser +ĠSpot ify +Ġspread s +3 64 +Ġsu icidal +Ġno ises +ĠSt ur +Ġs aga +ĠK ag +is o +Ġtheoret ically +M oney +Ġsimilar ity +Ġslic ed +ut ils +ing es +" - +Ġan th +Ġimp ed +Mod ule +Through out +Ġmen us +comm ittee +and i +ob j +in av +f ired +ĠAb dullah +Ġund ead +Ġfont s +H old +EN G +Ġsustain ability +Ġfl ick +Ġr azor +ĠF est +ĠChar acters +Ġword ing +Ġpopul ist +Ġcritic izing +Ġm use +v ine +Ġcard board +Ġkind ly +Ġfr inge +ĠThe ft +icult ural +Ġgovern ors +Ġ ���� +Ġ16 3 +Ġtime out +ĠA uth +Child ren +A U +Ġred emption +ĠAl ger +Ġ19 14 +Ġw aved +Ġastron auts +og rams +Ġsw amp +ĠFinn ish +Ġcand le +Ġton nes +ut m +Ġr ay +Ġsp un +Ġfear ful +art icles +Ġca us +or ically +ĠRequ ires +ĠG ol +Ġpop e +Ġinaug ural +Ġg le +AD A +ĠIS IL +ĠOff ensive +Ġwatch dog +Ġbal con +ent ity +ĠH oo +Ġgall on +AC C +Ġdoub ling +Ġimpl ication +ĠS ight +Ġdoct r +---- --- +Ġ\ \ +Ġm alt +R oll +Ġâī ¥ +Ġrec ap +add ing +u ces +ĠB end +fig ure +Ġtur key +Ġsoc ietal +ĠT ickets +Ġcommer cially +Ġsp icy +Ġ2 16 +ĠR amp +Ġsuperior ity +à ¯ +ĠTr acker +C arl +ĠC oy +ĠPatri ot +Ġconsult ed +Ġlist ings +Ġsle w +reens hot +ĠG one +Ġ[ ...] +30 9 +Ġh ottest +Ø ± +Ġrock y +ĠD iaz +Ġmass age +Ġpar aly +Ġp ony +A z +Ġcart ridge +ĠN Z +Ġsn ack +ĠLam ar +ple ment +ĠLes lie +Ġm ater +Ġsn ipp +24 6 +Ġjoint ly +ĠBris bane +ĠiP od +Ġpump ing +Ġgo at +ĠSh aron +eal ing +Ġcor on +Ġan omal +rah im +ĠConnect ion +Ġsculpt ure +Ġsched uling +ĠD addy +at hing +Ġeyeb rows +Ġcur ved +Ġsent iments +Ġdraft ing +D rop +( [ +Ġnom inal +ĠLeaders hip +ĠG row +Ġ17 6 +Ġconstruct ive +iv ation +Ġcorrupt ed +ger ald +ĠC ros +ĠChe ster +ĠL ap +ãģ ª +OT H +D ATA +Ġal mond +pro bably +I mp +Ġfe ast +ĠWar craft +F lor +Ġcheck point +Ġtrans cription +Ġ20 4 +Ġtwe aks +Ġrel ieve +S cience +Ġperform er +Z one +Ġtur moil +ig ated +hib it +ĠC afe +the med +Ġflu or +ben ch +Ġde com +ĠU nt +ĠBar rett +ĠF acts +Ġt asting +ĠPTS D +ĠSe al +ĠJuda ism +ĠDynam ic +ĠC ors +V e +ĠM ing +ĠTrans form +v on +ĠDef enders +ĠTact ical +ĠV on +ĠUn ivers +Ġdist orted +ĠB reath +?' 
" +Ġag on +ĠDead ly +Ġl an +ĠCy cle +orn ed +Ġrel iably +Ġgl or +ĠMon key +ãĥ ¡ +Ġad ren +Ġmicrow ave +ĠAl ban +irc raft +dig it +sm art +ĠD read +¯¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯ +{ { +ĠRoc hester +Ġsimpl ified +Ġinf licted +Ġtake over +Ġyour selves +ad itional +Ġmus cular +K S +Ġing en +T ax +ĠFe ature +27 7 +Ġcru c +Ġcr ate +Ġun identified +Ġacclaim ed +ĠM anga +ĠFr ances +ĠNep al +ĠG erald +ĠKu wait +Ġsl ain +ĠHe b +ĠG oku +ãģ® æ +28 6 +M rs +ĠC ody +ĠSan ctuary +01 6 +Ġdism ant +Ġdatas et +ĠH ond +b uck +ĠPat terson +Ġpal ette +ĠG D +ic ol +ĠL odge +Ġplanet ary +ak in +ĠRegist ered +ab we +ĠPeters burg +Ġha iled +ĠP iece +S che +ĠDO J +Ġen umer +18 1 +ĠObs erver +ĠB old +f ounded +com merce +Ġexplo its +ĠF inding +UR N +ĠS ne +ĠAc id +ay ette +ĠVal ues +Ġdr astic +Ġarchitect ural +Ġ" . +× ķ +ump ed +Ġwra pping +Ġwid ow +ĠSl ayer +l ace +on ce +German y +av oid +Ġtem ples +P AR +à ´ +ĠLuc ifer +ĠFl ickr +l ov +for ces +Ġsc outing +Ġlou der +tes y +Ġbefore hand +Ä ĵ +ĠNe on +ĠW ol +ĠTyp ically +ĠPolit ico +-+ -+ +Ġbuild er +Ġder ive +K ill +Ġp oker +Ġambig uous +Ġlif ts +Ġcy t +Ġrib s +ood le +ĠS ounds +h air +ĠSynd rome +t f +Ġproport ional +u id +Ġper taining +ĠKind le +ĠNeg ro +Ġreiter ated +ĠTon ight +oth s +ĠCorn ell +Ġo wing +Ġ20 8 +elf are +oc ating +ĠB irds +Sub scribe +Ġess ays +Ġburd ens +Ġillust rations +ar ious +ER AL +ĠCal cul +Ġx en +ĠLink edIn +ĠJ ung +Ġredes ign +Con nor +29 6 +Ġrevers al +ĠAd elaide +ĠL L +Ġs inking +Ġg um +US H +c apt +ĠGr imm +Ġfoot steps +ĠCB D +isp ers +Ġpro se +Wed nesday +ĠM ovies +ed in +Ġoverturn ed +Ġcontent ious +US B +~~~~~~~~ ~~~~~~~~ +ĠCo pper +Ġpoint less +N V +val ues +olph in +d ain +Ġdepos ited +ĠG W +Ġpreced ed +ĠCl a +ĠGo lem +ĠN im +ĠÎ ² +ĠEngine ers +m iddle +Ġfl att +oper ative +Ġcouncil s +imb abwe +el in +Ġstress ful +ĠL D +Ġres h +l ake +Ġwheel chair +ĠAltern ative +Ġoptim ize +oper ation +Ġpe ek +Ġones elf +ig il +Ġtrans itions +op athy +bl ank +Ġ16 9 +17 1 +________________________________ ________________________________ +Ġl aundering +En c +ĠD EC +Ġwork outs +Ġsp ikes +Ġdin osaurs +Ġdiscrim inatory +P ool +R ather +38 5 +R NA +tes ters +et o +ĠIdent ity +Ġve in +ĠBur ton +Ġarc ade +4 20 +Ult imately +ĠSad ly +à ° +p ill +Ġcub ic +ĠSpect rum +the se +st ates +Ġun official +h awks +ĠEVER Y +Ġrain bow +Ġincarcer ation +and ing +Ġsy ll +ĠEver ton +Ġ17 9 +ĠSer bia +Ġ18 9 +m eter +ĠMic key +Ġant iqu +Ġfact ual +ne ck +ĠN are +n orm +m ust +Ġhigh ways +Ġgl am +Ġdivid ing +ĠSquad ron +ĠMar tha +Ġbirth s +C over +//////// //////// +ĠW ong +Ph ot +ĠA LS +ri o +ĠNon etheless +ĠL emon +Ġ20 6 +ĠE E +Ġderiv ative +ĠWW II +v ote +Ġthere in +Ġsepar ating +44 6 +sy nc +ĠStre ets +Ġr att +Ġmunicip ality +ĠShort ly +Ġmon k +) ," +Ġscr ub +Ġoper atives +Ne ither +Pl ace +ĠLim it +F emale +ĠAct or +Char acter +Ġconstit uted +35 7 +Ġprotest ed +ĠSt raw +ĠHe ight +ild a +ĠTy ph +Ġflood s +Ġcos metic +W AY +pert ure +up on +t ons +ess ing +ĠP ocket +Ġro oft +ĠC aucas +Ġant idepress +Ġincomp atible +EC D +Ġoper a +ĠCont est +Ġgener ators +l ime +Def ense +19 87 +for um +Ġsav age +ĠHung arian +n z +Ġmet allic +Ġex pelled +Ġres idency +Ġdress es +66 6 +ĠC lement +f ires +C ategory +Ġge ek +al is +Ġc emetery +educ ated +Ġc rawl +ĠUn able +ĠT yson +ak is +Ġp ardon +ĠW ra +Ġstrengthen ed +ĠF ors +33 5 +ĠH C +ĠM ond +Ġvisual s +ĠBeat les +ett lement +Ġ ï +g ro +Ġb ash +Ġpo orest +Ġex cel +Ġaspir ations +ĠM unicip +ens ible +Ġceremon ies +Ġintimid ation +ĠCON TR +be ck +ĠK ap +as u +Ġtradem arks +ĠS ew +ĠComp etition +net work +ĠAr ri +ĠT et +Ro aming +W C +D at +Ġso b 
+Ġpair ing +Ġoverd ose +SA Y +ab er +Ġrev olt +ĠF ah +act ing +e q +est ation +F ight +ĠMar ks +27 3 +Ġ17 8 +R aw +ãģ ĭ +34 9 +bl ocks +Ġver ge +est ine +ĠPod esta +Ġinv asive +Ġprofound ly +ĠA o +e ach +Ġl est +inter pret +Ġshr inking +Ġerr one +Ġche es +ly s +ĠI vy +ĠDirect ory +Ġhint ed +V ICE +Ġcontact ing +ĠG ent +he i +Ġlabel ing +Ġmerc ury +ĠL ite +Ġexp ires +Ġdest abil +rit is +c u +Ġfeather s +Ġste er +Ġprogram med +ĠV ader +Go ing +ĠE lim +Ġy o +ĠMic he +Ġ20 3 +Ġslee ves +Ġb ully +ĠHum ans +36 8 +Ġcomp ress +ĠBan ner +AR S +Ġa while +Ġcal ib +Ġspons orship +ĠDiff iculty +ĠP apers +Ġident ifier +} . +Ġy og +ĠSh ia +Ġclean up +Ġvib e +int rodu +im ming +Austral ia +Ġout lines +ĠY outube +tr ain +ĠM akes +Ġde ported +Ġcent r +ĠD ug +ĠB oulder +ĠBuff y +Ġinj unction +ĠHar ley +ĠG roups +ĠD umbledore +ĠCl ara +Ġ" - +Ġsacrific ed +ep h +Sh adow +ib ling +Ġfreel ance +Ġevident ly +ph al +Ġret ains +M ir +Ġfin ite +d ar +ĠC ous +Ġrep aired +Ġperiod ic +Ġchampions hips +Ġaster oid +bl ind +Ġexpress ly +ĠAst ros +Ġsc aled +Ġge ographical +ĠRap ids +En joy +Ġel astic +ĠMoh amed +Mark et +be gin +Ġdisco vers +Ġtele communications +Ġscan ner +Ġen large +Ġsh arks +Ġpsy chedel +ĠRou ge +Ġsnap shot +is ine +X P +Ġpestic ides +ĠL SD +ĠDist ribution +re ally +Ġde gradation +Ġdisgu ise +Ġbi om +ĠEX T +Ġequ ations +Ġhaz ards +ĠComp ared +) * +Ġvirt ues +Ġeld ers +Ġenh ancing +ĠAc ross +er os +ang ling +Ġcomb ust +ucc i +Ġconc ussion +Ġcontrace ption +ĠK ang +Ġexpress es +Ġa ux +ĠP ione +Ġexhib its +Deb ug +OT AL +ĠAl ready +ĠWheel er +Ġexp ands +? : +Ġreconc iliation +Ġpir ates +Ġpur se +Ġdiscour age +Ġspect acle +R ank +Ġwra ps +ĠTh ought +Ġimp ending +O pp +ĠAng lo +ĠE UR +Ġscrew ed +ret ched +Ġencour agement +mod els +Ġconf use +mm m +ĠVit amin +âĸij âĸij +C ru +Ġkn ights +Ġdisc ard +Ġb ishops +ĠW ear +ĠGar rett +k an +ãĥ Ł +Ġmascul ine +cap ital +ĠA us +Ġfat ally +th anks +ĠA U +ĠG ut +12 00 +Ġ 00000000 +Ġsur rog +ĠBI OS +ra its +ĠWat ts +Ġresur rection +ĠElect oral +ĠT ips +4 000 +Ġnut rient +Ġdepict ing +Ġspr ink +Ġm uff +ĠL IM +ĠS ample +ps c +ib i +gener ated +Ġspec imens +Ġdiss atisf +Ġtail ored +Ġhold ings +ĠMonth ly +ĠE at +po ons +Ġne c +ĠC age +ĠLot us +ĠLan tern +Ġfront ier +Ġp ensions +Ġj oked +ĠHard y +=-=- =-=- +r ade +U ID +Ġr ails +Ġem it +Ġsl ate +Ġsm ug +Ġsp it +ĠCall s +ĠJac obs +f eat +ĠU E +Ġrest ruct +Ġregener ation +Ġenerg ies +ĠCon nor +OH N +ĠChe ese +Ġg er +Ġresur rect +man agement +N W +Ġpres ently +ĠBru ins +M ember +ĠM ang +id an +Ġboost ing +w yn ++ . +requ isite +ĠNY PD +ĠMe gan +ĠCond itions +Ġp ics +nes ium +ĠR ash +Ġ17 4 +ĠD ucks +Ġemb ro +z u +on ian +rel igious +Ġc raz +ĠAC A +ĠZ ucker +EM A +ĠPro s +We apon +ĠKn ox +ĠAr duino +Ġst ove +Ġheaven s +ĠP urchase +Ġher d +Ġfundra iser +Dig ital +5 000 +Ġprop onents +/ âĢĭ +Ġj elly +ĠVis a +Ġmon ks +Ġadvance ment +ĠW er +Ġ18 7 +e us +ert ility +Ġfet al +Ġ19 36 +L o +Ġout fits +Ġstair case +b omb +Ġcustom ized +cl air +T ree +Ġm apped +ĠConsider ing +ĠTor res +Ġmeth yl +Ġapprox imate +Ġdo om +ĠHans en +Ġc rossover +Ġstand alone +ä ¼ +Ġinv ites +Ġgra veyard +Ġh p +Donald Trump +Ġesc ort +G ar +Ġpredec essors +Ġh ay +Ġen zyme +ĠStra ight +vis ors +I ng +ane ously +ĠApp lied +Ġf ec +ĠDur ant +Ġout spoken +or b +Ġz eal +Ġdisgr ace +' ). 
+ĠChe ng
+28 9
+ĠRen a
+ĠSu icide
+Ġt ug
[... tens of thousands of further byte-level BPE merge rules elided for brevity. Each added line is one space-separated subword pair, e.g. "ĠApp s" or "rawdownload cloneembedreportprint", with "Ġ" encoding a leading space in GPT-2's byte-level alphabet. Signature entries such as "ĠguiActiveUn focused", "externalToEVA Only", and "PsyNetMessage" identify this as the standard GPT-2 merges.txt vocabulary, presumably consumed by the repo's Optimus GPT-2 decoder tokenizer. ...]
+ĠCon v +ĠFe ld +IGH TS +Ġwhistlebl ower +ort mund +ets y +arre tt +ĠMon o +ĠI ke +ĠC NBC +ĠW AY +ĠMD MA +ĠIndividual s +Ġsupplement al +Ġpower house +ĠSt ru +F ocus +aph ael +ĠCol leg +att i +Z A +Ġp erenn +ĠSign ature +ĠRod ney +Ġcub es +idd led +ĠD ante +ĠIN V +iling ual +ĠC th +Ġso fa +Ġintimid ate +ĠR oe +ĠDi plom +ĠCount ries +ays on +Ġextrad ition +Ġdis abling +ĠCard iff +Ġmemor andum +ĠTr ace +Ġ?? ? +se ctor +ĠRou hani +ĠY ates +ĠFree ze +Ġbl adder +M otor +ĠProm ise +ant asy +Ġforesee able +ĠC ologne +cont ainer +ĠTre es +ĠG ors +ĠSin clair +Ġbar ring +key e +Ġsl ashed +ĠStat istical +é ĩ +Ġâĸ º +All ows +Ġhum ility +Ġdr illed +ĠF urn +44 3 +Ġse wage +Ġhome page +Ġcour tyard +Ġv ile +Ġsubsid iaries +aj o +direct ory +Ġam mon +V ers +charg es +Ġ} } +ĠCh ains +Ġ24 6 +n ob +Ġper cept +Ġg rit +Ġfisher men +ĠIraq is +ĠDIS TR +ĠF ULL +ĠEval uation +g raph +at ial +Ġcooper ating +Ġmel an +Ġenlight ened +Ġal i +t ailed +Ġsal ute +Ġweak est +ĠBull dogs +U A +ĠAll oy +Ġsem en +oc ene +ĠWilliam son +s pr +, âĢĶ +ĠG F +itt ens +Be at +ĠJ unk +iph ate +ĠFarm ers +ĠBit coins +ig ers +d h +ĠL oyal +p ayer +Ġentert ained +Ġpenn ed +Ġcoup on +Que ue +Ġweaken ing +c arry +Ġunderest imate +Ġshoot out +Ġcharism atic +ĠProced ure +Ġprud ent +in ances +Ġric hes +Ġcort ical +Ġstr ides +Ġd rib +ĠOil ers +5 40 +ĠPer form +ĠBang kok +Ġe uth +S ER +Ġsimpl istic +t ops +camp aign +Q uality +Ġimpover ished +ĠEisen hower +Ġaug ment +ĠH arden +Ġinterven ed +Ġlist ens +ĠK ok +Ġs age +Ġrub bish +ĠD ed +Ġm ull +pe lling +Ġvide ot +Produ ction +D J +m iah +Ġadapt ations +Ġmed ically +Ġboard ed +Ġarrog ance +Ġscra pped +Ġopp ress +FORM ATION +Ġj unction +4 15 +EE EE +S kill +Ġsub du +ĠSug gest +ĠP ett +Ġle tt +ĠMan ip +ĠC af +ĠCooper ation +T her +Ġreg ained +¶ æ +ref lect +Ġth ugs +ĠShel by +Ġdict ates +ĠWe iner +ĠH ale +Ġbatt leground +s child +Ġcond ol +h unt +osit ories +Ġacc uses +Fil ename +Ġsh ri +Ġmotiv ate +Ġreflect ions +N ull +ĠL obby +¥ µ +ĠS ATA +ĠBack up +Ñ ĥ +n in +ĠCor rection +Ġju icy +ut ra +ĠP ric +Ġrest raining +ĠAir bnb +ĠAr rest +Ġappropri ations +Ġsl opes +Ġmans laughter +Ġwork ings +ĠH uss +ĠF rey +Le ave +ĠHarm ony +ĠF eder +Ġ4 30 +Ġt rench +Ġglad ly +Ġbull pen +ĠG au +b ones +Ġgro ove +Ġpre text +ã ħĭ +Ġtransm itter +ĠComp onent +Ġunder age +ĠEm pires +T ile +Ġo y +ĠMar vin +ĠC AS +Ġbl oss +Ġrepl icated +ĠMar iners +Marc us +ĠBl ocks +Ġliber ated +Ġbutter fly +Fe el +Ġfer mentation +Ġyou tube +Ġoff end +ĠTer m +res ist +Ġcess ation +Ġinsurg ency +Ġb ir +ĠRa ise +59 5 +Ġhypothes es +50 2 +Ġpl aque +ocr at +Ġjack ets +ĠHuff Post +am ong +Ġconf er +48 7 +ĠL illy +Ġadapt ing +ĠF ay +Ġsh oved +ve c +Ġref ine +Ġg on +Ġgun men +z ai +ĠShut tle +ĠI zan +Ġ19 13 +Ġple thora +· · +Ġ5 10 +Ġp uberty +Ġ24 1 +ĠWe alth +ĠAl ma +ĠM EM +ĠAd ults +C as +pr ison +R ace +Ġwater proof +Ġathlet icism +Ġcapital ize +ĠJu ice +Ġillum inated +ĠP ascal +Ġirrit ation +ĠWitness es +ad le +ĠAst ro +Ġf ax +ĠEl vis +Prim ary +ĠL ich +ĠEl ves +Ġres iding +Ġst umble +3 19 +ĠP KK +Ġadvers aries +D OS +ĠR itual +Ġsm ear +Ġar son +ident al +Ġsc ant +Ġmon archy +Ġhal ftime +Ġresid ue +Ġind ign +ĠSh aun +ĠEl m +aur i +A ff +W ATCH +ĠLy on +hel ps +36 1 +Ġlobby ist +Ġdimin ishing +Ġout breaks +Ġgo ats +f avorite +ĠN ah +son ian +ĠBo oster +Ġsand box +ĠF are +ĠMalt a +Ġatt Rot +ĠM OR +ld e +Ġnavig ating +T ouch +Ġunt rue +ĠDis aster +Ġl udicrous +Pass word +ĠJ FK +blog spot +4 16 +ĠUN DER +ern al +Ġdelay ing +T OP +Ġimpl ants +ĠAV G +ĠH uge +att r +Ġjournal istic +ĠPe yton +ĠI A +R ap +go al +ĠProgram me +Ġsm ashing +w ives +print 
ln +ĠPl ague +in us +EE P +Ġcru iser +ĠPar ish +umin ium +Ġoccup ants +ĠJ ihad +m op +Ġp int +Ġhe ct +ĠMe cca +direct or +ĠFund ing +ĠM ixed +Ġst ag +T ier +Ġg ust +Ġbright ly +ors i +Ġup hill +R D +Ġles ions +ĠBund y +liv ious +Ġbi ologist +ĠFac ulty +ĠAuthor ization +Ġ24 4 +All ow +ï ¸ +ĠGi ul +Ġpert inent +ot aur +es se +ĠRo of +Ġunman ned +35 1 +ĠSh ak +ĠO rient +Ġend anger +D ir +Ġrepl en +ed ient +Ġtail or +Ġgad gets +Ġaud ible +âĺ Ĩ +N ice +Ġbomb ard +ĠR ape +Ġdef iance +ĠTW O +ĠFilip ino +Ġunaff ected +erv atives +Ġso ared +ĠBol ton +Ġcomprom ising +ĠBrew ers +R AL +ĠA HL +icy cle +Ġv ampires +Ġdi pped +oy er +ĠX III +Ġsidew ays +ĠW aste +ĠD iss +ĠâĶľ âĶĢâĶĢ +$ . +Ġhabit ats +ĠBe ef +tr uth +tr ained +spl it +R us +And y +ĠB ram +RE P +p id +è£ ħ +ĠMut ant +An im +ĠMar ina +Ġfut ile +hig hest +f requency +Ġepile psy +Ġcop ing +Ġconc ise +Ġtr acing +ĠS UN +pan el +ĠSoph ie +ĠCrow ley +ĠAd olf +ĠShoot er +Ġsh aky +ĠI G +ĠL ies +ĠBar ber +p kg +Ġupt ake +Ġpred atory +UL TS +/ ** +Ġintox icated +ĠWest brook +od der +he ment +Ġbas eman +AP D +st orage +ĠFif ty +ed itor +G EN +UT ION +ir ting +Ġse wing +r ift +Ġag ony +ĠS ands +Ġ25 4 +C ash +Ġl odge +Ġp unt +N atural +ĠIde as +Ġerrone ous +ĠSens or +ĠHann ity +Ġ19 21 +Ġm ould +ĠG on +kay a +Ġanonym ously +ĠK EY +Ġsim ulator +W inter +Ġstream ed +50 7 +? ", +Ġte ased +Ġco efficient +Ġwart ime +ĠTH R +' '. +ĠBank ing +mp ire +Ġf andom +Ġl ia +G a +Ġdown hill +Ġinterpre ting +Ind ividual +N orm +Ġjealous y +bit coin +Ġple asures +ĠToy s +ĠChev rolet +ĠAd visor +IZ E +Ġrecept ions +70 6 +C ro +Ġ26 2 +Ġcit rus +ir u +Review er +ject ed +U ES +an z +19 81 +ĠWork er +Ġcompl ied +ores cent +contin ental +T on +ĠPr ism +ĠShe ep +Ġ28 8 +n ox +ĠV og +O rd +Ġreal ms +te k +Ġirrig ation +Ġbicy cles +Ġelectron ically +p oly +t all +() ); +Ġaest hetics +ĠInteg rated +Expl ore +Ġd unk +47 6 +p ain +ĠJac ques +ĠD mit +Fram es +Ġreun ited +Ġhum id +D ro +P olitical +Ġyouth ful +Ġent ails +Ġmosqu ito +36 3 +spe cies +Ġcoord inating +ĠMay hem +ĠMagn us +M ount +Impro ved +ĠST ATE +ATT LE +Ġflow ed +Ġtack led +Ġfashion ed +Ġre organ +iv ari +f inger +Ġreluct antly +et ting +ĠV and +you ng +ĠGar land +Ġpresum ption +Ġamen ities +ĠPle asant +on ential +ĠO xy +Ġmor als +ĠY ah +Read y +Sim on +En h +D emon +Ġcl ich +Mon itor +ĠD U +Ġwel comes +Ġstand out +Ġdread ful +Ġban anas +Ġball oons +h ooting +bas ic +Ġsuff ix +Ġd uly +can o +Ch ain +at os +Ġgeop olitical +Ġ( & +ĠGem ini +ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ ÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤÃĥÃĤ +Ġacqu itted +L uck +prot ect +10 24 +Ġsc arcity +Ġmind fulness +ec ided +D N +pr ime +ĠPres idents +ĠVID EO +Ġ( âĪĴ +add ock +N OR +ĠP ru +p un +ĠL OL +)) )) +ĠL iqu +ĠS AS +Ġsty ling +Ġpunish ments +Ġnum b +Ġasc ertain +ĠRock ies +f lu +Th umbnail +Ġperpet rated +ĠSem i +Ġdis arm +ĠOld er +ĠEx ception +Ġexponent ially +ĠCommun ities +Ġabol ish +ĠPart ner +pt oms +Ġ7 77 +ĠFo ley +ĠC ases +Ġgre ase +ĠReb irth +G round +Ġ; ) +ĠDoct rine +ik ini +Y e +ĠBl ossom +Ġpers ists +b ill +Ġinf usion +Ġbud dies +9 11 +ĠPat ient +Ġdem os +Ġacquaint ance +ĠP aw +at ari +Ġx ml +Ġfasc ination +ĠSer ve +Ï Ĥ +br anded +Ġa z +Return s +Ġover shadow +Ġro am +Ġspeed y +n umbered +hel ial +Ġdisc iple +Ġass urances +g iven +pect ing +ĠN atalie +çĶ ° +Ġmosquit oes +rote in +Ġnumer ic +Ġindepend ents +Ġtrans itional +Ġreaction ary +ĠMech dragon +do ctor +Ġshort est +Ġsequ ential +ĠB ac +ĠAccount s +ãģ Į +ach y +ract ive +ĠReg iment +Ġbreat htaking +ffic iency +ĠB ates +Ġ3 11 +Ġward 
robe +ft s +ĠBer k +Sim ply +ĠRivers ide +iver ing +ident ial +lu cent +Ġen riched +ĠCon ver +ĠG iving +ãĥ Ļ +Ġlegal ize +ĠF TC +Ġfre aking +M ix +Ġter restrial +es ian +ci ents +W ing +LO AD +Ġled ge +ĠViol ent +ĠMet all +Ġ30 8 +Ġs outheastern +hett o +M eat +Ġslow down +Ġret reated +Jere my +end as +**** * +er ic +Ġre ins +opp able +ĠHuman ity +ear ances +rig an +C amera +Ġwa ivers +s oc +Ġalter ation +trans form +ĠC emetery +50 6 +Ġindef inite +Ġstim ulating +y g +60 3 +ĠS op +Ġdescript ive +Ph ase +ĠEd mund +Ġpneum onia +vent us +A mb +Ġlabor atories +ĠEx clusive +ug ar +W ere +Ġmalf unction +Ġhomosexual s +Ġ---- --- +un i +Ġturb ines +ĠEqu ity +D u +Ġmind ed +ĠR H +ĠBlack hawks +Ġfe ats +Ġ17 00 +re pl +36 2 +lad en +Ġindisp ensable +ly ss +tt i +Ġre el +Ġdiver ted +Ġlik eness +Ġsubscript ions +Ġfing ert +Ġfil thy +dest ruct +d raft +ĠBernard ino +l aunch +Ġper plex +ĠS UM +car b +Ġswe ater +ĠVent ure +ĠJ ag +ĠCele b +ĠV oters +Ġstead fast +Ġathlet ics +ĠHans on +ĠDr ac +Tr acker +Ġcomm end +ĠPres idency +ĠD ID +in formed +Ġweb page +P retty +Ġforce fully +ãĥĥ ãĤ¯ +Ġrel ocation +Ġsat ire +â ī +ĠSunder land +æ Ħ +V oice +???? ???? +Ġinform ant +Ġbow el +ĠUn iform +Ġ ..." +Ġpur ge +Ġpic nic +ĠU mb +ĠU PDATE +ĠSapp hire +ĠSt all +le arn +Ġobject ively +Ġob liter +Ġlooph ole +Ġjour neys +Ġo mission +Pro s +ĠSid ney +pl oma +Ġspray ed +Ġg uru +Ġtra itor +Ġtim et +Ġsn apping +ĠSe vent +urn al +ĠUk ip +Ġb owed +por al +l iberal +R os +Quest ions +i OS +Ġsummar ize +ST AT +Ġ18 50 +ap est +Ġl ender +ĠVari able +br inging +ĠL ORD +, ) +Ġcollaps es +x iety +ĠN ed +Y D +ĠSch a +Ġantib ody +Ġdis band +y re +ill usion +Ġro ver +s hed +ĠHiro sh +cc i +Ġcal am +ĠMort on +P interest +Ġ19 28 +ĠE uras +ord es +Ġf ences +ĠIn ventory +ĠVal encia +ĠU d +ĠT iff +Ġsqu e +Ġqu otation +Ġtroubles ome +er ker +QU EST +ĠKing doms +s outh +Ġle vy +Pr ince +ĠSt ing +Ġnick named +Ġapp e +Ġphot ographic +Ġcorp us +re ference +ĠT rog +U nt +) =( +ĠLat via +Ġactiv ating +Ġlicense e +Ġdispar ities +ĠNews letter +ãĥĥ ãĥĪ +Ġfree ing +ĠJe ep +ĠPer ception +ins k +Ġsil icone +ĠHay den +Le an +ĠSuz uki +ibr arian +66 8 +Ġsp or +Ġcorrel ations +ag hetti +Ġtu ber +ĠIP CC +il us +ĠV u +Ġwealth iest +ĠCarb uncle +an za +Ġfool ed +ĠZ ur +Ġd addy +ran o +il ian +Ġknock out +f man +requ ired +ĠWik ileaks +ĠD uffy +ON T +Ġins ol +ĠObject s +Ġb ou +ĠNord ic +ĠIns ert +sc an +Ġd ancers +Ġid iots +major ity +ĠNev ille +ĠFree BSD +Ġt art +pan ic +69 0 +Ġcoc oa +Ġsam pled +Ġlook up +Ind ust +Ġinject ions +gen re +Ġa u +Ġroad way +Ġgen itals +K ind +ĠEx aminer +ĠY az +F resh +Ġpar alysis +ĠAl uminum +Ġre ap +ok é +Ġsl oppy +ĠTun nel +pos ium +ner y +en ic +Ġher bal +ĠOut er +ĠBuild er +Ġinc ur +Ġide ologies +Ġback ups +cons uming +ĠDet ect +de ck +ĠKN OW +ĠG ret +ĠM IC +Ġtough ness +ĠEx hibit +Ġh ive +L es +ĠSCH OOL +ĠAt ari +ald e +ĠN ull +and estine +m ouse +Ġbrig ade +48 9 +Ġrev ol +ĠLaw son +ĠW ah +op oly +eb ted +ĠS aunders +Ġ3 13 +ĠW inc +Ġtab oo +ĠHel met +Ġw edge +ch ip +ĠT ina +b g +Ġinf uri +r n +Ġanomal ies +ĠSy nc +ĠEx am +ĠComm it +ĠDi ary +ĠALS O +ĠDe bor +omed ical +Ġcomprehens ion +6 55 +Ġempower ing +Ġ ire +Ġju ices +ĠE TH +ĠBox ing +=" / +Ġfacilit ated +p oke +ĠPars ons +ĠMod er +tra vel +Ġcivil izations +Ġliber tarians +Ġrun e +ĠCl arks +at hed +Ġcampaign ers +ĠDis patch +ĠFah renheit +ĠCap com +-------- -- +Ġl ace +Ġdr aining +Ġl iner +ĠArt ificial +é n +t ask +] ). 
+ĠGM O +ĠOper ator +ord inary +ĠInf luence +ĠU ps +Ġpot ency +uss en +osp ons +ĠSw im +ĠDead line +Un ity +Ġcul inary +Ġenlight enment +Ġwe arer +Ġmin ed +Ġp ly +Ġinc est +ĠDVD s +W alk +B TC +Tr ade +Ġdev al +ib and +ĠOvers ight +Palest inian +Ġd art +Ġm ul +L R +Ġrem ovable +ĠReal ms +ì Ŀ +Ġmisc ar +ĠV ulkan +68 5 +è re +ĠS ap +Ġmer ging +ĠCar ly +che ster +Ġbr isk +Ġlux urious +ĠGener ator +Ġbit terness +Ġed ible +Ġ24 3 +T G +Ġrect angle +With No +bel ow +J enn +Ġdark est +Ġh itch +Ġdos age +Ġsc aven +ĠK eller +ĠIllust rated +Certain ly +ĠMaver icks +Marg inal +Ġdiarr hea +Ġenorm ously +Ġ9 99 +sh r +qu art +Ġadam ant +ĠM ew +Ġren ovation +Ġcerv ical +ĠPercent age +en ers +ĠKim ber +Ġflo ats +Ġde x +ĠW itcher +ĠSwan sea +d m +Ġsal ty +y ellow +Ġca pe +ĠDr ain +ĠPaul a +ĠTol edo +les i +Mag azine +ĠW ick +ĠM n +ĠA ck +ĠR iding +AS ON +Ġhom ophobic +AR P +Ġwand ered +C PU +ood oo +ĠP ipe +Ġtight ening +ĠBut t +3 18 +Ġdesert ed +S ession +Ġfacilit ating +J ump +Ġemer gencies +OW ER +Ġexhaust ive +ĠAF TER +Ġheart beat +ĠLab el +ack y +ĠCert ified +ilt ration +Z e +ĠU tt +Ġ13 00 +Ġpres ume +ĠDis p +Ġsur ged +Ġdoll s +Col umb +Ġchim pan +ĠR azor +Ġt icks +Ġcouncill or +Ġpilgr image +ĠReb els +ĠQ C +ĠA uction +x ia +ik k +b red +Ġinsert ion +Ġco arse +d B +SE E +ĠZ ap +ĠF oo +Ġcontem por +ĠQuarter ly +ot ions +ĠAl chemist +ĠT rey +ĠDu o +S weet +80 4 +ĠGi ov +Ġfun n +N in +h off +Ġram ifications +Ġ19 22 +ĠExper ts +az es +Ġgar ments +ar ial +ĠN ab +Ġ25 7 +ĠV ed +Ġhum orous +ĠPom pe +Ġn ylon +Ġlur king +ĠSerge y +ĠMatt is +Ġmisogyn y +ĠComp onents +ĠWatch ing +ĠF olk +ract ical +B ush +Ġt aped +Ġgroup ing +Ġbe ads +Ġ20 48 +Ġcon du +quer que +Read ing +Ġgriev ances +Ult ra +Ġend point +H ig +ĠSt atic +ĠScar borough +L ua +ĠMess i +a qu +ĠPsy Net +ĠR udd +Ġa venue +v p +J er +Ġsh ady +ĠRes ist +ĠArt emis +Ġcare less +Ġbro kers +Ġtemper ament +Ġ5 20 +T ags +ĠTurn ing +Ġut tered +Ġp edd +Ġimpro vised +Ġ: ( +Ġtab l +Ġpl ains +16 00 +press ure +ĠEss ence +marg in +friend s +ĠRest oration +Ġpoll ut +ĠPok er +ĠAugust ine +ĠC IS +ĠSE AL +or ama +Ġth wart +se ek +Ġp agan + º +cp u +Ġg arn +Ġass ortment +ĠI LCS +t ower +Recomm ended +Ġun born +ĠRandom Redditor +ĠRandomRedditor WithNo +Ġparaly zed +Ġeru ption +Ġinter sect +ĠSt oke +ĠS co +B ind +å ¾ +ĠP NG +ĠNeg ative +ĠNO AA +Le on +Ġall oy +ĠL ama +ĠD iversity +5 75 +Ġunderest imated +ĠSc or +Ġm ural +Ġb usted +so on +l if +Ġnone x +Ġall ergy +ĠUnder world +ĠR ays +ĠBl asio +Ġh rs +ĠD ir +Ġ3 27 +by ter +Ġrepl acements +Ġactiv ates +ri ved +M H +Ġp ans +ĠH I +Ġlong itudinal +Ġnu isance +al er +Ġsw ell +ĠS igned +s ci +ĠIs les +ĠA GA +Ġdef iant +Ġson ic +oc on +K C +ĠA im +t ie +ah ah +Ġm L +D X +Ġb isc +ĠBill board +ĠSY STEM +NE Y +ga ard +Ġdist ressed +former ly +Al an +Ġche fs +Ġopt ics +ĠC omet +ĠAM C +Ġredes igned +irm ation +Ġsight ings +38 2 +3 11 +ĠW B +Ġcont raction +ĠT OTAL +D ual +Ġstart led +Ġunderstand ably +Ġsung lasses +ETH OD +Ġd ocker +Ġsurf ing +ĠH EL +ĠSl ack +ton es +Ġsh alt +Vis ual +49 8 +Dep artment +c ussion +Ġunrest ricted +Ġt ad +Ġre name +employ ed +Ġeduc ating +Ġgrin ned +bed room +ĠActiv ities +ĠV elvet +ĠSW AT +Ġsh uffle +ig or +Ġsatur ation +F inding +c ream +ic ter +Ġv odka +tr acking +te c +Ġfore ground +iest a +Ġve hement +ĠEC B +ĠT ie +E y +Ġt urtles +ĠRail road +ĠKat z +ĠFram es +Ġmen ace +ĠFell owship +ĠEss ential +ugg ish +Ġdri p +ch witz +ĠKy oto +s b +ĠN ina +Param eter +Ġal arms +ĠCl aud +Ġpione ering +Ġchief ly +ĠSc ream +Col lection +Ġthank fully +ĠRonald o +åŃ IJ +st rip +ĠDisney land +com mercial +See ing +S oul 
+Ġevac uate +Ġc iv +ĠAs he +Ġdiv ides +ĠD agger +rehens ive +Ġber ries +ĠD F +Ġs ushi +Ġplur ality +W I +Ġdisadvant aged +Ġbatt alion +ob iles +45 1 +Ġcl ing +Ġunden iable +ĠL ounge +Ġha unt +p he +Ġquant ify +Ġdiff ered +Ġ[* ] +ĠV iz +c um +sl ave +Ġvide og +Ġqu ar +Ġbund les +ĠAl onso +t ackle +Ġneur onal +Ġlandsl ide +conf irmed +ĠDep th +Ġrenew ables +B ear +ĠMaced onia +Ġjer seys +Ġb unk +ĠSp awn +ĠControl s +ĠBuch anan +Ġrobot ics +Ġemphas izing +ĠTut orial +h yp +ist on +Ġmonument al +æ ° +ĠCar ry +Ġt bsp +en ance +H ill +art hed +Ġro tten +De an +Ġtw isting +Ġgood will +Ġimm ersion +L iving +Ġbr ushes +ĠC GI +ĠAt k +tr aditional +Ġph antom +ĠSt amina +Ġexpans ions +ĠMar in +Ġembark ed +ĠE g +int estinal +ĠPE OPLE +ĠBo oth +ĠApp alach +Ġreleg ated +V T +M IT +Ġmust er +Ġwithdraw ing +Ġmicrosc ope +ĠG athering +ĠC rescent +ĠArgent ine +ĠDec re +ĠDomin ic +Ġbud s +ant age +ĠI on +Ġwid ened +ONS ORED +ĠGl oves +iann opoulos +raz en +fe el +Ġrepay ment +Ġhind sight +ĠRE ALLY +ĠPist ol +ĠBra h +Ġwat ts +Ġsurv ives +Ġfl urry +iss y +Al ert +ĠUrug uay +Ph oenix +S low +ĠG rave +ĠF ir +Ġmanage able +Ġtar iff +ĠU DP +ĠPist ons +ĠNiger ian +Ġstrike outs +Ġcos metics +whel ming +f ab +c ape +pro xy +Ġre think +Ġover coming +sim ple +Ġw oo +Ġdistract ing +ĠSt anton +ĠTuls a +ĠD ock +65 9 +Ġdisc ord +ĠEm acs +ĠV es +ĠR OB +Ġreass uring +Ġcons ortium +Muslim s +3 21 +Ġprompt s +se i +ĠH itch +imp osed +ĠF ool +Ġindisc rim +wr ong +bu querque +D avis +! ] +Ġtim eless +ĠNE ED +Ġpestic ide +Ġrally ing +ĠCal der +Ġå ¤ +Ġx p +ĠUn le +ĠEx port +lu aj +B uff +) [ +Ġsq or +S audi +Ġis tg +Ġindul ge +pro c +Ġdisg usted +Ġcomp ounded +Ġn em +Ġschool ing +ĠC ure +process ing +S ol +Ġpro verb +it ized +ĠAlv arez +Ġscar f +Ġrect angular +re ve +Ġh ormonal +ĠSt ress +itiz en +Ġ4 25 +girl s +ĠNo ir +ĠR app +Ġmar ches +ch urch +ĠUs es +Ġ40 5 +ĠBer m +Ġord inances +ĠJud gment +Charg es +ĠZ in +Ġdust y +Ġstraw berries +Ġper ce +ĠTh ur +ĠDebor ah +net flix +ĠLam bert +Ġam used +ĠGu ang +Y OU +R GB +ĠC CTV +Ġf iat +r ang +Ġf ederation +ĠM ant +ĠB ust +ĠM are +respect ive +ĠM igration +ĠB IT +59 0 +Ġpatriot ism +Ġout lining +reg ion +ĠJos é +Ġbl asting +ĠEz ra +B s +Ġundermin es +ĠSm ooth +Ġcl ashed +rad io +Ġtransition ing +ĠBucc aneers +ĠOw l +Ġplug s +Ġh iatus +ĠPin ball +Ġm ig +ĠNut r +ĠWolf e +Ġinteg ers +Ġor bits +ĠEd win +ĠDirect X +b ite +Ġbl azing +v r +Ed ge +ĠP ID +ex it +ĠCom ed +ĠPath finder +ĠGu id +ĠSign s +ĠZ er +ĠAg enda +Ġreimburse ment +M esh +i Phone +ĠMar cos +ĠS ites +h ate +en burg +Ġs ockets +p end +Bat man +v ir +ĠSH OW +Ġprovision al +con n +ĠDeath s +AT IVE +Pro file +sy m +J A +Ġnin ja +inst alled +id ates +eb ra +ĠOm aha +Ġse izing +ĠBe asts +Ġsal ts +M ission +Gener ally +ĠTr ilogy +he on +leg ates +Ġd ime +Ġf aire +par able +G raph +Ġtotal ing +Ġdiagram s +ĠYan uk +ple t +ĠMe h +Ġmyth ical +ĠStep hens +aut ical +ochem istry +Ġkil ograms +Ġel bows +anc ock +ĠB CE +ĠPr ague +Ġimpro v +ĠDev in +Ġ" \ +par alle +Ġsuprem acists +ĠB illion +Ġreg imen +inn acle +Ġrequ isite +ang an +ĠBur lington +ain ment +ĠObject ive +oms ky +G V +Ġun ilateral +Ġt c +Ġh ires +ment al +Ġinvol untary +Ġtrans pl +ĠASC II + ¨ +Ev ents +Ġdoub ted +ĠKa plan +ĠCour age +ig on +ĠMan aging +ĠT art +Ġfalse hood +ĠV iolet +Ġair s +Ġfertil izer +Brit ain +Ġaqu atic +ou f +W ords +ĠHart ford +Ġeven ings +ĠV engeance +qu ite +G all +ĠP ret +Ġp df +ĠL M +ĠSo chi +ĠInter cept +9 20 +Ġprofit ability +ĠId le +ĠMac Donald +ĠEst ablishment +um sy +Ġgather ings +ĠN aj +Charl ie +Ġas cent +ĠProt ector +Ġal gebra +Ġbi os +for ums 
+EL S +Introdu ced +Ġ3 35 +Ġastron omy +Cont ribut +ĠPol ic +Pl atform +Ġcontain ment +w rap +Ġcoron ary +ĠJ elly +man ager +Ġheart breaking +c air +ĠChe ro +c gi +Med ical +ĠAccount ability +! !" +oph ile +Ġpsych otic +ĠRest rict +Ġequ itable +iss ues +Ġ19 05 +ĠN ek +c ised +ĠTr acking +Ġo zone +Ġcook er +ros is +Ġre open +Ġinf inity +ĠPharm aceutical +ens ional +Att empt +ĠR ory +Mar co +Ġawa its +H OW +t reated +Ġbol st +Ġreve red +Ġp ods +opp ers +00 10 +Ġampl itude +ric an +SP ONSORED +Ġtrou sers +Ġhal ves +ĠK aine +ĠCut ler +ĠA UTH +Ġsplend id +Ġprevent ive +ĠDud ley +if acts +umin ati +ĠY in +Ġad mon +ĠV ag +Ġin verted +Ġhast ily +ĠH ague +L yn +Ġled ger +Ġastron omical +get ting +Ġcirc a +ĠC ic +ĠTenn is +Lim ited +Ġd ru +ĠBY U +Ġtrave llers +Ġp ane +ĠInt ro +Ġpatient ly +Ġa iding +Ġlo os +ĠT ough +Ġ29 3 +Ġconsum es +Source File +Ġ"" " +Ġbond ing +Ġtil ted +Ġmenstru al +ĠCel estial +UL AR +Plug in +Ġrisk ing +N az +ĠRiy adh +Ġacc redited +Ġsk irm +é Ľ +Ġexam iner +Ġmess ing +Ġnear ing +ĠC hern +ĠBeck ham +Ġsw apped +Ġgo ose +K ay +Ġlo fty +ĠWal let +Ġ[ ' +Ġap ocalypse +Ġb amboo +ĠSP ACE +ĠEl ena +Ġ30 6 +ac ons +Ġtight ened +Ġadolesc ence +Ġrain y +Ġvandal ism +ĠNew town +Ġcon ject +c akes +Ġche ated +Ġmoder ators +par ams +E FF +Ġdece it +ĠST L +ĠTanz ania +ĠR I +Ġ19 23 +ĠEx ile +the l +Ġthe olog +Ġquir ky +ĠIr vine +Ġneed y +or is +U m +K a +Ġmail box +3 22 +Ġb os +ĠPet ra +K ING +Ġenlarg ed +O ften +Ġbad ass +Ġ3 43 +ĠPl aces +ĠC AD +Ġpr istine +Ġinterven ing +d irection +Ġl az +ĠD SM +Ġproject ing +ĠF unk +ag og +pay ment +n ov +Ġch atter +AR B +Ġexam inations +ĠHouse hold +ĠG us +F ord +4 14 +B oss +Ġmy stic +Ġle aps +ĠB av +ul z +b udget +Foot ball +Ġsubsid ized +Ġfirst hand +Ġcoinc ide +oc ular +Con n +ĠColl abor +Ġfool s +am ura +ah ar +r ists +Ġsw ollen +Ġexp ended +ĠP au +s up +Ġsp ar +Ġkey note +s uff +Ġunequ al +Ġprogress ing +str ings +ĠGamer gate +Dis ney +ĠEle ven +om nia +Ġscript ed +Ġear ners +bro ther +ĠEn abled +æ ³ +Ġlar vae +ĠL OC +m ess +Wil son +ĠTem plate +success fully +Ġparam ount +Ġcamoufl age +Ġbind s +ĠQu iet +ĠSh utterstock +r ush +Ġmasc ot +fort une +ĠCol t +ĠBe yon +hab i +Ġha irc +Ġ26 7 +ĠDe us +Ġtw itch +Ġconcent rating +Ġn ipples +c ible +Ġg ir +N Z +M ath +n ih +Requ ired +Ġp onder +ĠS AN +Ġwedd ings +Ġl oneliness +N ES +ĠMah jong +69 5 +add le +ĠGar ner +ĠC OUR +Br idge +Ġsp ree +ĠCald well +Ġbri bery +Ġ���� ���� +plug ins +Ġr acket +Ġchamp agne +vers ible +V ote +Ġmod ifiers +May or +6 80 +Ġassemb lies +ĠS ultan +ĠN ing +ĠLad ies +Ġsulf ur +Ġor bs +Ġ---- - +____ ___ +ĠJournal ism +Ġes ports +Ġl ush +Ġh ue +Ġspect ral +H onest +ãĥ ı +Ġbus hes +Ġrein forcement +Ġre opened +ĠWhe els +ĠM org +rie ving +Ġaux iliary +Ġj Query +ĠB AT +tes que +Ġver tex +p ure +f rey +ãĤ º +d os +Ġty ph +Ġc ull +Ġe q +Ġdec on +Ġtoss ing +Ġdispar ate +ĠBr igham +print f +led ged +Ġsu nd +Ġco zy +Ġhepat itis +per forming +Ġav al +ĠG G +f uture +Ġpet ertodd +ĠKos ovo +Ġmagn ets +Al ready +ĠEd ison +ĠCe res +ĠRA ID +Ġbrill iance +57 6 +Ġder ives +Ġhypert ension +ĠÎ Ķ +Ġlamb da +Ġfl air +Ġmission aries +Ġrap es +ĠSt arter +ĠMon ths +Ġdef y +Ġseism ic +ĠR aphael +Ġeuro zone +65 6 +z sche +Ġscr atched +Ġb ows +ĠLenn on +ĠGa ia +Ġdri pping +f acts +A le +Ġfrog s +ĠBre ast +ogene ity +ĠProsecut or +Ġampl ified +ĠHod g +ĠF n +Th ousands +ĠNI H +ĠMonitor ing +FT WARE +ĠPri ebus +ĠG rowing +hun ter +Ġdiagn ose +ĠM ald +ĠL R +Ġcrown ed +Ġburst ing +Ġdiss olution +j avascript +Ġuseful ness +ĠExec ution +: ( +ĠIv ory +a ah +Ġpersecut ed +viol ence +ist as +ĠCr ate +Ġimpuls es +ĠSp ani 
+ed es +Hand le +ĠZ erg +think able +Last ly +Ġspont aneously +Ġinconven ient +Ġdismiss ing +Ġpl otted +Ġeight y +Ġ7 37 +r ish +ĠThor nton +ath am +Ġsit com +V en +Rec ipe +t el +l und +Ġcle ars +ĠSas uke +Ġ25 8 +Ġopt ing +Ġen raged +est hetic +ĠA e +uch s +Pre p +Fl ow +Ġrun off +ĠE ating +ĠG iles +ĠAct ing +res ources +ib aba +Ġr pm +Ġske wed +ĠBl anc +ĠS akuya +Ġhot ter +Ġ19 24 +op ian +ck o +Ġcr umbling +Ġcapt ains +ĠAppropri ations +le aders +dro pping +an uts +Ġrevers ing +ĠP ose +ĠS ek +Sc ot +ĠIde a +c ise +ĠSloven ia +Ġ3 17 +Do ctor +Ġcro cod +ald i +Se a +ĠFar rell +Ġmerc enaries +ĠR NC +ĠGu ess +Ġp acing +M achine +Streamer Bot +ĠChar ity +Ġ29 8 +Ġcann ons +ĠTob y +TPP StreamerBot +ĠPass ion +cf g +Th om +Ġbad ges +ĠBern stein +. âĢĵ +ĠP OP +ĠCon j +Ġinitial ization +Ġbiod iversity +D ub +Ġfeud al +Ġdisclaim er +Ġc row +Ġign ition +ar f +S HA +Ġk Hz +h azard +ĠArt ists +oe uv +67 9 +ĠRud y +N ine +ĠRam adan +å ½ +itt o +Ġadren aline +C ert +Ġsmell ed +Ġimp unity +Ġag endas +ĠRe born +ĠCon cent +ĠSe ems +Ġo mega +ĠDust in +Ġback er +ĠSau ce +ĠBoy le +W IN +Ġsp ins +Ġpa uses +u pt +Ġshred ded +Ġstra pped +ĠCor ruption +Ġscr atches +Ġn i +Ġatt ire +ĠS AF +Factory Reloaded +ĠI PS +Ġ( % +Ġsem inar +f ocus +c ivil +Ġ18 60 +int osh +Ġcontin ual +Ġabbre vi +ĠS ok +oc obo +X M +Ġfr antic +Ġunavoid able +Ġar tery +Ġannot ations +b ath +Cl imate +Ġd ors +ĠSl ide +co ord +ĠRel oad +ĠL DL +ĠLove craft +Ġunim agin +Ġresemb led +Ġbarr acks +n p +Ġsurrog ate +Ġcategor ized +ãĤ © +Ġvacc inated +Ġdrain age +Ġind ist +ĠWhats App +Ġ18 70 +oler ance +inv oke +am orph +Ġrecon nect +Ġem anc +Ġblind ness +Ġ12 80 +intern et +c ollar +Ġalt ru +Ġab yss +ĠT RI +65 7 +Ġinf used +HE AD +Ġforest ry +ĠWood y +ĠC i +w i +s am +78 4 +hol iday +Ġmog ul +ĠF ees +ĠD EN +In ternal +ur bed +f usc +at om +ĠIll usion +Ġpoll ed +Ġfl ap +Ġco ax +L GBT +An aly +ĠSect ions +ĠCalif orn +em n +Ġh ither +ĠN IGHT +Ġn ailed +ĠPip eline +39 1 +o of +ĠPr imal +vere nd +Ġsl ashing +Ġret ri +avi our +Ġdepart ing +g il +IS C +Ġmid way +Ġultras ound +Ġbeh aving +ĠT ara +class es +V irtual +ĠColon ial +Ġstri pping +Ġorchestr ated +ĠGra ves +45 2 +ĠIron ically +ĠWrit ers +Ġl ends +ĠMan z +Ġra ven +Ġoxid ative +Ġ26 6 +EL F +act ually +asc ar +D raft +Ġfavour able +Ġhumili ating +Ġf idelity +ĠH of +ĠX uan +49 6 +Ġlay ered +at is +79 0 +Ġpay check +it on +K ar +ĠVM ware +ĠFar mer +Ġserv ic +gl omer +Ġsl ump +ĠFab ric +ĠD OC +est ing +Ġreass ure +Ġph yl +v olt +it ory +R ules +Ġoxid ation +Ġpri zed +Ġmist ress +ĠDj ango +WAR N +å ij +Ġenc ode +ĠFeed back +Ġstupid ity +I an +ĠYugoslav ia +× ¨ +ac l +UT E +19 77 +Ġqual ifies +Ġpuls es +pret ty +Ġfro ze +Ġs s +Iter ator +Ġur gently +Ġm ailed +ĠCh am +Ġsust aining +Ġbas il +Ġpupp ies +il ant +ĠP LEASE +l ap +ace ous +F ear +ĠMaster y +aut omatic +ĠT AG +Ġant im +ag les +47 3 +fram es +Ġwh ispers +ĠWho ever +Ġbra very +ĠUK IP +ract ions +"" " +Ġt ame +Ġpart ed +every thing +CON T +Ġind ebted +Ġadd r +re k +IR ED +Ġem inent +cl inton +Ġo usted +Ġreview er +Ġmelt down +Ġre arr +ĠY ao +the real +aby te +Ġst umbling +Ġbat ches +Ġ25 9 +Ġcontrace ptive +Ġprost itute +ens is +De cl +ĠSt rikes +M ilitary +ĠO ath +v acc +pp ings +05 2 +Ġpart Name +amp ing +Rep orts +K I +CH R +Ġsubt ly +sw ers +Bl ake +us ual +Ġcontest ants +Ġcart ridges +ĠGRE AT +Ġbl ush +ĠâĢ º +47 2 +Ġreason ed +ãĥ ¤ +paralle led +Ġd yn +ag ate +Ġnight ly +å Ĩ +55 6 +Ġsem antic +ĠAdv oc +Ġ !! 
+Ġdisag rees +ĠB W +V eh +Ġharm ing +Ġembr aces +Ġstri ves +Ġin land +ĠK ard +Ġhe ats +ĠGin ny +ut an +ern aut +yl ene +ĠE lev +J D +Ġh ars +ĠStar r +Ġsk ysc +Ġcollabor ators +Us ually +Ġrev olutions +ĠSTAT S +Ġdism antle +Ġconfident ly +Ġkin etic +Al i +Ġpercent ile +Ġextract ing +ill ian +est ead +Ġphysic ists +ĠMarsh al +Ġfell owship +Ġd ashed +ĠU R +ĠSi oux +ĠComp act +am ide +P ython +ĠLe igh +ĠPharm ac +ist rates +her ical +Ġf ue +ĠE min +Ġ( { +ĠNeighbor hood +Ġdisrupt ing +ĠD up +Ġg land +ĠSe v +ĠMar ian +arg on +ĠD und +Ġ< !-- +Ġstr and +Ġstadium s +z os +Ġpsych osis +ĠR ack +Ġbrilliant ly +ï¸ ı +Ġsubmer ged +ĠInst it +ĠCh ow +Ġc ages +ĠH ats +ĠU rs +Ġdil uted +us at +ien ne +ĠMembers hip +ĠBur k +Ġ ie +Ġarche type +D rug +ult on +ĠSp ock +ĠMcK ay +ĠDep end +F eatured +S oc +19 78 +ĠB ere +Ġrelent lessly +Ġcripp ling +Ġar thritis +çĶ Ł +ĠTrop ical +ĠBul g +ĠCher yl +Ġadm irable +Ġsub title +Over ride +Ġorig inating +ĠC CP +Ġsw ore +ĠSo le +ĠDis orders +3 29 +Ġprocess ion +Ġref urb +Ġimm ersed +requ ently +Ġskept ics +Ġcer amic +m itter +en stein +b elt +ĠT IT +b idden +Ġf ir +m ist +> ] +Ġwe ave +ĠParad ox +Ġentr usted +ĠBarcl ays +Ġnovel ist +og ie +80 6 +Ġnin ety +Ġdisag reements +@@@@ @@@@ +ĠAus chwitz +c ars +ĠL ET +t ub +arant ine +P OS +Ġback story +Ġcheer ful +ĠR ag +ek a +bi ased +Ġinexper ienced +ak ra +ĠW itt +t an +Ġrap ist +Ġplate au +ch al +ĠInqu is +exp ression +Ġc ipher +Ġsh aving +add en +re ly +( \ +ism a +ĠReg ulatory +CH AR +ily n +N VIDIA +G U +Ġmur m +la us +Christ opher +Ġcontract ual +ĠPro xy +ĠJa ime +ĠMethod ist +Ġstew ards +st a +per ia +Ġphys iology +Ġbump ed +Ġf ructose +Austral ian +ĠMet allic +ĠMas querade +ar b +Ġprom ul +Ġdown fall +Ġbut cher +Ġb our +ĠIN FORMATION +ĠB is +pect s +ad ena +Ġcontempl ating +ar oo +cent ered +ĠPe aks +Us ed +Ġmod em +Ġg enders +Ġ8 000 +37 1 +Ġm aternity +ĠR az +Ġrock ing +Ġhandgun s +ĠD ACA +Aut om +ĠN ile +Ġtum ult +ĠBenef it +ĠAppro ach +works hop +ĠLe aving +G er +inst ead +Ġvibr ations +Ġrep ositories +49 7 +ĠA unt +ĠJ ub +ĠExp edition +Al pha +Ġs ans +Ġoverd ue +Ġoverc rowd +Ġlegisl atures +Ġp aternal +ĠLeon ardo +Ġexp ressive +Ġdistract ions +Ġsil enced +tr ust +Ġb iking +Ġ5 60 +Ġpropri et +Ġimp osition +Ġcon glomer +Ġ= ================================================================ +ĠTe aching +ĠY ose +int ensive +T own +Ġtroll ing +ĠGr ac +ĠAS US +Y o +Ġspecial s +ĠNep h +ĠGod zilla +Dat abase +ĠHe gel +Ġ27 2 +19 76 +ĠGl oria +Ġdis emb +ĠInvestig ations +ĠB ane +ag ements +St range +Ġtre asury +ĠPl ays +Ġundes irable +Ġwid ening +Ġverb ally +Ġinf ancy +Ġcut ter +f ml +Ġ21 00 +prot otype +f ine +Ġdec riminal +Ġdysfunction al +Ġbes ie +ĠErn st +z eb +Ġnort heastern +Ġa ust +por ate +ĠMar lins +Ġsegreg ated +ew orld +ĠMa her +Ġtra verse +Ġmon astery +ur gy +G ear +s and +Com pl +ĠE MP +Ġpl ent +ĠMer cer +Ġ27 6 +TA BLE +Config uration +H undreds +Ġpr ic +Ġcollabor ating +ĠPar amount +ĠCumm ings +Ġ( < +Ġrecord er +Ġfl ats +Ġ4 16 +wh ose +Font Size +ĠOr bit +Y R +Ġwr ists +Ġb akery +) } +ĠB ounty +ĠLanc aster +Ġend ings +acc ording +ĠSal am +e asy +75 5 +ĠBur r +ĠBarn ett +onom ous +Un ion +Ġpreced ence +ĠScholars hip +ĠU X +Ġroll out +Ġbo on +al m +ĠCan ter +æ µ +Ġround ing +Ġcl ad +Ġv ap +ĠF eatured +is ations +Ġ5 40 +pol ice +Ġunsett ling +Ġdr ifting +ĠLum ia +ĠObama Care +ĠF avor +Hy per +ĠRoth schild +ĠMil iband +an aly +ĠJul iet +H u +Ġrec alling +a head +69 6 +Ġunf avorable +Ġd ances +O x +Ġleg ality +Ġ40 3 +rom ancer +Ġinqu ire +ĠM oves +\ "> +ĠVari ant +ĠMess iah +ĠL CS +ĠBah á +75 6 +Ġeyeb row +Ġ ¥ +ĠMc F 
+ĠFort y +M as +Ġpan icked +Ġtransform ations +q q +Ġrev olves +ring e +ĠA i +ax e +Ġon ward +ĠC FR +ĠB are +log in +Ġliqu ids +Ġde comp +second ary +il an +ĠCon vert +ami ya +Ġprosecut ing +Ġâī ¡ +ĠYork ers +ĠByr ne +sl ow +aw ei +J ean +Ġ26 9 +ĠSky dragon +Ġ é +ĠNicarag ua +ĠHuck abee +ĠHigh ly +Ġamph ib +ĠPast or +ĠL ets +Ġbl urred +Ġvisc eral +ĠC BO +Ġcollabor ated +z ig +Leg al +Ġapart heid +Ġbr id +Ġpres et +ĠD ET +ĠAM A +× Ķ +arch ing +auc uses +build er +Ġpo etic +Ġem ulator +ĠMole cular +Ġhon oring +ise um +Ġtract or +ĠCl uster +ĠCal m +ared evil +Ġsidew alks +Ġviol in +Ġgeneral ized +ĠAle c +Ġemb argo +Ġfast ball +ĠHT TPS +ĠL ack +ĠCh ill +ri ver +C hel +ĠSw arm +ĠLev ine +ro ying +L aunch +Ġkick er +Ġadd itive +ĠDe als +W idget +cont aining +Ġescal ate +ĠOP EN +Ġtwe aked +Ġst ash +Ġsp arks +ĠEs sex +ĠE cc +Ġconv ict +Ġblog ging +I ER +ĠH L +Ġmurd erers +75 9 +ĠH ib +Ġde pl +ĠJ ord +S ac +Ġdis sect +ĠHow e +os her +Ġcustom izable +ĠFran z +Ġat ro +Ä ĩ +Ġ000 4 +Ġout post +R oss +Ġglyph osate +ĠHast ings +ĠBE FORE +Ġsh ove +o pped +ĠSc ala +Ġam ulet +an ian +Ġexacerb ated +Ġe ater +47 1 +UM E +Ġpul p +izont al +ĠZ am +ĠAT I +imm une +aby tes +Ġunnecess arily +ĠC AT +ĠAx is +Ġvisual ize +à ī +ĠRad ical +f m +Doc uments +ĠFor rest +Ġcontext ual +ĠSy mbol +Ġtent ative +ĠDO ES +ĠGood s +Ġintermitt ent +} : +medi ated +Ġridic ule +Ġathe ism +Ġpath ogens +ĠM um +Ġre introdu +Ġ30 7 +i HUD +Ġflash light +Ġsw earing +Ġp engu +B u +Ġrot ated +ĠCr ane +Ġ() ); +Ġfashion able +Ġendors ing +46 3 +) [ +Ġingest ion +Ġcook s +Ġ9 50 +ot omy +ĠIm am +Ġk a +Ġte aser +ĠGhost s +ĠãĤ µ +19 69 +Ï ĥ +ub by +Ġconver ter +zan ne +end e +ĠPre par +ĠNic kel +ĠChim era +h im +ĠTyr ann +ĠSabb ath +ĠNich ols +Ġra pt +ih ar +Ġshe lling +Ġillum inate +Ġdent ist +ut or +ĠInteg ration +Ġwh ims +ĠLiter ary +Be aut +Ġp archment +ag ara +Br and +Ġder og +âĢ¦ ) +ĠNor se +Ġunw itting +Ġc uc +Ġborder line +Ġupset ting +Ġrec ourse +Ġd raped +ĠRad ar +Ġcold er +ĠPep si +im inary +], [ +65 8 +V i +ĠF rem +ĠP es +Ġveter inary +ĠT ED +ĠEp idem +n ova +k id +Ġdev out +o ct +j ad +M oh +ĠP AY +Ġge ometric +Ġ3 23 +Ġcircum ference +ich ick +19 75 +ĠY uri +ĠSh all +ĠH over +un in +S pr +Ġg raft +ĠHapp iness +Ġdisadvant ages +att acks +Ġhub s +ĠStar Craft +é ĸ +Ġgall eries +ĠKor ra +Ġgrocer ies +ĠGors uch +Ġrap ists +Ġfun gi +ĠTyph oon +V ector +ĠEm press +b attle +4 68 +Ġparas ite +ĠBom ber +S G +ex ist +ĠP f +Ġun se +Ġsurge ons +B irth +ĠUn sure +ĠPrint ed +ĠBehavior al +ĠA ster +Pak istan +Ġun ethical +Ġs v +ĠIo T +Ġlay outs +P ain +Ġconst ants +ĠL W +ĠB ake +Ġtow els +Ġdeterior ation +ĠBol ivia +Ġblind ed +ĠW arden +ĠMist ress +Ġon stage +Ġcl ans +ĠB EST +19 60 +Ġant ique +Ġrhet orical +ĠPer cy +ĠRw anda +, . 
+B ruce +Ġtra umat +ĠParliament ary +Ġfoot note +id ia +ĠLear ned +se eking +gen ic +Ġdim ensional +H ide +èĢ ħ +Ġintrig ue +in se +Ġle ases +Ġapp rentices +w ashing +Ġ19 26 +V ILLE +Ġsw oop +s cl +Ġbed rooms +on ics +ĠCr unch +comp atible +Ġincap ac +ĠYemen i +ash tra +z hou +d anger +Ġmanifest ations +ĠDem ons +AA F +Secret ary +ACT ED +L OD +Ġam y +ra per +eth nic +4 17 +Ġpos itives +Ġ27 3 +ĠRefuge es +Ġus b +ĠV ald +odd y +ĠMahm oud +As ia +Ġskull s +ĠEx odus +ĠComp et +ĠL IC +ĠM ansion +ĠA me +Ġconsolid ate +storm s +ont ent +99 6 +Ġcl en +Ġm ummy +fl at +75 8 +ĠV OL +oter ic +n en +ĠMin ute +S ov +Ġfin er +R h +ly cer +Ġreinforce ments +ĠJohann es +ĠGall agher +Ġgym n +S uddenly +Ġext ortion +k r +i ator +T a +Ġhippocamp us +N PR +ĠComput ing +Ġsquare ly +Ġmod elling +ĠFor ums +ĠL isp +ĠKrish na +Ġ3 24 +Ġr ushes +Ġens ued +Ġcre eping +on te +n ai +il ater +ĠHorn ets +Ġob livious +IN ST +55 9 +Ġjeopard y +Ġdistingu ishing +j ured +Ġbeg s +sim ilar +ph ot +5 30 +ĠPark way +Ġs inks +ĠHearth stone +ib ur +ĠBat on +Av oid +Ġd ancer +Ġmag istrate +ary n +Ġdisturb ances +ĠRom ero +Ġpar aph +Ġmis chief +âĸ ĵ +ĠSh aria +Ġur inary +r oute +iv as +f itted +Ġeject ed +ĠAl buquerque +Ġ4 70 +Ġirrit ated +ĠZ ip +ĠB iol +à į +Ġden ounce +Ġbin aries +ĠVer se +Ġopp os +ĠKend rick +ĠG PL +Ġsp ew +ĠEl ijah +ĠE as +Ġdr ifted +so far +Ġannoy ance +ĠB ET +47 4 +ĠSt rongh +it ates +ĠCogn itive +oph one +ĠIdent ification +ocr ine +connect ion +Ġbox er +ĠAS D +ĠAre as +Y ang +t ch +ull ah +Ġdece ive +Comb at +ep isode +cre te +W itness +Ġcondol ences +ht ar +Ġhe als +Ġbuck ets +ĠLA W +B lu +Ġsl ab +ĠOR DER +oc l +att on +ĠSteven son +ĠG inger +ĠFriend ly +ĠVander bilt +sp irit +ig l +ĠReg arding +ĠPR OG +Ġse aling +start ing +Ġcard inal +ĠV ec +ĠBe ir +Ġmillisec onds +we ak +per se +Ġster ile +ĠCont emporary +ĠPh ant +ĠCl o +Ġout p +Ġex iled +Ġ27 7 +Ġself ie +Ġman ic +Ġn ano +ter ms +Alex ander +Ġres olves +Ġmillenn ia +Ġexpl odes +Ġconst ellation +Ġadul tery +m otion +D OC +Ġbroad casters +Ġkinderg arten +ĠMay weather +ĠE co +ich o +Ġ28 7 +l aun +Ġm ute +Ġdisc reet +Ġpres chool +Ġpre empt +De lete +ĠFre ed +P i +H K +Ġblock er +ĠC umber +Ġw rought +d ating +Ġins urer +Ġquot as +Ġpre ached +Ġev iction +ĠReg ina +ĠP ens +Ġsevent een +ĠN ass +D ick +Ġfold s +Ġd otted +ĠA ad +Un iversal +Ġp izz +ĠG uru +Ġso ils +Ġno vice +ĠNe ander +Ġst ool +Ġdeton ated +ĠPik achu +ĠMass ive +IV ER +ĠAb del +Ġsubdu ed +Ġtall est +Ġprec arious +Ġa y +r ification +ĠOb j +c ale +Ġun question +cul osis +ad as +igr ated +D ays +Ġque ens +ĠGaz ette +ĠCol our +ĠBow man +ĠJ J +ï ve +Ġdomin ates +Stud ent +Ġm u +Ġback log +ĠElect ro +Tr uth +48 3 +Ġcond ensed +r ules +ĠCons piracy +Ġacron ym +hand led +ĠMat te +j ri +ĠImp ossible +l ude +cre ation +Ġwar med +ĠSl ave +Ġmis led +Ġfer ment +ĠK ah +ink i +ke leton +cy l +ĠKar in +Hun ter +Reg ister +ĠSur rey +Ġst ares +ĠW idth +ĠN ay +ĠSk i +Ġblack list +uck et +Ġexp ulsion +im et +Ġret weet +vant age +Fe ature +Ġtro opers +Ġhom ers +9 69 +Ġconting ency +ĠW TC +ĠBrew er +fore ign +W are +S olar +Ġund ue +RE C +ulner able +path ic +ĠBo ise +Ġ3 22 +Ġarous ed +ĠY ing +ä¸ į +uel ess +Ġp as +Ġmor p +Ġfl oral +Ex press +ud ging +k B +ĠGr anted +Ø ¯ +ĠMich a +ĠGoth ic +ĠSPEC IAL +ĠRic ardo +F ran +Ġadminister ing +6 20 +por a +Ġ ® +Ġcomprom ises +Ġb itten +Ac cept +Th irty +Ð ² +Ġmater ially +ĠTer r +ig matic +ch ains +Ġdo ve +stad t +Mar vel +FA ULT +Ġwind shield +Ġ3 36 +ad ier +Ġsw apping +Ġflaw less +ĠPred ator +ĠMiche le +Ġprop ulsion +ĠPsych ic +Ġassign ing +Ġfabric ation +Ġbar ley +l ust 
+Ġtow ering +Ġalter cation +ĠBent ley +Sp here +Ġtun a +ĠClass es +Fre edom +un er +L ady +v oice +Ġcool est +or r +Ġpal p +$ { +Ġhyster ia +ĠMet atron +p ants +Ġspawn ing +Exper ts +ĠInvest ors +ĠAn archy +Ġshr unk +ĠVict im +Ġ28 9 +Ġec stasy +ĠB inding +58 5 +ĠMel ody +57 8 +ot ally +ĠE tsy +lig a +Ġapplaud ed +Ġswe ating +Ġredist ributed +Ġpop corn +Ġsem inal +f ur +ĠNeuro science +R and +ĠO st +ĠMadd en +ĠIncre asing +ĠDaw kins +ĠSub way +Ġar sen +cons erv +B UR +Ġsp iked +ĠLy ft +ĠImper ium +ĠDrop box +Ġfav oured +Ġencomp asses +gh ost +Ġins pires +Ġbur geoning +ĠY oshi +ĠVert ical +ĠAud itor +Ġint ending +Ġfilib uster +Bl oom +f ac +ĠCav s +ign ing +Ġcowork ers +ĠBarb arian +rem ember +FL AG +Ġaudit ory +ason ry +Col lege +Ġmut ed +gem ony +ob in +ĠPsych o +9 68 +Ġlav ish +Ġhierarch ical +ĠDr one +ou k +Ġcripp led +ĠMax im +Sl ot +Ġqu iz +ĠV id +if ling +Ġarchae ologists +Ġabandon ment +d ial +le on +ĠF as +T ed +Ġr aspberry +Ġmaneu vers +Ġbehavi ours +Ġins ure +Ġrem od +Sw itch +h oe +Ġsp aced +Ġafford ability +ĠF ern +not ation +ĠBal anced +Ġoccup ies +en vironment +Ġneck lace +Ġsed an +F U +ĠBrav o +Ġab users +ĠAn ita +met adata +ĠG ithub +ait o +ĠF aster +ĠWass erman +ĠF lesh +Ġth orn +r arily +ĠMer ry +w ine +Ġpopul ace +ĠL ann +Ġrepair ing +Ġpsy che +Ġmod ulation +aw aru +âĢĭ âĢĭ +ari j +Ġdecor ations +Ġapolog ise +ĠG arg +app ly +Ġgive away +ĠFl an +ĠWy att +U ber +Ġauthor ised +ĠMor al +HAHA HAHA +activ ate +Ġtorped o +ĠF AR +Ġam assed +ĠA ram +ark in +ĠVict ims +st ab +Ġo m +ĠE CO +Ġopio ids +Ġpurpose ly +ĠV est +Ġer g +at an +ĠSur gery +Ġcorrect ing +ĠOrt iz +ĠBe et +Ġrev oke +Ġfre eway +ĠH iggins +F ail +ĠFar ms +ĠAT P +h ound +Ġp oking +ĠCommun ists +mon ster +iment ary +Ġunlock ing +Ġunf it +we ed +en ario +at ical +ĠEnlight enment +ĠN G +ĠComp ensation +de en +ĠWid ow +ĠCind y +ĠAfter wards +Ġ6 000 +ikh ail +ag ically +Ġrat ified +Ġcasual ty +H OME +p sey +f ee +Ġspark ling +Ġd é +Ġconcert ed +C atal +Ġcomp lying +ĠA res +ĠD ent +Sh ut +Ġsk im +ad minist +Ġhost ilities +ĠG ins +Ġ6 08 +Ġm uddy +ĠMc Int +ĠDec ay +5 25 +Ġconspic uous +ĠEx posure +Ġresc ind +Ġwear able +Ġ3 28 +our met +ah s +ĠRob ots +Ġe clips +inst ance +ĠRE PORT +ĠApp l +0 30 +ĠSk ies +01 00 +Ġfall acy +S ocket +ĠRece iver +Ġsol ves +ĠButter fly +ĠSho pping +ĠFI RE +65 4 +Med ic +Ġsing ers +ĠNeed less +'' '' +isher s +ĠD ive +58 8 +Ġselect ively +Ġcl umsy +88 9 +Ġpurch aser +ear ned +ard y +Ġbenef iting +eng lish +Ġyield ing +ĠP our +Ġspin ach +Ġdel ve +ĠC rom +6 10 +Ġexport ing +ĠMA KE +Ġ26 3 +Ġg rop +Ġenv oy +ĠInqu iry +ĠLu igi +d ry +ĠT uring +Thumbnail Image +ĠVar iety +Ġfac et +Ġfl uffy +Ġexcerpt s +Ġsh orth +ĠOl sen +CL UD +Ġrel iant +ĠUN C +T our +Ġbat hing +Comp any +Ġglobal ization +P red +ĠMalf oy +Ġh oc +j am +craft ed +ĠBond s +ĠKiss inger +Eng land +Ġorder ly +cat entry +Ġ26 1 +Ġexch anging +ĠInt ent +ĠAmend ments +D OM +Ġst out +³³³³³³³³ ³³³³³³³³ +ĠAir bus +Ġ27 8 +hy de +P oll +Item ThumbnailImage +Ġlooph oles +ĠPill ar +Ġexpl or +St retch +A part +Ġun married +Lim it +ĠTransform ers +Ġintellect ually +unct ure +18 00 +Ġd arn +B razil +Ġleft over +ber us +f red +Mine craft +3 26 +ĠForm s +Ġproof s +ĠDes igned +Ġindex es +ĠSupp ose +EM S +ĠL oving +ĠBon nie +im ating +OT US +Ġconduct or +Ġbehav ed +ĠF ren +Ġsy nerg +Ġmillenn ium +Ġcater ing +ĠL auder +W r +ĠY iannopoulos +ĠAT F +Ġensl aved +Ġawaken ed +D VD +ĠED ITION +ĠConc ert +ĠChall enger +ĠH aku +umer ic +Ġdep recated +ĠSH AR +4 12 +Ġdy stop +Ġtremb ling +Ġdread ed +ĠSp ac +p adding +Re pl +ĠG arrison +M ini +Ġun paralleled +am ar +URR 
ENT +w reck +c ertain +t al +ĠC LS +app ings +Ġsens ed +Ġf encing +ĠPas o +ĠDes k +Ġsc off +Ġcontem plate +ĠL iga +l iquid +75 7 +Ġapp rentice +ĠUCH IJ +5 70 +ĠTh ousand +ĠIll um +Ġchampion ed +ãĤ Į +Ġelect ors +Ġ3 98 +ĠH ancock +round ed +ĠJ OHN +Ġuns atisf +Ġqual ifier +ĠGad get +EN E +Ġdead liest +ĠPl ants +Ġ ions +Ġacc ents +Ġtwe aking +Ġsh aved +F REE +ĠCh aser +Again st +9 60 +Ġmeth amphetamine +Ġnormal ized +Ġ$ \ +ĠPre cision +ĠGu am +Ġch oked +ĠX II +ĠCast ing +Tor rent +Ġscal p +ĠJagu ar +w it +Ġsem ic +ix ie +ĠG ould +Ġconf ines +N usra +ĠL on +ĠJ ugg +y cle +ĠCod ec +E gypt +Ġrest rain +ĠAl iens +Ġch oking +ĠD unk +ĠBell a +ab c +Ġsl ang +Ġneuro trans +s av +Ġempower ment +â ĨĴ +Ġclim bers +ĠM im +ĠF ra +ros se +Cap ital +ĠCth ulhu +Inter face +Ġprof icient +ĠIN TO +Ġ3 18 +ront al +5 80 +ĠDes pair +K enn +Ġscrim mage +ĠCo at +as ions +Ġwall paper +ĠJ ol +Ġresurg ence +Ġant iv +ĠB alls +² ¾ +Ġbuff ers +Ġsub system +ĠSt ellar +ĠL ung +A IDS +Ġerad icate +Ġblat antly +Ġbehav es +ĠN un +Ġant ics +ex port +DE V +w b +Ġph p +ĠInteg rity +Ġexplore r +Ġrev olving +auth ored +g ans +Ġbas k +Ġas ynchronous +å į +TH ING +69 8 +G ene +ĠR acer +ĠN ico +iss ued +Ġser mon +p ossibly +Ġsize of +Ġentrepreneur ial +ox in +ĠMin erva +Ġpl atoon +n os +ri ks +A UT +ĠAval anche +ĠDes c +ij 士 +ĠP oc +Ġconf erred +Î » +Ġpat ched +F BI +66 2 +Ġfract ures +Ġdetect s +Ġded icate +Ġconstitu ent +Ġcos mos +W T +Ġswe ats +Ġspr ung +b ara +s olid +Ġuns us +Ġbul ky +ĠPhilipp e +ĠFen rir +Ġtherap ists +ore al +^^ ^^ +Ġtotal ed +Ġboo ze +ĠR PC +Prosecut ors +Ġdis eng +ĠSh ared +Ġmotor cycles +Ġinvent ions +Ġlett uce +ĠMer ge +ĠJ C +Ġspiritual ity +ĠWAR NING +Ġunl ucky +ĠT ess +Ġtong ues +ĠD UI +T umblr +Ġle ans +Ġinv aders +Ġcan opy +ĠHur ricanes +ĠB ret +ĠAP PLIC +id ine +ick le +Reg arding +Ġve ggies +Ġe jac +ju ven +F ish +D EM +ĠD ino +Th row +ĠCheck ing +be ard +( & +Ġj ails +Ġh r +trans fer +iv ating +Ġfle ets +ĠIm ag +ĠMc Donnell +Ġsnipp et +Is a +ĠCh att +ĠSt ain +ĠSet FontSize +ĠO y +ĠMathemat ics +49 4 +Ġelectro ly +ĠG ott +ĠBr as +B OOK +ĠF inger +d ump +Ġmut ants +Ġrent als +Ġinter tw +Ġc reek +ail a +Bro ther +ĠDisc ord +pe e +raw ler +Ġcar p +Ġ27 9 +ãĤ· ãĥ£ +rel ations +Ġcontr asts +Col umn +Ġrec onnaissance +Ġun know +Ġl ooting +Ġregul ates +Ġopt imum +ĠChero kee +ĠA ry +Lat est +Ġroad side +Ġd anced +ĠUnic orn +A cknowled +Ġuncont roll +ĠM US +at io +ch ance +ha ven +VAL UE +Ġfavour ites +Ġceremon ial +b inary +pe ed +wood s +EM P +Ġv ascular +Ġcontempl ated +Ġbar ren +ĠL IST +Y ellow +ospons ors +Ġwhisk y +ĠM amm +ĠDeV os +min imum +H ung +44 2 +P ic +ĠSnap dragon +77 6 +Ġcar ving +Ġund ecided +Ġadvantage ous +Ġpal ms +ĠA Q +Ġst arch +L oop +Ġpadd le +Ġfl aming +ĠHor izons +An imation +bo ost +Ġprob abilities +ĠM ish +Ġex odus +ĠEditor ial +Ġfung us +Ġdissent ing +ĠDel icious +rog ram +ĠD yn +d isk +t om +Ġfab rics +ĠC ove +ĠB ans +Ġsoft en +ĠCON S +Ġin eligible +Ġestim ating +ĠLex ington +pract ice +of i +Ġshe dding +ĠN ope +Ġbreat hed +ĠCorinth ians +y ne +ek i +B ull +Ġatt aching +reens hots +Ġanaly se +ĠK appa +Ġuns ustainable +Ġinter pol +ank y +he mer +Ġprot agonists +Ġform atted +ĠBry ce +ĠAch illes +ĠAb edin +sh ock +Ġb um +b os +qu a +ĠW arn +q t +ĠDi abetes +8 64 +ĠIn visible +Ġvan ish +Ġtrans mitting +Ġmur ky +ĠFe i +Ġawa ited +ĠJur assic +umm ies +Ġmen acing +g all +C ath +B uilt +ild o +ĠV otes +Ġon t +Ġmun itions +ĠFre em +ÃŃ n +Ġdec ency +lo pp +ie ved +ĠG ord +Ġun thinkable +ĠNews week +Ġ3 21 +He at +Ġpresent er +ji ang +Ġpl ank +ĠAval on +Ġben z +ĠR out +Ġslam ming +ĠD ai +ou 
ter +ĠCook ie +ĠAlic ia +ge y +Ġvan ity +Ġow l +á µ +t ested +ĠAw akens +Ġcan v +Ġblind ly +ĠRid ley +ĠEm ails +Requ ires +ĠSer bian +ograp hed +if rame +eter ia +Ġaltern ating +qu iet +Ġsoc iology +ĠUn lock +ĠCommun ism +Ġo ps +Ġatt ribution +Ġab duction +ĠAb ram +Ġsidel ined +ĠB OOK +Ġref ining +ĠFe eling +ĠOs lo +ĠPru itt +r ack +ang ible +Ġcaut iously +ĠM ARK +eed s +M ouse +ĠStep h +ĠP air +S ab +99 7 +ĠBa al +B ec +Ġcomm a +ĠP all +ĠG ael +Ġmisunder stand +ĠP esh +Order able +Ġdis mal +ĠSh iny +% " +Ġreal istically +Ġpat io +ĠG w +ĠVirt ue +Ġexhaust ing +wh atever +oph ys +y ip +4 18 +Ad just +ĠWa iting +ess on +ĠMaz da +ĠDo zens +Ġstream lined +Ġincompet ence +ĠM eth +Ġeth os +ON ES +Ġincent iv +Ġgr itty +ĠBut cher +Head er +Ġexp onential +à Ł +Ġcorrel ate +Ġcons ensual +s ounding +R ing +Orig in +Ġcon clusive +fe et +ac ly +ĠF ernandez +Buy able +Ġd ucks +aunt lets +Ġel ong +Ġ28 6 +Ġsim ul +G as +ĠK irst +Ġprot r +ĠRob o +ĠAo E +op ol +Ġpsych ologically +sp in +ilater ally +ĠCon rad +W ave +44 1 +ĠAd vertisement +ĠHarm on +ĠOri ental +is Special +Ġpresum ptive +Ġw il +ĠK ier +ne a +Ġp pm +Ġhar bour +ĠW ired +comp any +Ġcor oner +atur days +ĠP roud +ĠN EXT +ĠFl ake +val ued +ce iver +Ġfra ught +Ġc asing +Ġrun away +Ġg in +ĠLaure nt +ĠHar lem +ĠCur iosity +qu ished +Ġneuro science +ĠH ulu +Ġborrow er +Ġpetition er +ĠCo oldown +W ARD +Ġinv oking +conf idence +For ward +Ġst s +pop ulation +Delivery Date +Fil m +ĠC ov +quick Ship +quickShip Available +prim ary +isSpecial Orderable +inventory Quantity +channel Availability +BO X +ĠMulti player +ĠJen ner +77 8 +ĠM d +Ġ~ /. +M N +Ġchild ish +Ġantioxid ant +ĠChrom ebook +Ġ27 4 +Ġscreen play +Ġadvent urous +ĠRelations hip +respons ive +ming ton +Ġcorner stone +ĠF ey +F IR +Ġrook ies +ĠF eaturing +Ġorig inate +Ġelectro des +ant es +Ġscript ures +Ġgl ued +Ġdiscont ent +Ġaff licted +lay out +B rave +Ġm osa +ĠQuant ity +ĠH ik +w inner +H ours +Ġent ail +ĠCell s +olog ue +Ġv il +Ġpre acher +Ġdecor ative +d ifferent +Ġprejud ices +ĠSm oking +ĠNotting ham +so Type +Ġrhyth ms +ĠAl ph +bl ast +Ste el +ĠDaniel le +Ġstr ife +Ġrem atch +so DeliveryDate +ĠF ork +t rip +ol ulu +hes es +C G +ĠPOLIT ICO +ost a +ĠDr ift +é¾įå ¥ +é¾įå¥ ij士 +Ġvet ting +ĠJin ping +ĠRec ession +Min or +ĠF raud +enf ranch +Ġconven ed +ĠNA ACP +ĠMill ions +ĠFarm ing +ĠW oo +ĠFl are +rit o +imm igrant +Ġvac ancy +ĠHE AD +ĠV aj +eg al +ĠV igil +Stud y +Ġru ining +Ġr acks +Ġhe ater +ĠRand olph +ĠBr ush +ĠT ir +Ø ¨ +Ġc ov +% ] +Ġrecount s +ĠO PT +ĠM elt +Ġtr uce +Ġcas inos +Ġcrus ade +Ġcarn age +Ġstri pe +ĠK yl +Text ures +Ġ6 98 +Ġpro clamation +Ġgood ies +Ġ........ .. 
+pro claimed +P olit +Ġtop ical +Ġspecial ize +ĠA min +g m +Ġanch ored +Ġbear ings +s ample +ĠHigh land +ĠAut ism +Ġmerc enary +Ġinterview er +L ER +ĠSom ers +Ġembry o +ĠAss y +Ġ28 1 +ĠEd iting +ĠCh osen +6 60 +Ġp ci +ĠThunder bolt +BI LL +Ġchuck led +jri wal +h of +Ġearth ly +() { +ind ependence +Ġdisp ers +ĠV endor +ĠG areth +Ġp als +P enn +ĠSub mit +ic um +Th u +Ġcl andestine +Ġcann ibal +ĠCl erk +E Stream +gal itarian +âĻ ¥ +g ew +Ġhor rend +ĠL ov +ĠRe action +ocr in +Class ic +Ġecho ing +Ġdiscl osing +ĠIns ight +og un +ĠInc arn +upload s +pp erc +guy en +Ġ19 01 +ĠB ars +68 7 +Ġb ribes +ĠFres no +ur at +ĠRe ese +Ġintr usive +Ġgri pping +ĠBlue print +ĠR asm +un ia +man aged +ĠHeb do +Ġ3 45 +Ġdec oding +Ġpo ets +Ġj aws +ĠF IGHT +am eless +ĠMead ows +ĠHar baugh +Inter view +ĠH osp +ĠB RA +Ġdelet ion +m ob +W alker +ĠMoon light +ĠJ ed +ĠSoph ia +Ġus ur +Ġfortun ately +ĠPut ting +ĠF old +Ġsan itation +Ġpart isans +IS ON +B ow +ĠCON C +ĠRed uced +ĠS utton +Ġtouch screen +Ġembry os +âĢ¢âĢ¢ âĢ¢âĢ¢ +ĠK rug +com bat +ĠPet roleum +Ġam d +ĠCos mos +Ġpresc ribing +Ġconform ity +ours es +Ġplent iful +Ġdis illusion +ĠEc ology +itt al +Ġf anc +Ġassass inated +regn ancy +Ġperenn ial +ĠBul lets +Ġst ale +Ġc ached +ĠJud ith +ĠDise ases +All en +Ġl as +Ġsh ards +ĠSu arez +ĠFriend ship +inter face +ĠSupp orters +add ons +46 2 +ĠIm ran +ĠW im +Ġnew found +ĠM b +An imal +Ġd arling +and e +Ġrh y +ĠTw isted +pos al +yn ski +Var ious +× ľ +ĠK iw +uy omi +Ġwell being +ĠL au +an os +Ġunm ist +Ġmac OS +Ġrest room +ĠOl iv +ĠAir ways +Ġtimet able +9 80 +Ġrad ios +v oy +ias co +Ġcloud y +ĠDraw ing +Any thing +Sy ria +ĠH ert +st aking +Ġun checked +Ġb razen +ĠN RS +69 7 +onom ic +est ablish +Ġl eng +Ġdi agonal +ĠF ior +L air +ĠSt ard +Ġdef icient +jo ining +be am +Ġomn ip +Ġbl ender +Ġsun rise +Mo ore +ĠF ault +ĠCost ume +ĠM ub +Fl ags +an se +Ġpay out +ĠGovern ors +ĠD illon +ĠBan ana +N ar +Ġtra iled +Ġimperial ist +um ann +ats uki +4 35 +ĠRoad s +Ġsl ur +ĠIde ally +Ġt renches +C trl +Ġmir rored +ĠZ el +ĠC rest +Comp at +ĠRoll s +sc rib +ĠTra ils +omet ers +w inter +Ġimm ortality +il ated +Ġcontrad icts +un iversal +ill ions +ĠM ama +opt im +AT URE +Ġge o +et ter +ĠCar lo +4 24 +Ġcanon ical +ĠStrongh old +n ear +Ġperf ume +Ġorche stra +od iac +Ġup he +Ġreign ing +vers ive +Ġc aucuses +ĠD EM +Ġinsult ed +Ġ---- -- +ĠCr ush +Ġroot ing +ĠWra ith +Ġwh ore +Ġto fu +C md +ĠB ree +Ġ$ _ +Ġr ive +ĠAd vertising +Ġw att +ĠH O +Ġpersu asive +ĠParam eters +Ġobserv ational +ĠN CT +ĠMo j +ĠSal on +Ġtr unc +Ġexqu isite +ĠMar a +Ġpo op +ĠAN N +Ex c +ĠWonder ful +ĠT aco +Ġhome owner +ĠSmith sonian +orpor ated +mm mm +Ġlo af +ĠYam ato +ĠInd o +Ġcl inging +á s +Ġimm utable +h ub +Or ange +Ġfingert ips +ĠWood en +ĠK idd +ĠJ PM +ĠDam n +C ow +c odes +48 2 +Ġiniti ating +ĠEl k +ĠCut ting +Ġabsent ee +ĠV ance +ĠLil ith +G UI +Ġobsc ured +Ġdwar ves +ĠCh op +ĠB oko +Val ues +Ġmult imedia +Ġbrew ed +Reg ular +CRIP TION +ĠMort al +Ġa pex +Ġtravel er +Ġbo ils +Ġspray ing +Rep resent +ĠStars hip +4 28 +Ġdisappro val +Ġshadow y +Ġlament ed +ĠRe place +ĠFran ç +67 7 +d or +Ġunst oppable +Ġcoh orts +gy n +ĠClass ics +ĠAm ph +Ġsl uggish +ĠAdd iction +ĠPad res +Ġins cription +Ġin human +min us +ĠJere miah +at ars +Ter ror +ĠT os +ĠSh arma +ast a +c atch +Ġpl umbing +ĠTim bers +Sh ar +H al +ĠO sc +Ġcou pling +hum ans +Ġsp onge +Ġid ols +ĠSp a +ĠAdv ocate +ĠBe ats +lu a +Ġtick ing +Ġload er +ĠG ron +8 10 +Ġstim ulated +Ġside bar +ĠManufact urer +ore And +19 73 +Ġpra ises +ĠFl ores +dis able +ĠElect rical +ra ise +E th +Ġmigr ated +Ġlect urer +K ids +ĠCa 
vern +Ġk ettle +Ġgly c +ĠMand ela +ĠF ully +å§ « +FIN EST +Ġsquee zing +ĠRy der +amp oo +oreAnd Online +Inst oreAndOnline +Buyable InstoreAndOnline +Ġcommem orate +ĠRamp age +Aust in +ĠSh roud +ĠRu ins +9 15 +ĠK H +Ġwater front +ĠE SC +b aby +ĠC out +ĠEm blem +Ġequival ents +49 2 +Un ique +ĠNiet zsche +brow ser +Ġim itation +ĠWere wolf +ĠKir in +ac as +' ," +Ġà ¾ +Review ed +Ġc unt +Ġvo ic +ĠLen ovo +Ġbond ed +48 1 +Ġinhib itors +Ġendeav ors +ĠHav ana +ĠSt out +ĠJ olly +A ctor +*/ ( +Ġoccur rences +ĠT ens +Incre ased +ĠACT ION +Ġ ãĢĮ +ĠRank ings +ĠB reat +Ġ30 9 +D ou +Ġimpact ing +ĠDuc hess +pre fix +Q B +Ġsummon ing +Ġbest owed +ĠKe pler +ĠPOW ER +c ube +ĠK its +ĠG rip +Ġop ium +Ġrep utable +t oc +ich ael +ĠR ipple +Ġcaf é +ĠZ oom +ĠBur ma +Ġwa ive +Ġst alls +Ġdem eanor +inc erity +Ġfluor ide +ĠSH OULD +Par is +Ġlong ing +Ġpl at +Ġgross ly +Ġbull s +Ġshowc asing +ex pected +ĠG addafi +engine ering +Re peat +ĠK ut +Ġconce ivable +Ġtrim med +osc ope +ĠCand idate +ĠT ears +rol og +Lew is +S UP +Ġroad map +Ġsal iva +Ġtrump et +Jim my +Ġmirac ulous +Ġcolon ization +Ġam put +ĠGN OME +ate ch +D ifferent +ĠE LE +ĠGovern ments +ĠA head +ãħĭ ãħĭ +word press +L IB +ĠIn clude +ĠDor othy +0 45 +ĠColomb ian +Ġle ased +88 4 +Ġde grading +ĠDa isy +i ations +Ġbapt ized +Ġsurn ame +co x +Ġblink ed +ãĥ ¢ +Ġpoll en +Ġder mat +Ġre gex +ĠNich olson +ĠE ater +ç ľ +rad or +Ġnarrow er +Ġhur ricanes +Ġhalluc inations +r idden +ISS ION +ĠFire fly +Ġattain ment +Ġnom inate +Ġav ocado +ĠM eredith +Ġt s +Ġreve rence +Ġe uph +Ġcr ates +ĠT EXT +Ġ4 43 +Ġ3 19 +J SON +iqu ette +Ġshort stop +ic key +Ġpro pelled +Ġap i +ĠTh ieves +77 9 +Ġovers aw +Ġcol i +ĠNic ola +Ġover cl +ik awa +ĠC yr +Ġ38 4 +78 9 +ĠAll ows +10 27 +Det roit +TR Y +set up +ĠSocial ism +Sov iet +s usp +ĠAP R +ĠShut down +Ġal uminium +zb ek +ĠL over +GGGG GGGG +Ġdemocr acies +Ġ19 08 +ĠMer rill +ĠFranco is +gd ala +Ġtraff ickers +ĠT il +ĠGo at +Ġsp ed +ĠRes erv +Ġpro d +55 2 +Ġc ac +ĠUn iv +ĠSch we +Ġsw irling +ĠWild erness +ĠEgg s +Ġsadd ened +Ġarch aic +H yd +Ġexcess ively +B RE +Ġaer ospace +ĠVo ices +Cra ig +Ġign ited +In itially +ĠMc A +Ġhand set +Ġreform ing +Ġfrust rations +ĠDead pool +ĠBel ichick +ract or +ĠRagnar ok +ĠD rupal +ĠApp roximately +19 20 +ĠHub ble +arm or +ĠSar as +ĠJon as +Ġnostalg ic +Ġfeas ibility +Sah aran +Ġorb iting +Ġ9 70 +R u +Ġsh in +ĠInvestig ators +Ġinconsist encies +ĠP AN +B G +Ġgraz ing +Ġdetect ors +ĠStart up +ĠFun ny +ĠNa omi +Consider ing +Ġh og +ut f +ce mic +Ġfort ified +ĠFun ctions +Ġcod ec +nut rition +H at +" ! 
+micro soft +55 8 +ĠTh in +ĠA CE +Al ias +ĠO PS +p apers +P K +ãĢ İ +Ġimpro bable +N orthern +equ al +Ġlook out +Ġty res +ĠMod ified +ĠK op +Abs olutely +Ġbuild up +sil ver +Ġaud i +Ġgro tesque +ĠSab er +ĠPres byter +ON Y +Ġglac iers +ĠSho als +ĠK ass +ĠH RC +ĠNic ol +ĠL unch +ĠF oss +âĸ Ĵ +AD RA +ĠOne Plus +o ing +ground s +Ġincident al +Ġdatas ets +68 9 +ĠClarks on +Ġassemb ling +ĠCorrect ions +Ġdrink ers +Ġqual ifiers +Ġle ash +Ġunf ounded +ĠH undred +Ġkick off +T i +Ġrecon cil +ĠGr ants +ĠCompl iance +ĠDexter ity +Ġ19 06 +w arn +D allas +Max imum +n ard +av ia +be aut +ens itivity +tr ace +Ġpione ers +ĠF ract +ãĢ ı +Ġpre cept +Ġgloss y +ĠI EEE +Ac ross +Ġ6 80 +S leep +che on +Ġsatir ical +ĠMin otaur +ĠCla ude +Ġr é +ape go +Ġcar rot +ĠSem in +ino a +Ġz o +Ind ependent +Ġdiagn oses +ĠC ue +M AR +Ġrend ition +ĠK ik +Ġpath ology +Ġselect s +Link edIn +Ġass ay +ĠD res +Ġtext ual +post ed +IT AL +ĠM aul +N eal +Ġinter connected +Ġerr atic +ĠVir us +Ġ5 30 +Ġenvironmental ists +ĠP helps +Ġeng agements +ĠIN ST +Ġeconom ical +nox ious +Ġg earing +izz y +Ġfavor ably +ĠMcG ill +T erm +Ġh anged +Ġball park +ĠRe yes +Ġbe ware +ĠP sal +ĠMass acre +q i +Ġin accessible +acly sm +Ġfr ay +ill ac +Ġbitter ly +ĠCert ification +Mich igan +Ġir respective +al ore +Em pty +Ġendorse ments +Ġund et +f g +equ ipped +Ġmerc iless +ĠC ust +Ġimm ature +Ġvou cher +ĠBlack well +Ñ ı +h awk +dis ciplinary +ile e +ĠMak oto +ĠD ude +ãĥĩ ãĤ£ +Y ears +Ġin ver +Ġsh aman +ĠY ong +ip el +ell en +ĠCath y +br ids +Ġs arc +65 1 +N ear +Ġground work +Ġam az +Ġ4 15 +ĠHunting ton +hew s +ĠB ung +Ġarbit rarily +ĠW it +ĠAl berto +Ġdis qualified +best os +46 1 +Ġp c +Ġ28 4 +ro bat +Rob in +Ġh ugs +ĠTrans ition +ĠOcc asionally +Ġ3 26 +ĠWh ilst +ĠLe y +Ġspaces hip +cs v +Ġun successfully +ĠA u +le ck +ĠWing ed +ĠGrizz lies +. � +Ġne arer +ĠSorce ress +ĠInd igo +El se +8 40 +let es +Co ach +Ġup bringing +ĠK es +Ġseparat ist +Ġrac ists +Ġch ained +Ġabst inence +lear ning +Ġrein stated +Ġsymm etry +Ġremind ers +ĠChe vy +Ġm ont +Ġexempl ary +ĠT OR +Z X +Ġqual itative +ĠSt amp +ĠSav annah +ĠRoss i +Ġp aed +Ġdispens aries +ĠWall s +ĠCh ronic +Ġcompliment ary +ĠBeir ut +Ġ+ --- +igs list +Ġcrypt ographic +mas ters +ĠCap itals +Ġmax imal +Ġent ropy +Point s +Ġcombat ants +l ip +ĠGl ob +ĠB MC +ph ase +th ank +HT TP +Ġcomm uter +Ġ\( \ +.. / +ĠReg ener +ĠDO I +ĠActiv ision +Ġsl it +os al +RE M +Ġch ants +Y u +Ke ys +Bre xit +ĠFor ced +Ari zona +Ġsquad ron +IS O +ĠMal one +Ġ3 38 +Ġcontrast ing +Ġt idal +Ġlib el +Ġimpl anted +Ġupro ar +ĠC ater +Ġpropos itions +M anchester +ĠEuro s +it amin +G il +ĠEl ven +ĠSe ek +ĠB ai +Ġredevelop ment +ĠTown s +ĠL ub +! ", +al on +K rist +Ġmeas urable +Ġimagin able +Ġapost les +Y N +7 60 +Ġster oid +Ġspecific ity +ĠL ocated +ĠBeck er +ĠE du +ĠDiet ary +uts ch +ĠMar ilyn +Ġbl ister +ĠM EP +ĠK oz +ĠC MS +y ahoo +ĠCar ney +Ġbo asting +ĠC aleb +By te +read s +ad en +Pro blem +ĠWood ward +S we +S up +ĠK GB +Set up +Ġtac it +Ġret ribution +Ġd ues +ĠM ü +. ? 
+ä¸ Ń +p ots +Ġcame o +ĠP AL +educ ation +A my +like ly +g ling +Ġconstitution ally +ĠHam m +ĠSpe ak +Ġwid gets +br ate +Ġcra ppy +ĠI ter +Ġanticip ating +ĠB out +P ixel +ĠY ep +ĠLaur ie +Ġh ut +Ġbullet in +ĠSal vation +Ġch ats +ear able +Honest ly +AL TH +onse qu +c ult +isco very +ovy ch +Ġse lves +ĠSat oshi +S ounds +Ġconver gence +ĠRosen berg +19 74 +Ġnas al +Ġfull est +Ġfer ocious +x us +ist e +AM S +Ġlobb ied +Ġso othing +ĠGun n +t oday +0 24 +Ġinspir ational +ĠN BN +p b +g ewater +or ah +all owed +ĠCol iseum +Ġspecial izing +Ġinsane ly +ĠT ape +del ay +Ġt arn +ĠP ound +Ġmel anch +Ġdeploy ments +il and +Ġless en +Ġfur ry +ĠUE FA +Ġblood shed +ĠMe ier +ither ing +Ġhe irs +ĠJ aw +ax ter +ĠPublic ations +Ġal ters +int ention +ĠWinc hester +d etermination +ĠLif etime +th in +Mon ster +7 80 +Ġapprox imation +Ġsuper markets +ĠSecond s +or os +h uge +Ġb ribe +ĠLIM ITED +un ed +Ġmis interpret +ĠIn jury +Ġ3 67 +Ġthreshold s +ĠCarn ival +Ġgastro intestinal +Ġguid eline +Ġde ceived +f eatures +Ġpurported ly +ĠRon nie +ĠNew t +Ġsp acious +as us +Ġsuperhero es +ĠCyn thia +le gged +k amp +ch io +Ġth umbnail +ĠShir ley +ill ation +Ġshe ds +ĠZ y +E PA +Ġdam s +Ġy awn +n ah +ĠPe ggy +ĠE rie +ĠJu ventus +ĠF ountain +r x +don ald +al bum +ĠComp rehensive +Ġc aching +ĠU z +ulner ability +ĠPrinc iple +ĠJ ian +ing ers +cast s +ĠOs iris +ch art +t ile +ĠTiff any +ĠPatt on +ĠWh ip +Ġovers ized +J e +ĠCind erella +ĠB orders +ĠDa esh +M ah +Ġdog ma +Ġcommun ists +v u +Coun cil +Ġfresh water +Ġw ounding +Ġdeb acle +Ġyoung ster +Ġthread ed +ĠB ots +ĠSav ings +ãģ Ĥ +ol ing +oh o +Ġillum ination +M RI +Ġlo osen +tr ump +ag ency +ur ion +Ġmoment arily +ĠCh un +ĠBud apest +ĠAl ley +D isk +Ġaston ished +ĠCon quer +ĠAccount ing +h aving +ĠWe in +ĠAl right +Ġrev olver +Ġdel usion +Ġrelic s +Ġad herent +qu ant +Ġhand made +or io +Ġcomb ating +c oded +Ġquad ru +re th +N ik +ĠTrib al +ĠMyster ious +Ġin hal +ĠWin ning +ĠClass ification +ch anged +Ġun ab +Ġsc orn +icip ated +w l +ond uctor +Ġrein forcing +ĠChild hood +an ova +Ġadventure r +Ġdoctor al +ĠStrateg ies +Ġengulf ed +ĠEnc ounter +Ġl ashes +Crit ical +ric ular +ĠU TF +oci ation +check ing +ĠConsult ing +Run time +per iod +ĠAs gard +Ġdist illed +ĠPas adena +ĠD ying +ĠCOUN TY +Ġgran ite +Ġsm ack +Ġparach ute +ĠS UR +Virgin ia +ĠF urious +78 7 +ĠO kin +Ġcam el +ĠM bps +19 72 +ĠCh ao +ĠC yan +j oice +ef er +ĠW rap +ĠDeb ate +S eg +Ġfore arm +ĠIgn ore +Ġtim estamp +Ġprob ing +ĠNo on +ĠGra il +f en +Ġdorm ant +ĠFirst ly +ĠE ighth +ĠH UN +ĠDes ire +or as +Girl s +ĠDes mond +z ar +am ines +O AD +exec ute +Ġbo obs +ĠAT L +_ ( +Chel sea +Ġmasturb ation +ĠCo C +Ġdestroy er +ĠCh omsky +Ġsc atter +ĠAss ets +79 6 +ĠC argo +Ġrecept ive +ĠSc ope +Ġmarket ers +Ġlaun chers +Ġax le +ĠSE A +se q +ĠM off +f inding +ĠGib bs +Georg ia +extreme ly +N J +Ġlab orers +st als +Ġmed iation +ĠH edge +at own +Ġi od +des pite +v ill +J ane +ex istence +Ġcoinc ided +ĠUt ilities +ĠChe ap +Ġlog istical +Ġcul mination +ĠNic otine +p ak +F older +Ġrod ents +st uff +Ġlaw fully +Ġreper to +io ch +j j +Dial ogue +HH HH +lic tion +Look s +Ġ29 7 +Ġtur rets +ĠAb andon +Ġinc ess +ĠTraff ord +Ġcur led +Ġprefer ring +Ġprivat ization +Ġir resist +ĠP anda +ĠSh ake +ĠMc Gr +ãĥ Ħ +und ers +Ġdiscrim inated +Ġbart ender +I LE +Atl antic +Ġprop ensity +ĠW iz +ĠG im +con ference +Ġrein forces +G h +w agon +Ġe erie +F al +Ġhug ged +rac ist +R IC +F u +Ġf iller +ĠSt ub +Ġeng raved +ĠWrest le +Ġimagin ative +ĠPe er +ĠFact ors +an us +ĠDrac ula +mon itor +Ġrou ters +ib ia +ĠBoo lean +end ale +ĠSl aughter +ĠSh ack +R FC 
+ĠSpiel berg +S ax +ĠPH OTO +ĠCl over +ĠR ae +Dep ending +ĠMem or +ar am +Ġpier ced +Ġcur tains +v ale +ĠInqu isition +ĠP oke +Ġforecast ing +Ġcompl ains +S ense +ĠHer mes +isc overed +Ġb ible +ĠMor ph +Ġg erm +78 5 +D ON +Ġcon gen +Ġcr ane +ĠD PR +Ġrespect fully +R oom +ĠN aw +ĠDal ai +re ason +ĠAng us +Educ ation +ĠTitan ic +Ë ľ +Ġo val +un ited +Ġthird s +Ġmoist ur +ĠC PC +M iami +Ġtent acles +ĠPol aris +ex c +ex clusive +ĠPra irie +Ġcol ossal +ĠBl end +sur prisingly +ÃŃ s +Ġindo ctr +Ġbas al +ĠMP EG +und o +Spl it +Develop ment +Ġlan tern +19 71 +Ġprov ocation +Ġang uish +ĠB ind +ĠLe ia +duc ers +ipp y +conserv ancy +Ġinitial ize +ĠTw ice +ĠSu k +Ġpred ic +Ġdi ploma +Ġsoc iop +Ing redients +Ġhamm ered +ĠIr ma +Q aida +Ġglim ps +ĠB ian +Ġst acking +Ġf end +gov track +Ġun n +dem ocratic +ig ree +Ġ5 80 +Ġ29 4 +Ġstraw berry +ID ER +Ġcher ished +ĠH ots +Ġinfer red +Ġ8 08 +ĠS ocrates +O regon +ĠR oses +ĠFO IA +Ġins ensitive +Ġ40 8 +Recomm end +ĠSh ine +Ġpain staking +UG E +ĠHell er +ĠEnter prises +I OR +ad j +N RS +L G +Ġalien ated +Ġacknowled gement +ĠA UD +ĠRen eg +Ġvou chers +Ġ9 60 +Ġm oot +ĠDim ensions +Ġc abbage +B right +g at +ĠK lu +Ġlat ent +Ġz e +ĠM eng +Ġdis perse +Ġpand emonium +H Q +Ġvirt uous +ĠLoc ations +ee per +prov ided +Ġse ams +ĠW T +iz o +PR OV +Ġtit anium +Ġrecol lection +Ġcr an +Ġ7 80 +ĠN F +49 1 +64 2 +p acking +59 8 +text ure +Sp ider +fre edom +cipl ed +ĠTAM ADRA +âĻ ¦ +aut hent +ĠW ANT +r ified +Ġr ites +Ġuter us +k iss +Ġâī ¤ +Ġsk illet +Ġdis enfranch +ĠGa al +Comp an +Ġage ing +gu ide +B alt +Ġiter ator +Ġdiscretion ary +t ips +Ġprim ates +ĠTechn ique +ĠPay ments +az el +ĠR OCK +stant ial +0 60 +Ġd mg +ĠJack ets +ĠPlay off +Ġnurs ery +ĠSy mb +art on +Ġannex ation +Color ado +Ġco ils +ĠSh oes +âĦ¢ : +ĠRo z +COM PLE +ĠEve rest +ĠTri umph +J oy +G rid +à ¼ +process or +ĠPros per +ĠSever us +ĠSelect ed +r g +ĠTay yip +St ra +Ġski ing +Ġ? ) +Ġpe g +Tes la +Ġtime frame +Ġmaster mind +ĠN B +scient ific +ĠSh it +gener ic +IN TER +N UM +Ġst roll +ĠEn ix +ĠM MR +ĠE MS +m ovie +Ĥ ª +Ġminim izing +idd ling +Ġilleg itimate +Ġprot otyp +Ġpremature ly +Ġmanual s +obb ies +ĠCass idy +D EC +des ktop +Ġaer os +Ġscreen ings +Ġdeb ilitating +ĠGr ind +nature conservancy +Ġf ades +ter mination +assets adobe +F actor +Ġdefinitive ly +P oké +ap ult +ĠLaf ayette +C orn +ĠCor al +Ġstagn ant +T ue +Ġdissatisf action +G ender +Ġkid neys +ĠG ow +ĠDef eat +ĠAsh ton +Ġcart els +Ġfore closure +ĠExpl ore +stre ngth +ot in +Ġveterin arian +Ġf umble +Ġpar ap +ĠSt rait +r ils +Ġpr ick +ĠBerm uda +ĠAm munition +skin ned +Ġab ound +ĠB raz +Ġshar per +ĠAsc ension +Ġ9 78 +Ġpreview s +Ġcommun ion +ĠX Y +Ġph ony +Ġnewcom er +Ġ3 32 +." 
," +Ġredist ribution +Prot ect +ĠSo f +K al +Ġlip stick +w orst +Ġtang led +Ġretrospect ive +int eger +Ġvolunte ering +Ġ19 07 +Ġ -------------------- +ic hen +Ġunve iling +Ġsen seless +Ġfisher ies +\ - +Ġh inges +Ġcalcul us +My th +Ġund efeated +Ġoptim izations +Ġdep ress +Ġbill board +ĠY ad +ĠPy ramid +Is n +I de +Ġleg ion +ĠK ramer +ent anyl +Ġpenet rating +ĠHaw th +ĠPR ODUCT +ĠGer ard +ĠP act +ĠIn cluding +ĠEl ias +ĠEl aine +vis ual +Ġhum ming +Ġcond esc +ĠF asc +ä¸ Ĭ +Ġe galitarian +Ġdev s +ĠD ahl +O ps +D H +ĠB ounce +id ated +ald o +Ġrepublic an +Ġh amb +ĠS ett +ograph ies +CH APTER +Ġtrans sexual +Ġsky rocket +ans wer +Ġmark up +Ø ª +Ġhero ine +Comp are +ĠT av +Be ast +Ġsuccess ors +Ġna ïve +ĠBuck ley +st ress +me at +Ġdownload able +Ġindex ed +Ġsc aff +ĠL ump +ĠHom o +Stud io +In sp +Ġr acked +far ious +ĠPet ty +Ex ternal +Ġ19 09 +W ars +com mit +put ers +Ġun ob +ĠEr r +ĠE G +ĠAl am +ĠSiber ia +ĠAtmosp heric +IS TER +ĠSatan ic +trans lation +ĠL oud +tra umatic +l ique +Ġreson ate +ĠWel ch +Ġspark ing +ĠT OM +t one +Ġout l +Ġhandc uffed +ĠSer ie +8 01 +Ġland marks +ĠRee ves +Ġsoft ened +Ġdazz ling +ĠW anted +month s +Mag ikarp +Ġunt reated +ĠBed ford +M i +ĠDynam o +O re +79 5 +Ġwrong ful +Ġl ured +Ġcort isol +Ġve x +d rawn +ile t +Download ha +ĠF action +Ġlab yrinth +Ġhij acked +w aters +er ick +Ġsuper iors +ĠRow ling +ĠGu inness +Ġt d +99 2 +Ġune arthed +Ġcentr if +Ġsham eless +P od +ĠF ib +Ġ icing +Ġpredict or +Ġ29 2 +fore station +con struct +C and +@ # +Ġag itated +Ġre pr +OV A +Ġkn itting +ĠLim a +Ġf odder +68 4 +ĠPerson a +k l +7 01 +Ġbreak up +á ¸ +Ġapp alled +Ġantidepress ants +ĠSus sex +Har ris +ĠTher mal +ee ee +U pload +Ġg ulf +Ġdoor step +ĠSh ank +L U +ĠM EN +ĠP ond +s orry +Ġmis fortune +n ance +Ġb ona +M ut +Ġde graded +ĠL OG +ĠN ess +an imal +Ġa version +und own +Ġsupplement ed +ĠC ups +Ġ50 4 +Ġdep rive +ĠSpark le +Å Ĥ +ĠMed itation +auth ors +ĠSab an +ĠN aked +air d +ĠMand arin +ĠScript ures +ĠPerson nel +ĠMahar ashtra +Ġ19 03 +ĠP ai +ĠMir age +omb at +Access ory +Ġfrag mented +T ogether +Ġbelie vable +ĠGl adiator +al igned +ĠSl ug +M AT +Ġconvert ible +ĠBour bon +amer on +ĠRe hab +nt ax +Ġpowd ered +pill ar +Ġsm oker +ĠMans on +ĠB F +5 11 +ĠGood ell +ĠD AR +m ud +g art +Ġob edient +ĠTrans mission +ĠDon ation +8 80 +Ġbother ing +Material s +ãĤ ± +dest roy +Ġfore going +Ġanarch ism +ĠK ry +ice ps +Ġl ittered +ĠSch iff +Ġanecd otal +un its +Ġf ian +ĠSt im +ĠS OME +ĠInv aders +Ġbehaviour al +ĠVent ures +Ġsub lime +Ġfru ition +ĠPen alty +Ġcorros ion +¶ ħ +Ġlik ened +Ġbesie ged +ween ey +ĠCre ep +Ġlinem en +mult i +ic ably +ud der +Ġvital ity +Ġshort fall +ĠP ants +ap ist +H idden +ĠDro ps +med ical +Ġpron unciation +ĠN RL +Ġinsight ful +J V +ĠBe ard +ĠCh ou +Ġchar ms +Ġb ins +Ġamb assadors +ĠS aturdays +Ġinhib itor +ĠFr anch +6 01 +', ' +ĠCon or +art ney +ĠX peria +g rave +be es +ĠProtest ants +Ġso aking +ĠM andal +Ġph ased +Ġ6 60 +Ġsc ams +Ġbuzz ing +ĠItal ians +ĠLoren zo +ĠJ A +Ġhes itated +Ġcl iffs +ĠG OT +ingu ishable +Ġk o +Ġinter ruption +Z ip +Lear ning +Ġundersc ores +ĠBl ink +K u +57 9 +ĠAut ob +I RE +Ġwater ing +Ġpast ry +8 20 +Ġvision ary +ĠTempl ar +awa ited +Ġpist on +Ġant id +current ly +Ġp ard +Ġw aging +Ġnob ility +ĠY us +Ġinject ing +f aith +ĠP ASS +å º +Ġret ake +ĠPR OC +Ġcat hedral +b ash +Ġwrest lers +Ġpartner ing +Ġn oses +Ġ3 58 +Trans form +am en +Ġb outs +ĠId eal +ĠConstant in +Ġse p +ĠMon arch +att en +ĠPe oples +mod ified +Ġmor atorium +Ġpen chant +Ġoffensive ly +Ġprox ies +ok ane +ĠTaiwan ese +ĠP oo +ĠH OME +us ional +Ġver bs +ĠO man +vis ory 
+Ġpersu asion +Ġmult it +Ġsc issors +G ay +ow ay +oph ysical +l us +gn u +Ġap ocalyptic +Ġabsurd ity +Ġplay book +Ġautobi ography +I UM +Ġsne aking +ĠSim ulation +pp s +ell ery +Plan et +Ġright fully +Ġn iece +ĠN EC +ĠIP O +ĠDis closure +lean or +ous y +ST ER +Ġ28 2 +Cru z +Ch all +64 3 +ĠSurv ive +ĠF atal +ĠAm id +ap o +We apons +D EN +7 70 +ĠGreen wald +Ġlin en +al os +Ġpollut ants +ĠPCI e +k at +Ġp aw +ĠK raft +C hem +ĠTermin ator +Ġre incarn +Ġ] [ +ĠSe eds +Ġsilhou ette +ĠSt ores +Ġgro oming +ĠD irection +ĠIs abel +ĠBr idges +ðŁ ij +E ED +ĠM orsi +Ġval ves +ĠRank ed +ĠPh arma +ĠOrgan izations +Ġpenet rated +ĠRod ham +ĠProt oss +Ġove rest +Ġex asper +ĠT J +Ġ 000000 +Ġtrick le +Ġbour bon +WH O +Ġw retched +Ġmicrosc opic +Ġcheck list +Ġad orned +R oyal +Ad minist +ĠRet irement +ĠHig hest +We ather +ile ge +Ġincre ments +ĠC osponsors +Ġmas se +ĠS inn +r f +Ġh ordes +as sembly +75 4 +ĠNat asha +ĠTY PE +ĠGEN ERAL +Ġarr anging +Ġ40 7 +l ator +Ġg lean +Ġdisc redited +Ġclin icians +UN E +Ġachie ves +ĠEm erson +com plex += [ +Ġprincip ally +Ġfra il +p icked +Ġthan king +Ġre cl +ĠL AST +Ġsupp ressing +il ic +Ġantidepress ant +ĠLis bon +Ġth or +Ġsp a +Ġking doms +ĠPear ce +em o +Ġpl ung +Ġdiv est +Ġ ******************************** +b is +osp els +ad r +Sp irit +hall a +P ink +end ez +Ġresurrect ed +esc ape +ĠRosen stein +Ġge ological +Ġnecess ities +Ġcarn iv +ĠE lys +ĠBar ney +Ġ29 6 +dig y +ST ON +D OWN +Ġmil estones +Ġk er +Ġdismant ling +Ġre prim +Ġcross ings +19 45 +Ġpatri archy +Ġblasp hemy +Ġ3 59 +met ry +ĠOb esity +ĠDiff erences +bl ocking +ãĥķ ãĤ¡ +ich ita +ĠSab ha +ph alt +ĠCol o +ual a +effic ients +ĠMed ina +con sole +55 7 +ĠHann ibal +ĠHab it +ĠF ever +Ġthen ce +Ġsyn agogue +Ġessential s +Ġw ink +ĠTr ader +ID A +ĠSp oiler +ĠIceland ic +ĠHay ward +Ġpe ac +Ġmal ice +Ġflash back +Ġth w +Ġlay offs +L iquid +Ġtro oper +Ġh inge +ĠRead ers +Ph ill +ĠB auer +Cre ated +Ġaud its +ac compan +Ġunsus pecting +ier a +6666 6666 +Ġbro ch +Ġapprehend ed +ĠM alk +cer ning +ĠCod ex +O VER +M arsh +ĠD eng +ĠExp ression +Ġdisrespect ful +Ġasc ending +t ests +ĠPlaint iff +ster y +ĠAl ibaba +din and +ĠDem psey +Applic ations +mor al +Ġthrough put +Ġquar rel +Ġm ills +Ġhe mor +ĠC ASE +terror ist +st im +ifest yle +ro zen +CE PT +Ar k +u ci +lect ic +Ġirrit ating +she ets +A y +Ġrede emed +Ġhorn y +ĠTe ach +ĠS ear +dem ocracy +4 65 +ĠRest ore +Ġstand by +ĠP is +iff in +Ġsleep y +Ġextr ater +Ġcompl iments +Fram eworks +Ġinstall s +Ġb anging +sur face +found land +Ġmetaph ysical +Ġ28 3 +oul s +dev ices +Ar gs +ĠSac rifice +ĠMcC orm +es on +Cons ervative +ĠM ikhail +see ing +is ively +ĠRo oms +ĠGener ic +Ġenthusi astically +Ġgri pped +Ġcomed ic +ĠElectric ity +Ġgu errilla +Ġdec oration +ĠPerspect ive +Ġconsult ations +Ġun amb +Ġplag iar +Ġmagic ian +Ġe rection +ĠTour ism +or ied +ro xy +11 00 +T am +Ī è +Î ³ +× ª +ĠPred ators +Nit rome +Ġtelesc opes +project s +Ġun protected +Ġst ocked +ĠEnt reprene +nex pected +Ġwast ewater +V ill +Ġint imately +Ġi Cloud +ĠConst able +Ġspo of +Ġne farious +Ġfin s +Ġcens or +ĠMod es +ĠEs per +ar bon +Ġinter sections +Ġlaud ed +Ġphys i +Ġgener ously +ĠThe Nitrome +ĠTheNitrome Fan +Ġar isen +ĠÙ Ī +Ġg lands +ĠPav ilion +ĠGu pta +Ġuniform ly +Ġr amps +ri et +ĠWH EN +ĠVan essa +Ġrout ed +Ġlim p +ĠC PI +p ter +int uitive +Ġv aping +Ġexperiment ed +ĠOlymp us +ĠAm on +Ġsight ing +Ġinfiltr ate +ĠGentle man +Ġsign ings +ĠMe ow +ĠNav igation +che cks +4 33 +Ġel apsed +ĠBulg arian +esp ie +ĠS OM +d uring +Ġsp ills +anc a +ĠPly mouth +M AL +Ġdomest ically +ĠWater gate +ĠF AM +k illed +ed ited 
+ĠYour self +Ġsynchron ization +ĠPract ices +ST EP +Ġgen omes +ĠQ R +not ice +Ġloc ating +z in +Ġ3 29 +al cohol +Ġk itten +V o +Ġr inse +Ġgrapp le +ĠSc rew +ĠD ul +A IR +Ġle asing +ĠCaf é +Ġro ses +ĠRes pect +Ġmis lead +Ġperfect ed +Ġnud ity +Ġnon partisan +ĠCons umption +Report ing +Ġnu ances +Ġdeduct ible +ĠSh ots +Ġ3 77 +Ġæ ľ +ano oga +Ben ef +ĠB am +ĠS amp +if ix +Ġgal van +ĠMed als +rad ius +Ġno bles +Ġe aves +igr ate +K T +ĠHar bour +u ers +Ġrisk ed +re q +Ġneuro t +get table +ain a +Rom ney +Ġunder pin +Ġlo ft +ĠSub committee +ĠMong ol +b iz +Ġmanif ests +ass isted +ĠG aga +Ġsy nergy +Ġreligious ly +ĠPre f +ĠG erry +T AG +ĠCho i +4 66 +beh ind +ĠO u +Gold Magikarp +Ġhemor rh +R iver +Ġtend on +Ġinj ure +ĠF iona +Ġp ag +Ġag itation +|| || +ur an +ĠE SA +Ġest eem +Ġdod ging +Ġ4 12 +r ss +Ġce ases +ex cluding +Ġint akes +Ġinsert s +Ġemb old +ĠO ral +up uncture +4 11 +ĠUn ified +ĠDe le +Ġfurn ace +ĠCoy otes +ĠBr ach +L abor +Ġhand shake +Ġbru ises +Gr ade +éĹ ĺ +ĠGram my +ile en +St ates +ĠScandinav ian +ĠKard ash +8 66 +Ġeffort lessly +ĠDI RECT +ĠTH EN +ĠMe i +ert ation +19 68 +Ġgro in +w itch +Requ irements +98 5 +Ġroof s +Ġest ates +ĠH F +Ġha ha +Ġdense ly +ĠO CT +Ġpl astics +Ġincident ally +ĠTr acks +ĠTax es +Ġch anted +Ġforce ful +ĠBie ber +ĠK ahn +K ent +ĠC ot +lic ts +F ed +Ġhide ous +ĠVer d +ĠSynd icate +ĠIl legal +J et +ĠD AV +re asonable +c rew +Ġfundamental ist +Ġtruth ful +ĠJ ing +Ġl il +Ġdown ed +Ġen chanted +ĠPolic ies +ĠMcM aster +ĠH are +ides how +Ġpar ams +en cers +gorith m +Ġallow ances +Ġturb ulent +Ġcomplex ities +ĠK T +Ġ3 37 +ĠGen etic +F UN +D oug +t ick +Ġg igs +ument hal +Ġpatriarch al +Ġcal c +, ... +Ġc out +ĠGu an +Ġpath ological +ĠR ivals +Ġunder rated +Ġflu orescent +ĠJ iu +arna ev +ĠQu an +Ġ4 29 +Ġ ਠ+M ario +Con struct +ĠC itation +ĠR acial +ĠR SA +ĠF idel +Ġ3 95 +Person ally +C ause +à » +rad ical +in en +Ġvehement ly +ĠPap a +Ġintern ship +Ġfl akes +ĠRe ck +Luck ily +B ra +20 20 +rav ings +R N +W onder +Ser iously +Ġre usable +Ġpoll uted +ĠP eng +le igh +ind le +Ġcircuit ry +ĠMad onna +ĠB ART +Res idents +att ribute +Phil adelphia +Cl ub +Ġplan ner +Ġfr antically +Ġfaith fully +ĠTerrit ories +ĠL AT +ĠAnders en +an u +ĠP ARK +ĠS ora +i age +ĠPlay offs +ĠG CC +4 27 +Ġab norm +ĠL ever +Ġdisob edience +As ync +ĠShe a +V ert +Ġsk irts +ĠSaw yer +x p +Ġwors ening +Ġsc apego +ĠAng le +oth al +Ġtro ve +ĠSt y +ĠN guyen +mar ine +ide on +Dep ths +Bl og +ĠIll uminati +Ġtract s +Ġorgan ise +Ġo str +F s +Ġlever aging +ĠD aredevil +as ar +Ġl ang +Ġex termin +urs ions +ĠRom o +ãĤ¤ ãĥĪ +Ġcont ended +Ġencounter ing +ĠTable t +ĠAltern ate +sk ill +Ġswe ets +Ġco hesive +cap acity +Ġrep ud +Ġl izard +ro o +Ġpilgr ims +ĠR uff +ĠInstr ument +ĠLog o +uit ous +E H +Ġsales man +Ġank les +L ed +ĠPat ty +ud os +Own er +Ġdiscrep ancies +k j +M U +Ġuncond itional +Dragon Magazine +i ard +O ak +ĠConvers ation +be er +ĠOs aka +D elta +us ky +Ġsecret ion +Ġpl aza +Ġm ing +Ġde pletion +ĠM ous +ĠI TS +ĠH imal +ĠFle ming +Ġcyt ok +ĠH ick +Ġbat ters +ĠInt ellectual +6 75 +é r +IS ION +ĠQu entin +ĠCh apters +ih adi +Ġco aster +WAY S +ĠL izard +ĠY or +and ering +S kin +ha ust +ab by +Ġportray ing +Ġwield ed +d ash +Ġprop onent +Ġr ipple +Ġgrap hene +Ġfly er +Ġrec urrent +Ġdev ils +Ġwater fall +æĺ ¯ +go o +Text Color +Ġtam pering +IV ES +TR UMP +ĠAb el +ĠS AL +ĠHend ricks +ĠLu cius +b ots +Ġ40 96 +IST ORY +Gu est +ĠN X +in ant +Ben z +ĠLoad ed +ĠCle ver +t reatment +Ġta vern +Ġ3 39 +ĠT NT +ific antly +Tem perature +F el +Ġunder world +ĠJud ges +Ġ< + +Ġst ump +Ġoccup ancy +Ġab er +ĠF inder +) ", 
+ĠN unes +res et +in et +ect omy +Ġwell ness +ĠP eb +quart ered +and an +Ġneg atives +ĠTh iel +ĠCl ip +ĠL TD +Ġbl ight +Ġreperto ire +K yle +Ġqu er +ĠC es +Ġha pl +98 9 +ĠTh ames +isc opal +Des k +ivari ate +ĠEx cellence +found ation +Ġâ ĩ +X i +Ġmyster iously +esty les +Ġper ish +ĠEng els +ĠDE AD +09 0 +}} } +ĠUn real +Ġrest less +ID ES +orth odox +ĠInter mediate +Ġdin ners +ĠTr out +ĠSe ym +ĠHall s +og ged +Ġtraged ies +Ġdid nt +67 6 +Ġail ments +Ġobserv able +ĠV ide +ad apt +ĠD usk +Ġprofessional ism +ĠPres cott +ĠInd ies +p ox +ĠMe hran +W ide +Ġend emic +ĠPar an +B ird +Ġped als +ĠI U +ĠAdam ant +ĠH urt +Ġcorrel ates +urd en +Ġspons oring +cl imate +ĠUnivers ities +ĠK not +enn es +ĠDam ian +ĠAx el +S port +Ġbar b +ĠS no +sh own +ste en +ud ence +Ġnon violent +Ġhom ophobia +Ġbiom ass +ĠDet ail +Ġsrf N +ĠT une +accompan ied +I ENCE +Al bert +ĠMong o +z x +ĠCer berus +or bit +c ens +Ġsl ay +SH ARE +H Y +Ġb rawl +ĠPro be +Ġnonex istent +ĠClare nce +ĠBlack burn +Ġport als +ĠR ita +ĠRem ain +ĠLe vant +Ġtrick ed +ĠF erry +aver ing +ĠStraw berry +ĠAn swers +Ġhorrend ous +ĠA man +Supp lement +ĠT oad +Ġpe eled +Ġman oeuv +ĠU zbek +mond s +ĠH ector +Ġ40 2 +pe es +fix es +Ġd j +Ġres umes +Ġaccount ant +Ġadvers ity +Ġham pered +ĠL arson +Ġd oping +part s +H ur +Ġbe arded +Ġy r +ĠPlug in +å¥ ³ +Ġ/ ** +rol ley +Ġwaters hed +ĠSub mission +if lower +AS C +Ġcho ir +Ġsculpt ures +m A +incre asing +ai i +Ġsne akers +Ġconfront s +ĠEle phant +ĠEl ixir +Ġrec al +ĠT TL +w idget +ĠW ax +ĠGr ayson +Ġha irst +Ġhumili ated +ĠWAR N +app iness +ĠT TC +F uel +Ġpol io +Ġcomplex es +Ġbab e +ĠX IV +P F +). [ +P arts +Ġ4 35 +M eg +ĠY ards +ĠAL P +Ġy ells +Ġprin ces +Ġbull ies +ĠCapital ism +ex empt +FA Q +ĠSp onge +ĠAl a +Ġpleas antly +Ġbu f +Ġden ote +Ġunp ublished +Ġkne eling +asc a +Ġl apse +al ien +99 4 +Ġrefere es +ĠLaw yers +S anta +Ġpuzz ling +ĠProm etheus +ĠPh araoh +ĠDel ay +Ġfacilit ates +ĠC ES +Ġjew els +Ġbook let +ond ing +Ġpolar ization +ĠMor an +ĠSal ad +ĠS OS +ĠAdv ice +PH OTOS +IC AN +iat ures +ex press +ĠWonder land +ĠC ODE +ĠCL ASS +9 75 +Ġg rep +ĠD iesel +ĠGl ac +! ?" 
+Ġr m +o ine +disc rimination +ĠN urse +m allow +Ġv ortex +ĠCons ortium +Ġlarge Download +stra ight +augh lin +G rad +Ġpublic ized +ĠW aves +ĠRed d +Ġfest ivities +ĠM ane +ar ov +Ġfleet ing +ĠDr unk +ug en +C ele +Ġchromos omes +ĠD OT +-+-+ -+-+ +Ġbus iest +ĠBe aver +Sy rian +ĠK yr +k as +ĠCross Ref +19 50 +76 01 +Ġrepe aling +ĠWin ners +ĠMac ro +ĠD OD +bl ance +S ort +64 1 +Ġmet re +ĠD irk +Ġgo ggles +Ġdraw backs +Ġcomplain ant +Ġauthor izing +Ġantit rust +oper ated +Ġm ah +Ġexagger ation +Am azing +ĠSer aph +Ġha ze +w ow +Ġextingu ished +Ġcan yon +ĠB osh +Ġv ents +Ġsc rape +Cor rect +4 26 +Ġav g +Dem and +ĠâĪ ¼ +Ġmicrobi ota +"} ]," +ĠSt ev +B io +ĠPlan es +Ġsuggest ive +Ġdec ipher +ĠRefuge e +ĠKe jriwal +ĠGreen peace +Ġdecl ass +ĠSound ers +Ġth o +Ġdec rypt +Ġbr ushing +ĠJane iro +ip op +S i +8 77 +ĠGeoff rey +Ġc pu +ĠHaz el +Ġview points +Ġcris py +ĠNot ification +Ġsold er +ĠMod est +ĠHem isphere +Ġcass ette +in cludes +Ġident ifiers +ĠC ALL +in cent +T odd +ĠSwe ep +Ġ3 34 +b oss +Ġsm ir +gin x +Ġtown ship +Ġg rieving +ĠMos que +Net flix +AS ED +ĠMillenn ials +oc om +19 67 +Ġbold ly +s leep +Ġes che +arij uana +Ġsw irl +ĠPen al +Ġneglig ent +ĠStephen son +K ER +ĠZ oro +ris is +Ġlocal ization +ĠSeym our +ĠAng lic +red itation +prot ection +ĠPa ige +Ġo mit +ĠR ousse +ĠT ub +Ġinv itations +t ty +Ġm oss +ph ysical +C redits +Ġan archy +Ġchild care +Ġl ull +ĠM ek +ĠL anguages +lat est +ĠSan ford +Ġus ability +Ġdiff use +ĠD ATA +Ġsp rites +ĠVeget a +ĠProm otion +ãĥ¼ ãĤ¯ +rict ing +z ee +Tur kish +ĠTD s +pro ven +57 1 +Ġsmug glers +707 10 +Ġreform ed +ĠLo is +Ġun fl +ĠWITH OUT +ĠReturn ing +ann ie +ĠTom as +Fr anc +ĠProf it +ĠSER V +ĠR umble +ik uman +es an +Ġt esters +Ġgad get +Ġbrace let +ĠF SA +comp onent +Ġparamed ics +Ġj an +ĠRem em +ĠSk inner +Ġl ov +ĠQu ake +rom a +Ġfl ask +Pr inc +Ġover power +Ġlod ging +ĠK KK +ret te +Ġabsor bs +w rote +Ġ ," +K ings +ĠH ail +ĠFall ing +xt ap +ĠHel ena +ire ns +L arry +Ġpamph let +ĠC PR +G ro +ĠHirosh ima +Ġhol istic +". [ +Ġdet achment +Ġas pire +Ġcompl icit +ĠGreen wood +Ġresp awn +ĠSt upid +ĠFin ished +f al +b ass +Ġab hor +Ġmock ery +ĠFe ast +VID EO +Ġcon sec +ĠHung ry +P ull +ĠH ust +it ance +? 
ãĢį +) -- +ĠPar allel +con v +4 69 +ha ar +w ant +P aper +m ins +ĠTor o +ĠTR UMP +ĠR ai +D W +ĠW icked +ĠL ep +Ġfun ky +Ġdetrim ent +ios is +ache v +Ġde grade +im ilation +Ġret ard +Ġfrag mentation +Ġcow boy +ĠY PG +ĠH AL +Parent s +ĠS ieg +ĠStra uss +ĠRub ber +× IJ +Fr ag +Ġp t +Ġoption ally +ĠZ IP +ĠTrans cript +ĠD well +88 2 +M erc +ĠM OT +ãĥ¯ ãĥ³ +Ġhun ts +Ġexec utes +In cludes +Ġacid ic +ĠRespons ibility +ĠD umb +we i +And erson +ĠJas per +ight on +abs olutely +Ad ult +Ġpl under +Mor ning +ĠT ours +ĠD ane +Î º +ĠT EST +ĠG ina +Ġcan ine +aw an +Ġsocial ists +ĠS oda +Ġimp etus +ĠSupplement ary +oli ath +ĠKinn ikuman +mitted ly +second s +Ġorganis ers +Ġdocument aries +Vari able +GRE EN +Ġres orts +Ġbr agging +Ġ3 68 +Art ist +w k +bl ers +Un common +ĠRet rieved +Ġhect ares +Ġtox in +r ank +Ġfaith s +ĠG raphic +Ġve c +ĠL IA +Af rican +Ġard ent +end iary +L ake +ĠD OS +cient ious +ĠOk awaru +ĠAll y +ĠTim eline +D ash +ĠI c +contin ue +Ġt idy +Ġinstinct ively +ĠP ossibly +ĠOut door +ĠWould n +Ġl ich +ĠBr ay +ĠA X +Ġà ī +Ġ+ # +\ ' +Direct ory +ab iding +Ġf eral +ic ative +but t +Ġper verse +S alt +Ġwar ped +Ġnin eteen +Ġcabin ets +Ġsrf Attach +ĠSl oan +Ġpower ing +reg ation +F light +se vere +Ġst ren +Ġc og +ap ache +Ġâ Ŀ +Ġcaf eteria +p aces +ĠGrim oire +uton ium +Ġr aining +Ġcir cling +Ġlineback ers +c redit +Ġrep atri +ĠCam den +lic ense +Ġly ric +Ġdescript or +Ġval leys +Ġre q +Ġback stage +ĠPro hibition +ĠK et +Op ening +S ym +æĸ ¹ +Ġserv ings +Ġoverse en +Ġaster oids +ĠMod s +ĠSpr inger +ĠCont ainer +è » +ĠM ens +Ġmult im +Ġfire fighter +pe c +Ġchlor ine +Ð ¼ +end i +Ġsp aring +Ġpolyg amy +ĠR N +ĠP ell +Ġt igers +Ġflash y +ĠMad ame +S word +Ġpref rontal +Ġpre requisite +uc a +Ġw ifi +Ġmiscon ception +Ġharsh ly +ĠStream ing +ot om +ĠGiul iani +foot ed +Ġtub ing +ind ividual +z ek +n uclear +m ol +Ġright ful +49 3 +Ġspecial ization +Ġpassion ately +ĠVel ocity +ĠAv ailability +T enn +Ġl atch +ĠSome body +Ġhel ium +cl aw +Ġdi pping +XX X +Ġinter personal +7 10 +Ġsub ter +Ġbi ologists +ĠLight ing +Ġopt ic +Ġden im +end on +ĠC orm +Ġ3 41 +ĠC oup +Ġfear less +Ġal ot +ĠCliff ord +ĠRun time +ĠProv ision +up dated +lene ck +Ġneur on +Ġgrad ing +ĠC t +sequ ence +in ia +con cept +Ġro aring +ri val +ĠCaucas ian +Ġmon og +key es +Ġappell ate +Ġlia ison +EStream Frame +ĠPl um +! . 
+Ġsp herical +Ġper ished +Ġbl ot +Ġben ches +Ġ4 11 +Ġpione ered +Ġhur led +Jenn ifer +ĠYose mite +Ch air +Ġreef s +Ġelect or +ĠAnt hem +65 2 +Ġun install +Ġimp ede +Ġbl inking +Ġgot o +Dec re +A ren +Ġstabil ization +ĠDis abled +ĠYanuk ovych +Ġoutlaw ed +ĠVent ura +ten ess +Ġplant ation +Ġy acht +ĠHu awei +Ġsol vent +Ġgr acious +Ġcur iously +Ġcapac itor +Ġc x +ĠRef lex +Ph ys +ĠC f +pt in +cons ervative +Ġinv ocation +c our +F N +ĠNew ly +H our +As ian +ĠLe ading +ĠAer ospace +An ne +Ġpre natal +Ġdeterior ating +H CR +ĠNorm andy +ol ini +ĠAm bro +9 10 +Ġset backs +ĠT RE +Ġs ig +ĠSc ourge +59 7 +79 8 +Game play +Ġm sec +M X +Ġprice y +ĠL LP +aker u +Ġover arching +ĠB ale +Ġworld ly +Cl ark +Ġscen ic +Ġdisl iked +ĠCont rolled +T ickets +ĠE W +ab ies +ĠPl enty +Non etheless +Ġart isan +Trans fer +ĠF amous +Ġinf ield +ble y +Ġunres olved +ĠML A +ãĤ Ĥ +Cor rection +Ġdemocr at +ĠMore no +ro cal +il ings +Ġsail or +Ġr ife +h ung +Ġtrop es +Ġsn atched +ĠL IN +ĠB ib +ES A +ĠPre v +ĠCam el +run time +Ġob noxious +4 37 +Ġsum mers +Ġunexpl ained +ĠWal ters +cal iber +Ġg ull +ĠEnd urance +ä½ ľ +Ġ3 47 +Ir ish +Ġaer obic +Ġcr amped +ĠHon olulu +à © +us erc +ec ast +AC Y +ĠQu ery +ãĤ¹ ãĥĪ +Bet a +Ġsuscept ibility +ĠSh iv +ĠLim baugh +Ġà ĸ +ĠN XT +ĠM uss +ĠBrit ons +ES CO +EG IN +Ġ% % +Ġsec ession +ĠPat ron +ĠLu a +n aires +ĠJPM organ +us b +ocy te +Ġcouncill ors +ĠLi ang +f arm +Ġnerv ously +Ġattract iveness +ĠK ov +j ump +Pl ot +Ġst ains +ĠStat ue +ĠApost les +he ter +ĠSUP PORT +Ġoverwhel m +Y ES +Ġ29 1 +d ensity +Ġtra pping +M it +Ġf ide +ĠPam ela +atl antic +Dam n +Ġp ts +OP A +Ġserv icing +Ġoverfl owing +ul o +ĠE rit +t icket +light ing +ĠH mm +ãĥ¼ ãĥ« +im oto +Ġchuck le +4 23 +ãģ ķ +sh ape +Ġque ues +Ġanch ors +ãĤ¼ ãĤ¦ãĤ¹ +F er +Ġaw oke +Ġ6 66 +h ands +Ġdiver gence +Ġ50 5 +T ips +Ġdep ot +Ġske w +ĠDel iver +op ot +Ġdiv ul +ĠE B +uns igned +ĠUn i +X box +Ġfor ks +Ġ7 02 +å ¯ +Ġpromot ers +ĠV apor +Ġlev ied +sl ot +Ġpig ment +Ġcyl inders +C RE +Ġsn atch +Ġperpet ually +Ġl icking +ĠFe et +ĠKra ken +ĠHold en +ĠCLS ID +m r +Ġproject or +Ġden otes +Ġchap el +ĠTor rent +b ler +R oute +ĠDef endant +ĠPublisher s +ĠM ales +ĠInn ov +ĠAg ility +rit er +ty mology +st ores +L ind +Ġf olly +ĠZur ich +B le +Ġnurt ure +Ġcoast line +uch in +D omin +Ġfri vol +ĠCons olid +res ults +M J +Ġphyl ogen +Ġha uled +ĠW iley +ĠJess ie +ĠPrep are +ĠE ps +Ġtreasure r +I AS +Ġcolon ists +Ġin und +ĠWW F +ĠCon verted +6 000 +out side +ĠApp earance +ĠRel ic +ĠM ister +s aw +Ġresult ant +Ġadject ive +ĠLaure l +ĠHind i +b da +Pe ace +Ġreb irth +Ġmembr anes +Ġforward ing +Ġcoll ided +ĠCar olyn +K ansas +5 99 +ĠSolid GoldMagikarp +Be ck +Ġstress ing +ĠGo o +ĠCooper ative +Ġf s +ĠAr chie +L iter +ĠK lopp +J erry +Ġfoot wear +War ren +Ġsc ree +h are +Under standing +P ed +Ġanth ology +ĠAnn ounce +M ega +Ġflu ent +Ġbond age +ĠDisc ount +il ial +C art +ĠNight mares +Sh am +ĠB oll +uss ie +H ttp +Atl anta +Ġun recogn +ĠB id +Ġunder grad +Ġforg iving +ĠGl over +AAAA AAAA +4 45 +V G +pa io +kill ers +Ġrespons ibly +Ġmobil ize +Ġeffect ed +ĠL umin +Ġk ale +Ġinfring ing +ann ounced +Ġf itt +b atch +ĠT ackle +ĠL ime +ĠAP P +uke mia +Ġrub y +Ġex oner +ĠCas ual +0 70 +Ġpel vic +Ġautom ate +ĠK ear +ĠCoast al +Ġcre ed +Ġbored om +ĠSt un +ri ott +Ĥ İ +Ġregener ate +Ġcomed ians +ĠOP ER +Sp ons +id ium +on is +L ocated +05 7 +Ġsusp ense +ĠD ating +C ass +Ġneoc ons +ĠShin zo +Ġaw oken +ch rist +ĠMess ages +att led +ĠSpr ay +ĠSp ice +C W +Ġshield ing +ĠG aul +Am id +Ġparam ilitary +Ġmult if +ĠTan ner +il k +Ġgodd amn +g ements +Ġbe friend +m obi +Ġ3 88 +fold 
er +acc a +Ġins in +g ap +N ev +fif th +Ġpsychiat ry +b anks +TH IS +Ġhar b +ac qu +Ġfac ade +ĠPower Point +80 3 +Ġbl uff +Sh ares +Ġfavor ing +El izabeth +Ãį Ãį +Ġr anger +77 2 +ĠAr che +h ak +ĠGen etics +ĠF EMA +Ġev olves +Ġest e +ĠP ets +ĠM é +ĠInterest ing +ĠCanter bury +ch apter +ĠStar fleet +Sp anish +Ġdraw back +ĠNor wich +9 70 +n orth +ag anda +Ġtransform ative +ram ids +bi ology +ad ay +Ġpropag ation +ĠGam ma +ĠDen ise +ĠCalcul ator +ent imes +ĠB ett +Ġapp endix +ĠHD D +AK ING +Ġst igmat +Ġhol ster +Ġord inarily +Ch ance +ĠCont rary +Ġad hesive +Ġgather s +6 12 +re au +ony ms +ew ays +Ġindu ces +Ġinterchange able +se m +Wh it +Ġtr ance +Ġincorpor ation +ĠExt ras +Fin ancial +Ġawkward ly +ĠStur geon +ĠH Y +Norm ally +ĠEnd ing +ĠAss ist +enc rypted +Ġsub jug +Ġn os +Ġfan atic +C ub +C U +?" . +Ġirre versible +å Ĥ +03 1 +ĠH AR +sp read +ul ia += $ +Sc ope +L ots +Ġlif estyles +ol on +Ġf eds +Ġcongrat ulate +web kit +Ġindist inguishable +ĠSw ing +Ġcommand ments +qu ila +ab ella +m ethyl +ann abin +Ġo vere +Ġlob ster +ĠQU EST +ĠCONT IN +bern atorial +:::: :::: +ĠTra ve +ĠSam oa +AN I +75 2 +Ð ´ +userc ontent +ĠMod erate +y eah +ĠK itt +Ġwe e +Ġstuff ing +ĠInter vention +ĠD ign +Ġware houses +ĠF iji +Ġpel lets +Ġtake away +ĠT ABLE +ĠClass ical +col lection +Ġland fall +ĠMus cle +Ġsett les +ĠAD V +Ġ3 44 +L aura +Ġf ared +ĠPart ial +4 36 +oss ibility +ĠD aly +ĠT arant +ĠFu ji +am l +c ence +55 1 +ĠProced ures +ĠO CD +ĠU D +t in +Q UI +ach o +4 38 +Ġgl itches +Ġenchant ment +Ġcalcul ates +IR O +ĠH ua +alys es +ĠL ift +um o +Ġle apt +Ġhypothes ized +ĠGust av +it ans +VERS ION +æ ł +Rog er +Ġr and +ĠAd apter +Ġ3 31 +ĠPet ition +k ies +M ars +Ġunder cut +ze es +ĠLy ons +ĠDH CP +Miss ing +Ġretire es +Ġins idious +el i +> ) +. ãĢį +Ġfinal ists +ĠA ure +Ġacc user +Ġwas tes +ĠY s +ĠL ori +Ġconstitu encies +Ġsupp er +Ġmay hem +or ange +Ġmis placed +Ġmanager ial +Ġex ce +ĠCL I +Ġprim al +ĠL ent +Cry stal +h over +ĠN TS +end um +Ġd w +ĠAl c +n ostic +Ġpres erves +ĠTs arnaev +Ġtri pled +rel ative +Arc ade +k illing +ĠW EEK +ĠH anna +D ust +Com pleted +ģ « +Ġappro ves +ĠSur f +ĠLuther an +ven ants +Ġrobber ies +we ights +soft ware +at ana +ug al +Ġgrav y +ĠC ance +OLOG Y +ly ak +Ton ight +Ġunve il +Ġ19 04 +ĠMin ion +ent ious +st ice +pack ages +ĠG EAR +Ġg ol +ĠHutch inson +ĠProf ession +ĠG UN +ĠDiff erence +ĠTsuk uyomi +ĠLes bian +6 70 +Ġfug itive +ĠPlan etary +-------------------------------- ------------------------ +Ġacc rued +Ġch icks +Ġsto pp +Ġblock ers +C od +Ġcomment ers +ĠSomew here +ĠPhot ographer +the me +Ġmay oral +w u +Ġanten nas +Ġrev amped +ĠSubject s +it é +im ura +Ġentr ances +liter ally +Ġten ets +ĠO MG +ĠMP H +ĠDon key +ĠOff ense +Ġ" + +Sn ap +ĠAF B +Ġan imate +ĠS od +His panic +Ġinconsist ency +D b +F Y +Ex port +Ġa pe +Ġpear l +ib el +ĠPAC s +Ġ{ \ +Ġact u +ĠHS BC +camp us +Ġpay off +Ġde ities +ĠN ato +ou ple +Ġcens ored +ĠCl ojure +Ġconf ounding +en i +Ġreck on +op he +Ġspot ting +Ġsign ifies +Ġprop el +Ġfest ive +S uggest +Ġpled ging +ĠB erman +Ġrebell ious +Ġovershadow ed +Ġinfiltr ated +j obs +67 2 +Ġscal able +Ġdomin ion +ĠNew foundland +ĠMead ow +Ġpart itions +AM I +Ġsupplement ary +str ument +Ġhair y +Ġperpet uate +Ġnuts hell +ĠPot ato +ĠHob bit +Ġcur ses +Flo at +Ġquiet er +Ġfuel ing +Ġcaps ules +ĠL ust +ĠH aunted +Exec utive +Ġchild birth +G re +Ġrad iant +å İ +Ġm alls +Ġin ept +ĠWarrant y +Ġspect ator +E h +t hens +Ġculmin ating +æ © +ary a +ãĤ ® +ilit arian +ĠOR IG +ĠSp ending +pt ives +ĠS iren +ĠRec ording +ay ne +Ġv im +Ġspr ang +T ang +ĠM FT +mor ning +ĠWe ed +m peg 
+cess ion +ĠCh ung +7 30 +w arning +56 2 +handed ly +P oor +P olitics +: # +Ġp ian +Ġfec es +ĠDocument ation +Ġban ished +Ġ3 99 +ĠAR C +Ġhe inous +J ake +ĠAm ir +way ne +v re +os henko +Ġnotebook s +Ġfound ational +Ġmarvel ous +ixt ape +Ġwithdraw als +Ġh orde +ĠD habi +is able +ĠK D +Ġcontag ious +ĠD ip +ĠAr rows +Ġpronoun s +Ġmorph ine +ĠB US +68 2 +Ġk osher +fin ished +ĠInstr uments +Ġf used +yd en +ĠSal mon +F ab +aff ected +K EN +C ENT +Dom ain +Ġpoke mon +ĠDr inking +G rowing +ĠInvestig ative +ĠA ether +em i +Ġtabl oid +Ġrep ro +ĠNot withstanding +ĠBers erker +Ġdram as +Ġclich é +Ġb ung +ĠU RI +ĠD os +0 44 +Ġpast ors +Ġl s +Ġac rylic +aun ts +Ed ward +Ġmajor ities +B ang +Ġfield ing +ĠRepl acement +ĠAl chemy +pp ard +ĠRome o +ĠSan ct +ĠLav rov +ib ble +Inst ruct +Ġimp ractical +ĠPlay boy +ce phal +Ġsw aps +Ġk an +ĠThe o +Ġillust rating +Ġdismant led +ĠTrans gender +ĠG uth +UG H +Ġtriumph ant +Ġencomp ass +Ġbook mark +udd in +j er +Ġpred icate +ES H +Ġwhen ce +ĠAB E +Ġnon profits +Se qu +Ġdi abetic +Ġp end +Ġheart felt +sh i +Ġinter acts +ĠTele com +Ġbombard ment +dep ending +ĠLow ry +ĠAd mission +ĠBl ooming +ust ration +ene gger +B rew +Ġmol ten +ĠNer d +P IN +âĸ Ģ +ave ment +Ġtou red +Ġco efficients +ĠTray von +ans son +Ġsand y +t old +fl ows +Ġpop ulous +ĠT inder +ĠBl iss +R achel +Min imum +Ġcontest ant +ĠRed uce +ĠMor se +ĠGrass ley +ĠClick er +Ġexp r +Ġs incerity +Ġmar qu +Ġelic it +ĠPro position +ĠDemon ic +Ġtac os +G reek +Ġpost war +Ġin sofar +ĠP ork +Ġ35 2 +doctor al +walk ing +Ġmid term +ĠSam my +sight ed +ĠTR ANS +ic i +AL D +ĠUS L +ĠF ISA +ĠAm pl +ĠAlex andra +ine lli +Tr ain +Ġsign ify +ĠVers us +Ġob fusc +Ġk h +Ġagg ro +ĠRen ault +Ġ3 48 +5 18 +ox icity +0 22 +ĠTw ist +Ġgoof y +D ynamic +Ġbrief ings +m ight +8 99 +Ġderog atory +T ro +Ġfor ging +ĠKor an +ĠMar ried +ĠBuc s +Ġpal ate +ĠCon version +m able +4 13 +Ġ( _ +Ġs iph +ĠN EO +col lege +Ġmarg inally +Ġfl irt +ĠTra ps +ĠP ace +é »Ĵ +Ġgoalt ender +Ġforb ids +Ġcler ks +ĠT ant +ĠRobb ins +ĠPrint ing +Ġpremie red +Ġmagn ification +ĠT G +ĠR ouse +ĠM ock +odynam ics +Ġpre clude +ism o +ĠPul itzer +Ġaval anche +ĠK odi +rib une +ĠL ena +Elect ric +Ġref inery +Ġend owed +Ġcounsel ors +Ġd olphin +ĠM ith +Ġarm oured +hib ited +Beg in +ĠP W +O il +ĠV or +ĠShar if +ĠFraz ier +est ate +Ġj ams +Pro xy +Ġband its +ĠPresbyter ian +ĠPrem iere +t iny +ĠCru el +Test ing +Ġhom er +ĠV ERS +ĠPro l +ĠDep osit +ĠCoff in +Ġsemin ars +Ġs ql +ĠDef endants +Altern atively +ĠR ats +ç « +ethy st +' > +Ġiss uer +58 9 +Ġch aired +ĠAccess ories +man ent +Ġmar row +ĠPrim ordial +C N +Ġlimit less +ĠCarn age +Ġund rafted +q v +IN ESS +on ew +Ġco hesion +98 7 +Ġne cks +Ġfootball er +ĠG ER +Ġdetect able +ĠSupport ing +ĠCS V +oc ally +k Hz +Ġund e +Ġsh one +Ġbud ding +tra k +Stand ing +ĠStar craft +ĠKem p +Ben ch +Ġthw arted +ĠGround s +ath i +L isa +Dial og +ĠS X +V ision +Ġingen ious +Ù IJ +Ġfost ering +ĠZ a +ĠIn gram +Ġ" @ +N aturally +6 16 +0 35 +ĠF AC +H mm +55 4 +Ġacceler ator +ĠV end +Ġsun screen +Ġtuber culosis +rav iolet +ĠFunction al +ĠEr rors +ed ar +19 66 +ĠSpect re +ĠRec ipes +88 5 +ĠM ankind +L iverpool +Ġ| -- +Ġsubst itutes +ĠX T +w ired +Ġinc o +ĠAf gh +E va +ic c +S ong +K night +Ġdilig ently +ĠBroad cast +A id +Ġaf ar +ĠH MS +aton in +ĠGr ateful +Ġfire place +ĠOm ni +e uro +ĠF RE +ĠSh ib +ĠDig est +t oggle +Ġheads ets +Ġdiff usion +ĠSqu irrel +ĠF N +Ġdark ened +out her +Ġsleep s +ĠX er +gun s +Ġset ups +Ġpars ed +Ġmamm oth +ĠCur ious +g ob +ĠFitz patrick +ĠEm il +im ov +........ ..... 
+ĠB enny +Second ly +Ġheart y +Ġcons on +st ained +Ġgal actic +cl ave +Ġplummet ed +Ġp ests +Ġsw at +Ġrefer rals +ĠLion el +h oly +Ġunder dog +ĠSl ater +ĠProv ide +ĠAm ar +ress or +å Į +ong a +Ġtim id +Ġp iety +ĠD ek +Ġsur ging +az o +Ġ6 10 +Ġdes ks +ĠSp okane +ĠAn field +Ġwars hips +ĠCob ra +Ġar ming +clus ively +ĠBad ge +ag ascar +ĠPR ESS +ĠMcK enzie +ĠFer dinand +burn ing +Af ee +Ġtyr ann +ĠI w +ĠBo one +100 7 +ĠRe pt +Ċ Âł +Ġcar avan +ĠD ill +ĠBundes liga +Ch uck +Ġheal er +ãĥ¼ãĥ Ĩ +ĠH obby +Ġneg ate +Ġcrit iques +section al +mop olitan +Ġd x +Ġouts ourcing +ĠC ipher +t ap +Sh arp +Ġup beat +Ġhang ar +Ġcru ising +ĠNi agara +Ġ3 42 +ill us +ĠS v +Ġsubt itles +Ġsqu ared +Ġbook store +Ġrevolution aries +ĠCarl ton +ab al +Ut ah +Ġdesp ise +ĠU M +cons ider +aid o +Ġc arts +ĠT urtles +Tr aining +Ġhonor ary + ¢ +Ġtri angles +4 22 +Ġreprint ed +Ġgrace ful +ĠMong olia +Ġdisrupt ions +ĠB oh +Ġ3 49 +Ġdr ains +Ġcons ulate +Ġb ends +Ġm afia +ur on +ĠF ulton +m isc +Ġren al +Ġin action +ck ing +Ġphot ons +Ġbru ised +ĠC odes +og i +Ġn ests +ĠLove ly +ĠLib re +ĠD aryl +Ġ# ## +S ys +. ," +Ġfree zes +est ablishment +and owski +Ġcum bers +ĠSt arg +ĠBom bs +Ġleg ions +Ġhand writing +Ġgr un +ĠC ah +sequ ent +Ġm oth +ĠMS M +Ins ert +F if +Ġmot el +Ġdex ter +ĠB ild +hearted ly +Ġpro pe +ĠText ure +ĠJ unction +ynt hesis +oc ard +ĠVer a +ĠBar th +Ġμ g +Ġl ashed +Ġ35 1 +ĠZ amb +ĠSt aples +ĠCort ex +ĠCork er +Ġcontinu um +ĠWR ITE +unt a +rid or +Ġde ems +0 33 +ĠG OLD +p as +Ġrep ressive +ãĥĨ ãĤ£ +Ġbaff led +Sc ar +Ġc rave +Ġ ______ +Ġentrepreneurs hip +ĠDirector ate +Ġ' [ +Ġv ines +Ġasc ended +ĠGR OUP +ĠGood bye +Ġdo gged +ãĥ´ ãĤ¡ +Man ufact +Ġunimagin able +ri ots +ier rez +Ġrel ativity +ĠCraft ing +ra ught +ud en +c ookie +Ġassass ins +Ġdissatisf ied +ac ci +Ġcondu it +Sp read +ĠR ican +n ice +izz le +Ġsc ares +ĠWH Y +ph ans +5 35 +Ġprot racted +ĠKrist en +5 36 +ĠSc rib +ĠNe h +Ġtwent ies +Ġpredic ament +Ġhandc uffs +Ġfruit ful +ĠU L +ĠLud wig +Ġatt est +ĠBre aker +Ġbi ologically +ĠDeal er +Ġrenov ations +f w +ess en +Al ice +ĠHen ri +Ġun ilaterally +ĠS idd +h ai +ĠSt retch +S ales +Ġcumbers ome +ĠJ avier +Ġtrend y +Ġrot ting +ĠChall enges +Ġscra ps +Ġfac ets +ĠVer onica +ĠVer ge +ĠS ana +Al ien +ĠR ih +Ġrad ial +ect ar +Ġ6 30 +cl i +Mar ie +Ġwild fire +ĠCat o +h ander +Ġwait ress +Ġch ops +ĠS ECTION +Ġblunt ly +ĠCat alog +n ian +stud y +Ġpat rolling +ĠT enth +nex us +ĠN ON +op sy +Ġsc athing +s ie +Ġdeterior ated +V B +Naz is +Ġdep ictions +Ġauthent icated +ĠCon ce +k rit +Ġpromul g +ĠL ONG +U FC +ĠVis itors +ĠRec all +Ġrehab ilit +ĠSL I +Ġglac ier +ĠB ite +Ġ50 3 +Ġvom it +Ġfer mented +ĠKh alid +Ġgrad ed +ĠMag icka +ĠIch igo +power ful +ic ators +75 3 +Ġsh rew +Ġ35 6 +Ġlegal izing +Ġall otted +ĠArch demon +ith ing +igg urat +V OL +Le od +Ġo ily +Ġindu cing +Ġamy gdala +Ġadm ins +ĠAcqu isition +C AN +Ġsche matic +Ġmo an +ĠCamer oon +Ġt ink +Ġmer ry +Ġbutter flies +ĠGo ff +Ġworks pace +ĠCor ona +Ġj avascript +ĠD olphin +ĠCant or +4 64 +to e +AP S +ĠAg ing +Ġpadd ed +ĠZ heng +ĠHe ld +Ġest ranged +Ġ7 70 +. 
} +ĠDun ham +Ġsm okes +Ġcap itals +und ai +Sh in +ĠFound ing +Ġent itle +Ġcenter piece +D iscover +Ġthere to +al ert +ĠN ou +ĠAnaly st +l c +F H +FI ELD +ĠP OV +gr ay +Ġar cs +ĠH OT +Ġr s +Ġoblig atory +ĠArchitect s +ĠS ven +ĠF EC +0 200 +Christ mas +ĠAlban ia +rat om +58 7 +Ġhard ships +Ġaut os +ĠCharg es +Ġap es +Ġ3 76 +wal let +Ġintox ication +Ġgobl in +Ġ5 70 +++++++++ ++++++++ +ĠYel p +ĠMag netic +ĠBr iggs +R ail +Ġspawn s +ĠW iggins +Ġshowc ased +Ġres orted +ub en +Ġwh ipping +Ġim itate +Ġdigest ion +ĠUS PS +ĠG est +Ġye a +ĠT ight +ind al +ic as +` . +C AST +'' ; +ĠF et +opath ic +In valid +Ġregrett ed +Ġbro ccoli +ĠSc ores +e ve +Ġpost ings +Ġaccum ulating +Ġneed less +elf th +Ġmay ors +Ġsc rib +Ġanecd otes +Ġbot ched +ĠRib bon +ĠConstant ine +i uses +ess es +Ġdev ise +Comp ared +Ġp udding +Ġg arg +Ġev oke +79 7 +Ġdet ox +9 09 +ĠPie ces +ĠMcC artney +Ġmet ast +ĠK rypt +P OR +Ġt ending +ĠMerch ants +Pro of +ĠV arg +ĠPort able +ãĥ¼ãĥĨ ãĤ£ +B rain +25 00 +Ġfol iage +Ø ¹ +Ġment ors +ĠA ires +Ġminimal ist +Ġing ested +ĠTro jan +ĠQ ian +inv olved +0 27 +Ġer oded +RA FT +Ġbl urry +M ob +Ġbuff et +ĠFn atic +ae a +KN OWN +ĠIn it +s afety +en um +ACT ION +ĠCrus her +ĠD ates +Ġ ................ +c alling +ak ov +Ġvent ured +Ġ5 55 +au ga +H art +ĠA ero +M AC +Ġthin ly +Ġar ra +ST ATE +ild e +ĠJac qu +ĠFem ales +Ġthe orem +Ġ3 46 +Ġsmart est +ĠPU BLIC +ĠK ron +ĠB its +ĠV essel +ĠTele phone +Ġdec ap +Ġadj unct +ĠS EN +mer ga +Ġred acted +Ġpre historic +Ġexplan atory +ĠRun s +ĠUtt ar +ĠM anny +ĠAUTH OR +ĠUnle ashed +ĠBow ling +be ans +79 3 +Ġunivers es +Ġsens it +ĠK ung +re peat +ctr l +Ġp aced +Ġfull er +Cl ock +Ġrec omb +ĠF aul +ĠB unker +Ġpool ed +Ġan a +ĠM outh +LL OW +hum ane +Ġbull do +ĠMicha els +f am +Ġwreck ed +Ġport rays +ĠWh ale +ĠH es +Ġguess es +ĠBrow se +ĠL APD +Ġconsequ ential +ĠInn ocent +ĠD RAG +Ġtrans gress +ĠO aks +Ġtri via +ĠRes on +ĠA DS +-- + +ĠT oll +Ġgrasp ing +ĠTHE M +ĠT ags +ĠCon clusion +Ġpract icable +Ġho op +Ġunintention ally +Ġign ite +ĠM ov +ur ized +le hem +Ter min +Ġcolour ful +ĠLin ear +ĠEll ie +G y +Ġman power +Ġj s +Ġem oji +ĠSHAR ES +_ . +0000 7 +Ġsophistic ation +Ġunders core +Ġpract ise +Ġbl ob +op ens +Uk raine +Ke eping +Y C +J R +ult imate +Cl aim +Ġautom obiles +99 3 +ste el +Ġpart ing +ĠL ank +... ? +Ġ38 5 +Ġremem brance +Ġe ased +Ġcov ari +ĠS ind +Effect ive +Ġdisse mination +ĠMo ose +ĠCl apper +br ates +App ly +Ġinv is +Ġwors ened +âĢĶ - +Ġlegisl ator +ĠL ol +ĠRow e +Ġdealers hip +um ar +id ences +Ġinvestig ates +Ġc ascade +Ġbid der +ĠB EN +Iron ically +Ġpres iding +Ġd ing +Ġcontrad icted +Ġshut s +ĠF IX +Ġ3 66 +Dist rict +Ġsin ful +ĠChar isma +o ops +Ġtot ality +Ġrest itution +ĠOpt imus +ĠD ah +Ġcl ueless +urn ed +Ġnut rit +Ġland owners +Ġfl ushed +Ġbroad en +m ie +Ġprint ln +Ġn ig +ĠCorp us +J en +Ġprot o +ĠWik imedia +ĠPal o +C OR +Ġstory lines +Ġevangel icals +ĠDar rell +Ġrot or +ĠH W +sk illed +ery l +Ġbe gg +ĠBl umenthal +Ġwe aving +Ġdown wards +ĠJack et +ĠANG EL +Te chnology +Ġes oteric +alde hyde +Ġfur iously +Ġforeign er +We ak +CH O +ĠH ound +Exper ience +ĠPlay station +ĠM IA +ĠU ng +cl oth +ag all +Ġcal ming +iz ens +St ruct +ĠW itches +ĠCeleb ration +Ġ........ ...... 
+pt roller +ĠTC U +Ġb unny +ãĥ į +ut orial +Ġup scale +ĠSt a +ĠCol ossus +Ġchlor ide +ĠZ ac +ĠRe asons +ĠBrook ings +ĠWH ITE +][ / +ĠL ose +9 05 +Ġunders ide +ern els +Ġv ape +do zen +upp et +ĠST OP +mat ical +ĠStat ements +hed dar +P AC +Custom er +Ġmem os +ĠP J +end ars +ĠLim its +l augh +Ġstabil ized +ĠALE C +Y A +Up grade +al am +Ġtechn o +Ġan ew +fore seen +Ġcolleg iate +ĠPy ro +ĠD ism +Ġfront line +Ġammon ia +I U +Qu ite +John ny +ass in +G OP +ĠSt yles +ĠSovere ign +acter ial +5 49 +ĠR IP +ĠL ists +Ġ3 64 +ĠRece p +s ocket +ĠByr d +ĠCand le +An cient +Ġappell ant +en forcement +ace a +ans ki +Ġold s +88 6 +Ġsl urs +Ġem pires +Ġbuck le +Ġalien ation +ĠAber deen +Ġunic orn +Ġoverr iding +ĠL X +pp a +Ġdesp ised +ĠB ugs +ĠB ST +S outhern +5 33 +Ġhall mark +ĠPost er +Ġstem med +Ġprincip als +ĠT ECH +ĠSand wich +It aly +Ġche esy +ĠSet TextColor +ĠProt ective +ĠC ohn +J O +apt op +Re ason +Lead er +ĠUnder stand +ĠFr idays +ĠContin uous +Ġcl ipping +ĠR ye +Ġber th +tim er +ann is +re act +Ġbuff alo +ĠPar as +Ġ6 55 +Ġpres ided +ĠSun rise +Ġve ts +Ġcl oves +ĠMcC ull +Stre ngth +G AN +Ġill iter +ĠPric ing +l é +Ġresist or +Ġbr un +ĠSuff olk +Ñ ĭ +ĠL iver +Re leased +Ġwhat s +8 60 +ĠMe asures +Ġden ouncing +ĠRy zen +Ġsou ven +Ġcareg ivers +ch ini +ĠScar lett +Ġt rough +Cong ratulations +Ġtax is +ĠTrad ition +j it +Ġtable top +Ġhither to +Ġdis information +off ensive +h ra +ĠDISTR ICT +Ġcompl icate +chen ko +ĠRecon struction +Ġpalp able +Ġa usp +Ġ4 28 +Ġshowc ases +ĠPublic ation +know ledge +inn on +4 19 +Ġretri eval +and ers +Ġref ute +Ġinqu ired +g ur +Ġneg ativity +Ġcons erve +Ġafter life +Ġpres upp +ĠGill espie +Ġm t +ĠD N +T ap +Ġper pend +ĠS my +does n +Ġsp illing +Ġhyp ers +K ate +® , +ke pt +ĠP owered +Ġj a +ĠK lux +ard e +ab an +Ġ4 44 +Ġflatt ened +ĠImprove ments +urg a +ĠK und +Ġins cribed +Ġfac ult +Ġunpre pared +ĠCons umers +Ġsatisf ies +Ġpul monary +Ġinf iltration +Ġex ternally +Ġcongrat ulations +ag han +Ġair liner +Ġfl ung +Ġfly ers +G D +Ġsnipp ets +Ġrec ursive +Ġmaster ing +L ex +Ġovert ly +v g +Ġluck ily +Ġenc ro +ĠLanc et +ĠAbyss al +function al +Ġs ow +Ġsqu id +Ġnar ration +Ġn aughty +ĠHon our +ĠSpart ans +Ġsh atter +ĠTac oma +ĠCal ories +ĠR aces +Sub mit +Ġpurpose fully +w av +ĠY ok +F est +ĠG err +Met ro +Ġit iner +f amous +Ġ" { +in line +was her +Iss ue +ĠCL IENT +oz o +Vers ions +7 25 +ĠGl ock +Ġshield ed +ĠPC R +ENC Y +ĠWe ld +ĠSim pl +Ġredirect ed +ĠK ham +Ġ( > +Ġlab ou +Ġdi apers +ss l +Ġcell ar +organ isms +ore sc +ĠBer ks +did n +Sh ipping +C hest +Ġund one +Ġmillion aire +Ġc ords +ĠYoung er +appropri ately +Ġsequ els +u ve +ant icipated +Ġle wd +ĠSh irt +ĠDmit ry +V eter +Ġsl aying +ĠY ar +Ġcompl ication +I owa +ĠEric a +ĠBL M +g irlfriend +b odied +6 26 +19 63 +Ġintermedi ary +Ġcons olation +M ask +ĠSi em +ow an +Beg inning +Ġfix me +Ġculmin ated +Ġcon duc +ĠVolunte er +Ġpos itional +Ġgre ets +ĠDefin itions +Ġthink er +Ġingen uity +Ġfresh men +ĠMom ents +Ġ35 7 +ate urs +ĠFed Ex +s g +69 4 +Ġdwind ling +ĠBO X +sel age +Ġt mp +Ġst en +ĠS ut +Ġneighbourhood s +Ġclass mate +f ledged +Ġleft ists +Ġclim ates +ATH ER +ĠScy the +ul iffe +Ġs ag +Ġho pped +ĠF t +ĠE ck +ĠC K +ĠDo omsday +k ids +Ġgas ped +Ġmon iker +ĠL od +ĠC FL +t ions +r ums +fol ios +Ġm d +Ġunc anny +Ġtrans ports +ĠLab rador +Ġrail ways +Ġappl iance +ĠCTR L +æ Ģ +Pop ulation +ĠConfeder acy +Ġunb earable +Ġdors al +ĠIn form +op ted +ĠK ILL +Mar x +Ġhypoc ritical +q us +ĠN umerous +ĠGeorg ian +ĠAmbro se +ĠL och +Ġgu bernatorial +ĠX eon +ĠSupp orts +ens er +ee ly +ĠAven ger +19 65 +Ar my +Ġju xtap +Ġcho pping 
+ĠSpl ash +ĠS ustainable +ĠFin ch +Ġ18 61 +ict ive +at meal +ĠG ohan +Ġlights aber +ĠG PA +ug u +ĠRE PL +vari able +Ġher pes +Ġdesert s +ac iously +Ġsitu ational +week ly +ob l +Ġtext ile +ĠCorn wall +Ġcontrace ptives +ĠA ke +] - +ä¹ ĭ +: , +ĠW em +ĠB ihar +Ġ' . +Ġbe re +Ġanal ogue +ĠCook ies +Ġtake off +Whe el +Ġmaj estic +Ġcomm uting +0 23 +ĠCor pse +ass ment +min i +Ġgor illa +ĠAl as +ere e +Ġacquaint ances +ĠAd vantage +Ġspirit ually +Ġey ed +pm wiki +ĠE nder +Ġtrans lucent +Ġnight time +ĠIM AGES +5 45 +ĠK amp +ĠFre ak +Ġ ig +Port land +4 32 +ĠM ata +Ġmar ines +Ġh ors +ater asu +ĠAtt ribution +Ġ-------- - +Ġk ins +ĠBEL OW +++ + +Ġre eling +ol ed +Ġcl utter +ĠRel ative +Ġ4 27 +B US +Ġa vert +ĠChe ong +ĠA ble +ĠPry or +Develop er +Ġen cyclopedia +ĠUSA F +ĠG arry +Sp ain +Bl ocks +Ġexp osition +ĠGamer Gate +W OR +Ġstockp ile +Ġclot hed +ĠT one +ĠR ue +t umblr +Ġtreacher ous +Ġf rying +Ñ Į +ĠS ph +Ġrest raints +Ġemb odies +ĠG es +S afety +Ġnegoti ators +min ing +ĠAppalach ian +L OS +ĠJenn a +Ġpass ers +ç ĭ +sn ap +Ġshort en +creat or +Ġinn umerable +uther land +67 4 +ĠW OM +ĠAs cend +ĠArm ory +ĠTrans action +K ick +Ġsuit case +day Name +Ġwaste ful +mar riage +ĠMcC abe +ite ch +ĠO ss +Cl osure +ĠTreasure r +Ġindec ent +ĠD ull +Ġresid ences +19 59 +ĠS ettlement +Ham ilton +Ġself ies +ĠRank ing +ĠBark ley +ĠB ore +ĠW CS +ĠMar itime +ĠH uh +ĠForest ry +Ġcultiv ating +ĠBall ard +Ġg arrison +ĠSD L +9 30 +Ġnas cent +Ġirresist ible +Ġaw fully +\/ \/ +Ġequ ate +Ġanthrop ology +ĠSylv ia +Ġintest ine +Ġinnoc uous +cess ive +ag ra +ĠMet roid +G rant +8 55 +ģ ĸ +Ġ" _ +ãĥĥ ãĥī +Ġappra isal +ĠFred dy +04 6 +Ġ40 6 +Ġ18 30 +Ġd ocking +St atic +Ġp ont +ĠVolt age +ĠSt ead +ĠMort gage +ĠJon ah +Y L +CLASS IFIED +Ġas bestos +nik ov +Ġcoll agen +ĠOrb ital +P ocket +7 99 +Ġhy brids +inc hes +Ġinv oice +und y +Ġinequ alities +T rend +w ashed +B ALL +Ġluc id +ĠComment ary +Ġw itty +Br andon +Ġbru ising +Ġ6 20 +es cent +box ing +P OL +Ġ3 78 +R ect +Ġlic ences +ĠMcG ee +p ressed +D anny +Ġj ammed +ord inate +Ġle th +Ġdistingu ishes +ĠYam aha +IL S +ĠH ume +ĠC ategories +Rober ts +Ch art +Ġbeet le +ĠGra veyard +Ġ($ ) +o ÄŁ +Ġtw ilight +are lla +á ½ +Ġbooth s +ĠH HS +ĠFeld man +Ġexcav ation +Ġphilosoph ies +at ography +ĠGar age +te chnology +Ġunfor gettable +Ġver ifying +Ġsubord inates +E ls +Ġne b +G aming +EN A +ĠAchieve ment +it ters +ĠG abe +Ġd umps +for cer +Ġpo ignant +ĠM BA +ĠHe idi +ime i +Ġm ages +Ġliber ate +Ġcircum cised +ĠMer maid +ĠMat th +t ogether +ĠW ichita +Ġstore front +ĠAd in +V II +Four th +Ġexplore rs +W ER +Not able +Bro ok +m ens +F aith +-------- - +ĠJ ou +¬ ¼ +Ġpine apple +Ġam alg +el n +ark able +ĠãĤµ ãĥ¼ãĥĨãĤ£ +ĠãĤµãĥ¼ãĥĨãĤ£ ãĥ¯ãĥ³ +Ġov arian +ĠE choes +Ġhairc ut +Ġp av +Ġch illed +anas ia +Ġsty led +Ġd ab +ni per +Ġminister ial +ĠD UP +T an +Ġsul ph +ĠD eter +ĠBo hem +od an +Ġeduc ator +â ĵĺ +sp ir +Ch icken +ĠE leanor +Ġqu i +Ġheav iest +Ġgrasp ed +U RA +Ġcro oked +Jess ica +pro blem +Ġpred etermined +Ġman iac +Ġbreath s +ĠLauder dale +Ġh obbies +y z +Cr ime +Ġcharism a +d L +Ġle aping +Ġk ittens +Ang elo +ĠJ ACK +ĠSu zanne +Ġhal ting +ENT ION +Ġswall owing +ĠEarthqu ake +Ġeight eenth +ĠN IC +ĠIN F +ĠCons cious +Ġparticular s +circ le +7 40 +Ġbene volent +Ġ7 47 +Ġ4 90 +Ġr undown +ĠVal erie +ĠB UR +Ġcivil isation +ĠS chn +W B +ot ide +intern ational +Ġj ohn +Ġ19 02 +Ġpe anuts +Ġflav ored +k us +Ġro ared +Ġcut off +é £ +Ġorn ament +Ġarchitect ures +Ġ3 69 +ol or +ĠWild e +ĠC RC +ĠAdjust ed +Ġprov oking +land ish +Ġrational ity +Ġjust ifies +Ġdisp el +Ġa meric +ĠPol es +Ø © +Ġen vis +ĠD oodle 
+ä½ ¿ +igs aw +auld ron +Techn ical +T een +up hem +ĠX iang +Ġdetract ors +ĠZ i +ĠJournal ists +Ġconduc ive +ĠVolunte ers +Ġs d +Know ing +Ġtrans missions +ĠPL AN +ĠL IB +Ġall uded +Ġob e +Ġd ope +ĠGold stein +Ġwavelength s +ĠDest ination +nd a +ug i +Ġattent ive +ĠLe an +ral tar +Ġman g +mb uds +ak ings +b ender +Ġacc ol +Ġcraw led +N OW +Min nesota +Ġflour ished +ĠZ up +ĠSuper visor +ĠOliv ier +Ex cellent +Ġwid en +D one +Ġw ig +Ġmiscon ceptions +Cor p +W an +Ġvener able +ĠNot ably +ĠKling on +an imate +Bo ost +ĠS AY +miss ing +ibli ography +mel on +Ġpay day +Ø ³ +bo le +Ġve iled +ĠAl phabet +It alian +Ġever lasting +ĠR IS +ĠC ree +rom pt +Ġh ating +Ġgrin ning +Ġge ographically +OS H +Ġwe eping +ĠÂłĠÂłĠÂłĠÂł ĠÂłĠÂłĠÂłĠÂł +Ġimpe cc +Let ter +Ġblo ated +PL A +ĠFe in +Ġper sever +Th under +Ġa ur +ĠR L +Ġpit falls +âĸ º +Ġpredomin ant +Ġ5 25 +7 18 +AP E +7 14 +Ġfarm land +ĠQ iao +Ġv iolet +ĠBah amas +Ġinflic ting +ĠE fficiency +Ġhome brew +Ġundert ook +Ġcur ly +ĠHard ing +man ia +59 6 +Ġtem pered +Ġhar rowing +ĠP ledge +ĠFranken stein +è ª +M otion +Ġpredict ably +ĠExpl osion +oc using +er d +col o +FF ER +Ġback field +ĠV IDE +ue bl +N arr +ĠArg ument +Ġgen omic +Ġbout ique +Ġbatt ed +ĠB inary +Ġg amb +ĠRh ythm +67 3 +Ġa float +ĠOlymp ia +Y ING +Ġend if +is in +Ġwin ters +Ġsc attering +I v +D istance +Ġtr u +ĠCom fort +Ġne xus +Ġair flow +ĠByz antine +p ayers +con i +ĠB etsy +D eal +ĠN ug +ĠContin ent +red ibly +Ġoptim izing +al beit +Ġec static +ĠPro to +ç · +iv ot +âĸ Ħ +em p +rou nder +Ġcl out +ĠI ST +66 3 +ĠDoll ars +ĠD AC +Ġsubsc ribed +Ġrehears al +Ġam ps +ĠSh ang +es m +Ġspr inkle +Ġassail ant +ĠO o +ĠCoin base +T act +Ġret ina +Ġn uns +R ON +att o +Ġj ug +ĠSV G +Ġb ikini +ĠFI LE +ĠFound ers +ep ort +ĠK P +Ġrest ores +ĠTh ick +Ġash ore +Ġappro vals +R ender +M AG +G raham +ĠCort ana +ãĥ³ ãĤ¸ +ss h +or ians +ars ity +ĠInsp ired +u pper +Ġsign alling +Ġreb uke +Ġfl ares +Ġdownt ime +Stud ies +Ġstagn ation +ĠSequ ence +Ġgr unt +Ġass ures +ĠPL A +59 2 +Ġintra ven +d epend +Sus an +ĠManz iel +Man ia +Cont ract +Ġsl ams +Ġcult ured +Ġcred itor +L IST +ĠH UM +ĠChatt anooga +serv ed +Ġclo aked +ĠF TP +p owder +ĠSt ella +uct ive +Ġcheap ly +ĠMU CH +ĠGalile o +Ġsu ites +spe ech +Ġdeliber ations +ĠCh ips +« ĺ +Bal ance +ĠWyn ne +ĠAk ron +Ass et +Ġhon oured +Ġed ged +Like wise +anim ous +ĠW age +ĠEz ek +ad vertisement +ĠRT X +ĠM AD +Ġmigr ating +ĠS QU +Ġ4 75 +Ed ited +Ġshorth and +ĠBas ics +Ġcro tch +ĠEV EN +Ġv m +effic iency +Ġcal ves +ĠF rie +ĠBrill iant +Ġstri kers +Ġrepent ance +Ġarter ies +r l +B ed +h ap +Ġcrypt ography +ĠSab res +Ġ4 14 +vi ks +ih ara +aps es +T alking +Ġintertw ined +Ġdoc ks +Ġalle le +ĠArt ifact +ĠH IM +t orn +ç ķ +Ġop acity +ĠE ly +os uke +Ġn ipple +Ġhand written +ĠV K +ĠChamber lain +ĠLa os +ig raph +g row +Ġtr illions +Ġdescend ant +ĠSail or +as uring +Ġce ilings +ĠWare house +f lying +ĠGl ow +Ġn ont +Ġmiscar riage +Ġrig s +Ġmin istries +Ġelabor ated +Ġdel usional +ĠHum ane +Ġ3 79 +n ets +Ġblack out +add ers +Ġn p +ĠT ire +ro sc +Ġsub div +Ġlink age +Ġchron ological +ĠHER O +Ġres ettlement +ĠVin yl +Ġpast oral +ĠMob il +ĠBar bar +Co oldown +ĠF ritz +c riminal +re pe +Ġbell ig +ĠBre ed +Ġ4 18 +Ġsem blance +ij k +Ġcur tail +Ġclin ch +cont ained +ĠProm pt +ast on +Ġw i +Ġpursu its +5 15 +ĠGl oss +Ġfl ips +Ġcoup ons +Ġcl oning +ĠLike ly +Rem oved +ĠQu artz +r ices +ĠSpe ars +Ġp ious +Ġdep reciation +ĠD are +oun ces +am az +O nt +Ġp innacle +d ocker +0 26 +ĠW yr +ĠPro per +Ë Ī +n il +By tes +Ġseek er +t rial +Ġunf olds +ĠMar se +Ġextravag ant +ĠSurviv ors +RED ACTED +ĠSpeed 
way +ĠCra igslist +sub mit +ĠGener ations +Ġup holding +Ġblood stream +ĠMiss ions +ĠL awn +Ġlim bo +ene i +H uh +ĠWild cats +pre p +ĠMark us +ĠFor bidden +rit ic +IN O +Ġexhib iting +requ ent +ch uk +Ġhabit ual +ĠComp atibility +Dr ag +RIP T +uj ah +GR OUND +Ġdelinqu ent +Ġburn er +Ġcontempor aries +Ġgimm ick +load s +Ġno zzle +p odcast +ĠW ak +ĠStat en +ĠK uh +ãģ ĵ +inter rupted +Ġinv incible +ĠBurn ett +cig arette +ĠPeb ble +ĠTem porary +ĠMar ino +58 2 +Ġwast eland +ident ly +T x +Ġr ite +ĠPan asonic +ĠM iddles +ĠHort on +ae us +Ġc uring +Ġm ats +Ġadj ourn +Ġfears ome +pe z +bo ats +Ġpro pell +Ġconflic ted +ĠAng er +Ġinsurg ent +K arl +Ġco ales +Ġsouth western +Ġdis su +ĠO vert +******** **** +Ġbox ed +ĠBr une +aa a +Ġgard ening +ĠEng el +tr acks +Ġpur ified +Ġplace holder +ĠL ikes +Ġd an +G ab +Ġe ct +ĠF aw +ĠEl iot +Ġ' , +otrop ic +ĠRu in +hed on +Ġca ul +Ġa ft +ĠCad illac +gh a +ass ian +ud eb +ĠT ick +Ġadjust s +AR GET +5 37 +isc he +ant y +ĠFried rich +ĠBl izz +ĠA OL +Camp aign +Ġmamm al +ĠVe il +ĠK ev +ĠMaur it +ĠDam ien +N ation +E astern +Ġ{ : +Ġ= ================================ +Ġstereotyp ical +Ġatt ic +ĠCy borg +requ ire +Ġaward ing +ĠPap ua +bt n +b ent +B oo +Ġ( = +ĠX ander +ĠSomers et +Ġcatch y +Ġcert ify +STR UCT +Ġit al +Ġt ides +ĠBr ands +G ray +comp etitive +Ġcur ator +ĠD G +omin ium +ĠGM Os +ci ating +ĠCarm en +ow ard +Balt imore +Ġr gb +C u +Ġwip es +spe ll +IT NESS +Ġsummar izes +ĠRe vis +Ġwhistlebl owers +ĠBre ach +Ġcro chet +k os +ews ki +Ġrep et +Ġcrim son +ĠKar achi +read able +dim ension +ĠI gor +ild ed +ĠZ ed +ĠKe ane +ĠCos metic +DE P +Ġretreat ing +ĠU A +ens ical +Ġd usk +ĠDick ens +Ġaren as +ĠPass age +level s +Ġcur v +P ope +Ġch ores +ĠEl ise +ĠComp ass +b ub +Ġmamm alian +ĠSans krit +ĠAN C +ĠCr ack +Q ual +L aun +amp unk +Ġlearn ers +Ġglam orous +Ġfur the +erm ott +c and +Gener ic +Ġnarr ated +Ġdisorder ly +ĠTrans actions +ĠDet ention +ĠR oku +Ä į +Ġunder statement +ĠS aur +ĠRodrig o +ĠAS AP +S in +Ġre joice +Method s +Ġelectro de +Ġworsh ipped +Ġid i +ĠPhys icians +Ġpop up +Ġde ft +ĠRem oval +ĠBu enos +ver bs +Ġfun k +ush a +rict ion +ore a +ĠBang alore +ĠKen obi +zz i +Ġnorm ative +Ġgobl ins +Ġcaf es +ĠUN CLASSIFIED +ĠF ired +S IGN +Ġs clerosis +ĠV oter +ĠSon ny +ĠExt end +ĠEV s +Ar senal +Ġp si +Ġwid est +ĠT us +Ġlo oms +Ġjust ifying +ĠGr anger +è ¯ +Ref er +58 3 +Ġflour ishing +ab re +Ġr ave +ĠCont ra +Ġ18 98 +Add s +Ġf ul +ĠCo oke +some one += # +67 1 +Ġy ak +Ġar te +ĠMis cellaneous +ĠDet ection +ĠCl ancy +â ģ +ass ies +Ġval iant +ĠFemin ist +cor ruption +V el +P ear +Ġsucc inct +Ġquick est +k w +Ġsp itting +ĠL ibraries +åħ ī +ant z +D ad +ĠSpec ifications +rup ulous +and r +RES ULTS +Ġsnow ball +Ġpred is +ĠB axter +ĠNurs ing +ĠCh aff +s we +Ġout age +Ġnest ing +Ġnotor iety +tr igger +on ite +j on +Ġf ou +ook ed +ĠCelebr ity +re ality +Ġfat ig +Ġhug ging +Ġbother s +ĠPan zer +ĠCh andra +fig ured +Ġvol ts +ĠCloud s +Ġfee ble +ĠCur ve +ĠAs us +78 6 +abs or +ĠV ICE +ĠH ess +Ġmanufact ures +Ġgri zz +ĠPower ful +ac id +Ġsub sections +ĠKrug man +ĠAl ps +is u +Ġsequ est +ĠUlt ron +ĠT inker +ĠGo ose +Ġmism atch +Att orney +Ġmorph ology +ĠSix ers +ut tered +ĠE LECT +gr an +Rus sell +ĠG SL +Ġfort night +Ġ. 
) +Ġapost le +pr one +el ist +Unt itled +ĠIm plementation +ist ors +Ġtank er +Ġpl ush +Ġattend ants +ĠT ik +ĠGreen wich +ĠY on +ĠSP L +cell s +unt led +S olution +ĠQu é +Ġvac ated +Ġupt ick +ĠMer idian +æ ĥ +ĠDr ill +9 25 +58 4 +Ġrenov ated +ĠKub rick +zy k +Ġl ousy +pp el +ohyd rate +ĠI zzy +lesi astical +CC C +ĠAj ax +Ġad apters +ĠPetra eus +Ġaffirm ation +ĠST OR +le ms +ad oes +ĠConstantin ople +Ġp onies +Ġl ighthouse +Ġadherent s +ĠBre es +omorph ic +Fight ing +Ġpl aster +ĠP VC +ĠOb st +Ġdear ly +ĠTo oth +icks on +Ġsh aming +P lex +A gg +ĠâĢ¦ " +Ġsub reddits +Ġpige on +ĠResident ial +ĠPass ing +Ġl um +ĠP ension +Ġpessim istic +Ġ4 32 +z inski +c ade +0 75 +Ġapolog ised +iy ah +Put ting +Ġgloom y +ĠLy me +=-=-=-=- =-=-=-=- +ĠT ome +ĠPsych iatric +ĠH IT +c ms +ap olog +Ġbreak er +Ġdeep en +Ġtheor ist +ĠHigh lands +Ġb aker +Ġst aples +Ġinterf ered +ĠAb ortion +jo ined +ch u +Ġform ulate +Ġvacc inations +Ġban ter +phe us +Ġoutfield er +ĠM eter +Ġ# #### +Ġ18 95 +Ġnarrow ing +ĠST ORY +f p +ĠC ST +ign ore +Ġproclaim ing +ĠR U +ĠB ALL +yn a +65 3 +Ġpos it +P RE +59 4 +ĠRegist rar +ĠPil grim +ic io +Ġpre tt +Ġlif eless +Ġ__ _ +Ne igh +ĠCh urches +orn o +Ġor cs +Ġkind red +ĠAud it +Ġmillenn ial +ĠPers ia +g ravity +ĠDis ability +ĠD ARK +W s +od on +Ġgrand daughter +ĠBro oke +ĠA DA +ER A +Ġpick ups +ĠWil kinson +ĠSh ards +ĠN K +Ġexp el +ĠKis lyak +Ġj argon +Ġpolar ized +ian e +Pub lisher +Ġreb utt +Ġapprehens ion +ĠK essler +Ġpr ism +F UL +19 64 +ĠL oll +ä ¿ +le thal +Å Ł +Ġg hetto +Ġb oulder +ĠSlow ly +ĠOsc ars +ĠInst ruction +ĠUl tr +ĠM oe +N ich +ĠP ATH +( * +ĠRE LEASE +un ing +rou se +en eg +Ġre imb +ĠDet ected +Do S +Ġster ling +Ġaggreg ation +ĠLone ly +ĠAtt end +hig her +Ġairst rike +ks on +SE LECT +Ġdef lation +ĠHer rera +C ole +rit ch +Ġadvis able +F ax +Ġwork around +Ġp id +mort em +ers en +Ġtyp o +Ġal um +78 2 +ĠJam al +script s +Ġcapt ives +ĠPres ence +ĠLie berman +angel o +Ġalcohol ism +ass i +Ġrec ite +Ġgap ing +Ġbask ets +ĠG ou +Brow ser +ne au +Ġcorrect ive +und a +sc oring +ĠX D +Ġfil ament +Ġdeep ening +ĠStain less +Int eger +Ġbu ggy +Ġten ancy +ĠMub arak +Ġt uple +ĠD roid +ĠS itting +Ġforfe it +ĠRasm ussen +ixt ies +es i +ĠKim mel +Ġmetic ulously +Ġap opt +ĠS eller +08 8 +ec ake +hem atically +T N +Ġmind less +Ġdig s +ĠAcc ord +ons ense +em ing +br ace +Ġe Book +ĠDist ribut +ĠInvest ments +w t +] ), +beh avior +56 3 +Ġbl inding +ĠPro testers +top ia +Ġreb orn +ĠKel vin +ĠDo ver +ĠD airy +ĠOut s +Ġ[ / +Ï Ģ +b p +ĠVan ity +ĠRec ap +ĠHOU SE +ĠF ACE +Ġ4 22 +69 2 +ĠAnt ioch +cook ed +Ġcoll ide +Ġa pr +Ġsle eper +ĠJar vis +Ġalternative ly +ĠLe aves +ĠM aw +Ġantiqu ity +ĠAdin ida +Ġab user +Poké mon +Ġass orted +ĠRev ision +ĠP iano +ĠG ideon +O cean +Ġsal on +Ġbust ling +ogn itive +ĠRah man +Ġwa iter +Ġpres ets +ĠO sh +ĠG HC +oper ator +Ġrept iles +Ġ4 13 +ĠG arr +ĠCh ak +Ġhas hes +Ġfail ings +Ġfolk lore +Ġab l +ĠC ena +ĠMac Arthur +ĠCOUR T +Ġperipher y +app ers +Ġreck oned +ĠInf lu +ĠC ET +Ġ3 72 +ĠDefin itive +ass ault +4 21 +Ġreservoir s +Ġd ives +ĠCo il +DA Q +Ġvivid ly +ĠR J +ĠBel lev +Ġec lectic +ĠShow down +ĠK M +ip ed +reet ings +ĠAs uka +L iberal +ĠÏ Ħ +Ġbystand ers +ĠGood win +uk ong +S it +ĠT rem +Ġcrim inally +ĠCirc us +ch rome +88 7 +Ġnan op +ĠOb i +ĠL OW +o gh +ĠAuth ors +ob yl +Ur ban +Ġt i +ĠWe ir +t rap +ag y +Ġparent heses +Ġout numbered +Ġcounter productive +ĠTob ias +ub is +P arser +ST AR +Ġsyn aptic +ĠG ears +Ġh iber +Ġdebunk ed +Ġex alted +aw atts +H OU +Ch urch +ĠPix ie +ĠU ri +ĠForm ation +ĠPred iction +C EO +Ġthro tt +ĠBrit ann +ĠMad agascar +ë ĭ +Ġbill boards 
+ĠRPG s +ĠBe es +complete ly +F IL +Ġdoes nt +ĠGreen berg +re ys +Ġsl ing +Ġempt ied +ĠPix ar +ĠDh arma +l uck +ingu ished +Ġend ot +Ġbab ys +05 9 +che st +r ats +Ġr idden +Ġbeet les +Ġillum inating +Ġfict itious +ĠProv incial +Ġ7 68 +Ġshe pherd +ĠR ender +Ġ18 96 +C rew +Ġmold ed +ĠXia omi +ĠSp iral +Ġdel im +Ġorgan ising +Ġho ops +ĠBe i +z hen +Ġfuck in +Ġdec ad +Ġun biased +am my +sw ing +Ġsmugg led +Ġk ios +ĠP ERSON +ĠInquis itor +Ġsnow y +Ġscrap ing +ĠBurg ess +P tr +ag ame +R W +Ġdro id +ĠL ys +ĠCass andra +Jac ob +Ġ35 4 +Ġpast ure +Ġfr anc +ĠScot ch +ĠEnd s +ĠI GF +def inition +Ġhyster ical +ĠBrown e +77 1 +Ġmobil ization +æ ķ +iqu eness +Th or +Ġspear headed +Ġembro iled +Ġconject ure +jud icial +Ch oice +Ġpaper back +P ir +Ġrec overs +ĠSur ge +ĠSh ogun +ĠPed iatrics +ãģ ł +Ġsweep s +ĠLabor atories +ĠP acks +al us +add in +Ġhead lights +g ra +Ev idence +COL OR +Ad min +Ĭ ± +Ġconco ct +s ufficient +Ġun marked +Ġrich ness +Ġdiss ertation +Ġseason ing +Ġg ib +ĠM ages +un ctions +ĠN id +che at +ĠTM Z +c itizens +ĠCatholic ism +n b +Ġdisemb ark +ĠPROG RAM +a ques +Ty ler +Or g +ĠSl ay +ĠN ero +ĠTown send +IN TON +te le +Ġmes mer +9 01 +Ġfire ball +ev idence +aff iliated +ĠFrench man +ĠAugust a +0 21 +Ġs led +Ġre used +ĠImmun ity +Ġwrest le +assemb led +Mar ia +Ġgun shots +ĠBarb ie +Ġcannabin oids +ĠTo ast +ĠK inder +IR D +Ġre juven +Ġg ore +Ġrupt ure +Ġbre aching +ĠCart oon +Ġ4 55 +ĠPale o +6 14 +Ġspe ars +ĠAm es +ab us +Mad ison +GR OUP +Ġab orted +y ah +Ġfel on +Ġcaus ation +Ġprep aid +Ġp itted +op lan +ĠShel ley +ĠRus so +ĠP agan +Ġwill fully +ĠCan aver +und rum +ĠSal ary +ĠAr paio +read er +ĠR ational +ĠOver se +ĠCa uses +Ġ* . +Ġw ob +Ke ith +ĠCons ent +man ac +77 3 +6 23 +Ġfate ful +et imes +Ġspir ited +ĠD ys +Ġhe gemony +Ġboy cot +ĠEn rique +em outh +Ġtim elines +ĠSah ara +ĠRel ax +ĠQuin cy +ĠLess ons +ĠE QU +SE A +N K +ĠCost co +Incre ase +Ġmotiv ating +ĠCh ong +am aru +ĠDiv ide +Ġped igree +ĠTasman ia +ĠPrel ude +L as +9 40 +57 4 +Ġch au +ĠSp iegel +un ic +-- > +ĠPhil ips +ĠKaf ka +Ġuphe aval +Ġsent imental +Ġsa x +ĠAk ira +ser ial +Mat rix +Ġelect ing +Ġcomment er +ĠNeb ula +ple ts +ĠNad u +ĠAd ren +Ġen shr +ĠR AND +fin ancial +ĠCly de +uther ford +Ġsign age +Ġde line +Ġphosph ate +rovers ial +f ascist +ĠV all +ĠBeth lehem +Ġfor s +Ġeng lish +S olid +N ature +Ġv a +ĠGu ests +Ġtant al +Ġauto immune +;;;;;;;; ;;;; +ĠTot ally +ĠO v +Ġdef ences +ĠCoc onut +Ġtranqu il +Ġpl oy +Ġflav ours +ĠFl ask +ãĤ¨ ãĥ« +ĠWest on +ĠVol vo +8 70 +Ġmicro phones +ver bal +R PG +Ġi ii +; } +0 28 +Ġhead lined +Ġprim ed +Ġho ard +ĠSh ad +ĠEN TER +Ġtri angular +Ġcap it +l ik +ĠAn cients +Ġl ash +Ġconv ol +Ġcolon el +en emy +G ra +Ġpub s +ut ters +Ġassign s +ĠPen et +ĠMon strous +ĠBow en +il ver +H aunted +ĠD ing +start ed +pl in +Ġcontamin ants +ĠDO E +ff en +ĠTechn ician +R y +Ġrob bers +Ġhot line +ĠGuard iola +ĠKau fman +row er +ĠDres den +ĠAl pine +E lf +Ġf mt +ĠS ard +urs es +g pu +Un ix +Ġunequiv ocally +ĠCitizens hip +qu ad +m ire +ĠS weeney +B attery +6 15 +Ġpanc akes +Ġo ats +M aps +ĠCont rast +mbuds man +ĠE PS +Ġsub committee +Ġsour cing +Ġs izing +ĠBuff er +ĠMand atory +Ġmoder ates +ĠPattern s +ĠCh ocobo +ĠZ an +ĠSTAT ES +ĠJud ging +ĠIn her +* : +Ġb il +ĠY en +Ġexh ilar +oll ower +z ers +Ġsn ug +max imum +Ġdesp icable +ĠP ACK +ĠAn nex +Ġsarcast ic +Ġlate x +Ġt amp +ĠS ao +b ah +ĠRe verend +ĠChin atown +ĠA UT +d ocumented +ĠGA BA +ĠCan aan +ĠÙ ħ +Ġgovern s +pre v +E sc +ĠEst imates +OS P +Ġendeav our +ĠCl osing +omet ime +every one +Ġwor sen +Ġsc anners +Ġdev iations +ĠRobot ics +ĠCom pton 
+Ġsorce rer +Ġend ogenous +Ġem ulation +ĠPier cing +ĠA ph +ĠS ocket +Ġb ould +ĠO U +ĠBorder lands +Ġ18 63 +G ordon +ĠW TO +Ġrestrict s +Ġmosa ic +Ġmel odies +ç Ħ +T ar +Ġdis son +ĠProv ides +Ġ ...... +b ek +F IX +Ġbro om +ans hip +Do ctors +Ġner ds +ĠReg ions +na issance +Ġmet e +Ġcre pt +pl ings +Ġgirlfriend s +kn it +ig ent +ow e +Ġus hered +ĠB az +M obil +4 34 +ĠPres ents +orig in +Ġins omnia +ĠA ux +4 39 +ĠCh ili +irs ch +G AME +Ġgest ation +alg ia +rom ising +$ , +c row +ĠIn spection +at omic +Rel ations +J OHN +rom an +ĠClock work +ĠBak r +m one +M ET +Ġthirst y +Ġb c +Ġfacult ies +R um +Ġnu ance +ĠD arius +ple ting +fter s +etch up +Reg istration +ĠK E +R ah +Ġpref erential +ĠL ash +ĠH H +Val id +ĠN AV +Ġstar ve +ĠG ong +z ynski +ĠAct ress +Ġw ik +Ġun accompanied +lv l +Br ide +AD S +ĠCommand o +ĠVaugh n +Wal let +Ġho pping +ĠV ie +Ġcave ats +Ġal as +if led +ab use +66 1 +Ġib n +Ġg ul +Ġrob bing +t il +IL A +Ġmit igating +Ġapt ly +Ġty rant +Ġmid day +ĠGil more +ĠDe cker +Ġ§ § +part ial +Ex actly +Ġphen otype +Ġ[+ ] +ĠP lex +ĠI ps +vers ions +Ġe book +Ġch ic +g ross +":" "},{" +ĠSur prisingly +M organ +Ġresid ues +ĠConf ederation +in feld +Ġl yr +mod erate +Ġperpend icular +V K +Ġsynchron ized +Ġrefres hed +Ġad ore +ĠTor ment +ol ina +Ġ26 00 +Item Tracker +Ġp ies +ĠF AT +ĠR HP +0 48 +ĠRES P +ĠB J +all ows +P and +Ġunw elcome +ĠV oc +ĠBast ard +ĠO W +ĠL AR +ĠHeal er +Environment al +ĠKen yan +ĠTr ance +ĠP ats +Ġali ases +ĠGar field +Ġcampaign er +Ġadvance ments +ĠOkin awa +ĠC oh +ows ky +Ġstar ved +Ġsize able +Ġ: -) +Ġm RNA +Ġsusp ensions +ist ar +Scot land +Pr in +-------------------------------- ---------------- +Ġ50 2 +Ġteasp oons +Ġ10 50 +Ġcoerc ive +ĠMason ic +edd ed +ĠPass enger +Ġl att +Ġbr aces +ĠSt eal +ĠNY T +ĠK ats +ĠCel est +ae z +T u +ĠCoul ter +ðŁ ĺ +Fl ickr +ĠWil mington +ith s +++ ; +Ġv ending +Ġneg ro +ĠPh i +ĠYellow stone +Call back +Ġsh ampoo +ĠSh ades +w at +Ġsuper human +Ġridic uled +Ġhol iest +om bo +Ġintern s +Ġh one +ĠPar agu +UR I +Ġd angling +ãĤ » +so v +ict ional +av ailability +Ġrev ocation +Ġd ow +in ic +ĠTHE IR +Ġis o +Ġout ings +ĠLeth al +Ġ) )) +Ġinacc ur +Ġout landish +Ġan us +let ico +id on +l ol +Ġun regulated +Ġsuccumb ed +Ġc uff +ĠWast eland +let al +Ġsub str +Ġcoff ers +Ġautom akers +ov i +ĠX ue +ĠDayton a +Ġjar ring +Ġf umes +Ġdisband ed +z ik +itt on +Ġstriking ly +Ġsp ores +Ad apter +.) 
: +ĠLynd on +ival ry +Ġor ally +Ġtumult uous +Ġdisple asure +Ġcon es +or rect +Ġappe ase +Ġder by +ĠTrip oli +ĠAl ess +Ġp oked +ĠGu ilty +v P +En ough +Ġorig inals +6 99 +Ġrabb i +Ġproverb ial +Ġpostp one +el ope +ĠMist y +Ġstaff ed +ĠUn employment +redit ary +Ġdilig ent +re comm +me asures +as in +8 25 +Ġpond s +Ġmm ol +ĠS AR +ĠC ARE +Ġ3 71 +Ġclen ched +ĠCors air +Ġcaric ature +z n +att ach +ĠSch ro +spe ak +p ainted +ĠS uc +ĠE NT +Ġcell ul +ĠP aid +di agn +WH ERE +Ġtext ed +B arn +Ġret racted +ĠRe ferred +S av +Ġup keep +Ġwork places +ĠTok ens +Ġampl ify +cl inical +Ġmult ic +mber g +Ġconvol uted +Reg ion +5 65 +ĠTop ic +Ġsn ail +Ġsal ine +Ġins urrection +ĠPet r +f orts +B AT +ĠNav ajo +Ġrud imentary +ĠLak sh +OND ON +Me asure +Ġtransform er +ĠGodd ard +Ġcoinc ides +ir in +R ex +ĠB ok +qu it +Ġshotgun s +Ġprolet arian +Ġsc orp +ĠAd a +5 14 +Ġsl ander +record ed +Ġemb ell +ris ome +Ġapolog izing +ĠMul cair +ĠGib raltar +Cl a +Ġall ot +ĠAtt ention +Ġ4 33 +le ave +Ġwh ine +ĠIss a +ĠFa ust +ĠBar ron +hen y +Ġvictim ized +J ews +Ġnurt uring +ett el +W inged +ĠSub tle +Ġflavor ful +ĠRep s +eng ed +call back +Ġdirection al +Ġcl asp +ĠDirect ions +plan et +icult ure +Hel per +ic ion +ac ia +Ġç ¥ŀ +Ġsur ges +Ġcan oe +ĠPrem iership +be en +Ġdef ied +ĠTro oper +Ġtrip od +Ġgas p +ĠE uph +ĠAd s +vern ight +high ly +R ole +Ġent angled +ĠZe it +6 18 +ĠRust y +Ġhaven s +ĠVaugh an +HA EL +ĠSER VICE +/ , +Ġstr icken +Ġdel usions +Ġb is +ĠH af +Ġgrat ification +Ġent icing +UN CH +Ad ams +ĠOL ED +ĠBeet le +Ġ18 99 +ĠSO FTWARE +ateg or +V L +ĠTot em +ĠG ators +AT URES +Ġimped ance +Reg istered +ĠC ary +ĠAer ial +on ne +en ium +Ġd red +ĠBe g +Ġconcurrent ly +Ġsuper power +ĠX an +j ew +imes ter +ĠDick inson +âĶ ģ +F la +Ġp ree +ĠRoll ins +© ¶æ +Ġden omination +ĠL ana +5 16 +Ġinc iting +sc ribed +j uries +ĠWond ers +app roximately +Ġsusp ending +Ġmountain ous +ĠL augh +oid al +N s +Det ect +) = +ĠL uthor +ĠSchwarz enegger +ĠMull er +ĠDev i +ec ycle +J ar +6 13 +ĠL ongh +B ah +ĠSP ORTS +n w +Ġref inement +Ġwater ways +Ġd iner +Bl ade +68 3 +F ac +Ġinitial s +Ġro g +Ġparan ormal +B UT +Ġ[ ( +ĠSw anson +ĠM esh +âĸ ¬ +Impro ve +ĠRad iation +ĠEst her +ĠE sk +ĠA ly +ik y +Ġir rad +ĠBuck ingham +Ġref ill +Ġ. 
_ +Re pe +CON CLUS +Ġdifferent iated +Ġchi rop +ĠAt kins +Pat tern +Ġexc ise +Ġcab al +N SA +ĠST A +ĠS IL +ĠPar aly +Ġr ye +ĠHow ell +ĠCount down +ness es +alys ed +Ġres ize +ãĤ ½ +Ġbudget ary +ĠStr as +w ang +Ġap iece +Ġprecinct s +Ġpe ach +Ġsky line +Ġ35 3 +pop ular +App earances +ĠMechan ics +ĠDev Online +S ullivan +Z en +Ġp u +op olis +5 44 +Ġde form +Ġcounter act +ĠL ange +Ġ4 17 +Con sole +77 4 +Ġnodd ing +Ġpopul ism +Ġhe p +Ġcoun selling +compl iance +U FF +Ġunden iably +Ġrail ing +ĠHor owitz +ĠSim one +ĠBung ie +Ġa k +ĠTal ks +x ff +fl ake +Cr ash +Ġsweat y +Ġban quet +ĠOFF IC +Ġinvent ive +Ġastron omer +ĠStam ford +ĠSc are +ĠGRE EN +olic ited +Ġr usher +Ġcent rist +ight ing +Ġsub class +Ġdis av +Ġdef und +ĠN anto +oci ate +m ast +Ġpac if +Ġm end +e ers +imm igration +ESS ION +Ġnumber ing +Ġlaugh able +ĠEnd ed +v iation +em ark +P itt +Ġmetic ulous +ĠL F +Ġcongrat ulated +ĠBir ch +Ġsway ed +Ġsemif inals +Ġhum ankind +m atter +ĠEqu ip +opa usal +S aid +ĠLay out +Ġvo icing +Ġth ug +Ġporn ographic +I PS +Ġmo aning +Ġgriev ance +Ġconf essions +esc al +TEXT URE +Aut hent +os aurus +P urchase +Ġreleg ation +al ter +ĠÂł Âł +Ġr iddled +Ġo gre +ĠLow ell +Occ up +E at +ĠHy der +ĠAdvis er +Com merce +H unt +ĠOr th +ĠComp etitive +ĠCL A +CD C +Ġsal ads +F le +Ġindustrial ized +` , +ĠO WN +Ġbec k +ĠPart icularly +oub t +Ġm M +ĠHuss ain +ĠChen nai +Ġ9 20 +Ġappoint ing +ĠCull en +,,,, ,,,, +Ġp ores +ver ified +Ġbi ochemical +em ate +Ġcoward ly +ĠHels inki +ĠEthiop ian +S OURCE +ER C +est ro +Ġbi otech +ĠS our +Ġbrew er +Bloom berg +Ġintens ify +Gl ass +an co +ĠF DR +gre SQL +ĠF ires +©¶æ ¥µ +ec o +100 1 +ĠHom eless +Ġinstant aneous +ĠH aste +ig el +D iamond +Ġp aving +Ġland fill +Ġd ads +h oun +: ] +Ġinc endiary +ĠLiving ston +ĠHil bert +ĠChe cks +st yles +in ators +ĠCl ive +ph rine +Ġchimpan zees +Ġp all +ĠJ M +ĠAad haar +ð Ŀ +Ġachie vable +dis abled +P ET +OOOO OOOO +M ot +Ġint angible +Ġbal let +ĠWe bs +ĠEst imated +Effect s +Ġb ailed +Josh ua +Ġturb ulence +Ġoccup ant +ĠDay light +Ġ36 1 +me et +Ġstat ically +Ġon look +Ġk i +il legal +Ġvel vet +Ġdehyd ration +Ġacqu ies +ĠRe z +ak ura +ĠU pton +at ro +Ġincomp rehensible +Ġback door +ĠRh ino +7 27 +Ġmath s +) + +Ġhe resy +Ġd f +ĠRoc he +ĠL ydia +Ġpanc reat +re ply +arre ll +Ġsolicit ation +Ġcirc adian +BI P +Ġfor ay +Ġcrypt ic +iz u +ime o +ĠTom ato +ĠH oms +ex amination +Ġqu arry +ĠVal iant +ĠJer icho +ĠIN CLUD +Ġ18 40 +5 19 +Ġres ists +Ġsnap shots +ĠSp ur +ĠAnt iqu +Log in +Ġbest selling +Ġant ic +ĠS utherland +ãĤ¢ ãĥ« +Ġ~ / +ĠP arm +è ĥ +P ages +int ensity +Ġimm obil +Ġ18 65 +zz o +Ġn ifty +Ġf entanyl +ĠPres ervation +op hen +Ġd arts +ĠD inosaur +po inters +ĠR ite +s uggest +aware ness +ĠSher idan +Ġst ances +Ġsor cery +Ġper jury +ĠNik ola +ie ver +Ġf iance +ĠJordan ian +ĠBall oon +Ġn ab +Ġk b +Ġhuman ities +ĠTan aka +hill ary +Ġconsult ancy +ĠZ ub +Ġrem ission +Ġconf id +CH Q +ĠF ug +Ġimpro vis +Y ep +/ _ +Ġunwilling ness +Ġport folios +05 5 +ĠInstruct or +aim an +Ġclaim ants +M bps +ĠBy e +re ceived +T weet +Ġind emn +ri z +am ara +N at +Ġeval uates +ĠL ur +ep ad +FO X +ĠTh ro +Ġrust y +Ġbed rock +ĠOp rah +J B +Ġmanip ulative +Ġwill ful +Ġrel apse +Ġext ant +The me +S ensor +ĠSt ability +go vern +Ġpo ppy +Ġkn ack +Ġins ulated +ĠT ile +ĠExt rem +Ġunt old +Ġconver ge +Ġref uel +ig roup +Ġdistort ions +Ġrav aged +Ġmechan ically +ĠRe illy +ĠN ose +ĠIncarn ation +ĠBeck y +abb ling +Ġt aco +Ġr ake +Ġmelanch oly +Ġillust rious +ĠDart mouth +Gu ide +ĠR azer +ĠBen z +Ult imate +ĠSur prise +Ġpage ant +off er +Who ever +Ġw iser +Ġchem ist +ĠHE LL +ĠBul 
k +Ġpl utonium +ĠCO VER +Ö ¼ +f ailed +Ġtire lessly +Ġinf ertility +ĠTr ident +ĠShow time +ĠC iv +V ice +requ ires +itt ance +Ġun controlled +interest ing +56 1 +Ġinnov ate +ateg ic +L ie +ĠS elling +U l +Ġsav ior +ĠT osh +Ġsw ast +P ASS +Ġr ink +Ġcard io +ĠI ro +ud i +Ġv antage +Ġv ans +ĠNi ño ++ = +Ġpropag ate +< ? +Ġmethod ological +204 39 +Ġtrig lycer +Ġing rained +ĠAn notations +arr anted +6 17 +ĠS odium +ĠA AC +techn ical +mult ipl +Ġ3 73 +å ĭ +Ġdec isively +Ġboost ers +Ġdessert s +ĠGren ade +Ġtest ifying +ĠSc ully +ID s +Ġlock down +ĠSc her +ĠR é +ĠWhit man +ĠRams ay +rem ote +Ġh ikers +ĠHy undai +Ġcons cientious +Ġcler ics +ĠSiber ian +ut i +is bury +Ġrel ayed +Ġqu artz +ĠC BI +seek ers +ull a +Ġweld ing +ĠSh al +ble acher +T ai +ĠSam son +Ġt umble +ĠInvest or +Ġsub contract +ĠShin ra +ow icz +j andro +d ad +Ġtermin ating +ĠNe ural +ä» £ +Ġleak age +ĠMid lands +ĠCaucas us +í ķ +c it +ll an +iv ably +ĠAlb ion +Ġ4 57 +Ġregist rations +Ġcomr ade +Ġclip board +0 47 +Ġdiscour aging +ĠO ops +Ad apt +Ġem path +n v +ĠPR OT +ĠDon n +ĠP ax +ĠB ayer +t is +Squ are +Ġfoot prints +part icip +ĠChile an +B rend +ind ucing +M agn +Ġclub house +ĠMagn um +Ġenc amp +ĠEth nic +uch a +ere y +Ġw atered +ĠCal ais +Ġcomplex ion +Ġsect s +Ġren ters +Ġbr as +oÄŁ an +Time out +Man agement +Ġinf ographic +P okemon +Cl ar +Ġloc ality +Ġfl ora +as el +P ont +Ġpop ulate +ĠO ng +Ġsubs istence +Ġa uctions +ĠMcA uliffe +ĠL OOK +br inger +Ġtit an +Ġmanif old +ĠâĹ ı +Ġcalibr ated +Ġcal iphate +ĠSH E +ĠCommission ers +ce ivable +j c +W inner +5 24 +Ġcond one +Other wise +Ġp iling +Ġem body +ĠCrime an +ut ics +ĠEx hibition +Ġ4 26 +e ering +Ġv ying +ĠH UGE +* =- +Ġprin cipled +à ¦ +Ġquir ks +ĠEdit ors +put ing +G ES +ĠF TA +ठ¾ +add on +ĠH AM +ĠFrie za +W oman +. $ +Ġc rib +ĠHer od +Ġtim ers +ĠSp aces +ĠMac intosh +at aka +Ġgl ide +Ġsmell ing +ĠB AL +Ġun su +Ġcond os +Ġbicy cl +ĠRev ival +55 3 +Ġjugg ling +H ug +ĠKardash ian +ĠBalk ans +mult iple +Ġnutrit ious +oc ry +19 00 +Ġinteg rates +Ġad joining +ĠF older +roll ment +ven ient +Ġu ber +y i +Ġwh iff +ĠJu ven +ĠB orough +net te +Ġb ilingual +ĠSp arks +ph thal +man ufact +Ġt outing +ĠPH I +Ke efe +Rew ard +Ġinf all +ĠTem per +typ ically +ĠNik ol +Ġregular s +Ġpseud onym +Ġexhib itions +Ġbl aster +Ġ40 9 +w arming +Ġrever ber +Ġrecip rocal +Ġ6 70 +ip ient +b ett +ĠBe gins +Ġit ching +ĠPh ar +Ass uming +Ġem itting +ĠML G +Ġbirth place +Ġt aunt +ĠL uffy +ĠAm it +Ġcir cled +ĠN ost +enn ett +Ġde forestation +ĠHist orically +ĠEvery day +Ġovert ake +79 2 +Ġn un +ĠLuc ia +Ġaccompan ies +ĠSe eking +ĠTr ash +an ism +R ogue +Ġnorth western +ĠSupplement al +ĠNY U +ĠF RI +ĠSat isf +x es +5 17 +Ġreass ured +Ġspor adic +Ġ7 01 +Ġmed ial +Ġcannabin oid +Ġbarbar ic +Ġep is +ĠExplos ive +ĠD ough +Ġuns olved +Support ed +Ġacknowled gment +sp awn +Ġkit chens +Ġ- = +talk ing +ic ist +ĠPeg asus +ĠPS U +Ġphot on +ĠAuthent ication +R G +@# & +76 2 +ĠCl air +Ġdi aper +Ġbr ist +ĠProsecut ors +ĠJ em +6 28 +ĠEvery where +ĠJean ne +equ ality +ãĥ© ãĥ³ +object s +ĠPel icans +Ġ39 2 +Ġbl u +b ys +ĠA go +Ġinstruction al +Ġdiscrim inating +ĠTR AN +ĠCorn el +ag os +Ġty re +Ġas piration +ĠBrid gewater +": - +! ". 
+ĠEn s +ĠCoc o +P ie +Ġdet ach +ĠC ouch +Ġphys ique +ĠOccup ations +osc opic +en ough +B uzz +App earance +Y P +Ġrac er +Ġcompl icity +r pm +T oy +Ġinterrupt s +ĠCat alyst +Ġut ilitarian +imp act +Ġsp aghetti +Ġp orous +Ġeste emed +Ġinc iner +ĠI OC +7 48 +Ġesp resso +ĠSm ile +abil ia +6 35 +Ġmathematic ian +Ġ4 24 +ĠK L +ĠH IP +Ġover heard +ĠT ud +ĠT ec +Ġqu izz +Ġfl attering +Ġcon n +âĢ İ +Ġatt aches +ĠR OS +ĠAC S +Ġt cp +ĠSh ame +sk ip +res pected +ĠTrin idad +gr ain +Ġfooth old +ĠUnch arted +ĠJul io +z l +av ored +ĠAn xiety +er rors +ĠCent auri +its ch +D addy +Ġclutch ing +ĠIm plement +ĠGut ierrez +Ġ7 60 +Ġtele portation +end ra +Ġrevers ible +st ros +Ad venture +08 3 +Ġliber ating +Ġas phalt +ĠSp end +AR DS +im sy +PR ES +ĠEmer ging +Ġwild fires +Ġtechn ologically +Ġem its +ĠART ICLE +Ġirregular ities +Ġcher ish +çī Ī +Ġst ink +ĠR ost +Econom ic +Ġcough ing +ĠMcC ann +pro perties +ilant ro +Ġreneg oti +Trans lation +Ġin quest +ĠGra pe +oot ers +gu i +ĠSwords man +ace ae +h itting +Ġr c +Ġexert ed +ĠS AP +it ent +Ġperil ous +Ġobsc urity +Ġassass inate +Ġab original +Ġresc uing +ĠSh attered +lock ing +all ion +Ch anging +ĠHar rington +ĠB ord +ĠAfgh ans +Jam ie +aret z +ĠAugust us +Ġ38 6 +8 30 +Ġj og +ok ingly +Tr igger +ĠH OR +Stat istics +Ġviewers hip +Ġadd itives +h ur +Ġmaxim izing +ĠR ove +ĠLou ie +ĠBuck et +ĠCHR IST +ou sel +Ġstre aks +ir ted +Ġt ert +Ġcolonial ism +Ġbur ying +y k +Cond ition +ĠDPR K +By Id +75 1 +âĹ ¼ +Ġwor risome +Ġvoc ational +sl ice +Ġsa ils +ĠCorrection al +95 4 +Ġt ul +K id +l uster +Ġfam ilial +ĠSp it +ĠEp iscopal +Specific ally +ĠVol cano +run s +q s +Ġve tted +Ġcram med +t rop +here r +Thank fully +Ġper cussion +Ġor anges +Ġround up +Ġ4 99 +x ious +Char acters +ĠZion ism +ĠR ao +ÃĽ ÃĽ +W F +Ġunintention al +ONE Y +Gr ab +Com mercial +Ġglut amate +ĠMcK enna +ru ciating +ning ton +ih u +Ch an +ĠSw ap +Ġleaf lets +Ġfunction ally +er ous +F arm +Ġcal oric +ĠLiter ally +con cert +Ġshe nan +Ġrep aid +ey es +Ġbas hing +ĠG orge +Ġcollabor ations +Ġun account +itch ie +Ġteam work +pp elin +Ġpip ing +Ġmin ced +Ġd iam +ri eg +Ġmasc ara +Ġsuck er +ĠMo ons +App s +ĠPe ck +Ġper v +ĠFl oat +o ley +ĠN ish +im ize +Ġarom atic +u in +end ish +! 
/ +ĠB icycle +ĠAS IC +ile ged +ĠQuad ro +ios yn +Ġlock out +ĠW ink +SP EC +Attempt s +Ġseed ed +red o +ias is +Ġsn ag +ãĥķ ãĤ© +ãĤ ¶ +Ġground ing +Ġrelie ver +Ġfrivol ous +ĠG ifts +ĠF aces +Es pecially +Ġmicrobi ome +im ag +ĠSch l +ĠP les +ĠBle ach +ĠIr win +ĠE aton +ĠDisc iple +Ġmultipl ication +Ġcoer ced +Ġ4 19 +st h +E vil +B omb +Ġex orc +Ġstag gered +L ESS +Ġinert ia +ĠED IT +Ġgo b +Tr aditional +Ġclass y +Lear y +ĠP AGE +yr s +Ġtrans porter +Ġmat ured +Ġhij ab +Ġbi ome +Where as +Ġex termination +ĠT ues +ĠT akeru +ĠAud rey +er ial +ĠAd en +aff les +Ġnarciss istic +ĠB aird +UT F +I re +ĠCon nie +Ch amp +Ġwhis pering +ĠH att +D K +Ġdis infect +Ġdeduct ed +Ġpart ake +Ġdown grade +ĠEs ports +ĠContin uing +Ġdemocr atically +icro bial +itt a +Ġlim estone +Ġexempt ed +ĠFren zy +H erm +7 28 +Ġfled gling +Met a +765 61 +69 3 +% : +w ake +5 26 +ĠDis cipline +Ġvirgin ity +ĠLeg ions +ĠFrank ie +int ent +Ġrest rooms +ĠRou ter +da q +Ġobjection able +âĨ ij +w ark +ĠRah ul +g ain +activ ation +abs olute +ĠAccess ed +Ġ24 00 +ogg les +Ġsecond ly +ĠDEF ENSE +Ġpost age +wra pper +sh arp +7 29 +Ġcommun icates +Ġadd on +ĠMil itia +H ong +Ġsl umped +ĠJP EG +ĠI car +ad ish +68 1 +Ġmaj esty +ĠWolf gang +ĠEl astic +u per +Ġv iz +Ġunconscious ly +ĠST D +ĠS ass +Ġflower ing +ĠHel ic +ĠDra per +ĠAm ateur +Ġman ure +Ġdis ingen +ĠLe i +br ing +9 49 +Ġinhib ited +Ġhead quartered +Ġen igmatic +�� � +Ġred ress +R H +Ġratt led +Ġd iction +l io +ĠT BA +ĠSN AP +C alling +Ġfasc ists +ĠD ove +iew icz +0 36 +Ġco asts +ĠR ect +Ġ) ] +L ot +6 29 +ĠS EM +ĠPeters en +ĠExpl ain +ĠBo ards +ĠBe zos +ĠJ ournals +Ġ20 24 +p arser +Ġmist rust +Ġgr ate +ĠL ocked +bo a +S aint +g aming +Ġvow el +in ately +bl ow +All ah +Ġun matched +Ġb ordering +ĠExp end +n r +Or acle +rou ch +Ġcont iguous +ac us +Ġdist raught +58 1 +Ġanat omical +O X +ap ixel +8 33 +ĠPL US +Ġres usc +Ġab iding +57 3 +Ġvac ancies +Em ily +Ġhyp othal +ĠWer ner +ĠWe e +ĠDJ s +5 13 +Ġwitch craft +Ġac upuncture +ent ary +benef it +Product s +ĠP SP +ĠMP G +ĠJ inn +ĠJ arrett +Ġ4 45 +ĠIm aging +ĠP yth +Fin ish +Ġte x +Ġjuven iles +Ġhero ism +Ġdoubt less +ĠA ki +ĠT end +ĠPatri arch +Ġbit ters +ĠTele communications +it atively +ag na +Ġr g +ĠS OLD +Ġcomp ulsion +ĠN asa +ĠKath ryn +Ġmillion aires +Ġintrins ically +Ġbolst ered +time out +fl o +Ġtut or +p our +Stat ement +Ġ{ * +ĠRud olph +ĠKimber ly +rog ens +adi q +] + +Ġindign ation +Ġfract uring +ĠRe leases +ĠGr ain +pro tein +L ago +Ġvac ations +Ġboot ed +ĠTH REE +ĠH G +oresc ence +Ġt f +Ġso ar +iosyn cr +Ġgl ances +ĠSp oon +ĠJ ury +ĠCow boy +Ġcreat ively +Hig her +Ġsolic itor +Ġhaw k +ac io +89 6 +Ġsuperf lu +Ġbombs hell +ct ure +Ġbroker age +Ġraid ing +Ġf rench +Ġang led +Trans action +ĠGen ocide +u pe +ĠHait ian +57 2 +! 
: +Ġunwitting ly +iter ator +sc roll +Ġtall ied +Ġbi omedical +ĠC ARD +Ġe uphem +Ġbrain storm +a quin +K o +Mic helle +ĠR unes +ĠBall istic +ud ers +Ġmod esty +ĠiP ads +ĠEzek iel +Y E +Ġstars hip +Ġpower fully +Ġper l +ĠSh ade +ĠQu art +ĠE EG +Ġfisher man +OS ED +ĠTyp ical +df x +Ġmes hes +Ġet ched +worth iness +Ġtopp led +Ġ3 96 +or ius +We iss +Ġmy sql +ĠVal halla +Ù Ĵ +le asing +Ġrec omp +rap nel +S el +04 3 +Ġder ailed +ĠGu ides +IR T +Ġde human +ĠBritt any +" )) +Ġex claim +Ġb alk +Ġ8 40 +CLA IM +int el +L AB +Ġpe gged +Ġast roph +sm oking +Ġrig ging +Ġfix ation +Ġcat apult +ins ide +ĠC ascade +ĠBolshe vik +G aza +Dep th +Ġloud spe +Ġalmond s +me yer +l eness +j en +f resh +Ġunbeat en +ĠSqu id +ĠPres umably +Tim er +B W +Ġro sters +Ġell ipt +ĠHar riet +dat abase +ĠMut ual +ĠComm odore +uk ed +kn ife +ĠCOMM UN +h ya +Ġmel ts +arch ives +Ġrat ification +Ġmultip lying +Ġinter oper +Ġasc ert +w ings +ver ting +ĠScorp ion +ay e +ĠPorts mouth +ĠM TA +n it +iaz ep +Ġqu arantine +Ġslides how +Ġcent imeters +Ġsyn opsis +Ġsp ate +th irst +Ġnom inating +ĠMel vin +Pre view +Ġthro b +Ġgener ational +ĠRad ius +rest ling +put able +aw ar +N ECT +Ġunlaw fully +ĠRevel ations +Wik ipedia +sur v +Ġeye ing +ij n +ĠF W +Ġbr unt +Ġinter stellar +Ġcl itor +ĠCroat ian +ĠCh ic +ev a +ĠDis app +ĠA kin +iner ies +d ust +Interest ed +Ġgen esis +ĠE ucl +ö n +p icking +Ġmut ated +Ġdisappro ve +ĠHD L +Ġ6 25 +Ì ¶ +c ancer +Ġsqu ats +Ġle vers +Disc uss += ] +D ex +ĠVIDE OS +A UD +Ġtrans act +ĠKin ect +ĠK uala +ĠC yp +7 47 +Ġsh attering +Ġarsen ic +ĠInt ake +ĠAngel o +ĠQu it +ĠK he +Ġ18 93 +M aker +0 29 +ĠPain ting +Dis able +9 16 +Ġanal ges +Ġtact ile +Ġprop hes +Ġd iced +ĠTravel s +ĠHe ader +ĠClub s +Ass istant +Ġinc rim +Ġd ips +Ġcruc ifix +ĠShan ahan +ĠInter pret +Ġ40 90 +al ogy +abb a +Ġsimul ac +hus band +S IM +Ġrecy cle +uc er +ed ged +Ġre naissance +ĠBomb ay +Cath olic +ĠL INE +ĠCl othing +re ports +Ġpl aus +Ġd ag +ĠM ace +Z I +Ġintr uder +ĠVeter inary +g ru +Ġsne aky +ĠS ie +ĠC innamon +P OSE +Ġcou rier +ĠC NS +Ġemanc ipation +s it +Ġplay through +ĠFac ilities +v irt +ĠG auntlet +Thom pson +Ġunbeliev ably +Param eters +Ġst itching +ign e +ĠTH ESE +Priv acy +Ġshenan igans +Ġvit ri +ĠVal id +59 1 +Ń · +ĠProt otype +ink a +SC P +ĠT id +è Ī +old ed +Ġindividual ity +Ġbark ing +Ġm ars +ĠW D +Ġ8 20 +Ġt ir +Ġsl apping +Ġdisgr untled +ĠAng ola +ri us +ĠTorn ado +ĠTh urs +Ġcapt cha +Ġang st +ĠP og +ĠAssass ins +ĠAd idas +Ġjoy ful +Ġwh ining +Emer gency +Ġphosph orus +Ġatt rition +oph on +ĠTimber wolves +ĠJ ah +ĠBr inging +ĠW ad +ĠEn sure +oh l +ĠX ie +omm el +c mp +Ġz ipper +Ġrel at +ĠCor ridor +m ilo +T ING +Av g +Ġcro pped +] } +Ġr aged +ĠLump ur +ĠGuer rero +our ke +N ut +Ġoff sets +og lu +dr m +Ġmort als +lat able +Ġdismiss ive +ä¸ ī +Ġthro ats +Ġchips et +ĠSpot light +Catal og +art ist +G b +Ġch illy +Ġst oked +Ġ3 74 +W ard +L atin +Ġf iasco +Ġble ach +Ġb rav +Enh anced +Ġin oc +ĠFior ina +_ > +Ġle ukemia +Ġel uc +Ġannoun cer +ĠLith uan +ĠArm ageddon +å ĩ +Len in +ĠR uk +Ġpe pp +ĠRom antic +ĠP IT +ĠInter stellar +ĠAt kinson +R aid +J s +Go al +C ourse +Ġvan ishing +es ley +ĠR ounds +Els a +59 3 +Ġredund ancy +ĠST AND +Ġprop hetic +Ġhabit able +ry u +Ġfaint ly +M ODE +Ġfl anked +IR C +Aw esome +Ġsp urious +ĠZ ah +ĠMS G +Ġsh ading +Ġmotiv ational +ĠSant ana +ĠS PR +Ġexc ruciating +om ial +ĠM iko +ĠLe opard +A byss +Ġ[ | +d irty +Ġbath s +Ġdem oral +and re +P B +Ġun ification +Ġsac rament +Ġ[ & +Ġpric eless +Ġgel atin +Ġeman ating +ĠAll aah +98 6 +Ġout burst +Ġer as +ĠX VI +ĠSP I +O tt +ĠLaz arus +PL IED +F lying 
+blog s +W isconsin +R aven +Ġreb ate +Ġcreep s +ĠSp an +ĠPain ter +ĠKir a +ĠAm os +ĠCor vette +Cons umer +ĠRec over +ck i +Ġpes ky +ĠIn vention +Compan ies +Ġchalleng ers +ad emic +ĠUkrain ians +ĠNeuro log +ĠFors aken +Ġent rants +Ġemb attled +Ġdef unct +ĠGlac ier +Ġpo isons +ĠH orses +m akes +ĠD irt +Ġ4 23 +hh h +ĠTrans formation +QUI RE +................ .. +Ġtrave ller +ĠSe xy +ĠK ern +ip olar +Ġransom ware +oooooooo oooooooo +E c +rub y +Prof essional +ĠOut break +arg ument +G rey +ĠFif a +ĠCH O +ĠFOR M +ĠAm trak +- [ +Ġcr adle +Ġantioxid ants +ãģ®å ® +7 36 +ĠNAS L +ĠContribut ions +Ind iana +ĠST EP +C SS +Ġsal ient +Ġall ocations +yr ights +Ġm ashed +ĠCut ter +Sex ual +Ġp ounded +Ġfan base +Ġc asc +ĠTrans parency +Ġanaly tic +ĠSummon er +× ŀ +ĠAD C +det ail +Ġvan quished +Ġcr abs +ar ie +Dest roy +ĠS ack +Ġtrans istor +Al abama +ĠK oen +ĠFisher ies +c one +Ġannex ed +ĠM GM +es a +Ġf aked +ĠCong ratulations +Ġhind ered +Ġcorrection al +ĠI TV +lee ve +Ġin appropriately +lic ks +Ġtresp ass +Ġp aws +Ġnegoti ator +ĠChrist ensen +lim its +ĠDian ne +Ġeleg ance +ĠContract s +an ke +Ob j +Ġvigil ance +Ġcast les +ĠN AD +ĠHol o +Ġemph atically +ĠTit us +ĠServ ing +ĠRich ie +ĠP igs +5 68 +Ġanim osity +ĠAtt ributes +ĠU riel +M Q +my ra +ĠApplic ant +Ġpsychiat rists +ĠV ij +ĠAb by +ag ree +P ush +Ġk Wh +hib a +Ġinc ite +ĠWe asley +ĠTax i +minist ic +hy per +ĠF arn +Ġ6 01 +ĠNation wide +F ake +95 2 +Ġma ize +Ġinteract ed +Ġtransition ed +Ġparas itic +Ġharm onic +Ġdec aying +Ġbas eless +ns ics +Ġtrans pired +Ġabund antly +ĠFore nsic +Ġtread mill +ĠJ av +ab and +Ġssh d +Ġfront man +ĠJak arta +oll er +dro ps +ĠSERV ICES +rompt u +oph ical +h ospital +bled on +6 45 +Ġmid range +ĠEV ENT +cul ated +raw led +Ġper ched +Ġover board +ĠPe el +ĠP wr +ĠCar th +ĠCOM PLE +co e +sh all +Ġdeter rence +M ETHOD +ĠAbs ent +M EN +Ġs ill +ĠLE VEL +Y ork +Ġsin ners +ĠOP EC +ĠN ur +ĠDesign s +se lection +Ġunw orthy +CH A +Ġstreng thens +88 3 +ed ly +Ġslic ing +Ġmal nutrition +Ġfilm making +ĠPol k +ur ated +Ġ4 21 +bre akers +!' 
" +Ġwet lands +ĠDisc rimination +Ġallow able +Ġste ered +ĠSic ily +S AM +Ġmust ache +Ġm ids +Ġcl ipped +Ġcirc ulate +Ġbr ittle +ĠBuild ings +ra ised +ĠRound up +Ġwealth ier +Ġoverw rite +Ġover powered +ĠGerr ard +s ites +PD ATED +Ġacute ly +ĠGam ble +Ġp im +ĠK us +Typ ically +De ploy +ĠMoroc can +p otion +com be +Ġvigil ante +Ġ36 3 +St ew +ĠB agg +Ġres ided +ĠSp o +Ġrem nant +Ġempt iness +br ainer +Ġout patient +pri ority +Ġle ptin +ĠPay ton +ĠGle aming +ĠS hed +ĠPol o +ĠMormon ism +rest ricted +arl ane +w x +Ġcreat ine +ĠAn on +ĠST UD +ĠJ UL +ĠT ee +5 28 +08 9 +Ġhat ched +Dis patch +ĠCompos ite +Ġ45 1 +p uff +ĠX COM +ĠOr n +ĠTH ANK +END ED +ĠAshe ville +Ġà ľ +Ġman go +ĠS lightly +world ly +ĠW ander +ĠExp and +ĠCh r +M ist +Ġorthodox y +ĠUN ESCO +reg ate +Else where +k ie +ir led +Ġtopp le +Ġadopt ive +ĠLeg s +d ress +ĠS agan +b are +ĠGl ou +Cr unch +Ġhelp ers +Ġchron ically +ĠH uma +1 0000 +Ġaccommod ating +äº Ķ +Ġwrink les +Ġdod ged +four th +Ġpre con +Ġcompress or +ĠK are +Ġev ict +ĠWar wick +im ar +Ġmodern ization +Ġband wagon +Ġref uted +Ġnet ted +ĠNa ples +ĠGen ie +per ors +Ġfield ed +Ġde re +ĠPar ables +le es +Ġtr out +asp ers +Ġn ihil +Ġhapp iest +Ġflo ppy +ĠLo ft +ĠHe ard +Ġun ison +Ġl ug +ĠRed mond +class ic +Supp orters +SH IP +G MT +Ġfue lled +ç IJ +Ġd d +ĠEmin em +Ġ18 97 +NY SE +Ġsecret aries +ĠF IA +ĠCanaver al +F avorite +Ġp omp +Ġdetain ee +ers hip +aim on +i our +ĠA pex +Ġplant ations +am ia +ac ion +R ust +Ġtow ed +ĠTru ly +5 77 +Ġshel tered +r ider +W o +Ġl air +ĠInt elligent +impro ve +m atically +Ġet iquette +ad ra +all o +ĠJun o +any thing +ĠStru ggle +ĠPred ict +ĠGr imes +ĠAMER ICA +ct x +ĠSit uation +W OOD +Ġsol uble +me ier +Ġintoler able +ang ering +Ġun interrupted +Ġtool tip +Ġinterrog ated +Ġgun ned +ĠSne ak +æŃ ¦ +Ġt ether +Ġcr umble +L ens +Ġclust ered +ĠSy l +ĠHas an +Ġdystop ian +w ana +Ġjoy stick +ĠTh ib +amm u +Tom orrow +5 46 +Ġoverc ame +Ġminim ized +cept or +Run ner +ENG TH +ĠBrend a +ĠAchieve ments +Ġtor ches +Ġrapp ort +ĠInvestig ator +ĠHand ling +rel ation +g rey +8 15 +Ġk cal +ĠComm ands +d q +Ġcur ls +Ġbe arer +Ġcyn icism +it ri +ĠUse ful +B ee +D CS +Ġab ras +P ract +BIL ITIES +7 12 +Ġdebug ger +Ġdebt or +ĠL ia +ĠK ers +Ġexacerb ate +ĠSt acy +ĠB land +ĠSc enes +Ġbranch ing +âĸĪâĸĪâĸĪâĸĪ âĸĪâĸĪâĸĪâĸĪ +ape ake +Ġs alsa +Ġmish and +ĠKon ami +ĠN ib +Ġanecd ote +Ġagree able +Ï ī +ĠNath aniel +ĠHe isman +ĠB eware +Ġ18 86 +spect ive +69 1 +5 22 +Ġinhib its +Ġhas hing +Ġ18 89 +å° Ĩ +v ich +P ure +Ġsolid ly +Ġaspir in +im aru +Ġstreet car +ĠU CS +ĠJ udd +Ġflash backs +p ins +Ġ14 40 +ĠUN HCR +ĠSym ptoms +T IT +5 38 +F ra +% ); +Ġo oz +Ġcur few +Ġcal med +Ġparticip ates +Te X +Ġnons ensical +Ġfull back +ĠDe L +mon key +h ari +Ġmetabol ites +Ġloot ed +ĠAL WAYS +ĠB CC +L t +oc het +B one +Ġveto ed +Ġg cc +ĠCL ICK +Ġ18 88 +s af +Ġstiff ness +Ġlow ly +ĠGe h +vers on +ors et +Ġun foreseen +Ġan esthesia +ĠOpt ical +Ġrecon structed +ĠT up +sh ows +NEW S +ĠNewsp aper +ĠA SA +ter a +N umbers +Ġinexpl icable +× ij +Ġhard ness +unt arily +ĠA cer +grad ient +ARD IS +Ġwood land +Ġmetaph ors +ĠWem bley +ĠPa vel +phil is +Ġre writing +Ġpercept ual +Ġ10 70 +worm s +ĠDown s +Ġunsur prisingly +Ġtag ging +fl ame +Ġlit res +Ġboun ces +ĠB abe +sh ut +Ġoverd oses +ĠShe ila +ĠCh au +ĠBl ess +Capt ure +ĠSign ificant +ĠSc ion +Ġ38 9 +ĠMc H +ĠTitan ium +ĠMe al +amed a +ag ents +agg ressive +B illy +76 3 +ĠS aying +DER R +it one +Coll ins +B ound +Ġbol ted +ĠDM CA +95 3 +Ġun iqueness +Ġep igen +un ci +ant am +Ġreck oning +ch airs +OG R +ĠSen egal +Ġ18 62 +re levant +Ġ ¯ +Ġpharm acies 
+ĠG eral +v ier +Y an +OR PG +Ġrab id +b ending +ĠUN ITED +Ġ4 65 +As sembly +Ġwe ep +Ġbe hest +ĠMother s +ĠJ ace +h id +Ġwh irlwind +ĠUN IVERS +Ġut opian +Ġkidn ap +Ph ilipp +K in +89 3 +Ġlivest ream +ĠM ISS +Ġsub versive +ĠTechn iques +ĠJUST ICE +ĠB ASE +Ġ38 7 +Ġassail ants +ĠHard core +Ġsprink led +ĠP se +é ļ +print ed +ĠH au +OR GE +ĠT OUR +Ġl aced +Ġit ch +G iving +Ġport ed +78 1 +//////////////// //////////////// +bre eding +Ġlog ger +ĠH OL +inn ie +First ly +Ġembry onic +Ġdeleg ated +p ai +O IL +Ġcentr ally +ĠR x +ĠSc outing +D utch +Ġhe reditary +ĠCru iser +s at +5 29 +ĠMar riott +other mal +Ġprohib itions +E arn +ĠSt ab +ĠColleg es +ĠBel ief +st retched +ĠL H +ĠEntity Item +C IA +Ġun rem +Ġlaure ate +Ġdenomin ations +sum mary +h ler +S pect +ĠK laus +ĠBe ans +Ġins ur +ĠPA X +Ġfield er +ĠV et +ĠSp arrow +z ie +ĠS Q +ĠMond ays +ĠOff line +ĠLer ner +ĠExt ensions +Ire land +Ġpatron age +Ġcontrast ed +ĠMan ia +h irt +Mos cow +Ġcondem ns +ĠAn ge +Ġcomp osing +ĠPe pe +ĠP addock +Ġheter ogeneity +Ġide ologically +Ġf ishes +Ġcur sing +ĠR utherford +ĠFlo ating +ĠAm elia +Te a +Syn opsis +Ġstun ts +Ġbe ad +Ġstock ing +ĠM ILL +ob ook +mass ive +\ < +Ġh ump +ĠPref erences +Engine Debug +ge ist +ĠNiet o +ome ver +ish y +eval uate +col onial +Altern ative +ĠGo Pro +ĠV ortex +ĠNET WORK +ans ky +Sec ure +ĠTh rust +Sn ake +Ġparcel s +Ġsam urai +Ġactress es +N ap +M F +ifer ation +Be er +5 23 +ĠI ly +oint ment +P ing +Ġstri ped +ĠMell on +oss ession +Ġneut ron +end ium +Ġa ph +ĠFlav oring +Ġ38 3 +Ġrespons iveness +ĠJ indal +ĠHitch cock +Den ver +ĠDRAG ON +sm anship +ĠDu pl +Ġs ly +Ġweb cam +ĠTw ain +ĠDar ling +ili ate +cons umer +D IT +Ġnames ake +Ġun orthodox +Ġfun er +ĠPL oS +ĠCONTR OL +ozy g +ogl obin +F ACE +ER G +ĠD ia +ĠF iesta +ce le +0 34 +Ġencl ave +âĸ¬ âĸ¬ +on ement +al ist +M and +Ġhome grown +ĠF ancy +Ġconcept ions +ĠCont ains +ure en +Ġreiter ate +Ġme ager +Ġinstall ments +Sp awn +6 27 +Ġphot oc +ĠCab rera +ĠRos enthal +ĠLans ing +is ner +Ġinvest s +ĠUFO s +EX P +Hard ware +Ġtr agically +Ġconced es +ie ft +ch am +bor gh +ĠSch r +ĠMel anie +ĠH oy +Ġvisit ation +Ġid iosyncr +Ġfract ions +Ġfore skin +ob os +Ġpo aching +ĠVI EW +Ġstimul ates +ĠG ork +can on +M IC +ĠNem esis +ĠInd ra +ĠDM V +Ġ5 29 +Ġinspect ing +Ġgrand ma +ĠW hedon +ĠSh ant +ĠP urg +ik an +ĠT eg +ĠCL R +z ac +Vict oria +ĠVer ify +ion ics +Ġpart ying +ĠM ou +col our +Ġtestim onies +l ations +Ġpress uring +hi ro +ac ers +Ġf id +ang ler +ĠCS I +Ġhere after +Ġdiss idents +report ing +iph any +che v +Ġsol itude +Ġl obe +Ġind is +Ġcred ential +re cent +ad ult +ĠNir vana +ĠFranch ise +L ayer +H yp +ĠBerks hire +Ġwill s +t if +Ġtot em +ĠJud ah +rep air +Inst ant +5 48 +Ġemb assies +Ġbott leneck +Ġb ount +Ġtyp ew +ĠAl vin +j ing +im ilar +R ush +Ġbr im +ĠHEL P +A im +] ' +Ġpass ively +Ġbound ed +ĠR ated +Ġcriminal ity +Ġbiom ark +Ġdisp atcher +ĠTow ards +Ġ+ ++ +right eous +f rog +ĠP anc +C arter +0 32 +æ© Ł +Ġult raviolet +ĠLic ensed +ĠT ata +ĠBl essing +ĠG AM +Ġchem ically +ĠSe af +ĠRE LE +ĠMerc enary +capital ist +Ġform ulations +Ġann ihilation +ĠVer b +ĠAr gon +Ġun loaded +Ġmorp hed +Ġconqu ering +back er +I ELD +Ġtheft s +Ġfront runner +ĠRoy ale +ĠFund amental +el ight +C hip +necess ary +ay n +ĠSl ip +Ġ4 48 +cern ed +P ause +Ġshock ingly +ĠAB V +Ġcomp osure +7 33 +ĠMotors port +ah ime +Mur ray +M ach +Ġgr ids +Ġdeb ian +Ġfurther more +Ġdexter ity +ĠCollect ions +os lov +il age +b j +ĠMont eneg +Ġstrut Connector +Ġmassac res +Ġbrief s +fet ched +uv ian +ol ition +Fail ure +emon ic +Ġfl ared +Ġclaim ant +Ġc ures +Ġgive aways +ĠSubst 
ance +al ions +Ġcr inge +ĠK ul +Ġarist ocracy +ĠUl ster +ol ated +h ousing +ĠM IS +Ġgl ared +ĠWil helm +ne eds +lam bda +build ers +ĠV IS +Ġradi ator +ĠGhost busters +Ġ4 36 +act ual +Ġher ds +ç a +watch ing +Ġcounter ing +Ch arge +Ġchar red +Ġwar heads +Ġiod ine +ĠM acy +04 1 +Ġdepart ures +ĠS ins +Ġdy ed +ĠConcept s +g ado +7 13 +Ġquot ations +Ġg ist +ĠChrist y +Ġant igen +ĠHem p +ĠD rawn +ĠB arg +ez vous +Ġp aternity +Ġar du +ĠAnch orage +ĠR ik +Ġover loaded +ĠUs ername +ĠTam my +ĠN au +ĠCell ular +Ġw aning +Ġrod ent +ĠWor cester +il ts +ĠT ad +Ġdwell ings +Ġbull ish +4 31 +Ġretali ate +Ġmig raine +ĠChev ron +CH ECK +Ġdon key +c rim +SP A +ĠAn alog +Ġmarqu ee +ĠHa as +B ir +ĠGD DR +ĠDownload s +Ġwill power +ĠFor th +ĠRecord ed +Ġimp ossibility +ĠLog ged +ĠFr anks +ĠR att +in itions +Ġclean ers +Ġsore ly +Ġflick ering +ĠEx amination +c atching +allow een +Ms g +Ġdun no +F a +Ġdys ph +c razy +.' '. +Ġmain line +Ġc s +Ġp tr +ĠW ally +ig un +95 1 +ĠBig foot +f ights +Ġretrie ving +J r +Ġdupl ication +ĠExpl an +Ġrel ational +Ġqu aint +Ġbisc uits +Ġad o +Ġsh udder +Ġantid ote +blood ed +ks h +Ġsa uces +Ġrein vest +Ġdispens ary +ĠD iver +Ġ9 000 +stud ent +Ġin separ +esc ap +Ġtodd lers +ĠGP IO +ĠAss ignment +head ers +Ġlack luster +Ġab ack +95 6 +Ġtool bar +7 45 +Ġo ust +Ġcontempl ation +ĠPRES IDENT +Ġ4 58 +==== == +Ġguarantee ing +ĠHe ist +ĠCann es +Ļ ½ +Ġcollabor ator +ĠAm p +Ġg ou +ĠSH ALL +st ories +78 3 +Ġmobil ized +Ġbro od +ĠL U +ĠðŁ ij +Ġref in +ĠAnthrop ology +v ind +ill i +Ġwarrant ies +ĠB abel +Ġsw ath +Ġc aches +Ġantagon ists +art ifacts +Ġhot ly +ĠSt arts +ĠG ö +z ag +!! !!! +Ġsc ourge +Ġcons piring +ru its +re verse +ĠShe en +ĠJes uit +ĠGiov anni +ad ies +Ġbutt ocks +ear cher +ac an +Ġvolley ball +Ġshroud ed +Ġscore board +b ats +ĠI PM +Ġass es +Ġde regulation +ĠTe legram +ĠReb oot +Ġ7 000 +ĠCan ary +Ġk ernels +ĠFranç ois +ĠD uff +ĠP on +ĠLe ica +ĠGar min +Ġor phans +ĠClaud ia +Ġcal endars +ĠLe ilan +ent o +R ocket +Ġbr unch +ĠHaw king +ain ers +Ġsens ibilities +Ġk W +ĠK and +Ġre claimed +Ġinteresting ly +× © +rom y +J M +ĠEnhance ment +b ush +Sk ip +Ġrapp ers +Ġg azing +p edia +ath lon +Rev olution +Ġsn ipers +Ġre verted +Ġconglomer ate +T erry +79 4 +Ġhars her +Ġdes olate +ĠHit man +Comm ission +Ġ( / +âĢ¦ ." 
+Com par +Ġampl ification +om inated +Ġreg ress +ĠColl ider +Ġinform ants +Ġg azed diff --git a/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2-vocab.json b/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2-vocab.json new file mode 100644 index 0000000000000000000000000000000000000000..1f1d9aaca301414e7f6c9396df506798ff4eb9a6 --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2-vocab.json @@ -0,0 +1 @@ +{"!": 0, "\"": 1, "#": 2, "$": 3, "%": 4, "&": 5, "'": 6, "(": 7, ")": 8, "*": 9, "+": 10, ",": 11, "-": 12, ".": 13, "/": 14, "0": 15, "1": 16, "2": 17, "3": 18, "4": 19, "5": 20, "6": 21, "7": 22, "8": 23, "9": 24, ":": 25, ";": 26, "<": 27, "=": 28, ">": 29, "?": 30, "@": 31, "A": 32, "B": 33, "C": 34, "D": 35, "E": 36, "F": 37, "G": 38, "H": 39, "I": 40, "J": 41, "K": 42, "L": 43, "M": 44, "N": 45, "O": 46, "P": 47, "Q": 48, "R": 49, "S": 50, "T": 51, "U": 52, "V": 53, "W": 54, "X": 55, "Y": 56, "Z": 57, "[": 58, "\\": 59, "]": 60, "^": 61, "_": 62, "`": 63, "a": 64, "b": 65, "c": 66, "d": 67, "e": 68, "f": 69, "g": 70, "h": 71, "i": 72, "j": 73, "k": 74, "l": 75, "m": 76, "n": 77, "o": 78, "p": 79, "q": 80, "r": 81, "s": 82, "t": 83, "u": 84, "v": 85, "w": 86, "x": 87, "y": 88, "z": 89, "{": 90, "|": 91, "}": 92, "~": 93, "\u00a1": 94, "\u00a2": 95, "\u00a3": 96, "\u00a4": 97, "\u00a5": 98, "\u00a6": 99, "\u00a7": 100, "\u00a8": 101, "\u00a9": 102, "\u00aa": 103, "\u00ab": 104, "\u00ac": 105, "\u00ae": 106, "\u00af": 107, "\u00b0": 108, "\u00b1": 109, "\u00b2": 110, "\u00b3": 111, "\u00b4": 112, "\u00b5": 113, "\u00b6": 114, "\u00b7": 115, "\u00b8": 116, "\u00b9": 117, "\u00ba": 118, "\u00bb": 119, "\u00bc": 120, "\u00bd": 121, "\u00be": 122, "\u00bf": 123, "\u00c0": 124, "\u00c1": 125, "\u00c2": 126, "\u00c3": 127, "\u00c4": 128, "\u00c5": 129, "\u00c6": 130, "\u00c7": 131, "\u00c8": 132, "\u00c9": 133, "\u00ca": 134, "\u00cb": 135, "\u00cc": 136, "\u00cd": 137, "\u00ce": 138, "\u00cf": 139, "\u00d0": 140, "\u00d1": 141, "\u00d2": 142, "\u00d3": 143, "\u00d4": 144, "\u00d5": 145, "\u00d6": 146, "\u00d7": 147, "\u00d8": 148, "\u00d9": 149, "\u00da": 150, "\u00db": 151, "\u00dc": 152, "\u00dd": 153, "\u00de": 154, "\u00df": 155, "\u00e0": 156, "\u00e1": 157, "\u00e2": 158, "\u00e3": 159, "\u00e4": 160, "\u00e5": 161, "\u00e6": 162, "\u00e7": 163, "\u00e8": 164, "\u00e9": 165, "\u00ea": 166, "\u00eb": 167, "\u00ec": 168, "\u00ed": 169, "\u00ee": 170, "\u00ef": 171, "\u00f0": 172, "\u00f1": 173, "\u00f2": 174, "\u00f3": 175, "\u00f4": 176, "\u00f5": 177, "\u00f6": 178, "\u00f7": 179, "\u00f8": 180, "\u00f9": 181, "\u00fa": 182, "\u00fb": 183, "\u00fc": 184, "\u00fd": 185, "\u00fe": 186, "\u00ff": 187, "\u0100": 188, "\u0101": 189, "\u0102": 190, "\u0103": 191, "\u0104": 192, "\u0105": 193, "\u0106": 194, "\u0107": 195, "\u0108": 196, "\u0109": 197, "\u010a": 198, "\u010b": 199, "\u010c": 200, "\u010d": 201, "\u010e": 202, "\u010f": 203, "\u0110": 204, "\u0111": 205, "\u0112": 206, "\u0113": 207, "\u0114": 208, "\u0115": 209, "\u0116": 210, "\u0117": 211, "\u0118": 212, "\u0119": 213, "\u011a": 214, "\u011b": 215, "\u011c": 216, "\u011d": 217, "\u011e": 218, "\u011f": 219, "\u0120": 220, "\u0121": 221, "\u0122": 222, "\u0123": 223, "\u0124": 224, "\u0125": 225, "\u0126": 226, "\u0127": 227, "\u0128": 228, "\u0129": 229, "\u012a": 230, "\u012b": 231, "\u012c": 232, "\u012d": 233, "\u012e": 234, "\u012f": 235, "\u0130": 236, "\u0131": 237, "\u0132": 238, "\u0133": 239, "\u0134": 240, "\u0135": 241, "\u0136": 242, "\u0137": 243, "\u0138": 244, 
"\u0139": 245, "\u013a": 246, "\u013b": 247, "\u013c": 248, "\u013d": 249, "\u013e": 250, "\u013f": 251, "\u0140": 252, "\u0141": 253, "\u0142": 254, "\u0143": 255, "\u0120t": 256, "\u0120a": 257, "he": 258, "in": 259, "re": 260, "on": 261, "\u0120the": 262, "er": 263, "\u0120s": 264, "at": 265, "\u0120w": 266, "\u0120o": 267, "en": 268, "\u0120c": 269, "it": 270, "is": 271, "an": 272, "or": 273, "es": 274, "\u0120b": 275, "ed": 276, "\u0120f": 277, "ing": 278, "\u0120p": 279, "ou": 280, "\u0120an": 281, "al": 282, "ar": 283, "\u0120to": 284, "\u0120m": 285, "\u0120of": 286, "\u0120in": 287, "\u0120d": 288, "\u0120h": 289, "\u0120and": 290, "ic": 291, "as": 292, "le": 293, "\u0120th": 294, "ion": 295, "om": 296, "ll": 297, "ent": 298, "\u0120n": 299, "\u0120l": 300, "st": 301, "\u0120re": 302, "ve": 303, "\u0120e": 304, "ro": 305, "ly": 306, "\u0120be": 307, "\u0120g": 308, "\u0120T": 309, "ct": 310, "\u0120S": 311, "id": 312, "ot": 313, "\u0120I": 314, "ut": 315, "et": 316, "\u0120A": 317, "\u0120is": 318, "\u0120on": 319, "im": 320, "am": 321, "ow": 322, "ay": 323, "ad": 324, "se": 325, "\u0120that": 326, "\u0120C": 327, "ig": 328, "\u0120for": 329, "ac": 330, "\u0120y": 331, "ver": 332, "ur": 333, "\u0120u": 334, "ld": 335, "\u0120st": 336, "\u0120M": 337, "'s": 338, "\u0120he": 339, "\u0120it": 340, "ation": 341, "ith": 342, "ir": 343, "ce": 344, "\u0120you": 345, "il": 346, "\u0120B": 347, "\u0120wh": 348, "ol": 349, "\u0120P": 350, "\u0120with": 351, "\u01201": 352, "ter": 353, "ch": 354, "\u0120as": 355, "\u0120we": 356, "\u0120(": 357, "nd": 358, "ill": 359, "\u0120D": 360, "if": 361, "\u01202": 362, "ag": 363, "ers": 364, "ke": 365, "\u0120\"": 366, "\u0120H": 367, "em": 368, "\u0120con": 369, "\u0120W": 370, "\u0120R": 371, "her": 372, "\u0120was": 373, "\u0120r": 374, "od": 375, "\u0120F": 376, "ul": 377, "ate": 378, "\u0120at": 379, "ri": 380, "pp": 381, "ore": 382, "\u0120The": 383, "\u0120se": 384, "us": 385, "\u0120pro": 386, "\u0120ha": 387, "um": 388, "\u0120are": 389, "\u0120de": 390, "ain": 391, "and": 392, "\u0120or": 393, "igh": 394, "est": 395, "ist": 396, "ab": 397, "rom": 398, "\u0120N": 399, "th": 400, "\u0120com": 401, "\u0120G": 402, "un": 403, "op": 404, "00": 405, "\u0120L": 406, "\u0120not": 407, "ess": 408, "\u0120ex": 409, "\u0120v": 410, "res": 411, "\u0120E": 412, "ew": 413, "ity": 414, "ant": 415, "\u0120by": 416, "el": 417, "os": 418, "ort": 419, "oc": 420, "qu": 421, "\u0120from": 422, "\u0120have": 423, "\u0120su": 424, "ive": 425, "ould": 426, "\u0120sh": 427, "\u0120this": 428, "nt": 429, "ra": 430, "pe": 431, "ight": 432, "art": 433, "ment": 434, "\u0120al": 435, "ust": 436, "end": 437, "--": 438, "all": 439, "\u0120O": 440, "ack": 441, "\u0120ch": 442, "\u0120le": 443, "ies": 444, "red": 445, "ard": 446, "\u00e2\u0122": 447, "out": 448, "\u0120J": 449, "\u0120ab": 450, "ear": 451, "iv": 452, "ally": 453, "our": 454, "ost": 455, "gh": 456, "pt": 457, "\u0120pl": 458, "ast": 459, "\u0120can": 460, "ak": 461, "ome": 462, "ud": 463, "The": 464, "\u0120his": 465, "\u0120do": 466, "\u0120go": 467, "\u0120has": 468, "ge": 469, "'t": 470, "\u0120U": 471, "rou": 472, "\u0120sa": 473, "\u0120j": 474, "\u0120but": 475, "\u0120wor": 476, "\u0120all": 477, "ect": 478, "\u0120k": 479, "ame": 480, "\u0120will": 481, "ok": 482, "\u0120whe": 483, "\u0120they": 484, "ide": 485, "01": 486, "ff": 487, "ich": 488, "pl": 489, "ther": 490, "\u0120tr": 491, "..": 492, "\u0120int": 493, "ie": 494, "ure": 495, "age": 496, "\u0120ne": 497, "ial": 498, "ap": 499, "ine": 500, 
"ice": 501, "\u0120me": 502, "\u0120out": 503, "ans": 504, "one": 505, "ong": 506, "ions": 507, "\u0120who": 508, "\u0120K": 509, "\u0120up": 510, "\u0120their": 511, "\u0120ad": 512, "\u01203": 513, "\u0120us": 514, "ated": 515, "ous": 516, "\u0120more": 517, "ue": 518, "og": 519, "\u0120St": 520, "ind": 521, "ike": 522, "\u0120so": 523, "ime": 524, "per": 525, ".\"": 526, "ber": 527, "iz": 528, "act": 529, "\u0120one": 530, "\u0120said": 531, "\u0120-": 532, "are": 533, "\u0120your": 534, "cc": 535, "\u0120Th": 536, "\u0120cl": 537, "ep": 538, "ake": 539, "able": 540, "ip": 541, "\u0120cont": 542, "\u0120which": 543, "ia": 544, "\u0120im": 545, "\u0120about": 546, "\u0120were": 547, "very": 548, "ub": 549, "\u0120had": 550, "\u0120en": 551, "\u0120comp": 552, ",\"": 553, "\u0120In": 554, "\u0120un": 555, "\u0120ag": 556, "ire": 557, "ace": 558, "au": 559, "ary": 560, "\u0120would": 561, "ass": 562, "ry": 563, "\u0120\u00e2\u0122": 564, "cl": 565, "ook": 566, "ere": 567, "so": 568, "\u0120V": 569, "ign": 570, "ib": 571, "\u0120off": 572, "\u0120te": 573, "ven": 574, "\u0120Y": 575, "ile": 576, "ose": 577, "ite": 578, "orm": 579, "\u0120201": 580, "\u0120res": 581, "\u0120man": 582, "\u0120per": 583, "\u0120other": 584, "ord": 585, "ult": 586, "\u0120been": 587, "\u0120like": 588, "ase": 589, "ance": 590, "ks": 591, "ays": 592, "own": 593, "ence": 594, "\u0120dis": 595, "ction": 596, "\u0120any": 597, "\u0120app": 598, "\u0120sp": 599, "int": 600, "ress": 601, "ations": 602, "ail": 603, "\u01204": 604, "ical": 605, "\u0120them": 606, "\u0120her": 607, "ount": 608, "\u0120Ch": 609, "\u0120ar": 610, "\u0120if": 611, "\u0120there": 612, "\u0120pe": 613, "\u0120year": 614, "av": 615, "\u0120my": 616, "\u0120some": 617, "\u0120when": 618, "ough": 619, "ach": 620, "\u0120than": 621, "ru": 622, "ond": 623, "ick": 624, "\u0120over": 625, "vel": 626, "\u0120qu": 627, "\u010a\u010a": 628, "\u0120sc": 629, "reat": 630, "ree": 631, "\u0120It": 632, "ound": 633, "port": 634, "\u0120also": 635, "\u0120part": 636, "fter": 637, "\u0120kn": 638, "\u0120bec": 639, "\u0120time": 640, "ens": 641, "\u01205": 642, "ople": 643, "\u0120what": 644, "\u0120no": 645, "du": 646, "mer": 647, "ang": 648, "\u0120new": 649, "----": 650, "\u0120get": 651, "ory": 652, "ition": 653, "ings": 654, "\u0120just": 655, "\u0120into": 656, "\u01200": 657, "ents": 658, "ove": 659, "te": 660, "\u0120people": 661, "\u0120pre": 662, "\u0120its": 663, "\u0120rec": 664, "\u0120tw": 665, "ian": 666, "irst": 667, "ark": 668, "ors": 669, "\u0120work": 670, "ade": 671, "ob": 672, "\u0120she": 673, "\u0120our": 674, "wn": 675, "ink": 676, "lic": 677, "\u012019": 678, "\u0120He": 679, "ish": 680, "nder": 681, "ause": 682, "\u0120him": 683, "ons": 684, "\u0120[": 685, "\u0120ro": 686, "form": 687, "ild": 688, "ates": 689, "vers": 690, "\u0120only": 691, "oll": 692, "\u0120spe": 693, "ck": 694, "ell": 695, "amp": 696, "\u0120acc": 697, "\u0120bl": 698, "ious": 699, "urn": 700, "ft": 701, "ood": 702, "\u0120how": 703, "hed": 704, "\u0120'": 705, "\u0120after": 706, "aw": 707, "\u0120att": 708, "ov": 709, "ne": 710, "\u0120play": 711, "erv": 712, "ict": 713, "\u0120could": 714, "itt": 715, "\u0120am": 716, "\u0120first": 717, "\u01206": 718, "\u0120act": 719, "\u0120$": 720, "ec": 721, "hing": 722, "ual": 723, "ull": 724, "\u0120comm": 725, "oy": 726, "old": 727, "ces": 728, "ater": 729, "\u0120fe": 730, "\u0120bet": 731, "we": 732, "iff": 733, "\u0120two": 734, "ock": 735, "\u0120back": 736, ").": 737, "ident": 738, "\u0120under": 739, "rough": 
740, "sel": 741, "xt": 742, "\u0120may": 743, "round": 744, "\u0120po": 745, "ph": 746, "iss": 747, "\u0120des": 748, "\u0120most": 749, "\u0120did": 750, "\u0120add": 751, "ject": 752, "\u0120inc": 753, "fore": 754, "\u0120pol": 755, "ont": 756, "\u0120again": 757, "clud": 758, "tern": 759, "\u0120know": 760, "\u0120need": 761, "\u0120cons": 762, "\u0120co": 763, "\u0120.": 764, "\u0120want": 765, "\u0120see": 766, "\u01207": 767, "ning": 768, "iew": 769, "\u0120This": 770, "ced": 771, "\u0120even": 772, "\u0120ind": 773, "ty": 774, "\u0120We": 775, "ath": 776, "\u0120these": 777, "\u0120pr": 778, "\u0120use": 779, "\u0120because": 780, "\u0120fl": 781, "ng": 782, "\u0120now": 783, "\u0120\u00e2\u0122\u0135": 784, "com": 785, "ise": 786, "\u0120make": 787, "\u0120then": 788, "ower": 789, "\u0120every": 790, "\u0120Un": 791, "\u0120sec": 792, "oss": 793, "uch": 794, "\u0120em": 795, "\u0120=": 796, "\u0120Re": 797, "ied": 798, "rit": 799, "\u0120inv": 800, "lect": 801, "\u0120supp": 802, "ating": 803, "\u0120look": 804, "man": 805, "pect": 806, "\u01208": 807, "row": 808, "\u0120bu": 809, "\u0120where": 810, "ific": 811, "\u0120years": 812, "ily": 813, "\u0120diff": 814, "\u0120should": 815, "\u0120rem": 816, "Th": 817, "In": 818, "\u0120ev": 819, "day": 820, "'re": 821, "rib": 822, "\u0120rel": 823, "ss": 824, "\u0120def": 825, "\u0120right": 826, "\u0120sy": 827, "),": 828, "les": 829, "000": 830, "hen": 831, "\u0120through": 832, "\u0120Tr": 833, "__": 834, "\u0120way": 835, "\u0120don": 836, "\u0120,": 837, "\u012010": 838, "ased": 839, "\u0120ass": 840, "ublic": 841, "\u0120reg": 842, "\u0120And": 843, "ix": 844, "\u0120very": 845, "\u0120includ": 846, "other": 847, "\u0120imp": 848, "oth": 849, "\u0120sub": 850, "\u0120\u00e2\u0122\u0136": 851, "\u0120being": 852, "arg": 853, "\u0120Wh": 854, "==": 855, "ible": 856, "\u0120does": 857, "ange": 858, "ram": 859, "\u01209": 860, "ert": 861, "ps": 862, "ited": 863, "ational": 864, "\u0120br": 865, "\u0120down": 866, "\u0120many": 867, "aking": 868, "\u0120call": 869, "uring": 870, "ities": 871, "\u0120ph": 872, "ics": 873, "als": 874, "\u0120dec": 875, "ative": 876, "ener": 877, "\u0120before": 878, "ility": 879, "\u0120well": 880, "\u0120much": 881, "erson": 882, "\u0120those": 883, "\u0120such": 884, "\u0120ke": 885, "\u0120end": 886, "\u0120But": 887, "ason": 888, "ting": 889, "\u0120long": 890, "ef": 891, "\u0120think": 892, "ys": 893, "\u0120bel": 894, "\u0120sm": 895, "its": 896, "ax": 897, "\u0120own": 898, "\u0120prov": 899, "\u0120set": 900, "ife": 901, "ments": 902, "ble": 903, "ward": 904, "\u0120show": 905, "\u0120pres": 906, "ms": 907, "omet": 908, "\u0120ob": 909, "\u0120say": 910, "\u0120Sh": 911, "ts": 912, "ful": 913, "\u0120eff": 914, "\u0120gu": 915, "\u0120inst": 916, "und": 917, "ren": 918, "cess": 919, "\u0120ent": 920, "\u0120You": 921, "\u0120good": 922, "\u0120start": 923, "ince": 924, "\u0120made": 925, "tt": 926, "stem": 927, "olog": 928, "up": 929, "\u0120|": 930, "ump": 931, "\u0120hel": 932, "vern": 933, "ular": 934, "ually": 935, "\u0120ac": 936, "\u0120mon": 937, "\u0120last": 938, "\u0120200": 939, "10": 940, "\u0120stud": 941, "ures": 942, "\u0120Ar": 943, "self": 944, "ars": 945, "meric": 946, "ues": 947, "cy": 948, "\u0120min": 949, "ollow": 950, "\u0120col": 951, "io": 952, "\u0120mod": 953, "\u0120count": 954, "\u0120Com": 955, "hes": 956, "\u0120fin": 957, "air": 958, "ier": 959, "\u00e2\u0122\u0136": 960, "read": 961, "ank": 962, "atch": 963, "ever": 964, "\u0120str": 965, "\u0120point": 966, "ork": 
967, "\u0120New": 968, "\u0120sur": 969, "ool": 970, "alk": 971, "ement": 972, "\u0120used": 973, "ract": 974, "ween": 975, "\u0120same": 976, "oun": 977, "\u0120Al": 978, "ci": 979, "\u0120differe": 980, "\u0120while": 981, "--------": 982, "\u0120game": 983, "cept": 984, "\u0120sim": 985, "...": 986, "\u0120inter": 987, "ek": 988, "\u0120report": 989, "\u0120produ": 990, "\u0120still": 991, "led": 992, "ah": 993, "\u0120here": 994, "\u0120world": 995, "\u0120though": 996, "\u0120num": 997, "arch": 998, "imes": 999, "ale": 1000, "\u0120Se": 1001, "\u0120If": 1002, "//": 1003, "\u0120Le": 1004, "\u0120ret": 1005, "\u0120ref": 1006, "\u0120trans": 1007, "ner": 1008, "ution": 1009, "ters": 1010, "\u0120take": 1011, "\u0120Cl": 1012, "\u0120conf": 1013, "way": 1014, "ave": 1015, "\u0120going": 1016, "\u0120sl": 1017, "ug": 1018, "\u0120Americ": 1019, "\u0120spec": 1020, "\u0120hand": 1021, "\u0120between": 1022, "ists": 1023, "\u0120De": 1024, "oot": 1025, "It": 1026, "\u0120ear": 1027, "\u0120against": 1028, "\u0120high": 1029, "gan": 1030, "az": 1031, "ather": 1032, "\u0120exp": 1033, "\u0120op": 1034, "\u0120ins": 1035, "\u0120gr": 1036, "\u0120help": 1037, "\u0120requ": 1038, "ets": 1039, "ins": 1040, "\u0120Pro": 1041, "ism": 1042, "\u0120found": 1043, "land": 1044, "ata": 1045, "uss": 1046, "ames": 1047, "\u0120person": 1048, "\u0120great": 1049, "pr": 1050, "\u0120sign": 1051, "\u0120An": 1052, "'ve": 1053, "\u0120somet": 1054, "\u0120ser": 1055, "hip": 1056, "\u0120run": 1057, "\u0120:": 1058, "\u0120ter": 1059, "irect": 1060, "\u0120follow": 1061, "\u0120det": 1062, "ices": 1063, "\u0120find": 1064, "12": 1065, "\u0120mem": 1066, "\u0120cr": 1067, "ered": 1068, "ex": 1069, "\u0120ext": 1070, "uth": 1071, "ense": 1072, "co": 1073, "\u0120team": 1074, "ving": 1075, "ouse": 1076, "ash": 1077, "att": 1078, "ved": 1079, "\u0120system": 1080, "\u0120As": 1081, "der": 1082, "ives": 1083, "min": 1084, "\u0120lead": 1085, "\u0120Bl": 1086, "cent": 1087, "\u0120around": 1088, "\u0120govern": 1089, "\u0120cur": 1090, "velop": 1091, "any": 1092, "\u0120cour": 1093, "alth": 1094, "ages": 1095, "ize": 1096, "\u0120car": 1097, "ode": 1098, "\u0120law": 1099, "\u0120read": 1100, "'m": 1101, "con": 1102, "\u0120real": 1103, "\u0120support": 1104, "\u012012": 1105, "....": 1106, "\u0120really": 1107, "ness": 1108, "\u0120fact": 1109, "\u0120day": 1110, "\u0120both": 1111, "ying": 1112, "\u0120serv": 1113, "\u0120For": 1114, "\u0120three": 1115, "\u0120wom": 1116, "\u0120med": 1117, "ody": 1118, "\u0120They": 1119, "50": 1120, "\u0120exper": 1121, "ton": 1122, "\u0120each": 1123, "akes": 1124, "\u0120che": 1125, "\u0120cre": 1126, "ines": 1127, "\u0120rep": 1128, "19": 1129, "gg": 1130, "illion": 1131, "\u0120grou": 1132, "ute": 1133, "ik": 1134, "We": 1135, "get": 1136, "ER": 1137, "\u0120met": 1138, "\u0120says": 1139, "ox": 1140, "\u0120during": 1141, "ern": 1142, "ized": 1143, "ared": 1144, "\u0120fam": 1145, "ically": 1146, "\u0120happ": 1147, "\u0120Is": 1148, "\u0120char": 1149, "med": 1150, "vent": 1151, "\u0120gener": 1152, "ient": 1153, "ple": 1154, "iet": 1155, "rent": 1156, "11": 1157, "ves": 1158, "ption": 1159, "\u012020": 1160, "formation": 1161, "\u0120cor": 1162, "\u0120offic": 1163, "ield": 1164, "\u0120too": 1165, "ision": 1166, "\u0120inf": 1167, "\u0120Z": 1168, "the": 1169, "oad": 1170, "\u0120public": 1171, "\u0120prog": 1172, "ric": 1173, "**": 1174, "\u0120war": 1175, "\u0120power": 1176, "view": 1177, "\u0120few": 1178, "\u0120loc": 1179, "\u0120different": 1180, 
"\u0120state": 1181, "\u0120head": 1182, "'ll": 1183, "\u0120poss": 1184, "\u0120stat": 1185, "ret": 1186, "ants": 1187, "\u0120val": 1188, "\u0120iss": 1189, "\u0120cle": 1190, "ivers": 1191, "anc": 1192, "\u0120expl": 1193, "\u0120another": 1194, "\u0120Q": 1195, "\u0120av": 1196, "thing": 1197, "nce": 1198, "Wh": 1199, "\u0120child": 1200, "\u0120since": 1201, "ired": 1202, "less": 1203, "\u0120life": 1204, "\u0120develop": 1205, "ittle": 1206, "\u0120dep": 1207, "\u0120pass": 1208, "\u00e3\u0125": 1209, "\u0120turn": 1210, "orn": 1211, "This": 1212, "bers": 1213, "ross": 1214, "\u0120Ad": 1215, "\u0120fr": 1216, "\u0120resp": 1217, "\u0120second": 1218, "oh": 1219, "\u0120/": 1220, "\u0120disc": 1221, "\u0120&": 1222, "\u0120something": 1223, "\u0120comple": 1224, "\u0120ed": 1225, "\u0120fil": 1226, "\u0120month": 1227, "aj": 1228, "uc": 1229, "\u0120government": 1230, "\u0120without": 1231, "\u0120leg": 1232, "\u0120dist": 1233, "\u0120put": 1234, "\u0120quest": 1235, "ann": 1236, "\u0120prot": 1237, "20": 1238, "\u0120never": 1239, "ience": 1240, "\u0120level": 1241, "\u0120art": 1242, "\u0120things": 1243, "\u0120might": 1244, "\u0120effect": 1245, "\u0120contro": 1246, "\u0120cent": 1247, "\u012018": 1248, "\u0120allow": 1249, "\u0120belie": 1250, "chool": 1251, "ott": 1252, "\u0120incre": 1253, "\u0120feel": 1254, "\u0120result": 1255, "\u0120lot": 1256, "\u0120fun": 1257, "ote": 1258, "\u0120ty": 1259, "erest": 1260, "\u0120contin": 1261, "\u0120using": 1262, "\u0120big": 1263, "201": 1264, "\u0120ask": 1265, "\u0120best": 1266, "\u0120)": 1267, "IN": 1268, "\u0120opp": 1269, "30": 1270, "\u0120number": 1271, "iness": 1272, "St": 1273, "lease": 1274, "\u0120ca": 1275, "\u0120must": 1276, "\u0120direct": 1277, "\u0120gl": 1278, "\u0120<": 1279, "\u0120open": 1280, "\u0120post": 1281, "\u0120come": 1282, "\u0120seem": 1283, "ording": 1284, "\u0120week": 1285, "ately": 1286, "ital": 1287, "\u0120el": 1288, "riend": 1289, "\u0120far": 1290, "\u0120tra": 1291, "inal": 1292, "\u0120pri": 1293, "\u0120US": 1294, "\u0120place": 1295, "\u0120form": 1296, "\u0120told": 1297, "\":": 1298, "ains": 1299, "ature": 1300, "\u0120Trump": 1301, "\u0120stand": 1302, "\u0120#": 1303, "ider": 1304, "\u0120Fr": 1305, "\u0120next": 1306, "\u0120soc": 1307, "\u0120pur": 1308, "\u0120let": 1309, "\u0120little": 1310, "\u0120hum": 1311, "\u0120i": 1312, "ron": 1313, "15": 1314, "\u012015": 1315, "\u0120commun": 1316, "\u0120mark": 1317, "\u0120There": 1318, "\u0120wr": 1319, "\u0120That": 1320, "\u0120information": 1321, "ways": 1322, "\u0120bus": 1323, "app": 1324, "\u0120invest": 1325, "me": 1326, "\u0120hard": 1327, "ained": 1328, "ead": 1329, "\u0120import": 1330, "\u0120appro": 1331, "\u0120test": 1332, "\u0120tri": 1333, "\u0120rest": 1334, "osed": 1335, "\u0120full": 1336, "\u0120care": 1337, "\u0120Sp": 1338, "\u0120case": 1339, "ON": 1340, "\u0120sk": 1341, "\u0120less": 1342, "\u0120+": 1343, "\u0120partic": 1344, "\u0120Pl": 1345, "ably": 1346, "uck": 1347, "ished": 1348, "chn": 1349, "be": 1350, "\u0120list": 1351, "ator": 1352, "\u0120top": 1353, "\u0120adv": 1354, "\u0120Be": 1355, "ruct": 1356, "\u0120dem": 1357, "ration": 1358, "ling": 1359, "gy": 1360, "reen": 1361, "ger": 1362, "\u0120home": 1363, "\u0120left": 1364, "\u0120better": 1365, "\u0120data": 1366, "\u012011": 1367, "\u0120attack": 1368, "\u0120proble": 1369, "line": 1370, "ards": 1371, "\u0120beh": 1372, "ral": 1373, "\u0120How": 1374, "\u0120She": 1375, "arge": 1376, "\u0120--": 1377, "://": 1378, "\u0120bro": 1379, 
"\u0120Ph": 1380, "ats": 1381, "\u0120build": 1382, "ww": 1383, "ided": 1384, "aim": 1385, "ases": 1386, "ency": 1387, "\u0120main": 1388, "ined": 1389, "\u0120including": 1390, "\u0120{": 1391, "\u0120got": 1392, "\u0120interest": 1393, "\u0120keep": 1394, "\u0120X": 1395, "\u0120eas": 1396, "aining": 1397, "\u0120class": 1398, "\u00e2\u0122\u00a6": 1399, "\u0120No": 1400, "\u0120var": 1401, "\u0120small": 1402, "ample": 1403, "AT": 1404, "\u0120ide": 1405, "\u0120So": 1406, "\u0120rece": 1407, "\u0120polit": 1408, "\u0120mov": 1409, "\u0120plan": 1410, "\u0120percent": 1411, "iving": 1412, "\u0120camp": 1413, "\u0120pay": 1414, "14": 1415, "sc": 1416, "ised": 1417, "\u0120unt": 1418, "oney": 1419, "ploy": 1420, "====": 1421, "\u0120didn": 1422, "\u0120Ind": 1423, "els": 1424, "ertain": 1425, "\u0120pos": 1426, "____": 1427, "iver": 1428, "\u0120process": 1429, "\u0120program": 1430, "ified": 1431, "\u0120Rep": 1432, "16": 1433, "uro": 1434, "ology": 1435, "atter": 1436, "ina": 1437, "\u0120name": 1438, "\u0120All": 1439, "\u0120four": 1440, "\u0120return": 1441, "vious": 1442, "bs": 1443, "\u0120called": 1444, "\u0120move": 1445, "\u0120Sc": 1446, "ird": 1447, "\u0120group": 1448, "\u0120bre": 1449, "\u0120men": 1450, "\u0120cap": 1451, "ten": 1452, "ee": 1453, "\u0120dri": 1454, "leg": 1455, "here": 1456, "uthor": 1457, "\u0120pat": 1458, "\u0120current": 1459, "ides": 1460, "\u0120pop": 1461, "to": 1462, "ention": 1463, "\u0120always": 1464, "\u0120mil": 1465, "\u0120women": 1466, "\u012016": 1467, "\u0120old": 1468, "iven": 1469, "raph": 1470, "\u0120Or": 1471, "ror": 1472, "ently": 1473, "\u0120near": 1474, "\u0120Ex": 1475, "ream": 1476, "sh": 1477, "\u012014": 1478, "\u0120free": 1479, "ission": 1480, "stand": 1481, "\u0120Con": 1482, "ality": 1483, "used": 1484, "13": 1485, "\u0120design": 1486, "\u0120change": 1487, "\u0120chang": 1488, "\u0120bo": 1489, "\u0120vis": 1490, "ember": 1491, "\u0120book": 1492, "ready": 1493, "\u0120kill": 1494, "25": 1495, "pped": 1496, "\u0120away": 1497, "\u0120able": 1498, "\u0120country": 1499, "\u0120const": 1500, "arn": 1501, "\u0120order": 1502, "AR": 1503, "ior": 1504, "ium": 1505, "orth": 1506, "18": 1507, "ailable": 1508, "\u0120sw": 1509, "\u0120million": 1510, "\u012013": 1511, "atic": 1512, "ted": 1513, "\u0120Go": 1514, "\u0120oper": 1515, "eng": 1516, "\u0120thing": 1517, "ajor": 1518, "conom": 1519, "\u0120Comm": 1520, "\u0120why": 1521, "ured": 1522, "ural": 1523, "\u0120school": 1524, "by": 1525, "\u0120Mar": 1526, "\u0120aff": 1527, "\u0120days": 1528, "\u0120ann": 1529, "ush": 1530, "ane": 1531, "If": 1532, "eg": 1533, "\u0120prof": 1534, "\u0120health": 1535, "outh": 1536, "But": 1537, "ional": 1538, ".,": 1539, "\u0120sol": 1540, "\u0120already": 1541, "\u012030": 1542, "\u0120charact": 1543, "He": 1544, "\u0120friend": 1545, "ES": 1546, "ians": 1547, "icle": 1548, "'d": 1549, "\u0120On": 1550, "\u0120least": 1551, "\u0120prom": 1552, "\u0120dr": 1553, "\u0120hist": 1554, "ither": 1555, "\u0120est": 1556, "iqu": 1557, "17": 1558, "son": 1559, "\u0120tell": 1560, "\u0120talk": 1561, "ohn": 1562, "oint": 1563, "lection": 1564, "AN": 1565, "\u0120until": 1566, "augh": 1567, "\u0120later": 1568, "\u0120ve": 1569, "\u0120view": 1570, "ending": 1571, "ived": 1572, "\u0120word": 1573, "ware": 1574, "\u0120cost": 1575, "\u0120enough": 1576, "\u0120give": 1577, "\u0120United": 1578, "\u0120techn": 1579, "arent": 1580, "OR": 1581, "\u0120par": 1582, "\u0120Dr": 1583, "\u01202016": 1584, "rist": 1585, "ering": 1586, "\u0120\u00c2": 1587, 
"\u0120large": 1588, "side": 1589, "acy": 1590, "ccess": 1591, "\u0120win": 1592, "\u0120important": 1593, "\u0120199": 1594, "\u0120doesn": 1595, "\u012017": 1596, "\u0120business": 1597, "\u0120clear": 1598, "\u0120rese": 1599, "\",": 1600, "ury": 1601, "\u0120equ": 1602, "aster": 1603, "alf": 1604, "\u0120American": 1605, "nect": 1606, "\u0120expect": 1607, "iversity": 1608, "\u0120occ": 1609, "\u0120Fl": 1610, "\u0120kind": 1611, "\u0120mean": 1612, "\u0120past": 1613, "\u0120dev": 1614, "\u0120bas": 1615, "let": 1616, "raft": 1617, "\u0120organ": 1618, "\u0120del": 1619, "\u0120perform": 1620, "\u0120story": 1621, "\u0120season": 1622, "\u0120Col": 1623, "\u0120claim": 1624, "\u0120came": 1625, "\u0120within": 1626, "\u0120line": 1627, "\u0120project": 1628, "\u0120At": 1629, "\u0120control": 1630, "ended": 1631, "\u0120Sy": 1632, "\u0120air": 1633, "ization": 1634, "\u0120*": 1635, "ley": 1636, "\u0120money": 1637, "idd": 1638, "You": 1639, "for": 1640, "\u0120family": 1641, "\u0120making": 1642, "\u0120bit": 1643, "\u0120police": 1644, "\u0120happen": 1645, "\u0120vers": 1646, "ony": 1647, "uff": 1648, "\u0120When": 1649, "\u0120sit": 1650, "ideo": 1651, "lf": 1652, "ison": 1653, "\u0120sure": 1654, "gin": 1655, "\u0120appear": 1656, "\u0120light": 1657, "\u0120es": 1658, "of": 1659, "\u0120water": 1660, "\u0120times": 1661, "not": 1662, "\u0120grow": 1663, "\u0120company": 1664, "\u0120Te": 1665, "ows": 1666, "\u0120mar": 1667, "ource": 1668, "iol": 1669, "arm": 1670, "br": 1671, "\u0120example": 1672, "\u0120conc": 1673, "\u0120fore": 1674, "\u0120To": 1675, "pro": 1676, "EN": 1677, "ries": 1678, "\u012025": 1679, "\u0120Can": 1680, "ney": 1681, "\u0120actually": 1682, "\u0120ever": 1683, "urity": 1684, "aken": 1685, "aps": 1686, "\u0120tax": 1687, "\u0120major": 1688, "ama": 1689, "\u0120often": 1690, "eral": 1691, "\u0120human": 1692, "\u0120job": 1693, "ister": 1694, "\u0120available": 1695, "ocr": 1696, "enn": 1697, "aid": 1698, "ivid": 1699, "\u0120record": 1700, "?\"": 1701, "\u0120sing": 1702, "\u0120Am": 1703, "idence": 1704, "\u0120news": 1705, "ster": 1706, "\u0120econom": 1707, "\u0120following": 1708, "\u0120Br": 1709, "ising": 1710, "\u0120hour": 1711, "most": 1712, "ument": 1713, "\u0120sex": 1714, "\u0120desc": 1715, "\u0120become": 1716, "\u0120Ed": 1717, "\u0120took": 1718, "\u0120having": 1719, "\u0120product": 1720, "ault": 1721, "As": 1722, "aring": 1723, "\u0120means": 1724, "\u0120hop": 1725, "une": 1726, "\u0120cho": 1727, "\u0120certain": 1728, "\u0120non": 1729, "\u0120deal": 1730, "24": 1731, "lement": 1732, "oci": 1733, "ene": 1734, "\u0120side": 1735, "\u0120Pr": 1736, "\u0120May": 1737, "\u0120reason": 1738, "ued": 1739, "ched": 1740, "ulation": 1741, "\u0120elect": 1742, "\u0120official": 1743, "\u0120possible": 1744, "\u0120hold": 1745, "ands": 1746, "ots": 1747, "\u0120city": 1748, "ories": 1749, "\u0120sever": 1750, "\u0120children": 1751, "\u0120once": 1752, "\u0120activ": 1753, "ler": 1754, "\u0120night": 1755, "itions": 1756, "\u0120John": 1757, "ape": 1758, "play": 1759, "\u0120done": 1760, "\u0120lim": 1761, "\u0120working": 1762, "\u0120Pres": 1763, "orld": 1764, "eb": 1765, "\u0120Co": 1766, "\u0120body": 1767, "ails": 1768, "utes": 1769, "\u0120Mr": 1770, "\u0120whether": 1771, "\u0120author": 1772, "rop": 1773, "\u0120proper": 1774, "\u0120seen": 1775, ");": 1776, "\u0120fac": 1777, "\u0120Su": 1778, "\u0120cond": 1779, "iting": 1780, "\u0120course": 1781, "\u0120}": 1782, "----------------": 1783, "aign": 1784, "\u0120event": 1785, 
"\u0120eng": 1786, "\u0120pot": 1787, "\u0120intern": 1788, "iam": 1789, "\u0120short": 1790, "empt": 1791, "\u00e3\u0124": 1792, "\u0120God": 1793, "ilar": 1794, "80": 1795, "\u0120orig": 1796, "IS": 1797, "ourn": 1798, "ability": 1799, "itive": 1800, "\u0120dam": 1801, "\u0120100": 1802, "\u0120press": 1803, "\u0120doing": 1804, "\u0120protect": 1805, "ring": 1806, "\u0120thought": 1807, "\u0120question": 1808, "rew": 1809, "\u0120War": 1810, "\u0120several": 1811, "\u0120State": 1812, "\u0120given": 1813, "\u0120fund": 1814, "\u0120Tw": 1815, "\u0120went": 1816, "ances": 1817, "work": 1818, "por": 1819, "my": 1820, "40": 1821, "\u0120arg": 1822, "artment": 1823, "ustom": 1824, "\u0120polic": 1825, "\u0120meet": 1826, "\u0120creat": 1827, "22": 1828, "\u0120States": 1829, "\u0120games": 1830, "raw": 1831, "uture": 1832, "\u0120understand": 1833, "urs": 1834, "\u0120Ob": 1835, "lish": 1836, "sy": 1837, "\u0120makes": 1838, "\u0120won": 1839, "agon": 1840, "\u0120htt": 1841, "\u0120love": 1842, "ential": 1843, "\u0120complete": 1844, "par": 1845, "\u0120Im": 1846, "AL": 1847, "\u0120account": 1848, "\u00c2\u0142": 1849, "ored": 1850, "vert": 1851, "\u0120ident": 1852, "\u01202015": 1853, "\u0120others": 1854, "\u0120Min": 1855, "iber": 1856, "verage": 1857, "There": 1858, "itional": 1859, "dd": 1860, "\u0120prob": 1861, "\u0120young": 1862, "\u0120along": 1863, "\u0120according": 1864, "\u0120yet": 1865, "\u0120members": 1866, "\u0120What": 1867, "oid": 1868, "\u0120Man": 1869, "And": 1870, "\u0120among": 1871, "ai": 1872, "\u0120employ": 1873, "\u0120Res": 1874, "\u0120>": 1875, "\u0120invol": 1876, "\u0120low": 1877, "af": 1878, "\u0120Car": 1879, "\u0120hig": 1880, "\u0120One": 1881, "\u0120Sec": 1882, "ination": 1883, "\u0120likely": 1884, "\u0120ant": 1885, "aged": 1886, "\u0120Russ": 1887, "\u0120ben": 1888, "\u0120rele": 1889, "For": 1890, "back": 1891, "\u0120Not": 1892, "\u0120president": 1893, "ball": 1894, "\u0120access": 1895, "ividual": 1896, "\u0120Dem": 1897, "\u0120Euro": 1898, "60": 1899, "\u0120known": 1900, "irl": 1901, "\u0120Gr": 1902, "\u0120early": 1903, "use": 1904, "iety": 1905, "\u00e2\u0122\u0135": 1906, "\u0120fight": 1907, "\u0120sent": 1908, "\u0120today": 1909, "\u0120market": 1910, "\".": 1911, "\u0120based": 1912, "\u0120strong": 1913, "urther": 1914, "\u0120deb": 1915, "mber": 1916, "\u0120problem": 1917, "\u0120death": 1918, "\u0120social": 1919, "imate": 1920, "AS": 1921, "ortun": 1922, "\u0120campaign": 1923, "ery": 1924, "Ch": 1925, "\u0120ey": 1926, "ially": 1927, "\u0120mus": 1928, "wh": 1929, "pos": 1930, "\u0120er": 1931, "\u0120saf": 1932, "\u0120months": 1933, "iron": 1934, "\u0120viol": 1935, "\u0120five": 1936, "\u0120stre": 1937, "\u0120players": 1938, "inc": 1939, "ald": 1940, "year": 1941, "aun": 1942, "\u0120success": 1943, "\u0120present": 1944, "erence": 1945, "\u01202014": 1946, "\u0120sugg": 1947, "\u0120particular": 1948, "\u0120try": 1949, "\u0120suggest": 1950, "\u0120Christ": 1951, "ones": 1952, "\u0120priv": 1953, "23": 1954, "\u0120crit": 1955, "\u0120land": 1956, "\u0120local": 1957, "ify": 1958, "29": 1959, "\u0120aut": 1960, "ED": 1961, "\u0120Gu": 1962, "\u0120mult": 1963, "\u0120political": 1964, "\u0120asked": 1965, "\u0120former": 1966, "itter": 1967, "ript": 1968, "\u0120close": 1969, "\u0120pract": 1970, "\u0120York": 1971, "\u0120getting": 1972, "\u0120across": 1973, "\u0120comb": 1974, "\u0120believe": 1975, "\u0120z": 1976, "\u0120toget": 1977, "\u0120together": 1978, "\u0120Cent": 1979, "irc": 1980, 
"\u0120individual": 1981, "\u0120Mc": 1982, "27": 1983, "isk": 1984, "\u0120Eng": 1985, "\u0120face": 1986, "\u012024": 1987, "\u0120value": 1988, "\u0120area": 1989, "ev": 1990, "\u0120writ": 1991, "\u0120President": 1992, "\u0120vot": 1993, "\u0120key": 1994, "\u0120mom": 1995, "put": 1996, "\u0120anything": 1997, "\u0120experience": 1998, "attle": 1999, "\u0120mind": 2000, "aff": 2001, "omm": 2002, "\u0120future": 2003, "ged": 2004, "\u0120cut": 2005, "\u0120tot": 2006, "itch": 2007, "\u0120video": 2008, "\u0120investig": 2009, "\u0120net": 2010, "\u0120My": 2011, "rict": 2012, "ien": 2013, ".)": 2014, "\u0120impro": 2015, "though": 2016, "wards": 2017, "\u0120connect": 2018, "\u0120Med": 2019, "selves": 2020, "ensive": 2021, "mb": 2022, "ober": 2023, "ators": 2024, "An": 2025, "\u012050": 2026, "\u0120redu": 2027, "resent": 2028, "\u0120above": 2029, "\u0120fre": 2030, "\u0120Europe": 2031, "sw": 2032, "\u0120amount": 2033, "\u0120App": 2034, "\u0120either": 2035, "\u0120milit": 2036, "\u0120anal": 2037, "\u0120fail": 2038, "\u0120En": 2039, "ales": 2040, "\u0120special": 2041, "\u0120black": 2042, "IT": 2043, "cher": 2044, "\u0120looking": 2045, "\u0120fire": 2046, "yn": 2047, "\u0120almost": 2048, "oon": 2049, "\u0120study": 2050, "\u0120miss": 2051, "ches": 2052, "rown": 2053, "\u0120tre": 2054, "\u0120community": 2055, "\u0120media": 2056, "\u0120food": 2057, "\u0120comes": 2058, "\u0120University": 2059, "\u0120single": 2060, "What": 2061, "uly": 2062, "\u0120half": 2063, "ague": 2064, "hod": 2065, "\u0120Republic": 2066, "\u0120started": 2067, "\u0120quick": 2068, "oto": 2069, "book": 2070, "\u0120issue": 2071, "itor": 2072, "\u0120else": 2073, "\u0120consider": 2074, "26": 2075, "rodu": 2076, "\u0120taken": 2077, "28": 2078, "99": 2079, "\u0120With": 2080, "\u0120true": 2081, "\u0120wa": 2082, "\u0120trad": 2083, "\u0120ago": 2084, "\u0120mess": 2085, "ief": 2086, "\u0120added": 2087, "oke": 2088, "\u0120bad": 2089, "\u0120fav": 2090, "33": 2091, "\u0120similar": 2092, "ask": 2093, "\u0120Don": 2094, "\u0120character": 2095, "orts": 2096, "\u0120House": 2097, "\u0120reported": 2098, "\u0120type": 2099, "val": 2100, "iod": 2101, "\u0120However": 2102, "\u0120targ": 2103, "\u0120entire": 2104, "pping": 2105, "\u0120history": 2106, "\u0120live": 2107, "ffic": 2108, "........": 2109, "ederal": 2110, "\u0120trying": 2111, "\u0120discuss": 2112, "\u0120Har": 2113, "aces": 2114, "lished": 2115, "\u0120self": 2116, "osp": 2117, "rest": 2118, "\u0120room": 2119, "elt": 2120, "\u0120fall": 2121, "olution": 2122, "\u0120et": 2123, "\u0120x": 2124, "\u0120isn": 2125, "\u0120idea": 2126, "bo": 2127, "\u0120sound": 2128, "\u0120Dep": 2129, "\u0120someone": 2130, "cially": 2131, "ully": 2132, "\u0120foc": 2133, "\u0120object": 2134, "ift": 2135, "aper": 2136, "\u0120player": 2137, "\u0120rather": 2138, "\u0120service": 2139, "ashing": 2140, "\u0120Do": 2141, "\u0120Part": 2142, "rug": 2143, "mon": 2144, "ply": 2145, "\u0120mor": 2146, "\u0120nothing": 2147, "\u0120provide": 2148, "IC": 2149, "ung": 2150, "\u0120party": 2151, "\u0120exist": 2152, "\u0120mag": 2153, "70": 2154, "\u0120rul": 2155, "\u0120house": 2156, "\u0120behind": 2157, "\u0120however": 2158, "\u0120World": 2159, "\u0120sum": 2160, "\u0120applic": 2161, "\u0120;": 2162, "\u0120function": 2163, "gr": 2164, "\u0120Pol": 2165, "\u0120front": 2166, "200": 2167, "\u0120series": 2168, "\u0120tem": 2169, "\u0120typ": 2170, "ills": 2171, "\u0120opt": 2172, "\u0120points": 2173, "\u0120below": 2174, "itted": 2175, "\u0120specific": 
2176, "\u01202017": 2177, "umb": 2178, "\u0120ra": 2179, "\u0120previous": 2180, "\u0120pret": 2181, "reme": 2182, "\u0120custom": 2183, "\u0120court": 2184, "\u0120Me": 2185, "\u0120repl": 2186, "\u0120whole": 2187, "go": 2188, "cer": 2189, "\u0120treat": 2190, "\u0120Act": 2191, "\u0120probably": 2192, "\u0120learn": 2193, "ender": 2194, "\u0120Ass": 2195, "\u0120version": 2196, "now": 2197, "\u0120check": 2198, "\u0120Cal": 2199, "RE": 2200, "minist": 2201, "On": 2202, "ources": 2203, "\u0120benef": 2204, "\u0120doc": 2205, "\u0120deter": 2206, "\u0120enc": 2207, "\u0120super": 2208, "\u0120address": 2209, "\u0120vict": 2210, "\u01202013": 2211, "\u0120meas": 2212, "tr": 2213, "\u0120field": 2214, "When": 2215, "\u0120signific": 2216, "uge": 2217, "\u0120feat": 2218, "\u0120common": 2219, "load": 2220, "\u0120begin": 2221, "\u0120bring": 2222, "\u0120action": 2223, "erman": 2224, "\u0120describ": 2225, "\u0120indust": 2226, "\u0120wanted": 2227, "ried": 2228, "ming": 2229, "\u0120attempt": 2230, "45": 2231, "fer": 2232, "\u0120due": 2233, "ression": 2234, "##": 2235, "\u0120shall": 2236, "\u0120six": 2237, "oo": 2238, "\u0120step": 2239, "\u0120pub": 2240, "\u0120himself": 2241, "\u012023": 2242, "\u0120cop": 2243, "\u0120dest": 2244, "\u0120stop": 2245, "AC": 2246, "ibility": 2247, "\u0120lab": 2248, "icult": 2249, "\u0120hours": 2250, "\u0120create": 2251, "\u0120further": 2252, "\u0120America": 2253, "\u0120City": 2254, "\u0120dou": 2255, "head": 2256, "ST": 2257, "\u0120North": 2258, "cing": 2259, "\u0120national": 2260, "ule": 2261, "\u0120Inst": 2262, "\u0120taking": 2263, "\u0120Qu": 2264, "irt": 2265, "\u0120red": 2266, "\u0120research": 2267, "viron": 2268, "\u0120Ge": 2269, "\u0120break": 2270, "ana": 2271, "\u0120space": 2272, "aterial": 2273, "\u0120recent": 2274, "\u0120Ab": 2275, "\u0120general": 2276, "\u0120hit": 2277, "\u0120period": 2278, "\u0120everything": 2279, "ively": 2280, "\u0120phys": 2281, "\u0120saying": 2282, "anks": 2283, "\u0120cou": 2284, "\u0120cult": 2285, "aced": 2286, "eal": 2287, "uation": 2288, "\u0120coun": 2289, "lu": 2290, "\u0120include": 2291, "\u0120position": 2292, "\u0120After": 2293, "\u0120Canad": 2294, "\u0120Em": 2295, "\u0120imm": 2296, "\u0120Red": 2297, "\u0120pick": 2298, "\u0120compl": 2299, "\u0120matter": 2300, "reg": 2301, "ext": 2302, "angu": 2303, "isc": 2304, "ole": 2305, "aut": 2306, "\u0120compet": 2307, "eed": 2308, "fect": 2309, "\u012021": 2310, "\u0120Sen": 2311, "\u0120These": 2312, "asing": 2313, "\u0120cannot": 2314, "\u0120init": 2315, "\u0120relations": 2316, "ached": 2317, "\u0120bar": 2318, "\u012040": 2319, "\u0120TH": 2320, "\u01202012": 2321, "\u0120vol": 2322, "\u0120ground": 2323, "\u0120security": 2324, "\u0120upd": 2325, "ilt": 2326, "35": 2327, "\u0120concern": 2328, "\u0120Just": 2329, "\u0120white": 2330, "\u0120seems": 2331, "\u0120Her": 2332, "pecially": 2333, "ients": 2334, "\u0120announ": 2335, "\u0120fig": 2336, "ights": 2337, "\u0120stri": 2338, "like": 2339, "ids": 2340, "\u0120sus": 2341, "\u0120watch": 2342, "\u0120\u00e2": 2343, "\u0120wind": 2344, "\u0120Cont": 2345, "\u0120itself": 2346, "\u0120mass": 2347, "Al": 2348, "yle": 2349, "ique": 2350, "\u0120National": 2351, "\u0120abs": 2352, "\u0120pack": 2353, "\u0120outside": 2354, "\u0120anim": 2355, "\u0120pain": 2356, "eter": 2357, "\u0120manag": 2358, "duct": 2359, "ogn": 2360, "\u0120]": 2361, "\u0120Sept": 2362, "sec": 2363, "off": 2364, "\u0120Jan": 2365, "\u0120foot": 2366, "ades": 2367, "\u0120third": 2368, "\u0120mot": 2369, 
"\u0120evidence": 2370, "inton": 2371, "\u0120threat": 2372, "apt": 2373, "ples": 2374, "cle": 2375, "\u0120lo": 2376, "\u0120decl": 2377, "\u0120item": 2378, "medi": 2379, "\u0120represent": 2380, "omb": 2381, "amer": 2382, "\u0120significant": 2383, "ograph": 2384, "su": 2385, "\u0120cal": 2386, "ires": 2387, "0000": 2388, "ID": 2389, "AM": 2390, "\u0120simply": 2391, "\u0120longer": 2392, "\u0120file": 2393, "OT": 2394, "che": 2395, "So": 2396, "ateg": 2397, "org": 2398, "\u0120His": 2399, "\u0120ener": 2400, "\u0120dom": 2401, "\u0120upon": 2402, "ili": 2403, "\":\"": 2404, "\u0120themselves": 2405, "\u0120coming": 2406, "\u0120quite": 2407, "\u0120difficult": 2408, "\u0120Bar": 2409, "ilities": 2410, "rel": 2411, "ends": 2412, "cial": 2413, "64": 2414, "\u0120woman": 2415, "rap": 2416, "yr": 2417, "\u0120necess": 2418, "ips": 2419, "\u0120text": 2420, "\u0120require": 2421, "\u0120military": 2422, "\u0120review": 2423, "\u0120respons": 2424, "75": 2425, "\u0120subject": 2426, "\u0120instead": 2427, "\u0120issues": 2428, "\u0120gen": 2429, "\",\"": 2430, "\u0120minutes": 2431, "\u0120weap": 2432, "ray": 2433, "amed": 2434, "time": 2435, "bl": 2436, "How": 2437, "\u0120code": 2438, "\u0120Sm": 2439, "\u0120higher": 2440, "\u0120Ste": 2441, "ris": 2442, "\u0120page": 2443, "\u0120students": 2444, "\u0120Intern": 2445, "\u0120method": 2446, "\u0120Aug": 2447, "\u0120Per": 2448, "\u0120Ag": 2449, "\u0120policy": 2450, "\u0120Sw": 2451, "\u0120exec": 2452, "\u0120accept": 2453, "ume": 2454, "ribut": 2455, "\u0120words": 2456, "\u0120final": 2457, "\u0120changes": 2458, "\u0120Democr": 2459, "\u0120friends": 2460, "\u0120respect": 2461, "\u0120ep": 2462, "\u0120compan": 2463, "ivil": 2464, "\u0120damage": 2465, "****": 2466, "ogle": 2467, "vironment": 2468, "\u0120neg": 2469, "ental": 2470, "\u0120ap": 2471, "\u0120total": 2472, "ival": 2473, "!\"": 2474, "lim": 2475, "\u0120needs": 2476, "\u0120agre": 2477, "\u0120development": 2478, "\u0120age": 2479, "iple": 2480, "21": 2481, "\u0120results": 2482, "\u0120Af": 2483, "Sh": 2484, "\u0120gun": 2485, "\u0120Obama": 2486, "roll": 2487, "\u0120@": 2488, "\u0120rights": 2489, "\u0120Brit": 2490, "\u0120running": 2491, "\u0120wasn": 2492, "\u0120port": 2493, "\u0120rate": 2494, "\u0120pretty": 2495, "\u0120target": 2496, "\u0120saw": 2497, "\u0120circ": 2498, "\u0120works": 2499, "icro": 2500, "alt": 2501, "over": 2502, "www": 2503, "That": 2504, "lier": 2505, "\u0120everyone": 2506, "ude": 2507, "\u0120pie": 2508, "iddle": 2509, "rael": 2510, "\u0120rad": 2511, "\u0120block": 2512, "\u0120walk": 2513, "To": 2514, "\u00e3\u0123": 2515, "nes": 2516, "\u0120Aust": 2517, "aul": 2518, "rote": 2519, "\u0120South": 2520, "ession": 2521, "oph": 2522, "\u0120shows": 2523, "\u0120site": 2524, "\u0120jo": 2525, "\u0120risk": 2526, "clus": 2527, "lt": 2528, "\u0120inj": 2529, "iding": 2530, "\u0120Spe": 2531, "\u0120chall": 2532, "irm": 2533, "\u012022": 2534, "itting": 2535, "str": 2536, "\u0120hy": 2537, "LE": 2538, "key": 2539, "\u0120began": 2540, "atur": 2541, "ashington": 2542, "lam": 2543, "\u0120Dav": 2544, "bit": 2545, "\u0120size": 2546, "\u0120Par": 2547, "38": 2548, "ournal": 2549, "face": 2550, "\u0120decision": 2551, "\u0120larg": 2552, "\u0120jud": 2553, "rect": 2554, "\u0120continue": 2555, "\u0120Oct": 2556, "overed": 2557, "\u0120Int": 2558, "========": 2559, "\u0120parent": 2560, "\u0120Will": 2561, "\u0120easy": 2562, "\u0120drug": 2563, "anger": 2564, "\u0120sense": 2565, "\u0120di": 2566, "iday": 2567, "\u0120energy": 2568, "istic": 
2569, "\u0120associ": 2570, "arter": 2571, "obal": 2572, "eks": 2573, "\u0120El": 2574, "urch": 2575, "\u0120girl": 2576, "oe": 2577, "itle": 2578, "\u012028": 2579, "\u0120Che": 2580, "\u0120request": 2581, "\u0120soon": 2582, "\u0120host": 2583, "ky": 2584, "\u0120states": 2585, "omes": 2586, "\u0120material": 2587, "lex": 2588, "\u0120moment": 2589, "\u0120answ": 2590, "onse": 2591, "\u0120especially": 2592, "\u0120norm": 2593, "\u0120services": 2594, "pite": 2595, "ran": 2596, "\u0120role": 2597, "44": 2598, "):": 2599, "\u0120cred": 2600, "Cl": 2601, "________": 2602, "\u0120mat": 2603, "\u0120log": 2604, "\u0120Clinton": 2605, "OU": 2606, "\u0120office": 2607, "\u012026": 2608, "\u0120charg": 2609, "\u0120track": 2610, "ma": 2611, "\u0120heart": 2612, "\u0120ball": 2613, "\u0120personal": 2614, "\u0120building": 2615, "na": 2616, "set": 2617, "body": 2618, "\u0120Black": 2619, "\u0120increase": 2620, "itten": 2621, "\u0120needed": 2622, "36": 2623, "32": 2624, "=\"": 2625, "\u0120lost": 2626, "\u0120became": 2627, "\u0120groups": 2628, "\u0120Mus": 2629, "\u0120wrote": 2630, "\u0120Pe": 2631, "\u0120prop": 2632, "joy": 2633, "\u00c3\u00a9": 2634, "\u0120White": 2635, "\u0120dead": 2636, ".'": 2637, "\u0120http": 2638, "\u0120webs": 2639, "OS": 2640, "\u0120inside": 2641, "\u0120wrong": 2642, "\u0120statement": 2643, "\u0120...": 2644, "yl": 2645, "\u0120film": 2646, "\u0120music": 2647, "\u0120share": 2648, "ification": 2649, "\u0120release": 2650, "\u0120forward": 2651, "\u0120stay": 2652, "\u0120comput": 2653, "itte": 2654, "ser": 2655, "\u0120original": 2656, "\u0120card": 2657, "\u0120cand": 2658, "\u0120div": 2659, "atural": 2660, "\u0120favor": 2661, "OM": 2662, "\u0120cases": 2663, "uses": 2664, "\u0120section": 2665, "\u0120leave": 2666, "ging": 2667, "oved": 2668, "\u0120Washington": 2669, "39": 2670, "\u0120Gl": 2671, "\u0120required": 2672, "action": 2673, "apan": 2674, "oor": 2675, "iter": 2676, "\u0120King": 2677, "\u0120countries": 2678, "\u0120German": 2679, "lling": 2680, "\u012027": 2681, "34": 2682, "\u0120questions": 2683, "\u0120prim": 2684, "\u0120cell": 2685, "\u0120shoot": 2686, "\u0120anyone": 2687, "\u0120West": 2688, "\u0120affect": 2689, "epend": 2690, "\u0120online": 2691, "\u0120Israel": 2692, "\u0120September": 2693, "\u0120ability": 2694, "\u0120content": 2695, "ises": 2696, "\u0120reve": 2697, "\u0120laun": 2698, "\u0120indic": 2699, "\u0120force": 2700, "cast": 2701, "\u0120sold": 2702, "aving": 2703, "fl": 2704, "\u0120soft": 2705, "\u0120companies": 2706, "ceed": 2707, "\u0120article": 2708, "\u0120aud": 2709, "\u0120rev": 2710, "\u0120educ": 2711, "\u0120playing": 2712, "05": 2713, "\u0120held": 2714, "ctor": 2715, "\u0120released": 2716, "\u0120federal": 2717, "37": 2718, "\u0120administ": 2719, "\u0120interview": 2720, "\u0120install": 2721, "\u0120received": 2722, "\u0120source": 2723, "uk": 2724, "Ph": 2725, "\u0120serious": 2726, "\u0120created": 2727, "\u0120cause": 2728, "\u0120immedi": 2729, "\u0120defin": 2730, "uel": 2731, "\u0120Department": 2732, "ctions": 2733, "\u0120Cour": 2734, "\u0120Now": 2735, "ze": 2736, "ites": 2737, "itution": 2738, "\u0120late": 2739, "\u0120speak": 2740, "ners": 2741, "\u0120legal": 2742, "ari": 2743, "\u0120Cor": 2744, "\u0120weeks": 2745, "\u0120model": 2746, "\u0120pred": 2747, "\u0120exact": 2748, "BC": 2749, "\u0120By": 2750, "ING": 2751, "osing": 2752, "\u0120takes": 2753, "\u0120regard": 2754, "\u0120opportun": 2755, "\u0120price": 2756, "\u0120198": 2757, "\u0120Apr": 2758, "fully": 2759, 
"\u0120ord": 2760, "\u0120problems": 2761, "ruction": 2762, "ham": 2763, "\u0120Count": 2764, "lege": 2765, "\u0120leaders": 2766, "ET": 2767, "lev": 2768, "\u0120deep": 2769, "ological": 2770, "ese": 2771, "haps": 2772, "\u0120Some": 2773, "\u0120pers": 2774, "\u0120contract": 2775, "\u0120relationship": 2776, "sp": 2777, "oud": 2778, "\u0120base": 2779, "48": 2780, "mit": 2781, "Ad": 2782, "ancial": 2783, "\u0120consum": 2784, "\u0120potential": 2785, "\u0120langu": 2786, "rem": 2787, "eth": 2788, "\u0120relig": 2789, "ressed": 2790, "66": 2791, "\u0120link": 2792, "\u0120lower": 2793, "ayer": 2794, "\u0120June": 2795, "\u0120fem": 2796, "unt": 2797, "erc": 2798, "urd": 2799, "\u0120contact": 2800, "\u0120ill": 2801, "\u0120mother": 2802, "\u0120estab": 2803, "htt": 2804, "\u0120March": 2805, "\u0120Bro": 2806, "\u0120China": 2807, "\u012029": 2808, "\u0120squ": 2809, "\u0120provided": 2810, "\u0120average": 2811, "asons": 2812, "\u01202011": 2813, "\u0120exam": 2814, "lin": 2815, "55": 2816, "ned": 2817, "\u0120perfect": 2818, "\u0120tou": 2819, "alse": 2820, "ux": 2821, "\u0120buy": 2822, "\u0120shot": 2823, "\u0120collect": 2824, "\u0120phot": 2825, "\u0120played": 2826, "\u0120surpr": 2827, "\u0120officials": 2828, "\u0120simple": 2829, "avy": 2830, "\u0120industry": 2831, "\u0120hands": 2832, "ground": 2833, "\u0120pull": 2834, "\u0120round": 2835, "\u0120user": 2836, "\u0120range": 2837, "uary": 2838, "\u0120private": 2839, "ops": 2840, "ees": 2841, "\u0120ways": 2842, "\u0120Mich": 2843, "\u0120veh": 2844, "\u0120except": 2845, "\u0120terms": 2846, "imum": 2847, "pper": 2848, "ION": 2849, "ores": 2850, "\u0120Dragon": 2851, "oul": 2852, "\u0120den": 2853, "\u0120performance": 2854, "\u0120bill": 2855, "cil": 2856, "47": 2857, "\u0120environment": 2858, "\u0120exc": 2859, "add": 2860, "\u0120worth": 2861, "\u0120pict": 2862, "\u0120chance": 2863, "\u01202018": 2864, "bor": 2865, "\u0120speed": 2866, "iction": 2867, "\u0120alleg": 2868, "\u0120Japan": 2869, "atory": 2870, "reet": 2871, "\u0120match": 2872, "\u0120II": 2873, "\u0120stru": 2874, "order": 2875, "\u0120ste": 2876, "\u0120living": 2877, "\u0120struct": 2878, "ino": 2879, "\u0120separ": 2880, "hern": 2881, "\u0120response": 2882, "\u0120enjoy": 2883, "\u0120via": 2884, "AD": 2885, "uments": 2886, "acebook": 2887, "\u0120member": 2888, "ibr": 2889, "izing": 2890, "\u0120tool": 2891, "\u0120Mon": 2892, "\u0120While": 2893, "hood": 2894, "\u0120Ang": 2895, "\u0120Def": 2896, "\u0120offer": 2897, "Tr": 2898, "aur": 2899, "\u0120turned": 2900, "\u0120July": 2901, "down": 2902, "anced": 2903, "\u0120recently": 2904, "\u0120Ear": 2905, "\u0120ce": 2906, "\u0120Star": 2907, "\u0120Cong": 2908, "rought": 2909, "\u0120blood": 2910, "\u0120hope": 2911, "\u0120comment": 2912, "aint": 2913, "\u0120arri": 2914, "iles": 2915, "\u0120particip": 2916, "ought": 2917, "ription": 2918, "08": 2919, "49": 2920, "\u0120gave": 2921, "\u0120select": 2922, "\u0120killed": 2923, "sych": 2924, "\u0120goes": 2925, "ij": 2926, "\u0120coll": 2927, "\u0120impact": 2928, "atives": 2929, "\u0120Ser": 2930, "09": 2931, "\u0120August": 2932, "\u0120boy": 2933, "de": 2934, "\u0120Des": 2935, "\u0120felt": 2936, "US": 2937, "\u0120expected": 2938, "\u0120image": 2939, "\u0120Mark": 2940, "ccording": 2941, "oice": 2942, "EC": 2943, "\u0120Mag": 2944, "ened": 2945, "hold": 2946, "\u0120Post": 2947, "\u0120prevent": 2948, "No": 2949, "\u0120involved": 2950, "\u0120eyes": 2951, "\u0120quickly": 2952, "At": 2953, "unk": 2954, "\u0120behav": 2955, "\u0120ur": 2956, 
"\u0120led": 2957, "come": 2958, "ey": 2959, "\u0120candid": 2960, "\u0120earlier": 2961, "\u0120focus": 2962, "ety": 2963, "Pro": 2964, "ledge": 2965, "ixed": 2966, "illed": 2967, "\u0120popular": 2968, "AP": 2969, "\u0120sett": 2970, "light": 2971, "\u0120various": 2972, "inks": 2973, "\u0120levels": 2974, "\u0120road": 2975, "ellig": 2976, "ables": 2977, "hel": 2978, "ittee": 2979, "\u0120Gener": 2980, "ype": 2981, "\u0120heard": 2982, "icles": 2983, "\u0120mis": 2984, "\u0120users": 2985, "\u0120San": 2986, "\u0120improve": 2987, "\u0120father": 2988, "\u0120search": 2989, "They": 2990, "vil": 2991, "\u0120profess": 2992, "\u0120knew": 2993, "\u0120loss": 2994, "\u0120events": 2995, "65": 2996, "\u0120billion": 2997, "07": 2998, "02": 2999, "\u0120News": 3000, "\u0120AM": 3001, "\u0120cover": 3002, "where": 3003, "ension": 3004, "\u0120bott": 3005, "\u0120areas": 3006, "ences": 3007, "ope": 3008, "\u0120Twitter": 3009, "ael": 3010, "\u0120gets": 3011, "\u0120Google": 3012, "\u0120sn": 3013, "iant": 3014, "\u0120vote": 3015, "\u0120nearly": 3016, "\u0120included": 3017, "\u0120recogn": 3018, "zz": 3019, "mm": 3020, "aled": 3021, "\u0120happened": 3022, "04": 3023, "\u0120hot": 3024, "\u0120whose": 3025, "\u0120civil": 3026, "\u0120suff": 3027, "oes": 3028, "itiz": 3029, "\u0120Syri": 3030, "\u0120respond": 3031, "\u0120hon": 3032, "\u0120features": 3033, "\u0120economic": 3034, "\u0120April": 3035, "rim": 3036, "\u0120technology": 3037, "\u0120option": 3038, "aging": 3039, "\u0120purch": 3040, "Re": 3041, "\u0120lat": 3042, "chie": 3043, "isl": 3044, "\u0120recomm": 3045, "uf": 3046, "\u0120training": 3047, "\u0120effects": 3048, "\u0120fast": 3049, "\u01202010": 3050, "\u0120occur": 3051, "\u0120website": 3052, "\u0120email": 3053, "\u0120sens": 3054, "ech": 3055, "\u0120oil": 3056, "\u0120influ": 3057, "\u0120currently": 3058, "\u0120Sch": 3059, "\u0120Add": 3060, "\u0120goal": 3061, "\u0120scient": 3062, "\u0120conv": 3063, "100": 3064, "emy": 3065, "\u0120decided": 3066, "\u0120travel": 3067, "\u0120mention": 3068, "LL": 3069, "03": 3070, "\u0120election": 3071, "\u0120phone": 3072, "\u0120looks": 3073, "\u0120situation": 3074, "\u0120cy": 3075, "\u0120hor": 3076, "bed": 3077, "\u0120Court": 3078, "aily": 3079, "aves": 3080, "\u0120quality": 3081, "\u0120Comp": 3082, "wise": 3083, "\u0120table": 3084, "\u0120staff": 3085, "\u0120Wind": 3086, "ett": 3087, "\u0120tried": 3088, "idered": 3089, "\u0120addition": 3090, "\u0120box": 3091, "\u0120lack": 3092, "arily": 3093, "\u0120wide": 3094, "\u0120mid": 3095, "\u0120board": 3096, "ysis": 3097, "\u0120anti": 3098, "ha": 3099, "\u0120dig": 3100, "ening": 3101, "\u0120dro": 3102, "Con": 3103, "68": 3104, "\u0120slow": 3105, "based": 3106, "sequ": 3107, "\u0120path": 3108, "Ex": 3109, "aker": 3110, "\u0120worked": 3111, "\u0120pen": 3112, "\u0120engine": 3113, "\u0120looked": 3114, "\u0120Super": 3115, "\u0120Serv": 3116, "\u0120victim": 3117, "Un": 3118, "\u0120property": 3119, "\u0120introdu": 3120, "\u0120execut": 3121, "\u0120PM": 3122, "Le": 3123, "\u0120color": 3124, "\u0120More": 3125, "\u012060": 3126, "\u0120network": 3127, "\u0120date": 3128, "cul": 3129, "idge": 3130, "\u0120extra": 3131, "31": 3132, "\u0120sle": 3133, "67": 3134, "\u0120wond": 3135, "\u0120reports": 3136, "just": 3137, "\u0120Austral": 3138, "\u0120capital": 3139, "\u0120ens": 3140, "\u0120command": 3141, "\u0120allowed": 3142, "\u0120prep": 3143, "\u0120capt": 3144, "hib": 3145, "\u0120numbers": 3146, "chan": 3147, "\u0120fair": 3148, "mp": 3149, "oms": 3150, 
"\u0120reach": 3151, "With": 3152, "tain": 3153, "\u0120broad": 3154, "\u0120couple": 3155, "ecause": 3156, "lying": 3157, "\u0120Feb": 3158, "\u0120screen": 3159, "\u0120lives": 3160, "\u0120prior": 3161, "\u0120Congress": 3162, "Ar": 3163, "\u0120approach": 3164, "\u0120emer": 3165, "aries": 3166, "\u0120Dis": 3167, "serv": 3168, "\u0120Ne": 3169, "\u0120built": 3170, "cies": 3171, "\u0120repe": 3172, "\u0120rules": 3173, "force": 3174, "\u0120Pal": 3175, "\u0120financial": 3176, "\u0120considered": 3177, "\u0120Char": 3178, "nces": 3179, "\u0120IS": 3180, "\u0120brought": 3181, "\u0120bi": 3182, "iers": 3183, "\u0120Sim": 3184, "OP": 3185, "\u0120products": 3186, "\u0120visit": 3187, "\u0120document": 3188, "\u0120conduct": 3189, "\u0120completely": 3190, "ining": 3191, "\u0120Calif": 3192, "ibly": 3193, "\u0120written": 3194, "\u0120TV": 3195, "ements": 3196, "\u0120draw": 3197, "One": 3198, "\u0120published": 3199, "\u0120secret": 3200, "rain": 3201, "het": 3202, "\u0120Facebook": 3203, "onday": 3204, "\u0120Up": 3205, "\u0120sexual": 3206, "\u0120thous": 3207, "\u0120Pat": 3208, "\u0120ess": 3209, "\u0120standard": 3210, "\u0120arm": 3211, "ges": 3212, "ection": 3213, "\u0120fell": 3214, "\u0120foreign": 3215, "ani": 3216, "\u0120Friday": 3217, "\u0120regular": 3218, "inary": 3219, "\u0120increased": 3220, "\u0120usually": 3221, "\u0120demon": 3222, "\u0120dark": 3223, "\u0120additional": 3224, "rol": 3225, "\u0120Of": 3226, "\u0120production": 3227, "!!": 3228, "undred": 3229, "\u0120international": 3230, "idents": 3231, "\u0120Free": 3232, "roup": 3233, "\u0120race": 3234, "\u0120mach": 3235, "\u0120huge": 3236, "All": 3237, "lear": 3238, "ovember": 3239, "\u0120town": 3240, "\u0120attention": 3241, "\u0120Off": 3242, "yond": 3243, "\u0120Then": 3244, "field": 3245, "\u0120terror": 3246, "raz": 3247, "\u0120Bo": 3248, "\u0120meeting": 3249, "\u0120Park": 3250, "\u0120arrest": 3251, "\u0120fear": 3252, "\u0120aw": 3253, "\u0120Val": 3254, "oring": 3255, "',": 3256, "\u0120extreme": 3257, "arr": 3258, "\u0120workers": 3259, "After": 3260, "\u012031": 3261, "net": 3262, "ament": 3263, "\u0120directly": 3264, "\u0120population": 3265, "ube": 3266, "\u0120October": 3267, "\u0120IN": 3268, "\u0120January": 3269, "59": 3270, "\u0120David": 3271, "\u0120cross": 3272, "cember": 3273, "\u0120First": 3274, "\u0120message": 3275, "irit": 3276, "\u0120nation": 3277, "\u0120poll": 3278, "isions": 3279, "\u0120answer": 3280, "ny": 3281, "isode": 3282, "\u0120carry": 3283, "\u0120Russia": 3284, "\u0120hear": 3285, "ength": 3286, "roy": 3287, "\u0120natural": 3288, "inally": 3289, "\u0120dog": 3290, "mitted": 3291, "\u0120trade": 3292, "\u0120subst": 3293, "\u0120multiple": 3294, "\u0120Afric": 3295, "\u0120fans": 3296, "\u0120sort": 3297, "\u0120global": 3298, "ication": 3299, "\u0120Wed": 3300, "ara": 3301, "\u0120achie": 3302, "\u0120language": 3303, "vey": 3304, "\u0120tal": 3305, "\u0120necessary": 3306, "\u0120details": 3307, "\u0120sen": 3308, "\u0120Sund": 3309, "\u0120Reg": 3310, "\u0120Rec": 3311, "06": 3312, "\u0120sil": 3313, "ressive": 3314, "\u0120medical": 3315, "unch": 3316, "ornia": 3317, "\u0120und": 3318, "fort": 3319, "ocks": 3320, "\u0120Monday": 3321, "uesday": 3322, "craft": 3323, "77": 3324, "urt": 3325, "\u0120ver": 3326, "\u0120Hill": 3327, "\u0120receive": 3328, "\u0120morning": 3329, "estern": 3330, "\u0120bank": 3331, "\u0120sat": 3332, "irth": 3333, "\u0120High": 3334, "\u0120device": 3335, "\u0120THE": 3336, "\u0120Center": 3337, "\u0120safe": 3338, "\u0120ple": 3339, 
"\u0120Canada": 3340, "\u0120systems": 3341, "\u0120assist": 3342, "\u0120surv": 3343, "\u0120battle": 3344, "\u0120Soc": 3345, "vertis": 3346, "She": 3347, "\u0120paper": 3348, "\u0120growth": 3349, "\u0120cast": 3350, "Sc": 3351, "\u0120plans": 3352, "lled": 3353, "\u0120parts": 3354, "\u0120wall": 3355, "\u0120movement": 3356, "\u0120practice": 3357, "imately": 3358, "\u0120display": 3359, "\u0120sometimes": 3360, "omp": 3361, "\u0120Paul": 3362, "\u0120Yes": 3363, "king": 3364, "58": 3365, "oly": 3366, "\u0120son": 3367, "\u0120avoid": 3368, "okes": 3369, "\u0120Jew": 3370, "\u0120towards": 3371, "asc": 3372, "\u0120//": 3373, "\u0120Kore": 3374, "\u0120talking": 3375, "\u0120correct": 3376, "\u0120spent": 3377, "icks": 3378, "iable": 3379, "eared": 3380, "\u0120term": 3381, "\u0120wants": 3382, "oming": 3383, "\u0120ut": 3384, "\u0120doub": 3385, "\u0120forces": 3386, "\u0120please": 3387, "69": 3388, "\u0120November": 3389, "atform": 3390, "ondon": 3391, "\u0120ones": 3392, "\u0120immediately": 3393, "\u0120Russian": 3394, "\u0120Met": 3395, "\u0120deg": 3396, "\u0120parents": 3397, "CH": 3398, "\u0120Americans": 3399, "aly": 3400, "\u0120Mod": 3401, "\u0120shown": 3402, "\u0120conditions": 3403, "\u0120stuff": 3404, "\u0120reb": 3405, "\u0120Your": 3406, "\u0120includes": 3407, "nown": 3408, "\u0120Sam": 3409, "\u0120experien": 3410, "mission": 3411, "\u0120Even": 3412, "aught": 3413, "\u0120announced": 3414, "\u0120Republican": 3415, "\u0120determin": 3416, "\u0120described": 3417, "\u0120County": 3418, "()": 3419, "\u0120door": 3420, "\u0120changed": 3421, "\u0120neigh": 3422, "\u0120Here": 3423, "\u0120clean": 3424, "\u0120pan": 3425, "\u0120December": 3426, "\u0120European": 3427, "iring": 3428, "apter": 3429, "\u0120club": 3430, "\u0120Tuesday": 3431, "\u0120paid": 3432, "\u0120Net": 3433, "\u0120attacks": 3434, "\u0120characters": 3435, "\u0120alone": 3436, "\u0120director": 3437, "dom": 3438, "\u012035": 3439, "\u0120load": 3440, "\u0120rout": 3441, "\u0120California": 3442, "\u0120finally": 3443, "\u0120rac": 3444, "\u0120contr": 3445, "\u0120exactly": 3446, "resh": 3447, "pri": 3448, "\u0120Islam": 3449, "\u0120nature": 3450, "\u0120career": 3451, "\u0120latest": 3452, "\u0120convers": 3453, "\u0120Sl": 3454, "pose": 3455, "cient": 3456, "\u0120Inc": 3457, "ivity": 3458, "88": 3459, "\u0120Att": 3460, "\u0120Mor": 3461, "nesday": 3462, "\u0120weight": 3463, "ken": 3464, "\u0120note": 3465, "\u0120teams": 3466, "\u0120\\": 3467, "airs": 3468, "\u0120Green": 3469, "\u0120hundred": 3470, "onent": 3471, "\u0120streng": 3472, "\u0120consist": 3473, "icated": 3474, "\u0120regul": 3475, "\u0120lic": 3476, "astic": 3477, "\u0120ten": 3478, "ursday": 3479, "elligence": 3480, "ously": 3481, "\u0120UK": 3482, "BI": 3483, "\u0120costs": 3484, "\u0120independ": 3485, "\u0120AP": 3486, "\u0120normal": 3487, "\u0120hom": 3488, "\u0120obvious": 3489, "\u0120swe": 3490, "\u0120star": 3491, "\u0120ready": 3492, "acher": 3493, "\u0120implement": 3494, "gest": 3495, "\u0120song": 3496, "\u0120Get": 3497, "\u0120Lab": 3498, "\u0120interesting": 3499, "using": 3500, "\u0120giving": 3501, "\u0120Sunday": 3502, "\u0120etc": 3503, "\u0120middle": 3504, "\u0120remember": 3505, "right": 3506, "osition": 3507, "utions": 3508, "\u0120max": 3509, "46": 3510, "\u0120yourself": 3511, "\u0120demand": 3512, "\u0120treatment": 3513, "\u0120danger": 3514, "\u0120Cons": 3515, "\u0120guy": 3516, "\u0120British": 3517, "\u0120physical": 3518, "\u0120related": 3519, "\u0120remain": 3520, "\u0120couldn": 3521, 
"\u0120refer": 3522, "\u0120citiz": 3523, "box": 3524, "ENT": 3525, "board": 3526, "\u0120inn": 3527, "IG": 3528, "ero": 3529, "\u0120Street": 3530, "ospital": 3531, "rench": 3532, "chers": 3533, "\u0120stra": 3534, "OL": 3535, "ager": 3536, "\u0120AN": 3537, "\u0120easily": 3538, "IA": 3539, "enge": 3540, "iny": 3541, "\u0120clos": 3542, "ocked": 3543, "\u0120uses": 3544, "\u0120Coun": 3545, "Im": 3546, "uild": 3547, "??": 3548, "more": 3549, "\u0120ang": 3550, "\u0120write": 3551, "olute": 3552, "57": 3553, "\u0120leader": 3554, "\u0120reading": 3555, "": 3784, "\u0120figure": 3785, "\u0120disapp": 3786, "enty": 3787, "\u0120software": 3788, "\u0120ult": 3789, "\u0120officers": 3790, "New": 3791, "Is": 3792, "\u0120remains": 3793, "\u0120India": 3794, "\u0120psych": 3795, "rief": 3796, "\u0120cat": 3797, "esc": 3798, "\u0120observ": 3799, "\u0120stage": 3800, "\u0120Dark": 3801, "\u0120enter": 3802, "change": 3803, "\u0120passed": 3804, "\u0120despite": 3805, "\u0120Out": 3806, "\u0120movie": 3807, "rs": 3808, "\u0120voice": 3809, "mine": 3810, "\u0120Play": 3811, "\u0120toward": 3812, "\u0120Ter": 3813, "\u0120region": 3814, "\u0120values": 3815, "orters": 3816, "\u0120mount": 3817, "\u0120officer": 3818, "\u0120Other": 3819, "ban": 3820, "\u0120hous": 3821, "wood": 3822, "room": 3823, "IV": 3824, "\u0120Sun": 3825, "see": 3826, "\u0120Over": 3827, "rog": 3828, "90": 3829, "\u0120lay": 3830, "\u0120Tur": 3831, "awn": 3832, "\u0120pressure": 3833, "\u0120Sub": 3834, "\u0120books": 3835, "edom": 3836, "\u0120Sand": 3837, "AA": 3838, "ago": 3839, "\u0120reasons": 3840, "ford": 3841, "\u0120activity": 3842, "UT": 3843, "Now": 3844, "\u0120Senate": 3845, "cell": 3846, "night": 3847, "\u0120calls": 3848, "inter": 3849, "\u0120letter": 3850, "\u0120Rob": 3851, "\u0120Je": 3852, "\u0120choose": 3853, "\u0120Law": 3854, "Get": 3855, "Be": 3856, "\u0120rob": 3857, "\u0120types": 3858, "\u0120platform": 3859, "\u0120quarter": 3860, "RA": 3861, "\u0120Time": 3862, "\u0120maybe": 3863, "\u0120Cr": 3864, "95": 3865, "pre": 3866, "\u0120moving": 3867, "\u0120lif": 3868, "\u0120gold": 3869, "\u0120som": 3870, "\u0120patients": 3871, "\u0120truth": 3872, "\u0120Ke": 3873, "urance": 3874, "antly": 3875, "mar": 3876, "\u0120charge": 3877, "\u0120Great": 3878, "\u0120cele": 3879, "--------------------------------": 3880, "\u0120rock": 3881, "roid": 3882, "ancy": 3883, "\u0120credit": 3884, "aud": 3885, "By": 3886, "\u0120Every": 3887, "\u0120moved": 3888, "inger": 3889, "ribution": 3890, "\u0120names": 3891, "\u0120straight": 3892, "\u0120Health": 3893, "\u0120Well": 3894, "\u0120feature": 3895, "\u0120rule": 3896, "\u0120sche": 3897, "inated": 3898, "\u0120Michael": 3899, "berg": 3900, "41": 3901, "iled": 3902, "band": 3903, "\u0120click": 3904, "\u0120Angel": 3905, "onents": 3906, "\u00c2\u0143": 3907, "\u0120Iraq": 3908, "\u0120Saturday": 3909, "\u0120aware": 3910, "part": 3911, "\u0120pattern": 3912, "OW": 3913, "\u0120Let": 3914, "\u0120grad": 3915, "igned": 3916, "\u0120associated": 3917, "\u0120style": 3918, "no": 3919, "iation": 3920, "aith": 3921, "ilies": 3922, "\u0120stories": 3923, "uration": 3924, "\u0120individuals": 3925, "\u0120\u00e2\u0122\u00a6": 3926, "miss": 3927, "\u0120Associ": 3928, "ishing": 3929, "aby": 3930, "\u0120summer": 3931, "\u0120Ben": 3932, "\u012032": 3933, "\u0120arch": 3934, "uty": 3935, "\u0120Texas": 3936, "hol": 3937, "\u0120fully": 3938, "\u0120mill": 3939, "\u0120followed": 3940, "\u0120Bill": 3941, "\u0120Indian": 3942, "\u0120Secret": 3943, "\u0120Bel": 3944, 
"\u0120February": 3945, "\u0120jobs": 3946, "\u0120seemed": 3947, "\u0120Govern": 3948, "ipped": 3949, "\u0120reality": 3950, "\u0120lines": 3951, "\u0120park": 3952, "\u0120measure": 3953, "\u0120Our": 3954, "IM": 3955, "\u0120brother": 3956, "\u0120growing": 3957, "\u0120ban": 3958, "\u0120estim": 3959, "\u0120cry": 3960, "\u0120School": 3961, "\u0120mechan": 3962, "\u0120OF": 3963, "\u0120Windows": 3964, "\u0120rates": 3965, "\u0120Oh": 3966, "\u0120positive": 3967, "\u0120culture": 3968, "istics": 3969, "ica": 3970, "\u0120har": 3971, "ya": 3972, "itely": 3973, "ipp": 3974, "\u0120map": 3975, "encies": 3976, "\u0120William": 3977, "II": 3978, "akers": 3979, "56": 3980, "\u0120Mart": 3981, "\u0120Rem": 3982, "\u0120altern": 3983, "itude": 3984, "\u0120coach": 3985, "rowd": 3986, "Don": 3987, "\u0120kids": 3988, "\u0120journal": 3989, "\u0120corpor": 3990, "\u0120false": 3991, "\u0120web": 3992, "\u0120sleep": 3993, "\u0120contain": 3994, "\u0120sto": 3995, "\u0120bed": 3996, "iverse": 3997, "\u0120Rich": 3998, "\u0120Chinese": 3999, "\u0120pun": 4000, "\u0120meant": 4001, "known": 4002, "\u0120notice": 4003, "\u0120favorite": 4004, "aven": 4005, "\u0120condition": 4006, "\u0120purpose": 4007, "))": 4008, "\u0120organization": 4009, "\u0120challeng": 4010, "\u0120manufact": 4011, "\u0120susp": 4012, "\u0120Ac": 4013, "\u0120critic": 4014, "unes": 4015, "uclear": 4016, "\u0120mer": 4017, "vention": 4018, "\u012080": 4019, "\u0120mist": 4020, "\u0120Us": 4021, "\u0120Tor": 4022, "http": 4023, "olf": 4024, "\u0120larger": 4025, "\u0120advant": 4026, "\u0120resear": 4027, "\u0120actions": 4028, "ml": 4029, "\u0120kept": 4030, "\u0120aim": 4031, ",'": 4032, "col": 4033, "\u0120benefits": 4034, "ifying": 4035, "\u0120actual": 4036, "\u0120International": 4037, "\u0120vehicle": 4038, "\u0120chief": 4039, "\u0120efforts": 4040, "\u0120League": 4041, "\u0120Most": 4042, "\u0120wait": 4043, "\u0120adult": 4044, "\u0120overall": 4045, "\u0120speech": 4046, "\u0120highly": 4047, "\u0120female": 4048, "\u0120error": 4049, "\u0120effective": 4050, "54": 4051, "\u0120encour": 4052, "well": 4053, "\u0120failed": 4054, "\u0120conserv": 4055, "\u0120programs": 4056, "\u0120trou": 4057, "\u0120ahead": 4058, "500": 4059, "vertisement": 4060, "IP": 4061, "\u0120Found": 4062, "pir": 4063, "\u0120%": 4064, "\u0120crime": 4065, "ander": 4066, "\u0120location": 4067, "\u0120Iran": 4068, "\u0120behavior": 4069, "azing": 4070, "\u0120rare": 4071, "\u0120emb": 4072, "\u0120caused": 4073, "\u0120ship": 4074, "\u0120active": 4075, "\u0120contribut": 4076, "\u0120green": 4077, "\u0120acqu": 4078, "\u0120reflect": 4079, "venue": 4080, "\u0120firm": 4081, "\u0120birth": 4082, "].": 4083, "\u0120clearly": 4084, "\u0120emot": 4085, "\u0120agency": 4086, "riage": 4087, "\u0120memory": 4088, "98": 4089, "SA": 4090, "\u0120See": 4091, "acing": 4092, "CC": 4093, "\u0120biggest": 4094, "\u0120rap": 4095, "\u0120basic": 4096, "\u0120band": 4097, "eat": 4098, "\u0120suspect": 4099, "\u0120Mac": 4100, "\u012090": 4101, "mark": 4102, "istan": 4103, "\u0120spread": 4104, "ams": 4105, "ki": 4106, "asy": 4107, "rav": 4108, "\u0120Rober": 4109, "\u0120demonstr": 4110, "rated": 4111, "\u0120absolute": 4112, "\u0120places": 4113, "\u0120impl": 4114, "ibrary": 4115, "\u0120cards": 4116, "\u0120destroy": 4117, "\u0120virt": 4118, "vere": 4119, "\u0120appeared": 4120, "yan": 4121, "point": 4122, "\u0120beg": 4123, "\u0120temper": 4124, "spe": 4125, "anted": 4126, "ears": 4127, "\u0120Direct": 4128, "\u0120length": 4129, "\u0120blog": 4130, 
"amb": 4131, "\u0120integ": 4132, "\u0120resources": 4133, "acc": 4134, "iful": 4135, "\u0120spot": 4136, "\u0120forced": 4137, "\u0120thousands": 4138, "\u0120Minister": 4139, "\u0120qual": 4140, "\u0120French": 4141, "atically": 4142, "\u0120generally": 4143, "\u0120drink": 4144, "\u0120thus": 4145, "IL": 4146, "odes": 4147, "\u0120appropri": 4148, "\u0120Read": 4149, "\u0120whom": 4150, "\u0120eye": 4151, "\u0120college": 4152, "\u012045": 4153, "irection": 4154, "\u0120ensure": 4155, "\u0120apparent": 4156, "iders": 4157, "\u0120religious": 4158, "\u0120minor": 4159, "olic": 4160, "\u0120tro": 4161, "\u0120Why": 4162, "ribute": 4163, "met": 4164, "\u0120primary": 4165, "\u0120developed": 4166, "\u0120peace": 4167, "\u0120skin": 4168, "ste": 4169, "ava": 4170, "\u0120blue": 4171, "\u0120families": 4172, "\u0120ir": 4173, "\u0120apply": 4174, "\u0120inform": 4175, "\u0120Smith": 4176, "CT": 4177, "ii": 4178, "\u0120limit": 4179, "\u0120resist": 4180, "................": 4181, "umn": 4182, "\u0120conflic": 4183, "\u0120twe": 4184, "udd": 4185, "\u0120Tom": 4186, "\u0120liter": 4187, "que": 4188, "bon": 4189, "\u0120hair": 4190, "\u0120eventually": 4191, "\u0120pus": 4192, "\u0120helped": 4193, "\u0120agg": 4194, "orney": 4195, "\u0120Apple": 4196, "\u0120fit": 4197, "\u0120Sur": 4198, "\u0120prem": 4199, "\u0120sales": 4200, "\u0120seconds": 4201, "\u0120strength": 4202, "\u0120feeling": 4203, "\u00bf\u00bd": 4204, "\u0120tour": 4205, "\u0120knows": 4206, "oom": 4207, "\u0120exerc": 4208, "\u0120somew": 4209, "\u00ef\u00bf\u00bd": 4210, ">>": 4211, "\u0120spokes": 4212, "\u0120ideas": 4213, "\u0120regist": 4214, "soft": 4215, "\u0120Del": 4216, "\u0120PC": 4217, "\u0120propos": 4218, "\u0120launch": 4219, "\u0120bottom": 4220, "TH": 4221, "\u0120Please": 4222, "vest": 4223, "itz": 4224, "\u0120Inter": 4225, "\u0120script": 4226, "\u0120rat": 4227, "arning": 4228, "\u0120il": 4229, "\u0120Jer": 4230, "\u0120Are": 4231, "\u0120whatever": 4232, "oken": 4233, "cience": 4234, "\u0120mode": 4235, "\u0120agree": 4236, "\u0120sources": 4237, "\u0120initial": 4238, "\u0120restrict": 4239, "\u0120wonder": 4240, "usion": 4241, "####": 4242, "\u0120Sil": 4243, "ville": 4244, "\u0120burn": 4245, "tw": 4246, "asion": 4247, "\u0120\u00c2\u00a3": 4248, "\u0120nor": 4249, "uing": 4250, "\u0120reached": 4251, "\u0120sun": 4252, "\u0120categ": 4253, "igration": 4254, "\u0120cook": 4255, "\u0120promot": 4256, "\u0120male": 4257, "\u0120climate": 4258, "\u0120fix": 4259, "\u0120alleged": 4260, "UR": 4261, "alled": 4262, "\u0120images": 4263, "Cont": 4264, "ota": 4265, "\u0120schools": 4266, "ios": 4267, "\u0120drop": 4268, "\u0120stream": 4269, "\u0120Mo": 4270, "\u0120previously": 4271, "aling": 4272, "\u0120pet": 4273, "\u0120double": 4274, "\u0120(@": 4275, "annel": 4276, "\u0120default": 4277, "ties": 4278, "\u0120rank": 4279, "\u0120Dec": 4280, "\u0120Council": 4281, "\u0120weapon": 4282, "\u0120stock": 4283, "\u0120analy": 4284, "\u0120Str": 4285, "\u0120picture": 4286, "\u0120Police": 4287, "ference": 4288, "\u0120century": 4289, "\u0120citizens": 4290, "\u0120onto": 4291, "\u0120expand": 4292, "\u0120hero": 4293, "\u0120Sol": 4294, "\u0120wild": 4295, "\u0120update": 4296, "\u0120customers": 4297, "ront": 4298, "def": 4299, "\u0120lik": 4300, "\u0120criminal": 4301, "\u0120Christian": 4302, "SP": 4303, "76": 4304, "\u0120leaving": 4305, "\u0120otherwise": 4306, "\u0120Dist": 4307, "\u0120basis": 4308, "52": 4309, "53": 4310, "icip": 4311, "\u0120Ber": 4312, "\u0120recommend": 4313, "\u0120floor": 
4314, "\u0120crowd": 4315, "oles": 4316, "\u012070": 4317, "\u0120central": 4318, "\u0120Ev": 4319, "\u0120dream": 4320, "\u0120download": 4321, "\u0120confir": 4322, "\u0120Thom": 4323, "\u0120window": 4324, "\u0120happens": 4325, "\u0120unit": 4326, "\u0120tend": 4327, "\u0120spl": 4328, "\u0120becomes": 4329, "\u0120fighting": 4330, "\u0120predict": 4331, "\u0120Press": 4332, "\u0120Power": 4333, "\u0120heavy": 4334, "aked": 4335, "\u0120fan": 4336, "orter": 4337, "ategy": 4338, "BA": 4339, "izes": 4340, "\u0120spend": 4341, "Here": 4342, "\u01202007": 4343, "\u0120adop": 4344, "\u0120Ham": 4345, "\u0120football": 4346, "\u0120Port": 4347, "oday": 4348, "51": 4349, "ampions": 4350, "\u0120transfer": 4351, "ht": 4352, "\u012038": 4353, "term": 4354, "acity": 4355, "\u0120bur": 4356, "],": 4357, "ternal": 4358, "rig": 4359, "but": 4360, "\u0120therefore": 4361, "\u0120Because": 4362, "resp": 4363, "rey": 4364, "\u0120mission": 4365, "Some": 4366, "\u0120noted": 4367, "\u0120assum": 4368, "\u0120disease": 4369, "\u0120edit": 4370, "\u0120progress": 4371, "rd": 4372, "\u0120Brown": 4373, "ocal": 4374, "\u0120adding": 4375, "\u0120raised": 4376, "\u0120Any": 4377, "\u0120tick": 4378, "\u0120seeing": 4379, "\u0120People": 4380, "\u0120agreement": 4381, "\u0120server": 4382, "\u0120wat": 4383, "\u0120debate": 4384, "\u0120supposed": 4385, "iling": 4386, "\u0120largest": 4387, "\u0120successful": 4388, "\u0120Pri": 4389, "\u0120Democratic": 4390, "\u0120jump": 4391, "\u0120Syria": 4392, "\u0120owners": 4393, "\u0120offers": 4394, "\u0120shooting": 4395, "\u0120effic": 4396, "sey": 4397, "\u0120haven": 4398, "verse": 4399, "tered": 4400, "\u0120Light": 4401, "imal": 4402, "\u0120Big": 4403, "\u0120defend": 4404, "\u0120beat": 4405, "\u0120records": 4406, "%)": 4407, "\u0120scen": 4408, "\u0120employees": 4409, "\u0120devices": 4410, "hem": 4411, "\u0120commer": 4412, "\u0120Mex": 4413, "\u0120benefit": 4414, "\u0120Prof": 4415, "\u0120illeg": 4416, "\u0120surface": 4417, "\u0120Also": 4418, "\u0120harm": 4419, "ingly": 4420, "wide": 4421, "\u0120Alex": 4422, "\u0120shut": 4423, "\u0120Cur": 4424, "\u0120lose": 4425, "pm": 4426, "\u0120challenge": 4427, "semb": 4428, "\u0120station": 4429, "\u0120intelligence": 4430, "\u0120accur": 4431, "\u0120Flor": 4432, "\u0120requires": 4433, "\u0120Mal": 4434, "bum": 4435, "\u0120hospital": 4436, "\u0120spirit": 4437, "\u0120offered": 4438, "\u0120produce": 4439, "\u0120Commun": 4440, "\u0120creating": 4441, "\u0120cris": 4442, "spect": 4443, "\u0120ended": 4444, "\u0120daily": 4445, "\u0120voters": 4446, "lands": 4447, "ias": 4448, "ih": 4449, "ona": 4450, "\u0120smart": 4451, "\u0120Office": 4452, "\u0120Lord": 4453, "rial": 4454, "\u0120Internet": 4455, "\u0120circum": 4456, "\u0120extremely": 4457, "'.": 4458, "\u0120opinion": 4459, "\u0120Mil": 4460, "\u0120gain": 4461, "BS": 4462, "\u0120Fin": 4463, "yp": 4464, "\u0120useful": 4465, "\u0120budget": 4466, "\u0120comfort": 4467, "isf": 4468, "\u0120background": 4469, "eline": 4470, "\u0120episode": 4471, "\u0120enemy": 4472, "\u0120trial": 4473, "\u0120establish": 4474, "date": 4475, "\u0120Cap": 4476, "\u0120continues": 4477, "\u0120showing": 4478, "\u0120Union": 4479, "with": 4480, "\u0120posted": 4481, "\u0120System": 4482, "\u0120eat": 4483, "rian": 4484, "\u0120rise": 4485, "\u0120Germany": 4486, "ils": 4487, "\u0120signed": 4488, "\u0120vill": 4489, "\u0120grand": 4490, "mor": 4491, "\u0120England": 4492, "\u0120projects": 4493, "umber": 4494, "\u0120conference": 4495, "za": 4496, 
"\u0120responsible": 4497, "\u0120Arab": 4498, "\u0120learned": 4499, "\u00e2\u0122\u0136\u00e2\u0122\u0136": 4500, "ipping": 4501, "\u0120George": 4502, "OC": 4503, "\u0120returned": 4504, "\u0120Australia": 4505, "\u0120brief": 4506, "Qu": 4507, "\u0120brand": 4508, "illing": 4509, "abled": 4510, "\u0120highest": 4511, "\u0120train": 4512, "\u0120Commission": 4513, "while": 4514, "\u0120nom": 4515, "ception": 4516, "\u0120mut": 4517, "\u0120Blue": 4518, "\u0120incident": 4519, "vant": 4520, "86": 4521, "\u0120ID": 4522, "\u0120nuclear": 4523, "74": 4524, "\u0120Like": 4525, "\u0120RE": 4526, "\u0120Micro": 4527, "li": 4528, "mail": 4529, "\u0120charges": 4530, "89": 4531, "\u0120adjust": 4532, "ado": 4533, "\u0120earth": 4534, "NA": 4535, "\u0120prices": 4536, "PA": 4537, "\u0120draft": 4538, "\u0120runs": 4539, "\u0120candidate": 4540, "enses": 4541, "\u0120management": 4542, "\u0120Phil": 4543, "\u0120Miss": 4544, "\u0120teach": 4545, "gram": 4546, "\u0120understanding": 4547, "ait": 4548, "icago": 4549, "Add": 4550, "\u0120Ep": 4551, "secut": 4552, "\u0120separate": 4553, "\u0120instance": 4554, "\u0120eth": 4555, "\u0120unless": 4556, "********": 4557, "\u0120Fore": 4558, "inate": 4559, "\u0120operations": 4560, "Sp": 4561, "\u0120faith": 4562, "gar": 4563, "\u0120Church": 4564, "ronic": 4565, "\u0120config": 4566, "osure": 4567, "\u0120activities": 4568, "\u0120traditional": 4569, "\u012036": 4570, "\u0120direction": 4571, "\u0120machine": 4572, "\u0120surround": 4573, "\u0120push": 4574, "unction": 4575, "\u0120EU": 4576, "\u0120easier": 4577, "\u0120argument": 4578, "GB": 4579, "\u0120micro": 4580, "\u0120spending": 4581, "izations": 4582, "\u0120theory": 4583, "adow": 4584, "\u0120calling": 4585, "\u0120Last": 4586, "\u0120der": 4587, "\u0120influence": 4588, "\u0120commit": 4589, "\u0120photo": 4590, "\u0120unc": 4591, "istry": 4592, "gn": 4593, "aste": 4594, "acks": 4595, "\u0120disp": 4596, "ady": 4597, "do": 4598, "\u0120Good": 4599, "\u0120`": 4600, "\u0120wish": 4601, "\u0120revealed": 4602, "\u00c2\u0142\u00c2\u0142": 4603, "lig": 4604, "\u0120enforce": 4605, "\u0120Committee": 4606, "\u0120chem": 4607, "\u0120miles": 4608, "\u0120interested": 4609, "\u0120solution": 4610, "icy": 4611, "inct": 4612, "\u0120->": 4613, "\u0120Det": 4614, "\u0120removed": 4615, "\u0120compar": 4616, "eah": 4617, "\u0120plant": 4618, "\u0120Since": 4619, "\u0120achieve": 4620, "\u0120advantage": 4621, "\u0120slightly": 4622, "bing": 4623, "\u0120placed": 4624, "under": 4625, "2015": 4626, "\u0120Mad": 4627, "\u0120tim": 4628, "oses": 4629, "\u0120cru": 4630, "\u0120Rock": 4631, "\u0120mostly": 4632, "\u0120negative": 4633, "\u0120setting": 4634, "\u0120produced": 4635, "\u0120mur": 4636, "\u0120connection": 4637, "\u0120Mer": 4638, "\u0120driver": 4639, "\u0120executive": 4640, "\u0120assault": 4641, "\u0120born": 4642, "\u0120Ver": 4643, "tained": 4644, "\u0120structure": 4645, "\u0120reduce": 4646, "\u0120decades": 4647, "\u0120ded": 4648, "uke": 4649, "\u0120Many": 4650, "idden": 4651, "\u0120league": 4652, "Se": 4653, "\u0120join": 4654, "\u0120disco": 4655, "\u0120die": 4656, "cks": 4657, "actions": 4658, "\u0120assess": 4659, "agn": 4660, "\u0120goals": 4661, "ours": 4662, "IR": 4663, "\u0120senior": 4664, "iller": 4665, "mod": 4666, "ipment": 4667, "ocol": 4668, "uy": 4669, "\u0120Que": 4670, "\u0120parties": 4671, "irgin": 4672, "\u0120learning": 4673, "itable": 4674, "\u0120street": 4675, "\u0120camera": 4676, "App": 4677, "\u0120skills": 4678, "bre": 4679, "cious": 4680, 
"\u0120celebr": 4681, "\u0120Franc": 4682, "\u0120existing": 4683, "\u0120willing": 4684, "lor": 4685, "\u0120id": 4686, "\u0120Space": 4687, "\u0120critical": 4688, "\u0120La": 4689, "ortunately": 4690, "\u0120serve": 4691, "\u0120cold": 4692, "\u0120species": 4693, "TS": 4694, "\u0120animals": 4695, "\u0120Bay": 4696, "\u0120older": 4697, "\u0120Under": 4698, "estic": 4699, "\u0120Tre": 4700, "\u0120teacher": 4701, "\u0120prefer": 4702, "vis": 4703, "\u0120thread": 4704, "\u0120Matt": 4705, "\u0120manager": 4706, "\u00e3\u0125\u00bb": 4707, "\u0120professional": 4708, "\u0120Vol": 4709, "\u0120notes": 4710, "These": 4711, "ula": 4712, "\u0120fresh": 4713, "ented": 4714, "uzz": 4715, "edy": 4716, "clusion": 4717, "\u0120Rel": 4718, "\u0120doubt": 4719, "EO": 4720, "\u0120opened": 4721, "\u0120Bit": 4722, "Advertisement": 4723, "\u0120guess": 4724, "\u0120UN": 4725, "\u0120sequ": 4726, "\u0120explain": 4727, "otten": 4728, "\u0120attract": 4729, "aks": 4730, "\u0120string": 4731, "\u0120context": 4732, "ossible": 4733, "\u0120Republicans": 4734, "\u0120solid": 4735, "\u0120cities": 4736, "\u0120asking": 4737, "\u0120random": 4738, "ups": 4739, "uries": 4740, "arant": 4741, "dden": 4742, "gl": 4743, "\u0120Florida": 4744, "\u0120depend": 4745, "\u0120Scott": 4746, "\u012033": 4747, "\u0120iT": 4748, "icon": 4749, "\u0120mentioned": 4750, "\u01202000": 4751, "\u0120claimed": 4752, "\u0120definitely": 4753, "ulf": 4754, "\u0120core": 4755, "\u0120opening": 4756, "\u0120Const": 4757, "which": 4758, "\u0120Tra": 4759, "AG": 4760, "72": 4761, "\u0120believed": 4762, "ada": 4763, "\u012048": 4764, "\u0120Security": 4765, "yright": 4766, "\u0120Pet": 4767, "\u0120Lou": 4768, "\u0120holding": 4769, "================": 4770, "\u0120ice": 4771, "\u0120brow": 4772, "\u0120authorities": 4773, "host": 4774, "word": 4775, "\u0120score": 4776, "\u0120Div": 4777, "\u0120cells": 4778, "\u0120transl": 4779, "\u0120neighbor": 4780, "\u0120remove": 4781, "uct": 4782, "\u0120district": 4783, "\u0120According": 4784, "\u0120worse": 4785, "\u0120concerns": 4786, "\u0120presidential": 4787, "\u0120policies": 4788, "\u0120Hall": 4789, "73": 4790, "\u0120hus": 4791, "AY": 4792, "\u01202006": 4793, "\u0120Jud": 4794, "\u0120independent": 4795, "\u0120Justice": 4796, "iliar": 4797, "print": 4798, "ighter": 4799, "\u0120protection": 4800, "zen": 4801, "\u0120sudden": 4802, "house": 4803, "\u0120Jes": 4804, "PR": 4805, "\u0120Inf": 4806, "\u0120bul": 4807, "\u0120_": 4808, "\u0120Service": 4809, "\u0120PR": 4810, "\u0120strategy": 4811, "ffect": 4812, "\u0120girls": 4813, "\u0120missing": 4814, "oyal": 4815, "\u0120Team": 4816, "ulated": 4817, "\u0120dat": 4818, "\u0120politics": 4819, "abor": 4820, "According": 4821, "\u0120spell": 4822, "\u0120graph": 4823, "orthern": 4824, "TC": 4825, "Ab": 4826, "\u0120labor": 4827, "isher": 4828, "\u0120kick": 4829, "\u0120iTunes": 4830, "\u0120steps": 4831, "poses": 4832, "\u0120smaller": 4833, "En": 4834, "bert": 4835, "\u0120roll": 4836, "\u0120researchers": 4837, "\u0120closed": 4838, "\u0120transport": 4839, "\u0120lawy": 4840, "________________": 4841, "\u0120Chicago": 4842, "\u0120aspect": 4843, "\u0120none": 4844, "\u0120marriage": 4845, "96": 4846, "\u0120elements": 4847, "\u0120Fre": 4848, "\u0120Sal": 4849, "\u0120dram": 4850, "FC": 4851, "top": 4852, "equ": 4853, "\u0120hearing": 4854, "\u0120supported": 4855, "\u0120testing": 4856, "cohol": 4857, "\u0120massive": 4858, "\u0120stick": 4859, "\u0120guard": 4860, "isco": 4861, "phone": 4862, "From": 4863, "However": 4864, 
"\u0120border": 4865, "\u0120copy": 4866, "ography": 4867, "list": 4868, "71": 4869, "\u0120owner": 4870, "class": 4871, "ruit": 4872, "rate": 4873, "\u0120Once": 4874, "\u0120digital": 4875, "\u0120task": 4876, "ERS": 4877, "\u0120incred": 4878, "tes": 4879, "++": 4880, "\u0120France": 4881, "\u0120breat": 4882, "owl": 4883, "\u0120issued": 4884, "\u0120Western": 4885, "\u0120detect": 4886, "\u0120partners": 4887, "\u0120shared": 4888, "\u0120Call": 4889, "\u0120cancer": 4890, "ache": 4891, "ribe": 4892, "\u0120explained": 4893, "\u0120heat": 4894, "{\"": 4895, "\u0120investment": 4896, "\u0120Book": 4897, "\u0120wood": 4898, "\u0120tools": 4899, "\u0120Although": 4900, "\u0120belief": 4901, "\u0120crisis": 4902, "\u0120ge": 4903, "\u0120MP": 4904, "\u0120operation": 4905, "type": 4906, "~~": 4907, "ga": 4908, "\u0120contains": 4909, "anta": 4910, "\u0120express": 4911, "\u0120Group": 4912, "\u0120Journal": 4913, "ka": 4914, "\u0120amb": 4915, "\u0120USA": 4916, "\u0120finding": 4917, "\u0120funding": 4918, "how": 4919, "\u0120established": 4920, "ideos": 4921, "\u0120degree": 4922, "\u0120dangerous": 4923, "anging": 4924, "\u0120freedom": 4925, "pport": 4926, "outhern": 4927, "\u0120church": 4928, "\u0120catch": 4929, "\u0120Two": 4930, "\u0120presence": 4931, "\u0120Guard": 4932, "Up": 4933, "\u0120authority": 4934, "\u0120Project": 4935, "\u0120button": 4936, "\u0120consequ": 4937, "\u0120valid": 4938, "\u0120weak": 4939, "\u0120starts": 4940, "\u0120reference": 4941, "\u0120Mem": 4942, "\")": 4943, "UN": 4944, "orage": 4945, "\u0120Open": 4946, "\u0120collection": 4947, "ym": 4948, "gency": 4949, "\u0120beautiful": 4950, "ros": 4951, "\u0120tells": 4952, "\u0120waiting": 4953, "nel": 4954, "\u0120providing": 4955, "\u0120Democrats": 4956, "\u0120daughter": 4957, "\u0120master": 4958, "\u0120purposes": 4959, "\u0120Japanese": 4960, "\u0120equal": 4961, "\u0120turns": 4962, "\u0120documents": 4963, "\u0120watching": 4964, "Res": 4965, "\u0120ran": 4966, "2014": 4967, "\u0120reject": 4968, "\u0120Korea": 4969, "\u0120victims": 4970, "Level": 4971, "erences": 4972, "\u0120witness": 4973, "\u012034": 4974, "\u0120reform": 4975, "coming": 4976, "\u0120occup": 4977, "\u0120caught": 4978, "\u0120traffic": 4979, "ading": 4980, "\u0120models": 4981, "ario": 4982, "\u0120served": 4983, "\u0120batter": 4984, "uate": 4985, "\u0120Secretary": 4986, "\u0120agreed": 4987, "\u0120truly": 4988, "ynam": 4989, "\u0120Ret": 4990, "\u0120units": 4991, "\u0120Research": 4992, "hand": 4993, "azine": 4994, "\u0120Mike": 4995, "\u0120variety": 4996, "otal": 4997, "\u0120amazing": 4998, "\u0120confirmed": 4999, "\u0120entirely": 5000, "\u0120purchase": 5001, "\u0120element": 5002, "\u0120cash": 5003, "\u0120determine": 5004, "De": 5005, "\u0120cars": 5006, "\u0120Wall": 5007, "\u00e2\u0138": 5008, "\u0120views": 5009, "\u0120drugs": 5010, "\u0120department": 5011, "\u0120Step": 5012, "uit": 5013, "\u012039": 5014, "asure": 5015, "\u0120Class": 5016, "\u0120covered": 5017, "\u0120Bank": 5018, "\u0120mere": 5019, "uana": 5020, "\u0120multi": 5021, "\u0120mix": 5022, "\u0120unlike": 5023, "levision": 5024, "\u0120stopped": 5025, "\u0120sem": 5026, "\u0120Gal": 5027, "ules": 5028, "\u0120wel": 5029, "\u0120Johnson": 5030, "la": 5031, "\u0120skill": 5032, "\u0120becoming": 5033, "rie": 5034, "\u0120appropriate": 5035, "fe": 5036, "ellow": 5037, "\u0120Prot": 5038, "ulate": 5039, "ocation": 5040, "\u0120weekend": 5041, "odies": 5042, "\u0120sites": 5043, "\u0120animal": 5044, "\u0120Tim": 5045, "\u0120scale": 5046, 
"\u0120charged": 5047, "\u0120instruct": 5048, "illa": 5049, "\u0120methods": 5050, "\u0120cert": 5051, "\u0120judge": 5052, "\u0120Hel": 5053, "\u0120dollars": 5054, "\u0120standing": 5055, "\u0120Squ": 5056, "\u0120debt": 5057, "liam": 5058, "\u0120driving": 5059, "\u0120Sum": 5060, "\u0120Edition": 5061, "\u0120album": 5062, "andon": 5063, "IF": 5064, "\u0120Uk": 5065, "63": 5066, "ader": 5067, "\u0120commercial": 5068, "esh": 5069, "\u0120Government": 5070, "\u0120discovered": 5071, "\u0120output": 5072, "\u0120Hillary": 5073, "\u0120Carol": 5074, "\u01202005": 5075, "\u0120abuse": 5076, "ancing": 5077, "\u0120switch": 5078, "\u0120annual": 5079, "Tw": 5080, "\u0120stated": 5081, "agement": 5082, "inner": 5083, "\u0120democr": 5084, "\u0120residents": 5085, "\u0120allowing": 5086, "\u0120factors": 5087, "odd": 5088, "\u0120fuck": 5089, "emies": 5090, "\u0120occurred": 5091, "oti": 5092, "\u0120north": 5093, "\u0120Public": 5094, "\u0120injury": 5095, "\u0120insurance": 5096, "CL": 5097, "olly": 5098, "\u00e3\u0122": 5099, "\u0120repeated": 5100, "\u0120arms": 5101, "anged": 5102, "\u0120construction": 5103, "\u0120fle": 5104, "PU": 5105, "icians": 5106, "\u0120forms": 5107, "\u0120McC": 5108, "antic": 5109, "\u0120mental": 5110, "pire": 5111, "\u0120equipment": 5112, "\u0120fant": 5113, "\u0120discussion": 5114, "\u0120regarding": 5115, "kin": 5116, "arp": 5117, "\u0120chair": 5118, "ogue": 5119, "\u0120proceed": 5120, "\u0120Id": 5121, "Our": 5122, "\u0120murder": 5123, "Man": 5124, "\u012049": 5125, "asp": 5126, "\u0120supply": 5127, "\u0120input": 5128, "\u0120wealth": 5129, "liament": 5130, "\u0120proced": 5131, "orial": 5132, "\u0120Stat": 5133, "\u0120NFL": 5134, "hens": 5135, "\u0120Institute": 5136, "\u0120putting": 5137, "ournament": 5138, "etic": 5139, "\u0120located": 5140, "\u0120kid": 5141, "eria": 5142, "run": 5143, "\u0120princ": 5144, "\u0120!": 5145, "going": 5146, "\u0120Bet": 5147, "\u0120clot": 5148, "\u0120telling": 5149, "\u0120proposed": 5150, "iot": 5151, "orry": 5152, "\u0120funds": 5153, "gment": 5154, "\u0120Life": 5155, "\u0120baby": 5156, "\u0120Back": 5157, "\u0120spoke": 5158, "Image": 5159, "\u0120earn": 5160, "\u0120AT": 5161, "gu": 5162, "\u0120exchange": 5163, "\u0120Lin": 5164, "oving": 5165, "\u0120pair": 5166, "More": 5167, "azon": 5168, "\u0120arrested": 5169, "\u0120killing": 5170, "can": 5171, "\u0120Card": 5172, "yd": 5173, "\u0120identified": 5174, "\u0120mobile": 5175, "\u0120thanks": 5176, "onym": 5177, "\u0120Form": 5178, "\u0120hundreds": 5179, "\u0120Chris": 5180, "\u0120Cat": 5181, "\u0120trend": 5182, "hat": 5183, "\u0120Av": 5184, "oman": 5185, "\u0120electric": 5186, "\u0120Wil": 5187, "SE": 5188, "Of": 5189, "\u0120restaur": 5190, "oted": 5191, "\u0120trig": 5192, "\u0120nine": 5193, "\u0120bomb": 5194, "Why": 5195, "\u00c2\u00af": 5196, "\u0120coverage": 5197, "\u0120appeal": 5198, "\u0120Robert": 5199, "\u0120Sup": 5200, "\u0120finished": 5201, "\u0120flow": 5202, "\u0120deliver": 5203, "\u0120calcul": 5204, "\u0120photos": 5205, "\u0120phil": 5206, "\u0120pieces": 5207, "\u0120appre": 5208, "kes": 5209, "\u0120rough": 5210, "Do": 5211, "\u0120partner": 5212, "\u0120concerned": 5213, "\u012037": 5214, "\u0120Gen": 5215, "Col": 5216, "ctors": 5217, "\u0120=>": 5218, "state": 5219, "\u0120suggested": 5220, "\u0120Force": 5221, "CE": 5222, "\u0120herself": 5223, "\u0120Plan": 5224, "works": 5225, "ooth": 5226, "rency": 5227, "\u0120corner": 5228, "\u0120husband": 5229, "\u0120internet": 5230, "\u0120Aut": 5231, "ems": 5232, "osen": 
5233, "\u0120Atl": 5234, "gen": 5235, "\u0120balance": 5236, "62": 5237, "\u0120sounds": 5238, "text": 5239, "\u0120arr": 5240, "oves": 5241, "\u0120millions": 5242, "\u0120radio": 5243, "\u0120satisf": 5244, "\u0120Dam": 5245, "Mr": 5246, "Go": 5247, "Spe": 5248, "\u0120combat": 5249, "rant": 5250, "\u0120Gree": 5251, "\u0120fuel": 5252, "\u0120distance": 5253, "\u0120tests": 5254, "\u0120decre": 5255, "\u0120Er": 5256, "\u0120managed": 5257, "DS": 5258, "\u0120tit": 5259, "\u0120measures": 5260, "\u0120Liber": 5261, "\u0120attend": 5262, "ashed": 5263, "\u0120Jose": 5264, "\u0120Night": 5265, "dit": 5266, "\u0120Nov": 5267, "\u0120End": 5268, "outs": 5269, "\u0120generation": 5270, "\u0120advoc": 5271, "yth": 5272, "\u0120conversation": 5273, "\u0120Sky": 5274, "active": 5275, "cel": 5276, "rier": 5277, "\u0120Frank": 5278, "\u0120gender": 5279, "\u0120concent": 5280, "\u0120carried": 5281, "anda": 5282, "\u0120Virgin": 5283, "\u0120arrived": 5284, "icide": 5285, "aded": 5286, "\u0120failure": 5287, "\u0120minimum": 5288, "lets": 5289, "\u0120worst": 5290, "\u0120keeping": 5291, "\u0120intended": 5292, "\u0120illegal": 5293, "\u0120subsc": 5294, "\u0120determined": 5295, "\u0120trip": 5296, "Yes": 5297, "\u0120raise": 5298, "\u0120~": 5299, "\u0120feels": 5300, "\u0120package": 5301, "\u0120Jo": 5302, "hi": 5303, "2016": 5304, "real": 5305, "\u0120fra": 5306, "\u0120symb": 5307, "Me": 5308, "ucky": 5309, "pret": 5310, "\u0120Kh": 5311, "\u0120Edit": 5312, "\u0120Web": 5313, "emic": 5314, "\u0120Color": 5315, "\u0120justice": 5316, "Int": 5317, "\u0120farm": 5318, "cknow": 5319, "\">": 5320, "eless": 5321, "\u0120reduced": 5322, "\u0120500": 5323, "xx": 5324, "\u0120Rad": 5325, "\u0120Wood": 5326, "\u0120clin": 5327, "\u0120hyp": 5328, "iler": 5329, "ura": 5330, "kins": 5331, "85": 5332, "61": 5333, "\u0120Their": 5334, "\u0120Mary": 5335, "\u0120san": 5336, "\u0120novel": 5337, "\u0120Who": 5338, "\u0120capacity": 5339, "\u0120impossible": 5340, "\u0120plays": 5341, "\u0120minister": 5342, "ijuana": 5343, "icate": 5344, "\u0120Set": 5345, "\u0120fram": 5346, "\u0120ing": 5347, "\u0120communities": 5348, "\u0120FBI": 5349, "ita": 5350, "\u0120bon": 5351, "\u0120strateg": 5352, "\u0120interests": 5353, "lock": 5354, "gers": 5355, "mas": 5356, "\u0120AND": 5357, "\u0120conflict": 5358, "\u0120requirements": 5359, "\u0120sac": 5360, "\u0120operating": 5361, "ini": 5362, "related": 5363, "\u0120committed": 5364, "\u0120relatively": 5365, "\u0120south": 5366, "\u00c2\u00af\u00c2\u00af": 5367, "\u0120afford": 5368, "\u0120identity": 5369, "\u0120decisions": 5370, "\u0120accused": 5371, "place": 5372, "\u0120victory": 5373, "och": 5374, "iat": 5375, "Name": 5376, "Com": 5377, "tion": 5378, "eds": 5379, "\u0120seek": 5380, "\u0120tight": 5381, "\u0120Images": 5382, "\u0120initi": 5383, "\u0120humans": 5384, "\u0120familiar": 5385, "\u0120audience": 5386, "\u0120internal": 5387, "venture": 5388, "\u0120sides": 5389, "\u0120TO": 5390, "\u0120dim": 5391, "\u0120conclud": 5392, "\u0120appoint": 5393, "\u0120enforcement": 5394, "\u0120Jim": 5395, "\u0120Association": 5396, "\u0120circumst": 5397, "\u0120Canadian": 5398, "\u0120joined": 5399, "\u0120differences": 5400, "\u0120Los": 5401, "\u0120protest": 5402, "\u0120twice": 5403, "win": 5404, "\u0120glass": 5405, "arsh": 5406, "\u0120Army": 5407, "\u0120expression": 5408, "\u0120decide": 5409, "\u0120planning": 5410, "ania": 5411, "\u0120handle": 5412, "\u0120Microsoft": 5413, "\u0120Nor": 5414, "\u0120maximum": 5415, "\u0120Rev": 5416, "\u0120sea": 
5417, "\u0120eval": 5418, "\u0120helps": 5419, "ref": 5420, "\u0120bound": 5421, "\u0120mouth": 5422, "\u0120standards": 5423, "\u0120clim": 5424, "\u0120Camp": 5425, "\u0120Fox": 5426, "cles": 5427, "\u0120army": 5428, "\u0120Techn": 5429, "acking": 5430, "xy": 5431, "SS": 5432, "\u012042": 5433, "\u0120bug": 5434, "\u0120Ukrain": 5435, "\u0120Max": 5436, "\u0120Jones": 5437, "\u0120Show": 5438, "lo": 5439, "\u0120planet": 5440, "\u012075": 5441, "\u0120winning": 5442, "\u0120faster": 5443, "\u0120spect": 5444, "\u0120broken": 5445, "TR": 5446, "\u0120defined": 5447, "\u0120healthy": 5448, "\u0120competition": 5449, "https": 5450, "\u0120Island": 5451, "\u0120Fe": 5452, "\u0120announce": 5453, "\u0120Cup": 5454, "\u0120Instead": 5455, "\u0120client": 5456, "\u0120possibly": 5457, "section": 5458, "ocket": 5459, "look": 5460, "\u0120finish": 5461, "\u0120crew": 5462, "\u0120reserv": 5463, "\u0120editor": 5464, "\u0120hate": 5465, "\u0120sale": 5466, "\u0120controvers": 5467, "\u0120pages": 5468, "wing": 5469, "\u0120numer": 5470, "\u0120opposition": 5471, "\u01202004": 5472, "\u0120refuge": 5473, "\u0120flight": 5474, "\u0120apart": 5475, "\u0120Lat": 5476, "Americ": 5477, "\u0120Africa": 5478, "\u0120applications": 5479, "\u0120Palest": 5480, "\u0120Bur": 5481, "\u0120gar": 5482, "\u0120Social": 5483, "\u0120upgr": 5484, "\u0120shape": 5485, "\u0120speaking": 5486, "ansion": 5487, "ao": 5488, "\u0120Sn": 5489, "\u0120worry": 5490, "\u0120Britain": 5491, "Please": 5492, "roud": 5493, "\u0120hun": 5494, "\u0120introduced": 5495, "\u0120diet": 5496, "Ind": 5497, "\u0120Second": 5498, "\u0120functions": 5499, "uts": 5500, "\u0120Each": 5501, "\u0120Jeff": 5502, "\u0120stress": 5503, "\u0120accounts": 5504, "\u0120guarant": 5505, "\u0120Ann": 5506, "edia": 5507, "\u0120honest": 5508, "\u0120tree": 5509, "\u0120African": 5510, "\u0120Bush": 5511, "},": 5512, "\u0120sch": 5513, "\u0120Only": 5514, "\u0120fif": 5515, "igan": 5516, "\u0120exercise": 5517, "\u0120Exp": 5518, "\u0120scientists": 5519, "\u0120legislation": 5520, "\u0120Work": 5521, "\u0120Spr": 5522, "\u00c3\u0124": 5523, "\u0120Human": 5524, "\u0120\u00e8": 5525, "\u0120survey": 5526, "\u0120rich": 5527, "rip": 5528, "\u0120maintain": 5529, "\u0120flo": 5530, "\u0120leadership": 5531, "stream": 5532, "\u0120Islamic": 5533, "\u012001": 5534, "\u0120College": 5535, "\u0120magic": 5536, "\u0120Prime": 5537, "\u0120figures": 5538, "2017": 5539, "inder": 5540, "xual": 5541, "\u0120Dead": 5542, "\u0120absolutely": 5543, "\u0120fourth": 5544, "\u0120presented": 5545, "respond": 5546, "rible": 5547, "\u0120alcohol": 5548, "ato": 5549, "\u0120DE": 5550, "porary": 5551, "\u0120grab": 5552, "\u0120vari": 5553, "\u0120quant": 5554, "\u0120Photo": 5555, "\u0120plus": 5556, "rick": 5557, "arks": 5558, "\u0120alternative": 5559, "\u0120pil": 5560, "\u0120approx": 5561, "that": 5562, "\u0120objects": 5563, "\u0120Ro": 5564, "\u0120Android": 5565, "\u0120significantly": 5566, "\u0120Road": 5567, "kay": 5568, "Read": 5569, "avor": 5570, "\u0120acknow": 5571, "\u0120HD": 5572, "\u0120Sing": 5573, "Or": 5574, "\u0120Mont": 5575, "\u0120uns": 5576, "prof": 5577, "\u0120negoti": 5578, "\u0120Arch": 5579, "iki": 5580, "\u0120television": 5581, "\u0120Jewish": 5582, "\u0120committee": 5583, "\u0120motor": 5584, "\u0120appearance": 5585, "\u0120sitting": 5586, "\u0120strike": 5587, "\u0120Down": 5588, "comp": 5589, "\u0120Hist": 5590, "\u0120fold": 5591, "acement": 5592, "\u0120Louis": 5593, "\u0120belong": 5594, "\u0120\u00e2\u0122\u00a2": 5595, 
"\u0120mort": 5596, "\u0120prepared": 5597, "\u012064": 5598, "\u0120Master": 5599, "\u0120indeed": 5600, "\u0120Den": 5601, "\u0120rent": 5602, "TA": 5603, "ourney": 5604, "arc": 5605, "Su": 5606, "97": 5607, "\u0120advice": 5608, "\u0120changing": 5609, "\u0120listed": 5610, "\u0120launched": 5611, "isation": 5612, "\u0120Peter": 5613, "ishes": 5614, "\u0120lived": 5615, "\u0120Mel": 5616, "\u0120Supreme": 5617, "\u0120Federal": 5618, "\u0120);": 5619, "ructure": 5620, "\u0120sets": 5621, "\u0120philos": 5622, "uous": 5623, "\u0120\u00c2\u0142": 5624, "\u0120applied": 5625, "\u0120NOT": 5626, "\u0120housing": 5627, "\u0120Mount": 5628, "\u0120odd": 5629, "\u0120sust": 5630, "DA": 5631, "fficient": 5632, "\u0120?": 5633, "olved": 5634, "\u0120powers": 5635, "\u0120thr": 5636, "\u0120remaining": 5637, "\u0120Water": 5638, "LC": 5639, "\u0120causes": 5640, "\u00e3\u0123\u00ae": 5641, "\u0120manner": 5642, "ads": 5643, "\u0120suggests": 5644, "\u0120ends": 5645, "standing": 5646, "fig": 5647, "\u0120Dun": 5648, "idth": 5649, "\u0120gay": 5650, "\u0120termin": 5651, "\u0120Angeles": 5652, "MS": 5653, "\u0120scientific": 5654, "\u0120coal": 5655, "apers": 5656, "bar": 5657, "\u0120Thomas": 5658, "\u0120sym": 5659, "\u0120Run": 5660, "this": 5661, "PC": 5662, "igrants": 5663, "\u0120minute": 5664, "\u0120District": 5665, "cellent": 5666, "\u0120leaves": 5667, "\u0120completed": 5668, "amin": 5669, "\u0120focused": 5670, "\u0120monitor": 5671, "\u0120vehicles": 5672, "MA": 5673, "\u0120Mass": 5674, "\u0120Grand": 5675, "\u0120affected": 5676, "itutional": 5677, "\u0120construct": 5678, "\u0120follows": 5679, "\u0120ton": 5680, "reens": 5681, "\u0120homes": 5682, "\u0120Ext": 5683, "\u0120Level": 5684, "rast": 5685, "\u0120Ir": 5686, "\u0120elim": 5687, "\u0120largely": 5688, "\u0120Joe": 5689, "\u0120votes": 5690, "alls": 5691, "\u0120businesses": 5692, "\u0120Foundation": 5693, "\u0120Central": 5694, "\u0120yards": 5695, "\u0120materials": 5696, "ulner": 5697, "\u0120guide": 5698, "\u0120closer": 5699, "ums": 5700, "\u0120sports": 5701, "eder": 5702, "Just": 5703, "\u0120taxes": 5704, "84": 5705, "\u0120Old": 5706, "\u0120decade": 5707, "ola": 5708, "\u0120vir": 5709, "\u0120dropped": 5710, "\u0120delay": 5711, "itect": 5712, "\u0120secure": 5713, "stein": 5714, "level": 5715, "\u0120treated": 5716, "\u0120filed": 5717, "aine": 5718, "\u0120van": 5719, "\u0120mir": 5720, "\u0120column": 5721, "icted": 5722, "eper": 5723, "\u0120rot": 5724, "\u0120consult": 5725, "\u0120entry": 5726, "\u0120marijuana": 5727, "\u0120Dou": 5728, "\u0120apparently": 5729, "oking": 5730, "clusive": 5731, "\u0120increases": 5732, "ano": 5733, "\u0120specifically": 5734, "\u0120tele": 5735, "ensions": 5736, "\u0120religion": 5737, "abilities": 5738, "\u0120frame": 5739, "\u0120Note": 5740, "\u0120Lee": 5741, "\u0120helping": 5742, "\u0120edge": 5743, "oston": 5744, "\u0120organizations": 5745, "\u00c3\u0125": 5746, "\u0120Both": 5747, "hips": 5748, "\u0120bigger": 5749, "\u0120boost": 5750, "\u0120Stand": 5751, "\u0120row": 5752, "uls": 5753, "abase": 5754, "\u0120rid": 5755, "Let": 5756, "aren": 5757, "rave": 5758, "\u0120stret": 5759, "PD": 5760, "\u0120vision": 5761, "\u0120wearing": 5762, "\u0120appreci": 5763, "\u0120award": 5764, "\u0120Use": 5765, "\u0120factor": 5766, "war": 5767, "ulations": 5768, ")(": 5769, "\u0120god": 5770, "\u0120territ": 5771, "\u0120param": 5772, "asts": 5773, "87": 5774, "\u0120enemies": 5775, "\u0120Games": 5776, "FF": 5777, "\u0120accident": 5778, "Well": 5779, "\u0120Martin": 5780, 
"TER": 5781, "\u0120ath": 5782, "\u0120Hell": 5783, "\u0120forg": 5784, "\u0120veter": 5785, "\u0120Medic": 5786, "free": 5787, "\u0120stars": 5788, "\u0120expensive": 5789, "\u0120acad": 5790, "rawn": 5791, "\u0120Whe": 5792, "\u0120lock": 5793, "\u0120format": 5794, "\u0120soldiers": 5795, "sm": 5796, "\u0120agent": 5797, "\u0120responsibility": 5798, "ora": 5799, "\u0120Science": 5800, "\u0120rapid": 5801, "\u0120tough": 5802, "\u0120Jesus": 5803, "\u0120believes": 5804, "ML": 5805, "\u0120wear": 5806, "lete": 5807, "\u00c3\u0125\u00c3\u0124": 5808, "\u0120Dri": 5809, "\u0120commission": 5810, "\u0120Bob": 5811, "Oh": 5812, "aped": 5813, "\u0120warm": 5814, "\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124": 5815, "\u01202003": 5816, "ortion": 5817, "\u0120hasn": 5818, "uster": 5819, "\u0120univers": 5820, "\u0120Ill": 5821, "\u0120king": 5822, "ologies": 5823, "94": 5824, "\u0120Tem": 5825, "\u0120Mos": 5826, "\u0120patient": 5827, "\u0120Mexico": 5828, "cean": 5829, "\u0120Death": 5830, "\u0120Sanders": 5831, "you": 5832, "\u0120Cast": 5833, "\u0120Company": 5834, "pty": 5835, "\u0120happening": 5836, "FP": 5837, "\u0120Battle": 5838, "\u0120bought": 5839, "Am": 5840, "Mod": 5841, "Us": 5842, "uters": 5843, "\u0120Cre": 5844, "\u0120Those": 5845, "\u012044": 5846, "iser": 5847, "\u0120soul": 5848, "\u0120Top": 5849, "\u0120Harry": 5850, "\u0120Aw": 5851, "\u0120seat": 5852, "ffee": 5853, "\u0120revolution": 5854, "\u0120(\"": 5855, "\u0120During": 5856, "ette": 5857, "\u0120ring": 5858, "\u0120offensive": 5859, "\u0120returns": 5860, "\u0120videos": 5861, "\u0120discl": 5862, "\u0120famous": 5863, "enced": 5864, "\u0120Sign": 5865, "\u0120River": 5866, "\u0120300": 5867, "PM": 5868, "\u0120Bus": 5869, "\u0120CH": 5870, "\u0120candidates": 5871, "arden": 5872, "\u0120percentage": 5873, "\u0120visual": 5874, "\u0120thank": 5875, "\u0120trouble": 5876, "nergy": 5877, "\u01202001": 5878, "\u0120prove": 5879, "ashion": 5880, "\u0120enh": 5881, "\u0120Long": 5882, "UM": 5883, "\u0120connected": 5884, "\u0120possibility": 5885, "Over": 5886, "\u0120expert": 5887, "\u0120library": 5888, "arts": 5889, "\u0120Director": 5890, "\u0120fellow": 5891, "92": 5892, "irty": 5893, "\u0120dry": 5894, "\u0120signs": 5895, "\u0120Love": 5896, "\u0120quiet": 5897, "foot": 5898, "\u0120pure": 5899, "\u0120Hun": 5900, "\u0120filled": 5901, "phas": 5902, "\u0120Elect": 5903, "endment": 5904, "\u0120Expl": 5905, "\u0120unable": 5906, "ns": 5907, "mo": 5908, "\u0120vast": 5909, "obe": 5910, "\u0120identify": 5911, "apping": 5912, "\u0120Carolina": 5913, "gress": 5914, "\u0120prote": 5915, "\u0120fish": 5916, "\u0120circumstances": 5917, "razy": 5918, "\u0120Phot": 5919, "\u0120bodies": 5920, "\u0120Mur": 5921, "\u0120developing": 5922, "\u0120AR": 5923, "\u0120experienced": 5924, "\u0120substant": 5925, "\u0120Board": 5926, "esome": 5927, "\u0120domestic": 5928, "\u0120combined": 5929, "\u0120Put": 5930, "\u0120chemical": 5931, "\u0120Child": 5932, "\u0120pool": 5933, "\u0120Cy": 5934, "\u0120egg": 5935, "cons": 5936, "sters": 5937, "\u0120hurt": 5938, "\u0120markets": 5939, "\u0120conservative": 5940, "\u0120supporters": 5941, "\u0120agencies": 5942, "idel": 5943, "Ob": 5944, "urb": 5945, "\u012043": 5946, "\u0120Defense": 5947, "ye": 5948, "\u0120Ap": 5949, "dule": 5950, "\u0120temperature": 5951, "\u0120conducted": 5952, "\u0120Chief": 5953, "\u0120pulled": 5954, "\u0120fol": 5955, "Last": 5956, "onto": 5957, "osis": 5958, "VER": 5959, "Des": 5960, "\u0120Pan": 5961, "First": 5962, "\u0120advance": 5963, 
"\u0120license": 5964, "rors": 5965, "\u0120Jon": 5966, "\u0120imagine": 5967, "\u0120hell": 5968, "\u0120fixed": 5969, "\u0120incor": 5970, "osite": 5971, "\u0120Log": 5972, "icken": 5973, "]:": 5974, "\u0120surprise": 5975, "hab": 5976, "\u0120craft": 5977, "olt": 5978, "\u0120Jul": 5979, "\u0120dial": 5980, "\u0120relevant": 5981, "\u0120entered": 5982, "\u0120leads": 5983, "\u0120AD": 5984, "\u0120Clean": 5985, "\u0120pictures": 5986, "essor": 5987, "\u0120alt": 5988, "\u0120paying": 5989, "Per": 5990, "\u0120Market": 5991, "\u0120updates": 5992, "amily": 5993, "\u0120Type": 5994, "\u0120Home": 5995, "\u012055": 5996, "sembly": 5997, "rome": 5998, "83": 5999, "\u0120greatest": 6000, "\u0120height": 6001, "\u0120heav": 6002, "aints": 6003, "\u0120listen": 6004, "aser": 6005, "\u0120SH": 6006, "\u0120capable": 6007, "acle": 6008, "\u0120perspect": 6009, "inating": 6010, "\u0120offering": 6011, "rypt": 6012, "\u0120Develop": 6013, "abin": 6014, "rc": 6015, "\u0120bright": 6016, "alty": 6017, "arrow": 6018, "\u0120suppl": 6019, "inding": 6020, "acked": 6021, "gypt": 6022, "\u0120Another": 6023, "pg": 6024, "\u0120Virginia": 6025, "\u0120Lu": 6026, "\u0120planned": 6027, "\u0120pit": 6028, "\u0120sweet": 6029, "Type": 6030, "\u0120Di": 6031, "\u0120typically": 6032, "\u0120Francisco": 6033, "\u0120prospect": 6034, "\u0120Dan": 6035, "\u0120teen": 6036, "rees": 6037, "\u0120sched": 6038, "\u0120hol": 6039, "\u0120scr": 6040, "\u0120lots": 6041, "life": 6042, "\u0120newsp": 6043, "\u0120forget": 6044, "\u0120None": 6045, "\u0120Middle": 6046, "\u0120Ryan": 6047, "edd": 6048, "\u0120severe": 6049, "\u0120suit": 6050, "ller": 6051, "93": 6052, "\u0120correspond": 6053, "\u0120explos": 6054, "uations": 6055, "\u0120flag": 6056, "game": 6057, "rid": 6058, "\u0120prin": 6059, "\u0120Data": 6060, "\u0120deploy": 6061, "\u0120Enter": 6062, "suit": 6063, "ghan": 6064, "\u0120Men": 6065, "\u0120thoughts": 6066, "\u0120matters": 6067, "\u0120adapt": 6068, "\u0120Ari": 6069, "\u0120fill": 6070, "\u0120forth": 6071, "\u0120sam": 6072, "\u012041": 6073, "\u0120payment": 6074, "\u0120Hor": 6075, "\u0120spring": 6076, "duc": 6077, "\u0120losing": 6078, "\u0120bringing": 6079, "FO": 6080, "ala": 6081, "\u0120distribution": 6082, "hered": 6083, "bour": 6084, "\u0120Israeli": 6085, "oma": 6086, "\u0120combination": 6087, "\u0120plenty": 6088, "VE": 6089, "Can": 6090, "\u0120Haw": 6091, "\u0120perman": 6092, "\u0120Special": 6093, "\u0120tow": 6094, "\u0120seeking": 6095, "\u0120examples": 6096, "\u0120classes": 6097, "cr": 6098, "\u0120beer": 6099, "\u0120moves": 6100, "\u0120IP": 6101, "\u0120Kn": 6102, "\u0120panel": 6103, "Even": 6104, "\u0120properly": 6105, "\u0120ris": 6106, "\u0120plug": 6107, "\u0120estimated": 6108, "Every": 6109, "\u0120defensive": 6110, "agraph": 6111, "\u0120pregn": 6112, "\u0120instit": 6113, "\u0120Vict": 6114, "\u0120volume": 6115, "\u0120positions": 6116, "\u0120links": 6117, "\u0120Program": 6118, "\u0120Week": 6119, "agues": 6120, "\u0120transform": 6121, "ker": 6122, "\u0120CEO": 6123, "\u0120cas": 6124, "\u0120opponent": 6125, "\u0120tweet": 6126, "\u0120Code": 6127, "\u0120shop": 6128, "\u0120fly": 6129, "\u0120talks": 6130, "\u0120bag": 6131, "Phone": 6132, "\u0120aid": 6133, "\u0120plants": 6134, "\u012065": 6135, "\u0120attorney": 6136, "arters": 6137, "quest": 6138, "\u0120Magic": 6139, "\u0120begins": 6140, "\u0120myster": 6141, "\u0120environmental": 6142, "\u0120storage": 6143, "NN": 6144, "\u0120marg": 6145, "\u0120ske": 6146, "\u0120metal": 6147, "elly": 6148, 
"\u0120ordered": 6149, "\u0120remained": 6150, "\u0120loved": 6151, "\u0120prompt": 6152, "\u0120updated": 6153, "\u0120experts": 6154, "\u0120walking": 6155, "\u0120ancient": 6156, "\u0120performed": 6157, "ATE": 6158, "\u0120neither": 6159, "iency": 6160, "\u0120manufacture": 6161, "\u0120Pak": 6162, "\u0120selected": 6163, "\u0120mine": 6164, "\u0120ultimately": 6165, "\u0120explan": 6166, "\u0120label": 6167, "\u0120Services": 6168, "ributed": 6169, "Trump": 6170, "\u0120syn": 6171, "\u0120Ult": 6172, "SC": 6173, "\u0120meat": 6174, "\u0120giant": 6175, "\u0120Wars": 6176, "\u0120ON": 6177, "\u0120adm": 6178, "\u0120interpret": 6179, "\u0120evening": 6180, "\u0120evil": 6181, "\u0120Boston": 6182, "\u0120Wild": 6183, "\u0120\u00c3": 6184, "\u0120Bitcoin": 6185, "\u0120Amazon": 6186, "Dr": 6187, "\u0120Information": 6188, "\u0120obviously": 6189, "\u0120advanced": 6190, "Photo": 6191, "olar": 6192, "\u0120weather": 6193, "\u0120symbol": 6194, "\u0120sole": 6195, "\u0120potentially": 6196, "oster": 6197, "\u0120originally": 6198, "mun": 6199, "300": 6200, "aze": 6201, "essions": 6202, "\u0120deck": 6203, "\u0120stood": 6204, "\u0120youth": 6205, "\u0120Bern": 6206, "Rep": 6207, "\u0120Test": 6208, "\u0120basically": 6209, "otic": 6210, "\u0120involve": 6211, "olit": 6212, "lyn": 6213, "See": 6214, "\u0120aircraft": 6215, "\u0120confirm": 6216, "EW": 6217, "\u0120messages": 6218, "\u0120Richard": 6219, "\u0120kit": 6220, "\u0120prohib": 6221, "\u0120vulner": 6222, "isters": 6223, "\u0120existence": 6224, "\u0120turning": 6225, "\u0120SP": 6226, "\u0120desire": 6227, "\u0120flat": 6228, "\u0120ment": 6229, "season": 6230, "anges": 6231, "\u0120neighborhood": 6232, "\u0120Lake": 6233, "ATION": 6234, "\u0120pointed": 6235, "bur": 6236, "\u0120innov": 6237, "ucks": 6238, "UL": 6239, "\u0120professor": 6240, "\u0120expressed": 6241, "AB": 6242, "icious": 6243, "\u01202002": 6244, "\u0120Dev": 6245, "\u0120session": 6246, "\u0120bare": 6247, "sen": 6248, "\u0120diss": 6249, "\u0120Cath": 6250, "\u0120Pass": 6251, "\u0120Point": 6252, "\u0120doctor": 6253, "orrow": 6254, "ailed": 6255, "\u0120Rub": 6256, "\u0120DC": 6257, "\u0120Charl": 6258, "person": 6259, "\u0120writer": 6260, "ighters": 6261, "ureau": 6262, "\u0120oblig": 6263, "\u0120recorded": 6264, "\u0120broke": 6265, "\u0120orders": 6266, "ilty": 6267, "\u0120motion": 6268, "inity": 6269, "law": 6270, "adium": 6271, "\u0120immigration": 6272, "\u0120contrast": 6273, "\u0120batt": 6274, "\u0120excellent": 6275, "\u0120technical": 6276, "ami": 6277, "\u0120tun": 6278, "\u0120cloud": 6279, "\u0120Year": 6280, "geon": 6281, "\u0120creation": 6282, "\u0120strange": 6283, "\u0120auth": 6284, "\u0120fort": 6285, "born": 6286, "\u0120extent": 6287, "\u0120Today": 6288, "\u0120Club": 6289, "\u0120rain": 6290, "\u0120sample": 6291, "\u0120accepted": 6292, "\u0120tact": 6293, "\u0120fired": 6294, "\u0120Son": 6295, "\u0120stands": 6296, "\u0120boot": 6297, "\u012047": 6298, "\u0120statements": 6299, "\u0120versions": 6300, "\u0120selling": 6301, "ounded": 6302, "\u01201990": 6303, "\u0120weren": 6304, "\u0120Watch": 6305, "\u0120experiment": 6306, "Post": 6307, "\u0120retail": 6308, "uled": 6309, "Inst": 6310, "unte": 6311, "\u00e3\u0125\u00bc": 6312, "\u0120depart": 6313, "\u0120bond": 6314, "ivery": 6315, "ompl": 6316, "\u0120reaction": 6317, "\u0120Syrian": 6318, "\u0120Pac": 6319, "apped": 6320, "aniel": 6321, "DP": 6322, "\u0120resolution": 6323, "\u0120react": 6324, "\u0120approved": 6325, "onom": 6326, "mond": 6327, "\u0120Offic": 6328, 
"---": 6329, "\u0120replace": 6330, "\u0120tack": 6331, "\u0120sport": 6332, "\u0120chain": 6333, "\u0120emergency": 6334, "rad": 6335, "\u0120Palestin": 6336, "\u012046": 6337, "\u0120automatically": 6338, "\u0120route": 6339, "\u0120pal": 6340, "\u0120banks": 6341, "\u0120Paris": 6342, "\u0120Media": 6343, "road": 6344, "icing": 6345, "ixt": 6346, "isted": 6347, "\u0120grew": 6348, "\u0120coord": 6349, "\u0120Where": 6350, "omin": 6351, "\u0120subs": 6352, "\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd": 6353, "\u0120\u00c2\u00b1": 6354, "\u0120corporate": 6355, "\u0120selection": 6356, "noon": 6357, "\u0120Report": 6358, "cs": 6359, "cluding": 6360, "orders": 6361, "anche": 6362, "\u0120Its": 6363, "\u0120slowly": 6364, "\u0120Egypt": 6365, "\u0120Acc": 6366, "\u0120colle": 6367, "iques": 6368, "EX": 6369, "\u0120attempts": 6370, "url": 6371, "\u0120Cross": 6372, "\u0120findings": 6373, "\u0120SC": 6374, "\u0120OR": 6375, "\u0120index": 6376, "ensity": 6377, "\u0120Way": 6378, "\u0120Land": 6379, "\u0120shock": 6380, "dis": 6381, "\u0120dynam": 6382, "\u0120cart": 6383, "mosp": 6384, "Since": 6385, "iest": 6386, "\u0120Boy": 6387, "\u0120storm": 6388, "\u0120Contin": 6389, "2013": 6390, "hew": 6391, "ilit": 6392, "\u0120essential": 6393, "iquid": 6394, "Other": 6395, "ivered": 6396, "\u0120reasonable": 6397, "Act": 6398, "\u0120subsequ": 6399, "\u0120Pack": 6400, "\u0120Fort": 6401, "\u0120considering": 6402, "\u0120university": 6403, "log": 6404, "\u0120married": 6405, "\u0120illust": 6406, "\u0120True": 6407, "\u00a3\u0131": 6408, "\u0120numerous": 6409, "rastructure": 6410, "\u0120seriously": 6411, "\u0120referred": 6412, "ua": 6413, "\u0120consistent": 6414, "onna": 6415, "\u0120Real": 6416, "ruption": 6417, "ciples": 6418, "\u0120facts": 6419, "91": 6420, "otes": 6421, "erg": 6422, "Then": 6423, "\u0120accompl": 6424, "Note": 6425, "\u0120revenue": 6426, "\u0120passing": 6427, "\u0120mal": 6428, "een": 6429, "\u0120Yet": 6430, "\u0120gather": 6431, "terday": 6432, "ework": 6433, "\u0120Author": 6434, "Pe": 6435, "\u0120optim": 6436, "\u0120rub": 6437, "\u0120\u00e8\u00a3\u0131": 6438, "\u0120unknown": 6439, "stone": 6440, "\u0120union": 6441, "olve": 6442, "\u0120opportunities": 6443, "\u0120browser": 6444, "\u0120Wal": 6445, "\u0120Cost": 6446, "\u0120reporting": 6447, "sts": 6448, "pet": 6449, "\u0120sand": 6450, "\u0120suddenly": 6451, "\u0120surprising": 6452, "\u0120VR": 6453, "\u0120somewhat": 6454, "\u0120Bas": 6455, "ulture": 6456, "izz": 6457, "\u0120CD": 6458, "\u0120challenges": 6459, "\u0120settings": 6460, "\u0120experiences": 6461, "\u0120Full": 6462, "\u0120cann": 6463, "\u0120receiving": 6464, "EST": 6465, "\u0120joint": 6466, "\u0120cultural": 6467, "\u0120ast": 6468, "82": 6469, "astern": 6470, "ceived": 6471, "\u0120Cru": 6472, "\u0120bull": 6473, "pired": 6474, "amm": 6475, "\u0120facing": 6476, "power": 6477, "\u0120boss": 6478, "\u0120Hol": 6479, "\u0120instr": 6480, "\u0120increasingly": 6481, "\u0120shift": 6482, "\u0120streets": 6483, "\u0120Williams": 6484, "abb": 6485, "\u0120lie": 6486, "\u0120laugh": 6487, "\u0120Ca": 6488, "PL": 6489, "\u0120adults": 6490, "\u0120customer": 6491, "\u0120obtained": 6492, "\u0120supporting": 6493, "html": 6494, "fire": 6495, "\u0120detailed": 6496, "\u0120picked": 6497, "\u0120Right": 6498, "lder": 6499, "EE": 6500, "stood": 6501, "\u0120Kim": 6502, "\u0120wire": 6503, "\u0120sight": 6504, "\u0120developers": 6505, "\u0120persons": 6506, "\u0120sad": 6507, "\u0120cup": 6508, "\u0120warning": 6509, "\u0120boys": 6510, "long": 
6511, "\u0120bird": 6512, "fo": 6513, "\u0120wal": 6514, "\u0120observed": 6515, "\u0120zone": 6516, "iveness": 6517, "\u0120channel": 6518, "cript": 6519, "\u0120refused": 6520, "\u0120Again": 6521, "\u0120suc": 6522, "\u0120spokesman": 6523, "\u0120Ref": 6524, "rite": 6525, "ouston": 6526, "\u00e3\u0125\u00b3": 6527, "\u0120Sher": 6528, "\u0120acts": 6529, "\u0120Name": 6530, "\u0120struggle": 6531, "arry": 6532, "ometimes": 6533, "\u0120discrim": 6534, "HT": 6535, "\u0120category": 6536, "\u0120realize": 6537, "\u0120employee": 6538, "\u0120Afghan": 6539, "enger": 6540, "\u0120guns": 6541, "\u0120Steve": 6542, "\u0120Mot": 6543, "\u0120Ol": 6544, "oked": 6545, "\u0120thick": 6546, "\u0120fairly": 6547, "illy": 6548, "\u0120surve": 6549, "\u0120Mat": 6550, "weight": 6551, "\u00e2\u0136": 6552, "\u0120troops": 6553, "\u0120agents": 6554, "\u0120battery": 6555, "\u0120motiv": 6556, "\u00c3\u00a1": 6557, "Sec": 6558, "den": 6559, "overy": 6560, "LS": 6561, "\u0120flu": 6562, "\u0120confident": 6563, "\u0120Oper": 6564, "\u0120empty": 6565, "\u0120phen": 6566, "\u0120sector": 6567, "\u0120excited": 6568, "\u0120remote": 6569, "aph": 6570, "oen": 6571, "\u0120destroyed": 6572, "\u0120moral": 6573, "\u0120HP": 6574, "\u0120Ron": 6575, "\u0120dress": 6576, "\u0120Bat": 6577, "\u0120lit": 6578, "\u0120MS": 6579, "\u0120af": 6580, "HL": 6581, "rum": 6582, "isms": 6583, "\u0120shouldn": 6584, "\u0120sympt": 6585, "\u0120Toronto": 6586, "hetic": 6587, "\u0120carbon": 6588, "\u0120installed": 6589, "\u0120violent": 6590, "\u0120solar": 6591, "ja": 6592, "\u0120practices": 6593, "\u0120ride": 6594, "\u0120Penn": 6595, "\u0120improved": 6596, "\u0120audio": 6597, "\u0120behavi": 6598, "\u0120PS": 6599, "\u0120eating": 6600, "Data": 6601, "\u0120Review": 6602, "pass": 6603, "claim": 6604, "uated": 6605, "angers": 6606, "chen": 6607, "\u0120properties": 6608, "\u0120anywhere": 6609, "Another": 6610, "\u0120blow": 6611, "\u0120Jackson": 6612, "\u0120proud": 6613, "\u0120plane": 6614, "lines": 6615, "\u0120square": 6616, "\u0120proof": 6617, "ansas": 6618, "\u0120talked": 6619, "makers": 6620, "\u0120sister": 6621, "\u0120holds": 6622, "\u0120resident": 6623, "\u0120==": 6624, "\u0120resistance": 6625, "\u0120split": 6626, "\u0120prosecut": 6627, "\u0120confidence": 6628, "resents": 6629, "\u0120cuts": 6630, "\u0120exception": 6631, "\u0120zero": 6632, "Getty": 6633, "\u0120copyright": 6634, "\u0120totally": 6635, "ormal": 6636, "ifications": 6637, "\u0120Australian": 6638, "\u0120sick": 6639, "\u0120150": 6640, "\u0120household": 6641, "\u0120fees": 6642, "\u0120drivers": 6643, "ogen": 6644, "\u0120NY": 6645, "\u0120necessarily": 6646, "\u0120regulations": 6647, "earing": 6648, "sl": 6649, "\u0120perspective": 6650, "care": 6651, "icial": 6652, "His": 6653, "\u0120escape": 6654, "\u0120surprised": 6655, "\u0120Van": 6656, "urrent": 6657, "\u0120vac": 6658, "81": 6659, "\u0120Thus": 6660, "\u0120emphas": 6661, "\u0120Champions": 6662, "\u0120Ice": 6663, "\u0120narr": 6664, "\u0120heads": 6665, "\u0120causing": 6666, "bel": 6667, "fortunately": 6668, "\u0120Ma": 6669, "\u0120targets": 6670, "cipl": 6671, "\u0120afternoon": 6672, "\u0120adds": 6673, "\u0120Maybe": 6674, "\u0120Four": 6675, "essed": 6676, "plete": 6677, "\u0120usual": 6678, "cho": 6679, "ingu": 6680, "\u0120withd": 6681, "\u0120Energy": 6682, "\u0120Econom": 6683, "OO": 6684, "\u0120articles": 6685, "\u0120injured": 6686, "\u0120manage": 6687, "\u0120explains": 6688, "\u0120diagn": 6689, "Rec": 6690, "atures": 6691, "\u0120linked": 6692, 
"\u0120discussed": 6693, "\u0120explo": 6694, "\u0120occasion": 6695, "athan": 6696, "\u0120opposite": 6697, "\u0120faces": 6698, "\u0120denied": 6699, "\u0120Knight": 6700, "\u0120nut": 6701, "\u0120approximately": 6702, "\u0120disappoint": 6703, "onymous": 6704, "\u0120Best": 6705, "\u0120Lo": 6706, "\u0120Hy": 6707, "\u0120Aff": 6708, "\u0120voting": 6709, "anwhile": 6710, "\u0120III": 6711, "\u0120institutions": 6712, "agram": 6713, "\u0120Daily": 6714, "\u0120drag": 6715, "\u0120nearby": 6716, "\u0120guilty": 6717, "\u0120conver": 6718, "Pre": 6719, "ship": 6720, "\u0120reward": 6721, "\u0120philosoph": 6722, "\u0120SS": 6723, "ugh": 6724, "\u0120apps": 6725, "friend": 6726, "\u0120upper": 6727, "\u0120advert": 6728, "\u0120snow": 6729, "\u0120frust": 6730, "\u0120ourselves": 6731, "Fr": 6732, "\u0120Die": 6733, "ampion": 6734, "\u0120dismiss": 6735, "\u0120cere": 6736, "\u0120signal": 6737, "from": 6738, "\u0120).": 6739, "\u012052": 6740, "\u0120crimes": 6741, "itors": 6742, "estival": 6743, "useum": 6744, "\u0120council": 6745, "\u0120Saud": 6746, "May": 6747, "\u0120Gun": 6748, "ician": 6749, "ether": 6750, "\u0120sufficient": 6751, "\u0120Hen": 6752, "sole": 6753, "\u0120historical": 6754, "\u0120Far": 6755, "\u0120Turn": 6756, "\u0120pin": 6757, "\u0120succeed": 6758, "mat": 6759, "lymp": 6760, "\u0120tradition": 6761, "\u0120Ok": 6762, "\u0120cro": 6763, "\u0120description": 6764, "alle": 6765, "\u0120sky": 6766, "Te": 6767, "\u0120widely": 6768, "\u0120wave": 6769, "\u0120definition": 6770, "\u0120Jews": 6771, "\u0120cycle": 6772, "\u0120refere": 6773, "\u0120brings": 6774, "usal": 6775, "\u0120alive": 6776, "\u0120frequently": 6777, "\u0120intention": 6778, "\u0120Control": 6779, "lv": 6780, "ystem": 6781, "\u0120privacy": 6782, "gent": 6783, "rence": 6784, "\u0120Quest": 6785, "\u0120Christmas": 6786, "\u0120rail": 6787, "\u0120cooper": 6788, "\u0120tested": 6789, "\u0120Capt": 6790, "asks": 6791, "\u0120comfortable": 6792, "\u0120delivered": 6793, "scape": 6794, "\u0120depth": 6795, "\u0120GOP": 6796, "\u0120writes": 6797, "\u0120assets": 6798, "\u0120sav": 6799, "iments": 6800, "\u0120transition": 6801, "\u0120artist": 6802, "\u0120Look": 6803, "\u0120lob": 6804, "\u0120components": 6805, "arity": 6806, "\u0120walked": 6807, "\u0120root": 6808, "\u0120participants": 6809, "\u0120noticed": 6810, "\u0120resc": 6811, "\u0120nav": 6812, "\u0120Administ": 6813, "da": 6814, "utral": 6815, "plate": 6816, "\u0120importance": 6817, "\u0120assert": 6818, "iously": 6819, "cription": 6820, "\u0120injuries": 6821, "\u0120Check": 6822, "\u0120registered": 6823, "\u0120intent": 6824, "\u0120missed": 6825, "ographic": 6826, "\u0120sentence": 6827, "ounter": 6828, "\u0120assistance": 6829, "evin": 6830, "\u0120database": 6831, "\u0120buildings": 6832, "\u0120classic": 6833, "\u0120thinks": 6834, "\u0120Ohio": 6835, "Pr": 6836, "ugg": 6837, "\u0120fee": 6838, "pan": 6839, "\u0120effectively": 6840, "\u0120facility": 6841, "\u0120bear": 6842, "\u0120chapter": 6843, "\u0120dogs": 6844, "\u0120Columb": 6845, "\u0120latter": 6846, "itial": 6847, "\u0120admitted": 6848, "TV": 6849, "\u0120Georg": 6850, "\u0120posts": 6851, "\\\\": 6852, "\u0120lawyer": 6853, "\u0120equival": 6854, "\u0120mand": 6855, "\u0120controlled": 6856, "\u0120Walk": 6857, "\u0120Andrew": 6858, "\u0120menu": 6859, "amental": 6860, "\u0120protected": 6861, "va": 6862, "\u0120administr": 6863, "oral": 6864, "\u0120rein": 6865, "\u0120Sar": 6866, "\u0120amounts": 6867, "\u0120native": 6868, "\u0120Moon": 6869, 
"\u0120represents": 6870, "\u0120abandon": 6871, "\u0120carrying": 6872, "\u0120tank": 6873, "mary": 6874, "\u0120declared": 6875, "Tube": 6876, "\u0120hat": 6877, "\u0120punish": 6878, "ellect": 6879, "mes": 6880, "\u0120universe": 6881, "\u0120Rod": 6882, "phy": 6883, "\u0120infrastructure": 6884, "\u012051": 6885, "\u0120opposed": 6886, "ownt": 6887, "ca": 6888, "\u0120Make": 6889, "\u0120hardware": 6890, "\u0120coffee": 6891, "Rel": 6892, "bal": 6893, "world": 6894, "\u0120Saf": 6895, "\u0120Sea": 6896, "inals": 6897, "\u0120owned": 6898, "\u0120hall": 6899, "ersion": 6900, "\u0120describe": 6901, "\u0120Pot": 6902, "\u0120portion": 6903, "\u0120atmosp": 6904, "\u0120governments": 6905, "\u0120depending": 6906, "\u0120offense": 6907, "\u0120trick": 6908, "awa": 6909, "\u0120Line": 6910, "\u0120Vis": 6911, "\u0120Hard": 6912, "\u0120Orig": 6913, "\u0120Click": 6914, "\u0120desk": 6915, "\u0120Valley": 6916, "\u0120Sov": 6917, "\u0120movies": 6918, "\u0120remark": 6919, "\u0120mail": 6920, "\u0120conscious": 6921, "\u0120ruling": 6922, "\u0120Rights": 6923, "\u0120medic": 6924, "hent": 6925, "\u0120Women": 6926, "><": 6927, "\u0120replaced": 6928, "\u0120Prem": 6929, "\u0120Thanks": 6930, "\u0120renew": 6931, "\u0120Ball": 6932, "iform": 6933, "\u0120shots": 6934, "Comm": 6935, "\u0120armed": 6936, "\u0120constant": 6937, "\u0120taste": 6938, "\u0120realized": 6939, "\u0120buff": 6940, "\u0120mo": 6941, "\u0120efficient": 6942, "Most": 6943, "oration": 6944, "ifies": 6945, "\u0120communication": 6946, "\u0120flood": 6947, "\u0120consequences": 6948, "\u0120anyway": 6949, "igg": 6950, "\u0120GM": 6951, "\u0120Thank": 6952, "\u0120iron": 6953, "\u0120evolution": 6954, "\u0120Cop": 6955, "twitter": 6956, "\u012095": 6957, "\u0120relationships": 6958, "adel": 6959, "\u0120Young": 6960, "\u0120proposal": 6961, "ayers": 6962, "uilding": 6963, "\u0120Hot": 6964, "ORE": 6965, "cos": 6966, "\u0120collabor": 6967, "PG": 6968, "axy": 6969, "\u0120knowing": 6970, "\u0120supports": 6971, "owed": 6972, "\u0120controls": 6973, "\u0120merely": 6974, "umer": 6975, "\u0120athlet": 6976, "\u0120fashion": 6977, "path": 6978, "\u0120gift": 6979, "\u0120era": 6980, "AND": 6981, "\u0120kinds": 6982, "\u0120Korean": 6983, "\u0120legit": 6984, "ulous": 6985, "\u0120essentially": 6986, "\u0120therap": 6987, "nic": 6988, "\u0120suffered": 6989, "\u0120hur": 6990, "\u0120promise": 6991, "\u0120excess": 6992, "\u0120overw": 6993, "\u0120prime": 6994, "\u0120Houston": 6995, "erry": 6996, "\u0120Ms": 6997, "RS": 6998, "2012": 6999, "\u0120stores": 7000, "\u0120Olymp": 7001, "\u0120journey": 7002, "Although": 7003, "Sub": 7004, "\u0120Educ": 7005, "\u0120Chapter": 7006, "\u0120requests": 7007, "\u0120consumers": 7008, "\u0120tiny": 7009, "\u0120isol": 7010, "\u0120Fair": 7011, "ba": 7012, "\u0120YOU": 7013, "\u0120crash": 7014, "celer": 7015, "\u0120emotional": 7016, "\u0120goods": 7017, "\u0120elected": 7018, "\u0120moder": 7019, "\u0120Linux": 7020, "\u0120blocks": 7021, "\u0120island": 7022, "\u0120Society": 7023, "\u0120elections": 7024, "\u0120broadcast": 7025, "\u0120cheap": 7026, "\u0120nations": 7027, "\u0120seasons": 7028, "400": 7029, "\u0120waste": 7030, "\u0120Sat": 7031, "\u0120fields": 7032, "employ": 7033, "\u0120profile": 7034, "\u0120authors": 7035, "ALL": 7036, "\u0120Gra": 7037, "west": 7038, "\u0120Ty": 7039, "\u0120deaths": 7040, "\u0120vacc": 7041, "\u0120formed": 7042, "\u0120du": 7043, "\u0120ongoing": 7044, "\u0120Muslims": 7045, "elf": 7046, "igure": 7047, "\u0120assume": 7048, 
"\u0120Ukraine": 7049, "water": 7050, "\u0120coast": 7051, "\u0120voted": 7052, "gor": 7053, "\u0120AS": 7054, "\u0120Michigan": 7055, "aza": 7056, "\u0120Arm": 7057, "iro": 7058, "\u0120flex": 7059, "asters": 7060, "''": 7061, "\u0120welcome": 7062, "arl": 7063, "\u0120locations": 7064, "igation": 7065, "\u0120Fil": 7066, "\u0120buying": 7067, "\u0120architect": 7068, "\u0120harder": 7069, "\u0120Cub": 7070, "\u0120interface": 7071, "\u0120restaurant": 7072, "\u0120discover": 7073, "\u0120exceed": 7074, "\u0120favour": 7075, "gery": 7076, "\u0120duty": 7077, "\u0120pitch": 7078, "ador": 7079, "\u0120Mach": 7080, "boy": 7081, "\u0120responded": 7082, "\u0120extended": 7083, "hers": 7084, "Many": 7085, "raid": 7086, "ifer": 7087, "\u0120Ins": 7088, "Ser": 7089, "\u0120medium": 7090, "she": 7091, "\u0120Sports": 7092, "\u0120magazine": 7093, "utation": 7094, "\u0120limits": 7095, "\u0120Gall": 7096, "\u0120external": 7097, "razil": 7098, "\u0120younger": 7099, "tle": 7100, "\u0120remind": 7101, "\u0120CON": 7102, "\u0120immediate": 7103, "\u0120hidden": 7104, "\u0120volunte": 7105, "\u0120simpl": 7106, "odcast": 7107, "\u0120phase": 7108, "dr": 7109, "\u0120plot": 7110, "\u0120exposure": 7111, "RI": 7112, "ograp": 7113, "vin": 7114, "anish": 7115, "\u0120Acad": 7116, "\u0120Engine": 7117, "\u0120expansion": 7118, "\u0120Pay": 7119, "Your": 7120, "\u0120pushed": 7121, "\u0120Ell": 7122, "\u0120Head": 7123, "\u0120marketing": 7124, "\u0120AC": 7125, "ket": 7126, "\u0120hits": 7127, "\u0120gro": 7128, "\u0120Age": 7129, "\u0120Scot": 7130, "][": 7131, "\u0120stim": 7132, "\u0120iPhone": 7133, "\u012a\u0134": 7134, "\u0120narrow": 7135, "\u0120Getty": 7136, "\u0120Turkey": 7137, "\u0120perfectly": 7138, "\u0120enable": 7139, "utch": 7140, "\u0120precise": 7141, "\u0120regime": 7142, "\u0120shif": 7143, "\u0120compens": 7144, "gun": 7145, "div": 7146, "\u0120chosen": 7147, "\u0120Ken": 7148, "Any": 7149, "\u0120trees": 7150, "\u0120recommended": 7151, "\u0120Ren": 7152, "uable": 7153, "\u0120HT": 7154, "Follow": 7155, "EG": 7156, "\u0120Hand": 7157, "\u0120Kenn": 7158, "\u0120arguments": 7159, "\u0120exists": 7160, "\u0120bike": 7161, "\u0120Conserv": 7162, "\u0120breaking": 7163, "\u0120Gar": 7164, "\u0120crazy": 7165, "\u0120virtual": 7166, "aylor": 7167, "ixel": 7168, "\u01201980": 7169, "\u0120permission": 7170, "\u0120Series": 7171, "\u0120consumer": 7172, "\u0120closely": 7173, "called": 7174, "\u012054": 7175, "\u0120hopes": 7176, "\u0120array": 7177, "\u0120Win": 7178, "\u0120Labour": 7179, "\u0120spons": 7180, "\u0120Ire": 7181, "\u0120pow": 7182, "\u0120readers": 7183, "\u0120employment": 7184, "\u0120creature": 7185, "\u0120resulting": 7186, "\u0120accurate": 7187, "\u0120moments": 7188, "\u0120argued": 7189, "\u0120ped": 7190, "During": 7191, "\u012053": 7192, "\u0120Tal": 7193, "\u0120sought": 7194, "\u0120suffering": 7195, "\u0120icon": 7196, "lee": 7197, "\u0120($": 7198, "alian": 7199, "\u00c2\u00b0": 7200, "\u0120pra": 7201, "\u0120bonus": 7202, "(\"": 7203, "ko": 7204, "\u0120acting": 7205, "DE": 7206, "fall": 7207, "\u0120comparison": 7208, "\u0120smooth": 7209, "\u0120NAS": 7210, "upp": 7211, "\u0120Joseph": 7212, "eping": 7213, "\u0120Take": 7214, "\u0120Mid": 7215, "\u0120sending": 7216, "fast": 7217, "\u0120Fall": 7218, "\u0120dealing": 7219, "user": 7220, "\u0120Organ": 7221, "Co": 7222, "\u0120attached": 7223, "\u0120sees": 7224, "%.": 7225, "\u0120typical": 7226, "ART": 7227, "\u0120finds": 7228, "\u0120Asia": 7229, "umin": 7230, "\u0120Core": 7231, "\u0120Ent": 7232, 
"inent": 7233, "uce": 7234, "\u0120Blood": 7235, "\u0120Never": 7236, "\u0120emails": 7237, "\u0120highlight": 7238, "\u0120confront": 7239, "atus": 7240, "uted": 7241, "\u0120unus": 7242, "\u0120topic": 7243, "\u0120Adam": 7244, "\u0120ble": 7245, "ati": 7246, "\u0120understood": 7247, "Set": 7248, "struct": 7249, "TP": 7250, "\u0120mob": 7251, "aa": 7252, "\u0120Start": 7253, "pected": 7254, "sell": 7255, "\u0120dedicated": 7256, "\u0120CA": 7257, "uan": 7258, "\u0120songs": 7259, "escription": 7260, "\u0120tech": 7261, "\u0120rape": 7262, "\u0120aside": 7263, "\u0120grant": 7264, "\u012056": 7265, "sub": 7266, "\u0120argue": 7267, "\u0120containing": 7268, "\u0120schedule": 7269, "\u0120liberal": 7270, "\u0120publicly": 7271, "\u0120heavily": 7272, "\u0120Ut": 7273, "iner": 7274, "\u0120Section": 7275, "\u0120Care": 7276, "weet": 7277, "ls": 7278, "Dis": 7279, "\u00e2\u0136\u0122": 7280, "\u0120Follow": 7281, "Back": 7282, "\u0120IT": 7283, "\u0120bes": 7284, "ji": 7285, "\u0120Hit": 7286, "ested": 7287, "\u0120everybody": 7288, "\u0120Swed": 7289, "\u0120femin": 7290, "\u0120facilities": 7291, "\u0120conven": 7292, "Comp": 7293, "\u0120OS": 7294, "core": 7295, "\u0120anx": 7296, "\u0120division": 7297, "\u0120Cam": 7298, "\u0120Stan": 7299, "mates": 7300, "\u0120explore": 7301, "plom": 7302, "\u0120shares": 7303, "pload": 7304, "anes": 7305, "\u0120ideal": 7306, "eters": 7307, "\u0120Base": 7308, "\u0120plastic": 7309, "\u0120distinct": 7310, "\u0120Network": 7311, "\u0120Seattle": 7312, "\u0120trading": 7313, "ensus": 7314, "intend": 7315, "\u0120exhib": 7316, "\u0120initially": 7317, "\u0120Food": 7318, "\u0120thousand": 7319, "\u0120Business": 7320, "acter": 7321, "\u0120paragraph": 7322, "\u0120roughly": 7323, "\u0120www": 7324, "\u0120creative": 7325, "\u0120Conf": 7326, "\u0120consumption": 7327, "\u0120films": 7328, "agan": 7329, "\u0120obtain": 7330, "\u0120tall": 7331, "\u0120tor": 7332, "\u0120acknowled": 7333, "\u0120grown": 7334, "alo": 7335, "KE": 7336, "\u0120400": 7337, "enders": 7338, "taining": 7339, "UG": 7340, "\u0120suicide": 7341, "\u0120watched": 7342, "\u0120List": 7343, "ali": 7344, "rehens": 7345, "\u0120surrounding": 7346, "\u0120pip": 7347, "\u0120flying": 7348, "\u0120Java": 7349, "ordan": 7350, "\u0120serving": 7351, "inations": 7352, "post": 7353, "\u0120sho": 7354, "Av": 7355, "\u0120jail": 7356, "zy": 7357, "\u01201999": 7358, "\u0120>": 9609, "orous": 9610, "\u0120firms": 9611, "screen": 9612, "una": 9613, "\u0120embarrass": 9614, "ulse": 9615, "\u0120letting": 9616, "\u0120threw": 9617, "iley": 9618, "\u0120channels": 9619, "lan": 9620, "\u0120Vegas": 9621, "\u0120sear": 9622, "\u0120fantastic": 9623, "arre": 9624, "uzzle": 9625, "\u0120Der": 9626, "Those": 9627, "\u0120swing": 9628, "\u0120sheet": 9629, "index": 9630, "cover": 9631, "ogan": 9632, "\u0120variables": 9633, "\u0120Tech": 9634, "\u0120spoken": 9635, "achel": 9636, "\u0120Da": 9637, "\u0120Mountain": 9638, "\u0120loaded": 9639, "\u0120footage": 9640, "version": 9641, "\u0120unl": 9642, "\u0120Phoenix": 9643, "\u0120throwing": 9644, "\u0120firing": 9645, "\u0120tracking": 9646, "\u0120width": 9647, "\u0120struggling": 9648, "rooms": 9649, "otion": 9650, "\u0120monthly": 9651, "\u0120Server": 9652, "\u0120eggs": 9653, "open": 9654, "MC": 9655, "\u01201993": 9656, "\u0120hired": 9657, "\u0120stayed": 9658, "\u0120Allen": 9659, "\u0120stro": 9660, "\u012098": 9661, "step": 9662, "\u0120Turkish": 9663, "\u0120fabric": 9664, "isting": 9665, "\u0120Dom": 9666, "\u0120dates": 9667, "\u0120pron": 
9668, "\u0120basketball": 9669, "\u0120lucky": 9670, "\u0120Arabia": 9671, "\u0120assumed": 9672, "esty": 9673, "\u0120affairs": 9674, "\u0120glad": 9675, "\u0120Indeed": 9676, "\u0120FA": 9677, "\u0120Word": 9678, "\u0120joining": 9679, "ifice": 9680, "pread": 9681, "irts": 9682, "\u0120Select": 9683, "\u0120populations": 9684, "aware": 9685, "\u0120nose": 9686, "\u0120complaints": 9687, "start": 9688, "\u0120scoring": 9689, "Thanks": 9690, "\u0120mining": 9691, "\u0120visitors": 9692, "SH": 9693, "\u0120damaged": 9694, "\u0120characteristics": 9695, "\u0120Pent": 9696, "DC": 9697, "\u012083": 9698, "\u0120Six": 9699, "rates": 9700, "\u0120flags": 9701, "\u0120Brew": 9702, "dog": 9703, "Mark": 9704, "////": 9705, "\u0120execution": 9706, "\u0120joke": 9707, "phones": 9708, "\u0120testimony": 9709, "\u0120obst": 9710, "QL": 9711, "\u0120Cut": 9712, "\u0120studied": 9713, "\u0120Nintendo": 9714, "icket": 9715, "\u0120NBC": 9716, "\u0120lad": 9717, "\u0120Bra": 9718, "\u0120Moh": 9719, "\u0120kernel": 9720, "\u0120overwhelming": 9721, "\u0120aged": 9722, "\u0120applicable": 9723, "\u0120Cond": 9724, "\u0120roads": 9725, "\u0120Block": 9726, "made": 9727, "odge": 9728, "\u0120commands": 9729, "\u0120offices": 9730, "veland": 9731, "\u0120tut": 9732, "\u0120receiver": 9733, "\u0120Fro": 9734, "\u0120shopping": 9735, "\u0120iP": 9736, "\u0120Stre": 9737, "\u0120ABC": 9738, "\u0120entertainment": 9739, "\u0120Bow": 9740, "orted": 9741, "Mc": 9742, "\u0120reads": 9743, "grad": 9744, "\u0120Collect": 9745, "\u0120\u00e2\u012a\u0134": 9746, "\u0120Capital": 9747, "ederation": 9748, "\u0120employer": 9749, "\u0120involvement": 9750, "\u0120anxiety": 9751, "alia": 9752, "\u0120roof": 9753, "\u0120Among": 9754, "\u0120Democrat": 9755, "\u0120stats": 9756, "\u0120Vill": 9757, "\u0120constitutional": 9758, "\u0120referring": 9759, "itty": 9760, "\u0120tackle": 9761, "outube": 9762, "\u0120backed": 9763, "\u0120Hong": 9764, "\u0120Broad": 9765, "\u0120ele": 9766, "\u0120Ott": 9767, "\u01201992": 9768, "hour": 9769, "achusetts": 9770, "Cal": 9771, "\u0120defeated": 9772, "\u012081": 9773, "esp": 9774, "\u0120seemingly": 9775, "was": 9776, "\u0120Jenn": 9777, "\u0120Kurd": 9778, "\u0120gene": 9779, "\u0120discount": 9780, "Ret": 9781, "ECT": 9782, "();": 9783, "\u0120clubs": 9784, "\u0120sid": 9785, "\u0120Marsh": 9786, "Check": 9787, "\u0120pp": 9788, "\u0120Eag": 9789, "idespread": 9790, "\u0120beings": 9791, "FT": 9792, "\u0120introduction": 9793, "\u0120Change": 9794, "ARD": 9795, "\u0120110": 9796, "adows": 9797, "ierce": 9798, "\u0120meal": 9799, "author": 9800, "\u0120Bang": 9801, "lahoma": 9802, "\u0120ranks": 9803, "2011": 9804, "????": 9805, "max": 9806, "\u0120collapse": 9807, "\u0120opens": 9808, "\u0120echo": 9809, "\u0120soph": 9810, "\u0120racist": 9811, "\u0120enormous": 9812, "\u0120waves": 9813, "\u0120tap": 9814, "\u0120comprehensive": 9815, ".--": 9816, "\u0120Roy": 9817, "\u0120farmers": 9818, "Related": 9819, "aired": 9820, "rones": 9821, "\u0120Crim": 9822, "\u0120proportion": 9823, "\u0120designs": 9824, "\u0120negotiations": 9825, "\u0120virtually": 9826, "\u0120Batman": 9827, "\u0120warn": 9828, "\u0120legitimate": 9829, "mate": 9830, "\u0120convention": 9831, ",,": 9832, "netic": 9833, "\u0120SD": 9834, "\u0120consistently": 9835, "\u0120compensation": 9836, "\u0120punishment": 9837, "\u0120ye": 9838, "\u0120tie": 9839, "\u0120Bureau": 9840, "irlf": 9841, "\u0120Bu": 9842, "\u0120Aren": 9843, "\u0120Philipp": 9844, "\u0120knife": 9845, "\u0120memories": 9846, "\u0120Ross": 9847, 
"\u0120angle": 9848, "\u012086": 9849, "\u0120Thunder": 9850, "\u0120rend": 9851, "\u0120Tour": 9852, "\u0120counts": 9853, "sung": 9854, "\u0120Imp": 9855, "\u0120educational": 9856, "\u0120accessible": 9857, "COM": 9858, "\u0120drew": 9859, "yer": 9860, "Gl": 9861, "amine": 9862, "ORT": 9863, "OB": 9864, "IB": 9865, "master": 9866, "\u0120trials": 9867, "ogy": 9868, "har": 9869, "\u0120Trust": 9870, "\u0120preferred": 9871, "irlfriend": 9872, "\u0120Nev": 9873, "\u0120bin": 9874, "\u0120cow": 9875, "Page": 9876, "\u0120signature": 9877, "\u0120BL": 9878, "700": 9879, "\u0120retired": 9880, "\u0120bytes": 9881, "\u0120neighb": 9882, "\u0120Legend": 9883, "\u0120devast": 9884, "\u0120suspected": 9885, "isons": 9886, "\u0120Pok\u00c3\u00a9mon": 9887, "scale": 9888, "\u0120capabilities": 9889, "\u0120revel": 9890, "\u0120cheese": 9891, "dy": 9892, "igrant": 9893, "\u0120failing": 9894, "bits": 9895, "\u0120Heroes": 9896, "\u0120Ghost": 9897, "\u0120Scient": 9898, "\u0120appointed": 9899, "uri": 9900, "\u0120institution": 9901, "\u0120expanded": 9902, "greg": 9903, "\u0120monitoring": 9904, "\u0120podcast": 9905, "\u0120coalition": 9906, "\u012096": 9907, "Jo": 9908, "\u0120stolen": 9909, "\u0120Sab": 9910, "\u0120stops": 9911, "\u0120holiday": 9912, "\u0120intr": 9913, "Car": 9914, "Black": 9915, "\u0120LGBT": 9916, "\u0120warming": 9917, "\u0120Anderson": 9918, "\u012089": 9919, "\u0120producer": 9920, "Med": 9921, "\u0120accuracy": 9922, "\u0120Marvel": 9923, "izabeth": 9924, "\u0120Patrick": 9925, "mony": 9926, "\u0120mini": 9927, "acles": 9928, "\u0120overt": 9929, "they": 9930, "\u0120membership": 9931, "\u0120Ven": 9932, "\u0120exch": 9933, "\u0120removal": 9934, "\u0120Dave": 9935, "TY": 9936, "mad": 9937, "\u0120Find": 9938, "\u0120adequ": 9939, "\u0120ec": 9940, "\u0120teeth": 9941, "\u0120emotion": 9942, "\u0120perm": 9943, "\u0120solely": 9944, "db": 9945, "\u0120extraord": 9946, "IGHT": 9947, "cal": 9948, "\u0120guidelines": 9949, "\u0120dying": 9950, "\u0120suspended": 9951, "\u0120Premier": 9952, "\u0120Anthony": 9953, "elve": 9954, "\u0120dad": 9955, "\u0120Eth": 9956, "\u0120Football": 9957, "\u0120abandoned": 9958, "\u0120<<": 9959, "\u0120march": 9960, "\u0120horror": 9961, "\u00e2\u0122\u00a6\"": 9962, "\u0120childhood": 9963, "\u0120campaigns": 9964, "\u0120lunch": 9965, "\u0120Albert": 9966, "block": 9967, "\u00e2\u0138\u012a\u00e2\u0138\u012a": 9968, "ounding": 9969, "\u0120bone": 9970, "organ": 9971, "aders": 9972, "\u0120Flash": 9973, "\u0120Drive": 9974, "\u0120tonight": 9975, "\u0120wars": 9976, "\u0120FL": 9977, "\u0120formation": 9978, "const": 9979, "News": 9980, "\u0120compe": 9981, "orious": 9982, "\u0120Staff": 9983, "\u0120discussions": 9984, "\u0120Protection": 9985, "\u0120Jam": 9986, "\u0120criteria": 9987, "\u0120installation": 9988, "\u0120accomplish": 9989, "izza": 9990, "\u0120publisher": 9991, "\u0120rescue": 9992, "\u0120Try": 9993, "ULL": 9994, "\u0120Som": 9995, "\u0120Hop": 9996, "oret": 9997, "ths": 9998, "ordon": 9999, "\u0120pocket": 10000, "\u0120Inv": 10001, "Download": 10002, "\u0120Crime": 10003, "\u0120bene": 10004, "\u0120Guide": 10005, "\u0120Assembly": 10006, "\u0120parameters": 10007, "IE": 10008, "\u0120Alexander": 10009, "\u0120concert": 10010, "\u0120Sche": 10011, "\u0120shoes": 10012, "\u0120visiting": 10013, "\u0120recall": 10014, "\u0120bub": 10015, "\u0120rural": 10016, "\u0120concrete": 10017, "\u0120Ros": 10018, "Next": 10019, "Russ": 10020, "\u0120loans": 10021, "\u0120Shield": 10022, "\u0120trem": 10023, "hemat": 10024, 
"kg": 10025, "\u0120Harris": 10026, "isition": 10027, "\u0120Move": 10028, "\u0120FC": 10029, "\u0120fate": 10030, "\u0120Cho": 10031, "\u0120tired": 10032, "\u0120principal": 10033, "hist": 10034, "iences": 10035, "athy": 10036, "\u0120sevent": 10037, "\u0120mood": 10038, "\u0120strategic": 10039, "\u0120diseases": 10040, "\u0120forum": 10041, "\u0120tempor": 10042, "\u0120headquarters": 10043, "Par": 10044, "ige": 10045, "flix": 10046, "\u0120guitar": 10047, "\u012094": 10048, "Only": 10049, "\u0120releases": 10050, "roph": 10051, "================================": 10052, "\u0120600": 10053, "\u0120Continue": 10054, "igate": 10055, "\u0120Crit": 10056, "system": 10057, "\u0120disabled": 10058, "\u0120unexpected": 10059, "ithub": 10060, "\u0120unclear": 10061, "\u0120Est": 10062, "\u0120contrad": 10063, "\u0120strategies": 10064, "ventures": 10065, "\u0120passage": 10066, "AME": 10067, "\u0120improving": 10068, "\u0120reveals": 10069, "\u0120decrease": 10070, "ova": 10071, "\u0120annoy": 10072, "\u0120Short": 10073, "\u0120Library": 10074, "\u0120cyber": 10075, "nell": 10076, "\u0120Hur": 10077, "\u0120CB": 10078, "\u0120photograp": 10079, "UI": 10080, "\u0120sed": 10081, "Ge": 10082, "\u012087": 10083, "\u0120diverse": 10084, "\u0120encouraged": 10085, "\u0120conspiracy": 10086, "\u0120birds": 10087, "\u0120operator": 10088, "\u0120handful": 10089, "\u0120classified": 10090, "?)": 10091, "\u0120dramatic": 10092, "\u0120investigators": 10093, "ito": 10094, "\u0120widespread": 10095, "\u0120Room": 10096, "----------------------------------------------------------------": 10097, "\u0120collective": 10098, "\u0120journalist": 10099, "String": 10100, "\u0120temperatures": 10101, "ila": 10102, "\u0120guid": 10103, "\u0120inspect": 10104, "\u0120missile": 10105, "\u0120Mayor": 10106, "\u0120manual": 10107, "\u0120simultane": 10108, "\u0120ratings": 10109, "\u0120suck": 10110, "\u012097": 10111, "\u0120universal": 10112, "\u0120pharm": 10113, "\u0120disrupt": 10114, "iano": 10115, "AV": 10116, "\u0120ft": 10117, "\u0120statist": 10118, "olds": 10119, "\u0120Walker": 10120, "php": 10121, "\u0120undert": 10122, "\u0120Las": 10123, "ishop": 10124, "ntil": 10125, "reshold": 10126, "\u0120Whether": 10127, "Ms": 10128, "\u0120deny": 10129, "\u0120Cloud": 10130, "\u0120provider": 10131, "\u0120surviv": 10132, "\u0120Update": 10133, "has": 10134, "\u0120mistakes": 10135, "charge": 10136, "pled": 10137, "rity": 10138, "\u0120node": 10139, "\u0120Massachusetts": 10140, "ools": 10141, "lication": 10142, "\u0120fails": 10143, "emale": 10144, "ori": 10145, "backs": 10146, "\u0120shirt": 10147, "\u0120''": 10148, "\u0120NAT": 10149, "\u0120waters": 10150, "elson": 10151, "\u0120ease": 10152, "\u0120scar": 10153, "\u0120contents": 10154, "mind": 10155, "\u0120contribution": 10156, "\u0120shr": 10157, "\u0120handed": 10158, "\u0120stability": 10159, "\u0120trave": 10160, "Em": 10161, "\u0120mirror": 10162, "123": 10163, "\u0120weigh": 10164, "\u0120fiction": 10165, "ouver": 10166, "istant": 10167, "rition": 10168, "\u0120Fed": 10169, "\u0120physically": 10170, "\u0120stake": 10171, "\u0120Article": 10172, "\u0120Arc": 10173, "\u0120Lewis": 10174, "\u0120Mind": 10175, "\u0120demonstrate": 10176, "\u0120profits": 10177, "vision": 10178, "omic": 10179, "olid": 10180, "\u0120battles": 10181, "\u0120drives": 10182, "\u0120eastern": 10183, "\u0120Sony": 10184, "!!!": 10185, "aration": 10186, "vard": 10187, "\u0120GL": 10188, "portation": 10189, "\u012092": 10190, "\u0120lawmakers": 10191, "\u0120protecting": 10192, 
"\u0120EPA": 10193, "\u0120yeah": 10194, "\u0120shame": 10195, "olph": 10196, "even": 10197, "xit": 10198, "\u0120attach": 10199, "\u0120representing": 10200, "\u0120obs": 10201, "\u0120Utah": 10202, "iffs": 10203, "\u0120Freedom": 10204, "\u00c3\u00b3": 10205, "AK": 10206, "\u0120incidents": 10207, "itage": 10208, "\u0120viewers": 10209, "cd": 10210, "\u0120mouse": 10211, "\u0120clar": 10212, "\u0120accordance": 10213, "\u0120bot": 10214, "cor": 10215, "\u0120Summer": 10216, "held": 10217, "\u0120innocent": 10218, "\u0120initiative": 10219, "ols": 10220, "________________________________": 10221, "\u0120spots": 10222, "pace": 10223, "\u0120conventional": 10224, "\u0120corporations": 10225, "\u0120blocked": 10226, "HD": 10227, "attered": 10228, "\u0120refers": 10229, "\u0120buck": 10230, "\u0120Digital": 10231, "120": 10232, "\u0120topics": 10233, "TF": 10234, "\u00c4\u0123": 10235, "brid": 10236, "reement": 10237, "\u0120underlying": 10238, "\u0120Member": 10239, "\u0120investigating": 10240, "\u0120pregnancy": 10241, "\u0120touchdown": 10242, "\u0120Band": 10243, "\u0120Caller": 10244, "\u0120instances": 10245, "PP": 10246, "wa": 10247, "Good": 10248, "\u01201991": 10249, "\u0120Cold": 10250, "\u0120fears": 10251, "\u0120remarks": 10252, "\u0128\u0134": 10253, "atal": 10254, "\u0120mit": 10255, "\u0120experiments": 10256, "ipt": 10257, "Color": 10258, "indu": 10259, "Update": 10260, "\u012093": 10261, "Ag": 10262, "\u0120\u00e5": 10263, "ancouver": 10264, "Both": 10265, "\u0120judges": 10266, "Object": 10267, "\u0120stere": 10268, "umbn": 10269, "\u0120participation": 10270, "\u0120Stars": 10271, "\u0120Jere": 10272, "\u0120weekly": 10273, "\u0120Ban": 10274, "\u0120conversations": 10275, "\u0120Pitt": 10276, "uz": 10277, "\u0120Indiana": 10278, "\u0120Kick": 10279, "\u0120infection": 10280, "\u0120heroes": 10281, "\u0120settled": 10282, "\u0120strip": 10283, "\u0120hal": 10284, "\u0120dump": 10285, "\u0120Sci": 10286, "\u0120les": 10287, "\u0120references": 10288, "\u0120URL": 10289, "\u0120Bridge": 10290, "\u0120wanting": 10291, "Force": 10292, "\u0120exclus": 10293, "Meanwhile": 10294, "mn": 10295, "\u0120gentle": 10296, "maker": 10297, "senal": 10298, "\u0120Gro": 10299, "ouri": 10300, "\u0120Rain": 10301, "\u0120Alliance": 10302, "\u0120lift": 10303, "ela": 10304, "SD": 10305, "\u0120Cleveland": 10306, "\u0120ranked": 10307, "\u0120stadium": 10308, "\u0120deadly": 10309, "\u00e4\u00b8": 10310, "\u0120riding": 10311, "aria": 10312, "\u0120Armor": 10313, "\u0120documentation": 10314, "\u0120Greece": 10315, "reek": 10316, "\u0120lens": 10317, "\u0120Sa": 10318, "\u0120gross": 10319, "\u0120Emer": 10320, "agers": 10321, "\u0120Dub": 10322, "\u0120Rh": 10323, "\u0120AMD": 10324, "\u0120arrival": 10325, "\u0120desert": 10326, "\u0120supplement": 10327, "\u0120Resp": 10328, "\u0120knee": 10329, "\u0120margin": 10330, "font": 10331, "ogg": 10332, "2010": 10333, "\u0120Pir": 10334, "\u0120Prom": 10335, "ivals": 10336, "\u0120intake": 10337, "\u0120differently": 10338, "ugs": 10339, "\u0120bits": 10340, "cluded": 10341, "\u0120searching": 10342, "\u0120Du": 10343, "umble": 10344, "\u0120functional": 10345, "\u0120Baltimore": 10346, "\u0120Could": 10347, "\u0120desired": 10348, "\u0120circuit": 10349, "\u0120Lyn": 10350, "\u0120GO": 10351, "\u0120False": 10352, "repre": 10353, "':": 10354, "alties": 10355, "\u0120minim": 10356, "\u0120drove": 10357, "\u0120Should": 10358, "\u0120hip": 10359, "\u0120pros": 10360, "\u0120utility": 10361, "\u0120Nature": 10362, "\u0120Mode": 10363, "President": 
10364, "opp": 10365, "rat": 10366, "formance": 10367, "\u0120concentration": 10368, "\u0120font": 10369, "\u0120Bud": 10370, "\u0120amid": 10371, "\u0120revers": 10372, "\u0120ML": 10373, "Bar": 10374, "\u0120interaction": 10375, "\u0120jurisd": 10376, "\u0120spells": 10377, "dep": 10378, "fil": 10379, "\u0120civilians": 10380, "utter": 10381, "\u0120Cooper": 10382, "\u0120Below": 10383, "\u0120entrance": 10384, "\u0120convert": 10385, "\u0120controversy": 10386, "owered": 10387, "\u0120contrary": 10388, "\u0120arc": 10389, "\u0120Executive": 10390, "\u0120Officer": 10391, "\u0120packages": 10392, "\u0120progressive": 10393, "width": 10394, "\u0120reserved": 10395, "vol": 10396, "\u0120Samsung": 10397, "\u0120printed": 10398, "\u0120centers": 10399, "\u0120introduce": 10400, "\u0120Kennedy": 10401, "\u0120odds": 10402, "\u0120surely": 10403, "\u0120independence": 10404, "\u0120passengers": 10405, "reprene": 10406, "\u0120Beh": 10407, "\u0120loves": 10408, "\u0120ESPN": 10409, "\u0120facilit": 10410, "\u0120identical": 10411, "\u0120doct": 10412, "\u0120partnership": 10413, "conf": 10414, "\u0120Hide": 10415, "\u0120confused": 10416, "\u0120Cow": 10417, "Men": 10418, "\u0120wrest": 10419, "\u0120Iraqi": 10420, "\u0120holes": 10421, "\u0120Studies": 10422, "\u0120pregnant": 10423, "hard": 10424, "\u0120signals": 10425, "IX": 10426, "\u0120pulling": 10427, "\u0120graduate": 10428, "\u0120nominee": 10429, "Date": 10430, "\u0120permitted": 10431, "\u0120\u00e2\u0124\u00ac": 10432, "\u0120Oklahoma": 10433, "Start": 10434, "\u0120authorized": 10435, "\u0120alarm": 10436, "\u0120Cos": 10437, "van": 10438, "\u0120generations": 10439, "cular": 10440, "\u0120dragon": 10441, "\u0120Software": 10442, "\u0120Edward": 10443, "\u0120controller": 10444, "Sen": 10445, "gered": 10446, "\u0120Vik": 10447, "\u0120approached": 10448, "Thank": 10449, "\u0120cance": 10450, "\u0120formula": 10451, "\u0120Small": 10452, "\u0120weakness": 10453, "\u0120ramp": 10454, "itudes": 10455, "jud": 10456, "\u0120brilliant": 10457, "\u0120accus": 10458, "source": 10459, "\u0120800": 10460, "\u0120Evil": 10461, "Sw": 10462, "\u0120homeless": 10463, "week": 10464, "iens": 10465, "rics": 10466, "\u0120Third": 10467, "TO": 10468, "\u0120organic": 10469, "\u0120presentation": 10470, "agh": 10471, "\u0120Download": 10472, "vation": 10473, "\u0120assembly": 10474, "orable": 10475, "holders": 10476, "\u0120Bernie": 10477, "\u0120Help": 10478, "\u0120tong": 10479, "\u0120Fight": 10480, "\u0120beach": 10481, "Book": 10482, "\u0120Lic": 10483, "\u0120rush": 10484, "\u0120Round": 10485, "oup": 10486, "\u0120Marx": 10487, "\u0120calculated": 10488, "\u0120Devil": 10489, "\u0120Sarah": 10490, "\u0120occasionally": 10491, "\u0120bullet": 10492, "Available": 10493, "gate": 10494, "\u012091": 10495, "\u0120hosp": 10496, "\u0120promises": 10497, "\u0120HIV": 10498, "\u0120Stadium": 10499, "\u0120Stock": 10500, "\u0120Corporation": 10501, "gage": 10502, "NG": 10503, "\u0120Credit": 10504, "\u0120sne": 10505, "ibl": 10506, "\u0120accum": 10507, "such": 10508, "\u0120terrorists": 10509, "\u0120consciousness": 10510, "\u0120Zh": 10511, "\u0120drama": 10512, "oola": 10513, "piration": 10514, "\u0120labour": 10515, "\u0120Nin": 10516, "\u0120utter": 10517, "\u0120democratic": 10518, "\u0120assass": 10519, "ilation": 10520, "\u0120gest": 10521, "\u0120abroad": 10522, "\u0120metab": 10523, "\u0120sorts": 10524, "\u0120flav": 10525, "UB": 10526, "\u0120mg": 10527, "\u0120Nothing": 10528, "\u0120Od": 10529, "\u0120musical": 10530, "2009": 10531, 
"\u0120drops": 10532, "ocated": 10533, "ateral": 10534, "000000": 10535, "\u0120gre": 10536, "\u0120equality": 10537, "\u0120burden": 10538, "\u0120vig": 10539, "\u0120Leader": 10540, "------------": 10541, "\u0120ceremony": 10542, "\u0120fighter": 10543, "\u0120actors": 10544, "\u0120\u00e6": 10545, "aman": 10546, "Fi": 10547, "\u0120align": 10548, "puter": 10549, "\u0120elder": 10550, "\u0120NSA": 10551, "\u0120representation": 10552, "\u0120Ontario": 10553, "ITH": 10554, "usalem": 10555, "\u0120harassment": 10556, "itzer": 10557, "\u0120symp": 10558, "\u0120boxes": 10559, "\u0120DR": 10560, "\u0120manifest": 10561, "atre": 10562, "\u0120^": 10563, "\u0120dies": 10564, "leton": 10565, "\u0120missions": 10566, "ethe": 10567, "\u0120resolve": 10568, "\u0120followers": 10569, "\u0120asc": 10570, "\u0120km": 10571, "lord": 10572, "ammed": 10573, "\u0120silent": 10574, "\u0120Associated": 10575, "\u0120timing": 10576, "\u0120prisoners": 10577, "\u0120Kings": 10578, "\u0120Five": 10579, "\u0120tower": 10580, "\u0120approaches": 10581, "\u0120precisely": 10582, "\u0120bureau": 10583, "\u0120Mother": 10584, "\u0120Iss": 10585, "\u0120keyboard": 10586, "itual": 10587, "\u0120funded": 10588, "\u0120staying": 10589, "\u0120psychological": 10590, "\u0120mile": 10591, "\u0120Leon": 10592, "\u0120Barb": 10593, "will": 10594, "\u0120wider": 10595, "\u0120Atlantic": 10596, "\u0120till": 10597, "\u0120Rome": 10598, "rot": 10599, "\u0120accompan": 10600, "\u0120flour": 10601, "aco": 10602, "World": 10603, "\u0120Express": 10604, "\u0120Yu": 10605, "Cor": 10606, "\u0120pleased": 10607, "party": 10608, "\u0120pointing": 10609, "\u0120inflation": 10610, "\u0120roy": 10611, "\u0120),": 10612, "ainer": 10613, "\u0120wedding": 10614, "ormon": 10615, "\u0120requiring": 10616, "\u0120qualified": 10617, "\u0120segment": 10618, "END": 10619, "\u0120sizes": 10620, "eals": 10621, "\u0120corrupt": 10622, "assador": 10623, "\u0120celeb": 10624, "\u0120dreams": 10625, "\u0120Mess": 10626, "\u0120checking": 10627, "\u0120Version": 10628, "\u0120preparing": 10629, "\u0120actively": 10630, "\u0120Diff": 10631, "\u0120lux": 10632, "\u0120Winter": 10633, "acteria": 10634, "\u0120NE": 10635, "\u0120deputy": 10636, "\u0120transgender": 10637, "\u0120summary": 10638, "\u0120inher": 10639, "eries": 10640, "char": 10641, "\u0120Yan": 10642, "\u0120knock": 10643, "\u0120Path": 10644, "\u0120lip": 10645, "roller": 10646, "\u0120impression": 10647, "\u0120celebrate": 10648, "\u0120slide": 10649, "\u0120guests": 10650, "\u0120clip": 10651, "FS": 10652, "\u0120savings": 10653, "\u0120captain": 10654, "\u0120legacy": 10655, "\u0120Denver": 10656, "\u0120wounded": 10657, "taboola": 10658, "ACT": 10659, "\u0120pursue": 10660, "\u0120oxy": 10661, "\u0120q": 10662, "\u0120semi": 10663, "\u0120Need": 10664, "\u0120Affairs": 10665, "\u0120obsc": 10666, "\u0120checked": 10667, "\u0120dual": 10668, "Code": 10669, "\u0120MD": 10670, "lem": 10671, "ulty": 10672, "\u0120\u00c2\u00a9": 10673, "\u0120Elizabeth": 10674, "\u0120centuries": 10675, "arded": 10676, "src": 10677, "\u0120evident": 10678, "ennis": 10679, "atin": 10680, "\u0120unemployment": 10681, "\u0120Mario": 10682, "\u0120intim": 10683, "Christ": 10684, "\u0120biological": 10685, "\u0120soldier": 10686, "\u0120Added": 10687, "\u0120math": 10688, "\u0120Gil": 10689, "\u0120bias": 10690, "\u0120dating": 10691, "\u0120Ocean": 10692, "\u0120mice": 10693, "Mus": 10694, "hire": 10695, "\u0120Tes": 10696, "Server": 10697, "limited": 10698, "Size": 10699, "\u0120meters": 10700, "\u0120rocket": 
10701, "essee": 10702, "\u0120certificate": 10703, "\u0120Iranian": 10704, "ASS": 10705, "\u0120grid": 10706, "Dec": 10707, "\u0120rolling": 10708, "commun": 10709, "\u0120Sweden": 10710, "bury": 10711, "\u0120tissue": 10712, "\u0120racism": 10713, "\u0120Local": 10714, "\u0120mystery": 10715, "\u0120examine": 10716, "\u0120stem": 10717, "\u0120sits": 10718, "\u0120hoped": 10719, "oting": 10720, "\u0120dialogue": 10721, "\u0120persu": 10722, "Watch": 10723, "lay": 10724, "MAN": 10725, "\u0120chronic": 10726, "\u0120Portland": 10727, "market": 10728, "\u0120SEC": 10729, "\u0120parallel": 10730, "\u0120scandal": 10731, "\u0120carries": 10732, "\u0120phenomenon": 10733, "human": 10734, "acker": 10735, "\u0120Ox": 10736, "\u0120retirement": 10737, "tainment": 10738, "ovie": 10739, "\u0120Gear": 10740, "\u0120duties": 10741, "\u0120dose": 10742, "\u0120scroll": 10743, "MB": 10744, "inf": 10745, "\u0120sauce": 10746, "\u0120landscape": 10747, "reddit": 10748, "\u0120Championship": 10749, "\u0120Reddit": 10750, "alid": 10751, "\u0120coin": 10752, "\u0120overs": 10753, "\u0120posting": 10754, "about": 10755, "\u0120fel": 10756, "andy": 10757, "\u0120bold": 10758, "\u0120focusing": 10759, "effect": 10760, "GR": 10761, "\u0120deemed": 10762, "\u0120recommendations": 10763, "\u0120stepped": 10764, "\u0120voter": 10765, "\u0120Deep": 10766, "\u0120Instagram": 10767, "\u0120moderate": 10768, "\u0120Maryland": 10769, "\u0120restricted": 10770, "\u0120MB": 10771, "\u0120Chall": 10772, "\u0120tob": 10773, "\u0120cir": 10774, "\u0120Occ": 10775, "\u0120Ever": 10776, "\u0120collaps": 10777, "INFO": 10778, "=-": 10779, "\u0120Pict": 10780, "\u0120Account": 10781, "nc": 10782, "\u0120ought": 10783, "\u0120export": 10784, "\u0120drunk": 10785, "('": 10786, "\u0120wise": 10787, "\u0120Mort": 10788, "necess": 10789, "\u0120ancest": 10790, "\u0120Incre": 10791, "\u0120frequent": 10792, "mir": 10793, "\u0120interpretation": 10794, "\u0120dependent": 10795, "\u0120coins": 10796, "\u0120Bol": 10797, "Video": 10798, "\u0120Justin": 10799, "\u0120fatal": 10800, "\u0120cooking": 10801, "\u0120confusion": 10802, "ipher": 10803, "\u0120custody": 10804, "\u0120Morgan": 10805, "omach": 10806, "\u0120Governor": 10807, "\u0120restaurants": 10808, "eling": 10809, "\u0120acknowledged": 10810, "\u0120ther": 10811, "\u0120genes": 10812, "ching": 10813, "Hey": 10814, "\u0120tactics": 10815, "\u0120Mexican": 10816, "\u0120vend": 10817, "\u0120hes": 10818, "quer": 10819, "\u0120noting": 10820, "\u0120Cameron": 10821, "\u0120targeting": 10822, "rock": 10823, "\u0120credits": 10824, "\u0120emotions": 10825, "\u0120representatives": 10826, "news": 10827, "\u0120legislative": 10828, "\u0120removing": 10829, "\u0120tweeted": 10830, "\u0120Carter": 10831, "\u0120Fixed": 10832, "\u0120forcing": 10833, "\u0120speaker": 10834, "\u0120males": 10835, "\u0120Vietnam": 10836, "lined": 10837, "\u0120concepts": 10838, "\u0120voices": 10839, "oir": 10840, "\u0120Trib": 10841, "Whe": 10842, "\u0120Jerusalem": 10843, "\u0120Sant": 10844, "\u0120cul": 10845, "\u0120lady": 10846, "\u0120Hawai": 10847, "\u0120arts": 10848, "\u0120Inn": 10849, "\u0120Machine": 10850, "\u0120Emperor": 10851, "\u0120slot": 10852, "gly": 10853, "\u0120Process": 10854, "III": 10855, "\u0120athletes": 10856, "\u0120Temple": 10857, "\u0120Represent": 10858, "\u0120presc": 10859, "\u0120tons": 10860, "\u0120golden": 10861, "\u0120punch": 10862, "\u0120GR": 10863, "iverpool": 10864, "\u0120enact": 10865, "\u0120lobby": 10866, "\u0120mos": 10867, "\u0120picking": 10868, 
"\u0120lifetime": 10869, "\u0120cognitive": 10870, "Each": 10871, "zo": 10872, "\u0120dub": 10873, "\u0120consists": 10874, "oln": 10875, "\u0120festival": 10876, "amous": 10877, "\u0120intellig": 10878, "words": 10879, "\u0120Smart": 10880, "\u0120dele": 10881, "\u0120lapt": 10882, "\u0120magical": 10883, "\u0120Sin": 10884, "bus": 10885, "urities": 10886, "ighth": 10887, "\u0120Ruby": 10888, "\u0120Sure": 10889, "olving": 10890, "\u0120jun": 10891, "OST": 10892, "\u0120imposed": 10893, "\u0120astron": 10894, "\u0120correl": 10895, "\u0120NS": 10896, "\u0120Kit": 10897, "\u0120Future": 10898, "burn": 10899, "\u0120immune": 10900, "ocus": 10901, "\u0120courses": 10902, "\u0120String": 10903, "\u0120lean": 10904, "\u0120ghost": 10905, "\u0120outcomes": 10906, "\u0120expense": 10907, "\u0120everyday": 10908, "\u0120acceptable": 10909, "Ah": 10910, "\u0120equipped": 10911, "\u0120orange": 10912, "FR": 10913, "\u0120Dutch": 10914, "Though": 10915, "\u0120Rank": 10916, "QU": 10917, "\u0120Roberts": 10918, "what": 10919, "rend": 10920, "\u0120disappear": 10921, "\u0120spawn": 10922, "\u0120Lam": 10923, "ois": 10924, "\u0120deserve": 10925, "\u0120minimal": 10926, "\u0120nervous": 10927, "\u0120Would": 10928, "\u0120rook": 10929, "\u0120Vancouver": 10930, "\u0120resign": 10931, "shire": 10932, "\u0120Works": 10933, "\u0120Build": 10934, "\u0120affordable": 10935, "\u0120Gary": 10936, "\u0120Arena": 10937, "\u0120hanging": 10938, "\u0120implications": 10939, "\u0120Song": 10940, "\u0120maintaining": 10941, "\u0120guards": 10942, "CON": 10943, "\u0120derived": 10944, "\u0120executed": 10945, "\u0120theories": 10946, "\u0120quoted": 10947, "\u0120Andre": 10948, "oga": 10949, "seless": 10950, "info": 10951, "\u0120Belg": 10952, "\u0120tears": 10953, "\u0120Surv": 10954, "\u0120birthday": 10955, "igious": 10956, "immer": 10957, "\u0120spectrum": 10958, "\u0120architecture": 10959, "\u0120recruit": 10960, "arma": 10961, "Table": 10962, "\u0120monsters": 10963, "\u0120Gov": 10964, "\u0120destination": 10965, "\u0120attractive": 10966, "\u0120foss": 10967, "\u0120Moreover": 10968, "\u0120presents": 10969, "THE": 10970, "\u0120reply": 10971, "pton": 10972, "\u0120cum": 10973, "\u0120delight": 10974, "\u0120affects": 10975, "\u0120donations": 10976, "\u0120Toy": 10977, "\u0120Him": 10978, "MENT": 10979, "\u0120overcome": 10980, "itched": 10981, "\u0120Fantasy": 10982, "\u0120Hat": 10983, "\u0120Beast": 10984, "bott": 10985, "\u0120investigations": 10986, "Run": 10987, "\u0120hunting": 10988, "di": 10989, "fund": 10990, "\u0120sessions": 10991, "estyle": 10992, "\u0120portray": 10993, "oids": 10994, "Yeah": 10995, "\u0120communicate": 10996, "\u0120comedy": 10997, "\u0120Yang": 10998, "\u0120belt": 10999, "\u0120Marine": 11000, "\u0120predicted": 11001, "Play": 11002, "\u0120importantly": 11003, "\u0120remarkable": 11004, "\u0120eliminate": 11005, "David": 11006, "\u0120bind": 11007, "VID": 11008, "\u0120advocates": 11009, "\u0120Gaza": 11010, "imp": 11011, "DB": 11012, "\u0120Na": 11013, "\u0120Similar": 11014, "IES": 11015, "\u0120charity": 11016, "vas": 11017, "math": 11018, "\u0120\u00e2\u0138": 11019, "oker": 11020, "ndum": 11021, "\u0120caps": 11022, "\u0120Hal": 11023, "2000": 11024, "ean": 11025, "\u0120fleet": 11026, "\u0120recre": 11027, "Right": 11028, "\u0120sleeping": 11029, "ijing": 11030, "kind": 11031, "\u0120designated": 11032, "\u00c3\u00a4": 11033, "\u0120animation": 11034, "kee": 11035, "\u0120Introdu": 11036, "\u0120/>": 11037, "\u0120delayed": 11038, "\u0120tremend": 11039, 
"\u0120curious": 11040, "Use": 11041, "\u0120lect": 11042, "dam": 11043, "\u0120innovation": 11044, "\u0120Points": 11045, "\u0120loading": 11046, "\u0120dispute": 11047, "ctic": 11048, "irds": 11049, "\u0120BY": 11050, "\u0120nurs": 11051, "\u0120Value": 11052, "IONS": 11053, "\u0120Hum": 11054, "\u0120template": 11055, "mers": 11056, "\u0120appearances": 11057, "\u0120Entertainment": 11058, "\u0120translation": 11059, "\u0120sake": 11060, "\u0120beneath": 11061, "\u0120inhib": 11062, "\u0120euro": 11063, "abetes": 11064, "\u0120studying": 11065, "\u0120Mas": 11066, "\u0120perceived": 11067, "\u0120examined": 11068, "\u0120eager": 11069, "\u0120coaches": 11070, "\u0120imper": 11071, "chi": 11072, "\u0120produces": 11073, "\").": 11074, "\u0120Everyone": 11075, "\u0120municip": 11076, "\u0120girlfriend": 11077, "\u0120hire": 11078, "\u0120Vice": 11079, "\u0120suitable": 11080, "opy": 11081, "\u0120inequ": 11082, "\u0120Duke": 11083, "fish": 11084, "first": 11085, "\u0120Obs": 11086, "\u0120interior": 11087, "\u0120Bruce": 11088, "\u0120Ry": 11089, "\u0120analys": 11090, "\u0120considerable": 11091, "\u0120forecast": 11092, "\u0120fert": 11093, "orship": 11094, "\u0120Drug": 11095, "\u0120ALL": 11096, ":\"": 11097, "thur": 11098, "\u0120Mail": 11099, "\u0120ballot": 11100, "\u0120instantly": 11101, "\u0120Channel": 11102, "\u0120picks": 11103, "\u01201989": 11104, "\u0120tent": 11105, "oli": 11106, "\u0120civilian": 11107, "bling": 11108, "ello": 11109, "bu": 11110, "\u0120inch": 11111, "\u0120logo": 11112, "\u0120cooperation": 11113, "\u0120walks": 11114, "\u0120investments": 11115, "\u0120imprison": 11116, "\u0120Festival": 11117, "\u0120Ky": 11118, "\u0120legally": 11119, "\u0120gri": 11120, "charg": 11121, "Sl": 11122, "\u0120threatening": 11123, "duction": 11124, "flow": 11125, "\u0120dismissed": 11126, "ibraries": 11127, "cap": 11128, "ele": 11129, "\u0120McG": 11130, "\u0120Harvard": 11131, "\u0120Conservative": 11132, "\u0120CBS": 11133, "png": 11134, "\u0120roots": 11135, "\u0120Having": 11136, "umbled": 11137, "\u0120Fun": 11138, "\\/": 11139, "\u0120Search": 11140, "plex": 11141, "\u0120discussing": 11142, "\u0120continu": 11143, "\u0120Tai": 11144, "\u0120Wik": 11145, "Free": 11146, "fit": 11147, "\u0120refuse": 11148, "\u0120managing": 11149, "\u0120synd": 11150, "ipedia": 11151, "walk": 11152, "\u0120professionals": 11153, "\u0120guidance": 11154, "\u0120universities": 11155, "\u0120assemb": 11156, "untu": 11157, "Finally": 11158, "ASE": 11159, "\u0120Auto": 11160, "\u0120Had": 11161, "\u0120anniversary": 11162, "LD": 11163, "\u0120Dur": 11164, "\u0120Ultimate": 11165, "ihad": 11166, "product": 11167, "\u0120transit": 11168, "\u0120restore": 11169, "\u0120explaining": 11170, "\u0120asset": 11171, "\u0120transferred": 11172, "\u0120burst": 11173, "apolis": 11174, "\u0120Magazine": 11175, "\u0120Cra": 11176, "\u0120BR": 11177, "gged": 11178, "\u0120HE": 11179, "Mich": 11180, "bet": 11181, "\u0120Lady": 11182, "ylum": 11183, "erves": 11184, "\u0120meets": 11185, "white": 11186, "Log": 11187, "\u0120corresponding": 11188, "\u0120insisted": 11189, "GG": 11190, "\u0120surrounded": 11191, "\u0120tens": 11192, "\u0120lane": 11193, "\u0120coinc": 11194, "home": 11195, "\u0120existed": 11196, "ected": 11197, "\u0120Double": 11198, "lamm": 11199, "\u0120skept": 11200, "exp": 11201, "\u0120perception": 11202, "iev": 11203, "\u0120Being": 11204, "oft": 11205, "\u0120adopt": 11206, ".:": 11207, "];": 11208, "Windows": 11209, "\u0120satellite": 11210, "ASH": 11211, "\u0120infant": 11212, 
"description": 11213, "\u0120Meanwhile": 11214, "cm": 11215, "oca": 11216, "\u0120Treat": 11217, "actor": 11218, "\u0120tobacco": 11219, "\u0120Norm": 11220, "emption": 11221, "\u0120flesh": 11222, "\u0120je": 11223, "oop": 11224, "\u0120Heaven": 11225, "\u0120beating": 11226, "anim": 11227, "\u0120gathering": 11228, "\u0120cultiv": 11229, "GO": 11230, "abe": 11231, "\u0120Jonathan": 11232, "\u0120Safety": 11233, "\u0120badly": 11234, "prot": 11235, "\u0120choosing": 11236, "\u0120contacted": 11237, "\u0120quit": 11238, "\u0120distur": 11239, "\u0120stir": 11240, "\u0120token": 11241, "Det": 11242, "\u0120Pa": 11243, "\u0120functionality": 11244, "003": 11245, "some": 11246, "\u0120limitations": 11247, "\u0120meth": 11248, "build": 11249, "config": 11250, "NT": 11251, "rell": 11252, "blem": 11253, "\u0120Mom": 11254, "\u0120veterans": 11255, "\u0120Hu": 11256, "\u0120trends": 11257, "arer": 11258, "\u0120Given": 11259, "\u0120Caption": 11260, "may": 11261, "AST": 11262, "\u0120wondering": 11263, "\u0120Clark": 11264, "normal": 11265, "\u0120separated": 11266, "\u0120desp": 11267, "stic": 11268, "brew": 11269, "\u0120relating": 11270, "\u0120Nik": 11271, "\u0120Farm": 11272, "\u0120enthusi": 11273, "good": 11274, "deb": 11275, "\u0120activist": 11276, "\u0120mart": 11277, "\u0120explosion": 11278, "\u0120Economic": 11279, "Link": 11280, "\u0120insight": 11281, "\u0120convenient": 11282, "\u0120counterpart": 11283, "support": 11284, "\u0120Virt": 11285, "agen": 11286, "\u0120Tennessee": 11287, "\u0120Simon": 11288, "\u0120Award": 11289, "OCK": 11290, "\u0120Figure": 11291, "\u0120overseas": 11292, "\u0120pride": 11293, "\u0120Cas": 11294, "note": 11295, "mg": 11296, "Current": 11297, "\u0120displays": 11298, "content": 11299, "\u0120traveling": 11300, "\u0120hospitals": 11301, "\u0120Financial": 11302, "\u0120Past": 11303, "\u0120defendant": 11304, "\u0120streaming": 11305, "mble": 11306, "\u0120Berlin": 11307, "uki": 11308, "\u0120distribut": 11309, "\u0120antib": 11310, "\u0120chocolate": 11311, "\u0120Castle": 11312, "\u0120interrupt": 11313, "\u0120Row": 11314, "\u0120conversion": 11315, "\u0120bugs": 11316, "\u0120Rather": 11317, "liest": 11318, "LY": 11319, "\u0120Jean": 11320, "common": 11321, "akh": 11322, "\u0120130": 11323, "otton": 11324, "\u0120Dean": 11325, "\u0120amendment": 11326, "\u0120gameplay": 11327, "\u0120Warren": 11328, "oda": 11329, "\u0120highlights": 11330, "\u0120irre": 11331, "\u0120NATO": 11332, "\u0120balls": 11333, "\u0120demanding": 11334, "URE": 11335, "\u0120Luke": 11336, "Figure": 11337, "stop": 11338, "onia": 11339, "zone": 11340, "izers": 11341, "\u0120WR": 11342, "\u0120awarded": 11343, "\u0120regulatory": 11344, "\u0120Hart": 11345, "\u0120SN": 11346, "pling": 11347, "\u0120sour": 11348, "\u0120Pixel": 11349, "usive": 11350, "\u0120fet": 11351, "\u0120Sent": 11352, "\u0120automatic": 11353, "\u0120fer": 11354, "vernment": 11355, "\u0120Khan": 11356, "TON": 11357, "father": 11358, "\u0120extraordinary": 11359, "throp": 11360, "\u0120Python": 11361, "\u0120GPU": 11362, "\u0120sexually": 11363, "\u0120desktop": 11364, "itivity": 11365, "\u0120Antonio": 11366, "\u0120orient": 11367, "\u0120ears": 11368, "obby": 11369, "ouses": 11370, "vertisements": 11371, "\u0120manufacturers": 11372, "icient": 11373, "minute": 11374, "\u0120conviction": 11375, "\u0120garden": 11376, "public": 11377, "\u0120satisfied": 11378, "fold": 11379, "OK": 11380, "\u0120inhab": 11381, "\u0120Think": 11382, "\u0120programme": 11383, "\u0120stomach": 11384, "\u0120coordin": 11385, 
"\u0120holy": 11386, "\u0120threshold": 11387, "\u0120rhet": 11388, "\u0120serial": 11389, "\u0120employers": 11390, "\u0120Everything": 11391, "rah": 11392, "\u0120bother": 11393, "\u0120brands": 11394, "Value": 11395, "\u0120Ted": 11396, "\u0120Planet": 11397, "\u0120pink": 11398, "\u0120Furthermore": 11399, "sa": 11400, "PE": 11401, "reck": 11402, "\u0120USD": 11403, "otte": 11404, "\u0120&&": 11405, "\u0120landed": 11406, "gets": 11407, "\u0120producers": 11408, "\u0120healthcare": 11409, "\u0120dominant": 11410, "\u0120destro": 11411, "\u0120amended": 11412, "chron": 11413, "\u0120fits": 11414, "\u0120Syd": 11415, "\u0120Authority": 11416, "ATCH": 11417, "\u0120fights": 11418, "\u0120LLC": 11419, "\u0120---": 11420, "\u0120Corp": 11421, "\u0120toxic": 11422, "specific": 11423, "\u0120Corn": 11424, "\u0120Chel": 11425, "\u0120telephone": 11426, "\u0120Pant": 11427, "\u0120mysterious": 11428, "aunch": 11429, "odox": 11430, "media": 11431, "\u0120witnesses": 11432, "agu": 11433, "\u0120questioned": 11434, "\u0120Brexit": 11435, "\u0120Remember": 11436, "enez": 11437, "\u0120endorse": 11438, "iatric": 11439, "\u0120Ident": 11440, "\u0120ridiculous": 11441, "110": 11442, "\u0120prayer": 11443, "\u0120scientist": 11444, "\u01201950": 11445, "\u0120Aqu": 11446, "\u0120underground": 11447, "\u0120UFC": 11448, "mare": 11449, "\u0120Later": 11450, "wich": 11451, "\u0120subscrib": 11452, "\u0120hosts": 11453, "\u0120err": 11454, "\u0120grants": 11455, "antom": 11456, "\u0120summon": 11457, "early": 11458, "\u0120Clear": 11459, "\u0120Prim": 11460, "\u0120suspension": 11461, "\u0120guaranteed": 11462, "apper": 11463, "\u0120rice": 11464, "\u0120Sean": 11465, "\u0120Shin": 11466, "\u0120referendum": 11467, "\u0120fled": 11468, "rust": 11469, "\u0120360": 11470, "tery": 11471, "\u0120shocked": 11472, "BR": 11473, "\u0120Oil": 11474, "\u0120Allah": 11475, "\u0120partly": 11476, "\u0120ignor": 11477, "\u0120transmission": 11478, "\u0120homosexual": 11479, "iversal": 11480, "\u0120hopefully": 11481, "\u00e3\u0124\u00a4": 11482, "\u0120lesson": 11483, "Leg": 11484, "\u0120..": 11485, "Yet": 11486, "table": 11487, "appropri": 11488, "rett": 11489, "\u0120boards": 11490, "\u0120incorrect": 11491, "\u0120bacteria": 11492, "aru": 11493, "amac": 11494, "\u0120snap": 11495, ".'\"": 11496, "\u0120parad": 11497, "tem": 11498, "heart": 11499, "\u0120availability": 11500, "\u0120wisdom": 11501, "\u0120(+": 11502, "\u0120priest": 11503, "\u0120\u00c2\u0142\u0120\u00c2\u0142": 11504, "Open": 11505, "\u0120span": 11506, "\u0120parameter": 11507, "\u0120convince": 11508, "\u0120(%)": 11509, "rac": 11510, "\u0120fo": 11511, "\u0120safely": 11512, "\u0120converted": 11513, "\u0120Olympic": 11514, "\u0120reserve": 11515, "\u0120healing": 11516, "\u0120Mine": 11517, "Max": 11518, "\u0120inherent": 11519, "\u0120Graham": 11520, "\u0120integrated": 11521, "Dem": 11522, "\u0120pipeline": 11523, "\u0120applying": 11524, "\u0120embed": 11525, "\u0120Charlie": 11526, "\u0120cave": 11527, "2008": 11528, "\u0120consensus": 11529, "\u0120rewards": 11530, "Pal": 11531, "\u0120HTML": 11532, "\u0120popularity": 11533, "looking": 11534, "\u0120Sword": 11535, "\u0120Arts": 11536, "')": 11537, "\u0120electron": 11538, "clusions": 11539, "\u0120integrity": 11540, "\u0120exclusively": 11541, "\u0120grace": 11542, "\u0120torture": 11543, "\u0120burned": 11544, "two": 11545, "\u0120180": 11546, "Produ": 11547, "\u0120entreprene": 11548, "raphics": 11549, "\u0120gym": 11550, "ricane": 11551, "\u0120Tam": 11552, "\u0120administrative": 
11553, "\u0120manufacturer": 11554, "\u0120vel": 11555, "\u0120Ni": 11556, "\u0120isolated": 11557, "\u0120Medicine": 11558, "\u0120backup": 11559, "\u0120promoting": 11560, "\u0120commander": 11561, "\u0120flee": 11562, "\u0120Russell": 11563, "\u0120forgotten": 11564, "\u0120Missouri": 11565, "\u0120residence": 11566, "mons": 11567, "\u0120resemb": 11568, "\u0120wand": 11569, "\u0120meaningful": 11570, "PT": 11571, "\u0120bol": 11572, "\u0120helic": 11573, "\u0120wealthy": 11574, "\u0120rifle": 11575, "strong": 11576, "rowing": 11577, "plan": 11578, "asury": 11579, "\u00e2\u0122\u00a6.": 11580, "\u0120expanding": 11581, "\u0120Hamilton": 11582, "\u0120receives": 11583, "SI": 11584, "eatures": 11585, "\u0120Anim": 11586, "REE": 11587, "Put": 11588, "\u0120briefly": 11589, "rive": 11590, "\u0120stimul": 11591, "\u0120``(": 11592, "\u0120__": 11593, "\u0120chip": 11594, "\u0120haz": 11595, "\u0120prize": 11596, "\u0120Things": 11597, "ACE": 11598, "ulin": 11599, "dict": 11600, "oku": 11601, "\u0120associate": 11602, "ockets": 11603, "youtube": 11604, "Story": 11605, "ategory": 11606, "\u0120mild": 11607, "ailing": 11608, "\u0120Ye": 11609, "Orig": 11610, "\u0120Ka": 11611, "orig": 11612, "\u0120propaganda": 11613, "\u0120anonymous": 11614, "\u0120struggled": 11615, "\u0120outrage": 11616, "ATED": 11617, "\u0120Beijing": 11618, "rary": 11619, "\u0120leather": 11620, "\u0120worlds": 11621, "\u0120broader": 11622, "125": 11623, "idal": 11624, "\u0120Better": 11625, "\u0120tear": 11626, "Ext": 11627, "\u0120proposals": 11628, "\u0120iter": 11629, "\u0120Squad": 11630, "\u0120volunt": 11631, "mi": 11632, "Did": 11633, "\u0120Pu": 11634, "pin": 11635, "\u0120speakers": 11636, "\u0120borders": 11637, "\u0120figured": 11638, "='": 11639, "\u0120simultaneously": 11640, "aeda": 11641, "\u0120charging": 11642, "\u0120urged": 11643, "\u0120conj": 11644, "256": 11645, "\u0120Gordon": 11646, "merce": 11647, "\u0120documentary": 11648, "Share": 11649, "itol": 11650, "ONE": 11651, "\u0120Garden": 11652, "hatt": 11653, "\u0120Thompson": 11654, "aneous": 11655, "apore": 11656, "\u0120tanks": 11657, "\u0120lessons": 11658, "track": 11659, "\u0120outstanding": 11660, "\u0120volunteers": 11661, "\u0120spray": 11662, "\u0120managers": 11663, "large": 11664, "\u0120camps": 11665, "\u0120artificial": 11666, "\u0120Ru": 11667, "\u0120bags": 11668, "thal": 11669, "\u0120compatible": 11670, "\u0120Blade": 11671, "\u0120fed": 11672, "\u0120argues": 11673, "FI": 11674, "\u0120unfair": 11675, "\u0120corn": 11676, "\u0120offset": 11677, "\u0120directions": 11678, "\u0120disappointed": 11679, "\u0120Convention": 11680, "\u0120viewing": 11681, "ME": 11682, "ocity": 11683, "\u0120towns": 11684, "\u0120layers": 11685, "\u0120rolled": 11686, "\u0120jumped": 11687, "\u0120attribute": 11688, "\u0120unnecess": 11689, "incoln": 11690, "\u0120suppose": 11691, "\u0120Nether": 11692, "cha": 11693, "\u0120buried": 11694, "\u0120sixth": 11695, "Ben": 11696, "ressing": 11697, "OUR": 11698, "\u0120wound": 11699, "\u0120cycl": 11700, "\u0120mechanisms": 11701, "\u0120congressional": 11702, "\u0120Element": 11703, "\u0120agreements": 11704, "\u0120decor": 11705, "\u0120closest": 11706, "\u0120Mit": 11707, "Google": 11708, "}}": 11709, "\u0120mixture": 11710, "\u0120fluid": 11711, "Sign": 11712, "\u0120Scholar": 11713, "\u0120pist": 11714, "asket": 11715, "abling": 11716, "\u0120racing": 11717, "hero": 11718, "riel": 11719, "assy": 11720, "\u0120cheaper": 11721, "ben": 11722, "\u0120vertical": 11723, "amacare": 11724, "\u0120Reading": 
11725, "gments": 11726, "\u0120helicop": 11727, "\u0120sacrifice": 11728, "aya": 11729, "paren": 11730, "VA": 11731, "\u0120Les": 11732, "\u0120Studio": 11733, "\u0120violations": 11734, "\u0120Anna": 11735, "acer": 11736, "\u00e9\u00be": 11737, "\u0120Rat": 11738, "\u0120Beck": 11739, "\u0120Dick": 11740, "\u0120ACT": 11741, "\u0120composition": 11742, "\u0120texture": 11743, "\u0120Own": 11744, "\u0120smartphone": 11745, "\u0120NA": 11746, "\u0120forb": 11747, "import": 11748, "\u0120defending": 11749, "ilst": 11750, "rer": 11751, "\u0120oh": 11752, "\u0120Jeremy": 11753, "\u0120banking": 11754, "ceptions": 11755, "\u0120respective": 11756, "/.": 11757, "\u0120drinks": 11758, "\u0120Wi": 11759, "\u0120bands": 11760, "\u0120Liverpool": 11761, "\u0120grip": 11762, "\u0120Buy": 11763, "\u0120openly": 11764, "\u0120reviewed": 11765, "pert": 11766, "\u0120verify": 11767, "\u0120Cole": 11768, "\u0120Wales": 11769, "MO": 11770, "\u0120unpre": 11771, "\u0120shelter": 11772, "\u0120Imperial": 11773, "\u0120gui": 11774, "\u0120Dak": 11775, "\u0120suggestions": 11776, "\u0120explicitly": 11777, "\u0120slave": 11778, "\u0120blockchain": 11779, "\u0120competing": 11780, "\u0120promising": 11781, "SON": 11782, "\u0120soccer": 11783, "\u0120constitution": 11784, "429": 11785, "\u0120distract": 11786, "\u0120User": 11787, "esides": 11788, "\u0120Method": 11789, "\u0120Tokyo": 11790, "\u0120accompanied": 11791, "Client": 11792, "sur": 11793, "alog": 11794, "\u0120identification": 11795, "\u0120invasion": 11796, "asma": 11797, "\u0120industries": 11798, "ppers": 11799, "\u0120subtle": 11800, "\u0120Unit": 11801, "natural": 11802, "\u0120survived": 11803, "\u0120flaw": 11804, "\u013a\u0127": 11805, "\u0120Holl": 11806, "\u0120deficit": 11807, "\u0120tutorial": 11808, "\u0120Chance": 11809, "\u0120arguing": 11810, "\u0120contemporary": 11811, "\u0120integration": 11812, "forward": 11813, "\u0120tum": 11814, "itis": 11815, "\u0120hiding": 11816, "\u0120Domin": 11817, "\u0120Tan": 11818, "\u0120Building": 11819, "\u0120Vin": 11820, "\u0120spokesperson": 11821, "\u0120Notes": 11822, "\u0120emerging": 11823, "\u0120preparation": 11824, "\u0120prost": 11825, "\u0120suspects": 11826, "\u0120autonom": 11827, "Description": 11828, "\u0120dealt": 11829, "\u0120Pear": 11830, "\u0120steady": 11831, "\u0120decreased": 11832, "\u0120sovere": 11833, "\u0120Clin": 11834, "\u0120gradually": 11835, "orses": 11836, "\u0120WAR": 11837, "Serv": 11838, "\u00e3\u0124\u00a2": 11839, "hr": 11840, "\u0120dirty": 11841, "\u0120Barn": 11842, "\u0120BC": 11843, "\u0120dil": 11844, "\u0120calendar": 11845, "\u0120compliance": 11846, "\u0120chamber": 11847, "bb": 11848, "\u0120passenger": 11849, "ateful": 11850, "\u0120Title": 11851, "\u0120Sydney": 11852, "\u0120Got": 11853, "\u0120darkness": 11854, "\u0120defect": 11855, "\u0120packed": 11856, "assion": 11857, "\u0120gods": 11858, "\u0120harsh": 11859, "ICK": 11860, "leans": 11861, "\u0120algorithm": 11862, "\u0120oxygen": 11863, "\u0120visits": 11864, "\u0120blade": 11865, "\u0120kilomet": 11866, "\u0120Kentucky": 11867, "\u0120killer": 11868, "Pack": 11869, "enny": 11870, "\u0120divine": 11871, "\u0120nomination": 11872, "being": 11873, "\u0120engines": 11874, "\u0120cats": 11875, "\u0120buffer": 11876, "\u0120Phill": 11877, "\u0120traff": 11878, "AGE": 11879, "\u0120tongue": 11880, "\u0120radiation": 11881, "erer": 11882, "mem": 11883, "\u0120Explicit": 11884, "\u00e9\u00be\u012f": 11885, "\u0120couples": 11886, "\u0120physics": 11887, "\u0120McK": 11888, "\u0120politically": 
11889, "awks": 11890, "\u0120Bloom": 11891, "\u0120worship": 11892, "eger": 11893, "uter": 11894, "\u0120FO": 11895, "\u0120mathemat": 11896, "\u0120sentenced": 11897, "\u0120disk": 11898, "\u0120Marg": 11899, "\u0120/*": 11900, "PI": 11901, "\u0120optional": 11902, "\u0120babies": 11903, "\u0120seeds": 11904, "\u0120Scottish": 11905, "\u0120thy": 11906, "]]": 11907, "\u0120Hitler": 11908, "PH": 11909, "ngth": 11910, "\u0120recovered": 11911, "inge": 11912, "\u0120powder": 11913, "\u0120lips": 11914, "\u0120designer": 11915, "\u0120disorders": 11916, "\u0120courage": 11917, "\u0120chaos": 11918, "\"},{\"": 11919, "\u0120carrier": 11920, "bably": 11921, "High": 11922, "\u0120RT": 11923, "esity": 11924, "len": 11925, "\u0120routes": 11926, "uating": 11927, "Fil": 11928, "NOT": 11929, "wall": 11930, "sburgh": 11931, "\u0120engaging": 11932, "\u0120JavaScript": 11933, "orer": 11934, "lihood": 11935, "\u0120unions": 11936, "\u0120Federation": 11937, "\u0120Tesla": 11938, "\u0120completion": 11939, "\u0120Ta": 11940, "\u0120privilege": 11941, "\u0120Orange": 11942, "\u0120neur": 11943, "parency": 11944, "\u0120bones": 11945, "\u0120titled": 11946, "\u0120prosecutors": 11947, "\u0120ME": 11948, "\u0120engineer": 11949, "\u0120Universe": 11950, "\u0120Hig": 11951, "nie": 11952, "oard": 11953, "\u0120hearts": 11954, "\u0120Gre": 11955, "ussion": 11956, "\u0120ministry": 11957, "\u0120penet": 11958, "\u0120Nut": 11959, "\u0120Ow": 11960, "\u0120XP": 11961, "instein": 11962, "\u0120bulk": 11963, "System": 11964, "icism": 11965, "\u0120Marketable": 11966, "\u0120preval": 11967, "\u0120poster": 11968, "\u0120attending": 11969, "urable": 11970, "\u0120licensed": 11971, "\u0120Gh": 11972, "etry": 11973, "\u0120Tradable": 11974, "\u0120blast": 11975, "\u00e0\u00a4": 11976, "\u0120Titan": 11977, "elled": 11978, "die": 11979, "Have": 11980, "\u0120Flame": 11981, "\u0120profound": 11982, "\u0120participating": 11983, "\u0120anime": 11984, "\u0120Ess": 11985, "\u0120specify": 11986, "\u0120regarded": 11987, "\u0120Spell": 11988, "\u0120sons": 11989, "owned": 11990, "\u0120merc": 11991, "\u0120experimental": 11992, "lando": 11993, "hs": 11994, "\u0120Dungeon": 11995, "inos": 11996, "\u0120comply": 11997, "\u0120Systems": 11998, "arth": 11999, "\u0120seized": 12000, "local": 12001, "\u0120Girls": 12002, "udo": 12003, "oned": 12004, "\u0120Fle": 12005, "\u0120constructed": 12006, "\u0120hosted": 12007, "\u0120scared": 12008, "actic": 12009, "\u0120Islands": 12010, "\u0120MORE": 12011, "\u0120bless": 12012, "\u0120blocking": 12013, "\u0120chips": 12014, "\u0120evac": 12015, "Ps": 12016, "\u0120corporation": 12017, "\u0120ox": 12018, "\u0120lighting": 12019, "\u0120neighbors": 12020, "\u0120Ub": 12021, "aro": 12022, "\u0120beef": 12023, "\u0120Uber": 12024, "Facebook": 12025, "armed": 12026, "itate": 12027, "\u0120Rating": 12028, "\u0120Quick": 12029, "\u0120occupied": 12030, "\u0120aims": 12031, "\u0120Additionally": 12032, "\u0120Interest": 12033, "\u0120dramatically": 12034, "\u0120heal": 12035, "\u0120painting": 12036, "\u0120engineers": 12037, "MM": 12038, "\u0120Must": 12039, "\u0120quantity": 12040, "Paul": 12041, "\u0120earnings": 12042, "\u0120Posts": 12043, "stra": 12044, "\u00e3\u0125\u00bc\u00e3\u0125": 12045, "\u0120stance": 12046, "\u0120dropping": 12047, "script": 12048, "\u0120dressed": 12049, "Make": 12050, "\u0120justify": 12051, "\u0120Ltd": 12052, "\u0120prompted": 12053, "\u0120scrut": 12054, "\u0120speeds": 12055, "\u0120Giants": 12056, "omer": 12057, "\u0120Editor": 12058, 
"\u0120describing": 12059, "\u0120Lie": 12060, "mented": 12061, "\u0120nowhere": 12062, "ocaly": 12063, "\u0120instruction": 12064, "fortable": 12065, "\u0120entities": 12066, "\u0120cm": 12067, "\u0120Natural": 12068, "\u0120inquiry": 12069, "\u0120pressed": 12070, "izont": 12071, "forced": 12072, "\u0120raises": 12073, "\u0120Netflix": 12074, "\u0120Side": 12075, "\u0120outer": 12076, "\u0120amongst": 12077, "ims": 12078, "owski": 12079, "\u0120climb": 12080, "never": 12081, "\u0120combine": 12082, "ding": 12083, "\u0120compr": 12084, "\u0120significance": 12085, "\u0120remembered": 12086, "\u0120Nevada": 12087, "\u0120Tel": 12088, "\u0120Scar": 12089, "\u0120Warriors": 12090, "\u0120Jane": 12091, "\u0120coup": 12092, "bas": 12093, "\u0120terminal": 12094, ",-": 12095, "OH": 12096, "\u0120tension": 12097, "\u0120wings": 12098, "\u0120Myster": 12099, "\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd": 12100, "\u0120Unlike": 12101, "valid": 12102, "vironments": 12103, "\u0120Ali": 12104, "\u0120naked": 12105, "books": 12106, "\u0120Mun": 12107, "\u0120Gulf": 12108, "\u0120density": 12109, "\u0120dimin": 12110, "\u0120desperate": 12111, "\u0120presidency": 12112, "\u01201986": 12113, "hy": 12114, "IND": 12115, "\u0120unlock": 12116, "imens": 12117, "\u0120handled": 12118, "\u0120Eb": 12119, "\u0120disappeared": 12120, "\u0120genre": 12121, "\u01201988": 12122, "\u0120determination": 12123, "Stream": 12124, "iko": 12125, "apters": 12126, "\u0120acknowledge": 12127, "Jan": 12128, "\u0120capitalism": 12129, "Pat": 12130, "\u01202020": 12131, "\u0120painful": 12132, "\u0120curve": 12133, "\u0120bombs": 12134, "storm": 12135, "\u0120Metal": 12136, "encer": 12137, "\u0120Fig": 12138, "\u0120Aaron": 12139, "anches": 12140, "\u0120inspiration": 12141, "\u0120exhaust": 12142, "tains": 12143, "ashi": 12144, "\u0120descript": 12145, "\u0120ritual": 12146, "\u0120Chelsea": 12147, "\u0120promotion": 12148, "\u0120Hung": 12149, "\u0120Ward": 12150, "iva": 12151, "\u0120ET": 12152, "\u0120toss": 12153, "allow": 12154, "\u0120Francis": 12155, "Dep": 12156, "\u0120happiness": 12157, "\u0120Glass": 12158, "\u0120beta": 12159, "\u0120strengthen": 12160, "NE": 12161, "oa": 12162, "\u0120buttons": 12163, "\u0120Murray": 12164, "\u0120kicked": 12165, "Quest": 12166, "\u0120Talk": 12167, "\u0120Several": 12168, "\u0120Zero": 12169, "\u0120drone": 12170, "ulk": 12171, "\u0120cam": 12172, "\u0120Mobile": 12173, "\u0120preventing": 12174, "\u0120retro": 12175, "\u0120Ax": 12176, "\u0120cruel": 12177, "\u0120float": 12178, ".),": 12179, "\u0120filing": 12180, "\u0120Grant": 12181, "\u0120Bor": 12182, "\u0120rib": 12183, "\u0120championship": 12184, "\u0120Merc": 12185, "\u0120styles": 12186, "\u0120cake": 12187, "\u0120builds": 12188, "\u0120Self": 12189, "iox": 12190, "\u0120epic": 12191, "oyd": 12192, "Bel": 12193, "\u0120Stew": 12194, ".(": 12195, "ahu": 12196, "\u0120Beyond": 12197, "\u0120outs": 12198, "\u0120solo": 12199, "\u0120Tree": 12200, "\u0120preserve": 12201, "\u0120tub": 12202, "ARE": 12203, "roc": 12204, "\u0120Impro": 12205, "\u0120Wright": 12206, "\u0120bund": 12207, "\u0120traged": 12208, "\u0120occasional": 12209, "bian": 12210, "Second": 12211, "rons": 12212, "\u0120interactions": 12213, "formed": 12214, "sing": 12215, "\u0120owns": 12216, "\u0120hockey": 12217, "General": 12218, "\u0120logical": 12219, "\u0120expend": 12220, "\u0120escal": 12221, "\u0120Griff": 12222, "\u0120Crown": 12223, "\u0120Reserve": 12224, "\u0120stopping": 12225, "\u0120excuse": 12226, "second": 
12227, "\u0120operated": 12228, "\u0120reaches": 12229, "\u0120Malays": 12230, "\u0120pollution": 12231, "\u0120Brooklyn": 12232, "\u0120delete": 12233, "\u0120hash": 12234, "Block": 12235, "aha": 12236, "\u00e2\u0122\u00b3": 12237, "\u0120shorter": 12238, "piece": 12239, ">>>": 13163, "\u0120Mormon": 13164, "tor": 13165, "\u0120particles": 13166, "\u0120Bart": 13167, "ryption": 13168, "\u0120admin": 13169, "\u0120squee": 13170, "VIDIA": 13171, "\u0120creator": 13172, "iameter": 13173, "icular": 13174, "NBC": 13175, "\u0120grabbed": 13176, "\u0120nodd": 13177, "\u0120rated": 13178, "\u0120rotation": 13179, "\u0120grasp": 13180, "\u0120excessive": 13181, "\u0120EC": 13182, "\u0120Whit": 13183, "\u0120inventory": 13184, "aults": 13185, "\u0120FB": 13186, "\u0120ecosystem": 13187, "\u0120billions": 13188, "\u0120venture": 13189, "named": 13190, "\u0120defender": 13191, "oute": 13192, "Instead": 13193, "irable": 13194, "War": 13195, "\u0120assumption": 13196, "\u0120bite": 13197, "\u0120earthqu": 13198, "tail": 13199, "space": 13200, "\u0120gifts": 13201, "boys": 13202, "\u0120inevitable": 13203, "\u0120structural": 13204, "\u0120beneficial": 13205, "\u0120compelling": 13206, "hole": 13207, "ervation": 13208, "\u0120coat": 13209, "oj": 13210, "incarn": 13211, "\u0120Years": 13212, "\u0120determining": 13213, "\u0120rhetoric": 13214, "\u0120boundaries": 13215, "\u0120whites": 13216, "Ant": 13217, "addy": 13218, ")-": 13219, "raham": 13220, "etermin": 13221, "\u0120harvest": 13222, "\u0120Conc": 13223, "\u0120laptop": 13224, "\u0120Match": 13225, "\u0120enjoying": 13226, "cca": 13227, "ollar": 13228, "\u0120trips": 13229, "\u0120addiction": 13230, "\u0120Sak": 13231, "\u0120powered": 13232, "\u0120cous": 13233, "\u0120Russians": 13234, "iere": 13235, "\u0120retrie": 13236, "quality": 13237, "\u0120differ": 13238, "\u0120kingdom": 13239, "\u0120Laur": 13240, "\u0120Capitol": 13241, "\u0120conclusions": 13242, "\u0120Altern": 13243, "\u0120Nav": 13244, "\u0120transparent": 13245, "BER": 13246, "Group": 13247, "\u0120Complete": 13248, "\u0120infer": 13249, "\u0120intrig": 13250, "\u0120insane": 13251, "RO": 13252, "ophob": 13253, "isen": 13254, "qual": 13255, "Michael": 13256, "\u0120museum": 13257, "\u0120Pope": 13258, "\u0120reset": 13259, "rative": 13260, "five": 13261, "\u0120aggreg": 13262, "ittees": 13263, "ository": 13264, "\u0120carb": 13265, "\u0120Record": 13266, "\u0120decides": 13267, "\u0120Fix": 13268, "\u0120exceptions": 13269, "\u0120Commissioner": 13270, "uns": 13271, "\u0120Environmental": 13272, "\u0120legendary": 13273, "istence": 13274, "\u0120tunnel": 13275, "km": 13276, "\u0120insult": 13277, "\u0120troll": 13278, "\u0120shake": 13279, "\u0120detention": 13280, "ques": 13281, "\u0120Chrome": 13282, "\u0120Files": 13283, "\u0120subt": 13284, "\u0120prospects": 13285, "\u0120prol": 13286, "render": 13287, "proof": 13288, "\u0120performances": 13289, "Str": 13290, "\u0120href": 13291, "ername": 13292, "\u0120achievement": 13293, "\u0120fut": 13294, "Full": 13295, "\u0120Leban": 13296, "google": 13297, "\u00e3\u0125\u012a": 13298, "ampa": 13299, "Maybe": 13300, "\u0120projected": 13301, "\u0120Emb": 13302, "\u0120colleg": 13303, "\u0120awards": 13304, "\u0120\u00e2\u0136": 13305, "Gold": 13306, "\u0120Blake": 13307, "\u0120Raj": 13308, "ifting": 13309, "\u0120pending": 13310, "\u0120instinct": 13311, "\u0120developments": 13312, "Connect": 13313, "\u0120Mand": 13314, "\u0120WITH": 13315, "\u0120Philippines": 13316, "profile": 13317, "\u0120altogether": 13318, "\u0120Bund": 13319, 
"\u0120TD": 13320, "oooo": 13321, "amped": 13322, "iph": 13323, "\u0120steam": 13324, "\u0120oldest": 13325, "\u0120detection": 13326, "ulpt": 13327, "\u0120\u00e7": 13328, "\u0120Wayne": 13329, "2006": 13330, "fa": 13331, "\u0120circles": 13332, "\u0120Fu": 13333, "\u0120donors": 13334, "appropriate": 13335, "\u0120Dakota": 13336, "jamin": 13337, "\u0120motivated": 13338, "\u0120purchases": 13339, "\u0120Louisiana": 13340, "\u0120Spl": 13341, "\u0120globe": 13342, "\u0120105": 13343, "zip": 13344, "call": 13345, "\u0120departments": 13346, "\u0120sustainable": 13347, "105": 13348, "\u0120OP": 13349, "ifiers": 13350, "\u0120prevented": 13351, "\u0120incomp": 13352, "\u0120Commander": 13353, "\u0120dominated": 13354, "\u0120\u00c2\u00bb": 13355, "\u0120invested": 13356, "\u0120complexity": 13357, "\u0120incl": 13358, "\u0120ensuring": 13359, "\u0120realm": 13360, "ync": 13361, "\u0120Independent": 13362, "rained": 13363, "\u0120Jen": 13364, "\u0120Flight": 13365, "\u0120athe": 13366, "\u0120speculation": 13367, "\u0120TE": 13368, "ocate": 13369, "tic": 13370, "\u0120plaint": 13371, "herry": 13372, "\u0120toy": 13373, "\u0120111": 13374, "\u0120plates": 13375, "status": 13376, "\u0120Isa": 13377, "\u0120devoted": 13378, "Cop": 13379, "\u0120ES": 13380, "255": 13381, "urrency": 13382, "Main": 13383, "\u0120slaves": 13384, "\u0120pepper": 13385, "\u0120quotes": 13386, "\u0120ceiling": 13387, "\u0120Fish": 13388, "\u0120transformation": 13389, "\u0120fraction": 13390, "\u0120advantages": 13391, "\u0120toile": 13392, "\u0120stunning": 13393, "\u0120moist": 13394, "breaking": 13395, "si": 13396, "\u0120Location": 13397, "\u0120Medium": 13398, "\u0120texts": 13399, "\u0120ugly": 13400, "\u0120bio": 13401, ".\u00e2\u0122\u0136": 13402, "\u0120Based": 13403, "\u0120trains": 13404, "\u0120Wing": 13405, "\u0120Ancient": 13406, "\u0120Records": 13407, "\u0120Hope": 13408, "Special": 13409, "adesh": 13410, "obi": 13411, "[/": 13412, "\u0120temporarily": 13413, "Ver": 13414, "hu": 13415, "oser": 13416, "\u0120overnight": 13417, "\u0120mamm": 13418, "\u0120Treasury": 13419, "\u0120Venezuel": 13420, "\u0120Mega": 13421, "\u0120tar": 13422, "\u0120expects": 13423, "black": 13424, "orph": 13425, "\\\\\\\\": 13426, "\u0120acceptance": 13427, "\u0120radar": 13428, "sis": 13429, "\u0120junior": 13430, "\u0120frames": 13431, "\u0120observation": 13432, "acies": 13433, "Power": 13434, "\u0120Advanced": 13435, "Mag": 13436, "ologically": 13437, "\u0120Mechan": 13438, "\u0120sentences": 13439, "\u0120analysts": 13440, "aughters": 13441, "forcement": 13442, "\u0120vague": 13443, "\u0120clause": 13444, "\u0120directors": 13445, "\u0120evaluate": 13446, "\u0120cabinet": 13447, "Matt": 13448, "\u0120Classic": 13449, "Ang": 13450, "\u0120cler": 13451, "\u0120Buck": 13452, "\u0120researcher": 13453, "\u0120160": 13454, "\u0120poorly": 13455, "\u0120experiencing": 13456, "\u0120Ped": 13457, "\u0120Manhattan": 13458, "\u0120freed": 13459, "\u0120themes": 13460, "advant": 13461, "\u0120nin": 13462, "\u0120praise": 13463, "104": 13464, "\u0120Libya": 13465, "best": 13466, "\u0120trusted": 13467, "\u0120cease": 13468, "\u0120dign": 13469, "Direct": 13470, "\u0120bombing": 13471, "\u0120migration": 13472, "\u0120Sciences": 13473, "\u0120municipal": 13474, "\u0120Average": 13475, "\u0120glory": 13476, "\u0120revealing": 13477, "\u0120arena": 13478, "\u0120uncertainty": 13479, "\u0120battlefield": 13480, "iao": 13481, "God": 13482, "\u0120cinem": 13483, "rape": 13484, "elle": 13485, "apons": 13486, "\u0120listing": 13487, 
"\u0120waited": 13488, "\u0120spotted": 13489, "keley": 13490, "\u0120Audio": 13491, "eor": 13492, "arding": 13493, "idding": 13494, "igma": 13495, "\u0120Neg": 13496, "\u0120lone": 13497, "\u0120----": 13498, "exe": 13499, "deg": 13500, "\u0120transf": 13501, "\u0120wash": 13502, "\u0120slavery": 13503, "\u0120exploring": 13504, "\u0120WW": 13505, "atson": 13506, "\u0120encl": 13507, "lies": 13508, "\u0120Creek": 13509, "\u0120wooden": 13510, "Manager": 13511, "\u0120Brand": 13512, "ummy": 13513, "\u0120Arthur": 13514, "\u0120bureaucr": 13515, "\u0120blend": 13516, "arians": 13517, "Further": 13518, "\u0120supposedly": 13519, "\u0120winds": 13520, "\u01201979": 13521, "\u0120gravity": 13522, "\u0120analyses": 13523, "\u0120Travel": 13524, "\u0120Veter": 13525, "\u0120dumb": 13526, "\u0120alternate": 13527, "gal": 13528, "\u0120consumed": 13529, "\u0120effectiveness": 13530, ".''": 13531, "\u0120paths": 13532, "onda": 13533, "LA": 13534, "\u0120Strong": 13535, "\u0120enables": 13536, "\u0120escaped": 13537, "\u0120\"\"": 13538, "\u0120112": 13539, "\u01201983": 13540, "\u0120smiled": 13541, "\u0120tendency": 13542, "Fire": 13543, "\u0120pars": 13544, "\u0120Roc": 13545, "\u0120lake": 13546, "\u0120fitness": 13547, "\u0120Ath": 13548, "\u0120Horn": 13549, "\u0120hier": 13550, "\u0120impose": 13551, "mother": 13552, "\u0120pension": 13553, "icut": 13554, "borne": 13555, "iciary": 13556, "._": 13557, "\u0120SU": 13558, "\u0120polar": 13559, "isy": 13560, "engu": 13561, "itialized": 13562, "ATA": 13563, "write": 13564, "\u0120exercises": 13565, "\u0120Diamond": 13566, "otypes": 13567, "\u0120harmful": 13568, "onz": 13569, "\u0120printing": 13570, "story": 13571, "\u0120expertise": 13572, "\u0120Ger": 13573, "\u0120tragedy": 13574, "\u0120Fly": 13575, "\u0120divid": 13576, "ampire": 13577, "stock": 13578, "Mem": 13579, "\u0120reign": 13580, "\u0120unve": 13581, "\u0120amend": 13582, "\u0120Prophet": 13583, "\u0120mutual": 13584, "\u0120Fac": 13585, "\u0120replacing": 13586, "Har": 13587, "\u0120Circuit": 13588, "\u0120throat": 13589, "\u0120Shot": 13590, "\u0120batteries": 13591, "\u0120toll": 13592, "\u0120addressing": 13593, "\u0120Medicaid": 13594, "\u0120pupp": 13595, "\u0120Nar": 13596, "olk": 13597, "\u0120equity": 13598, "MR": 13599, "\u0120Hispan": 13600, "\u0120Large": 13601, "mid": 13602, "Dev": 13603, "\u0120exped": 13604, "\u0120demo": 13605, "\u0120Marshall": 13606, "ergus": 13607, "\u0120fiber": 13608, "\u0120divorce": 13609, "\u0120Create": 13610, "\u0120slower": 13611, "\u0120Parker": 13612, "\u0120Student": 13613, "\u0120Training": 13614, "Return": 13615, "\u0120Tru": 13616, "\u0120cub": 13617, "\u0120Reached": 13618, "\u0120panic": 13619, "\u0120quarters": 13620, "\u0120rect": 13621, "\u0120treating": 13622, "\u0120rats": 13623, "\u0120Christianity": 13624, "oler": 13625, "\u0120sacred": 13626, "\u0120declare": 13627, "ulative": 13628, "eting": 13629, "\u0120delivering": 13630, "estone": 13631, "\u0120tel": 13632, "\u0120Larry": 13633, "\u0120meta": 13634, "accept": 13635, "artz": 13636, "\u0120Roger": 13637, "handed": 13638, "\u0120header": 13639, "\u0120trapped": 13640, "\u0120Century": 13641, "\u0120knocked": 13642, "\u0120Oxford": 13643, "\u0120survivors": 13644, "bot": 13645, "\u0120demonstration": 13646, "\u0120dirt": 13647, "\u0120assists": 13648, "OME": 13649, "\u0120Draft": 13650, "ortunate": 13651, "folio": 13652, "pered": 13653, "usters": 13654, "gt": 13655, "\u0120Lock": 13656, "\u0120judicial": 13657, "verted": 13658, "\u0120secured": 13659, "outing": 13660, 
"\u0120Books": 13661, "\u0120hosting": 13662, "\u0120lifted": 13663, "length": 13664, "\u0120jer": 13665, "\u0120wheels": 13666, "\u0120Range": 13667, "umbnails": 13668, "\u0120diagnosis": 13669, "tech": 13670, "\u0120Stewart": 13671, "\u0120Pract": 13672, "\u0120nationwide": 13673, "\u0120dear": 13674, "\u0120obligations": 13675, "\u0120grows": 13676, "\u0120mandatory": 13677, "\u0120suspicious": 13678, "!'": 13679, "Apr": 13680, "Great": 13681, "\u0120mortgage": 13682, "\u0120prosecutor": 13683, "\u0120editorial": 13684, "\u0120Kr": 13685, "\u0120processed": 13686, "ungle": 13687, "\u0120flexibility": 13688, "Earlier": 13689, "\u0120Cart": 13690, "\u0120Sug": 13691, "\u0120focuses": 13692, "\u0120startup": 13693, "\u0120breach": 13694, "\u0120Tob": 13695, "cycle": 13696, "\u00e3\u0122\u012e": 13697, "rose": 13698, "\u0120bizarre": 13699, "\u00e3\u0122\u012f": 13700, "\u0120vegetables": 13701, "$$": 13702, "\u0120retreat": 13703, "oshi": 13704, "\u0120Shop": 13705, "\u0120Ground": 13706, "\u0120Stop": 13707, "\u0120Hawaii": 13708, "\u0120Ay": 13709, "Perhaps": 13710, "\u0120Beaut": 13711, "uffer": 13712, "enna": 13713, "\u0120productivity": 13714, "Fixed": 13715, "control": 13716, "\u0120absent": 13717, "\u0120Campaign": 13718, "Green": 13719, "\u0120identifying": 13720, "\u0120regret": 13721, "\u0120promoted": 13722, "\u0120Seven": 13723, "\u0120eru": 13724, "neath": 13725, "aughed": 13726, "\u0120Pin": 13727, "\u0120Living": 13728, "Cost": 13729, "omatic": 13730, "mega": 13731, "\u0120Nig": 13732, "ocy": 13733, "\u0120inbox": 13734, "\u0120empire": 13735, "\u0120horizont": 13736, "\u0120branches": 13737, "\u0120metaph": 13738, "Active": 13739, "edi": 13740, "\u0120Film": 13741, "\u0120Something": 13742, "\u0120mods": 13743, "incial": 13744, "\u0120Original": 13745, "Gen": 13746, "\u0120spirits": 13747, "\u0120earning": 13748, "Hist": 13749, "\u0120riders": 13750, "\u0120sacrific": 13751, "MT": 13752, "\u0120VA": 13753, "\u0120Salt": 13754, "\u0120occupation": 13755, "\u0120Mi": 13756, "\u0120disg": 13757, "lict": 13758, "\u0120nit": 13759, "\u0120nodes": 13760, "eem": 13761, "\u0120Pier": 13762, "\u0120hatred": 13763, "psy": 13764, "\u00e3\u0125\u012b": 13765, "\u0120theater": 13766, "\u0120sophisticated": 13767, "\u0120defended": 13768, "\u0120besides": 13769, "\u0120thoroughly": 13770, "\u0120Medicare": 13771, "\u0120blamed": 13772, "arently": 13773, "\u0120crying": 13774, "FOR": 13775, "priv": 13776, "\u0120singing": 13777, "\u0120Il": 13778, "\u0120cute": 13779, "oided": 13780, "olitical": 13781, "\u0120Neuro": 13782, "\u00e5\u00a4": 13783, "\u0120donation": 13784, "\u0120Eagles": 13785, "\u0120Give": 13786, "Tom": 13787, "\u0120substantially": 13788, "\u0120License": 13789, "\u0120Ja": 13790, "\u0120grey": 13791, "\u0120Animal": 13792, "\u0120ER": 13793, "\u0120Und": 13794, "\u0120keen": 13795, "\u0120conclude": 13796, "\u0120Mississippi": 13797, "Engine": 13798, "\u0120Studios": 13799, "Press": 13800, "overs": 13801, "llers": 13802, "\u0120350": 13803, "\u0120Rangers": 13804, "\u0120rou": 13805, "erto": 13806, "Ep": 13807, "issa": 13808, "ivan": 13809, "\u0120seal": 13810, "\u0120Regist": 13811, "display": 13812, "\u0120weaken": 13813, "uum": 13814, "\u0120Commons": 13815, "\u0120Say": 13816, "\u0120cultures": 13817, "\u0120laughed": 13818, "\u0120slip": 13819, "\u0120treatments": 13820, "izable": 13821, "mart": 13822, "\u0120Rice": 13823, "\u0120beast": 13824, "\u0120obesity": 13825, "\u0120Laure": 13826, "iga": 13827, "Which": 13828, "holder": 13829, "\u0120elderly": 13830, 
"\u0120pays": 13831, "\u0120complained": 13832, "\u0120crop": 13833, "\u0120proc": 13834, "\u0120explosive": 13835, "\u0120Fan": 13836, "\u0120Arsenal": 13837, "Author": 13838, "eful": 13839, "\u0120meals": 13840, "\u0120(-": 13841, "idays": 13842, "\u0120imagination": 13843, "\u0120annually": 13844, "\u0120ms": 13845, "asures": 13846, "Head": 13847, "ikh": 13848, "matic": 13849, "\u0120boyfriend": 13850, "\u0120Computer": 13851, "\u0120bump": 13852, "\u0120surge": 13853, "\u0120Craig": 13854, "\u0120Kirk": 13855, "Del": 13856, "mediate": 13857, "\u0120scenarios": 13858, "\u0120Mut": 13859, "\u0120Stream": 13860, "\u0120competitors": 13861, "\u00d9\u0126": 13862, "\u0120Stanford": 13863, "\u0120Resources": 13864, "azed": 13865, "bage": 13866, "\u0120organis": 13867, "\u0120Release": 13868, "\u0120separately": 13869, "\u0120habits": 13870, "\u0120measurements": 13871, "\u0120Close": 13872, "\u0120accompany": 13873, "\u0120gly": 13874, "\u0120tang": 13875, "\u0120Rou": 13876, "\u0120plugin": 13877, "\u0120convey": 13878, "\u0120Challenge": 13879, "oots": 13880, "jan": 13881, "\u0120curs": 13882, "\u0120Relations": 13883, "keeper": 13884, "\u0120approaching": 13885, "ping": 13886, "Speaking": 13887, "\u0120arrangement": 13888, "\u0120VI": 13889, "arettes": 13890, "\u0120affecting": 13891, "\u0120permits": 13892, "because": 13893, "\u0120useless": 13894, "\u0120Hus": 13895, "!!!!": 13896, "\u0120destroying": 13897, "Unfortunately": 13898, "\u0120fascinating": 13899, "Sem": 13900, "\u0120electoral": 13901, "\u0120transparency": 13902, "\u0120Chaos": 13903, "\u0120volunteer": 13904, "\u0120statistical": 13905, "\u0120activated": 13906, "rox": 13907, "Web": 13908, "HE": 13909, "\u0120Hampshire": 13910, "isive": 13911, "Map": 13912, "\u0120trash": 13913, "\u0120Lawrence": 13914, "stick": 13915, "Cr": 13916, "\u0120rings": 13917, "EXT": 13918, "\u0120operational": 13919, "opes": 13920, "Does": 13921, "\u0120Evans": 13922, "\u0120witnessed": 13923, "Port": 13924, "\u0120launching": 13925, "econom": 13926, "wear": 13927, "\u0120Particip": 13928, "umm": 13929, "cules": 13930, "\u0120RAM": 13931, "\u0120Tun": 13932, "\u0120assured": 13933, "\u0120binary": 13934, "\u0120betray": 13935, "\u0120exploration": 13936, "\u0120Fel": 13937, "\u0120admission": 13938, "itated": 13939, "Sy": 13940, "\u0120avoided": 13941, "\u0120Simulator": 13942, "\u0120celebrated": 13943, "\u0120Electric": 13944, "\u00a5\u0140": 13945, "\u0120cluster": 13946, "itzerland": 13947, "health": 13948, "Line": 13949, "\u0120Nash": 13950, "aton": 13951, "\u0120spare": 13952, "\u0120enterprise": 13953, "\u0120DIS": 13954, "cludes": 13955, "\u0120flights": 13956, "\u0120regards": 13957, "\u0120\u00c3\u0139": 13958, "half": 13959, "\u0120trucks": 13960, "\u0120contacts": 13961, "\u0120uncons": 13962, "\u0120Climate": 13963, "\u0120immense": 13964, "NEW": 13965, "occ": 13966, "ective": 13967, "\u0120embod": 13968, "\u0120patrol": 13969, "\u0120beside": 13970, "\u0120viable": 13971, "\u0120creep": 13972, "\u0120triggered": 13973, "verning": 13974, "\u0120comparable": 13975, "ql": 13976, "\u0120gaining": 13977, "asses": 13978, "\u0120();": 13979, "\u0120Grey": 13980, "\u0120MLS": 13981, "sized": 13982, "\u0120prosper": 13983, "\"?": 13984, "\u0120polling": 13985, "\u0120shar": 13986, "\u0120RC": 13987, "\u0120firearm": 13988, "orient": 13989, "\u0120fence": 13990, "\u0120variations": 13991, "giving": 13992, "\u0120Pi": 13993, "ospel": 13994, "\u0120pledge": 13995, "\u0120cure": 13996, "\u0120spy": 13997, "\u0120violated": 13998, 
"\u0120rushed": 13999, "\u0120stroke": 14000, "\u0120Blog": 14001, "sels": 14002, "\u0120Ec": 14003, ",''": 14004, "\u0120pale": 14005, "\u0120Collins": 14006, "terror": 14007, "\u0120Canadians": 14008, "\u0120tune": 14009, "\u0120laboratory": 14010, "\u0120nons": 14011, "tarian": 14012, "\u0120disability": 14013, "\u0120Gam": 14014, "\u0120singer": 14015, "alg": 14016, "\u0120Senior": 14017, "\u0120traded": 14018, "\u0120Warrior": 14019, "\u0120infring": 14020, "\u0120Franklin": 14021, "\u0120strain": 14022, "\u0120Swedish": 14023, "\u0120seventh": 14024, "\u0120Benn": 14025, "\u0120Tell": 14026, "\u0120syndrome": 14027, "\u0120wondered": 14028, "iden": 14029, "++++": 14030, "igo": 14031, "\u0120purple": 14032, "\u0120journalism": 14033, "\u0120rebel": 14034, "\u0120fu": 14035, "blog": 14036, "\u0120invite": 14037, "rencies": 14038, "\u0120Contact": 14039, "Israel": 14040, "\u0120Content": 14041, "\u0120cheer": 14042, "\u0120bedroom": 14043, "\u0120Engineering": 14044, "\u0120Queens": 14045, "\u0120dwell": 14046, "\u0120PlayStation": 14047, "\u0120Dim": 14048, "\u0120Colon": 14049, "lr": 14050, "\u0120operates": 14051, "\u0120motivation": 14052, "USA": 14053, "astered": 14054, "Core": 14055, "\u0120Truth": 14056, "olo": 14057, "OSE": 14058, "\u0120Memory": 14059, "\u0120predec": 14060, "\u0120anarch": 14061, "\u01201920": 14062, "\u0120Yam": 14063, "\u00c3\u00a8": 14064, "bid": 14065, "\u0120grateful": 14066, "\u0120excitement": 14067, "\u0120treasure": 14068, "\u0120longest": 14069, "ctive": 14070, "\u0120deserves": 14071, "\u0120reserves": 14072, "\u0120cops": 14073, "\u0120Ottawa": 14074, "\u0120Egyptian": 14075, "anked": 14076, "\u0120artif": 14077, "\u0120hypothesis": 14078, ":/": 14079, "\u0120purchasing": 14080, "\u0120lovely": 14081, "HP": 14082, "\u0120divide": 14083, "\u0120strictly": 14084, "\u0120questioning": 14085, "\u0120taxpayers": 14086, "\u0120Joy": 14087, "\u0120rolls": 14088, "\u0120Heavy": 14089, "\u0120ports": 14090, "\u0120magnetic": 14091, "\u0120inflamm": 14092, "\u0120brush": 14093, "tics": 14094, "\u00e2\u012a\u0134": 14095, "\u0120bottles": 14096, "ppy": 14097, "\u0120padd": 14098, "\u00e3\u0124\u00af": 14099, "million": 14100, "\u0120devastating": 14101, "\u0120compiled": 14102, "\u0120medication": 14103, "\u0120twelve": 14104, "\u0120Perry": 14105, "Space": 14106, "imb": 14107, "your": 14108, "\u0120leaked": 14109, "\u0120Tar": 14110, "\u0120unity": 14111, "\u0120infected": 14112, "\u0120traveled": 14113, "IDE": 14114, "\u0120McDonald": 14115, "txt": 14116, "\u0120Princ": 14117, "\u0120interven": 14118, "\u0120Taiwan": 14119, "\u0120Pow": 14120, "\u0120bearing": 14121, "\u0120Thread": 14122, "\u0120zones": 14123, "izards": 14124, "unks": 14125, "Chapter": 14126, "llor": 14127, "\u0120\u00c2\u00b7": 14128, "\u0120wounds": 14129, "\u0120discretion": 14130, "\u0120succeeded": 14131, "iking": 14132, "\u0120iconic": 14133, "Call": 14134, "\u0120screening": 14135, "\u0120Mis": 14136, "icts": 14137, "\u0120ministers": 14138, "\u0120separation": 14139, "Player": 14140, "\u0120bip": 14141, "\u0120beloved": 14142, "\u0120counting": 14143, "\u0120Eye": 14144, "around": 14145, "inging": 14146, "\u0120tablet": 14147, "\u0120offence": 14148, "inance": 14149, "have": 14150, "\u0120Info": 14151, "\u0120Ninja": 14152, "\u0120protective": 14153, "\u0120Cass": 14154, "Mac": 14155, "\u0120Quality": 14156, "North": 14157, "\u0120ic": 14158, "\u0120Cuba": 14159, "\u0120Chronicle": 14160, "\u0120Property": 14161, "\u0120fastest": 14162, "otos": 14163, "\u0120Germ": 14164, "OWN": 
14165, "\u0120boom": 14166, "\u0120Stanley": 14167, "erguson": 14168, "\u0120clever": 14169, "\u0120enters": 14170, "mode": 14171, "terior": 14172, "\u0120Sens": 14173, "\u0120linear": 14174, "ARK": 14175, "\u0120comparing": 14176, "\u0120purely": 14177, "\u0120safer": 14178, "\u0120Potter": 14179, "\u0120cups": 14180, "RT": 14181, "\u0120gluc": 14182, "\u0120attributed": 14183, "\u0120dupl": 14184, "\u0120Pap": 14185, "\u0120precious": 14186, "\u0120pa": 14187, "ictionary": 14188, "\u0120Tig": 14189, "\u0120Too": 14190, "olutions": 14191, "stan": 14192, "\u0120robots": 14193, "\u0120lobb": 14194, "\u0120statute": 14195, "\u0120prevention": 14196, "western": 14197, "160": 14198, "\u0120Active": 14199, "\u0120Maria": 14200, "hal": 14201, "None": 14202, "ellar": 14203, "\u0120KB": 14204, "\u0120Partners": 14205, "\u0120Single": 14206, "\u0120Following": 14207, "ango": 14208, "acious": 14209, "\u0120thou": 14210, "\u0120kg": 14211, "\u0120influential": 14212, "\u0120Friends": 14213, "Sur": 14214, "ainted": 14215, "\u0120forums": 14216, "\u0120starter": 14217, "\u0120citizenship": 14218, "\u0120Election": 14219, "onge": 14220, "otation": 14221, "osph": 14222, ";;;;": 14223, "utical": 14224, "pur": 14225, "eren": 14226, "\u0120accusations": 14227, "bitious": 14228, "abbit": 14229, "\u0120Ord": 14230, "Posted": 14231, "irk": 14232, "\u0120sensitivity": 14233, "iche": 14234, "\u0120Amy": 14235, "\u0120Fab": 14236, "\u0120summit": 14237, "\u0120pedest": 14238, "\u0120rubber": 14239, "\u0120agricultural": 14240, "\u0120cancel": 14241, "AE": 14242, "\u0120inaug": 14243, "\u0120contam": 14244, "\u0120firmly": 14245, "iw": 14246, "stage": 14247, "\u0120Kan": 14248, "\u0120tier": 14249, "\u0120invention": 14250, "\u0120translated": 14251, "\u0120Rules": 14252, "Box": 14253, "Twitter": 14254, "IDS": 14255, "\u0120pizza": 14256, "\u0120debug": 14257, "\u0120Drop": 14258, "vs": 14259, "\u0120horses": 14260, "big": 14261, "\u0120boring": 14262, "\u0120hood": 14263, "\u0120McCain": 14264, "atched": 14265, "\u0120Bros": 14266, "\u0120skip": 14267, "\u0120essay": 14268, "stat": 14269, "\u0120Legends": 14270, "\u0120ammunition": 14271, "auc": 14272, "\u0120shooter": 14273, "\u0120unh": 14274, "\u0120supplied": 14275, "\u0120generic": 14276, "\u0120SK": 14277, "iban": 14278, "yrics": 14279, "\u0120255": 14280, "\u0120climbing": 14281, "Former": 14282, "\u0120flip": 14283, "\u0120jumping": 14284, "\u0120frustration": 14285, "\u0120Terry": 14286, "\u0120neighborhoods": 14287, "\u0120median": 14288, "bean": 14289, "\u0120brains": 14290, "Following": 14291, "\u0120shaped": 14292, "\u0120draws": 14293, "\u0120altered": 14294, "Jack": 14295, "\u0120recipes": 14296, "\u0120skilled": 14297, "wealth": 14298, "achi": 14299, "election": 14300, "\u0120behaviors": 14301, "deals": 14302, "\u0120Until": 14303, "Fe": 14304, "\u0120declaration": 14305, "marks": 14306, "\u0120Between": 14307, "celona": 14308, "\u0120reson": 14309, "\u0120bubble": 14310, "Among": 14311, "\u0120imperial": 14312, "GS": 14313, "\u0120feminist": 14314, "2005": 14315, "\u0120Kyle": 14316, "\u0120accounting": 14317, "\u0120Tele": 14318, "\u0120Tyr": 14319, "\u0120connecting": 14320, "\u0120rehab": 14321, "\u0120Pred": 14322, "sim": 14323, "\u0120meantime": 14324, "\u0120physician": 14325, "MW": 14326, "\u0120Campbell": 14327, "\u0120Brandon": 14328, "\u0120contributing": 14329, "\u0120Rule": 14330, "\u0120Weight": 14331, "\u0120Nap": 14332, "\u0120interactive": 14333, "\u0120vag": 14334, "\u0120helmet": 14335, "\u0120Comb": 14336, "four": 14337, 
"\u0120shipped": 14338, "\u0120completing": 14339, "\u0120PD": 14340, "PDATE": 14341, "\u0120spreading": 14342, "\u0120scary": 14343, "erving": 14344, "\u0120Gas": 14345, "\u0120frank": 14346, "school": 14347, "\u0120romantic": 14348, "\u0120stabil": 14349, "Rob": 14350, "\u0120accurately": 14351, "\u0120acute": 14352, "\u0120Hann": 14353, "\u0120symbols": 14354, "\u0120civilization": 14355, "\u0120AW": 14356, "\u0120lightning": 14357, "\u0120considers": 14358, "\u0120venue": 14359, "\u0120\u00d7": 14360, "\u0120oven": 14361, "\u0120SF": 14362, "his": 14363, "\u0120nu": 14364, "\u0120Learn": 14365, "\u0120peoples": 14366, "\u0120std": 14367, "\u0120slee": 14368, "\u0120slic": 14369, "\u0120Statistics": 14370, "\u0120corners": 14371, "\u0120Baker": 14372, "\u0120:)": 14373, "mentation": 14374, "olver": 14375, "\u0120laughing": 14376, "\u0120Todd": 14377, "onde": 14378, "\u0120Hills": 14379, "\u0120nuts": 14380, "\u0120Woman": 14381, "plane": 14382, "\u0120liver": 14383, "\u0120Inside": 14384, "Sorry": 14385, "\u0120agrees": 14386, "\u0120fundament": 14387, "\u0120Fisher": 14388, "\u0120auction": 14389, "\u0120threads": 14390, "glas": 14391, "\u0120Basic": 14392, "\u0120Nat": 14393, "\u0120lacking": 14394, "\u0120celebration": 14395, "ju": 14396, "\u0120silly": 14397, "Euro": 14398, "\u0120tatt": 14399, "ighty": 14400, "controlled": 14401, "Test": 14402, "\u0120Singh": 14403, "\u0120rage": 14404, "\u0120rhyth": 14405, "offic": 14406, "\u0120Phantom": 14407, "\u0120headlines": 14408, "\u0120responding": 14409, "\u0120Morning": 14410, "\u0120vitamin": 14411, "\u0120boots": 14412, "\u0120Site": 14413, "alin": 14414, "pi": 14415, "\u0120viral": 14416, "\u0120UC": 14417, "DER": 14418, "\u0120Sex": 14419, "\u0120stocks": 14420, "current": 14421, "\u0120churches": 14422, "\u0120Rare": 14423, "\u0120Murphy": 14424, "\u0120denial": 14425, "\u0120Gaming": 14426, "\u0120toug": 14427, "\u0120nick": 14428, "\u0120makers": 14429, "\u0120Ronald": 14430, "\u0120generous": 14431, "\u0120Doc": 14432, "\u0120Morris": 14433, "\u0120transformed": 14434, "\u0120Normal": 14435, "\u0120104": 14436, "\u0120Kickstarter": 14437, "\u0120Upon": 14438, "Online": 14439, "\u0120IRS": 14440, "\u0120wrap": 14441, "\u0120loving": 14442, "\u0120arrives": 14443, "\u0120Due": 14444, "\u0120heter": 14445, "\u0120Made": 14446, "\u0120rental": 14447, "\u0120belongs": 14448, "\u0120attorneys": 14449, "\u0120crops": 14450, "\u0120matched": 14451, "ulum": 14452, "oline": 14453, "109": 14454, "\u0120dispar": 14455, "\u0120buyers": 14456, "\u0120Cambridge": 14457, "\u0120ethics": 14458, "roups": 14459, "\u0120justified": 14460, "\u0120marginal": 14461, "\u0120respected": 14462, "winning": 14463, "\u0120nodded": 14464, "\u0120Serge": 14465, "\u0120Former": 14466, "Craft": 14467, "################": 14468, "\u0120Warner": 14469, "\u0120dash": 14470, "ete": 14471, "\u0120entert": 14472, "\u0120Escape": 14473, "outheast": 14474, "\u0120knees": 14475, "\u0120Bomb": 14476, "\u0120rug": 14477, "Pass": 14478, "\u0120attitudes": 14479, "government": 14480, "\u0120Prior": 14481, "\u0120qualities": 14482, "\u0120notification": 14483, "\u0120Phone": 14484, "lie": 14485, "\u0120anticipated": 14486, "\u0120Combat": 14487, "\u0120Barry": 14488, "\u01201982": 14489, "Users": 14490, "oner": 14491, "\u0120computing": 14492, "\u0120Connecticut": 14493, "\u0120lesser": 14494, "\u0120peers": 14495, "\u0120Cu": 14496, "\u0120technically": 14497, "\u0120submission": 14498, "\u0120Universal": 14499, "\u0120manually": 14500, "ourge": 14501, "\u0120respondents": 
14502, "\u0120BTC": 14503, "\u0120Host": 14504, "\u0120fare": 14505, "\u0120Bird": 14506, "\u0120receipt": 14507, "also": 14508, "\u0120jack": 14509, "\u0120agriculture": 14510, "\u0120skull": 14511, "\u0120!=": 14512, "\u0120passive": 14513, "\u0120CI": 14514, "\u0120societies": 14515, "\u0120reminded": 14516, "\u0120interference": 14517, "Buy": 14518, "\u0120\u00e2\u013e": 14519, "gon": 14520, "\u0120scrutiny": 14521, "\u0120Witch": 14522, "\u0120conducting": 14523, "\u0120\u00e3\u0125": 14524, "\u0120exchanges": 14525, "\u0120Mitchell": 14526, "\u0120inhabit": 14527, "\u0120twist": 14528, "BD": 14529, "\u0120wherever": 14530, "groupon": 14531, "\u0120jokes": 14532, "\u0120Benjamin": 14533, "\u0120Random": 14534, "frame": 14535, "\u0120Lions": 14536, "\u0120highlighted": 14537, "\u0120Arkansas": 14538, "Ent": 14539, "\u0120pile": 14540, "\u0120prelim": 14541, "gs": 14542, "minded": 14543, "\u0120felony": 14544, "\u0120GA": 14545, "\u0120Luck": 14546, "\u0120practically": 14547, "\u0120Bos": 14548, "\u0120actress": 14549, "Dam": 14550, "\u0120Bou": 14551, "\u0120visa": 14552, "\u0120embedded": 14553, "\u0120hybrid": 14554, "\u0120earliest": 14555, "\u0120sooner": 14556, "social": 14557, "\u0120HA": 14558, "\u0120steep": 14559, "\u0120disadvant": 14560, "\u0120exploit": 14561, "\u0120Egg": 14562, "\u0120Ultra": 14563, "\u0120necessity": 14564, "Local": 14565, "iege": 14566, "\u0120dated": 14567, "\u0120masses": 14568, "\u0120subscription": 14569, "pless": 14570, "\u0120anonym": 14571, "\u0120presumably": 14572, "Blue": 14573, "Their": 14574, "asketball": 14575, "\u0120Philip": 14576, "\u0120comed": 14577, "loaded": 14578, "rane": 14579, "\u0120reflection": 14580, "China": 14581, "\u0120extends": 14582, "\u0120forming": 14583, "\u0120unders": 14584, "2001": 14585, "\u0120grat": 14586, "\u0120concentrations": 14587, "\u0120insulin": 14588, "\u0120secular": 14589, "\u0120whilst": 14590, "\u0120winners": 14591, "Advertisements": 14592, "\u0120deliberately": 14593, "\u0120Working": 14594, "\u0120sink": 14595, "etics": 14596, "dale": 14597, "\u0120mandate": 14598, "\u0120gram": 14599, "\u0120vacation": 14600, "\u0120warnings": 14601, "ripp": 14602, "\u0120THAT": 14603, "\u0120commentary": 14604, "\u0120intu": 14605, "\u0120aest": 14606, "\u0120reasoning": 14607, "\u0120breakdown": 14608, "\u0120Zombie": 14609, "\u0120-->": 14610, "\u0120Political": 14611, "cott": 14612, "\u0120thrust": 14613, "\u0120technological": 14614, "\u0120deciding": 14615, "\u0120trafficking": 14616, "Long": 14617, "Welcome": 14618, "prising": 14619, "\u0120Communications": 14620, "\u0120endors": 14621, "\u0120swift": 14622, "\u0120metabol": 14623, "coins": 14624, "resa": 14625, "\u0120HTTP": 14626, "\u0120enroll": 14627, "\u0120Happy": 14628, "usr": 14629, "intage": 14630, "\u0120[\"": 14631, "uably": 14632, "\u0120Material": 14633, "\u0120repeal": 14634, "Sept": 14635, "kh": 14636, "\u0120Modi": 14637, "\u0120underneath": 14638, "\u0120IL": 14639, "shore": 14640, "\u0120diagnosed": 14641, "aceutical": 14642, "\u0120shower": 14643, "aux": 14644, "\u0120Switch": 14645, "\u0120Strength": 14646, "\u0120jihad": 14647, "national": 14648, "\u0120trauma": 14649, "ussy": 14650, "oni": 14651, "\u0120consolid": 14652, "\u0120calories": 14653, "\u0120Flynn": 14654, "agged": 14655, "168": 14656, "\u0120Pink": 14657, "\u0120fulfill": 14658, "\u0120chains": 14659, "\u0120notably": 14660, "\u0120AV": 14661, "Life": 14662, "\u0120Chuck": 14663, "mus": 14664, "\u0120Urban": 14665, "\u0120Hend": 14666, "\u0120deposit": 14667, "\u0120Sad": 
14668, "\u0120affair": 14669, "ORK": 14670, "ieval": 14671, "\u0120FDA": 14672, "\u0120trop": 14673, "\u0120Overall": 14674, "\u0120virtue": 14675, "\u0120satisfaction": 14676, "aund": 14677, "\u0120lun": 14678, "\u0120Switzerland": 14679, "\u0120Operation": 14680, "process": 14681, "\u0120shook": 14682, "\u0120counties": 14683, "leased": 14684, "\u0120Charlotte": 14685, "112": 14686, "\u0120transcript": 14687, "\u0120redd": 14688, "push": 14689, "\u0120Hey": 14690, "\u0120Analysis": 14691, "[\"": 14692, "\u0120alternatives": 14693, "ardless": 14694, "\u0120eleph": 14695, "\u0120prejud": 14696, "\u0120Leaf": 14697, "Having": 14698, "\u0120Hub": 14699, "\u0120expressions": 14700, "\u0120Volume": 14701, "\u0120shocking": 14702, "\u0120Reds": 14703, "\u0120readily": 14704, "\u0120planets": 14705, "adata": 14706, "\u0120collapsed": 14707, "\u0120Madrid": 14708, "\u0120irrit": 14709, "ipper": 14710, "\u0120Enc": 14711, "\u0120Wire": 14712, "\u0120buzz": 14713, "\u0120GP": 14714, "asha": 14715, "\u0120accidentally": 14716, "uru": 14717, "\u0120frustrated": 14718, "\u0120SA": 14719, "\u0120hungry": 14720, "\u0120Huff": 14721, "\u0120labels": 14722, "anto": 14723, "\u0120EP": 14724, "\u0120barriers": 14725, ")|": 14726, "\u0120Berkeley": 14727, "\u0120Jets": 14728, "\u0120pairs": 14729, "\u0120Lan": 14730, "James": 14731, "\u0120Bear": 14732, "\u0120humor": 14733, "\u0120Liberty": 14734, "\u0120magnitude": 14735, "\u0120aging": 14736, "\u0120Mason": 14737, "\u0120friendship": 14738, "umbling": 14739, "\u0120emerge": 14740, "\u0120newspapers": 14741, "\u0120ambitious": 14742, "\u0120Richards": 14743, "aternal": 14744, "\u01201981": 14745, "\u0120cookies": 14746, "\u0120sculpt": 14747, "\u0120pursuit": 14748, "Location": 14749, "\u0120scripts": 14750, "pc": 14751, "\u0120arrangements": 14752, "\u0120diameter": 14753, "\u0120loses": 14754, "amation": 14755, "\u0120liqu": 14756, "\u0120Jake": 14757, "arette": 14758, "\u0120understands": 14759, "\u0120Zen": 14760, "vm": 14761, "\u0120approve": 14762, "\u0120wip": 14763, "\u0120ultra": 14764, "\u0120intend": 14765, "\u0120DI": 14766, "ascular": 14767, "\u0120stays": 14768, "\u0120Kor": 14769, "\u0120Kl": 14770, "\u0120investing": 14771, "La": 14772, "\u0120believing": 14773, "bad": 14774, "mouth": 14775, "\u0120taxpayer": 14776, "\u00e3\u0125\u0125": 14777, "\u0120Quebec": 14778, "\u0120lap": 14779, "\u0120Swiss": 14780, "drop": 14781, "\u0120drain": 14782, "iri": 14783, "etc": 14784, "ften": 14785, "\u0120Nex": 14786, "\u0120straw": 14787, "\u0120screaming": 14788, "\u0120counted": 14789, "\u0120damaging": 14790, "\u0120ambassador": 14791, "century": 14792, "\u0120prox": 14793, "\u0120arrests": 14794, "uv": 14795, "ilateral": 14796, "\u0120Charg": 14797, "\u0120prescribed": 14798, "\u0120independently": 14799, "\u0120fierce": 14800, "\u0120Baby": 14801, "\u0120brave": 14802, "\u0120suits": 14803, "=>": 14804, "\u0120baseline": 14805, "\u0120Rate": 14806, "\u0120islands": 14807, "\u0120((": 14808, "green": 14809, "ixels": 14810, "\u0120namely": 14811, "\u0120Village": 14812, "than": 14813, "amy": 14814, "Version": 14815, "gmail": 14816, "entials": 14817, "\u0120Sud": 14818, "\u0120Melbourne": 14819, "\u0120arriving": 14820, "\u0120quantum": 14821, "eff": 14822, "ropolitan": 14823, "Tri": 14824, "\u0120funeral": 14825, "\u0120IR": 14826, "\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124": 14827, "\u0120Cob": 
14828, "itably": 14829, "\u0120turb": 14830, "\u0120combo": 14831, "Review": 14832, "\u0120deployment": 14833, "uity": 14834, "\u0120Bott": 14835, "\u0120invisible": 14836, "\u0120rendering": 14837, "\u0120unlocked": 14838, "\u0120aqu": 14839, "\u0120Vladimir": 14840, "\u0120pad": 14841, "\u0120Brain": 14842, "\u0120Legacy": 14843, "dragon": 14844, "\u0120Kurdish": 14845, "\u0120sounded": 14846, "\u0120detained": 14847, "\u0120DM": 14848, "gary": 14849, "\u0120daughters": 14850, "\u0120disturbing": 14851, "uka": 14852, "\u0120Parad": 14853, "\u0120tast": 14854, "\u0120unfortunate": 14855, "\u0120ul": 14856, "emin": 14857, "\u0120attendance": 14858, "trl": 14859, "\u0120parks": 14860, "\u0120Memorial": 14861, "\u0120Alice": 14862, "othy": 14863, "guard": 14864, "\u0120Dise": 14865, "\u0120Shan": 14866, "\u0120Forum": 14867, "Rich": 14868, "\u0120shifted": 14869, "uez": 14870, "\u0120lighter": 14871, "\u0120Magn": 14872, "\u0120cod": 14873, "Sch": 14874, "hammad": 14875, "Pub": 14876, "350": 14877, "\u0120Pokemon": 14878, "\u0120prototype": 14879, "\u0120unre": 14880, "Base": 14881, "\u0120Students": 14882, "\u0120Reply": 14883, "\u0120Communist": 14884, "\u0120gau": 14885, "\u0120Tyler": 14886, "IZ": 14887, "\u0120participated": 14888, "\u0120suprem": 14889, "\u0120Details": 14890, "\u0120vessels": 14891, "rod": 14892, "\u0120tribe": 14893, "keep": 14894, "\u0120assumptions": 14895, "\u0120pound": 14896, "\u0120crude": 14897, "\u0120Available": 14898, "\u0120swimming": 14899, "\u0120inclusion": 14900, "\u0120advances": 14901, "culation": 14902, "\u0120conservation": 14903, "\u0120overd": 14904, "\u0120Buffalo": 14905, "Article": 14906, "edge": 14907, "\u0120awa": 14908, "\u0120Madison": 14909, "\u0120sidew": 14910, "\u0120catast": 14911, "\u0120Krist": 14912, "ucle": 14913, "\u0120Highway": 14914, "\u0120Terror": 14915, "\u0120activation": 14916, "\u0120unconscious": 14917, "\u0120Satan": 14918, "\u0120Susan": 14919, "illery": 14920, "\u0120arranged": 14921, "iop": 14922, "\u0120rumors": 14923, "urring": 14924, "think": 14925, "\u0120Keith": 14926, "\u0120Kind": 14927, "\u0120avoiding": 14928, "byn": 14929, "nut": 14930, "\u0120Speaker": 14931, "rus": 14932, "names": 14933, "\u0120guilt": 14934, "\u0120Olympics": 14935, "\u0120sail": 14936, "\u0120Mes": 14937, "levant": 14938, "\u0120Columbus": 14939, "aft": 14940, "City": 14941, "South": 14942, "\u0120Harvey": 14943, "\u0120Pun": 14944, "Several": 14945, "\u0120mentally": 14946, "\u0120impress": 14947, "mount": 14948, "\u0120Ubuntu": 14949, "\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136": 14950, "\u0120Superman": 14951, "\u0120MPs": 14952, "\u0120intentions": 14953, "\u0120Racing": 14954, "\u0120likelihood": 14955, "\u0120240": 14956, "Total": 14957, "\u0120toys": 14958, "\u0120Watson": 14959, "\u0120urge": 14960, "Lear": 14961, "\u0120Paper": 14962, "\u0120occurring": 14963, "\u0120Beng": 14964, "\u0120Cert": 14965, "\u0120stones": 14966, "Tim": 14967, "\u0120Twin": 14968, "zb": 14969, "\u0120Dynam": 14970, "\u0120politician": 14971, "kens": 14972, "\u0120Enterprise": 14973, "UTERS": 14974, "\u0120abol": 14975, "\u0120refresh": 14976, "\u0120arbitrary": 14977, "pection": 14978, "\u0120troubles": 14979, "\u0120});": 14980, "tv": 14981, "\u0120pilots": 14982, "\u0120distribute": 14983, "\u0120audit": 14984, "\u0120pause": 14985, "original": 14986, "\u0120rivals": 14987, "\u00c2\u00a3": 14988, "Fig": 14989, "TL": 14990, "abil": 14991, "rying": 
14992, "Lin": 14993, "ioned": 14994, "lon": 14995, "\u0120fancy": 14996, "\u0120crashed": 14997, "\u0120tract": 14998, "\u0120shed": 14999, "\u0120consume": 15000, "Based": 15001, "download": 15002, "init": 15003, "\u0120voltage": 15004, "Introdu": 15005, "\u0120condemned": 15006, "\u0120Finance": 15007, "respect": 15008, "\u0120excluded": 15009, "\u0120establishing": 15010, "heric": 15011, "\u0120heritage": 15012, "\u0120spectacular": 15013, "\u0120unst": 15014, "\u0120Snowden": 15015, "\u0120Lane": 15016, "San": 15017, "\u0120protections": 15018, "struction": 15019, "incinn": 15020, "\u0120macro": 15021, "Custom": 15022, "iosity": 15023, "\u0120esp": 15024, "\u0120functioning": 15025, "\u0120mush": 15026, "\u0120puzzle": 15027, "\u0120ethical": 15028, "Mal": 15029, "\u0120governing": 15030, "\u0120Ferguson": 15031, "\u0120restored": 15032, "\u0120stressed": 15033, "\u0120Counter": 15034, "\u0120Kas": 15035, "clip": 15036, "ANS": 15037, "\u0120seiz": 15038, "UK": 15039, "byss": 15040, "oldown": 15041, "api": 15042, "\u0120permanently": 15043, "ounters": 15044, "West": 15045, "Through": 15046, "Light": 15047, "atoes": 15048, "\u0120neat": 15049, "\u0120cord": 15050, "urer": 15051, "\u0120severely": 15052, "\u0120Aven": 15053, "\u0120interrog": 15054, "\u0120triple": 15055, "Given": 15056, "Number": 15057, "\u0120arise": 15058, "\u0120sher": 15059, "plant": 15060, "\u0120flower": 15061, "\u0120Cou": 15062, "\u0120ate": 15063, "\u0120newer": 15064, "bul": 15065, "\u0120meanwhile": 15066, "\u0120Lair": 15067, "\u0120adjustment": 15068, "\u0120Copyright": 15069, "\u0120divers": 15070, "iological": 15071, "\u0120gamers": 15072, "oat": 15073, "\u0120historically": 15074, "\u0120analog": 15075, "\u0120longtime": 15076, "\u0120prescription": 15077, "\u0120Mist": 15078, "\u0120Hyper": 15079, "\u0120Maine": 15080, "\u0120Deity": 15081, "\u0120multipl": 15082, "\u0120Reincarn": 15083, "\u0120Hyd": 15084, "\u0120Pic": 15085, "Sil": 15086, "rants": 15087, "\u0120Cris": 15088, ".;": 15089, "({": 15090, "ependence": 15091, "\u0120recy": 15092, "ateur": 15093, "\u0120quad": 15094, "\u0120glob": 15095, "\u0120conced": 15096, "team": 15097, "\u0120capitalist": 15098, "\u0120Lot": 15099, "\u0120royal": 15100, "\u0120Cyber": 15101, "\u0120blacks": 15102, "metic": 15103, "riv": 15104, "\u0120Danny": 15105, "\u0120spo": 15106, "\u0120RO": 15107, "\u0120animated": 15108, "rypted": 15109, "\u0120Deputy": 15110, "\u0120rendered": 15111, "FE": 15112, "\u0120streak": 15113, "\u0120clouds": 15114, "\u0120Doug": 15115, "~~~~~~~~": 15116, "\u0120discour": 15117, "\u0120Veh": 15118, "\u0120psychology": 15119, "\u0120Journey": 15120, "\u0120crystal": 15121, "\u0120Frost": 15122, "\u0120suspicion": 15123, "\u0120relate": 15124, "orus": 15125, "\u0120Crypt": 15126, "\u0120NVIDIA": 15127, "comed": 15128, "uting": 15129, "incinnati": 15130, "\u0120vulnerability": 15131, "ostic": 15132, "\u0120isolation": 15133, "\u0120cooling": 15134, "\u0120Coalition": 15135, "\u0120119": 15136, "Four": 15137, "\u0120Deal": 15138, "\u0120\u00e2\u012b": 15139, "semble": 15140, "rament": 15141, "\u0120Barcelona": 15142, "\u0120102": 15143, "\u0120cocaine": 15144, "ocalypse": 15145, "Feb": 15146, "ogenic": 15147, "\u0120mutation": 15148, "\u0120cryptoc": 15149, "\u0120Kel": 15150, "\u0120Git": 15151, "ais": 15152, "\u0120sisters": 15153, "ANK": 15154, "\u0120activate": 15155, "Ter": 15156, "\u0120dread": 15157, "ylon": 15158, "\u0120propri": 15159, "Aust": 15160, "\u0120Default": 15161, "\u0120outdoor": 15162, "\u0120sheer": 15163, "ceive": 
15164, "\u0120gently": 15165, "\u00d0\u00be": 15166, "Program": 15167, "\u0120\u00e2\u0128\u0134": 15168, "\u0120vegan": 15169, "\u0120Crus": 15170, "\u0120responsibilities": 15171, "\u0120HR": 15172, "OLD": 15173, "\u0120prevents": 15174, "\u0120stiff": 15175, "\u0120Were": 15176, "\u0120athletic": 15177, "\u0120Score": 15178, "\u0120):": 15179, "\u0120columns": 15180, "\u0120Loc": 15181, "available": 15182, "\u0120Fram": 15183, "\u0120Sessions": 15184, "\u0120companion": 15185, "\u0120packs": 15186, "140": 15187, "\u0120Knights": 15188, "\u0120fart": 15189, "\u0120streams": 15190, "\u0120shore": 15191, "\u0120appeals": 15192, "\u0120Performance": 15193, "haul": 15194, "\u0120Stra": 15195, "\u0120Nag": 15196, "103": 15197, "\u0120Transportation": 15198, "BB": 15199, "Ev": 15200, "zan": 15201, "Public": 15202, "\u0120twin": 15203, "ulsion": 15204, "Mult": 15205, "\u0120electro": 15206, "\u0120statue": 15207, "ationally": 15208, "\u0120Nort": 15209, "\u0120inspection": 15210, "/*": 15211, "igue": 15212, "\u0120compassion": 15213, "\u0120Tales": 15214, "\u0120Stein": 15215, "\u0120Screen": 15216, "\u0120Bug": 15217, "\u0120Lion": 15218, "girl": 15219, "\u0120withdrawal": 15220, "\u0120objectives": 15221, "\u0120bloody": 15222, "\u0120preliminary": 15223, "\u0120jacket": 15224, "\u0120dimensions": 15225, "\u0120Cool": 15226, "\u0120Occup": 15227, "\u0120wreck": 15228, "\u0120doubled": 15229, "anking": 15230, "\u01201975": 15231, "\u0120glasses": 15232, "\u0120Wang": 15233, "prov": 15234, "Path": 15235, "connected": 15236, "\u0120Multi": 15237, "\u0120Norway": 15238, "agonist": 15239, "\u0120feared": 15240, "\u0120touching": 15241, "\u0120arguably": 15242, "\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af": 15243, "\u0120NCAA": 15244, "chem": 15245, "\u0120spat": 15246, "\u0120WWE": 15247, "\u0120Cel": 15248, "igger": 15249, "\u0120attacker": 15250, "\u0120Join": 15251, "object": 15252, "etta": 15253, "\u0120eliminated": 15254, "det": 15255, "\u0120destruct": 15256, "\u0120Lucas": 15257, "ctuary": 15258, "180": 15259, "\u0120Brady": 15260, "\u0120Blues": 15261, "Bay": 15262, "aukee": 15263, "\u0120timeline": 15264, "\u0120delegates": 15265, "written": 15266, "ufficient": 15267, "\u0120shapes": 15268, "Copyright": 15269, "ouble": 15270, "service": 15271, "\u0120pione": 15272, "\u0120colleges": 15273, "\u0120rows": 15274, "\u0120spite": 15275, "\u0120assessed": 15276, "360": 15277, "\u0120lease": 15278, "\u0120confidential": 15279, "cker": 15280, "\u0120Manning": 15281, "\u0120Voice": 15282, "\u0120sealed": 15283, "\u0120calculate": 15284, "NO": 15285, "\u0120Assistant": 15286, "\u0120teenager": 15287, "ulent": 15288, "atherine": 15289, "\u0120mock": 15290, "\u0120diamond": 15291, "\u0120fest": 15292, "\u0120switched": 15293, "\u0120resume": 15294, "\u0120Puerto": 15295, "\u0120lanes": 15296, "iration": 15297, "\u0120Similarly": 15298, "\u0120rod": 15299, "\u0120Sel": 15300, "\u0120Palace": 15301, "\u0120Limited": 15302, "eous": 15303, "\u0120variant": 15304, "\u0120ward": 15305, "\u0120))": 15306, "Show": 15307, "OOK": 15308, "Alex": 15309, "\u0120Nep": 15310, "bris": 15311, "\u0120Wikipedia": 15312, "\u0120exceptional": 15313, "\u0120manages": 15314, "\u0120Draw": 15315, "Again": 15316, "\u0120copper": 15317, "utt": 15318, "\u0120exports": 15319, "\u0120portfolio": 15320, "\u0120elevated": 15321, "Rated": 15322, "\u0120Otherwise": 15323, "\u0120Tact": 15324, "\u0120Shel": 15325, "\u0120TX": 15326, "\"\u00e2\u0122\u0136": 15327, "\u0120resur": 
15328, "\u0120Wa": 15329, "venant": 15330, "\u0120monetary": 15331, "people": 15332, "Email": 15333, "\u0120fifty": 15334, "\u0120Sweet": 15335, "\u0120Malaysia": 15336, "\u0120confusing": 15337, "\u0120Rio": 15338, "uda": 15339, "utenant": 15340, "\");": 15341, "\u0120praised": 15342, "\u0120volumes": 15343, "turn": 15344, "\u0120mature": 15345, "\u0120nonprofit": 15346, "\u0120passionate": 15347, "\u0120Private": 15348, "\u0120103": 15349, "\u0120descend": 15350, "\u00e7\u00a5\u0140": 15351, "uffy": 15352, "headed": 15353, "Whether": 15354, "rien": 15355, "zech": 15356, "beit": 15357, "\u0120chrom": 15358, "\u0120McM": 15359, "\u0120dancing": 15360, "\u0120eleg": 15361, "\u0120Noticed": 15362, "115": 15363, "\u0120advocacy": 15364, "ENTS": 15365, "ambling": 15366, "\u0120Minor": 15367, "\u0120Finn": 15368, "\u0120priorities": 15369, "\u0120thereof": 15370, "\u0120Stage": 15371, "\u0120Rogers": 15372, "\u0120substitute": 15373, "\u0120Jar": 15374, "\u0120Jefferson": 15375, "\u0120lightly": 15376, "102": 15377, "\u0120Lisa": 15378, "uits": 15379, "ysical": 15380, "\u0120shifts": 15381, "\u0120drones": 15382, "\u0120workplace": 15383, "\u0120resid": 15384, "ensed": 15385, "ahn": 15386, "\u0120preferences": 15387, "server": 15388, "\u0120debates": 15389, "doc": 15390, "\u0120Gods": 15391, "\u0120helicopter": 15392, "\u0120honour": 15393, "\u0120considerably": 15394, "eded": 15395, "\u0120Female": 15396, "\u0120Anne": 15397, "\u0120reun": 15398, "\u0120Face": 15399, "\u0120Hallow": 15400, "\u0120Budget": 15401, "\u0120condemn": 15402, "\u0120tender": 15403, "Prof": 15404, "ocratic": 15405, "\u0120Turner": 15406, "\u0120Agric": 15407, "\u01201976": 15408, "\u0120apt": 15409, "disc": 15410, "\u0120Fighter": 15411, "\u0120Aur": 15412, "\u0120garbage": 15413, "input": 15414, "\u0120Karl": 15415, "\u0120Oliver": 15416, "\u0120Language": 15417, "kn": 15418, "Non": 15419, "\u0120Clar": 15420, "\u0120traditions": 15421, "\u0120advertisement": 15422, "\u0120Sor": 15423, "\u0120archive": 15424, "\u0120villages": 15425, "750": 15426, "\u0120implementing": 15427, "waukee": 15428, "\u0120dietary": 15429, "\u0120switching": 15430, "Republic": 15431, "\u0120velocity": 15432, "\u0120cit": 15433, "\u0120Awards": 15434, "\u0120financing": 15435, "\u0120lasted": 15436, ")]": 15437, "\u0120reminder": 15438, "Person": 15439, "\u0120precision": 15440, "\u0120designers": 15441, "\u0120Fried": 15442, "\u0120Border": 15443, "\u0120tragic": 15444, "\u0120wield": 15445, "\u0120initiatives": 15446, "\u0120Tank": 15447, "wer": 15448, "\u0120joins": 15449, "Ro": 15450, "inery": 15451, "\u0120arrow": 15452, "\u0120generating": 15453, "founder": 15454, "\u0120searches": 15455, "\u0120randomly": 15456, "Access": 15457, "\u0120batch": 15458, "\u0120posed": 15459, "lat": 15460, "\u0120pursuing": 15461, "asa": 15462, "\u0120testified": 15463, "forming": 15464, "\u0120Shar": 15465, "wiki": 15466, "\u0120Either": 15467, "Sometimes": 15468, "\u0120senators": 15469, "\u0120Johnny": 15470, "\u0120Taliban": 15471, "\u0120GPS": 15472, "\":\"/": 15473, "\u00e3\u0123\u00ae\u00e5": 15474, "\u0120analyzed": 15475, "\u0120Rubio": 15476, "\u0120Movement": 15477, "opard": 15478, "iii": 15479, "Stand": 15480, "fight": 15481, "\u0120ignoring": 15482, "iang": 15483, "\u0120GN": 15484, "soever": 15485, "\u0120STAT": 15486, "\u0120refusing": 15487, "\u0120sweat": 15488, "\u0120bay": 15489, "PORT": 15490, "irmed": 15491, "aky": 15492, "\u0120dispro": 15493, "\u0120labeled": 15494, "\u0120108": 15495, "Hello": 15496, "\u0120pleasant": 15497, "aba": 
15498, "\u0120triumph": 15499, "\u0120aboard": 15500, "\u0120incom": 15501, "\u0120Crow": 15502, "lett": 15503, "\u0120folk": 15504, "\u0120chase": 15505, "``": 15506, "\u0120Brus": 15507, "\u0120teens": 15508, "cue": 15509, "\u0120terrain": 15510, "hyd": 15511, "ilight": 15512, "ORY": 15513, "Support": 15514, "ews": 15515, "lli": 15516, "raints": 15517, "\u0120Cand": 15518, "\u0120abused": 15519, "achment": 15520, "larg": 15521, "Bas": 15522, "\u0120Cancer": 15523, "\u01201978": 15524, "\u0120supporter": 15525, "access": 15526, "\u0120Termin": 15527, "\u0120Tampa": 15528, "\u0120ANY": 15529, "\u0120newest": 15530, "\u0120Criminal": 15531, "edu": 15532, "\u01201930": 15533, "\u0120admits": 15534, "\u0120ende": 15535, "\u0120failures": 15536, "urate": 15537, "fulness": 15538, "cycl": 15539, "\u0120Subject": 15540, "\u0120infinite": 15541, "three": 15542, "WA": 15543, "pit": 15544, "\u0120Install": 15545, "Rad": 15546, "iliation": 15547, "GM": 15548, "\u0120continent": 15549, "\u0120accommodate": 15550, "\u0120Clay": 15551, "\u0120pup": 15552, "\u0120Function": 15553, "\u0120hammer": 15554, "\u0120Alberta": 15555, "\u0120revised": 15556, "\u0120minorities": 15557, "\u0120measurement": 15558, "Connell": 15559, "\u0120disable": 15560, "\u0120Mix": 15561, "Incre": 15562, "\u0120fork": 15563, "\u0120Rosen": 15564, "\u0120implies": 15565, "umblr": 15566, "ANG": 15567, "\u0120proteins": 15568, "\u0120aggression": 15569, "\u0120facilitate": 15570, "SN": 15571, "\u0120illegally": 15572, "uer": 15573, "\u0120academ": 15574, "\u0120puzz": 15575, "\u0120Shift": 15576, "pay": 15577, "ollo": 15578, "\u0120audiences": 15579, "Build": 15580, "\u0120noble": 15581, "\u0120syntax": 15582, "\u00e2\u013a\u0127": 15583, "\u0120beam": 15584, "\u0120Bed": 15585, "\u0120Ald": 15586, "\u0120origins": 15587, "video": 15588, "\u01201977": 15589, "\u0120Assault": 15590, "\u0120garage": 15591, "Team": 15592, "\u0120verdict": 15593, "\u0120dwar": 15594, "\u0120Virtual": 15595, "event": 15596, "Keep": 15597, "\u0120sentiment": 15598, "\u0120wildlife": 15599, "shirt": 15600, "\u0120burg": 15601, "\u0120recommendation": 15602, "represent": 15603, "\u0120gallery": 15604, "owners": 15605, "\u0120scholar": 15606, "\u0120convenience": 15607, "\u0120Swift": 15608, "\u0120convinc": 15609, "Cap": 15610, "\u0120warfare": 15611, "\u0120Visual": 15612, "\u0120constitute": 15613, "\u0120abort": 15614, "\u0120Weather": 15615, "\u0120Looking": 15616, "\u0120Hem": 15617, "\u0120martial": 15618, "\u0120incoming": 15619, "etition": 15620, "\u0120tolerance": 15621, "\u0120Created": 15622, "\u0120flows": 15623, "\u0120Elder": 15624, "\u0120souls": 15625, "\u0120foul": 15626, "\u0120Pain": 15627, "\u0120CAN": 15628, "\u0120220": 15629, "bc": 15630, "hend": 15631, "\u0120genius": 15632, "Real": 15633, "\u0120Wr": 15634, "ometer": 15635, "pad": 15636, "\u0120limiting": 15637, "\u0120Si": 15638, "\u0120Lore": 15639, "\u0120Adventures": 15640, "\u0120varied": 15641, "Disc": 15642, "fin": 15643, "\u0120Personal": 15644, "Chris": 15645, "\u0120invented": 15646, "\u0120dive": 15647, "\u0120Rise": 15648, "\u0120oz": 15649, "\u0120Comics": 15650, "\u0120expose": 15651, "\u0120Reb": 15652, "letters": 15653, "site": 15654, "imated": 15655, "\u0120hacking": 15656, "\u0120educated": 15657, "\u0120Nobody": 15658, "\u0120depri": 15659, "\u0120incentive": 15660, "\u00e3\u0124\u00b7": 15661, "\u0120oversight": 15662, "\u0120tribes": 15663, "\u0120Belgium": 15664, "\u0120licensing": 15665, "ourt": 15666, "Product": 15667, "ahl": 15668, "\u0120Gem": 15669, 
"\u0120specialist": 15670, "\u0120cra": 15671, "anners": 15672, "\u0120Corbyn": 15673, "\u01201973": 15674, "READ": 15675, "\u0120summar": 15676, "\u0120overlook": 15677, "\u0120Application": 15678, "\u0120inappropriate": 15679, "\u0120downloaded": 15680, "Que": 15681, "\u0120Bears": 15682, "\u0120thumb": 15683, "\u0120Character": 15684, "\u0120Reincarnated": 15685, "\u0120Sid": 15686, "\u0120demonstrates": 15687, "sky": 15688, "\u0120Bloomberg": 15689, "\u0120Array": 15690, "\u0120Results": 15691, "\u0120Fourth": 15692, "\u0120EDT": 15693, "\u0120Oscar": 15694, "cend": 15695, "\u0120106": 15696, "\u0120NULL": 15697, "\u0120HERE": 15698, "match": 15699, "\u0120Brun": 15700, "\u0120glucose": 15701, "ieg": 15702, "egu": 15703, "\u0120certified": 15704, "\u0120relie": 15705, "\u0120humanitarian": 15706, "\u0120prayers": 15707, "King": 15708, "\u0120nan": 15709, "hou": 15710, "108": 15711, "ulu": 15712, "\u0120renewable": 15713, "\u0120distinguish": 15714, "\u0120dense": 15715, "\u0120Vent": 15716, "\u0120Package": 15717, "\u0120Boss": 15718, "\u0120editors": 15719, "\u0120migr": 15720, "Tra": 15721, "\u0120Peters": 15722, "\u0120Arctic": 15723, "2004": 15724, "\u0120Cape": 15725, "\u0120locally": 15726, "\u0120lasting": 15727, "\u0120handy": 15728, ".).": 15729, "Pan": 15730, "\u0120RES": 15731, "Index": 15732, "\u0120tensions": 15733, "\u0120formerly": 15734, "\u0120ideological": 15735, "\u0120sensors": 15736, "\u0120dealers": 15737, "\u0120defines": 15738, "Sk": 15739, "\u0120proceeds": 15740, "\u0120proxy": 15741, "azines": 15742, "\u0120Bash": 15743, "\u0120Pad": 15744, "\u0120Craft": 15745, "ealous": 15746, "\u0120sheets": 15747, "ometry": 15748, "June": 15749, "clock": 15750, "TT": 15751, "\u0120Theatre": 15752, "\u0120Buzz": 15753, "\u0120chapters": 15754, "\u0120millenn": 15755, "\u0120dough": 15756, "\u0120Congressional": 15757, "\u0120imagined": 15758, "avior": 15759, "\u0120clinic": 15760, "\u01201945": 15761, "\u0120holder": 15762, "root": 15763, "olester": 15764, "\u0120restart": 15765, "BN": 15766, "\u0120Hamas": 15767, "\u0120Job": 15768, "\u0120orb": 15769, "\u0120ram": 15770, "\u0120disclose": 15771, "\u0120translate": 15772, "\u0120immigrant": 15773, "\u0120annoying": 15774, "\u0120treaty": 15775, "anium": 15776, "\u0120Tea": 15777, "\u0120Legion": 15778, "\u0120crowds": 15779, "\u0120Bec": 15780, "\u0120Aer": 15781, "ohyd": 15782, "Bro": 15783, "Looking": 15784, "\u0120lbs": 15785, "\u0120aggress": 15786, "\u0120seam": 15787, "\u0120intercept": 15788, "\u0120MI": 15789, "mercial": 15790, "activ": 15791, "\u0120Cit": 15792, "\u0120dimension": 15793, "\u0120consistency": 15794, "\u0120rushing": 15795, "\u0120Douglas": 15796, "\u0120trim": 15797, "Install": 15798, "icker": 15799, "\u0120shy": 15800, "106": 15801, "\u0120mentions": 15802, "pelled": 15803, "\u0120Tak": 15804, "cost": 15805, "\u0120classroom": 15806, "\u0120fortune": 15807, "driven": 15808, "\u0120unle": 15809, "\u0120Wheel": 15810, "\u0120investor": 15811, "\u0120Masters": 15812, "kit": 15813, "\u0120associations": 15814, "\u0120Evolution": 15815, "oping": 15816, "uscript": 15817, "\u0120provincial": 15818, "\u0120Walter": 15819, "avi": 15820, "SO": 15821, "\u0120unlimited": 15822, "English": 15823, "\u0120Cards": 15824, "\u0120Ebola": 15825, "nered": 15826, "\u0120revenge": 15827, "\u0120outright": 15828, "umper": 15829, "\u0120fitting": 15830, "\u0120Solid": 15831, "\u0120formally": 15832, "\u0120problematic": 15833, "\u0120hazard": 15834, "\u0120encryption": 15835, "\u0120straightforward": 15836, "\u0120AK": 
15837, "\u0120pse": 15838, "\u0120Orb": 15839, "\u0120Chamber": 15840, "\u0120Mak": 15841, "Contents": 15842, "\u0120loyalty": 15843, "\u0120lyrics": 15844, "\u0120Sym": 15845, "\u0120welcomed": 15846, "\u0120cooked": 15847, "\u0120monop": 15848, "\u0120nurse": 15849, "\u0120misleading": 15850, "\u0120eternal": 15851, "\u0120shifting": 15852, "\u0120+=": 15853, "Vis": 15854, "\u0120institutional": 15855, "illary": 15856, "\u0120pant": 15857, "VERT": 15858, "\u0120ACC": 15859, "\u0120Enh": 15860, "\u0120incon": 15861, "\u0120REUTERS": 15862, "\u0120donated": 15863, "\u00e2\u0122\u00a6\u00e2\u0122\u00a6\u00e2\u0122\u00a6\u00e2\u0122\u00a6": 15864, "Intern": 15865, "\u0120exhibit": 15866, "\u0120tire": 15867, "\u0120Ric": 15868, "\u0120Champion": 15869, "\u0120Muhammad": 15870, "NING": 15871, "\u0120Soccer": 15872, "\u0120mobility": 15873, "\u0120varying": 15874, "\u0120Movie": 15875, "\u0120lord": 15876, "oak": 15877, "Field": 15878, "\u0120vector": 15879, "usions": 15880, "\u0120scrap": 15881, "\u0120enabling": 15882, "make": 15883, "Tor": 15884, ".*": 15885, "||": 15886, "\u0120Website": 15887, "\u0120NPC": 15888, "\u0120socialist": 15889, "\u0120Billy": 15890, "\u0120Additional": 15891, "\u0120cargo": 15892, "\u0120farms": 15893, "\u0120Soon": 15894, "\u0120Prize": 15895, "\u0120midnight": 15896, "\u0120900": 15897, "seen": 15898, "\u0120Spot": 15899, "\u0120sheep": 15900, "\u0120sponsored": 15901, "\u0120Hi": 15902, "\u0120Jump": 15903, "\u01201967": 15904, "Microsoft": 15905, "\u0120Agent": 15906, "\u0120charts": 15907, "dir": 15908, "\u0120adjacent": 15909, "\u0120tricks": 15910, "\u0120manga": 15911, "\u0120exagger": 15912, "/>": 15913, "football": 15914, "\u0120FCC": 15915, "GC": 15916, "\u0120Tier": 15917, "andra": 15918, "OUND": 15919, "%),": 15920, "\u0120fruits": 15921, "VC": 15922, "\u0120AA": 15923, "Rober": 15924, "\u0120midst": 15925, "\u00e2\u0139": 15926, "anka": 15927, "\u0120legislature": 15928, "\u0120Neil": 15929, "\u0120tourists": 15930, "\"\"": 15931, "\u0120Warning": 15932, "\u0120Nevertheless": 15933, "\u0120Official": 15934, "\u0120Whatever": 15935, "\u0120mold": 15936, "\u0120drafted": 15937, "\u0120substances": 15938, "\u0120breed": 15939, "\u0120tags": 15940, "\u0120Task": 15941, "\u0120verb": 15942, "\u0120manufactured": 15943, "comments": 15944, "\u0120Polish": 15945, "Prov": 15946, "\u0120determines": 15947, "Obama": 15948, "kers": 15949, "\u0120utterly": 15950, "\u0120sect": 15951, "sche": 15952, "\u0120Gates": 15953, "\u0120Chap": 15954, "\u0120aluminum": 15955, "\u0120zombie": 15956, "\u0120Touch": 15957, "\u0120UP": 15958, "\u0120satisfy": 15959, "\u0120predomin": 15960, "ascript": 15961, "\u0120elaborate": 15962, "\u01201968": 15963, "\u0120measuring": 15964, "\u0120Vari": 15965, "anyahu": 15966, "\u0120sir": 15967, "ulates": 15968, "idges": 15969, "ickets": 15970, "\u0120Spencer": 15971, "TM": 15972, "oubted": 15973, "\u0120prey": 15974, "\u0120installing": 15975, "\u0120Cab": 15976, "reed": 15977, "reated": 15978, "Supp": 15979, "\u0120wrist": 15980, "\u0120Kerry": 15981, "107": 15982, "\u0120Kle": 15983, "\u0120Rachel": 15984, "\u0120cotton": 15985, "\u0120ARE": 15986, "\u0120Ele": 15987, "Control": 15988, "\u0120loads": 15989, "\u0120Dod": 15990, "anas": 15991, "bone": 15992, "\u0120classical": 15993, "\u0120Regional": 15994, "\u0120Integ": 15995, "VM": 15996, "\u0120desires": 15997, "\u0120autism": 15998, "supported": 15999, "\u0120Message": 16000, "\u0120compact": 16001, "writer": 16002, "\u0120109": 16003, "\u0120Hurricane": 16004, "cision": 
16005, "\u0120cycles": 16006, "\u0120drill": 16007, "\u0120colleague": 16008, "\u0120maker": 16009, "German": 16010, "\u0120mistaken": 16011, "Sun": 16012, "\u0120Gay": 16013, "\u0120whatsoever": 16014, "\u0120sells": 16015, "\u0120Airl": 16016, "liv": 16017, "\u0120Option": 16018, "\u0120solved": 16019, "\u0120sectors": 16020, "\u0120horizontal": 16021, "\u0120equation": 16022, "\u0120Skill": 16023, "\u0120Bio": 16024, "gement": 16025, "\u0120Snap": 16026, "\u0120Legal": 16027, "\u0120trademark": 16028, "\u0120makeup": 16029, "\u0120assembled": 16030, "\u0120saves": 16031, "\u0120Halloween": 16032, "\u0120Vermont": 16033, "\u0120FROM": 16034, "\u0120farming": 16035, "\u0120Podcast": 16036, "acceptable": 16037, "\u0120Higher": 16038, "\u0120asleep": 16039, "ullivan": 16040, "\u0120referen": 16041, "\u0120Lev": 16042, "\u0120bullets": 16043, "oko": 16044, "HC": 16045, "\u0120stairs": 16046, "\u0120maintains": 16047, "\u0120Lower": 16048, "\u0120Vi": 16049, "\u0120marine": 16050, "\u0120acres": 16051, "\u0120coordinator": 16052, "\u0120Joh": 16053, "\u0120counterparts": 16054, "\u0120Brothers": 16055, "\u0120indict": 16056, "bra": 16057, "\u0120chunk": 16058, "\u0120cents": 16059, "Home": 16060, "\u0120Month": 16061, "\u0120accordingly": 16062, "ifles": 16063, "\u0120Germans": 16064, "\u0120Syn": 16065, "Hub": 16066, "\u0120eyeb": 16067, "\u00e2\u0136\u0122\u00e2\u0136\u0122\u00e2\u0136\u0122\u00e2\u0136\u0122": 16068, "\u0120ranges": 16069, "\u0120Holland": 16070, "\u0120Robot": 16071, "fc": 16072, "Mike": 16073, "\u0120plasma": 16074, "\u0120swap": 16075, "\u0120athlete": 16076, "\u0120Rams": 16077, ",'\"": 16078, "\u0120infections": 16079, "\u0120corrid": 16080, "\u0120vib": 16081, "\u0120patches": 16082, "\u0120traditionally": 16083, "\u0120revelation": 16084, "\u0120sweep": 16085, "\u0120glance": 16086, "\u0120inex": 16087, "2003": 16088, "\u0120Raw": 16089, "working": 16090, "osures": 16091, "\u0120Dat": 16092, "\u0120Lynch": 16093, "\u0120leverage": 16094, "\u0120Reid": 16095, "\u0120correlation": 16096, "iances": 16097, "avascript": 16098, "\u0120repository": 16099, "retty": 16100, "\u01201972": 16101, "240": 16102, "\u0120oun": 16103, "pol": 16104, "\u0120Reed": 16105, "\u0120tactical": 16106, "isite": 16107, "Apple": 16108, "\u0120Quinn": 16109, "\u0120raped": 16110, "illo": 16111, "Europe": 16112, "\u0120algorithms": 16113, "\u0120Rodrig": 16114, "iu": 16115, "\u0120illum": 16116, "\u0120fame": 16117, "\u0120introducing": 16118, "\u0120delays": 16119, "\u0120Raiders": 16120, "\u0120whistle": 16121, "\u0120novels": 16122, "\u0120Really": 16123, "\u0120deriv": 16124, "\u0120publications": 16125, "\u0120Neither": 16126, "\u0120Commerce": 16127, "\u0120aston": 16128, "language": 16129, "Notes": 16130, "\u0120Roth": 16131, "\u0120Fear": 16132, "\u0120mate": 16133, "\u0120parade": 16134, "\u0120QB": 16135, "\u0120maneu": 16136, "\u0120Cincinnati": 16137, "mitting": 16138, "\u0120waist": 16139, "\u0120Rew": 16140, "\u0120discont": 16141, "\u00d0\u00b0": 16142, "\u0120staring": 16143, "\u0120alias": 16144, "\u0120securities": 16145, "\u0120toilet": 16146, "\u0120Jedi": 16147, "\u0120unlaw": 16148, "vised": 16149, "////////": 16150, "](": 16151, "\u0120Weiss": 16152, "\u0120prest": 16153, "\u0120Compan": 16154, "\u0120memo": 16155, "\u0120Grace": 16156, "July": 16157, "\u0120Elite": 16158, "center": 16159, "\u0120Stay": 16160, "\u0120galaxy": 16161, "\u0120tooth": 16162, "\u0120Settings": 16163, "\u0120subjected": 16164, "\u00e3\u0124\u00a6": 16165, "\u0120lineback": 16166, 
"\u0120retailers": 16167, "\u0120Want": 16168, "\u0120dangers": 16169, "Air": 16170, "\u0120voluntary": 16171, "eway": 16172, "\u0120interpreted": 16173, "otine": 16174, "\u00c3\u00a7": 16175, "\u0120pel": 16176, "Service": 16177, "\u0120Eventually": 16178, "\u0120careers": 16179, "\u0120threaten": 16180, "\u0120memor": 16181, "\u0120Bradley": 16182, "ancies": 16183, "sn": 16184, "\u0120Unknown": 16185, "National": 16186, "\u0120shadows": 16187, "ailand": 16188, "\u0120Dash": 16189, "Everyone": 16190, "izzard": 16191, "March": 16192, "=(": 16193, "\u0120pulls": 16194, "\u0120stranger": 16195, "\u0120backwards": 16196, "\u0120Bernard": 16197, "imensional": 16198, "\u0120chron": 16199, "\u0120theoretical": 16200, "ktop": 16201, "\u0120ware": 16202, "\u0120Investig": 16203, "\u0120Initi": 16204, "\u0120Operations": 16205, "oven": 16206, "ocide": 16207, "*/": 16208, "\u0120flames": 16209, "\u0120Cash": 16210, "shit": 16211, "\u0120cab": 16212, "\u0120Analy": 16213, "\u0120Seah": 16214, "\u0120defining": 16215, "\u0120ordering": 16216, "\u0120immun": 16217, "\u0120persistent": 16218, "ACH": 16219, "Russian": 16220, "mans": 16221, "\u0120hind": 16222, "\u0120photography": 16223, "\u00c2\u00a9": 16224, "\u0120hug": 16225, "\u0120107": 16226, "\u0120Hence": 16227, "iots": 16228, "udeau": 16229, "\u0120subsidies": 16230, "\u0120routinely": 16231, "\u0120Device": 16232, "itic": 16233, "\u0120disgust": 16234, "lander": 16235, "\u01201940": 16236, "\u0120assignment": 16237, "\u0120Besides": 16238, "wick": 16239, "\u0120Dust": 16240, "usc": 16241, "structed": 16242, "111": 16243, "develop": 16244, "\u0120fond": 16245, "\u0120intersection": 16246, "\u0120dignity": 16247, "\u0120commissioner": 16248, "Without": 16249, "reach": 16250, "\u0120cartoon": 16251, "\u0120scales": 16252, "\u00e3\u0125\u0143": 16253, "FIG": 16254, "\u0120surveys": 16255, "\u0120Indonesia": 16256, "\u0120artwork": 16257, "\u0120unch": 16258, "\u0120cycling": 16259, "unct": 16260, "auer": 16261, "orate": 16262, "\u0120Obviously": 16263, "\u0120characterized": 16264, "feld": 16265, "\u0120affirm": 16266, "\u0120innings": 16267, "\u0120\u00e9": 16268, "\u0120aliens": 16269, "\u0120cloth": 16270, "etooth": 16271, "\u0120Certain": 16272, "\u00c2\u00a7": 16273, "\u0120digest": 16274, "know": 16275, "\u0120XL": 16276, "\u0120predictions": 16277, "\u0120din": 16278, "WAR": 16279, "\u0120aftermath": 16280, "Example": 16281, "\u0120Success": 16282, "\u0120Thr": 16283, "IGN": 16284, "\u0120miner": 16285, "Bus": 16286, "\u0120clarity": 16287, "heimer": 16288, "\u0120OUT": 16289, "\u0120Send": 16290, "\u0120Circle": 16291, "\u0120Diet": 16292, "\u0120pronounced": 16293, "\u0120creators": 16294, "\u0120earthquake": 16295, "attery": 16296, "geons": 16297, "\u0120od": 16298, "\u0120laying": 16299, "orp": 16300, "Ult": 16301, "project": 16302, "\u0120undermin": 16303, "\u0120sequel": 16304, "Sam": 16305, "\u0120Darkness": 16306, "\u0120reception": 16307, "bull": 16308, "YS": 16309, "\u0120Vir": 16310, "\u0120sequences": 16311, "\u0120Coin": 16312, "\u0120outfit": 16313, "\u0120Wait": 16314, "119": 16315, "\u0120delivers": 16316, "......": 16317, "\u0120blown": 16318, "\u0120Esc": 16319, "\u0120Math": 16320, "perm": 16321, "\u0120Ul": 16322, "\u0120glim": 16323, "\u0120facial": 16324, "\u0120greenhouse": 16325, "\u0120tokens": 16326, "/-": 16327, "\u0120Annual": 16328, "\u0120ONE": 16329, "\u0120teenage": 16330, "\u0120Physical": 16331, "\u0120Lang": 16332, "\u0120Celt": 16333, "\u0120sued": 16334, "ividually": 16335, "\u0120patience": 16336, 
"chair": 16337, "regular": 16338, "\u0120aug": 16339, "inv": 16340, "except": 16341, "\u0120Lil": 16342, "\u0120nest": 16343, "fd": 16344, "sum": 16345, "\u0120Chase": 16346, "Russia": 16347, "\u0120Jennifer": 16348, "\u0120offseason": 16349, "Overall": 16350, "Fore": 16351, "\u0120riot": 16352, "Aud": 16353, "former": 16354, "\u0120defenders": 16355, "\u0120CT": 16356, "iotic": 16357, "ribly": 16358, "\u0120automated": 16359, "\u0120penis": 16360, "\u0120insist": 16361, "\u0120diagram": 16362, "\u0120SQL": 16363, "\u0120Garc": 16364, "\u0120witch": 16365, "client": 16366, "ierra": 16367, "ambers": 16368, "\u0120recount": 16369, "far": 16370, "Very": 16371, "osterone": 16372, "\u0120appreciated": 16373, "\u0120Perfect": 16374, "Section": 16375, "\u0120doses": 16376, "ocaust": 16377, "\u0120costly": 16378, "\u0120grams": 16379, "\u0120Shi": 16380, "\u0120wrestling": 16381, "\u01201971": 16382, "\u0120trophy": 16383, "\u0120nerve": 16384, "\u0120Kaz": 16385, "\u0120Experience": 16386, "\u0120pledged": 16387, "\u0120playback": 16388, "\u0120creativity": 16389, "bye": 16390, "\u0120attackers": 16391, "\u0120holders": 16392, "\u0120Coach": 16393, "\u0120PhD": 16394, "\u0120transfers": 16395, "\u0120colored": 16396, "\u0120Hindu": 16397, "\u0120drown": 16398, "\u0120listened": 16399, "\u0120WA": 16400, "iasm": 16401, "PO": 16402, "\u0120appealing": 16403, "\u0120disclosed": 16404, "\u0120Chicken": 16405, "agging": 16406, "\u0120pleaded": 16407, "\u0120navigation": 16408, "\u0120Returns": 16409, "\u0120[[": 16410, "ROR": 16411, "EA": 16412, "\u0120photographer": 16413, "\u0120Rider": 16414, "ippers": 16415, "\u0120slice": 16416, "\u0120erect": 16417, "\u0120hed": 16418, "issance": 16419, "\u0120Vikings": 16420, "urious": 16421, "\u0120appet": 16422, "oubtedly": 16423, "Child": 16424, "\u0120authentic": 16425, "oos": 16426, "\u0120Making": 16427, "\u0120announcing": 16428, "\u0120bod": 16429, "\u0120meter": 16430, "\u0120Nine": 16431, "\u0120Rogue": 16432, "\u0120workforce": 16433, "\u0120renewed": 16434, "\u0120organisations": 16435, "acs": 16436, "PLE": 16437, "Short": 16438, "\u0120compounds": 16439, "\u0120Visit": 16440, "\u0120envelop": 16441, "earth": 16442, "\u0120supportive": 16443, "ggle": 16444, "\u0120Brussels": 16445, "\u0120Guild": 16446, "Create": 16447, "REL": 16448, "\u0120averaged": 16449, "\u01201969": 16450, "riages": 16451, "\u0120lengthy": 16452, "\u0120forgot": 16453, "Okay": 16454, "\u0120Erd": 16455, "\u0120dealer": 16456, "\u0120recession": 16457, "DD": 16458, "\u0120desperately": 16459, "\u0120hunger": 16460, "\u0120sticks": 16461, "\u0120mph": 16462, "\u0120Faith": 16463, "\u0120intentionally": 16464, "\u0120demol": 16465, "ueller": 16466, "\u0120Sale": 16467, "\u0120debris": 16468, "spring": 16469, "\u0120leap": 16470, ">>>>": 16471, "\u0120containers": 16472, "selling": 16473, "ranean": 16474, "attering": 16475, "\u0120commented": 16476, "\u0120CM": 16477, "onut": 16478, "\u0120woods": 16479, "especially": 16480, "\u0120organize": 16481, "ivic": 16482, "\u0120Woods": 16483, "anga": 16484, "squ": 16485, "\u0120maj": 16486, "amon": 16487, "\u0120axis": 16488, "\u01201974": 16489, "\u0120Denmark": 16490, "\u0120warrior": 16491, "\u0120Pand": 16492, "\u0120outlined": 16493, "\u0120BO": 16494, "insula": 16495, "zilla": 16496, "ebook": 16497, "\u0120dare": 16498, "\u0120searched": 16499, "\u0120navigate": 16500, "Sn": 16501, "writing": 16502, "\u0120united": 16503, "Japan": 16504, "\u0120Hebrew": 16505, "\u0120flame": 16506, "\u0120relies": 16507, "\u0120catching": 16508, 
"\u0120Sho": 16509, "\u0120imprisonment": 16510, "\u0120pockets": 16511, "\u0120closure": 16512, "\u0120Fam": 16513, "tim": 16514, "adequ": 16515, "Activity": 16516, "\u0120recruiting": 16517, "\u0120WATCH": 16518, "\u0120Argentina": 16519, "dest": 16520, "\u0120apologize": 16521, "oro": 16522, "\u0120lacks": 16523, "\u0120tuned": 16524, "\u0120Griffin": 16525, "\u0120infamous": 16526, "\u0120celebrity": 16527, "sson": 16528, "\u0120----------------------------------------------------------------": 16529, "\u0120Isis": 16530, "\u0120Display": 16531, "\u0120credibility": 16532, "\u0120economies": 16533, "\u0120headline": 16534, "\u0120Cowboys": 16535, "\u0120indef": 16536, "\u0120lately": 16537, "\u0120incentives": 16538, "button": 16539, "\u0120Mob": 16540, "Aut": 16541, "\u0120resigned": 16542, "\u0120Om": 16543, "camp": 16544, "\u0120profiles": 16545, "\u0120schemes": 16546, "olphins": 16547, "ayed": 16548, "Clinton": 16549, "enh": 16550, "\u0120Yahoo": 16551, "\u0120abst": 16552, "\u0120ank": 16553, "suits": 16554, "\u0120wished": 16555, "\u0120Marco": 16556, "udden": 16557, "\u0120sphere": 16558, "\u0120Bishop": 16559, "\u0120incorporated": 16560, "\u0120Plant": 16561, "114": 16562, "\u0120hated": 16563, "pic": 16564, "\u0120donate": 16565, "\u0120lined": 16566, "\u0120beans": 16567, "\u0120stealing": 16568, "\u0120costume": 16569, "\u0120sheriff": 16570, "\u0120forty": 16571, "\u0120intact": 16572, "\u0120adapted": 16573, "\u0120travelling": 16574, "bart": 16575, "\u0120nicely": 16576, "\u0120dried": 16577, "\u0120scal": 16578, "osity": 16579, "NOTE": 16580, "\u0120Bh": 16581, "\u0120Broncos": 16582, "\u0120Ign": 16583, "\u0120intimate": 16584, "\u0120chemistry": 16585, "\u0120optimal": 16586, "Deb": 16587, "\u0120Generation": 16588, "\u0120],": 16589, "ichi": 16590, "\u0120Wii": 16591, "\u0120YOUR": 16592, "ventions": 16593, "Write": 16594, "\u0120popul": 16595, "unning": 16596, "\u0120Wor": 16597, "Vol": 16598, "\u0120queen": 16599, "heads": 16600, "KK": 16601, "\u0120analyze": 16602, "opic": 16603, "earchers": 16604, "\u0120dot": 16605, "legraph": 16606, "astically": 16607, "\u0120upgrades": 16608, "\u0120cares": 16609, "\u0120extending": 16610, "\u0120freeze": 16611, "\u0120inability": 16612, "\u0120organs": 16613, "\u0120pretend": 16614, "\u0120outlet": 16615, "113": 16616, "olan": 16617, "\u0120Mall": 16618, "uling": 16619, "talk": 16620, "\u0120expressing": 16621, "\u0120Always": 16622, "\u0120Begin": 16623, "files": 16624, "\u0120licenses": 16625, "%%": 16626, "\u0120Mitt": 16627, "\u0120filters": 16628, "\u0120Milwaukee": 16629, "GN": 16630, "\u0120unfold": 16631, "Mo": 16632, "\u0120nutrition": 16633, "ppo": 16634, "Bo": 16635, "\u0120founding": 16636, "\u0120undermine": 16637, "\u0120easiest": 16638, "\u0120Czech": 16639, "\u0120Mack": 16640, "\u0120sexuality": 16641, "\u0120Nixon": 16642, "Win": 16643, "\u0120Arn": 16644, "\u0120Kin": 16645, "\u00e3\u0124\u00a3": 16646, "icer": 16647, "\u0120fortun": 16648, "\u0120surfaces": 16649, "aghd": 16650, "\u0120carriers": 16651, "\u0120PART": 16652, "\u0120Tib": 16653, "\u0120interval": 16654, "\u0120frustrating": 16655, "\u0120Ship": 16656, "\u0120Armed": 16657, "ffe": 16658, "\u0120boats": 16659, "\u0120Abraham": 16660, "inis": 16661, "\u0120suited": 16662, "thread": 16663, "iov": 16664, "abul": 16665, "\u0120Venezuela": 16666, "\u0120tom": 16667, "super": 16668, "\u0120castle": 16669, "although": 16670, "ioxide": 16671, "eches": 16672, "\u0120evolutionary": 16673, "\u0120negotiate": 16674, "\u0120confronted": 16675, "Remember": 
16676, "\u0120170": 16677, "Such": 16678, "\u0120911": 16679, "mult": 16680, "\u0120Abyss": 16681, "urry": 16682, "kees": 16683, "spec": 16684, "\u0120Barbara": 16685, "\u0120belonging": 16686, "\u0120villain": 16687, "istani": 16688, "\u0120accountable": 16689, "\u0120portions": 16690, "\u0120Decl": 16691, "Ur": 16692, "\u0120Kate": 16693, "gre": 16694, "\u0120magazines": 16695, "UCK": 16696, "\u0120regulate": 16697, "omon": 16698, "\u0120Almost": 16699, "\u0120overview": 16700, "\u0120scram": 16701, "\u0120loot": 16702, "\u0120Fitz": 16703, "\u0120characteristic": 16704, "\u0120Snake": 16705, "say": 16706, "\u0120Rico": 16707, "\u0120trait": 16708, "\u0120Joined": 16709, "aucus": 16710, "\u0120adaptation": 16711, "\u0120Airlines": 16712, "\u0120archae": 16713, "\u0120Ide": 16714, "\u0120bikes": 16715, "\u0120literary": 16716, "\u0120influences": 16717, "\u0120Used": 16718, "Creat": 16719, "\u0120plea": 16720, "\u0120Defence": 16721, "\u0120Assass": 16722, "\u0120pond": 16723, "ULT": 16724, ")\"": 16725, "\u0120evaluated": 16726, "\u0120obtaining": 16727, "\u0120demographic": 16728, "\u0120vigil": 16729, "aley": 16730, "\u0120spouse": 16731, "\u0120Seahawks": 16732, "respons": 16733, "\u0120Belt": 16734, "umatic": 16735, "\u0120rises": 16736, "runner": 16737, "\u0120Michelle": 16738, "\u0120potent": 16739, "race": 16740, "\u0120PAC": 16741, "Find": 16742, "olesterol": 16743, "ISS": 16744, "\u0120Introduced": 16745, "resses": 16746, "ignment": 16747, "Os": 16748, "\u0120Tu": 16749, "\u0120Dex": 16750, "icides": 16751, "\u0120sparked": 16752, "\u0120Laura": 16753, "\u0120Bryant": 16754, "\u0120smiling": 16755, "\u0120Nexus": 16756, "\u0120defendants": 16757, "\u0120Catal": 16758, "\u0120dishes": 16759, "shaped": 16760, "\u0120prolong": 16761, "mt": 16762, "($": 16763, "\u00e3\u0122\u0124": 16764, "\u0120calculations": 16765, "\u0120Same": 16766, "\u0120piv": 16767, "HH": 16768, "\u0120cancelled": 16769, "\u0120grin": 16770, "\u0120territories": 16771, "istically": 16772, "Come": 16773, "\u0120Parent": 16774, "Project": 16775, "\u0120neglig": 16776, "\u0120Privacy": 16777, "\u0120ammo": 16778, "LECT": 16779, "olutely": 16780, "\u0120Epic": 16781, "\u0120misunder": 16782, "wal": 16783, "April": 16784, "mos": 16785, "pathy": 16786, "\u0120Carson": 16787, "\u0120albums": 16788, "\u0120Easy": 16789, "\u0120pistol": 16790, "<<": 16791, "\u0120\\(": 16792, "target": 16793, "help": 16794, "\u0120interpre": 16795, "conscious": 16796, "\u0120Housing": 16797, "\u0120Joint": 16798, "127": 16799, "\u0120beers": 16800, "science": 16801, "\u0120Firefox": 16802, "effective": 16803, "\u0120Cabin": 16804, "\u0120Okay": 16805, "\u0120Applic": 16806, "\u0120spacecraft": 16807, "\u0120SR": 16808, "vet": 16809, "\u0120Strange": 16810, "SB": 16811, "\u0120corps": 16812, "iberal": 16813, "efficient": 16814, "\u0120prevalence": 16815, "\u0120economists": 16816, "118": 16817, "Thread": 16818, "ordable": 16819, "ODE": 16820, "\u0120Cant": 16821, "=-=-": 16822, "ifiable": 16823, "\u0120Around": 16824, "\u0120pole": 16825, "\u0120willingness": 16826, "CLA": 16827, "\u0120Kid": 16828, "\u0120complement": 16829, "\u0120scattered": 16830, "\u0120inmates": 16831, "\u0120bleeding": 16832, "every": 16833, "\u0120queue": 16834, "\u0120Train": 16835, "\u0120hij": 16836, "\u0120melee": 16837, "pleted": 16838, "\u0120digit": 16839, "\u0120gem": 16840, "official": 16841, "\u0120lifting": 16842, "\u00d0\u00b5": 16843, "Requ": 16844, "itutes": 16845, "\u0120packaging": 16846, "\u0120Workers": 16847, "hran": 16848, "\u0120Lebanon": 
16849, "olesc": 16850, "\u0120punished": 16851, "\u0120Juan": 16852, "\u0120jam": 16853, "\u0120Document": 16854, "\u0120mapping": 16855, "icates": 16856, "\u0120inevitably": 16857, "\u0120vanilla": 16858, "\u0120Ton": 16859, "\u0120watches": 16860, "\u0120leagues": 16861, "\u0120initiated": 16862, "degree": 16863, "portion": 16864, "\u0120recalls": 16865, "\u0120ruin": 16866, "\u0120melt": 16867, "IAN": 16868, "\u0120hem": 16869, "Exp": 16870, "\u0120baking": 16871, "\u0120Colomb": 16872, "atible": 16873, "\u0120radius": 16874, "plug": 16875, "\u0120IF": 16876, "etically": 16877, "\u0120fict": 16878, "HER": 16879, "\u0120Tap": 16880, "atinum": 16881, "\u0120ink": 16882, "\u0120coh": 16883, "\u0120Wizard": 16884, "both": 16885, "tex": 16886, "\u0120spends": 16887, "\u0120Currently": 16888, "\u0120Pit": 16889, "\u0120neurons": 16890, "ignt": 16891, "\u0120rall": 16892, "\u0120buses": 16893, "building": 16894, "\u0120adjustments": 16895, "\u0120cried": 16896, "iblical": 16897, "atted": 16898, "\u0120Zion": 16899, "\u0120Matter": 16900, "\u0120meditation": 16901, "\u0120Dennis": 16902, "\u0120ours": 16903, "\u0120Tab": 16904, "\u0120rankings": 16905, "ortal": 16906, "\u0120advers": 16907, "\u0120surrender": 16908, "\u0120Gob": 16909, "cium": 16910, "omas": 16911, "imeter": 16912, "\u0120multiplayer": 16913, "\u0120heroin": 16914, "\u0120optimistic": 16915, "\u0120indicator": 16916, "\u0120Brig": 16917, "\u0120grocery": 16918, "\u0120applicant": 16919, "\u0120Rocket": 16920, "vid": 16921, "Exception": 16922, "pent": 16923, "\u0120organizing": 16924, "\u0120encounters": 16925, "\u0120TOD": 16926, "\u0120jewel": 16927, "Save": 16928, "\u0120Christie": 16929, "\u0120heating": 16930, "\u0120lazy": 16931, "\u0120CP": 16932, "\u0120cousin": 16933, "Config": 16934, "\u0120regener": 16935, "\u0120nearest": 16936, "\u0120achieving": 16937, "ENS": 16938, "throw": 16939, "\u0120Richmond": 16940, "antle": 16941, "2002": 16942, "\u0120anten": 16943, "bird": 16944, "133": 16945, "\u0120narc": 16946, "raint": 16947, "unny": 16948, "\u0120Hispanic": 16949, "ournaments": 16950, "\u0120prophe": 16951, "\u0120Thailand": 16952, "\u0120Ti": 16953, "\u0120injection": 16954, "\u0120inherit": 16955, "ravis": 16956, "\u0120medi": 16957, "\u0120whoever": 16958, "\u0120DEBUG": 16959, "GP": 16960, "\u0120Hud": 16961, "Card": 16962, "prom": 16963, "\u0120por": 16964, "\u0120overhead": 16965, "Law": 16966, "\u0120violate": 16967, "\u0120heated": 16968, "\u0120descriptions": 16969, "\u0120achievements": 16970, "\u0120Beer": 16971, "\u0120Quant": 16972, "Was": 16973, "\u0120eighth": 16974, "\u0120Iv": 16975, "\u0120specialized": 16976, "UPDATE": 16977, "\u0120Delta": 16978, "Pop": 16979, "Jul": 16980, "\u0120Ask": 16981, "ophy": 16982, "\u0120newsletters": 16983, "\u0120Tool": 16984, "\u0120gard": 16985, "\u0120Confeder": 16986, "\u0120GMT": 16987, "\u0120Abbott": 16988, "\u0120immunity": 16989, "\u0120VM": 16990, "Islam": 16991, "\u0120implicit": 16992, "wd": 16993, "\u01201944": 16994, "ravity": 16995, "ometric": 16996, "\u0120surviving": 16997, "urai": 16998, "\u0120Prison": 16999, "\u0120rust": 17000, "\u0120Sketch": 17001, "\u0120bees": 17002, "\u0120Theory": 17003, "\u0120merit": 17004, "Tex": 17005, "chat": 17006, "\u0120mim": 17007, "\u0120paste": 17008, "\u0120Koch": 17009, "\u0120ignorance": 17010, "\u0120Shoot": 17011, "\u0120basement": 17012, "United": 17013, "\u0120Advis": 17014, "height": 17015, "\u0120foster": 17016, "\u0120detain": 17017, "information": 17018, "\u0120neural": 17019, "';": 17020, 
"\u0120proves": 17021, "allery": 17022, "\u0120invitation": 17023, "umbers": 17024, "\u0120cattle": 17025, "\u0120bicycle": 17026, "zi": 17027, "\u0120consultant": 17028, "\u0120apology": 17029, "\u0120Tiger": 17030, "\u0120123": 17031, "999": 17032, "\u0120individually": 17033, "rt": 17034, "igion": 17035, "\u0120Brazilian": 17036, "\u0120disturb": 17037, "\u0120entrepreneurs": 17038, "\u0120forests": 17039, "cerpt": 17040, "plates": 17041, "pher": 17042, "clipse": 17043, "\u0120twitter": 17044, "\u0120acids": 17045, "ographical": 17046, "hum": 17047, "\u0120Bald": 17048, "ifully": 17049, "\u0120compiler": 17050, "\u0120DA": 17051, "\u0120donor": 17052, "asi": 17053, "\u0120tribal": 17054, "lash": 17055, "\u0120Config": 17056, "\u0120applicants": 17057, "\u0120salaries": 17058, "135": 17059, "Putin": 17060, "\u0120Focus": 17061, "irs": 17062, "\u0120misconduct": 17063, "\u0120Haz": 17064, "\u0120eaten": 17065, "Mobile": 17066, "Muslim": 17067, "\u0120Marcus": 17068, "viol": 17069, "\u0120favorable": 17070, "\u0120stub": 17071, "adin": 17072, "\u0120Hob": 17073, "\u0120faithful": 17074, "\u0120electronics": 17075, "\u0120vacuum": 17076, "wait": 17077, "backed": 17078, "economic": 17079, "dist": 17080, "\u0120tenure": 17081, "\u0120sincere": 17082, "\u0120Together": 17083, "\u0120Wave": 17084, "\u0120progression": 17085, "\u0120denying": 17086, "\u0120distress": 17087, "braska": 17088, "third": 17089, "\u0120mixing": 17090, "\u0120colonial": 17091, "\u0120privately": 17092, "\u0120unrest": 17093, "aternity": 17094, "\u0120premises": 17095, "anti": 17096, "gregation": 17097, "\u0120licence": 17098, "\u0120Hind": 17099, "\u0120Samuel": 17100, "\u0120convincing": 17101, "\u0120Ace": 17102, "\u0120Rust": 17103, "\u0120Netanyahu": 17104, "\u0120handles": 17105, "\u0120Patch": 17106, "oriented": 17107, "aho": 17108, "\u0120Gonz": 17109, "\u0120hackers": 17110, "claimer": 17111, "\u0120customs": 17112, "\u0120Gran": 17113, "fighters": 17114, "\u0120luc": 17115, "\u0120manuscript": 17116, "arenthood": 17117, "\u0120devil": 17118, "\u0120warriors": 17119, "\u0120offenders": 17120, "William": 17121, "\u0120holidays": 17122, "\u0120nightmare": 17123, "\u0120lever": 17124, "ifferent": 17125, "Stat": 17126, "\u0120exhibition": 17127, "puted": 17128, "\u0120Pure": 17129, "\u0120alpha": 17130, "\u0120enthusiasm": 17131, "\u0120Representatives": 17132, "EAR": 17133, "\u0120Typ": 17134, "\u0120wheat": 17135, "\u0120Alf": 17136, "\u0120correction": 17137, "\u0120evangel": 17138, "ATT": 17139, "Miss": 17140, "\u0120soup": 17141, "\u0120implied": 17142, "param": 17143, "\u0120sexy": 17144, "\u0120Lux": 17145, "\u0120republic": 17146, "patch": 17147, "ablish": 17148, "\u0120icons": 17149, "\u0120fathers": 17150, "\u0120GET": 17151, "\u0120Carib": 17152, "\u0120regulated": 17153, "\u0120Cohen": 17154, "\u0120Bobby": 17155, "\u0120ner": 17156, "\u0120bent": 17157, "ventory": 17158, "\u0120Along": 17159, "\u0120EST": 17160, "\u0120Wallace": 17161, "\u0120murders": 17162, "rise": 17163, "kell": 17164, "\u0120Commonwealth": 17165, "\u0120nasty": 17166, "eta": 17167, "\u0120MIT": 17168, "\u0120administered": 17169, "\u0120genuinely": 17170, "Editor": 17171, "nick": 17172, "\u0120hydro": 17173, "********************************": 17174, "\u0120Ble": 17175, "\u0120fines": 17176, "\u0120gorge": 17177, "ausible": 17178, "rh": 17179, "\u0120apple": 17180, "mentioned": 17181, "\u0120rope": 17182, "otyp": 17183, "HR": 17184, "\u0120disappointing": 17185, "\u0120cage": 17186, "nik": 17187, "\u0120doubts": 17188, 
"\u0120FREE": 17189, "prints": 17190, "\u0120MUST": 17191, "\u0120vendors": 17192, "\u0120Inqu": 17193, "\u0120liberals": 17194, "\u0120contractor": 17195, "\u0120upside": 17196, "children": 17197, "\u0120tricky": 17198, "\u0120regulators": 17199, "charged": 17200, "liter": 17201, "\u0120***": 17202, "\u0120rebell": 17203, "lang": 17204, "\u0120locals": 17205, "\u0120physicians": 17206, "\u0120hey": 17207, "arse": 17208, "tm": 17209, "\u0120Lex": 17210, "\u0120behavioral": 17211, "successful": 17212, "FX": 17213, "\u0120brick": 17214, "ovic": 17215, "\u0120conform": 17216, "\u0120reviewing": 17217, "\u0120insights": 17218, "\u0120biology": 17219, "\u0120Remove": 17220, "\u0120Extra": 17221, "\u0120committing": 17222, "induced": 17223, "ignty": 17224, "igm": 17225, "\u0120atomic": 17226, "Common": 17227, "\u0120EM": 17228, "\u0120Pere": 17229, "\u0120Items": 17230, "eh": 17231, "\u0120preserved": 17232, "\u0120Hood": 17233, "\u0120prisoner": 17234, "\u0120bankruptcy": 17235, "\u0120gren": 17236, "ushes": 17237, "\u0120exploitation": 17238, "\u0120signatures": 17239, "\u0120finan": 17240, "],\"": 17241, "\u0120MR": 17242, "\u0120meg": 17243, "remlin": 17244, "\u0120musicians": 17245, "\u0120selecting": 17246, "\u0120examining": 17247, "INK": 17248, "lated": 17249, "Hi": 17250, "\u0120artic": 17251, "\u0120pets": 17252, "\u0120impair": 17253, "\u0120MAN": 17254, "\u0120tablets": 17255, "include": 17256, "Range": 17257, "\u0120caut": 17258, "\u0120logs": 17259, "\u0120mounting": 17260, "\u0120unaware": 17261, "\u0120dynamics": 17262, "\u0120Palestine": 17263, "\u0120Quarter": 17264, "\u0120Purple": 17265, "\u0120ma": 17266, "\u0120Import": 17267, "\u0120collections": 17268, "ciation": 17269, "\u0120successor": 17270, "\u0120clone": 17271, "\u0120aiming": 17272, "\u0120possessed": 17273, "\u0120sticking": 17274, "\u0120shaking": 17275, "\u0120locate": 17276, "\u0120Hockey": 17277, "Turn": 17278, "170": 17279, "\u0120fifteen": 17280, "\u0120Harrison": 17281, "\u0120continuously": 17282, "\u0120TC": 17283, "\u0120Valent": 17284, "\u0120Rescue": 17285, "\u0120bypass": 17286, "amount": 17287, "\u0120mast": 17288, "\u0120protects": 17289, "\u0120artistic": 17290, "\u0120sometime": 17291, "\u0120shoe": 17292, "\u0120shouted": 17293, "ificant": 17294, "etitive": 17295, "\u0120Register": 17296, "\u0120Jin": 17297, "\u0120concentrated": 17298, "lington": 17299, "onies": 17300, "\u0120generator": 17301, "yrim": 17302, "\u0120Armen": 17303, "\u0120clearing": 17304, "ido": 17305, "\u0120TW": 17306, "alph": 17307, "\u0120ladies": 17308, "Hard": 17309, "\u0120dialog": 17310, "\u0120inputs": 17311, "\u00e6\u013e": 17312, "\u0120poses": 17313, "\u0120slots": 17314, "\u0120Premium": 17315, "\u0120leaks": 17316, "\u0120bosses": 17317, "\u0120113": 17318, "course": 17319, "Acc": 17320, "\u0120Newton": 17321, "\u0120Austria": 17322, "\u0120Mage": 17323, "\u0120teaches": 17324, "abad": 17325, "\u0120wears": 17326, "\u0120cyl": 17327, "\u0120curse": 17328, "\u0120Sales": 17329, "\u0120Wings": 17330, "\u0120psy": 17331, "\u0120gaps": 17332, "\u0120Iceland": 17333, "\u0120Pinterest": 17334, "\u0120landlord": 17335, "\u0120definitions": 17336, "\u0120Ker": 17337, "\u0120sufficiently": 17338, "\u0120Pence": 17339, "\u0120Architect": 17340, "\u0120surpass": 17341, "\u0120114": 17342, "\u0120superhero": 17343, "\u0120Disease": 17344, "\u0120priests": 17345, "\u0120Culture": 17346, "\u0120definitive": 17347, "\u0120secretly": 17348, "\u0120Dance": 17349, "install": 17350, "chief": 17351, "\u0120Jessica": 17352, "Would": 
17353, "Updated": 17354, "\u0120locker": 17355, "\u0120Kay": 17356, "\u0120memorial": 17357, "\u00e8\u00a6": 17358, "fat": 17359, "\u0120disgu": 17360, "\u0120flavors": 17361, "\u0120Baseball": 17362, "\u0120Resistance": 17363, "\u0120kicks": 17364, "\u0120env": 17365, "\u0120teenagers": 17366, "Dark": 17367, "\u0120CAR": 17368, "\u0120halt": 17369, "\u0120LG": 17370, "\u0120Gabriel": 17371, "\u0120fever": 17372, "\u0120satur": 17373, "\u0120mall": 17374, "\u0120affiliate": 17375, "\u0120Sleep": 17376, "\u0120Specific": 17377, "\u0120Vel": 17378, "\u0120jar": 17379, "\u0120Sacred": 17380, "\u0120Edwards": 17381, "\u0120ACL": 17382, "\u0120retained": 17383, "\u0120Giant": 17384, "\u0120limitation": 17385, "inces": 17386, "\u0120refusal": 17387, "\u0120Tale": 17388, "\u0120Butler": 17389, "\u0120accidents": 17390, "\u0120CSS": 17391, "\u0120imported": 17392, "\u0120Copy": 17393, "\u00ce\u00b1": 17394, "ERT": 17395, "zel": 17396, "\u0120divisions": 17397, "hots": 17398, "\u0120Alb": 17399, "\u0120DS": 17400, "Loader": 17401, "Washington": 17402, "atisf": 17403, "\u0120Creative": 17404, "\\.": 17405, "\u0120Autom": 17406, "redict": 17407, "\u0120receptor": 17408, "\u0120Carlos": 17409, "Method": 17410, "oka": 17411, "\u0120malicious": 17412, "\u0120stepping": 17413, ",[": 17414, "\u0120Dad": 17415, "\u0120attraction": 17416, "\u0120Effects": 17417, "\u0120Pirate": 17418, "\u0120Cer": 17419, "\u0120Industry": 17420, "\u0120Rud": 17421, "\u0120charter": 17422, "\u0120dining": 17423, "\u0120insists": 17424, "\u0120configure": 17425, "\u0120(#": 17426, "\u0120Simple": 17427, "\u0120Scroll": 17428, "UTC": 17429, "175": 17430, "\u0120Kon": 17431, "\u0120marketplace": 17432, "\u0120\u00e3\u0124": 17433, "\u0120refres": 17434, "\u0120gates": 17435, "erred": 17436, "\u0120Pod": 17437, "\u0120behave": 17438, "Frank": 17439, "node": 17440, "\u0120endorsed": 17441, "hett": 17442, "asive": 17443, "\u0120Homeland": 17444, "\u0120rides": 17445, "\u0120Leave": 17446, "erness": 17447, "\u0120flooding": 17448, "AFP": 17449, "\u0120risen": 17450, "\u0120continually": 17451, "\u0120unanim": 17452, "\u0120Contract": 17453, "\u0120Pas": 17454, "\u0120guided": 17455, "\u0120Chile": 17456, "bd": 17457, "\u0120succ": 17458, "ptic": 17459, "\u0120committees": 17460, "\u0120Luther": 17461, "\u0120Anyone": 17462, "\u0120sab": 17463, "124": 17464, "\u0120pixel": 17465, "\u0120Bak": 17466, "\u0120Tag": 17467, "\u0120Bennett": 17468, "Enter": 17469, "small": 17470, "\u0120Presidential": 17471, "\u0120pul": 17472, "\u0120contrace": 17473, "archive": 17474, "\u0120coastal": 17475, "\u0120Kids": 17476, "192": 17477, "\u00e2\u0122\u00b2": 17478, "icky": 17479, "INGTON": 17480, "\u0120wolf": 17481, "\u0120Stalin": 17482, "Tur": 17483, "idget": 17484, "amas": 17485, "\u0120Unless": 17486, "\u0120sponsor": 17487, "\u0120morph": 17488, "\u0120Choose": 17489, "\u0120runner": 17490, "\u0120unbel": 17491, "\u0120mud": 17492, "\u0120Mana": 17493, "\u0120dubbed": 17494, "\u0120godd": 17495, "urers": 17496, "window": 17497, "\u0120relied": 17498, "\u0120celebrating": 17499, "osc": 17500, "\u0120135": 17501, "\u0120lobbying": 17502, "\u0120incomplete": 17503, "\u0120restriction": 17504, "\u0120incap": 17505, "itus": 17506, "\u0120expectation": 17507, "\u0120Apollo": 17508, "\u0120intens": 17509, "\u0120sync": 17510, "GH": 17511, "\u0120manipulation": 17512, "BY": 17513, "\u0120spear": 17514, "\u0120breasts": 17515, "\u0120volcan": 17516, "ilia": 17517, "Material": 17518, "\u0120formats": 17519, "\u0120Bast": 17520, "\u0120parliamentary": 
17521, "\u0120snake": 17522, "\u0120servants": 17523, "\u0120Trudeau": 17524, "\u0120Grim": 17525, "\u0120Arabic": 17526, "\u0120SCP": 17527, "\u0120Boys": 17528, "station": 17529, "\u0120prospective": 17530, "orde": 17531, "initialized": 17532, "\u0120bored": 17533, "ABLE": 17534, "\u0120accessed": 17535, "\u0120taxi": 17536, "\u0120Shell": 17537, "aiden": 17538, "ursed": 17539, "inates": 17540, "\u0120Insurance": 17541, "\u0120Pete": 17542, "September": 17543, "650": 17544, "\u0120adventures": 17545, "\u0120Cover": 17546, "\u0120tribute": 17547, "\u0120sketch": 17548, "\u0120empower": 17549, "\u0120\u00d8": 17550, "\u0120Glenn": 17551, "\u0120Daw": 17552, "=\\\"": 17553, "\u0120Politics": 17554, "\u0120guides": 17555, "\u0120dioxide": 17556, "\u0120Gore": 17557, "\u0120Bright": 17558, "\u0120Sierra": 17559, "\u0120valued": 17560, "cond": 17561, "\u0120pointer": 17562, "Select": 17563, "\u0120risky": 17564, "\u0120absorb": 17565, "images": 17566, "\u0120refuses": 17567, "\u0120bonuses": 17568, "___": 17569, "\u0120hilar": 17570, "\u0120Features": 17571, "220": 17572, "\u0120Collector": 17573, "Foot": 17574, "\u01201964": 17575, "culus": 17576, "\u0120dawn": 17577, "\u0120workout": 17578, "\u0120LO": 17579, "\u0120philosophical": 17580, "\u0120Sandy": 17581, "\u0120Youth": 17582, "\u0120liable": 17583, "Af": 17584, "blue": 17585, "\u0120overturn": 17586, "lessness": 17587, "\u0120Tribune": 17588, "\u0120Ing": 17589, "\u0120factories": 17590, "\u0120catches": 17591, "\u0120prone": 17592, "\u0120matrix": 17593, "\u0120login": 17594, "\u0120inacc": 17595, "\u0120exert": 17596, "sys": 17597, "\u0120needle": 17598, "\u0120Qur": 17599, "\u0120notified": 17600, "oulder": 17601, "tx": 17602, "\u0120reminds": 17603, "\u0120publishers": 17604, "\u0120nort": 17605, "\u0120git": 17606, "\u0120flies": 17607, "\u0120Emily": 17608, "\u0120flowing": 17609, "\u0120Alien": 17610, "\u0120Strateg": 17611, "\u0120hardest": 17612, "\u0120modification": 17613, "API": 17614, "\u0120MY": 17615, "\u0120crashes": 17616, "stairs": 17617, "number": 17618, "\u0120urging": 17619, "channel": 17620, "\u0120Falcon": 17621, "\u0120inhabitants": 17622, "\u0120terrifying": 17623, "\u0120utilize": 17624, "\u0120banner": 17625, "\u0120cigarettes": 17626, "\u0120senses": 17627, "\u0120Holmes": 17628, "\u0120practition": 17629, "\u0120Phillips": 17630, "otto": 17631, "\u0120compile": 17632, "Model": 17633, "\u0120Ko": 17634, "\u0120[]": 17635, "Americans": 17636, "\u0120Terms": 17637, "\u0120medications": 17638, "\u0120Ana": 17639, "\u0120fundamentally": 17640, "\u0120Notice": 17641, "\u0120weaker": 17642, "\u01200000": 17643, "\u0120garlic": 17644, "\u0120outbreak": 17645, "\u0120economist": 17646, "\u0120Birth": 17647, "\u0120obstacles": 17648, "arcer": 17649, "\u0120Orthodox": 17650, "\u0120placebo": 17651, "\u0120Crew": 17652, "aspberry": 17653, "\u0120Angels": 17654, "\u0120discharge": 17655, "\u0120destructive": 17656, "117": 17657, "\u0120Rising": 17658, "\u0120dairy": 17659, "late": 17660, "\u0120collision": 17661, "\u0120Tigers": 17662, "eanor": 17663, "ocumented": 17664, "\u0120Invalid": 17665, "\u0120dont": 17666, "\u0120Liter": 17667, "\u0120Va": 17668, "\u0120hydrogen": 17669, "\u0120variants": 17670, "\u0120Browns": 17671, "\u01201965": 17672, "\u0120indigenous": 17673, "\u0120trades": 17674, "\u0120remainder": 17675, "\u0120swept": 17676, "\u0120Impact": 17677, "\u0120redist": 17678, "\u0120unint": 17679, "graduate": 17680, "\u00e3\u0125\u0137": 17681, "\u0120WILL": 17682, "\u00e3\u0123\u00ae\u00e7": 17683, 
"\u0120Critical": 17684, "\u0120fisher": 17685, "\u0120vicious": 17686, "\u0120reversed": 17687, "Year": 17688, "\u0120Sox": 17689, "\u0120shootings": 17690, "\u0120filming": 17691, "\u0120touchdowns": 17692, "aires": 17693, "mel": 17694, "\u0120grandfather": 17695, "\u0120affection": 17696, "ingle": 17697, "\u0120overly": 17698, "Additional": 17699, "\u0120supreme": 17700, "\u0120Grad": 17701, "\u0120sporting": 17702, "\u0120mercy": 17703, "\u0120Brooks": 17704, "ounty": 17705, "\u0120performs": 17706, "\u0120tightly": 17707, "\u0120demons": 17708, "\u0120killings": 17709, "\u0120faction": 17710, "\u0120Nova": 17711, "auts": 17712, "\u0120undoubtedly": 17713, "arin": 17714, "\u0120underway": 17715, "rak": 17716, "\u0120liv": 17717, "\u0120Region": 17718, "\u0120briefing": 17719, "sers": 17720, "cloud": 17721, "\u0120Mik": 17722, "usp": 17723, "\u0120prediction": 17724, "azor": 17725, "\u0120portable": 17726, "\u0120Gand": 17727, "\u0120presenting": 17728, "\u01201080": 17729, "\u00c2\u00bb": 17730, "ushi": 17731, "\u0120Spark": 17732, "thereum": 17733, "\u0120justification": 17734, "\u0120Ny": 17735, "\u0120contractors": 17736, "mingham": 17737, "\u0120Style": 17738, "\u00e5\u0127": 17739, "\u0120Chronicles": 17740, "\u0120Picture": 17741, "\u0120proving": 17742, "\u0120wives": 17743, "sett": 17744, "\u0120molecules": 17745, "\u0120Fairy": 17746, "\u0120consisting": 17747, "\u0120pier": 17748, "alone": 17749, "inition": 17750, "\u0120nucle": 17751, "json": 17752, "\u0120gotta": 17753, "\u0120mobil": 17754, "\u0120verbal": 17755, "arium": 17756, "\u0120monument": 17757, "ucked": 17758, "\u0120256": 17759, "Tech": 17760, "minecraft": 17761, "\u0120Track": 17762, "\u0120tile": 17763, "\u0120compatibility": 17764, "asis": 17765, "\u0120sadd": 17766, "\u0120instructed": 17767, "\u0120Mueller": 17768, "\u0120lethal": 17769, "\u0120hormone": 17770, "\u0120orche": 17771, "else": 17772, "\u0120skelet": 17773, "\u0120entertaining": 17774, "\u0120minimize": 17775, "again": 17776, "\u0120undergo": 17777, "\u0120constraints": 17778, "\u0120cigarette": 17779, "\u0120Islamist": 17780, "\u0120travels": 17781, "\u0120Panthers": 17782, "lings": 17783, "Care": 17784, "\u0120lawsuits": 17785, "uras": 17786, "\u0120cryst": 17787, "\u0120lowered": 17788, "\u0120aerial": 17789, "\u0120combinations": 17790, "\u0120haun": 17791, "\u0120cha": 17792, "\u0120vine": 17793, "\u0120quantities": 17794, "\u0120linking": 17795, "bank": 17796, "\u0120soy": 17797, "Bill": 17798, "\u0120Angela": 17799, "\u0120recipient": 17800, "\u0120Protest": 17801, "\u0120socket": 17802, "\u0120solidarity": 17803, "\u0120\u00e2\u0128": 17804, "mill": 17805, "\u0120varies": 17806, "\u0120Pakistani": 17807, "Dragon": 17808, "\u0120une": 17809, "\u0120horizon": 17810, "\u00c2\u0142\u00c2\u0142\u00c2\u0142\u00c2\u0142\u00c2\u0142\u00c2\u0142\u00c2\u0142\u00c2\u0142": 17811, "\u0120provinces": 17812, "\u0120frankly": 17813, "\u0120enacted": 17814, "notes": 17815, "['": 17816, "\u0120192": 17817, "ocracy": 17818, "\u0120endorsement": 17819, "\u0120overtime": 17820, "True": 17821, "Lab": 17822, "licted": 17823, "\u0120DNC": 17824, "\u0120beats": 17825, "\u0120Jamie": 17826, "152": 17827, "\u0120INT": 17828, "Contact": 17829, "\u0120accounted": 17830, "hash": 17831, "\u0120Packers": 17832, "pires": 17833, "\u0120lesbian": 17834, "\u0120amendments": 17835, "\u0120hopeful": 17836, "\u0120Finland": 17837, "\u0120spotlight": 17838, "\u0120configured": 17839, "\u0120troubled": 17840, "\u0120gaze": 17841, "\u0120Calgary": 17842, "\u0120reliability": 
17843, "\u0120insurg": 17844, "swer": 17845, "buy": 17846, "\u0120Skin": 17847, "\u0120pixels": 17848, "\u0120handgun": 17849, "\u0120paras": 17850, "\u0120categor": 17851, "\u0120EL": 17852, "\u0120Rex": 17853, "Indeed": 17854, "\u0120kinda": 17855, "\u0120conjunction": 17856, "\u0120Bryan": 17857, "\u0120Manufact": 17858, "yang": 17859, "Plus": 17860, "SQL": 17861, "ishment": 17862, "\u0120dominate": 17863, "\u0120nail": 17864, "\u0120oath": 17865, "\u0120erupt": 17866, "\u0120Fine": 17867, "itbart": 17868, "\u0120Chip": 17869, "\u0120Abd": 17870, "\u0120Nam": 17871, "\u0120buyer": 17872, "\u0120dissent": 17873, "Leaks": 17874, "Contin": 17875, "\u0120rider": 17876, "\u0120Someone": 17877, "\u0120illusion": 17878, "cin": 17879, "\u0120Boeing": 17880, "\u0120inadequ": 17881, "ovation": 17882, "iants": 17883, "\u0120rebuild": 17884, "450": 17885, "\u0120Destiny": 17886, "SW": 17887, "\u0120Till": 17888, "Hit": 17889, "iaz": 17890, "\u0120Bangl": 17891, "achers": 17892, "\u0120Reform": 17893, "\u0120segments": 17894, "\u0120systematic": 17895, "dc": 17896, "\u0120Conservatives": 17897, "\u0120portal": 17898, "hor": 17899, "\u0120Dragonbound": 17900, "\u0120dragged": 17901, "omo": 17902, "\u0120thee": 17903, "advert": 17904, "\u0120Reports": 17905, "\u0120Et": 17906, "\u0120barrels": 17907, "August": 17908, "\u0120comparisons": 17909, "\u0120hex": 17910, "\u0120anthrop": 17911, "\"[": 17912, "borough": 17913, "abi": 17914, "\u0120pictured": 17915, "playing": 17916, "\u0120Address": 17917, "\u0120Mirror": 17918, "Smith": 17919, "\u0120tires": 17920, "\u0120NPR": 17921, "AAAA": 17922, "\u0120classification": 17923, "\u0120Than": 17924, "\u0120Harm": 17925, "\u0120RA": 17926, "\u0120rejection": 17927, "mination": 17928, "\u0120ranged": 17929, "\u0120Falls": 17930, "DI": 17931, "Host": 17932, "\u00e3\u0124\u00b4": 17933, "\u0120Example": 17934, "listed": 17935, "thirds": 17936, "\u0120safegu": 17937, "brand": 17938, "\u0120probable": 17939, "Canada": 17940, "ITION": 17941, "\u0120Qaeda": 17942, "\u0120chick": 17943, "\u0120imports": 17944, "hit": 17945, "loc": 17946, "WW": 17947, "\u0120blew": 17948, "\u0120anytime": 17949, "\u0120wholes": 17950, "iked": 17951, "\u0120calculation": 17952, "create": 17953, "\u0120Ori": 17954, "\u0120upgraded": 17955, "\u0120appar": 17956, "utory": 17957, "\u0120Mol": 17958, "Brit": 17959, "\u0120Jong": 17960, "INAL": 17961, "\u0120Starting": 17962, "\u0120dice": 17963, "urtle": 17964, "\u0120relying": 17965, "closure": 17966, "\u0120profitable": 17967, "\u0120slaughter": 17968, "\u0120Manual": 17969, "caster": 17970, "\u0120\"$": 17971, "\u0120feather": 17972, "\u0120Simply": 17973, "ieves": 17974, "\u0120deterior": 17975, "\u0120PCI": 17976, "\u0120stamp": 17977, "\u0120flaws": 17978, "\u0120shade": 17979, "hammer": 17980, "\u0120passport": 17981, "\u0120conting": 17982, "amel": 17983, "\u0120observers": 17984, "\u0120neglect": 17985, "\u0120RB": 17986, "\u0120Brotherhood": 17987, "\u0120skeptical": 17988, "family": 17989, "usk": 17990, "\u0120emotionally": 17991, "\u00e2\u013b": 17992, "\u0120Beta": 17993, "asonable": 17994, "idity": 17995, "\u0120Mul": 17996, "\u0120kicking": 17997, "\u0120Carm": 17998, "ollah": 17999, "VERTIS": 18000, "\u0120Athen": 18001, "\u0120ladder": 18002, "\u0120Bullet": 18003, "\u00e5\u00a3": 18004, "0001": 18005, "\u0120Wildlife": 18006, "\u0120Mask": 18007, "\u0120Nan": 18008, "Rev": 18009, "\u0120unacceptable": 18010, "legal": 18011, "\u0120crowded": 18012, "agi": 18013, "\u0120Cox": 18014, "je": 18015, "\u0120morality": 18016, 
"\u0120fuels": 18017, "\u0120cables": 18018, "\u0120mankind": 18019, "\u0120Caribbean": 18020, "\u0120anchor": 18021, "\u0120byte": 18022, "\u0120Often": 18023, "\u0120Oz": 18024, "\u0120crafted": 18025, "\u0120historian": 18026, "\u0120Wu": 18027, "\u0120towers": 18028, "\u0120Citizens": 18029, "\u0120helm": 18030, "\u0120credentials": 18031, "\u0120singular": 18032, "\u0120Jesse": 18033, "\u0120tackles": 18034, "\u0120contempt": 18035, "\u0120afore": 18036, "\u0120Shadows": 18037, "\u0120nil": 18038, "\u0120urgent": 18039, "apple": 18040, "blood": 18041, "\u0120von": 18042, "\u0120offline": 18043, "\u0120breathe": 18044, "\u0120jumps": 18045, "\u0120irrelevant": 18046, "oxic": 18047, "omal": 18048, "important": 18049, "Jim": 18050, "\u0120gloves": 18051, "arming": 18052, "depth": 18053, "\u0120talents": 18054, "ookie": 18055, "\u0120SB": 18056, "\u0120palm": 18057, "uffs": 18058, "esta": 18059, "IGH": 18060, "\u0120canon": 18061, "\u0120Verizon": 18062, "\u0120Ple": 18063, "\u0120coupled": 18064, "velt": 18065, "\u0120fundraising": 18066, "\u0120Getting": 18067, "\u0120DLC": 18068, "\u0120mathematical": 18069, "\u0120HS": 18070, "\u0120Cardinals": 18071, "telling": 18072, "\u0120sponsors": 18073, "\u0120\u00cf": 18074, "\u0120Bulls": 18075, "option": 18076, "\u0120propose": 18077, "\u0120memorable": 18078, "\u0120embraced": 18079, "\u0120declining": 18080, "Health": 18081, "eda": 18082, "\u0120};": 18083, "\u0120spam": 18084, "mile": 18085, "\u0120pitcher": 18086, "\u0120Eight": 18087, "\u0120caring": 18088, "utic": 18089, "role": 18090, "\u0120airline": 18091, "ernandez": 18092, "\u0120Athlet": 18093, "\u0120certification": 18094, "uxe": 18095, "riger": 18096, "\u0120empir": 18097, "\u0120sensation": 18098, "\u0120dism": 18099, "\u0120bolt": 18100, "\u0120evolve": 18101, "House": 18102, "\u0120consultation": 18103, "\u0120Duty": 18104, "\u0120touches": 18105, "\u0120Nathan": 18106, "\u0120faint": 18107, "had": 18108, "\"(": 18109, "\u0120Consumer": 18110, "\u0120Extreme": 18111, "\u0120127": 18112, "\u0120Herm": 18113, "\u0120Sacrament": 18114, "izoph": 18115, "\u0120anxious": 18116, "ulously": 18117, "\u0120socially": 18118, "\u0120UTC": 18119, "\u0120solving": 18120, "\u0120Letter": 18121, "History": 18122, "educ": 18123, "Price": 18124, "));": 18125, "\u0120reload": 18126, "amic": 18127, "\u0120pork": 18128, "\u0120discourse": 18129, "\u0120tournaments": 18130, "airo": 18131, "\u0120Kur": 18132, "\u0120Costa": 18133, "\u0120violating": 18134, "\u0120interfere": 18135, "\u0120recreational": 18136, "uffle": 18137, "\u0120speeches": 18138, "\u0120needing": 18139, "\u0120remembers": 18140, "\u0120credited": 18141, "nia": 18142, "focused": 18143, "amera": 18144, "\u0120bru": 18145, "umbs": 18146, "\u0120Cuban": 18147, "\u0120preceding": 18148, "\u0120nonsense": 18149, "acial": 18150, "\u0120smartphones": 18151, "\u0120Stories": 18152, "Sports": 18153, "\u0120Emergency": 18154, "ouncing": 18155, "efined": 18156, "\u0120ber": 18157, "\u0120consulting": 18158, "\u0120masters": 18159, "heastern": 18160, ".\"[": 18161, "\u0120Running": 18162, "\u0120suscept": 18163, "\u0120Feng": 18164, "America": 18165, "prises": 18166, "stitial": 18167, "\u0120Weekly": 18168, "\u0120Greater": 18169, "modules": 18170, "ifter": 18171, "Graphics": 18172, "uler": 18173, "\u0120wholly": 18174, "\u0120suppress": 18175, "\u0120concealed": 18176, "\u0120happily": 18177, "\u0120accepts": 18178, "\u0120Enjoy": 18179, "\u0120rivers": 18180, "\u0120Except": 18181, "225": 18182, "\u0120NHS": 18183, "\u0120McConnell": 
18184, "\u0120pussy": 18185, "ferred": 18186, "utable": 18187, "\u0120attain": 18188, "\u0120>=": 18189, "\u0120deposits": 18190, "rophic": 18191, "\u0120notorious": 18192, "\u0120Shaw": 18193, "ilitation": 18194, "\u0120epidemic": 18195, "allic": 18196, "\u0120smallest": 18197, "ovich": 18198, "\u0120accessories": 18199, "perties": 18200, "\u0120surplus": 18201, "\u0120Mech": 18202, "\u0120ambig": 18203, "\u0120Immigration": 18204, "\u0120chim": 18205, "eval": 18206, "\u0120practicing": 18207, "\u0120Mystery": 18208, "\u0120domains": 18209, "\u0120Silicon": 18210, "apps": 18211, "\u0120kilometers": 18212, "ea": 18213, "\u0120Smash": 18214, "\u0120warranty": 18215, "\u0120nost": 18216, "sil": 18217, "rev": 18218, "Jon": 18219, "\u0120Dublin": 18220, "\u0120tastes": 18221, "\u0120bout": 18222, "great": 18223, "error": 18224, "\u0120switches": 18225, "\u0120Bapt": 18226, "DO": 18227, "oki": 18228, "\u0120sourced": 18229, "produ": 18230, "\u0120attachment": 18231, "\u0120Issue": 18232, "\u0120Question": 18233, "Join": 18234, "\u0120fitted": 18235, "\u0120unlawful": 18236, "^^": 18237, "erek": 18238, "\u0120authentication": 18239, "\u0120stole": 18240, "\u0120accountability": 18241, "label": 18242, "Search": 18243, "\u0120albeit": 18244, "atican": 18245, "funded": 18246, "\u0120Adding": 18247, "\u0120IQ": 18248, "\u0120submar": 18249, "lit": 18250, "aque": 18251, "\u0120Learning": 18252, "\u0120integer": 18253, "Master": 18254, "\u0120Chrom": 18255, "\u0120premier": 18256, "Op": 18257, "\u0120Liu": 18258, "\u0120blessed": 18259, "\u0120Globe": 18260, "\u0120Response": 18261, "\u0120legitim": 18262, "\u0120Merkel": 18263, "\u0120disposal": 18264, "\u00c2\u00b4": 18265, "\u0120gauge": 18266, "peat": 18267, "\u0120induced": 18268, "\u0120questionable": 18269, "arthy": 18270, "\u0120Vit": 18271, "\u0120Feed": 18272, "Until": 18273, "Ut": 18274, "worthy": 18275, "RY": 18276, "\u0120Herald": 18277, "\u0120Hammer": 18278, "\u0120medal": 18279, "\u0120Rivers": 18280, "\u0120Hack": 18281, "\u0120clarify": 18282, "\u0120tracked": 18283, "\u0120autonomous": 18284, "\u0120tenant": 18285, "\u0120Qatar": 18286, "erie": 18287, "\u0120grim": 18288, "\u0120Monitor": 18289, "\u0120resistant": 18290, "\u0120Spec": 18291, "\u0120Wells": 18292, "NAS": 18293, "148": 18294, "\u0120miners": 18295, "iotics": 18296, "\u0120misses": 18297, "116": 18298, "gian": 18299, "git": 18300, "\u0120Eyes": 18301, "pres": 18302, "\u0120graduated": 18303, "\u0120angel": 18304, "\u0120synchron": 18305, "\u0120efficiently": 18306, "\u0120transmitted": 18307, "Harry": 18308, "\u0120globally": 18309, "ENCE": 18310, "\u0120Montana": 18311, "raged": 18312, "\u0120Prevention": 18313, "\u0120piss": 18314, "\u0120Ll": 18315, "\u0120shelf": 18316, "\u0120BJP": 18317, "\u0120Testament": 18318, "\u0120Late": 18319, "iker": 18320, "\u0120Happ": 18321, "\u0120Julian": 18322, "hall": 18323, "\u0120spont": 18324, "\u0120shutdown": 18325, "\u0120inconsistent": 18326, "\u0120subscribers": 18327, "\u0120skeleton": 18328, "\u0120Nebraska": 18329, "\u0120inspire": 18330, "\u0120Void": 18331, "Feed": 18332, "\u0120angles": 18333, "\u0120Springs": 18334, "\u0120benchmark": 18335, "\u0120vaccines": 18336, "izophren": 18337, "sexual": 18338, "uffed": 18339, "\u0120shine": 18340, "\u0120Kath": 18341, "\u0120gesture": 18342, "inea": 18343, "\u0120rip": 18344, "\u0120oppression": 18345, "\u0120conscience": 18346, "bt": 18347, "\u0120Lum": 18348, "\u0120incidence": 18349, "\u0120Fa": 18350, "wr": 18351, "\u0120mineral": 18352, "\u0120Spurs": 18353, "alky": 
18354, "\u0120thunder": 18355, "\u0120opio": 18356, "Being": 18357, "\u0120Palm": 18358, "\u0120wasted": 18359, "\u0120lb": 18360, "iaries": 18361, "\u0120Initiative": 18362, "\u0120curric": 18363, "\u0120marker": 18364, "\u0120McL": 18365, "\u0120extensions": 18366, "\u0120Pv": 18367, "\u0120Arms": 18368, "\u0120offerings": 18369, "\u0120defenses": 18370, "\u0120vendor": 18371, "\u0120contradict": 18372, "\u0120Colin": 18373, "\u0120reddit": 18374, "\u0120peripher": 18375, "122": 18376, "\u0120sins": 18377, "Edit": 18378, "ICT": 18379, "Soft": 18380, "\u0120Shah": 18381, "\u0120administrator": 18382, "\u0120Trip": 18383, "\u0120pornography": 18384, "\u0120tuition": 18385, "inence": 18386, "\u0120Progress": 18387, "\u0120catalog": 18388, "\u0120suite": 18389, "\u0120hike": 18390, "\u0120reproductive": 18391, "engine": 18392, "\u0120drought": 18393, "\u0120Noah": 18394, "\u0120230": 18395, "\u0120dude": 18396, "\u0120relaxed": 18397, "\u0120partition": 18398, "\u0120participant": 18399, "\u0120telesc": 18400, "\u0120feas": 18401, "\u0120FF": 18402, "owner": 18403, "\u0120sweeping": 18404, "\u0120lenses": 18405, "\u0120matchup": 18406, "\u0120Repl": 18407, "ournals": 18408, "\u0120credible": 18409, "\u0120grandmother": 18410, "\u0120thermal": 18411, "\u0120subscribing": 18412, "\u0120identities": 18413, "colm": 18414, "UCT": 18415, "\u0120reluctant": 18416, "users": 18417, "\u0120Cort": 18418, "\u0120assisted": 18419, "OSS": 18420, "ATIONS": 18421, "ISH": 18422, "\u0120pharmaceutical": 18423, "icable": 18424, "adian": 18425, "\u0120Sonic": 18426, "\u0120Fury": 18427, "\u0120Mong": 18428, "AH": 18429, "\u0120Psychology": 18430, "\u0120phosph": 18431, "\u0120treats": 18432, "\u0143\u0136": 18433, "\u0120steadily": 18434, "\u0120Hello": 18435, "\u0120relates": 18436, "\u0120clue": 18437, "Expl": 18438, "auth": 18439, "\u0120revision": 18440, "\u0120eld": 18441, "osion": 18442, "\u0120bron": 18443, "144": 18444, "rikes": 18445, "\u0120mines": 18446, "\u0120blanket": 18447, "\u0120Fail": 18448, "eled": 18449, "\u0120Imagine": 18450, "\u0120Planned": 18451, "aic": 18452, "Request": 18453, "Mad": 18454, "\u0120Horse": 18455, "\u0120Eagle": 18456, "\u0120capac": 18457, "157": 18458, "\u0120ling": 18459, "\u0120Nice": 18460, "\u0120Parenthood": 18461, "minster": 18462, "ogs": 18463, "ensitive": 18464, "Nothing": 18465, "\u0120carn": 18466, "Fin": 18467, "\u0120PE": 18468, "\u0120rifles": 18469, "\u0120LP": 18470, "Sand": 18471, "\u0120guiActive": 18472, "\u0120tourist": 18473, "CNN": 18474, "\u0120unveiled": 18475, "\u0120predecessor": 18476, "}{": 18477, "uber": 18478, "\u0120offshore": 18479, "\u0120optical": 18480, "\u0120Rot": 18481, "\u0120Pearl": 18482, "eton": 18483, "\u0120stared": 18484, "\u0120farther": 18485, "atility": 18486, "contin": 18487, "\u0120Gy": 18488, "\u0120Foster": 18489, "\u0120Coc": 18490, "rients": 18491, "\u0120designing": 18492, "\u0120Economy": 18493, "ONG": 18494, "Women": 18495, "\u0120Nancy": 18496, "erver": 18497, "\u0120mascul": 18498, "\u0120casualties": 18499, "\u0120225": 18500, "\u0120Sullivan": 18501, "\u0120Choice": 18502, "\u0120aster": 18503, "ws": 18504, "\u0120hotels": 18505, "\u0120considerations": 18506, "\u0120couch": 18507, "\u0120Strip": 18508, "\u0120Gn": 18509, "\u0120manipulate": 18510, "lied": 18511, "\u0120synthetic": 18512, "\u0120assaulted": 18513, "\u0120offenses": 18514, "\u0120Drake": 18515, "\u0120impe": 18516, "October": 18517, "\u0120Heritage": 18518, "hl": 18519, "\u0120Blair": 18520, "Unlike": 18521, "\u0120grief": 18522, "\u0120450": 
18523, "\u0120opted": 18524, "\u0120resignation": 18525, "ilo": 18526, "\u0120verse": 18527, "\u0120Tomb": 18528, "\u0120upt": 18529, "\u0120aired": 18530, "\u0120Hook": 18531, "\u0120MLB": 18532, "\u0120assumes": 18533, "outed": 18534, "\u0120Vers": 18535, "\u0120inferior": 18536, "\u0120bundle": 18537, "\u0120DNS": 18538, "ographer": 18539, "\u0120multip": 18540, "\u0120Souls": 18541, "\u0120illustrated": 18542, "\u0120tactic": 18543, "\u0120dressing": 18544, "\u0120duo": 18545, "Conf": 18546, "\u0120relent": 18547, "\u0120cant": 18548, "\u0120scarce": 18549, "\u0120candy": 18550, "\u0120CF": 18551, "\u0120affiliated": 18552, "\u0120sprint": 18553, "ylan": 18554, "\u0120Garcia": 18555, "\u0120junk": 18556, "Print": 18557, "exec": 18558, "Crit": 18559, "\u0120portrait": 18560, "iries": 18561, "\u0120OFF": 18562, "\u0120disputes": 18563, "WR": 18564, "Love": 18565, "\u00e3\u0123\u0126": 18566, "\u0120Reyn": 18567, "\u0120hipp": 18568, "opath": 18569, "\u0120floors": 18570, "\u0120Feel": 18571, "\u0120worries": 18572, "\u0120settlements": 18573, "\u0120Pos": 18574, "\u0120mosque": 18575, "\u0120finals": 18576, "\u0120crushed": 18577, "\u0120Probably": 18578, "\u0120Bot": 18579, "\u0120Mans": 18580, "\u0120Period": 18581, "\u0120sovereignty": 18582, "\u0120seller": 18583, "\u0120apost": 18584, "\u0120amateur": 18585, "\u0120dorm": 18586, "\u0120consuming": 18587, "\u0120armour": 18588, "\u0120Roose": 18589, "\u0120intensive": 18590, "\u0120eliminating": 18591, "\u0120Sunni": 18592, "\u0120Aleppo": 18593, "jin": 18594, "\u0120advise": 18595, "pal": 18596, "\u0120Halo": 18597, "\u0120descent": 18598, "\u0120simpler": 18599, "\u0120booth": 18600, "STR": 18601, "Later": 18602, "\u0120Cave": 18603, "===": 18604, "\u0120mol": 18605, "\u0120fist": 18606, "\u0120shotgun": 18607, "supp": 18608, "\u0120robbery": 18609, "Effect": 18610, "\u0120obscure": 18611, "\u0120Professional": 18612, "\u0120embassy": 18613, "\u0120militant": 18614, "\u0120incarcer": 18615, "\u0120generates": 18616, "\u0120launches": 18617, "\u0120administrators": 18618, "\u0120shaft": 18619, "\u0120circular": 18620, "\u0120freshman": 18621, "\u0120Wes": 18622, "\u0120Joel": 18623, "\u0120Drew": 18624, "\u0120Duncan": 18625, "\u0120Apparently": 18626, "sight": 18627, "\u0120Internal": 18628, "\u0120Individual": 18629, "\u0120FE": 18630, "\u0120bore": 18631, "\u0120Mt": 18632, "\u0120broadly": 18633, "\u0120Options": 18634, "ountain": 18635, "ipes": 18636, "\u0120Videos": 18637, "204": 18638, "\u0120hills": 18639, "\u0120simulation": 18640, "\u0120disappointment": 18641, "itan": 18642, "\u0120Laboratory": 18643, "\u0120upward": 18644, "\u0120boundary": 18645, "\u0120darker": 18646, "hart": 18647, "\u0120dominance": 18648, "Cong": 18649, "\u0120Oracle": 18650, "\u0120Lords": 18651, "\u0120scholarship": 18652, "\u0120Vincent": 18653, "ede": 18654, "\u0120Rah": 18655, "\u0120encourages": 18656, "rov": 18657, "\u0120quo": 18658, "\u0120premise": 18659, "\u0120Crisis": 18660, "\u0120Holocaust": 18661, "\u0120rhythm": 18662, "\u0120metric": 18663, "club": 18664, "\u0120transported": 18665, "\u0120nod": 18666, "\u0120Pist": 18667, "\u0120ancestors": 18668, "\u0120Freder": 18669, "thumbnails": 18670, "\u0120CE": 18671, "OND": 18672, "Phil": 18673, "venge": 18674, "\u0120Products": 18675, "castle": 18676, "\u0120qualifying": 18677, "\u0120Karen": 18678, "VERTISEMENT": 18679, "\u0120mighty": 18680, "\u0120explanations": 18681, "\u0120fixing": 18682, "Di": 18683, "\u0120declaring": 18684, "\u0120anonymity": 18685, "\u0120juven": 18686, 
"\u0120Nord": 18687, "\u0120Doom": 18688, "\u0120Actually": 18689, "Ok": 18690, "phis": 18691, "\u0120Desert": 18692, "\u0120116": 18693, "IK": 18694, "\u0120FM": 18695, "\u0120incomes": 18696, "VEL": 18697, "okers": 18698, "\u0120pecul": 18699, "\u0120lightweight": 18700, "gue": 18701, "\u0120accent": 18702, "\u0120increment": 18703, "\u0120Chan": 18704, "\u0120complaining": 18705, "\u0120Baghd": 18706, "\u0120midfielder": 18707, "\u0120overhaul": 18708, "Process": 18709, "\u0120Hollow": 18710, "\u0120Titans": 18711, "Small": 18712, "manuel": 18713, "\u0120Unity": 18714, "\u0120Events": 18715, "Sty": 18716, "\u0120disproportion": 18717, "nesty": 18718, "enes": 18719, "\u0120Cod": 18720, "\u0120demonstrations": 18721, "\u0120Crimson": 18722, "\u0120OH": 18723, "\u0120enrolled": 18724, "\u0120cel": 18725, "\u0120Brett": 18726, "\u0120aide": 18727, "\u0120heels": 18728, "\u0120broadband": 18729, "\u0120marking": 18730, "\u0120wizard": 18731, "\u0120NJ": 18732, "\u0120Chiefs": 18733, "\u0120ingredient": 18734, "\u0120dug": 18735, "\u0120Shut": 18736, "urchase": 18737, "endor": 18738, "\u0120farmer": 18739, "\u0120Goldman": 18740, "129": 18741, "155": 18742, "Order": 18743, "\u0120lion": 18744, "iably": 18745, "\u0120stain": 18746, "array": 18747, "ilitary": 18748, "\u0120FAQ": 18749, "\u0120exploded": 18750, "\u0120McCarthy": 18751, "\u0120Tweet": 18752, "\u0120Greens": 18753, "eking": 18754, "ln": 18755, "ensen": 18756, "\u0120motorcycle": 18757, "\u0120particle": 18758, "\u0120cholesterol": 18759, "Bron": 18760, "\u0120stair": 18761, "\u0120oxid": 18762, "\u0120desirable": 18763, "ibles": 18764, "\u0120theor": 18765, "forcing": 18766, "\u0120promotional": 18767, "ovo": 18768, "boot": 18769, "\u0120Bonus": 18770, "rawling": 18771, "\u0120shortage": 18772, "\u0120Psy": 18773, "\u0120recruited": 18774, "\u0120infants": 18775, "\u0120testosterone": 18776, "\u0120deduct": 18777, "\u0120distinctive": 18778, "\u0120firmware": 18779, "built": 18780, "145": 18781, "\u0120explored": 18782, "\u0120factions": 18783, "\u0120vide": 18784, "\u0120tattoo": 18785, "\u0120financially": 18786, "\u0120fatigue": 18787, "\u0120proceeding": 18788, "constitutional": 18789, "\u0120miser": 18790, "\u0120chairs": 18791, "gging": 18792, "ipple": 18793, "\u0120dent": 18794, "\u0120disreg": 18795, "\u00e7\u0136": 18796, "stant": 18797, "llo": 18798, "bps": 18799, "akening": 18800, "\u0120abnormal": 18801, "\u0120ERA": 18802, "\u00e5\u00a3\u00ab": 18803, "\u0120HBO": 18804, "\u0120MAR": 18805, "\u0120concess": 18806, "\u0120servant": 18807, "\u0120aspir": 18808, "lav": 18809, "\u0120Panel": 18810, "amo": 18811, "\u0120precip": 18812, "\u0120recordings": 18813, "\u0120proceeded": 18814, "\u0120colony": 18815, "\u0120Tang": 18816, "ablo": 18817, "\u0120stripped": 18818, "Left": 18819, "too": 18820, "\u0120potatoes": 18821, "\u0120finest": 18822, "%).": 18823, "\u0120crap": 18824, "\u0120Zach": 18825, "abases": 18826, "\u0120Goth": 18827, "\u0120billionaire": 18828, "wolf": 18829, "\u0120sanction": 18830, "SK": 18831, "\u0120logged": 18832, "Po": 18833, "eyed": 18834, "unal": 18835, "\u0120cricket": 18836, "\u0120armies": 18837, "\u0120uncovered": 18838, "Cloud": 18839, "\u00c3\u00b3n": 18840, "\u0120rebounds": 18841, "\u0120mes": 18842, "Oper": 18843, "Pac": 18844, "\u0120nationally": 18845, "\u0120inserted": 18846, "pict": 18847, "\u0120governance": 18848, "\u00d0\u00b8": 18849, "\u0120privileges": 18850, "GET": 18851, "\u0120favorites": 18852, "imity": 18853, "\u0120lover": 18854, "them": 18855, "empl": 18856, 
"\u0120gorgeous": 18857, "Ann": 18858, "\u0120slipped": 18859, "\u0120veto": 18860, "Bob": 18861, "\u0120slim": 18862, "ucc": 18863, "\u0120Fame": 18864, "uddenly": 18865, "\u0120denies": 18866, "\u0120Maur": 18867, "\u0120distances": 18868, "\u0120wanna": 18869, "tar": 18870, "\u0120SER": 18871, "\u0120\u00e2\u012a": 18872, "\u0120lemon": 18873, "athetic": 18874, "\u0120literal": 18875, "\u0120distinguished": 18876, "\u0120answering": 18877, "GI": 18878, "\u0120religions": 18879, "\u0120Philos": 18880, "\u0120Lay": 18881, "\u0120compos": 18882, "irements": 18883, "\u0120Kos": 18884, "inez": 18885, "rolling": 18886, "\u0120youngest": 18887, "andise": 18888, "\u0120Born": 18889, "\u0120altar": 18890, "amina": 18891, "\u0120Boot": 18892, "voc": 18893, "\u0120digging": 18894, "\u0120pressures": 18895, "\u0120len": 18896, "264": 18897, "\u0120assassination": 18898, "\u0120Birmingham": 18899, "\u0120Myth": 18900, "\u0120sovereign": 18901, "\u0120Artist": 18902, "\u0120Photograph": 18903, "\u0120depicted": 18904, "\u0120dispens": 18905, "orthy": 18906, "\u0120ambul": 18907, "integ": 18908, "\u0120Cele": 18909, "\u0120Tibet": 18910, "\u0120hierarchy": 18911, "\u0120cu": 18912, "\u0120preseason": 18913, "\u0120Peterson": 18914, "\u0120colours": 18915, "\u0120worrying": 18916, "\u0120backers": 18917, "\u0120Palmer": 18918, "\u0120\u00ce\u00bc": 18919, "\u0120contributor": 18920, "\u0120hearings": 18921, "\u0120urine": 18922, "\u0120\u00d9": 18923, "ourgeois": 18924, "Similar": 18925, "\u0120Zimmer": 18926, "something": 18927, "\u0120USC": 18928, "\u0120strengths": 18929, "\u0120FI": 18930, "\u0120logging": 18931, "Asked": 18932, "\u0120Thai": 18933, "inqu": 18934, "\u0120Walt": 18935, "\u0120crews": 18936, "itism": 18937, "301": 18938, "\u0120sharply": 18939, "umed": 18940, "\u0120redirect": 18941, "rators": 18942, "Inf": 18943, "\u0120Weapons": 18944, "\u0120teasp": 18945, "1999": 18946, "Live": 18947, "\u0120Especially": 18948, "\u0120Ster": 18949, "\u0120Veterans": 18950, "\u0120intro": 18951, "otherapy": 18952, "\u0120malware": 18953, "\u0120breeding": 18954, "\u0120molecular": 18955, "\u0120Route": 18956, "\u0120Comment": 18957, "ochem": 18958, "\u0120ain": 18959, "Season": 18960, "\u0120linebacker": 18961, "\u00c4\u00ab": 18962, "\u0120Economics": 18963, "esar": 18964, "\u0120Lives": 18965, "\u0120Emma": 18966, "\u0120kin": 18967, "\u0120Territ": 18968, "\u0120planted": 18969, "oton": 18970, "\u0120Butter": 18971, "\u0120Spons": 18972, "PER": 18973, "\u0120dungeon": 18974, "\u0120symbolic": 18975, "\u0120filmed": 18976, "\u0120diets": 18977, "\u0120concludes": 18978, "\u0120certainty": 18979, "\u0120Format": 18980, "\u0120strangers": 18981, "format": 18982, "\u0120Phase": 18983, "\u0120copied": 18984, "\u0120metres": 18985, "lda": 18986, "\u0120Users": 18987, "\u0120deliberate": 18988, "\u0120washed": 18989, "\u0120Lance": 18990, "imation": 18991, "\u0120improper": 18992, "\u0120Genesis": 18993, "ickr": 18994, "\u0120Kush": 18995, "\u0120realise": 18996, "\u0120embarrassing": 18997, "alking": 18998, "bucks": 18999, "\u0120verified": 19000, "\u0120outline": 19001, "years": 19002, "\u0120Income": 19003, "202": 19004, "\u0120zombies": 19005, "Final": 19006, "\u0120Millenn": 19007, "\u0120modifications": 19008, "\u0120Vision": 19009, "\u0120Moses": 19010, "verb": 19011, "iterranean": 19012, "\u0120Jet": 19013, "\u0120naval": 19014, "\u0120Agg": 19015, "\u0120url": 19016, "\u0120victories": 19017, "\u0120nonetheless": 19018, "\u0120injust": 19019, "\u0120Fact": 19020, "\u00e7\u013c": 19021, 
"\u0120insufficient": 19022, "review": 19023, "facebook": 19024, "\u0120negotiating": 19025, "\u0120guarantees": 19026, "imen": 19027, "utenberg": 19028, "\u0120gambling": 19029, "\u0120congr": 19030, "Loading": 19031, "\u0120nevertheless": 19032, "\u0120presidents": 19033, "\u0120Industrial": 19034, "\u0120118": 19035, "\u0120poured": 19036, "\u0120Tory": 19037, "\u0120175": 19038, "\u0120:=": 19039, "Scott": 19040, "angered": 19041, "Tok": 19042, "\u0120organizers": 19043, "Mat": 19044, "\u0120Growth": 19045, "\u0120adul": 19046, "\u0120ensures": 19047, "\u0120117": 19048, "\u00e9\u00be\u012f\u00e5": 19049, "\u0120massacre": 19050, "\u0120grades": 19051, "before": 19052, "ADVERTISEMENT": 19053, "\u0120Slow": 19054, "\u0120MMA": 19055, "\u00e2\u0122\u0136\"": 19056, "\u0120Vatican": 19057, "Qaeda": 19058, "\u0120owe": 19059, "6666": 19060, "\u0120Sorry": 19061, "\u0120Grass": 19062, "\u0120backgrounds": 19063, "\u0120exhausted": 19064, "\u0120clan": 19065, "\u0120compromised": 19066, "\u0120Elf": 19067, "\u0120Isaac": 19068, "enson": 19069, "Invest": 19070, "IFA": 19071, "\u0120interrupted": 19072, "\u00e3\u0125\u012b\u00e3\u0125\u00a9": 19073, "\u0120twisted": 19074, "\u0120Dragons": 19075, "Mode": 19076, "\u0120Kremlin": 19077, "\u0120fertil": 19078, "heres": 19079, "phan": 19080, "\u0120Node": 19081, "fed": 19082, "\u0120Orc": 19083, "\u0120unwilling": 19084, "Cent": 19085, "\u0120priorit": 19086, "\u0120graduates": 19087, "\u0120subjective": 19088, "\u0120issuing": 19089, "\u0120Lt": 19090, "\u0120viewer": 19091, "\u0120woke": 19092, "Thus": 19093, "brook": 19094, "\u0120depressed": 19095, "\u0120bracket": 19096, "\u0120Gor": 19097, "\u0120Fighting": 19098, "\u0120striker": 19099, "Report": 19100, "\u0120Portugal": 19101, "\u0120neo": 19102, "wed": 19103, "199": 19104, "\u0120fleeing": 19105, "shadow": 19106, "identified": 19107, "USE": 19108, "Steam": 19109, "\u0120stretched": 19110, "\u0120revelations": 19111, "arted": 19112, "\u0120Dw": 19113, "\u0120alignment": 19114, "eston": 19115, "\u0120Jared": 19116, "Sep": 19117, "\u0120blogs": 19118, "update": 19119, "gom": 19120, "risk": 19121, "\u0120clash": 19122, "\u0120Hour": 19123, "\u0120runtime": 19124, "\u0120unwanted": 19125, "\u0120scam": 19126, "\u0120rack": 19127, "\u0120enlight": 19128, "onest": 19129, "\u0120Ferr": 19130, "\u0120convictions": 19131, "\u0120piano": 19132, "\u0120circulation": 19133, "\u0120Welcome": 19134, "\u0120backlash": 19135, "\u0120Wade": 19136, "\u0120receivers": 19137, "otive": 19138, "Jeff": 19139, "\u0120networking": 19140, "\u0120Prep": 19141, "\u0120Explorer": 19142, "\u0120lecture": 19143, "\u0120uploaded": 19144, "\u0120Meat": 19145, "BLE": 19146, "\u0120Nazis": 19147, "\u0120Synd": 19148, "stud": 19149, "roots": 19150, "rians": 19151, "\u0120portrayed": 19152, "\u0120??": 19153, "\u0120Buddha": 19154, "sun": 19155, "Robert": 19156, "\u0120Complex": 19157, "\u0120oversee": 19158, "\u0120stealth": 19159, "Title": 19160, "\u0120Jobs": 19161, "\u0120Kum": 19162, "\u0120appreciation": 19163, "\u0120MOD": 19164, "\u0120basics": 19165, "\u0120clips": 19166, "\u0120nursing": 19167, "\u0120proposition": 19168, "\u0120realised": 19169, "\u0120NYC": 19170, "\u0120allocated": 19171, "rium": 19172, "aran": 19173, "\u0120Production": 19174, "\u0120Vote": 19175, "\u0120smugg": 19176, "\u0120hunter": 19177, "azer": 19178, "\u0120Changes": 19179, "\u0120fluct": 19180, "yon": 19181, "Array": 19182, "\u0120kits": 19183, "Water": 19184, "\u0120uncommon": 19185, "\u0120resting": 19186, "ells": 19187, "would": 19188, 
"\u0120pursued": 19189, "\u0120assertion": 19190, "ometown": 19191, "\u0120Mosul": 19192, "\u0120Platform": 19193, "iolet": 19194, "\u0120shareholders": 19195, "\u0120trails": 19196, "Pay": 19197, "\u0120Enforcement": 19198, "types": 19199, "\u0120Anonymous": 19200, "\u0120satisfying": 19201, "ilogy": 19202, "\u0120('": 19203, "wave": 19204, "city": 19205, "Steve": 19206, "\u0120confrontation": 19207, "\u0120Eld": 19208, "Capt": 19209, "ahan": 19210, "htm": 19211, "\u0120Ctrl": 19212, "ONS": 19213, "230": 19214, "ifa": 19215, "holding": 19216, "\u0120delicate": 19217, "\u0120jaw": 19218, "\u0120Going": 19219, "orum": 19220, "Sal": 19221, "\u0120dull": 19222, "\u0120Beth": 19223, "\u0120prisons": 19224, "\u0120ego": 19225, "\u0120Elsa": 19226, "avorite": 19227, "\u0120Gang": 19228, "\u0120Nuclear": 19229, "\u0120spider": 19230, "atsu": 19231, "\u0120sampling": 19232, "\u0120absorbed": 19233, "\u0120Pharm": 19234, "ieth": 19235, "\u0120bucket": 19236, "\u0120Recomm": 19237, "OF": 19238, "\u0120Factory": 19239, "ANCE": 19240, "\u0120bacter": 19241, "Has": 19242, "\u0120Observ": 19243, "121": 19244, "\u0120premiere": 19245, "Develop": 19246, "\u0120currencies": 19247, "Cast": 19248, "\u0120accompanying": 19249, "\u0120Nashville": 19250, "\u0120fatty": 19251, "\u0120Brend": 19252, "\u0120locks": 19253, "\u0120centered": 19254, "\u0120UT": 19255, "aughs": 19256, "orie": 19257, "\u0120Affordable": 19258, "vance": 19259, "DL": 19260, "emet": 19261, "\u0120throne": 19262, "\u0120Bluetooth": 19263, "\u0120naming": 19264, "ifts": 19265, "ADE": 19266, "\u0120corrected": 19267, "\u0120promptly": 19268, "\u0120STR": 19269, "\u0120genome": 19270, "\u0120cope": 19271, "\u0120valley": 19272, "\u0120rounded": 19273, "\u0120Kend": 19274, "alion": 19275, "pers": 19276, "\u0120tourism": 19277, "\u0120stark": 19278, "vl": 19279, "\u0120blowing": 19280, "\u0120Schedule": 19281, "std": 19282, "\u0120unhappy": 19283, "\u0120litigation": 19284, "cedes": 19285, "\u0120android": 19286, "\u0120integral": 19287, "erers": 19288, "uded": 19289, "tax": 19290, "\u0120reiter": 19291, "\u0120Motors": 19292, "ociated": 19293, "\u0120wonders": 19294, "\u0120Apost": 19295, "ucking": 19296, "\u0120Roosevelt": 19297, "fram": 19298, "\u0120yields": 19299, "\u0120constitutes": 19300, "awk": 19301, "Interest": 19302, "\u0120interim": 19303, "\u0120breakthrough": 19304, "\u0120Cher": 19305, "\u0120prosec": 19306, "\u0120Dj": 19307, "\u0120MT": 19308, "Resp": 19309, "\u0120PT": 19310, "\u0120sperm": 19311, "edit": 19312, "BT": 19313, "Linux": 19314, "country": 19315, "league": 19316, "\u0120dick": 19317, "\u0120oct": 19318, "\u0120inserting": 19319, "\u0120scra": 19320, "\u0120Brewing": 19321, "\u01201966": 19322, "\u0120runners": 19323, "\u0120plun": 19324, "idy": 19325, "\u0120Dian": 19326, "\u0120dysfunction": 19327, "\u0120exclusion": 19328, "\u0120disgr": 19329, "\u0120incorporate": 19330, "\u0120reconc": 19331, "\u0120nominated": 19332, "\u0120Archer": 19333, "draw": 19334, "achelor": 19335, "\u0120writings": 19336, "\u0120shallow": 19337, "\u0120hast": 19338, "\u0120BMW": 19339, "\u0120RS": 19340, "\u0120thigh": 19341, "\u01201963": 19342, "\u0120lamb": 19343, "\u0120favored": 19344, "agle": 19345, "\u0120cooler": 19346, "\u0120Hours": 19347, "\u0120GU": 19348, "\u0120Origin": 19349, "\u0120glimpse": 19350, "--------------------": 19351, "Lim": 19352, "\u0120cheek": 19353, "\u0120jealous": 19354, "-'": 19355, "\u0120harness": 19356, "\u0120Poison": 19357, "\u0120disabilities": 19358, "neapolis": 19359, "\u0120outlook": 19360, 
"\u0120notify": 19361, "\u0120Indianapolis": 19362, "\u0120abrupt": 19363, "nsic": 19364, "\u0120encrypted": 19365, "\u0120forfe": 19366, "reath": 19367, "\u0120rabb": 19368, "\u0120foundations": 19369, "\u0120compliment": 19370, "\u0120Interview": 19371, "\u0120Swe": 19372, "\u0120adolesc": 19373, "\u0120monitors": 19374, "\u0120Sacramento": 19375, "\u0120timely": 19376, "\u0120contempl": 19377, "\u0120positioned": 19378, "\u0120posters": 19379, "phies": 19380, "iovascular": 19381, "void": 19382, "\u0120Fifth": 19383, "\u0120investigative": 19384, "OUN": 19385, "\u0120integrate": 19386, "\u0120INC": 19387, "isha": 19388, "iblings": 19389, "\u0120Request": 19390, "\u0120Rodriguez": 19391, "\u0120slides": 19392, "\u0120DX": 19393, "\u0120feminism": 19394, "\u0120datas": 19395, "\u0120bend": 19396, "irus": 19397, "\u0120Nigeria": 19398, "Fox": 19399, "Change": 19400, "\u0120airplane": 19401, "\u0120Laden": 19402, "\u0120publicity": 19403, "ixty": 19404, "\u0120commitments": 19405, "\u0120aggregate": 19406, "\u0120displaying": 19407, "\u0120Arrow": 19408, "\u0120122": 19409, "\u0120respects": 19410, "android": 19411, "six": 19412, "\u0120Sha": 19413, "\u0120restoration": 19414, ")\\": 19415, "WS": 19416, "oys": 19417, "\u0120illustrate": 19418, "without": 19419, "126": 19420, "\u0120\u00e2\u0136\u0124": 19421, "\u0120pickup": 19422, "nels": 19423, "\u0120....": 19424, "food": 19425, "\u0120Fen": 19426, ")?": 19427, "\u0120phenomena": 19428, "\u0120companions": 19429, "\u0120Write": 19430, "\u0120spill": 19431, "\u0120bridges": 19432, "\u0120Updated": 19433, "\u0120Fo": 19434, "\u0120insects": 19435, "ASHINGTON": 19436, "\u0120scare": 19437, "iltr": 19438, "\u0120Zhang": 19439, "\u0120severity": 19440, "\u0120indul": 19441, "149": 19442, "\u0120Coffee": 19443, "\u0120norms": 19444, "\u0120pulse": 19445, "\u0120FT": 19446, "\u0120horrific": 19447, "\u0120Destroy": 19448, "\u0120JSON": 19449, "\u0120olive": 19450, "\u0120discusses": 19451, "Rest": 19452, "Elect": 19453, "\u0120Winn": 19454, "\u0120Surviv": 19455, "\u0120Hait": 19456, "Sure": 19457, "oped": 19458, "\u0120rooted": 19459, "\u0120Ske": 19460, "\u0120Bronze": 19461, "\u0120lol": 19462, "Default": 19463, "\u0120commodity": 19464, "redited": 19465, "\u0120libertarian": 19466, "\u0120forbidden": 19467, "\u0120gran": 19468, "\u00e0\u00a8": 19469, "\u0120lag": 19470, "enz": 19471, "drive": 19472, "\u0120mathematics": 19473, "\u0120wires": 19474, "\u0120critically": 19475, "\u0120carbohyd": 19476, "\u0120Chancellor": 19477, "\u0120Eddie": 19478, "\u0120banning": 19479, "\u0120Fri": 19480, "\u0120complications": 19481, "etric": 19482, "\u0120Bangladesh": 19483, "\u0120bandwidth": 19484, "Stop": 19485, "\u0120Originally": 19486, "\u0120halfway": 19487, "ynasty": 19488, "shine": 19489, "\u0120tales": 19490, "rities": 19491, "avier": 19492, "\u0120spinning": 19493, "\u0120WHO": 19494, "\u0120neighbourhood": 19495, "bach": 19496, "\u0120commerce": 19497, "\u0120Sle": 19498, "BU": 19499, "\u0120entrepreneur": 19500, "\u0120peculiar": 19501, "\u0120Comments": 19502, "fre": 19503, "320": 19504, "ICS": 19505, "\u0120imagery": 19506, "\u0120Canon": 19507, "\u0120Electronic": 19508, "short": 19509, "((": 19510, "Dig": 19511, "\u0120commem": 19512, "uced": 19513, "\u0120inclined": 19514, "\u0120Summon": 19515, "\u0120cliff": 19516, "\u0120Mediterranean": 19517, "\u0120poetry": 19518, "\u0120prosperity": 19519, "\u0120Rece": 19520, "\u0120pills": 19521, "member": 19522, "\u0120finale": 19523, "unc": 19524, "\u0120Gig": 19525, "\u00e4\u00bd": 19526, 
"\u0120lod": 19527, "\u0120backward": 19528, "-+": 19529, "\u0120Forward": 19530, "\u0120thri": 19531, "sure": 19532, "\u0120soap": 19533, "\u0120FX": 19534, "RES": 19535, "\u0120Sexual": 19536, "oulos": 19537, "\u0120foolish": 19538, "\u0120righteous": 19539, "\u0120coff": 19540, "terrorism": 19541, "ustain": 19542, "oter": 19543, "\u0120abuses": 19544, "next": 19545, "\u0120abusive": 19546, "\u0120thereafter": 19547, "\u0120prohibition": 19548, "\u0120SUP": 19549, "\u0120dip": 19550, "\u0120ripped": 19551, "\u0120inherited": 19552, "\u0120bats": 19553, "stru": 19554, "GT": 19555, "\u0120flawed": 19556, "phabet": 19557, "\u0120fog": 19558, "doors": 19559, "\u0120imaging": 19560, "\u0120digits": 19561, "\u0120Hungary": 19562, "\u0120arrog": 19563, "\u0120teachings": 19564, "\u0120protocols": 19565, "\u0120Banks": 19566, "\u00e0\u00b8": 19567, "pound": 19568, "\u0120Curt": 19569, ".\")": 19570, "./": 19571, "\u0120exemption": 19572, "endix": 19573, "\u0120Mull": 19574, "\u0120improves": 19575, "\u0120Gamer": 19576, "dimensional": 19577, "Icon": 19578, "\u0120Margaret": 19579, "Status": 19580, "dates": 19581, "\u0120intends": 19582, "\u0120depict": 19583, "\u0120parked": 19584, "Joe": 19585, "\u0120Marines": 19586, "chnology": 19587, "!).": 19588, "\u0120judged": 19589, "\u0120weights": 19590, "Ray": 19591, "\u0120apartments": 19592, "hester": 19593, "\u0120reinforce": 19594, "\u0120offender": 19595, "occup": 19596, "\u0120sore": 19597, "ept": 19598, "\u0120PHP": 19599, "\u0120Brow": 19600, "\u0120authorization": 19601, "\u0120Risk": 19602, "\u0120Delaware": 19603, "\u0120QU": 19604, "\u0120notifications": 19605, "\u0120sunlight": 19606, "\u0120exclude": 19607, "dat": 19608, "\u0120mesh": 19609, "\u0120Sudan": 19610, "\u0120belonged": 19611, "\u0120subway": 19612, "\u0120noon": 19613, "\u0120Interior": 19614, "olics": 19615, "\u0120Lakers": 19616, "\u0120coding": 19617, "Disclaimer": 19618, "Calif": 19619, "Old": 19620, "\u0120disl": 19621, "?????": 19622, "\u0120confirms": 19623, "\u0120recruitment": 19624, "\u0120homicide": 19625, "Consider": 19626, "\u0120Jeffrey": 19627, "fty": 19628, "};": 19629, "\u0120objection": 19630, "doing": 19631, "\u0120Leo": 19632, "Want": 19633, "\u0120glow": 19634, "\u0120Clarke": 19635, "\u0120Norman": 19636, "\u0120verification": 19637, "\u0120packet": 19638, "\u0120Formula": 19639, "\u0120plag": 19640, "esville": 19641, "\u0120shouting": 19642, "\u0120ov": 19643, "\u0120REC": 19644, "\u0120Bub": 19645, "\u0120ninth": 19646, "\u0120energ": 19647, "\u0120validity": 19648, "\u0120ups": 19649, "jack": 19650, "\u0120neighboring": 19651, "\u0120Nec": 19652, "eworks": 19653, "\u0120Hab": 19654, "arez": 19655, "\u0120spine": 19656, "\u0120eventual": 19657, "\u0120Leaders": 19658, "\u0120Carn": 19659, "\u0120probation": 19660, "\u0120romance": 19661, "msg": 19662, "\u0120Mechanical": 19663, "ERY": 19664, "Rock": 19665, "\u0120partisan": 19666, "Node": 19667, "assets": 19668, "minent": 19669, "\u0120foreigners": 19670, "\u0120testify": 19671, "\u0120Usually": 19672, "lords": 19673, "\u0120Gren": 19674, "\u0120Powell": 19675, "BIL": 19676, "\u0120sr": 19677, "\u0120addict": 19678, "\u0120shells": 19679, "\u0120sigh": 19680, "\u0120Yale": 19681, "ternity": 19682, "\u0120750": 19683, "EU": 19684, "\u0120Rifle": 19685, "\u0120patron": 19686, "ema": 19687, "\u0120Bannon": 19688, "anity": 19689, "\u0120tropical": 19690, "\u0120VII": 19691, "cross": 19692, "Everything": 19693, "\u0120ISO": 19694, "\u0120humble": 19695, "assing": 19696, "\u0120FIG": 19697, "\u0120updating": 
19698, "yson": 19699, "\u0120calcium": 19700, "\u0120competent": 19701, "\u0120steering": 19702, "Prot": 19703, "\u0120SY": 19704, "\u0120Finals": 19705, "\u0120Rug": 19706, "159": 19707, "137": 19708, "\u0120Golf": 19709, "\u0120126": 19710, "\u0120accommodation": 19711, "\u0120Hughes": 19712, "\u0120aesthetic": 19713, "artisan": 19714, "\u0120Twilight": 19715, "\u0120prince": 19716, "\u0120Agriculture": 19717, "\u0120Disco": 19718, "\u0120precedent": 19719, "\u0120typing": 19720, "authorized": 19721, "Option": 19722, "\u0120Aub": 19723, "lishes": 19724, "acht": 19725, "mag": 19726, "Peter": 19727, "\u0120UFO": 19728, "monton": 19729, "\u0120Lith": 19730, "\u0120arom": 19731, "\u0120securing": 19732, "\u0120confined": 19733, "private": 19734, "\u0120swords": 19735, "\u0120markers": 19736, "\u0120metabolic": 19737, "select": 19738, "\u0120Curse": 19739, "\u0120Ot": 19740, "gressive": 19741, "\u0120incumb": 19742, "\u0120Saga": 19743, "\u0120priced": 19744, "\u0120clearance": 19745, "Content": 19746, "\u0120drilling": 19747, "\u0120notices": 19748, "\u0120bourgeois": 19749, "\u0120vest": 19750, "\u0120cookie": 19751, "\u0120Guardians": 19752, "rys": 19753, "inyl": 19754, "\u0120124": 19755, "\u0120plausible": 19756, "ongh": 19757, "\u0120Odin": 19758, "\u0120conception": 19759, "\u0120Yuk": 19760, "\u0120Baghdad": 19761, "\u0120Flag": 19762, "Austral": 19763, "\u0120IBM": 19764, "\u0120internationally": 19765, "\u0120WikiLeaks": 19766, "IED": 19767, "\u0120cyn": 19768, "\u0120chooses": 19769, "\u0120Pill": 19770, "\u0120combining": 19771, "\u0120radi": 19772, "\u0120Mohammed": 19773, "defense": 19774, "atching": 19775, "Subject": 19776, "iciency": 19777, "Frame": 19778, "\u0120{\"": 19779, "\u0120chess": 19780, "\u0120timer": 19781, "190": 19782, "\u0120tin": 19783, "\u0120ordinance": 19784, "emetery": 19785, "\u0120accusing": 19786, "\u0120noticeable": 19787, "\u0120centres": 19788, "\u0120lid": 19789, "\u0120Mills": 19790, "imgur": 19791, "\u0120zoom": 19792, "ergic": 19793, "\u0120compression": 19794, "prim": 19795, "find": 19796, "\u0120surg": 19797, "\u0120pand": 19798, "\u0120Kee": 19799, "\u0120Chad": 19800, "cellence": 19801, "oyle": 19802, "\u0120socialism": 19803, "\u0120Travis": 19804, "\u0120MHz": 19805, "\u0120guild": 19806, "ALLY": 19807, "\u0120Subscribe": 19808, "\u0120Related": 19809, "\u0120occurrence": 19810, "itching": 19811, "\u0120fictional": 19812, "\u0120crush": 19813, "\u0120EA": 19814, "cod": 19815, "mix": 19816, "\u0120Triple": 19817, "\u0120retrieve": 19818, "\u0120stimulus": 19819, "\u0120psychiat": 19820, "\u0120Door": 19821, "\u0120homosexuality": 19822, "\u0120elementary": 19823, "\u0120cellular": 19824, "idian": 19825, "\u0120Laun": 19826, "\u0120intriguing": 19827, "\u0120foam": 19828, "\u0120Bass": 19829, "idi": 19830, "itsu": 19831, "\u0120assure": 19832, "\u0120congrat": 19833, "\u0120businessman": 19834, "\u0120Boost": 19835, "close": 19836, "\u0120lied": 19837, "\u0120sciences": 19838, "\u0120Omega": 19839, "\u0120Graphics": 19840, "\u0120<=": 19841, "spoken": 19842, "\u0120connectivity": 19843, "Saturday": 19844, "\u0120Avengers": 19845, "\u0120toggle": 19846, "\u0120ankle": 19847, "\u0120nationalist": 19848, "model": 19849, "\u0120Pool": 19850, "ophobia": 19851, "Var": 19852, "\u0120Mons": 19853, "atories": 19854, "\u0120aggressively": 19855, "Clear": 19856, "Forge": 19857, "acters": 19858, "\u0120hedge": 19859, "\u0120pipes": 19860, "\u0120blunt": 19861, "\u0120sq": 19862, "\u0120remotely": 19863, "Wed": 19864, "asers": 19865, "\u0120refriger": 
19866, "\u0120tiles": 19867, "\u0120rescued": 19868, "\u0120comprised": 19869, "insky": 19870, "\u0120manif": 19871, "avanaugh": 19872, "\u0120prolifer": 19873, "\u0120aligned": 19874, "xml": 19875, "\u0120triv": 19876, "\u0120coordination": 19877, "\u0120PER": 19878, "\u0120Quote": 19879, "134": 19880, "bf": 19881, "\u0120Saw": 19882, "\u0120termination": 19883, "\u0120190": 19884, "\u0120additions": 19885, "\u0120trio": 19886, "\u0120projections": 19887, "\u0120positively": 19888, "\u0120inclusive": 19889, "\u0120membr": 19890, "1990": 19891, "older": 19892, "\u0120practiced": 19893, "inkle": 19894, "Arch": 19895, "\u0120starters": 19896, "arius": 19897, "\u0120intermediate": 19898, "\u0120Benef": 19899, "\u0120Killer": 19900, "\u0120interventions": 19901, "\u0120Kil": 19902, "\u0120Flying": 19903, "Inv": 19904, "\u0120premature": 19905, "\u0120psychiatric": 19906, "\u0120indie": 19907, "\u0120collar": 19908, "\u0120Rainbow": 19909, "afi": 19910, "\u0120disruption": 19911, "\u0120FOX": 19912, "casting": 19913, "\u0120misdem": 19914, "cro": 19915, "\u0120wipe": 19916, "ardon": 19917, "\u0120bast": 19918, "\u0120Tommy": 19919, "\u0120Representative": 19920, "\u0120belly": 19921, "\u0120PO": 19922, "\u0120Breitbart": 19923, "132": 19924, "\u0120messaging": 19925, "Should": 19926, "References": 19927, "\u0120GRE": 19928, "istical": 19929, "LP": 19930, "\u0120Cav": 19931, "\u0120Crazy": 19932, "\u0120intuitive": 19933, "keeping": 19934, "\u0120Moss": 19935, "\u0120discontin": 19936, "\u0120Module": 19937, "\u0120unrelated": 19938, "\u0120Practice": 19939, "\u0120Transport": 19940, "\u0120statistically": 19941, "orns": 19942, "\u0120sized": 19943, "pu": 19944, "\u0120caf": 19945, "\u0120Worlds": 19946, "\u0120Rodgers": 19947, "\u0120Lun": 19948, "\u0120Comic": 19949, "living": 19950, "\u0120cared": 19951, "\u0120climbed": 19952, "){": 19953, "\u0120consisted": 19954, "\u0120medieval": 19955, "folk": 19956, "\u0120hacked": 19957, "\u0120dire": 19958, "\u0120Hermione": 19959, "\u0120tended": 19960, "ceans": 19961, "Daniel": 19962, "went": 19963, "\u0120legislators": 19964, "\u0120redes": 19965, "games": 19966, "\u0120gn": 19967, "amiliar": 19968, "\u0120++": 19969, "ggy": 19970, "threat": 19971, "\u0120magnet": 19972, "\u0120perceive": 19973, "\u0120zip": 19974, "\u0120indictment": 19975, "\u0120critique": 19976, "gard": 19977, "\u0120Safe": 19978, "\u0120Cream": 19979, "\u0120advent": 19980, "oba": 19981, "\u0120vowed": 19982, "ousands": 19983, "\u0120ski": 19984, "\u0120abortions": 19985, "uart": 19986, "\u0120stunned": 19987, "\u0120advancing": 19988, "\u0120lacked": 19989, "\u0120\\\"": 19990, "\u0120schizophren": 19991, "\u0120elegant": 19992, "\u0120conferences": 19993, "\u0120canceled": 19994, "\u0120Hudson": 19995, "\u0120Hopefully": 19996, "\u0120trump": 19997, "\u0120frequencies": 19998, "\u0120meteor": 19999, "\u0120Junior": 20000, "\u0120Fleet": 20001, "\u0120Malcolm": 20002, "\u0120Tools": 20003, "\u0120........": 20004, "\u0120hobby": 20005, "\u0120Europeans": 20006, "\u01201500": 20007, "\u0120Into": 20008, "\u0120sway": 20009, "\u0120Appro": 20010, "\u0120Compl": 20011, "Community": 20012, "\u0120tide": 20013, "\u0120Summit": 20014, "\u00e4\u00bb": 20015, "\u0120intervals": 20016, "\u0120Ether": 20017, "\u0120habitat": 20018, "\u0120Stevens": 20019, "lishing": 20020, "\u0120Domain": 20021, "\u0120triggers": 20022, "\u0120chasing": 20023, "\u0120charm": 20024, "\u0120Flower": 20025, "itored": 20026, "\u0120blessing": 20027, "\u0120textures": 20028, "Five": 20029, "\u0120liquor": 
20030, "RP": 20031, "FIN": 20032, "\u01201962": 20033, "CAR": 20034, "Unknown": 20035, "\u0120resil": 20036, "\u0120Lily": 20037, "\u0120abundance": 20038, "\u0120predictable": 20039, "rar": 20040, "\u0120bullshit": 20041, "leen": 20042, "chet": 20043, "Mor": 20044, "Much": 20045, "\u00e4\u00b9": 20046, "\u0120emphasized": 20047, "\u0120crust": 20048, "\u0120primitive": 20049, "\u0120enjoyable": 20050, "\u0120Pictures": 20051, "\u0120teammate": 20052, "pler": 20053, "\u0120Tol": 20054, "\u0120Kane": 20055, "\u0120summoned": 20056, "thy": 20057, "rama": 20058, "\u0120Honda": 20059, "\u0120realizing": 20060, "\u0120quicker": 20061, "\u0120concentrate": 20062, "clear": 20063, "\u0120210": 20064, "\u0120Erdogan": 20065, "aris": 20066, "\u0120responds": 20067, "\u0120BI": 20068, "\u0120eligibility": 20069, "\u0120pushes": 20070, "\u0120Idaho": 20071, "\u0120aggrav": 20072, "\u0120ruins": 20073, "urations": 20074, "\u0120bans": 20075, "\u0120anat": 20076, "share": 20077, "\u0120grind": 20078, "hin": 20079, "umen": 20080, "\u0120utilities": 20081, "\u0120Yankees": 20082, "\u0120databases": 20083, "\u0120DD": 20084, "\u0120displaced": 20085, "\u0120dependencies": 20086, "\u0120stimulation": 20087, "hun": 20088, "houses": 20089, "\u0120Pretty": 20090, "\u0120Ravens": 20091, "\u0120TODAY": 20092, "\u0120associates": 20093, "\u0120therape": 20094, "cled": 20095, "\u0120deer": 20096, "\u0120repairs": 20097, "rentice": 20098, "\u0120receptors": 20099, "\u0120remed": 20100, "\u0120Ce": 20101, "\u0120marriages": 20102, "\u0120ballots": 20103, "\u0120Soldier": 20104, "\u0120hilarious": 20105, "opl": 20106, "138": 20107, "\u0120inherently": 20108, "\u0120ignorant": 20109, "\u0120bounce": 20110, "\u0120Easter": 20111, "RELATED": 20112, "\u0120Currency": 20113, "EV": 20114, "\u00e3\u0125\u0140": 20115, "\u0120Lead": 20116, "\u0120deceased": 20117, "Brien": 20118, "\u0120Musk": 20119, "JS": 20120, "\u0120merge": 20121, "hearted": 20122, "creat": 20123, "mitt": 20124, "mund": 20125, "\u0120\u00e2\u0122\u012d": 20126, "\u0120Bag": 20127, "\u0120projection": 20128, "\u0120java": 20129, "\u0120Standards": 20130, "\u0120Leonard": 20131, "\u0120coconut": 20132, "\u0120Population": 20133, "\u0120traject": 20134, "\u0120imply": 20135, "\u0120curiosity": 20136, "\u0120DB": 20137, "\u0120Fresh": 20138, "\u0120Por": 20139, "\u0120heavier": 20140, "neys": 20141, "gomery": 20142, "\u0120deserved": 20143, "\u0120phrases": 20144, "\u0120GC": 20145, "\u0120yeast": 20146, "desc": 20147, "Death": 20148, "\u0120reboot": 20149, "\u0120metadata": 20150, "ICAL": 20151, "\u0120repay": 20152, "\u0120Independence": 20153, "\u0120suburban": 20154, "icals": 20155, "\u0120atop": 20156, "\u0120allocation": 20157, "generation": 20158, "\u0120Gram": 20159, "\u0120moisture": 20160, "\u0120pine": 20161, "\u0120Liberals": 20162, "\u0120aides": 20163, "\u0120underest": 20164, "\u0120Berry": 20165, "\u0120ceremon": 20166, "370": 20167, "astrous": 20168, "\u0120Pirates": 20169, "\u0120tense": 20170, "\u0120Industries": 20171, "\u0120Appeals": 20172, "\u0120Near": 20173, "\u0120\u00e8\u00a3\u0131\u00e7": 20174, "\u0120lovers": 20175, "\u0120CAP": 20176, "\u0120Craw": 20177, "\u0120giants": 20178, "\u0120efficacy": 20179, "Element": 20180, "\u0120Behavior": 20181, "\u0120Toyota": 20182, "\u0120intest": 20183, "Priv": 20184, "AI": 20185, "\u0120maneuver": 20186, "\u0120perfection": 20187, "\u0120bang": 20188, "paper": 20189, "rill": 20190, "George": 20191, "border": 20192, "inters": 20193, "\u0120Seth": 20194, "\u0120clues": 20195, "\u0120Levi": 
20196, "\u0120Revenue": 20197, "147": 20198, "\u0120vapor": 20199, "\u0120fortunate": 20200, "\u0120threatens": 20201, "\u0120vet": 20202, "\u0120dependency": 20203, "ersed": 20204, "article": 20205, "\u0120Blizzard": 20206, "\u0120chlor": 20207, "\u0120minus": 20208, "\u0120Bills": 20209, "\u0120cryptocurrency": 20210, "\u0120metabolism": 20211, "tering": 20212, "\u0120pestic": 20213, "steps": 20214, "\u0120Treasure": 20215, "racted": 20216, "\u0120Constant": 20217, "\u0120temp": 20218, "139": 20219, "\u0120Detective": 20220, "urally": 20221, "\u0120recovering": 20222, "\u0120cortex": 20223, "\u0120144": 20224, "closed": 20225, "\u0120prejudice": 20226, "aunted": 20227, "\u0120storms": 20228, "\u0120NOW": 20229, "\u0120machinery": 20230, "Address": 20231, "\u0120compelled": 20232, "270": 20233, "\u0120despair": 20234, "bane": 20235, "\u0120vegetable": 20236, "\u0120beds": 20237, "Learn": 20238, "\u0120colorful": 20239, "\u0120spike": 20240, "\u0120margins": 20241, "\u0120sympathy": 20242, "\u0120workshop": 20243, "\u0120CBC": 20244, "Sat": 20245, "\u0120burns": 20246, "\u0120Gender": 20247, "\u0120129": 20248, "\u0120Cable": 20249, "\u0120debts": 20250, "\u0120Theresa": 20251, "\u0120reflecting": 20252, "\u0120airst": 20253, "\u0120rim": 20254, "ramid": 20255, "\u0120weaknesses": 20256, "Writ": 20257, "oggle": 20258, "ti": 20259, "\u0120Charge": 20260, "\u0120weighed": 20261, "\u0120(.": 20262, "\u0120laughter": 20263, "\u0120router": 20264, "\u0120Democracy": 20265, "Dear": 20266, "\u0120hasht": 20267, "\u0120dy": 20268, "\u0120hints": 20269, "running": 20270, "\u0120finishes": 20271, "arus": 20272, "Mass": 20273, "result": 20274, "ascus": 20275, "\u0120vintage": 20276, "\u0120conqu": 20277, "\u0120wildly": 20278, "acist": 20279, "\u0120lingu": 20280, "\u0120protagonist": 20281, "strom": 20282, "teenth": 20283, "\u0120Solo": 20284, "mac": 20285, "filled": 20286, "\u0120renown": 20287, "itives": 20288, "\u0120motive": 20289, "\u0120Antar": 20290, "\u0120Mann": 20291, "\u0120Adjust": 20292, "\u0120rockets": 20293, "\u0120troubling": 20294, "ei": 20295, "\u0120organisms": 20296, "assis": 20297, "Christian": 20298, "\u0120145": 20299, "\u0120Hass": 20300, "\u0120swall": 20301, "\u0120wax": 20302, "\u0120Survival": 20303, "VS": 20304, "\u0120Murd": 20305, "vd": 20306, "standard": 20307, "\u0120dragons": 20308, "\u0120acceleration": 20309, "rational": 20310, "final": 20311, "\u0120paired": 20312, "\u0120Ethereum": 20313, "\u0120interfaces": 20314, "\u0120resent": 20315, "\u0120artifacts": 20316, "\u00c5\u00ab": 20317, "arel": 20318, "\u0120competitor": 20319, "\u0120Nicholas": 20320, "\u0120Surface": 20321, "cpp": 20322, "\u0120Tot": 20323, "\u0120economically": 20324, "\u0120organised": 20325, "\u0120enforced": 20326, "inho": 20327, "\u0120varieties": 20328, "\u0120abdom": 20329, "\u0120Bailey": 20330, "idav": 20331, "\u0120Salv": 20332, "paid": 20333, "\u0120altitude": 20334, "essert": 20335, "\u0120Gutenberg": 20336, "area": 20337, "opoulos": 20338, "\u0120professors": 20339, "iggs": 20340, "\u0120Fate": 20341, "hey": 20342, "\u01203000": 20343, "Dist": 20344, "\u0120twins": 20345, "cill": 20346, "\u0120Maps": 20347, "\u0120traps": 20348, "\u0120weed": 20349, "\u0120Kiss": 20350, "\u0120yoga": 20351, "\u0120recipients": 20352, "\u0120Westminster": 20353, "\u0120pools": 20354, "\u0120Walmart": 20355, "188": 20356, "\u0120Schools": 20357, "attack": 20358, "\u0120ARM": 20359, "paragraph": 20360, "Warning": 20361, "jl": 20362, "\u0120selfish": 20363, "anchez": 20364, "\u0120Heights": 20365, 
"Fre": 20366, "\u0120Soph": 20367, "\u0120--------------------------------": 20368, "tml": 20369, "333": 20370, "\u0120raids": 20371, "\u0120satellites": 20372, "KEY": 20373, "\u0120lasts": 20374, "\u00d1\u0124": 20375, "Ins": 20376, "\u0120Dame": 20377, "\u0120unpredict": 20378, "///": 20379, "ghai": 20380, "\u0120artillery": 20381, "\u0120cruise": 20382, "\u0120gel": 20383, "\u0120Cabinet": 20384, "\u0120blows": 20385, "\u0120Esp": 20386, "\u0120proximity": 20387, "othe": 20388, "\u0120Skills": 20389, "\u0120Upper": 20390, "obo": 20391, "\u0120NDP": 20392, "\u0120enjoys": 20393, "\u0120repeating": 20394, "\u0120Construction": 20395, "\u0120Questions": 20396, "Hillary": 20397, "\u0120uint": 20398, "\u0120processors": 20399, "\u0120Gibson": 20400, "\u0120Multiple": 20401, "qa": 20402, "\u0120Bom": 20403, "\u0120Miles": 20404, "ventional": 20405, "\u0120hurts": 20406, "skin": 20407, "\u0120AIDS": 20408, "\u0120advisers": 20409, "\u0120Root": 20410, "\u0120methodology": 20411, "\u0120Dale": 20412, "\u0120deton": 20413, "\u0120Knowledge": 20414, "sequently": 20415, "\u0120121": 20416, "\u0120connects": 20417, "Cy": 20418, "\u0120Danger": 20419, "\u0120contributors": 20420, "\u0120Bent": 20421, "\u0120brass": 20422, "\u0120Guns": 20423, "into": 20424, "\u0120Fortune": 20425, "\u0120broker": 20426, "balance": 20427, "\u0120lengths": 20428, "\u0120vic": 20429, "\u0120averaging": 20430, "\u0120appropriately": 20431, "\u0120Camera": 20432, "\u0120sandwich": 20433, "\u0120CDC": 20434, "\u0120coordinate": 20435, "\u0120navig": 20436, "\u0120goodness": 20437, "laim": 20438, "\u0120brake": 20439, "\u0120extremist": 20440, "\u0120Wake": 20441, "\u0120Mend": 20442, "\u0120Tiny": 20443, "\u0120COL": 20444, "\u0120RF": 20445, "\u0120Dual": 20446, "\u0120Wine": 20447, "Case": 20448, "\u0120refined": 20449, "\u0120lamp": 20450, "Lead": 20451, "\u0120bapt": 20452, "\u0120Carb": 20453, "\u0120Sadd": 20454, "\u0120Minneapolis": 20455, "PDF": 20456, "Early": 20457, "\u0120Hidden": 20458, "Its": 20459, "\u0120TIME": 20460, "\u0120pap": 20461, "\u0120commissioned": 20462, "\u0120Few": 20463, "\u0120Colts": 20464, "\u0120Bren": 20465, "\u0120bothered": 20466, "\u0120likewise": 20467, "Exper": 20468, "\u0120Schw": 20469, "cry": 20470, "nn": 20471, "\u0120Mitch": 20472, "imon": 20473, "MG": 20474, "bm": 20475, "UMP": 20476, "rays": 20477, "\u0120registry": 20478, "\u0120270": 20479, "achine": 20480, "rella": 20481, "anting": 20482, "00000": 20483, "\u0120ruined": 20484, "spot": 20485, "\u0120ta": 20486, "\u0120maximize": 20487, "\u0120inconven": 20488, "Dead": 20489, "Human": 20490, "Enabled": 20491, "\u0120Marie": 20492, "\u0120chill": 20493, "\u0120Paradise": 20494, "\u0120starring": 20495, "\u0120Latino": 20496, "\u0120Protocol": 20497, "\u0120EVER": 20498, "\u0120suppliers": 20499, "message": 20500, "\u0120Brock": 20501, "\u0120serum": 20502, "\u00e2\u0138\u012a\u00e2\u0138\u012a\u00e2\u0138\u012a\u00e2\u0138\u012a": 20503, "\u0120encomp": 20504, "\u0120ambition": 20505, "uese": 20506, "\u0120arrows": 20507, "Andrew": 20508, "\u0120antenna": 20509, "\u01201961": 20510, "\u0120Bark": 20511, "\u0120bool": 20512, "\u00e3\u0124\u00aa": 20513, "\u0120Storage": 20514, "\u0120railway": 20515, "\u0120tougher": 20516, "\u0120Cad": 20517, "\u0120washing": 20518, "Py": 20519, "']": 20520, "embed": 20521, "\u0120Memphis": 20522, "ackle": 20523, "\u0120famously": 20524, "\u0120Fortunately": 20525, "ovies": 20526, "\u0120mindset": 20527, "\u0120sneak": 20528, "\u0120Dh": 20529, "RAW": 20530, "\u0120Simpson": 20531, 
"\u0120livest": 20532, "\u0120landmark": 20533, "\u0120cement": 20534, "Low": 20535, "\u0120thrilled": 20536, "\u0120Course": 20537, "inel": 20538, "\u0120chuck": 20539, "idate": 20540, "global": 20541, "\u0120whit": 20542, "\u0120\u00ef\u00bf\u00bd": 20543, "adays": 20544, "ski": 20545, "\u0120SV": 20546, "\u0120viruses": 20547, "306": 20548, "\u0120Respons": 20549, "\u0120theaters": 20550, "\u0120Branch": 20551, "\u0120Geneva": 20552, "\u0120MK": 20553, "\u0120unbeliev": 20554, "\u0120communist": 20555, "Original": 20556, "\u0120Received": 20557, "\u0120Transfer": 20558, "\u0120Arg": 20559, "Input": 20560, "\u0120Strategy": 20561, "\u0120palace": 20562, "thening": 20563, "Dri": 20564, "\u0120sentencing": 20565, "umbnail": 20566, "\u0120pins": 20567, "recy": 20568, "\u0120siblings": 20569, "Getting": 20570, "\u0120BU": 20571, "\u0120Northwest": 20572, "\u0120prolonged": 20573, "\u0120Sakura": 20574, "Comb": 20575, "\u0120Bour": 20576, "\u0120inadequate": 20577, "\u0120Kash": 20578, "\u0120username": 20579, "\u0120Improve": 20580, "\u0120battling": 20581, "\u0120MAC": 20582, "\u0120curriculum": 20583, "\u0120soda": 20584, "\u0120Cannon": 20585, "\u0120sensible": 20586, "spons": 20587, "December": 20588, "\u0120wicked": 20589, "\u0120Pengu": 20590, "\u0120dictators": 20591, "\u0120Hearts": 20592, "ogyn": 20593, "\u0120similarities": 20594, "\u0120Stats": 20595, "\u0120hollow": 20596, "itations": 20597, "\":[": 20598, "\u0120hover": 20599, "\u0120Listen": 20600, "sch": 20601, "Sund": 20602, "\u0120cad": 20603, "\u0120Parks": 20604, "\u0120lur": 20605, "\u0120hype": 20606, "\u0120Lem": 20607, "NAME": 20608, "isure": 20609, "Friday": 20610, "\u0120shoots": 20611, "\u0120closes": 20612, "\u0120db": 20613, "\u0120Ridge": 20614, "\u0120Different": 20615, "\u0120replies": 20616, "\u0120Broadway": 20617, "opers": 20618, "\u0120intoler": 20619, "\u0120Zeus": 20620, "akespe": 20621, "\u0120proprietary": 20622, "\u0120requesting": 20623, "\u0120controllers": 20624, "\u0120MIN": 20625, "imedia": 20626, "becca": 20627, "\u0120expans": 20628, "\u0120oils": 20629, "Bot": 20630, "\u0120Chand": 20631, "\u0120printer": 20632, "\u0120topped": 20633, "\u0120POL": 20634, "\u0120Earlier": 20635, "Social": 20636, "avin": 20637, "\u0120decreases": 20638, "\u0120Seb": 20639, "\u0120specifications": 20640, "\u0120Blast": 20641, "\u0120Kurt": 20642, "\u0120freel": 20643, "Brown": 20644, "\u0120dilig": 20645, "roe": 20646, "\u0120Problem": 20647, "\u0120Quad": 20648, "\u0120decentral": 20649, "\u0120Vector": 20650, "anut": 20651, "\u0120plugins": 20652, "\u0120Gregory": 20653, "\u0120fucked": 20654, "elines": 20655, "\u0120Ambassador": 20656, "take": 20657, "\u0120cleans": 20658, "ongyang": 20659, "Anonymous": 20660, "stro": 20661, "\"}": 20662, "aline": 20663, "\u0120Odd": 20664, "\u0120Eug": 20665, "216": 20666, "\u0120boil": 20667, "\u0120Powers": 20668, "\u0120nurses": 20669, "Obviously": 20670, "\u0120Technical": 20671, "\u0120exceeded": 20672, "ORS": 20673, "\u0120extremists": 20674, "\u0120traces": 20675, "expl": 20676, "\u0120comr": 20677, "\u0120Sach": 20678, ")/": 20679, "\u0120masks": 20680, "\u0120sci": 20681, "Bon": 20682, "\u0120regression": 20683, "wegian": 20684, "\u0120advisor": 20685, "itures": 20686, "\u0120Vo": 20687, "example": 20688, "\u0120Instruct": 20689, "\u0120siege": 20690, "\u0120reductions": 20691, "ptr": 20692, "\u0120statutory": 20693, "\u0120removes": 20694, "\u0120puck": 20695, "redits": 20696, "\u0120bee": 20697, "\u0120salad": 20698, "\u0120promotions": 20699, "\u0120Joshua": 20700, 
"withstanding": 20701, "ETH": 20702, "\u0120Cha": 20703, "imus": 20704, "\u0120expenditure": 20705, "aunting": 20706, "\u0120delighted": 20707, "\u0120155": 20708, "beh": 20709, "\u0120carpet": 20710, "\u0120Spart": 20711, "\u0120jungle": 20712, "lists": 20713, "\u0120bullying": 20714, "\u0120Nobel": 20715, "\u0120Glen": 20716, "\u0120referenced": 20717, "\u0120introduces": 20718, "sein": 20719, "\u0120chopped": 20720, "glass": 20721, "\u0120Wrest": 20722, "\u0120neutrality": 20723, "\u0120\u00e2\u013b": 20724, "\u0120investigator": 20725, "\u0120shelves": 20726, "\u0120unconstitutional": 20727, "\u0120reproduction": 20728, "\u0120merchant": 20729, "mia": 20730, "\u0120metrics": 20731, "\u0120explosives": 20732, "\u0120Sonia": 20733, "\u0120bodily": 20734, "\u0120thickness": 20735, "\u0120predominantly": 20736, "\u0120Ability": 20737, "\u0120monitored": 20738, "ICH": 20739, "\u0120].": 20740, "\u0120Martinez": 20741, "\u0120visibility": 20742, "\u0120queries": 20743, "\u0120genocide": 20744, "\u0120Warfare": 20745, "Query": 20746, "\u0120studios": 20747, "\u0120embry": 20748, "\u0120corridor": 20749, "\u0120cleaned": 20750, "complete": 20751, "\u0120MH": 20752, "\u0120enrollment": 20753, "INGS": 20754, "\u0120impacted": 20755, "\u0120disastrous": 20756, "\u0120Yun": 20757, "\u0120Claire": 20758, "\u0120Basically": 20759, "yt": 20760, "usterity": 20761, "\u0120indirectly": 20762, "wik": 20763, "\u0120dod": 20764, "\u0120Carr": 20765, "\u0120amp": 20766, "\u0120prohibit": 20767, "\u0120Initial": 20768, "\u0120Rd": 20769, "iji": 20770, "\u0120educate": 20771, "corn": 20772, "iott": 20773, "\u0120Beauty": 20774, "\u0120detective": 20775, "\u0120Conn": 20776, "since": 20777, "\u0120stagger": 20778, "\u0120obese": 20779, "\u0120bree": 20780, "ologic": 20781, "isse": 20782, "walker": 20783, "\u0120blades": 20784, "\u0120lawful": 20785, "func": 20786, "\u0120Behind": 20787, "\u0120appetite": 20788, "\u0120(*": 20789, "\u0120tennis": 20790, "\u0120offspring": 20791, "\u0120jets": 20792, "\u0120structured": 20793, "\u0120aforementioned": 20794, "Nov": 20795, "\u0120scaling": 20796, "fill": 20797, "\u0120stew": 20798, "\u0120curb": 20799, "\u0120Stephan": 20800, "edIn": 20801, "SF": 20802, "obic": 20803, "\u00e9\u0143\u0136": 20804, "oug": 20805, "\u0120MM": 20806, "\u0120genetically": 20807, "opez": 20808, "136": 20809, "\u0120umb": 20810, "ancers": 20811, "\u0120cohort": 20812, "\u0120merchandise": 20813, "\u0120imposing": 20814, "\u0120Legislature": 20815, "\u0120Archive": 20816, "ivia": 20817, "\u0120Naval": 20818, "\u0120offences": 20819, "\u0120miracle": 20820, "\u0120snapped": 20821, "\u0120foes": 20822, "\u0120extensively": 20823, "\u0120Raf": 20824, "\u0120cater": 20825, "edience": 20826, "Kit": 20827, "\u0120Bin": 20828, "\u0120recommends": 20829, "\u0120Cities": 20830, "\u0120rigid": 20831, "\u0120READ": 20832, "\u0120Noble": 20833, "\u0120Tian": 20834, "\u0120certificates": 20835, "antis": 20836, "oiler": 20837, "\u0120Buddhist": 20838, "did": 20839, "\u0120surveyed": 20840, "\u0120downward": 20841, "\u0120prints": 20842, "\u0120Motion": 20843, "ronics": 20844, "\u0120Sans": 20845, "ossibly": 20846, "uctions": 20847, "\u0120colonies": 20848, "\u0120Danish": 20849, "unit": 20850, "\u0120spoil": 20851, "\u0120advisory": 20852, "berries": 20853, "Plan": 20854, "\u0120specification": 20855, "ophers": 20856, "\u0120Resource": 20857, "\u0120shirts": 20858, "prisingly": 20859, "communications": 20860, "\u0120trivial": 20861, "\u0120mentioning": 20862, "isexual": 20863, "\u0120supplements": 
20864, "\u0120supervision": 20865, "BP": 20866, "vor": 20867, "\u0120wit": 20868, "\u0120cooldown": 20869, "\u0120plaintiff": 20870, "\u0120Reviews": 20871, "\u0120Sri": 20872, "\u0120Mint": 20873, "\u0120Sugar": 20874, "\u0120afterward": 20875, "\u0120Priest": 20876, "\u0120Investment": 20877, "ogene": 20878, "\u0120Taking": 20879, "\u0120stretching": 20880, "\u0120inflammation": 20881, "\u0120Tehran": 20882, "\u0120lining": 20883, "\u0120freezing": 20884, "\u0120Entity": 20885, "\u0120inspiring": 20886, "special": 20887, "price": 20888, "\u0120sue": 20889, "\u0120Porter": 20890, "ounge": 20891, "ETA": 20892, "\u0120Derek": 20893, "\u0120Luis": 20894, "uo": 20895, "ymph": 20896, "\u0120exterior": 20897, "ihil": 20898, "\u0120Ashley": 20899, "inator": 20900, "\u0120nutrients": 20901, "\u0120Thrones": 20902, "\u0120finances": 20903, "\u0120Inspect": 20904, "\u0120specially": 20905, "\u0120Required": 20906, "\u0120PTS": 20907, "\u0120Violence": 20908, "ointed": 20909, "shots": 20910, "\u0120excerpt": 20911, "coon": 20912, "INS": 20913, "\u0120Gri": 20914, "\u0120recognised": 20915, "Week": 20916, "Young": 20917, "\u0120vom": 20918, "isle": 20919, "\u0120Curry": 20920, "\u0120Buddh": 20921, "\u0120notebook": 20922, "\u0120durable": 20923, "/?": 20924, "\u0120Gad": 20925, "\u0120Pupp": 20926, "\u0120forgive": 20927, "park": 20928, "\u0120personalities": 20929, "analysis": 20930, "clamation": 20931, "\u0120elevator": 20932, "\u0120warehouse": 20933, "\u0120Role": 20934, "unn": 20935, "\u0120illustration": 20936, "\u0120Scan": 20937, "\u0120atmospheric": 20938, "Import": 20939, "ANC": 20940, "ricted": 20941, "fu": 20942, "010": 20943, "\u0120arche": 20944, "\u0120rewarded": 20945, "akespeare": 20946, "\u0120internally": 20947, "\u0120RBI": 20948, "alker": 20949, "\u0120elephant": 20950, "owitz": 20951, "\u0120Pizza": 20952, "\u0120bipartisan": 20953, "\u00c3\u00a9s": 20954, "\u0120slowed": 20955, "\u0120Stark": 20956, "\u0120override": 20957, "OUS": 20958, "\u0120320": 20959, "undreds": 20960, "\u0120Deck": 20961, "\u0120Census": 20962, "bee": 20963, "146": 20964, "otor": 20965, "\u0120ip": 20966, "\u0120ub": 20967, "ocations": 20968, "\u0120Button": 20969, "rice": 20970, "\u0120cripp": 20971, "fff": 20972, "\u0120originated": 20973, "\u0120overwhelmed": 20974, "appa": 20975, "\u0120foremost": 20976, "\u00e2\u0122\u0133": 20977, "\u0120LEG": 20978, "release": 20979, "eatured": 20980, "atches": 20981, "\u0120reps": 20982, "\u0120lending": 20983, "\u0120Reference": 20984, "\u0120Client": 20985, "165": 20986, "venth": 20987, "Complete": 20988, "\u0120Patrol": 20989, "\u0120sworn": 20990, "cam": 20991, "\u0120shuttle": 20992, "\u0120Ralph": 20993, "\u0120hometown": 20994, "-,": 20995, "onal": 20996, "\u0120BP": 20997, "\u00e5\u0131": 20998, "\u0120persuade": 20999, "\u0120Alexand": 21000, "\u0120combines": 21001, "\u0120vivid": 21002, "\u0120Lag": 21003, "\u0120encoding": 21004, "\u0120salvation": 21005, "wen": 21006, "\u0120Recovery": 21007, "iya": 21008, "University": 21009, "\u0120Biden": 21010, "\u0120budgets": 21011, "\u0120Texans": 21012, "fits": 21013, "\u0120honored": 21014, "\u0120python": 21015, "TD": 21016, "###": 21017, "clone": 21018, "\u0120blink": 21019, "\u0120Liquid": 21020, "\u0120unemployed": 21021, "\u0120clashes": 21022, "\u0120Counsel": 21023, "\u0120directing": 21024, "\u0120punct": 21025, "\u0120Falcons": 21026, "\u0120shark": 21027, "\u0120Damascus": 21028, "\u0120jeans": 21029, "\u0120embark": 21030, "\u0120seize": 21031, "\u0120upwards": 21032, "280": 21033, "\u0120Ez": 
21034, "\u0120Anything": 21035, "\u0120exotic": 21036, "lower": 21037, "\u0120Creator": 21038, "\u0120Um": 21039, "\u0120suburbs": 21040, "berger": 21041, "\u0120Wend": 21042, "\u0120mint": 21043, "\u0120XX": 21044, "\u0120Dro": 21045, "\u0120suffers": 21046, "\u0120herb": 21047, "tree": 21048, "\u0120fragile": 21049, "\u0120flooded": 21050, "\u0120Alcohol": 21051, "olean": 21052, "nyder": 21053, "\u0120KO": 21054, "Fram": 21055, "\u0120136": 21056, "\u0120owed": 21057, "\u0120Melee": 21058, "\u0120Hash": 21059, "\u0120whisk": 21060, "\u0120sudo": 21061, "rr": 21062, "Quick": 21063, "appro": 21064, "\u0120ii": 21065, "\u0120Examples": 21066, "hee": 21067, "\u0120promotes": 21068, "perature": 21069, "kar": 21070, "\u0120Honor": 21071, "\u0120sodium": 21072, "\u0120Lif": 21073, "rosso": 21074, "intendent": 21075, "\u0120correspondent": 21076, "Found": 21077, "secret": 21078, "\u0120identifies": 21079, "agne": 21080, "\u0120lou": 21081, "\u0120PP": 21082, "\u0120coincidence": 21083, "move": 21084, "\u0120militia": 21085, "\u0120infiltr": 21086, "\u0120Primary": 21087, "\u0120pitching": 21088, "\u0120Ib": 21089, "\u0120GOOD": 21090, "\u00e3\u0124\u00b8": 21091, "\u0120Wizards": 21092, "iral": 21093, "\u0120Venus": 21094, "RR": 21095, "\u0120\u00e2\u0122\u0137": 21096, "\u0120Casey": 21097, "\u0120sadly": 21098, "\u0120admire": 21099, "\u0120embarrassed": 21100, "cb": 21101, "Mel": 21102, "\u0120tubes": 21103, "\u0120beautifully": 21104, "\u0120Queensland": 21105, "Below": 21106, "rez": 21107, "quet": 21108, "pleasant": 21109, "\u0120\u00c2\u00ab": 21110, "Camp": 21111, "\u0120decisive": 21112, "1998": 21113, "\u0120Lamb": 21114, "utton": 21115, "hn": 21116, "\u0120Jagu": 21117, "aunder": 21118, "\u0120Cord": 21119, "\u0120clerk": 21120, "\u0120caffe": 21121, "\u0120wiped": 21122, "\u0120reim": 21123, "\u0120Mountains": 21124, "\u0120imprisoned": 21125, "\u0120develops": 21126, "\u0120Pra": 21127, "\u0120modeling": 21128, "Anyone": 21129, "ancel": 21130, "\u0120Sit": 21131, "\u0120shields": 21132, "\u0120lawn": 21133, "\u0120cardiovascular": 21134, "\u0120demonstrating": 21135, "\u0120parse": 21136, "\u0120Israelis": 21137, "\u0120euros": 21138, "143": 21139, "\u0120glorious": 21140, "inski": 21141, "ecd": 21142, "\u0120conditioning": 21143, "\u0120helpless": 21144, "\u0120microsc": 21145, "\u0120Harbor": 21146, "\u0120stakes": 21147, "\u0120260": 21148, "\u0120unequ": 21149, "\u0120Floyd": 21150, "\u0120damp": 21151, "\u0120apparatus": 21152, "\u0120Laws": 21153, "\u0120counters": 21154, "\u0120induce": 21155, "atable": 21156, "\u0120Ahmed": 21157, "\u0120slam": 21158, "November": 21159, "\u0120persist": 21160, "\u0120imminent": 21161, "\u00c3\u00a1n": 21162, "\u0120shred": 21163, "\u0120phases": 21164, "\u0120Edmonton": 21165, "\u0120Armstrong": 21166, "\u0120Meet": 21167, "\u0120Kitty": 21168, "\u00d1\u0122": 21169, "circ": 21170, "\u0120Adult": 21171, "\u0120arose": 21172, "\u0120Xen": 21173, "Dan": 21174, "gow": 21175, "\u0120superf": 21176, "\u0120Admir": 21177, "\u0120endure": 21178, "\u0120keyword": 21179, "yrus": 21180, "\u0120yarn": 21181, "\u0120pathway": 21182, "\u0120Hopkins": 21183, "midt": 21184, "\u0120censorship": 21185, "dependent": 21186, "\u0120instructor": 21187, "Sources": 21188, "\u0120toe": 21189, "\u0120balloon": 21190, "Nob": 21191, "\u0120swear": 21192, "\u0120Castro": 21193, "\u0120gloss": 21194, "\u0120Kavanaugh": 21195, "\u0120remarkably": 21196, "Photos": 21197, "\u0120Nom": 21198, "\u0120Southeast": 21199, "yers": 21200, "\u0120validation": 21201, "\u0120cannon": 
21202, "\u0120Victory": 21203, "\u0120Pierre": 21204, "\u0120cautious": 21205, "Audio": 21206, "\u0120fetch": 21207, "\u0120Gift": 21208, "\u0120Hyp": 21209, "\u0120remedy": 21210, "ZE": 21211, "\u0120scent": 21212, "\u0120beard": 21213, "\u0120Rut": 21214, "-\"": 21215, "\u0120patents": 21216, "Hy": 21217, "\u0120unjust": 21218, "\u0120potato": 21219, "\u0120forthcoming": 21220, "\u0120chef": 21221, "\u0120Rift": 21222, "affe": 21223, "\u0120ROM": 21224, "\u0120Launch": 21225, "\u0120pads": 21226, "\u0120Neo": 21227, "\u0120onset": 21228, "\u0120squeeze": 21229, "safe": 21230, "\u0120prefix": 21231, "\u0120TM": 21232, "\u0120Nearly": 21233, "\u0120Clinical": 21234, "\u0120Mental": 21235, "otiation": 21236, "\u0120Unic": 21237, "antry": 21238, "\u0120Cir": 21239, "\u0120epit": 21240, "\u00c3\u00a6": 21241, "\u0120extracted": 21242, "versely": 21243, "riad": 21244, "\u0120strains": 21245, "\u0120tops": 21246, "\u0120poem": 21247, "\u0120Randy": 21248, "\u0120Maple": 21249, "THER": 21250, "upiter": 21251, "\u0120SSD": 21252, "\u013c\u00e9": 21253, "\u0120uncon": 21254, "pering": 21255, "\u0120slept": 21256, "iners": 21257, "\u0120underwater": 21258, "\u0120Evidence": 21259, "gone": 21260, "205": 21261, "\u0120historians": 21262, "\u0120synthesis": 21263, "\u0120frog": 21264, "basketball": 21265, "\u0120vibrant": 21266, "\u0120subord": 21267, "\u0120365": 21268, "\u0120Dial": 21269, "\u0120cooperate": 21270, "HAHA": 21271, "\u0120greeted": 21272, "158": 21273, "\u0120jazz": 21274, "\u0120intox": 21275, "\u0120Walking": 21276, "\u0120supervisor": 21277, "\u0120Fusion": 21278, "\u0120Mercedes": 21279, "send": 21280, "Ham": 21281, "sd": 21282, "nl": 21283, "\u0120tours": 21284, "\u0120FIFA": 21285, "\u0120culp": 21286, "gd": 21287, "304": 21288, "\u0120pleas": 21289, "\u0120illustrates": 21290, "\u0120Colombia": 21291, "\u0120highlighting": 21292, "\u0120Summary": 21293, "\u0120exposing": 21294, "\u0120Dru": 21295, "\u0120irony": 21296, "ritional": 21297, "\u0120Carroll": 21298, "\u0120Ellis": 21299, "Pict": 21300, "\u0120Rapt": 21301, "\u0120adapter": 21302, "\u0120unm": 21303, "\u0120corpse": 21304, "\u0120celebrities": 21305, "Den": 21306, "atum": 21307, "\u0120Apocalypse": 21308, "\u0120Wag": 21309, "lining": 21310, "\u0120hormones": 21311, "Rub": 21312, "\u0120Xi": 21313, "\u0120Vaults": 21314, "208": 21315, "alkyrie": 21316, "inosaur": 21317, "\u0120feeds": 21318, "vity": 21319, "\u0120defeating": 21320, "Wait": 21321, "\u0120emphasize": 21322, "\u0120Steelers": 21323, "yrinth": 21324, "leys": 21325, "\u0120Whenever": 21326, "Currently": 21327, "\u0120Clock": 21328, "\u0120collectively": 21329, "anyon": 21330, "\u0120JP": 21331, "\u0120mentality": 21332, "\u0120downloads": 21333, "\u0120surroundings": 21334, "\u0120Barnes": 21335, "\u0120flagship": 21336, "\u0120indicators": 21337, "\u0120grapp": 21338, "January": 21339, "\u0120Elemental": 21340, "\u0120Athena": 21341, "ibal": 21342, "\u0120sights": 21343, "\u0120capita": 21344, "\u0120Treaty": 21345, "\u0120voiced": 21346, "\u0120Gaz": 21347, "lette": 21348, "\u0120ya": 21349, "\u0120expired": 21350, "Legend": 21351, "Hot": 21352, "nature": 21353, "\u0120unstable": 21354, "\u0120280": 21355, "\u00c3\u00ba": 21356, "Comment": 21357, "ALE": 21358, "\u0120quests": 21359, "\u0120handler": 21360, "nis": 21361, "\u0120versatile": 21362, "\u0120conceal": 21363, "engeance": 21364, "\u0120Interactive": 21365, "\u0120obsessed": 21366, "\u0120Dogs": 21367, "\u0120cracked": 21368, "Sound": 21369, "sv": 21370, "\u0120Dylan": 21371, "roads": 21372, 
"fx": 21373, "\u0120Catholics": 21374, "\u0120Hag": 21375, "\u0120slammed": 21376, "\u0120glowing": 21377, "sale": 21378, "\u0120tissues": 21379, "\u0120Chi": 21380, "nee": 21381, "\u0120cher": 21382, "sic": 21383, "urrection": 21384, "\u0120bacon": 21385, "ulatory": 21386, ").\"": 21387, "\u0120irregular": 21388, "FORM": 21389, "assed": 21390, "\u0120intentional": 21391, "\u0120compensate": 21392, "\u0120Speaking": 21393, "\u0120Sets": 21394, "153": 21395, "\u0120conventions": 21396, "bands": 21397, "emade": 21398, "\u0120ecc": 21399, "\u0120Winston": 21400, "\u0120Assassin": 21401, "\u0120Belgian": 21402, "\u0120dependence": 21403, "\u0120niche": 21404, "\u0120bark": 21405, "\u0120Jazz": 21406, "\u0120disadvantage": 21407, "\u0120gasoline": 21408, "\u0120165": 21409, "\u00e7\u013c\u0126": 21410, "essa": 21411, "module": 21412, "angular": 21413, "OY": 21414, "\u0120Treatment": 21415, "itas": 21416, "olation": 21417, "\u0120Arnold": 21418, "\u0120feud": 21419, "\u0120Nest": 21420, "\u0120theatre": 21421, "ewater": 21422, "\u0120minors": 21423, "olicy": 21424, "\u0120Haven": 21425, "division": 21426, "\u0120trunk": 21427, "Far": 21428, "\u0120Pull": 21429, "\u0120capturing": 21430, "\u01201800": 21431, "\u0120Teen": 21432, "\u0120exempl": 21433, "\u0120clinics": 21434, "\u0120Burg": 21435, "\u0120substit": 21436, "\u0120payload": 21437, "\u0120Lav": 21438, "\u0120Troy": 21439, "\u0120Witness": 21440, "\u0120fragments": 21441, "\u0120passwords": 21442, "\u0120gospel": 21443, "\u0120Gin": 21444, "\u0120tenants": 21445, "olith": 21446, "Six": 21447, "Previous": 21448, "\u0120Ages": 21449, "\u0120Darwin": 21450, "\u0120blat": 21451, "\u0120empathy": 21452, "smith": 21453, "bag": 21454, "\u0120Echo": 21455, "\u0120Camb": 21456, "\u0120Madd": 21457, "\u0120Boo": 21458, "\u0120rede": 21459, "\u0120Burning": 21460, "\u0120smoothly": 21461, "\u0120Adrian": 21462, "\u0120Vampire": 21463, "\u0120Monsters": 21464, "steam": 21465, "Style": 21466, "Ma": 21467, "rea": 21468, "\u0120Dwar": 21469, "alyst": 21470, "ursor": 21471, "\u0120elimination": 21472, "\u0120crypto": 21473, "cht": 21474, "\u0120Eternal": 21475, "\u00e2\u0122\u00a6]": 21476, "\u0120Sorce": 21477, "Ill": 21478, "NER": 21479, "\u0120uh": 21480, "Conclusion": 21481, "wage": 21482, "\u0120respir": 21483, "\u0120reminis": 21484, "hetical": 21485, "\u0120gy": 21486, "\u0120utilized": 21487, "icidal": 21488, "\u01201900": 21489, "\u0120hunters": 21490, "\u0120Swan": 21491, "\u0120React": 21492, "\u0120visitor": 21493, "\u0120Thanksgiving": 21494, "308": 21495, "Posts": 21496, "\u0120hips": 21497, "1997": 21498, "omers": 21499, "\u0120knocking": 21500, "\u0120Vehicle": 21501, "\u0120til": 21502, "\u0120138": 21503, "\u0120mi": 21504, "\u0120Investigation": 21505, "\u0120Kenya": 21506, "\u0120casino": 21507, "\u0120motives": 21508, "\u0120regain": 21509, "rex": 21510, "\u0120weekends": 21511, "\u0120stabbed": 21512, "boro": 21513, "\u0120exploited": 21514, "\u0120HAVE": 21515, "\u0120Television": 21516, "cock": 21517, "\u0120preparations": 21518, "\u0120endeav": 21519, "\u0120Remote": 21520, "\u0120Maker": 21521, "\u0120Produ": 21522, "\u0120Evan": 21523, "\u0120informational": 21524, "\u0120Louisville": 21525, "154": 21526, "\u0120Dreams": 21527, "\u0120plots": 21528, "\u0120Runner": 21529, "\u0120hurting": 21530, "\u0120academy": 21531, "\u0120Montgomery": 21532, "nm": 21533, "\u0120Lanc": 21534, "\u0120Alz": 21535, "210": 21536, "elong": 21537, "\u0120retailer": 21538, "\u0120arising": 21539, "\u0120rebellion": 21540, "\u0120blonde": 21541, 
"played": 21542, "\u0120instrumental": 21543, "Cross": 21544, "\u0120retention": 21545, "\u0120therapeutic": 21546, "\u0120seas": 21547, "\u0120infantry": 21548, "\u0120Clint": 21549, "\u0120prompting": 21550, "\u0120bitch": 21551, "\u0120stems": 21552, "\u0120Kra": 21553, "\u0120thesis": 21554, "\u0120Bog": 21555, "rued": 21556, "\u0120kings": 21557, "\u0120clay": 21558, "ificent": 21559, "\u0120YES": 21560, "\u0120Thing": 21561, "\u0120Cubs": 21562, "veyard": 21563, "elsh": 21564, "inarily": 21565, "\u0120Ey": 21566, "\u0120Rolling": 21567, "\u0120evolving": 21568, "India": 21569, "\u0120recognizes": 21570, "\u0120graduation": 21571, "isers": 21572, "\u0120fertility": 21573, "\u0120Milan": 21574, "Command": 21575, "\u0120boxing": 21576, "\u01201943": 21577, "\u0120gluten": 21578, "\u0120Emir": 21579, "\u0120idol": 21580, "\u0120conceived": 21581, "\u0120Creation": 21582, "Merit": 21583, "uddy": 21584, "ussions": 21585, "\u0120Lieutenant": 21586, "ietal": 21587, "\u0120unchanged": 21588, "\u0120Scale": 21589, "\u0120Crimea": 21590, "balls": 21591, "atorial": 21592, "\u0120depths": 21593, "\u0120empirical": 21594, "\u0120transm": 21595, "\u0120unsafe": 21596, "missible": 21597, "comfort": 21598, "156": 21599, "\u0120mechanic": 21600, "002": 21601, "lins": 21602, "\u0120smoked": 21603, "Pos": 21604, "\u0120slowing": 21605, "\u0120lav": 21606, "Texas": 21607, "\u0120cheating": 21608, "\u0120Metropolitan": 21609, "ethyl": 21610, "\u0120discovering": 21611, "asse": 21612, "\u0120pencil": 21613, "\u0120Pyongyang": 21614, "\u0120closet": 21615, "\u0120Sheet": 21616, "\u0120Entry": 21617, "oustic": 21618, "\u0120myst": 21619, "erate": 21620, "ariat": 21621, "\u0120minerals": 21622, "\u0120musician": 21623, "\u0120Pul": 21624, "\u0120Maz": 21625, "249": 21626, "\u0120permissions": 21627, "\u0120iv": 21628, "enary": 21629, "ickers": 21630, "\u0120Bing": 21631, "hea": 21632, "enable": 21633, "\u0120griev": 21634, "\u0120asserted": 21635, "\u0120Colonel": 21636, "\u0120affidav": 21637, "wo": 21638, "\u0120seated": 21639, "\u0120Ride": 21640, "\u0120paintings": 21641, "\u0120Pix": 21642, "\u0120137": 21643, "ishi": 21644, "umbai": 21645, "gotten": 21646, "\u0120Earl": 21647, "\u0120inning": 21648, "\u0120census": 21649, "\u0120travelled": 21650, "\u0120Consult": 21651, "185": 21652, "bind": 21653, "\u0120simplicity": 21654, "\u0120overlooked": 21655, "\u0120Helpful": 21656, "\u0120monkey": 21657, "\u0120overwhelmingly": 21658, "Blood": 21659, "\u0120Flint": 21660, "\u0120Jama": 21661, "\u0120Present": 21662, "\u0120Rage": 21663, "\u0120TA": 21664, "ptive": 21665, "\u0120turnout": 21666, "wald": 21667, "\u0120Dolphins": 21668, "\u0120VPN": 21669, "\u0120onion": 21670, "\u0120crafting": 21671, "mma": 21672, "\u0120Mercury": 21673, "\u0120arrange": 21674, "\u0120alerts": 21675, "\u0120OT": 21676, "zbollah": 21677, "\u0120gases": 21678, "\u0120Richardson": 21679, "sal": 21680, "lar": 21681, "\u0120frost": 21682, "\u0120lowering": 21683, "\u0120acclaim": 21684, "\u0120startups": 21685, "\u0120Gain": 21686, "essment": 21687, "\u0120guardian": 21688, "\u00e4\u00ba\u00ba": 21689, "\u0120Pie": 21690, "\u0120Links": 21691, "\u0120merits": 21692, "\u0120awake": 21693, "\u0120parental": 21694, "\u0120exceeds": 21695, "\u0120idle": 21696, "\u0120Pilot": 21697, "\u0120eBay": 21698, "\u0120Accept": 21699, "ipeg": 21700, "Cam": 21701, "\u0120Kot": 21702, "\u0120traders": 21703, "olitics": 21704, "unker": 21705, "\u0120Pale": 21706, "osi": 21707, "anmar": 21708, "\u01201947": 21709, "\u0120Fell": 21710, "estial": 
21711, "itating": 21712, "GF": 21713, "\u0120Sr": 21714, "ifted": 21715, "\u0120connector": 21716, "\u0120Bone": 21717, "illes": 21718, "260": 21719, "hma": 21720, "\u0120overlap": 21721, "\u0120GitHub": 21722, "\u0120cleaner": 21723, "\u0120Baptist": 21724, "\u0120WAS": 21725, "\u0120lungs": 21726, "\u00d1\u0123": 21727, "\u0120BUT": 21728, "\u0120cite": 21729, "\u0120pitched": 21730, "reatment": 21731, "\u0120trophies": 21732, "\u0120Nu": 21733, "386": 21734, "\u0120Pride": 21735, "\u0120attendees": 21736, "[]": 21737, "179": 21738, "\u0120spatial": 21739, "\u0120prizes": 21740, "\u0120Religion": 21741, "\u0120showcase": 21742, "\u0120Category": 21743, "vidia": 21744, "Target": 21745, "Property": 21746, "?,": 21747, "\u0120fusion": 21748, "pie": 21749, "\u0120UCLA": 21750, "\u0120soundtrack": 21751, "\u0120princess": 21752, "\u0120Caval": 21753, "should": 21754, "\u0120limbs": 21755, "Background": 21756, "\u0120lonely": 21757, "\u0120cores": 21758, "\u0120Tail": 21759, "sheet": 21760, "\u0120132": 21761, "Ra": 21762, "\u00e3\u0124\u00ab": 21763, "\u0120Bolt": 21764, "\u0120booked": 21765, "\u0120administer": 21766, "\u0120equals": 21767, "wy": 21768, "\u0120observing": 21769, "\u0120Baron": 21770, "\u0120Adobe": 21771, "\u0120virgin": 21772, "\u0120Socialist": 21773, "Move": 21774, "ghazi": 21775, "\u0120Linda": 21776, "212": 21777, "\u0120brewing": 21778, "\u0120merchants": 21779, "burse": 21780, "\u0120divor": 21781, "\u0120metals": 21782, "\u0120Ner": 21783, "\u0120sums": 21784, "\u0120Enemy": 21785, "\u0120envision": 21786, "\u0120granting": 21787, "\u0120Honey": 21788, "\u0120Skyrim": 21789, "\u0120socio": 21790, "graded": 21791, "\u0120selective": 21792, "WASHINGTON": 21793, "\u01201948": 21794, "\u0120Sirius": 21795, "\u0120Gross": 21796, "activity": 21797, "\u0120Ivan": 21798, "\u0120furious": 21799, "BSD": 21800, "\u0120Previous": 21801, "\u0120responsive": 21802, "\u0120charitable": 21803, "\u0120leaning": 21804, "\u0120Pew": 21805, "\u0120violates": 21806, "\\\\\\\\\\\\\\\\": 21807, "\u0120Coming": 21808, "wire": 21809, "\u0120poet": 21810, "\u0120resolutions": 21811, "command": 21812, "\u0120Portuguese": 21813, "\u0120nickname": 21814, "\u0120deaf": 21815, "February": 21816, "\u0120recognise": 21817, "\u0120entirety": 21818, "\u0120seasonal": 21819, "placed": 21820, "\u0120Telegraph": 21821, "\u0120microphone": 21822, "ouring": 21823, "\u0120grains": 21824, "\u0120governed": 21825, "\u0120postp": 21826, "\u0120Waters": 21827, "inement": 21828, "\u0120undocumented": 21829, "\u0120Comcast": 21830, "\u0120fox": 21831, "\u0120assaults": 21832, "reon": 21833, "many": 21834, "\u0120Jenkins": 21835, "\u0120Anyway": 21836, "\u0120assessments": 21837, "\u0120downs": 21838, "\u0120Mouse": 21839, "\u0120superb": 21840, "kt": 21841, "\u0120Dow": 21842, "\u0120taxation": 21843, "401": 21844, "\u0120smiles": 21845, "\u0120undertaken": 21846, "\u0120exh": 21847, "\u0120enthusiastic": 21848, "\u0120twent": 21849, "\u0120governmental": 21850, "\u0120autonomy": 21851, "\u0120Technologies": 21852, "\u0120Chain": 21853, "\u0120prevalent": 21854, "fb": 21855, "\u0120nicotine": 21856, "ogram": 21857, "job": 21858, "\u0120awaiting": 21859, "\u0120Menu": 21860, "\u0120deputies": 21861, "kov": 21862, "ishops": 21863, "Button": 21864, "\u0120Shanghai": 21865, "\u0120diesel": 21866, "\u0120Duck": 21867, "Ryan": 21868, "\u0120PCs": 21869, "NF": 21870, "jury": 21871, "ente": 21872, "\u0120inaccurate": 21873, "eddy": 21874, "Whatever": 21875, "\u0120showc": 21876, "\u0120Nad": 21877, "odus": 21878, "etr": 
21879, "\u0120plaintiffs": 21880, "\u0120WOR": 21881, "\u0120Assange": 21882, "\u0120privat": 21883, "\u0120premiums": 21884, "\u0120tam": 21885, "URL": 21886, "\u0120elites": 21887, "\u0120Ranger": 21888, "ottenham": 21889, "\u0120Hoff": 21890, "\u0120Athens": 21891, "\u0120definite": 21892, "\u0120sighed": 21893, "\u0120evenly": 21894, "211": 21895, "\u0120Amber": 21896, "akia": 21897, "\u0120mailing": 21898, "\u0120crashing": 21899, "\u0120Confederate": 21900, "rugged": 21901, "Wal": 21902, "\u0120Depths": 21903, "\u0120juvenile": 21904, "\u0120reactor": 21905, "Introduction": 21906, "\u0120Deluxe": 21907, "1995": 21908, "\u0120Sanchez": 21909, "\u0120Mead": 21910, "ivable": 21911, ":-": 21912, "\u0120Planning": 21913, "\u0120Trap": 21914, "quin": 21915, "\u0120Protect": 21916, "vered": 21917, "Information": 21918, "\u0120kidney": 21919, "innamon": 21920, "las": 21921, "\u0120policing": 21922, "\u0120tolerate": 21923, "\u0120Qi": 21924, "\u0120biased": 21925, "Fort": 21926, "\u0120Ki": 21927, "save": 21928, "\u0120privileged": 21929, "\u0120beasts": 21930, "\u0120Glas": 21931, "\u0120Cinem": 21932, "\u0120comeback": 21933, "Sunday": 21934, "\u0120extinction": 21935, "hops": 21936, "\u0120transmit": 21937, "\u0120doubles": 21938, "\u0120Flat": 21939, "167": 21940, "\u0120disputed": 21941, "\u0120injustice": 21942, "foo": 21943, "Vict": 21944, "roleum": 21945, "\u0120Julie": 21946, "Context": 21947, "\u0120Rarity": 21948, "issue": 21949, "Component": 21950, "\u0120counseling": 21951, "anne": 21952, "dark": 21953, "\u0120objections": 21954, "uilt": 21955, "\u0120gast": 21956, "\u0120plac": 21957, "\u0120unused": 21958, "\u00e3\u0125\u0129": 21959, "\u0120Trial": 21960, "\u0120Jas": 21961, "hedral": 21962, "obb": 21963, "\u0120temporal": 21964, "\u0120PRO": 21965, "\u0120NW": 21966, "\u0120Anniversary": 21967, "Large": 21968, "\u0120therm": 21969, "\u0120david": 21970, "\u0120systemic": 21971, "\u0120Shir": 21972, "mut": 21973, "\u0120Nept": 21974, "address": 21975, "\u0120scanning": 21976, "\u0120understandable": 21977, "\u0120canvas": 21978, "Cat": 21979, "\u0120Zoo": 21980, "\u0120angels": 21981, "LO": 21982, "\u0120Statement": 21983, "\u0120Sig": 21984, "ovable": 21985, "\u0120Away": 21986, "sharing": 21987, "ocrats": 21988, "stated": 21989, "\u0120weighing": 21990, "Nor": 21991, "wild": 21992, "Bey": 21993, "\u0120astonishing": 21994, "\u0120Reynolds": 21995, "\u0120opener": 21996, "\u0120trainer": 21997, "\u0120surgical": 21998, "pn": 21999, "\u0120adjusting": 22000, "wheel": 22001, "\u0120frown": 22002, "ervative": 22003, "\u0120suspend": 22004, "Within": 22005, "tein": 22006, "\u0120obstacle": 22007, "\u0120liberties": 22008, "ymes": 22009, "\u0120uranium": 22010, "ansom": 22011, "anol": 22012, "uba": 22013, "\u0120Loss": 22014, "\u0120arous": 22015, "\u0120Henderson": 22016, "Wow": 22017, "spl": 22018, "cur": 22019, "\u0120\u00c2\u0143": 22020, "\u0120theirs": 22021, "Damage": 22022, "\u0120downloading": 22023, "\u0120discern": 22024, "\u0120Sto": 22025, "\u0120Fla": 22026, "\u0120hath": 22027, "\u0120Aj": 22028, "\u0120unpleasant": 22029, "European": 22030, "expensive": 22031, "\u0120screenshot": 22032, "\u0120UV": 22033, "\u0120allied": 22034, "\u0120Persian": 22035, "\u0120monopoly": 22036, "\u0120atom": 22037, "\u0120Redskins": 22038, "\"><": 22039, "\u0120cancell": 22040, "\u0120cinema": 22041, "131": 22042, "fair": 22043, "\u0120Alfred": 22044, "\u0120duck": 22045, "args": 22046, "223": 22047, "\u0120ISI": 22048, "\u0120signaling": 22049, "inar": 22050, "\u0120laughs": 22051, 
"\u0120forwards": 22052, "\u0120reckless": 22053, "\u0120listeners": 22054, "ativity": 22055, "\u0120vastly": 22056, "nant": 22057, "Less": 22058, "\u0120Hunting": 22059, "\u0120Scientific": 22060, "ITED": 22061, "\u0120knight": 22062, "\u0120HTC": 22063, "usa": 22064, "tmp": 22065, "\u0120rude": 22066, "\u0120Legendary": 22067, "\u0120arises": 22068, "Bad": 22069, "\u0120Claim": 22070, "peg": 22071, "\u0120realities": 22072, "Think": 22073, "\u0120\u00c2\u00b0": 22074, "\u0120rode": 22075, "\u0120strive": 22076, "\u0120anecd": 22077, "\u0120shorts": 22078, "\u0120hypothes": 22079, "\u0120coordinated": 22080, "\u0120Gandhi": 22081, "\u0120FPS": 22082, "RED": 22083, "\u0120susceptible": 22084, "\u0120shrink": 22085, "\u0120Chart": 22086, "Help": 22087, "\u0120ion": 22088, "deep": 22089, "ribes": 22090, "\u0120Kai": 22091, "\u0120Customer": 22092, "Summary": 22093, "\u0120cough": 22094, "wife": 22095, "\u0120lend": 22096, "\u0120positioning": 22097, "\u0120lottery": 22098, "\u0120Canyon": 22099, "\u0120fade": 22100, "\u0120bronze": 22101, "\u0120Kenny": 22102, "\u0120boasts": 22103, "\u0120Enhanced": 22104, "record": 22105, "\u0120emergence": 22106, "\u0120akin": 22107, "\u0120Bert": 22108, "itous": 22109, "\u00e2\u0138\u0133": 22110, "\u0120stip": 22111, "\u0120exchanged": 22112, "omore": 22113, "alsh": 22114, "\u0120reservoir": 22115, "\u0120standpoint": 22116, "WM": 22117, "\u0120initiate": 22118, "\u0120decay": 22119, "\u0120brewery": 22120, "\u0120terribly": 22121, "\u0120mortal": 22122, "levard": 22123, "\u0120revis": 22124, "NI": 22125, "elo": 22126, "\u0120confess": 22127, "\u0120MSNBC": 22128, "\u0120submissions": 22129, "Controller": 22130, "\u0120202": 22131, "\u0120Ruth": 22132, "});": 22133, "\u0120Azure": 22134, "\u0120.\"": 22135, "206": 22136, "\u0120Marketing": 22137, "\u0120laund": 22138, "iencies": 22139, "\u0120renowned": 22140, "\u0120Trou": 22141, "\u0120NGO": 22142, "blems": 22143, "\u0120terrified": 22144, "\u0120warns": 22145, "\u0120pert": 22146, "\u0120unsure": 22147, "480": 22148, "alez": 22149, "ultz": 22150, "\u0120Outside": 22151, "\u0120styl": 22152, "\u0120Underground": 22153, "\u0120panc": 22154, "\u0120dictionary": 22155, "\u0120foe": 22156, "riminal": 22157, "\u0120Norwegian": 22158, "\u0120jailed": 22159, "\u0120maternal": 22160, "\u00c3\u00a9e": 22161, "\u0120Lucy": 22162, "cop": 22163, "Cho": 22164, "\u0120unsigned": 22165, "\u0120Zelda": 22166, "\u0120Insider": 22167, "\u0120Continued": 22168, "\u0120133": 22169, "\u0120Naruto": 22170, "\u0120Majority": 22171, "169": 22172, "\u0120Wo": 22173, "\u00e3\u0124\u0135": 22174, "\u0120pastor": 22175, "\u0120informal": 22176, "\u00d0\u00bd": 22177, "anthrop": 22178, "join": 22179, "\u00e3\u0123\u0139": 22180, "itational": 22181, "NP": 22182, "\u0120Writing": 22183, "fn": 22184, "\u0120Bever": 22185, "195": 22186, "\u0120yelling": 22187, "\u0120drastically": 22188, "\u0120eject": 22189, "\u0120neut": 22190, "\u0120thrive": 22191, "\u0120Frequ": 22192, "oux": 22193, "\u0120possesses": 22194, "\u0120Senators": 22195, "\u0120DES": 22196, "\u0120Shakespeare": 22197, "\u0120Franco": 22198, "\u0120LB": 22199, "uchi": 22200, "\u0120incarn": 22201, "\u0120founders": 22202, "Function": 22203, "\u0120brightness": 22204, "\u0120BT": 22205, "\u0120whale": 22206, "\u0120Theater": 22207, "mass": 22208, "\u0120Doll": 22209, "Something": 22210, "\u0120echoed": 22211, "\u0120Hex": 22212, "crit": 22213, "afia": 22214, "\u0120goddess": 22215, "\u0120eleven": 22216, "\u0120Preview": 22217, "\u0120Aurora": 22218, "\u0120401": 
22219, "ulsive": 22220, "\u0120Logan": 22221, "inburgh": 22222, "\u0120Centers": 22223, "\u0120ONLY": 22224, "\u0120Aid": 22225, "\u0120paradox": 22226, "\u0120hurd": 22227, "\u0120LC": 22228, "Due": 22229, "court": 22230, "\u0120offended": 22231, "\u0120evaluating": 22232, "\u0120Matthews": 22233, "\u0120tomb": 22234, "\u0120payroll": 22235, "\u0120extraction": 22236, "\u0120Hands": 22237, "ifi": 22238, "\u0120supernatural": 22239, "\u0120COMM": 22240, "]=": 22241, "dogs": 22242, "\u0120512": 22243, "\u0120Meeting": 22244, "Richard": 22245, "\u0120Maximum": 22246, "\u0120ideals": 22247, "Things": 22248, "mand": 22249, "\u0120Regardless": 22250, "\u0120humili": 22251, "buffer": 22252, "Little": 22253, "\u0120Dani": 22254, "\u0120Nak": 22255, "\u0120liberation": 22256, "\u0120Abe": 22257, "\u0120OL": 22258, "\u0120stuffed": 22259, "aca": 22260, "inda": 22261, "raphic": 22262, "\u0120mosqu": 22263, "\u0120campaigning": 22264, "\u0120occupy": 22265, "Squ": 22266, "rina": 22267, "\u0120Wel": 22268, "\u0120VS": 22269, "\u0120physic": 22270, "\u0120puls": 22271, "rint": 22272, "oaded": 22273, "ETF": 22274, "\u0120Archives": 22275, "\u0120venues": 22276, "hner": 22277, "\u0120Turbo": 22278, "\u0120lust": 22279, "\u0120appealed": 22280, "quez": 22281, "ilib": 22282, "\u0120Timothy": 22283, "\u0120omn": 22284, "dro": 22285, "\u0120obsession": 22286, "\u0120Savage": 22287, "1996": 22288, "Global": 22289, "Jes": 22290, "214": 22291, "\u0120sliding": 22292, "\u0120disappro": 22293, "\u0120Magical": 22294, "\u0120voluntarily": 22295, "gb": 22296, "aney": 22297, "\u0120prophet": 22298, "\u0120Rein": 22299, "\u0120Julia": 22300, "\u0120Worth": 22301, "aurus": 22302, "\u0120bounds": 22303, "ieu": 22304, ")))": 22305, "\u0120crore": 22306, "\u0120Citizen": 22307, "Sky": 22308, "\u0120columnist": 22309, "\u0120seekers": 22310, "ondo": 22311, "ISA": 22312, "\u0120Length": 22313, "\u0120nostalg": 22314, "\u0120newcom": 22315, "\u0120detrim": 22316, "entric": 22317, "375": 22318, "\u0120GE": 22319, "\u0120autop": 22320, "\u0120academics": 22321, "AppData": 22322, "\u0120Shen": 22323, "\u0120idiot": 22324, "\u0120Transit": 22325, "\u0120teaspoon": 22326, "Wil": 22327, "KO": 22328, "\u0120Comedy": 22329, ">,": 22330, "\u0120populated": 22331, "WD": 22332, "\u0120pigs": 22333, "\u0120Oculus": 22334, "\u0120sympathetic": 22335, "\u0120marathon": 22336, "198": 22337, "\u0120seizure": 22338, "sided": 22339, "\u0120dop": 22340, "irtual": 22341, "Land": 22342, "\u0120Floor": 22343, "osaurs": 22344, "...]": 22345, "\u0120los": 22346, "\u0120subsidiary": 22347, "EY": 22348, "\u0120Parts": 22349, "\u0120Stef": 22350, "\u0120Judiciary": 22351, "\u0120134": 22352, "\u0120mirrors": 22353, "\u0120ket": 22354, "times": 22355, "\u0120neurolog": 22356, "\u0120cav": 22357, "\u0120Guest": 22358, "\u0120tumor": 22359, "scill": 22360, "\u0120Lloyd": 22361, "Est": 22362, "\u0120clearer": 22363, "\u0120stereotypes": 22364, "\u0120dur": 22365, "nothing": 22366, "Reddit": 22367, "\u0120negotiated": 22368, "------------------------": 22369, "235": 22370, "\u0120flown": 22371, "\u0120Seoul": 22372, "\u0120Resident": 22373, "\u0120SCH": 22374, "\u0120disappearance": 22375, "\u0120Vince": 22376, "grown": 22377, "\u0120grabs": 22378, "ril": 22379, "\u0120Infinite": 22380, "\u0120Twenty": 22381, "\u0120pedestrian": 22382, "\u0120jersey": 22383, "\u0120Fur": 22384, "\u0120Infinity": 22385, "\u0120Elliott": 22386, "\u0120mentor": 22387, "\u0120morally": 22388, "\u0120obey": 22389, "secure": 22390, "iffe": 22391, "\u0120antibiotics": 22392, 
"angled": 22393, "\u0120Freeman": 22394, "\u0120Introduction": 22395, "Jun": 22396, "\u0120marsh": 22397, "icans": 22398, "\u0120EVENTS": 22399, "ochond": 22400, "Wall": 22401, "iculty": 22402, "\u0120misdemeanor": 22403, "\u0120ly": 22404, "Thomas": 22405, "\u0120Resolution": 22406, "\u0120animations": 22407, "\u0120Dry": 22408, "\u0120intercourse": 22409, "\u0120Newcastle": 22410, "\u0120Hog": 22411, "\u0120Equipment": 22412, "177": 22413, "\u0120territorial": 22414, "\u0120archives": 22415, "203": 22416, "Filter": 22417, "\u0120Munich": 22418, "\u0120commanded": 22419, "\u0120Wand": 22420, "\u0120pitches": 22421, "\u0120Croat": 22422, "\u0120ratios": 22423, "\u0120Mits": 22424, "\u0120accumulated": 22425, "\u0120Specifically": 22426, "\u0120gentleman": 22427, "acerb": 22428, "\u0120penn": 22429, "\u0120aka": 22430, "\u0120Fuk": 22431, "\u0120intervene": 22432, "\u0120Refuge": 22433, "\u0120Alzheimer": 22434, "\u0120succession": 22435, "ohan": 22436, "does": 22437, "Lord": 22438, "\u0120separat": 22439, "\u0120correspondence": 22440, "\u0120shiny": 22441, "Prior": 22442, "\u0120sulf": 22443, "\u0120miserable": 22444, "\u0120dedication": 22445, "().": 22446, "\u0120specialists": 22447, "\u0120defects": 22448, "\u0120Cult": 22449, "\u0120Xia": 22450, "\u0120jeopard": 22451, "\u0120Ore": 22452, "Ability": 22453, "\u0120lear": 22454, "\u0120ambitions": 22455, "\u0120BMI": 22456, "\u0120Arabs": 22457, "\u01201942": 22458, "\u0120preservation": 22459, "ificate": 22460, "\u0120ashamed": 22461, "loss": 22462, "\u0120Restaur": 22463, "\u0120resemble": 22464, "\u0120enrich": 22465, "\u0120KN": 22466, "\u0120Clan": 22467, "float": 22468, "\u0120playable": 22469, "ITT": 22470, "\u0120harmony": 22471, "arrison": 22472, "\u0120Weinstein": 22473, "were": 22474, "\u0120poisoning": 22475, "\u0120Comput": 22476, "\u0120WordPress": 22477, "major": 22478, "\u0120Valve": 22479, "Fan": 22480, "\u0120Throw": 22481, "\u0120Romans": 22482, "\u0120Depression": 22483, "ados": 22484, "\u0120tortured": 22485, "\u0120balancing": 22486, "bottom": 22487, "\u0120acquiring": 22488, "\u0120Monte": 22489, "ardi": 22490, "\u0120aura": 22491, "\u0120##": 22492, "\u0120Standing": 22493, "\u0120Atlas": 22494, "CF": 22495, "\u0120intrins": 22496, "\u0120Benghazi": 22497, "\u0120camping": 22498, "\u0120tapped": 22499, "blade": 22500, "strous": 22501, "\u0120Rabb": 22502, "\u0120Written": 22503, "tip": 22504, "\u0120Neigh": 22505, "sterdam": 22506, "\u0120Allow": 22507, "\u0120Healing": 22508, "\u0120Rhod": 22509, "num": 22510, "\u0120caffeine": 22511, "\u0120Percent": 22512, "\u0120boo": 22513, "\u0120apples": 22514, "305": 22515, "\u0120welcoming": 22516, "\u0120applaud": 22517, "\u0120austerity": 22518, "\u00c2\u00b1": 22519, "\u0120Reality": 22520, "efe": 22521, "\u00e5\u00ae": 22522, "\u0120sucks": 22523, "\u0120tabs": 22524, "\u0120PayPal": 22525, "\u0120backpack": 22526, "\u0120gifted": 22527, "abulary": 22528, "\u0120Scout": 22529, "irteen": 22530, "\u0120chin": 22531, "\u0120omitted": 22532, "\u0120negatively": 22533, "\u0120accessing": 22534, "\u0120Earn": 22535, "\u0120ambulance": 22536, "\u0120headphones": 22537, "\u0120205": 22538, "\u0120Refresh": 22539, "president": 22540, "\u0120Kitchen": 22541, "\u0120Entered": 22542, "\u0120Snyder": 22543, "005": 22544, "omical": 22545, "\u0120borrowed": 22546, "\u0120Nem": 22547, "\u0120aviation": 22548, "\u0120stall": 22549, "rimination": 22550, "\u0120uniforms": 22551, "itime": 22552, "\u0120Simmons": 22553, "energy": 22554, "ablished": 22555, "yy": 22556, "qualified": 22557, 
"\u0120rallies": 22558, "\u0120Stuart": 22559, "flight": 22560, "\u0120gangs": 22561, "rag": 22562, "\u0120vault": 22563, "lux": 22564, "\u0120Compar": 22565, "\u0120designation": 22566, "209": 22567, "\u0120Jos": 22568, "dollar": 22569, "zero": 22570, "\u0120wells": 22571, "303": 22572, "\u0120constituents": 22573, "\u0120heck": 22574, "\u0120cows": 22575, "\u0120commanders": 22576, "\u0120differential": 22577, "\u0120Catherine": 22578, "299": 22579, "\u0120valve": 22580, "\u0120brace": 22581, "\u0120perspectives": 22582, "cert": 22583, "fact": 22584, "icularly": 22585, "\u0120McN": 22586, "planes": 22587, "\u0120intric": 22588, "\u0120peas": 22589, "ovan": 22590, "\u0120tossed": 22591, "retch": 22592, "\u0120Lopez": 22593, "\u0120unfamiliar": 22594, "death": 22595, "\u0120Apart": 22596, "\u0120Chang": 22597, "\u0120relieved": 22598, "rophe": 22599, "\u0120airports": 22600, "\u0120freak": 22601, "util": 22602, "Mill": 22603, "\u0120Chin": 22604, "\u0120Owen": 22605, "male": 22606, "\u0120Broken": 22607, "\u0120Winds": 22608, "rob": 22609, "rising": 22610, "\u0120firefighters": 22611, "\u0120authoritarian": 22612, "\u0120148": 22613, "Bitcoin": 22614, "external": 22615, "\u0120browsers": 22616, "ichever": 22617, "orian": 22618, "\u0120unb": 22619, "\u0120poke": 22620, "\u0120Zot": 22621, "Mid": 22622, "\u0120Popular": 22623, "\u0120covert": 22624, "\u0120contributes": 22625, "\u0120650": 22626, "\u0120contention": 22627, "Gate": 22628, "\u0120consoles": 22629, "\u0120chromos": 22630, "\u0120IX": 22631, "\u0120visually": 22632, "\u0120Eisen": 22633, "\u0120jewelry": 22634, "\u0120delegation": 22635, "\u0120accelerate": 22636, "\u0120Riley": 22637, "\u0120slope": 22638, "\u0120indoor": 22639, "itially": 22640, "\u0120hugely": 22641, "\u0120tunnels": 22642, "\u0120fined": 22643, "\u0120directive": 22644, "\u0120forehead": 22645, "ustomed": 22646, "\u0120skate": 22647, "Music": 22648, "gas": 22649, "\u0120recognizing": 22650, "ambo": 22651, "\u0120overweight": 22652, "\u0120Grade": 22653, "\u00d9\u012c": 22654, "\u0120sounding": 22655, "\u0120locking": 22656, "\u0120REM": 22657, "Store": 22658, "\u0120excav": 22659, "\u0120Likewise": 22660, "\u0120Lights": 22661, "\u0120elbow": 22662, "\u0120Supply": 22663, "wic": 22664, "\u0120handsome": 22665, "1994": 22666, "Coll": 22667, "\u0120adequately": 22668, "\u0120Associate": 22669, "\u0120strips": 22670, "\u0120crackdown": 22671, "\u0120marvel": 22672, "\u0120Kun": 22673, "\u0120passages": 22674, "@@@@": 22675, "\u0120Tall": 22676, "\u0120thoughtful": 22677, "namese": 22678, "\u0120prostitution": 22679, "business": 22680, "\u0120ballistic": 22681, "personal": 22682, "cig": 22683, "izational": 22684, "Round": 22685, "\u0120\u00c2\u0142\u0120\u00c2\u0142\u0120\u00c2\u0142\u0120\u00c2\u0142": 22686, "\u0120Coleman": 22687, "\u0120admitting": 22688, "\u0120Plug": 22689, "\u0120bitcoins": 22690, "\u0120Suz": 22691, "\u0120fairness": 22692, "\u0120supplier": 22693, "\u0120catastrophic": 22694, "\u0120Helen": 22695, "oqu": 22696, "Marc": 22697, "\u0120Articles": 22698, "gie": 22699, "\u0120endangered": 22700, "\u0120destiny": 22701, "\u0120Volt": 22702, "olia": 22703, "axis": 22704, "\u0120cheat": 22705, "\u0120unified": 22706, "ICO": 22707, "quote": 22708, "302": 22709, "\u0120Sed": 22710, "\u0120suppression": 22711, "\u0120analyzing": 22712, "\u0120squat": 22713, "\u0120figuring": 22714, "\u0120coordinates": 22715, "\u0120chunks": 22716, "\u01201946": 22717, "\u0120subp": 22718, "\u0120wiki": 22719, "\u0120Forbes": 22720, "\u0120Jupiter": 22721, 
"\u0120Erik": 22722, "imer": 22723, "\u0120Commercial": 22724, "\\)": 22725, "\u0120legitimacy": 22726, "\u0120dental": 22727, "\u0120Mean": 22728, "\u0120deficits": 22729, "550": 22730, "Originally": 22731, "\u0120Horror": 22732, "\u0120contamination": 22733, "llah": 22734, "\u0120confisc": 22735, "\u0120Clare": 22736, "TB": 22737, "\u0120Failed": 22738, "aned": 22739, "\u0120ruler": 22740, "\u0120Controller": 22741, "\u0120feminists": 22742, "Fix": 22743, "gay": 22744, "207": 22745, "\u0120rabbit": 22746, "Third": 22747, "owntown": 22748, "\u0120glue": 22749, "\u0120volatile": 22750, "\u0120shining": 22751, "\u0120foll": 22752, "\u0120impaired": 22753, "\u0120supers": 22754, "\u00e6\u012a": 22755, "\u0120clutch": 22756, "\u013c\u00e9\u0128\u0134": 22757, "\u0120prolet": 22758, "\u0120(!": 22759, "\u0120yelled": 22760, "\u0120Kiev": 22761, "\u0120Ern": 22762, "\u0120Shock": 22763, "KB": 22764, "\u0120situated": 22765, "query": 22766, "\u0120Nas": 22767, "\u0120annex": 22768, "character": 22769, "\u0120Holiday": 22770, "\u0120automation": 22771, "\u0120Jill": 22772, "\u0120Remastered": 22773, "\u0120linem": 22774, "\u0120wilderness": 22775, "\u0120Horizon": 22776, "\u0120Guinea": 22777, "AZ": 22778, "\u0120mainland": 22779, "\u0120secrecy": 22780, "LEASE": 22781, "\u0120punk": 22782, "\u0120Province": 22783, "(),": 22784, "Speed": 22785, "\u0120handing": 22786, "\u0120Sebast": 22787, "Sir": 22788, "rase": 22789, "\u0120journals": 22790, "\u0120congest": 22791, "\u0120Tut": 22792, "irrel": 22793, "\u0120schizophrenia": 22794, "\u0120misogyn": 22795, "healthy": 22796, "Iron": 22797, "\u0120reacted": 22798, "-$": 22799, "252": 22800, "\u0120plural": 22801, "\u0120plum": 22802, "\u0120bargain": 22803, "\u0120grounded": 22804, "finder": 22805, "\u0120disse": 22806, "\u0120Laz": 22807, "OOD": 22808, "\u0120atroc": 22809, "Factory": 22810, "\u0120minions": 22811, "\u0120ori": 22812, "\u0120Brave": 22813, "\u0120PRE": 22814, "\u0120Myanmar": 22815, "\u0120Hod": 22816, "\u0120expedition": 22817, "\u0120explode": 22818, "\u0120Coord": 22819, "\u0120extr": 22820, "\u0120Brief": 22821, "\u0120ADHD": 22822, "\u0120hardcore": 22823, "feeding": 22824, "\u0120dile": 22825, "\u0120Fruit": 22826, "\u0120vaccination": 22827, "\u0120Mao": 22828, "osphere": 22829, "\u0120contests": 22830, "-|": 22831, "\u0120fren": 22832, "isphere": 22833, "Rom": 22834, "\u0120Sharp": 22835, "\u0120Trend": 22836, "\u0120disconnect": 22837, "\u00e2\u0122\u00a2\u00e2\u0122\u00a2": 22838, "\u0120persecution": 22839, "Earth": 22840, "\u0120healthier": 22841, "384": 22842, "\u0120cob": 22843, "\u0120Trinity": 22844, "OWS": 22845, "ANN": 22846, "\u0120specialty": 22847, "\u0120gru": 22848, "\u0120cooperative": 22849, "why": 22850, "Starting": 22851, "\u0120Issues": 22852, "stre": 22853, "ensor": 22854, "\u0120185": 22855, "Adv": 22856, "!?": 22857, "\u0120Revel": 22858, "emia": 22859, "\u0120Hulk": 22860, "\u0120celebrations": 22861, "\u0120Sou": 22862, "raud": 22863, "\u0120Klein": 22864, "\u0120unreal": 22865, "context": 22866, "\u0120partnerships": 22867, "\u0120adopting": 22868, "tical": 22869, "\u0120splash": 22870, "\u0120Hezbollah": 22871, "category": 22872, "cyclop": 22873, "xton": 22874, "\u0120Dot": 22875, "urdy": 22876, "tz": 22877, "\u0120envelope": 22878, "\u0120NL": 22879, "\u00e2\u0137": 22880, "\u0120wherein": 22881, "Spec": 22882, "184": 22883, "\u0120telev": 22884, "aliation": 22885, "\u0120myths": 22886, "\u00e5\u00b0": 22887, "\u0120rigorous": 22888, "\u0120communicating": 22889, "\u0120observer": 22890, 
"\u0120rehe": 22891, "\u0120Wash": 22892, "\u0120apologized": 22893, "\u0120Tin": 22894, "\u0120expenditures": 22895, "workers": 22896, "document": 22897, "\u0120hesitate": 22898, "\u0120Lenin": 22899, "\u0120unpredictable": 22900, "\u0120renewal": 22901, "cler": 22902, "okia": 22903, "\u0120CONT": 22904, "\u0120postseason": 22905, "Tokens": 22906, "\u0120exacerb": 22907, "\u0120betting": 22908, "\u0120147": 22909, "\u0120elevation": 22910, "Wood": 22911, "\u0120Solomon": 22912, "194": 22913, "004": 22914, "output": 22915, "\u0120redund": 22916, "\u0120Mumbai": 22917, "\u0120pH": 22918, "\u0120reproduce": 22919, "\u0120Duration": 22920, "MAX": 22921, "\u0120bog": 22922, "CBS": 22923, "\u0120Balance": 22924, "\u0120Sgt": 22925, "\u0120Recent": 22926, "\u0120cd": 22927, "\u0120popped": 22928, "\u0120incompet": 22929, "prop": 22930, "ayan": 22931, "guy": 22932, "Pacific": 22933, "\u0120tyr": 22934, "\u0120{{": 22935, "\u0120Mystic": 22936, "\u0120Dana": 22937, "\u0120masturb": 22938, "\u0120geometry": 22939, "\u00c3\u00a2": 22940, "\u0120Correct": 22941, "\u0120trajectory": 22942, "\u0120distracted": 22943, "\u0120foo": 22944, "\u0120Welsh": 22945, "Luc": 22946, "mith": 22947, "\u0120rugby": 22948, "\u0120respiratory": 22949, "\u0120triangle": 22950, "\u0120215": 22951, "\u0120undergraduate": 22952, "\u0120Superior": 22953, "changing": 22954, "_-": 22955, "\u0120rightly": 22956, "\u0120referee": 22957, "\u0120lucrative": 22958, "\u0120unauthorized": 22959, "\u0120resembles": 22960, "\u0120GNU": 22961, "\u0120Derby": 22962, "\u0120pathways": 22963, "\u0120Led": 22964, "\u0120endurance": 22965, "\u0120stint": 22966, "\u0120collector": 22967, "Fast": 22968, "\u0120dots": 22969, "\u0120nationals": 22970, "\u0120Securities": 22971, "\u0120whip": 22972, "Param": 22973, "\u0120learns": 22974, "Magic": 22975, "\u0120detailing": 22976, "moon": 22977, "\u0120broadcasting": 22978, "\u0120baked": 22979, "265": 22980, "holm": 22981, "\u0120Sah": 22982, "\u0120Hussein": 22983, "\u0120Courtesy": 22984, "174": 22985, "\u0120146": 22986, "\u0120geographic": 22987, "peace": 22988, "\u0120judging": 22989, "\u0120Stern": 22990, "Bur": 22991, "\u0120storyline": 22992, "Gun": 22993, "\u0120Stick": 22994, "245": 22995, "307": 22996, "\u00e3\u0124\u00b4\u00e3\u0125\u00b3": 22997, "\u0120Administrator": 22998, "\u0120burnt": 22999, "\u0120pave": 23000, "choes": 23001, "Exec": 23002, "\u0120campuses": 23003, "Result": 23004, "\u0120mutations": 23005, "\u0120Charter": 23006, "\u0120captures": 23007, "\u0120compares": 23008, "\u0120badge": 23009, "Scient": 23010, "\u0120erad": 23011, "iery": 23012, "oi": 23013, "ettes": 23014, "\u0120Estate": 23015, "\u0120strap": 23016, "\u0120proudly": 23017, "\u0120fried": 23018, "\u0120withdrawn": 23019, "\u0120Voy": 23020, "phony": 23021, "Items": 23022, "\u0120Pierce": 23023, "bard": 23024, "\u0120annotation": 23025, "anton": 23026, "illon": 23027, "Impro": 23028, "...)": 23029, "\u0120happier": 23030, "------": 23031, "adjust": 23032, "\u0120staffers": 23033, "\u0120activism": 23034, "\u0120perf": 23035, "\u0120alright": 23036, "Need": 23037, "\u0120commence": 23038, "\u0120opioid": 23039, "\u0120Amanda": 23040, "Es": 23041, "\u0120Pars": 23042, "\u0120Kaw": 23043, "Works": 23044, "248": 23045, "\u0120indo": 23046, "tc": 23047, "endant": 23048, "\u0120Moto": 23049, "\u0120legalization": 23050, "OTE": 23051, "\u0120tasked": 23052, "\u0120tsp": 23053, "\u0120ACTIONS": 23054, "166": 23055, "\u0120refreshing": 23056, "\u0120NR": 23057, "\u0120Perez": 23058, "\u0120infringement": 
23059, "SY": 23060, "Listen": 23061, "inning": 23062, "ku": 23063, "\u0120rotate": 23064, "program": 23065, "arah": 23066, "Design": 23067, "\u0120(\u00c2\u00a3": 23068, "\u0120storing": 23069, "\u0120warrants": 23070, "\u0120judgement": 23071, "\u0120Brist": 23072, "usually": 23073, "photo": 23074, "\u0120Ran": 23075, "\u0120Pine": 23076, "\u0120outrageous": 23077, "\u0120Valentine": 23078, "luence": 23079, "\u0120Everybody": 23080, "Altern": 23081, "\u0120relevance": 23082, "\u0120terminated": 23083, "\u0120dessert": 23084, "\u0120fulfilled": 23085, "\u0120prosecuted": 23086, "\u0120Words": 23087, "\u0120migrant": 23088, "\u0120cultivation": 23089, "\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124": 23090, "idelity": 23091, "\u0120Vern": 23092, "\u0120Login": 23093, "\u0120metaphor": 23094, "\u0120Tip": 23095, "\u0120recruits": 23096, "\u0120Pig": 23097, "ribing": 23098, "\u0120enthusiasts": 23099, "exper": 23100, "\u0120frightening": 23101, "\u0120Hair": 23102, "anson": 23103, "strate": 23104, "\u0120hi": 23105, "Height": 23106, "\u0120owning": 23107, "none": 23108, "\u0120dislike": 23109, "\u0120knives": 23110, "pherd": 23111, "\u0120loudly": 23112, "\u0120APIs": 23113, "Display": 23114, "\u0120Lac": 23115, "\u0120USS": 23116, "abl": 23117, "verages": 23118, "Jew": 23119, "\u0120172": 23120, "\u0120Historical": 23121, "atoon": 23122, "\u0120Physics": 23123, "intern": 23124, "\u0120warmth": 23125, "\u0120topp": 23126, "DM": 23127, "\u0120gunman": 23128, "\u0120emperor": 23129, "odi": 23130, "\u00e3\u0125\u00a3": 23131, "inatory": 23132, "\u0120Rib": 23133, "\u0120131": 23134, "\u0120Saturn": 23135, "\u0120Shining": 23136, "\u0120waking": 23137, "Quotes": 23138, "\u0120comedian": 23139, "enberg": 23140, "\u00c2\u00bd": 23141, "\u0120believers": 23142, "\u0120paperwork": 23143, "custom": 23144, "\u0120lev": 23145, "\u0120lament": 23146, "\u0120pouring": 23147, "222": 23148, "political": 23149, "\u0120Supplement": 23150, "maid": 23151, "\u0120cruelty": 23152, "\u0120tread": 23153, "ysics": 23154, "Aw": 23155, "rites": 23156, "\u0120modifier": 23157, "\u0120Position": 23158, "Adam": 23159, "lb": 23160, "ubs": 23161, "\u0120imperfect": 23162, "\u0120clusters": 23163, "\u0120Engineer": 23164, "\u0120Cherry": 23165, "\u0120inauguration": 23166, "\u0120Sau": 23167, "\u0120embodiment": 23168, "\u0120Uncle": 23169, "\u0120overr": 23170, "\u0120explosions": 23171, "cule": 23172, "\u0120Princeton": 23173, "\u0120Andrea": 23174, "\u0120incorrectly": 23175, "\u0120earnest": 23176, "\u0120pilgr": 23177, "\u0120Sprint": 23178, "\u0120sleeve": 23179, "\u0120hears": 23180, "\u0120Amazing": 23181, "\u0120browsing": 23182, "agin": 23183, "\u0120homeland": 23184, "\u0120haw": 23185, "\u0120diving": 23186, "istered": 23187, "178": 23188, "\u0120bargaining": 23189, "\u0120Arcade": 23190, "\u0120delegate": 23191, "terson": 23192, "................................................................": 23193, "\u0120Jacksonville": 23194, "275": 23195, "\u0120stagn": 23196, "\u0120adam": 23197, "\u0120Sherman": 23198, "CB": 23199, "\u0120suburb": 23200, "\u0120Foods": 23201, "\u0120converting": 23202, "\u0120Arist": 23203, "\u0120chambers": 23204, "love": 23205, "\u0120amino": 23206, 
"\u0120Gan": 23207, "\u0120madness": 23208, "mc": 23209, "\u0120USE": 23210, "defined": 23211, "\u0120ultr": 23212, "indust": 23213, "\u0120wolves": 23214, "lance": 23215, "Additionally": 23216, "\u0120cracks": 23217, "asia": 23218, "\u0120Reason": 23219, "\u0120Pump": 23220, "\u0120accidental": 23221, "\u0120Laser": 23222, "\u0120Rid": 23223, "\u0120initialized": 23224, "elli": 23225, "\u0120unnamed": 23226, "\u0120noun": 23227, "\u0120Passed": 23228, "\u0120hostage": 23229, "\u0120Ethiop": 23230, "shirts": 23231, "\u0120unrel": 23232, "\u0120Embassy": 23233, "\u01201941": 23234, "\u0120atoms": 23235, "\u0120purported": 23236, "164": 23237, "\u0120Fi": 23238, "\u0120gallons": 23239, "\u0120Monica": 23240, "\u0120pg": 23241, "enment": 23242, "\u0120sorted": 23243, "\u0120Gospel": 23244, "\u0120heights": 23245, "\u0120traced": 23246, "\u0120undergoing": 23247, "Shell": 23248, "\u0120sacks": 23249, "\u0120proportions": 23250, "\u0120halluc": 23251, "Font": 23252, "acet": 23253, "\u0120warmer": 23254, "\u0120INTER": 23255, "\u0120grabbing": 23256, "Plug": 23257, "\u0120realization": 23258, "\u0120Burke": 23259, "\u0120enchant": 23260, "ATER": 23261, "\u0120Seed": 23262, "\u0120abundant": 23263, "FM": 23264, "\u0120civic": 23265, "Vs": 23266, "isi": 23267, "\u0120vow": 23268, "\u0120reper": 23269, "\u0120Partnership": 23270, "\u0120penetration": 23271, "\u0120axe": 23272, "\u0120shattered": 23273, "\u0120Zombies": 23274, "\u0120vinyl": 23275, "\u0120Alert": 23276, "eon": 23277, "\u0120obliged": 23278, "\u0120Illust": 23279, "\u0120Plaza": 23280, "\u0120Frontier": 23281, "\u0120davidjl": 23282, "\u0120Serial": 23283, "\u0120Hav": 23284, "\u0120Nutrition": 23285, "Bi": 23286, "\u0120\u00e2\u0138\u012a": 23287, "\u0120Jays": 23288, "linux": 23289, "\u0120hurry": 23290, "\u0120voy": 23291, "\u0120hopeless": 23292, "\u0120Stealth": 23293, "\u0120\u00e3\u0123": 23294, "essors": 23295, "ttle": 23296, "borg": 23297, "\u0120Safari": 23298, "fell": 23299, "\u0120wary": 23300, "due": 23301, "\u0120Above": 23302, "Ha": 23303, "ELL": 23304, "\u0120notor": 23305, "\u0120Won": 23306, "Too": 23307, "\u0120occupations": 23308, "\u0120possessions": 23309, "\u0120inviting": 23310, "\u0120predators": 23311, "\u0120accelerated": 23312, "\u0120157": 23313, "uterte": 23314, "\u0120Cube": 23315, "east": 23316, "account": 23317, "Give": 23318, "\u0120transplant": 23319, "redients": 23320, "idable": 23321, "\u0120screenshots": 23322, "\u0120Gund": 23323, "\u0120FS": 23324, "\u0120travelers": 23325, "\u0120sensory": 23326, "\u0120Fiat": 23327, "\u0120Rockets": 23328, "\u0130\u012d": 23329, "_{": 23330, "Friend": 23331, "\u0120charming": 23332, "ALS": 23333, "\u0120enjoyment": 23334, "mph": 23335, "\u01205000": 23336, "\u0120REG": 23337, "\u00d9\u0128": 23338, "bia": 23339, "\u0120compilation": 23340, "rost": 23341, "\u0120VP": 23342, "\u0120Schne": 23343, "2019": 23344, "\u0120copying": 23345, "MORE": 23346, "\u0120Flore": 23347, "falls": 23348, "215": 23349, "total": 23350, "\u0120disciples": 23351, "double": 23352, "\u0120exceeding": 23353, "\u0120smashed": 23354, "\u0120conceptual": 23355, "\u0120Romania": 23356, "\u0120Brent": 23357, "\u0120ICE": 23358, "\u0120Tou": 23359, "\u0120grap": 23360, "\u0120nails": 23361, "189": 23362, "\u00e3\u0125\u013a": 23363, "\u0120procure": 23364, "eur": 23365, "\u0120confirming": 23366, "\u0120Cec": 23367, "awi": 23368, "\u0120Eden": 23369, "\u0120ng": 23370, "\u0120engineered": 23371, "atics": 23372, "\u0120hooked": 23373, "\u0120disgusting": 23374, "\u0120Murder": 23375, 
"\u00e3\u0124\u00bf": 23376, "Library": 23377, "\u0120168": 23378, "Almost": 23379, "hematic": 23380, "Menu": 23381, "\u0120Notre": 23382, "\u0120Jur": 23383, "\u0120kidnapped": 23384, "\u0120hacker": 23385, "\u0120Jade": 23386, "\u0120creepy": 23387, "\u0120drawings": 23388, "\u0120Sponsor": 23389, "\u0120cyclists": 23390, "\u0120Goblin": 23391, "\u0120optimized": 23392, "\u0120staged": 23393, "\u0120McD": 23394, "between": 23395, "Age": 23396, "eno": 23397, "Sex": 23398, "\u0120Wide": 23399, "nings": 23400, "avis": 23401, "\u0120incapable": 23402, "\u0120Kob": 23403, "\u0120rewarding": 23404, "\u0120Lone": 23405, "olescent": 23406, "\u0120contracted": 23407, "\u0120sticky": 23408, "Jose": 23409, "Ball": 23410, "fest": 23411, "\u0120Input": 23412, "\u0120Recently": 23413, "\u0120tomat": 23414, "square": 23415, "Application": 23416, "\u0120nitrogen": 23417, "\u0120duplicate": 23418, "\u0120Recon": 23419, "\u0120Dear": 23420, "London": 23421, "\u0120intra": 23422, "\u0120dock": 23423, "\u0120outreach": 23424, "\u0120Million": 23425, "\u0120mammals": 23426, "ampton": 23427, "VAL": 23428, "\u0120snaps": 23429, "\u0120dos": 23430, "\u0120Whole": 23431, "\u0120Ready": 23432, "Try": 23433, "\u0120Winnipeg": 23434, "earance": 23435, "\u0120incurred": 23436, "renched": 23437, "\u0120NSW": 23438, "ilot": 23439, "raine": 23440, "\u0120cube": 23441, "got": 23442, "\u0120runway": 23443, "etermined": 23444, "\u0120Hawks": 23445, "\u0120survivor": 23446, "\u0120Wish": 23447, "\u0120Din": 23448, "\u0120DEF": 23449, "\u0120Vault": 23450, "187": 23451, "\u0120mushrooms": 23452, "\u0120crisp": 23453, "bey": 23454, "\u0120Discovery": 23455, "\u0120developmental": 23456, "\u0120paradigm": 23457, "\u0120chaotic": 23458, "\u0120Tsu": 23459, "\u0120333": 23460, "bons": 23461, "\u0120bacterial": 23462, "\u0120commits": 23463, "\u0120cosmic": 23464, "\u0120mega": 23465, "ocative": 23466, "\u0120Paint": 23467, "ophobic": 23468, "\u0120vain": 23469, "\u0120carved": 23470, "\u0120Thief": 23471, "\u0120Gul": 23472, "owship": 23473, "\u0120cites": 23474, "\u0120Edinburgh": 23475, "\u0120diminished": 23476, "\u0120acknowledges": 23477, "\u0120Kills": 23478, "\u0120microw": 23479, "\u0120Hera": 23480, "\u0120seniors": 23481, "\u0120whereby": 23482, "Hop": 23483, "atron": 23484, "\u0120unavailable": 23485, "\u0120Nate": 23486, "\u0120480": 23487, "\u0120slated": 23488, "\u0120Rebecca": 23489, "\u0120Battery": 23490, "\u0120grammar": 23491, "\u0120headset": 23492, "\u0120cursor": 23493, "\u0120excluding": 23494, "anye": 23495, "aundering": 23496, "ebin": 23497, "\u0120feasible": 23498, "\u0120Publishing": 23499, "\u0120Labs": 23500, "\u0120Cliff": 23501, "\u0120Ferrari": 23502, "\u0120pac": 23503, "visible": 23504, "marked": 23505, "pell": 23506, "\u0120polite": 23507, "\u0120staggering": 23508, "\u0120Galactic": 23509, "\u0120superst": 23510, "\u0120paran": 23511, "\u0120Officers": 23512, "\u00e3\u0122\u0123": 23513, "\u0120specifics": 23514, "ulus": 23515, "239": 23516, "\u0120Paste": 23517, "AMP": 23518, "\u0120Panama": 23519, "\u0120Delete": 23520, "anguard": 23521, "restrial": 23522, "\u0120heroic": 23523, "\u0120Dy": 23524, "\u00d8\u00a7\u00d9\u0126": 23525, "\u0120incumbent": 23526, "\u0120crunch": 23527, "tro": 23528, "\u0120scoop": 23529, "\u0120blogger": 23530, "\u0120sellers": 23531, "uren": 23532, "\u0120medicines": 23533, "\u0120Caps": 23534, "\u0120Animation": 23535, "oxy": 23536, "\u0120outward": 23537, "\u0120inquiries": 23538, "229": 23539, "\u0120psychologist": 23540, "\u0120Sask": 23541, "evil": 23542, 
"\u0120contaminated": 23543, "\u00e3\u0124\u00a8": 23544, "herence": 23545, "\u0120branded": 23546, "\u0120Abdul": 23547, "zh": 23548, "\u0120paragraphs": 23549, "\u0120mins": 23550, "\u0120correlated": 23551, "erb": 23552, "\u0120impart": 23553, "\u0120milestone": 23554, "\u0120Solutions": 23555, "otle": 23556, "\u0120undercover": 23557, "\u0120marched": 23558, "\u0120Chargers": 23559, "fax": 23560, "\u0120Secrets": 23561, "\u0120ruth": 23562, "weather": 23563, "\u0120feminine": 23564, "\u0120sham": 23565, "\u0120prestigious": 23566, "iggins": 23567, "\u0120sung": 23568, "history": 23569, "ettle": 23570, "ggie": 23571, "\u0120outdated": 23572, "oland": 23573, "\u0120perceptions": 23574, "\u0120Session": 23575, "\u0120Dodgers": 23576, "uj": 23577, "\u0120END": 23578, "Doc": 23579, "\u0120deficiency": 23580, "Grand": 23581, "\u0120Joker": 23582, "\u0120retrospect": 23583, "\u0120diagnostic": 23584, "\u0120harmless": 23585, "\u0120rogue": 23586, "\u0120Aval": 23587, "Equ": 23588, "\u0120transc": 23589, "\u0120Robertson": 23590, "\u0120Depending": 23591, "\u0120Burns": 23592, "ivo": 23593, "\u0120hostility": 23594, "Features": 23595, "\u0135\u013a": 23596, "\u0120discomfort": 23597, "\u0120LCD": 23598, "specified": 23599, "\u0120Expect": 23600, "340": 23601, "\u0120imperative": 23602, "\u0120Regular": 23603, "Chinese": 23604, "\u0120statewide": 23605, "\u0120symm": 23606, "\u0120loops": 23607, "\u0120autumn": 23608, "Nick": 23609, "\u0120shaping": 23610, "\u0120quot": 23611, "\u0120cherry": 23612, "\u0120Crossref": 23613, "\u00e8\u00a6\u013c\u00e9\u0128\u0134": 23614, "Standard": 23615, "heed": 23616, "\u0120Dell": 23617, "\u0120Vietnamese": 23618, "\u0120ost": 23619, "\u0120Valkyrie": 23620, "OA": 23621, "Assad": 23622, "\u0120rebound": 23623, "\u0120Traffic": 23624, "places": 23625, "\u00e6\u013a": 23626, "\u0120Buc": 23627, "172": 23628, "\u0120shelters": 23629, "\u0120insisting": 23630, "\u0120Certainly": 23631, "\u0120Kenneth": 23632, "\u0120TCP": 23633, "\u0120penal": 23634, "\u0120Replay": 23635, "heard": 23636, "\u0120dialect": 23637, "iza": 23638, "\u0120FY": 23639, "itcher": 23640, "\u0120DL": 23641, "\u0120spiral": 23642, "\u0120quarterbacks": 23643, "\u0120hull": 23644, "\u0120google": 23645, "\u0120todd": 23646, "\u0120Sterling": 23647, "\u0120Plate": 23648, "\u0120spying": 23649, "mbol": 23650, "\u0120Realm": 23651, "\u0120Proced": 23652, "\u0120Crash": 23653, "\u0120terminate": 23654, "\u0120protesting": 23655, "Center": 23656, "guided": 23657, "\u0120uncover": 23658, "\u0120boycott": 23659, "\u0120realizes": 23660, "sound": 23661, "\u0120pretending": 23662, "\u0120Vas": 23663, "1980": 23664, "\u0120framed": 23665, "\u0120139": 23666, "\u0120descended": 23667, "\u0120rehabilitation": 23668, "\u0120borrowing": 23669, "\u0120Buch": 23670, "\u0120blur": 23671, "Ron": 23672, "\u0120Frozen": 23673, "enza": 23674, "Chief": 23675, "\u0120Poor": 23676, "\u0120translates": 23677, "MIN": 23678, "\u0120212": 23679, "JECT": 23680, "\u0120erupted": 23681, "\u0120successes": 23682, "SEC": 23683, "\u0120plague": 23684, "\u0120gems": 23685, "doms": 23686, "\u0120stretches": 23687, "\u0120Spy": 23688, "\u0120storytelling": 23689, "Credit": 23690, "\u0120Push": 23691, "\u0120traction": 23692, "\u0120ineffective": 23693, "\u0120Luna": 23694, "\u0120tapes": 23695, "\u0120analytics": 23696, "ercise": 23697, "\u0120programmes": 23698, "\u0120Carbon": 23699, "\u0120behold": 23700, "heavy": 23701, "\u0120Conservation": 23702, "\u0120FIR": 23703, "\u0120sack": 23704, "termin": 23705, "ricks": 23706, 
"\u0120housed": 23707, "\u0120unusually": 23708, "Ice": 23709, "\u0120executing": 23710, "\u0120Moroc": 23711, "eday": 23712, "\u0120editions": 23713, "\u0120smarter": 23714, "\u0120BA": 23715, "\u0120outlaw": 23716, "\u0120vanished": 23717, "iba": 23718, "ALSE": 23719, "\u0120Silva": 23720, "238": 23721, "Could": 23722, "\u0120philosopher": 23723, "\u0120evacuated": 23724, "Secret": 23725, "142": 23726, "\u0120visas": 23727, "\u00e3\u0124\u00ac": 23728, "\u0120Malt": 23729, "\u0120Clearly": 23730, "\u0120Niger": 23731, "\u0120Cairo": 23732, "\u0120Fist": 23733, "380": 23734, "\u0120XML": 23735, "auto": 23736, "itant": 23737, "\u0120reinforced": 23738, "Record": 23739, "\u0120Survivor": 23740, "GHz": 23741, "\u0120screws": 23742, "parents": 23743, "\u0120oceans": 23744, "mares": 23745, "\u0120brakes": 23746, "vasive": 23747, "\u0120hello": 23748, "\u0120SIM": 23749, "rimp": 23750, "\u0120ore": 23751, "\u0120Armour": 23752, "247": 23753, "\u0120terrific": 23754, "\u0120tones": 23755, "141": 23756, "\u0120Minutes": 23757, "Episode": 23758, "\u0120curves": 23759, "\u0120inflammatory": 23760, "\u0120batting": 23761, "\u0120Beautiful": 23762, "Lay": 23763, "\u0120unpop": 23764, "vable": 23765, "\u0120riots": 23766, "\u0120Tactics": 23767, "baugh": 23768, "\u0120Cock": 23769, "\u0120orgasm": 23770, "\u0120Sas": 23771, "\u0120constructor": 23772, "etz": 23773, "Gov": 23774, "\u0120antagon": 23775, "\u0120theat": 23776, "\u0120deeds": 23777, "hao": 23778, "cuts": 23779, "\u0120McCl": 23780, "\u0120um": 23781, "\u0120Scientists": 23782, "\u0120grassroots": 23783, "yssey": 23784, "\"]=>": 23785, "\u0120surfaced": 23786, "\u0120shades": 23787, "\u0120neighbours": 23788, "\u0120advertis": 23789, "oya": 23790, "\u0120merged": 23791, "Upon": 23792, "\u0120gad": 23793, "\u0120anticipate": 23794, "Anyway": 23795, "\u0120slogan": 23796, "\u0120disrespect": 23797, "Iran": 23798, "\u0120TB": 23799, "acted": 23800, "\u0120subpoen": 23801, "mediately": 23802, "OOOO": 23803, "\u0120waiver": 23804, "\u0120vulnerabilities": 23805, "ottesville": 23806, "\u0120Huffington": 23807, "Josh": 23808, "\u0120DH": 23809, "Monday": 23810, "\u0120Ellen": 23811, "Know": 23812, "xon": 23813, "items": 23814, "228": 23815, "\u0120fills": 23816, "\u0120Nike": 23817, "\u0120cumulative": 23818, "andals": 23819, "Ir": 23820, "\u0120\u00ec": 23821, "\u0120friction": 23822, "igator": 23823, "\u0120scans": 23824, "\u0120Vienna": 23825, "ldom": 23826, "\u0120performers": 23827, "Prim": 23828, "\u0120bidding": 23829, "Mur": 23830, "\u0120leaned": 23831, "\u0120Prix": 23832, "alks": 23833, "\u0120[\u00e2\u0122\u00a6]": 23834, "\u0120Twitch": 23835, "\u0120Developer": 23836, "\u0120Gir": 23837, "\u0120callback": 23838, "Abstract": 23839, "\u0120accustomed": 23840, "\u0120freedoms": 23841, "\u0120PG": 23842, "uracy": 23843, "\u0120lump": 23844, "isman": 23845, ",,,,": 23846, "1992": 23847, "\u0120RED": 23848, "\u0120worm": 23849, "Match": 23850, "\u0120Platinum": 23851, "IJ": 23852, "\u0120Owner": 23853, "Trivia": 23854, "compl": 23855, "\u0120newborn": 23856, "\u0120fantas": 23857, "Own": 23858, "\u01201959": 23859, "\u0120sympath": 23860, "\u0120ubiqu": 23861, "\u0120outputs": 23862, "\u0120allev": 23863, "\u0120prag": 23864, "Kevin": 23865, "\u0120favors": 23866, "\u0120burial": 23867, "\u0120nurt": 23868, "solete": 23869, "cache": 23870, "\u0120156": 23871, "\u0120unlocks": 23872, "techn": 23873, "Making": 23874, "\u0120conquer": 23875, "adic": 23876, "\u00e6\u0138": 23877, "\u0120elf": 23878, "\u0120electorate": 23879, "\u0120Kurds": 
23880, "\u0120Stack": 23881, "\u0120Samurai": 23882, "\u0120\u00e2\u013a\u0127": 23883, "\u0120{}": 23884, "\u0120Said": 23885, "\u0120Fallout": 23886, "\u0120kindness": 23887, "\u0120Customs": 23888, "\u0120Boulevard": 23889, "\u0120helicopters": 23890, "otics": 23891, "\u0120Veget": 23892, "comment": 23893, "\u0120criticised": 23894, "\u0120polished": 23895, "\u0120Remix": 23896, "\u0120Cultural": 23897, "\u0120recons": 23898, "\u0120doi": 23899, "atem": 23900, "Screen": 23901, "\u0120barred": 23902, "Comments": 23903, "\u0120Generally": 23904, "\u0120slap": 23905, "720": 23906, "Vari": 23907, "pine": 23908, "\u0120empt": 23909, "\u0120hats": 23910, "\u0120Playing": 23911, "lab": 23912, "average": 23913, "forms": 23914, "\u0120Cotton": 23915, "\u0120cans": 23916, "\u0120DON": 23917, "\u0120Somalia": 23918, "Crypt": 23919, "\u0120Increases": 23920, "Ever": 23921, "modern": 23922, "\u0120surgeon": 23923, "3000": 23924, "\u0120randomized": 23925, "================================================================": 23926, "Bern": 23927, "impl": 23928, "\u0120COR": 23929, "\u0120proclaim": 23930, "thouse": 23931, "\u0120toes": 23932, "\u0120ample": 23933, "\u0120preserving": 23934, "\u0120disbel": 23935, "grand": 23936, "Besides": 23937, "\u0120silk": 23938, "\u0120Pattern": 23939, "hm": 23940, "\u0120enterprises": 23941, "\u0120affidavit": 23942, "\u0120Advisory": 23943, "\u0120advertised": 23944, "\u0120Religious": 23945, "sections": 23946, "psych": 23947, "\u0120Fields": 23948, "aways": 23949, "\u0120hashtag": 23950, "\u0120Nightmare": 23951, "\u0120vampire": 23952, "\u0120forensic": 23953, "rossover": 23954, "nar": 23955, "\u0120navy": 23956, "\u0120vacant": 23957, "\u0120Duel": 23958, "\u0120hallway": 23959, "\u0120facebook": 23960, "identally": 23961, "\u0120NRA": 23962, "\u0120matt": 23963, "\u0120hurricane": 23964, "\u0120Kirby": 23965, "\u0120Puzzle": 23966, "\u0120skirt": 23967, "oust": 23968, "dullah": 23969, "\u0120analogy": 23970, "inion": 23971, "\u0120tomatoes": 23972, "\u0120NV": 23973, "\u0120Peak": 23974, "\u0120Meyer": 23975, "\u0120appointments": 23976, "\u0120masc": 23977, "\u0120alley": 23978, "rehend": 23979, "\u0120charities": 23980, "\u0120undo": 23981, "\u0120destinations": 23982, "\u0120Testing": 23983, "\">\"": 24618, "cats": 24619, "*.": 24620, "\u0120gestures": 24621, "general": 24622, "League": 24623, "\u0120packets": 24624, "\u0120Inspector": 24625, "\u0120Berg": 24626, "\u0120fraudulent": 24627, "\u0120criticize": 24628, "Fun": 24629, "\u0120blaming": 24630, "ndra": 24631, "\u0120slash": 24632, "\u0120Eston": 24633, "\u0120proposing": 24634, "\u0120whales": 24635, "\u0120therapist": 24636, "\u0120subset": 24637, "\u0120leisure": 24638, "ELD": 24639, "\u0120CVE": 24640, "\u0120Activity": 24641, "\u0120culmin": 24642, "shop": 24643, "\u0120DAY": 24644, "ischer": 24645, "\u0120Admiral": 24646, "\u0120Attacks": 24647, "\u01201958": 24648, "\u0120memoir": 24649, "\u0120folded": 24650, "\u0120sexist": 24651, "\u0120153": 24652, "\u0120LI": 24653, "\u0120readings": 24654, "\u0120embarrassment": 24655, "\u0120Employment": 24656, "wart": 24657, "chin": 24658, "\u0120continuation": 24659, "lia": 24660, "Recently": 24661, "\u0120duel": 24662, "\u0120evacuation": 24663, "\u0120Kashmir": 24664, "\u0120disposition": 24665, "\u0120Rig": 24666, "\u0120bolts": 24667, "\u0120insurers": 24668, "467": 24669, "Mex": 24670, "\u0120retaliation": 24671, "\u0120misery": 24672, "\u0120unreasonable": 24673, "raining": 24674, "Imm": 24675, "\u0120PU": 24676, "emer": 24677, 
"\u0120genital": 24678, "\u00e3\u0124\u00b3": 24679, "\u0120Candy": 24680, "\u0120onions": 24681, "\u0120Patt": 24682, "liner": 24683, "\u0120conceded": 24684, "\u0120fa": 24685, "\u0120forc": 24686, "\u0120Hernandez": 24687, "\u0120Geoff": 24688, "debian": 24689, "\u0120Teams": 24690, "\u0120cries": 24691, "\u0120homeowners": 24692, "237": 24693, "ABC": 24694, "\u0120stitch": 24695, "\u0120statistic": 24696, "\u0120headers": 24697, "\u0120Biology": 24698, "\u0120motors": 24699, "\u0120GEN": 24700, "\u0120Lip": 24701, "\u0120hates": 24702, "\u0120heel": 24703, "Self": 24704, "ipl": 24705, "EDIT": 24706, "orting": 24707, "\u0120annot": 24708, "\u0120Speech": 24709, "oldemort": 24710, "\u0120Javascript": 24711, "\u0120LeBron": 24712, "\u0120footprint": 24713, "\u0120fn": 24714, "\u0120seizures": 24715, "nas": 24716, "hide": 24717, "\u01201954": 24718, "\u0120Bee": 24719, "\u0120Declaration": 24720, "\u0120Katie": 24721, "\u0120reservations": 24722, "NR": 24723, "female": 24724, "\u0120saturated": 24725, "\u0120biblical": 24726, "\u0120trolls": 24727, "Device": 24728, "photos": 24729, "\u0120drums": 24730, "\u00e3\u0125\u012b\u00e3\u0125\u00a9\u00e3\u0124\u00b4\u00e3\u0125\u00b3": 24731, "Night": 24732, "fighter": 24733, "\u0120Hak": 24734, "riber": 24735, "\u0120cush": 24736, "\u0120disciplinary": 24737, "baum": 24738, "\u0120GH": 24739, "\u0120Schmidt": 24740, "ilibrium": 24741, "\u0120sixty": 24742, "\u0120Kushner": 24743, "rots": 24744, "\u0120pund": 24745, "\u0120Rac": 24746, "\u0120springs": 24747, "\u0120conve": 24748, "Business": 24749, "Fall": 24750, "\u0120qualifications": 24751, "\u0120verses": 24752, "\u0120narciss": 24753, "\u0120Koh": 24754, "\u0120Wow": 24755, "\u0120Charlottesville": 24756, "edo": 24757, "\u0120interrogation": 24758, "\u0120Wool": 24759, "365": 24760, "Brian": 24761, "\u0120\u00e2\u013e\u0135": 24762, "\u0120alleges": 24763, "onds": 24764, "idation": 24765, "\u0120Jackie": 24766, "yu": 24767, "\u0120lakes": 24768, "\u0120worthwhile": 24769, "\u0120crystals": 24770, "\u0120Juda": 24771, "\u0120comprehend": 24772, "\u0120flush": 24773, "\u0120absorption": 24774, "\u0120OC": 24775, "\u0120frightened": 24776, "\u0120Chocolate": 24777, "Martin": 24778, "\u0120buys": 24779, "\u0120bucks": 24780, "\u0120appell": 24781, "\u0120Championships": 24782, "\u0120listener": 24783, "\u0120Defensive": 24784, "\u0120cz": 24785, "uds": 24786, "\u0120Mate": 24787, "\u0120replay": 24788, "\u0120decorated": 24789, "\u0120sunk": 24790, "\u0120VIP": 24791, "\u0120Ank": 24792, "\u0120195": 24793, "aaaa": 24794, "Nobody": 24795, "\u0120Milk": 24796, "\u0120Gur": 24797, "\u0120Mk": 24798, "\u0120Sara": 24799, "\u0120seating": 24800, "\u0120Wid": 24801, "Track": 24802, "\u0120employs": 24803, "\u0120gigantic": 24804, "APP": 24805, "\u00e3\u0124\u00a7": 24806, "inventory": 24807, "\u0120towel": 24808, "atche": 24809, "lasting": 24810, "\u0120TL": 24811, "\u0120latency": 24812, "\u0120kne": 24813, "Ber": 24814, "meaning": 24815, "\u0120upheld": 24816, "\u0120playground": 24817, "\u0120mant": 24818, "Side": 24819, "\u0120stereo": 24820, "\u0120northwest": 24821, "\u0120exceptionally": 24822, "\u0120rays": 24823, "\u0120recurring": 24824, "Drive": 24825, "\u0120upright": 24826, "\u0120abduct": 24827, "\u0120Marathon": 24828, "\u0120goodbye": 24829, "\u0120alphabet": 24830, "hp": 24831, "\u0120courtroom": 24832, "rington": 24833, "othing": 24834, "Tag": 24835, "\u0120diplomats": 24836, "\u0120barbar": 24837, "\u0120Aqua": 24838, "183": 24839, "3333": 24840, "\u0120maturity": 24841, 
"\u0120instability": 24842, "\u0120Apache": 24843, "\u0120===": 24844, "\u0120fasting": 24845, "\u0120Grid": 24846, "ModLoader": 24847, "\u0120152": 24848, "Abs": 24849, "\u0120Operating": 24850, "etti": 24851, "\u0120acquaint": 24852, "Donnell": 24853, "\u0120Kem": 24854, "\u0120Forge": 24855, "\u0120armored": 24856, "Mil": 24857, "\u0120philosophers": 24858, "invest": 24859, "Players": 24860, "\u00e2\u012a": 24861, "\u0120myriad": 24862, "\u0120comrades": 24863, "Rot": 24864, "\u0120remembering": 24865, "\u0120corresponds": 24866, "\u0120programmers": 24867, "\u0120Lynn": 24868, "\u0120olig": 24869, "\u0120coherent": 24870, "ynchron": 24871, "\u0120Chemical": 24872, "\u0120jugg": 24873, "pair": 24874, "posts": 24875, "Eye": 24876, "\u0120Inner": 24877, "\u0120semester": 24878, "ottest": 24879, "\u0120Emirates": 24880, "ricanes": 24881, "orously": 24882, "mits": 24883, "\u0120Wis": 24884, "\u0120dodge": 24885, "location": 24886, "\u0120faded": 24887, "Amazon": 24888, "\u0120Proceed": 24889, "\u0120INFO": 24890, "journal": 24891, "\u0120Truck": 24892, "Ten": 24893, "\u0120217": 24894, "\u0120statutes": 24895, "mobile": 24896, "\u0120Types": 24897, "Recomm": 24898, "buster": 24899, "pex": 24900, "\u0120legends": 24901, "\u0120headache": 24902, "faced": 24903, "\u0120WiFi": 24904, "ifty": 24905, "\u0120HER": 24906, "\u0120circuits": 24907, "ERROR": 24908, "226": 24909, "olin": 24910, "\u0120cylinder": 24911, "ospace": 24912, "ikers": 24913, "Prem": 24914, "Quant": 24915, "\u0120conflicting": 24916, "\u0120slightest": 24917, "\u0120forged": 24918, "ionage": 24919, "Stephen": 24920, "\u0120Kub": 24921, "\u0120Opportun": 24922, "\u0120Heal": 24923, "\u0120blo": 24924, "\u0120rulers": 24925, "\u0120huh": 24926, "\u0120submarine": 24927, "fy": 24928, "asser": 24929, "\u0120allowance": 24930, "\u0120Kasich": 24931, "\u0120Tas": 24932, "\u0120Australians": 24933, "ForgeModLoader": 24934, "\u0120\u00e2\u0128\u0133": 24935, "\u0120Matrix": 24936, "amins": 24937, "\u01201200": 24938, "\u0120Acqu": 24939, "236": 24940, "Document": 24941, "\u0120Breaking": 24942, "193": 24943, "\u0120Subst": 24944, "\u0120Roller": 24945, "\u0120Properties": 24946, "\u0120NI": 24947, "tier": 24948, "\u0120crushing": 24949, "\u0120advocating": 24950, "Furthermore": 24951, "keepers": 24952, "\u0120sexism": 24953, "xd": 24954, "\u0120caller": 24955, "\u0120Sense": 24956, "chieve": 24957, "\u0120TF": 24958, "\u0120fueled": 24959, "\u0120reminiscent": 24960, "\u0120obsess": 24961, "urst": 24962, "\u0120uphold": 24963, "\u0120Fans": 24964, "hetics": 24965, "\u0120\u00e2\u0139": 24966, "\u0120Bath": 24967, "\u0120beverage": 24968, "\u0120oscill": 24969, "254": 24970, "\u0120poles": 24971, "\u0120gradual": 24972, "\u0120exting": 24973, "\u0120Suff": 24974, "\u0120Suddenly": 24975, "\u0120liking": 24976, "\u01201949": 24977, "unciation": 24978, "amination": 24979, "\u0120Omar": 24980, "\u0120LV": 24981, "\u0120Consequently": 24982, "\u0120synthes": 24983, "\u0120GIF": 24984, "\u0120pains": 24985, "\u0120interacting": 24986, "uously": 24987, "incre": 24988, "\u0120rumor": 24989, "\u0120Scientology": 24990, "197": 24991, "\u0120Zig": 24992, "\u0120spelling": 24993, "\u0120ASS": 24994, "\u0120extingu": 24995, "mson": 24996, "\u0120gh": 24997, "\u0120remarked": 24998, "\u0120Strategic": 24999, "\u0120MON": 25000, "\u00e5\u00a5": 25001, "gae": 25002, "\u0120WHAT": 25003, "Eric": 25004, "\u0120Campus": 25005, "\u0120methane": 25006, "\u0120imagin": 25007, "JUST": 25008, "\u0120Alm": 25009, "XT": 25010, "iq": 25011, "\u0120RSS": 25012, 
"\u0120wrongdoing": 25013, "atta": 25014, "\u0120bigot": 25015, "\u0120demonstrators": 25016, "\u0120Calvin": 25017, "\u0120Villa": 25018, "\u0120membrane": 25019, "\u0120Awesome": 25020, "\u0120benefic": 25021, "268": 25022, "\u0120magnificent": 25023, "\u0120Lots": 25024, "Greg": 25025, "\u0120Boris": 25026, "\u0120detainees": 25027, "\u0120Herman": 25028, "\u0120whispered": 25029, "\u0120awe": 25030, "Professor": 25031, "funding": 25032, "\u0120physiological": 25033, "\u0120Destruction": 25034, "\u0120limb": 25035, "\u0120manipulated": 25036, "\u0120bubbles": 25037, "\u0120pseud": 25038, "\u0120hydra": 25039, "\u0120Bristol": 25040, "\u0120stellar": 25041, "\u0120Expansion": 25042, "\u0120Kell": 25043, "\u0120Interestingly": 25044, "\u0120mans": 25045, "\u0120dragging": 25046, "\u0120ecological": 25047, "\u0120Fit": 25048, "\u0120gent": 25049, "\u0120benefited": 25050, "\u0120Haiti": 25051, "\u0120polyg": 25052, "\u00e3\u0125\u0130": 25053, "\u01202030": 25054, "\u0120prow": 25055, "\u0120reconstruction": 25056, "\u0120wast": 25057, "\u0120psychic": 25058, "\u0120Greeks": 25059, "Handler": 25060, "162": 25061, "\u0120Pulse": 25062, "\u0120solicit": 25063, "\u0120sys": 25064, "\u0120influx": 25065, "\u0120Gentle": 25066, "percent": 25067, "\u0120proliferation": 25068, "\u0120taxable": 25069, "\u0120disregard": 25070, "\u0120escaping": 25071, "\u0120ginger": 25072, "\u0120withstand": 25073, "\u0120devastated": 25074, "\u0120Dew": 25075, "series": 25076, "\u0120injected": 25077, "elaide": 25078, "\u0120turnover": 25079, "heat": 25080, "\u013b\u0124": 25081, "Happy": 25082, "\u0120Silent": 25083, "\u00e3\u0124\u0143": 25084, "ivism": 25085, "\u0120irrational": 25086, "AMA": 25087, "\u0120reef": 25088, "rub": 25089, "\u0120162": 25090, "\u0120bankers": 25091, "\u0120Ethics": 25092, "vv": 25093, "\u0120criticisms": 25094, "Kn": 25095, "186": 25096, "Movie": 25097, "\u0120Tories": 25098, "\u0120nood": 25099, "\u0120distortion": 25100, "False": 25101, "odore": 25102, "\u0120tasty": 25103, "Research": 25104, "\u0120UID": 25105, "-)": 25106, "\u0120divorced": 25107, "\u0120MU": 25108, "\u0120Hayes": 25109, "\u0120Isn": 25110, "iani": 25111, "\u0120HQ": 25112, "\u0120\"#": 25113, "ignant": 25114, "\u0120traumatic": 25115, "\u0120Ling": 25116, "Hun": 25117, "\u0120sabot": 25118, "online": 25119, "random": 25120, "\u0120renamed": 25121, "rared": 25122, "KA": 25123, "dead": 25124, "\u00c3\u00a9t": 25125, "\u0120Assistance": 25126, "\u0120seaf": 25127, "++++++++": 25128, "\u0120seldom": 25129, "\u0120Webb": 25130, "\u0120boolean": 25131, "ulet": 25132, "\u0120refrain": 25133, "\u0120DIY": 25134, "rule": 25135, "\u0120shutting": 25136, "\u0120utilizing": 25137, "loading": 25138, "\u0120Param": 25139, "coal": 25140, "ooter": 25141, "\u0120attracting": 25142, "\u0120Dol": 25143, "\u0120hers": 25144, "agnetic": 25145, "\u0120Reach": 25146, "imo": 25147, "\u0120discarded": 25148, "\u0120Pip": 25149, "015": 25150, "\u00c3\u00bcr": 25151, "\u0120mug": 25152, "Imagine": 25153, "COL": 25154, "\u0120cursed": 25155, "\u0120Shows": 25156, "\u0120Curtis": 25157, "\u0120Sachs": 25158, "speaking": 25159, "\u0120Vista": 25160, "\u0120Framework": 25161, "ongo": 25162, "\u0120subreddit": 25163, "\u0120crus": 25164, "\u0120Oval": 25165, "Row": 25166, "growing": 25167, "\u0120installment": 25168, "\u0120glac": 25169, "\u0120Advance": 25170, "ECK": 25171, "\u0120LGBTQ": 25172, "LEY": 25173, "\u0120acet": 25174, "\u0120successive": 25175, "\u0120Nicole": 25176, "\u01201957": 25177, "Quote": 25178, "\u0120circumstance": 
25179, "ackets": 25180, "\u0120142": 25181, "ortium": 25182, "\u0120guessed": 25183, "\u0120Frame": 25184, "\u0120perpetrators": 25185, "\u0120Aviation": 25186, "\u0120Bench": 25187, "\u0120handc": 25188, "Ap": 25189, "\u01201956": 25190, "259": 25191, "rand": 25192, "NetMessage": 25193, "din": 25194, "urtles": 25195, "hig": 25196, "\u0120VIII": 25197, "ffiti": 25198, "\u0120Swords": 25199, "bial": 25200, "\u0120kidnapping": 25201, "device": 25202, "\u0120barn": 25203, "\u0120Eli": 25204, "aucas": 25205, "Send": 25206, "Constructed": 25207, "\u0120\u00c2\u00bd": 25208, "\u0120needles": 25209, "\u0120advertisements": 25210, "\u0120vou": 25211, "\u0120exhibited": 25212, "\u0120Fortress": 25213, "Ask": 25214, "Berry": 25215, "TYPE": 25216, "\u0120cancers": 25217, "umping": 25218, "\u0120Territory": 25219, "\u0120prud": 25220, "\u0120nas": 25221, "\u0120atheist": 25222, "\u0120balances": 25223, "\u00e3\u0123\u0141": 25224, "\u0120Shawn": 25225, "&&": 25226, "\u0120landsc": 25227, "\u0120RGB": 25228, "\u0120petty": 25229, "\u0120excellence": 25230, "\u0120translations": 25231, "\u0120parcel": 25232, "\u0120Chev": 25233, "East": 25234, "\u0120Output": 25235, "imi": 25236, "\u0120ambient": 25237, "\u0120Threat": 25238, "\u0120villains": 25239, "\u0120550": 25240, "ICA": 25241, "\u0120taller": 25242, "\u0120leaking": 25243, "cup": 25244, "\u0120polish": 25245, "\u0120infectious": 25246, "\u0120KC": 25247, "\u0120@@": 25248, "background": 25249, "\u0120bureaucracy": 25250, "\u0120Sai": 25251, "unless": 25252, "itious": 25253, "\u0120Skype": 25254, "Atl": 25255, "IDENT": 25256, "008": 25257, "\u0120hypocr": 25258, "\u0120pitchers": 25259, "\u0120guessing": 25260, "\u0120FINAL": 25261, "Between": 25262, "\u0120villagers": 25263, "\u0120252": 25264, "fashion": 25265, "\u0120Tunis": 25266, "Beh": 25267, "\u0120Exc": 25268, "\u0120MID": 25269, "288": 25270, "\u0120Haskell": 25271, "196": 25272, "\u0120NOR": 25273, "\u0120specs": 25274, "\u0120invari": 25275, "\u0120glut": 25276, "\u0120Cars": 25277, "\u0120impulse": 25278, "\u0120honors": 25279, "gel": 25280, "\u0120jurisdictions": 25281, "\u0120Bundle": 25282, "ulas": 25283, "California": 25284, "\u0120Increase": 25285, "\u0120pear": 25286, "\u0120singles": 25287, "\u0120cues": 25288, "\u0120underwent": 25289, "\u0120WS": 25290, "\u0120exaggerated": 25291, "\u0120dubious": 25292, "\u0120flashing": 25293, "LOG": 25294, ")].": 25295, "Journal": 25296, "tg": 25297, "Van": 25298, "\u0120Istanbul": 25299, "\u0120Insp": 25300, "\u0120Franken": 25301, "Draw": 25302, "\u0120sadness": 25303, "\u0120ironic": 25304, "\u0120Fry": 25305, "xc": 25306, "\u0120164": 25307, "isch": 25308, "Way": 25309, "\u0120Protestant": 25310, "horn": 25311, "\u0120unaff": 25312, "\u0120Viv": 25313, "illas": 25314, "\u0120Productions": 25315, "\u0120Hogan": 25316, "\u0120perimeter": 25317, "\u0120Sisters": 25318, "\u0120spontaneous": 25319, "\u0120downside": 25320, "\u0120descendants": 25321, "\u0120orn": 25322, "worm": 25323, "Japanese": 25324, "\u01201955": 25325, "\u0120151": 25326, "\u0120Doing": 25327, "elsen": 25328, "umbles": 25329, "\u0120radically": 25330, "\u0120Drum": 25331, "\u0120Bach": 25332, "\u0120liabilities": 25333, "\u0120OB": 25334, "\u0120Elementary": 25335, "\u0120meme": 25336, "ynes": 25337, "\u0120fingerprint": 25338, "\u0120Grab": 25339, "\u0120undertake": 25340, "Members": 25341, "\u0120Reader": 25342, "\u0120Sims": 25343, "god": 25344, "\u0120hypothetical": 25345, "scient": 25346, "\u0120AJ": 25347, "\u0120charism": 25348, "\u0120admissions": 25349, 
"\u0120Missile": 25350, "trade": 25351, "\u0120exercising": 25352, "\u0120Background": 25353, "Written": 25354, "\u0120vocals": 25355, "whether": 25356, "\u0120vi": 25357, "\u0120Winner": 25358, "\u0120litter": 25359, "\u0120Shooting": 25360, "STEM": 25361, "\u00e3\u0124\u00a1": 25362, "\u0120AFL": 25363, "\u0120variability": 25364, "\u0120eats": 25365, "\u0120DPS": 25366, "brow": 25367, "\u0120elephants": 25368, "\u0120strat": 25369, "\u0120\u00c5": 25370, "\u0120settlers": 25371, "Matthew": 25372, "\u0120inadvert": 25373, "HI": 25374, "\u0120IMF": 25375, "\u0120Goal": 25376, "\u0120nerves": 25377, "Johnson": 25378, "eye": 25379, "ablishment": 25380, "Thursday": 25381, "BILITY": 25382, "Had": 25383, "amoto": 25384, "hetamine": 25385, "eps": 25386, "\u0120mitochond": 25387, "\u0120compressed": 25388, "\u0120Trevor": 25389, "\u0120Animals": 25390, "Tool": 25391, "Lock": 25392, "\u0120tweak": 25393, "\u0120pinch": 25394, "\u0120cancellation": 25395, "Pot": 25396, "\u0120focal": 25397, "\u0120Astron": 25398, "173": 25399, "\u0120ASC": 25400, "\u0120OTHER": 25401, "umni": 25402, "\u0120demise": 25403, "dl": 25404, "\u00d9\u0127": 25405, "Semitism": 25406, "\u0120cracking": 25407, "\u0120collaborative": 25408, "\u0120explores": 25409, "sql": 25410, "\u0120herbs": 25411, "\u0120configurations": 25412, "mis": 25413, "\u0120Result": 25414, "acey": 25415, "\u0120Smoke": 25416, "\u0120sanct": 25417, "elia": 25418, "\u0120degener": 25419, "\u0120deepest": 25420, "\u0120screamed": 25421, "\u0120nap": 25422, "Software": 25423, "\u0120STAR": 25424, "EF": 25425, "\u0120Xin": 25426, "sponsored": 25427, "manship": 25428, "233": 25429, "\u0120primaries": 25430, "\u0120filtering": 25431, "\u0120assemble": 25432, "mil": 25433, "\u0120Myers": 25434, "bows": 25435, "\u0120punched": 25436, "Mic": 25437, "\u0120innovations": 25438, "\u0120func": 25439, "ando": 25440, "\u0120fracking": 25441, "\u0120Vul": 25442, "\u00d0\u00be\u00d0": 25443, "oshop": 25444, "\u0120Immun": 25445, "\u0120settling": 25446, "\u0120adolescents": 25447, "\u0120rebuilding": 25448, "\u0120transforming": 25449, "\u0120parole": 25450, "\u0120harbor": 25451, "\u0120booking": 25452, "otional": 25453, "ongevity": 25454, "\u0120Yo": 25455, "bug": 25456, "\u0120emerges": 25457, "\u0120Methods": 25458, "\u0120Chu": 25459, "Pres": 25460, "\u0120Dungeons": 25461, "\u0120trailing": 25462, "\u0120Rum": 25463, "\u0120Hugh": 25464, "\u00e5\u00a4\u00a9": 25465, "\u0120Era": 25466, "\u0120Battles": 25467, "Results": 25468, "\u0120Trading": 25469, "\u0120versa": 25470, "css": 25471, "axies": 25472, "heet": 25473, "\u0120greed": 25474, "1989": 25475, "\u0120gardens": 25476, "\u0120contingent": 25477, "Park": 25478, "\u0120Leafs": 25479, "hook": 25480, "robe": 25481, "\u0120diplomacy": 25482, "\u0120Fuel": 25483, "\u0120Invasion": 25484, "\u0120upgrading": 25485, "Male": 25486, "\u0120elic": 25487, "\u0120relentless": 25488, "\u0120Covenant": 25489, "apesh": 25490, "\u0120Trop": 25491, "Ty": 25492, "production": 25493, "arty": 25494, "\u0120punches": 25495, "ako": 25496, "cyclopedia": 25497, "\u0120Rabbit": 25498, "\u0120HDMI": 25499, "\u0120141": 25500, "\u0120foil": 25501, "ItemImage": 25502, "\u0120FG": 25503, "\u0120implementations": 25504, "\u0120Pom": 25505, "ixtures": 25506, "\u0120await": 25507, "\u0120330": 25508, "amus": 25509, "\u0120umbrella": 25510, "\u0120foresee": 25511, "separ": 25512, "\u0120circumcision": 25513, "\u0120peripheral": 25514, "Say": 25515, "\u0120Expert": 25516, "Inc": 25517, "\u0120withdrew": 25518, "\u0120Anders": 25519, 
"fried": 25520, "\u0120radioactive": 25521, "\u0120Opening": 25522, "\u0120boarding": 25523, "\u0120ND": 25524, "\u0120overthrow": 25525, "Activ": 25526, "WP": 25527, "\u0120Acts": 25528, "\u00d7\u013b": 25529, "\u0120motions": 25530, "vic": 25531, "\u0120Mighty": 25532, "\u0120Defender": 25533, "aer": 25534, "\u0120thankful": 25535, "\u0120Killing": 25536, "\u0120Bris": 25537, "moil": 25538, "\u0120predicting": 25539, "266": 25540, "choice": 25541, "\u0120killers": 25542, "\u0120incub": 25543, "\u0120Chest": 25544, "athering": 25545, "\u0120proclaimed": 25546, "flower": 25547, "ossom": 25548, "umbledore": 25549, "\u0120Cycling": 25550, "\u0120Occupy": 25551, "AGES": 25552, "Pen": 25553, "\u0120Yug": 25554, "\u0120packaged": 25555, "\u0120heightened": 25556, "cot": 25557, "stack": 25558, "Cond": 25559, "\u0120stamps": 25560, "mage": 25561, "\u0120persuaded": 25562, "\u0120ensl": 25563, "\u0120Cardinal": 25564, "\u0120solitary": 25565, "\u0120possessing": 25566, "\u0120Cork": 25567, "\u0120evid": 25568, "\u0120Tay": 25569, "\u0120blues": 25570, "\u0120extremism": 25571, "\u0120lunar": 25572, "\u0120clown": 25573, "Techn": 25574, "\u0120festivals": 25575, "\u0120PvP": 25576, "\u0120Lar": 25577, "\u0120consequently": 25578, "present": 25579, "\u0120someday": 25580, "\u00e7\u0130\u012d": 25581, "\u0120Meteor": 25582, "\u0120touring": 25583, "culture": 25584, "\u0120beaches": 25585, "Ship": 25586, "cause": 25587, "\u0120Flood": 25588, "\u00e3\u0125\u00af": 25589, "\u0120purity": 25590, "those": 25591, "\u0120emission": 25592, "bolt": 25593, "\u0120chord": 25594, "\u0120Scripture": 25595, "Lu": 25596, "\u0120${": 25597, "created": 25598, "Others": 25599, "258": 25600, "\u0120elemental": 25601, "\u0120annoyed": 25602, "\u0120AE": 25603, "dan": 25604, "\u0120Sag": 25605, "Researchers": 25606, "\u0120fairy": 25607, "\u00e2\u0122\u0135\u00e2\u0122\u0135": 25608, "============": 25609, "Smart": 25610, "GGGG": 25611, "\u0120skeletons": 25612, "\u0120pupils": 25613, "linked": 25614, "\u0120urgency": 25615, "enabled": 25616, "\u0120Fuck": 25617, "\u0120councill": 25618, "rab": 25619, "UAL": 25620, "TI": 25621, "\u0120lifes": 25622, "\u0120confessed": 25623, "Bug": 25624, "\u0120harmon": 25625, "\u0120CONFIG": 25626, "\u0120Neutral": 25627, "Double": 25628, "\u0120staple": 25629, "\u0120SHA": 25630, "British": 25631, "\u0120SNP": 25632, "ATOR": 25633, "oco": 25634, "\u0120swinging": 25635, "gex": 25636, "oleon": 25637, "plain": 25638, "\u0120Missing": 25639, "\u0120Trophy": 25640, "vari": 25641, "ranch": 25642, "\u0120301": 25643, "440": 25644, "0000000000000000": 25645, "\u0120restoring": 25646, "\u0120haul": 25647, "ucing": 25648, "nerg": 25649, "\u0120futures": 25650, "\u0120strategist": 25651, "question": 25652, "\u0120lateral": 25653, "\u0120Bard": 25654, "\u0120sor": 25655, "\u0120Rhodes": 25656, "\u0120Downtown": 25657, "?????-": 25658, "\u0120Lit": 25659, "\u0120Bened": 25660, "\u0120coil": 25661, "street": 25662, "\u0120Portal": 25663, "FILE": 25664, "\u0120Gru": 25665, "*,": 25666, "231": 25667, "neum": 25668, "\u0120sucked": 25669, "\u0120rapper": 25670, "\u0120tendencies": 25671, "\u0120Lauren": 25672, "cellaneous": 25673, "267": 25674, "\u0120browse": 25675, "\u0120overc": 25676, "header": 25677, "oise": 25678, "\u0120beet": 25679, "\u0120Gle": 25680, "Stay": 25681, "\u0120mum": 25682, "\u0120typed": 25683, "\u0120discounts": 25684, "Talk": 25685, "\u0120Og": 25686, "existing": 25687, "\u0120Sell": 25688, "uph": 25689, "CI": 25690, "\u0120Austrian": 25691, "\u0120Warm": 25692, 
"\u0120dismissal": 25693, "\u0120averages": 25694, "camera": 25695, "\u0120allegiance": 25696, "LAN": 25697, "=\"#": 25698, "\u0120commentators": 25699, "\u0120Setting": 25700, "\u0120Midwest": 25701, "\u0120pharmac": 25702, "\u0120EXP": 25703, "\u0120stainless": 25704, "Chicago": 25705, "\u0120tan": 25706, "244": 25707, "\u0120countryside": 25708, "\u0120Vac": 25709, "295": 25710, "\u0120pinned": 25711, "\u0120crises": 25712, "\u0120standardized": 25713, "Task": 25714, "\u0120Jail": 25715, "\u0120Docker": 25716, "colored": 25717, "forth": 25718, "\"},": 25719, "\u0120patrons": 25720, "\u0120spice": 25721, "\u0120mourn": 25722, "\u0120Mood": 25723, "\u0120laundry": 25724, "\u0120equip": 25725, "\u0120Mole": 25726, "yll": 25727, "\u0120THC": 25728, "nation": 25729, "\u0120Sherlock": 25730, "\u0120issu": 25731, "\u0120Kre": 25732, "\u0120Americas": 25733, "\u0120AAA": 25734, "\u0120systematically": 25735, "\u0120contra": 25736, "\u0120Sally": 25737, "\u0120rationale": 25738, "\u0120carriage": 25739, "\u0120peaks": 25740, "\u0120contradiction": 25741, "ensation": 25742, "\u0120Failure": 25743, "\u0120props": 25744, "\u0120namespace": 25745, "\u0120cove": 25746, "fields": 25747, "\u00e3\u0124\u012d": 25748, "\u0120wool": 25749, "\u0120Catch": 25750, "\u0120presumed": 25751, "\u0120Diana": 25752, "ragon": 25753, "igi": 25754, "\u0120hamm": 25755, "\u0120stunt": 25756, "\u0120GUI": 25757, "\u0120Observatory": 25758, "\u0120Shore": 25759, "\u0120smells": 25760, "annah": 25761, "\u0120cockpit": 25762, "\u0120Duterte": 25763, "850": 25764, "\u0120oppressed": 25765, "breaker": 25766, "\u0120Contribut": 25767, "\u0120Peru": 25768, "\u0120Monsanto": 25769, "\u0120Attempt": 25770, "\u0120commanding": 25771, "\u0120fridge": 25772, "\u0120Rin": 25773, "\u0120Chess": 25774, "uality": 25775, "\u0120ol": 25776, "Republican": 25777, "\u0120Glory": 25778, "\u0120WIN": 25779, ".......": 25780, "agent": 25781, "reading": 25782, "\u0120inh": 25783, "Jones": 25784, "\u0120clicks": 25785, "alan": 25786, "\u0120[];": 25787, "\u0120Majesty": 25788, "\u0120Ced": 25789, "opus": 25790, "atel": 25791, "\u00c3\u00aa": 25792, "ARC": 25793, "\u0120Ecuador": 25794, "\u00e3\u0125\u0142": 25795, "\u0120Kuro": 25796, "\u0120rituals": 25797, "\u0120captive": 25798, "\u0120ounce": 25799, "\u0120disagreement": 25800, "\u0120slog": 25801, "fuel": 25802, "Pet": 25803, "Mail": 25804, "\u0120exercised": 25805, "\u0120solic": 25806, "\u0120rainfall": 25807, "\u0120devotion": 25808, "\u0120Assessment": 25809, "\u0120robotic": 25810, "options": 25811, "\u0120RP": 25812, "\u0120Families": 25813, "\u0120Flames": 25814, "\u0120assignments": 25815, "007": 25816, "akedown": 25817, "\u0120vocabulary": 25818, "Reilly": 25819, "\u0120caval": 25820, "gars": 25821, "\u0120suppressed": 25822, "\u0120SET": 25823, "\u0120Johns": 25824, "\u0120warp": 25825, "broken": 25826, "\u0120statues": 25827, "\u0120advocated": 25828, "\u0120275": 25829, "\u0120peril": 25830, "omorph": 25831, "\u0120Femin": 25832, "perfect": 25833, "\u0120hatch": 25834, "Lib": 25835, "512": 25836, "\u0120lifelong": 25837, "313": 25838, "\u0120cheeks": 25839, "\u0120numbered": 25840, "\u0120Mug": 25841, "Body": 25842, "ravel": 25843, "Weight": 25844, "\u0120Jak": 25845, "\u0120Heath": 25846, "\u0120kissing": 25847, "\u0120JUST": 25848, "\u0120waving": 25849, "upload": 25850, "\u0120insider": 25851, "\u0120Progressive": 25852, "\u0120Filter": 25853, "tta": 25854, "\u0120Beam": 25855, "\u0120violently": 25856, "ipation": 25857, "\u0120skepticism": 25858, "\u01201918": 25859, 
"\u0120Annie": 25860, "\u0120SI": 25861, "\u0120genetics": 25862, "\u0120onboard": 25863, "atl": 25864, "\u0120Friedman": 25865, "\u0120Bri": 25866, "ceptive": 25867, "\u0120pirate": 25868, "\u0120Reporter": 25869, "278": 25870, "\u0120mythology": 25871, "\u0120eclipse": 25872, "\u0120skins": 25873, "\u0120glyph": 25874, "ingham": 25875, "Files": 25876, "Cour": 25877, "women": 25878, "\u0120regimes": 25879, "\u0120photographed": 25880, "Kat": 25881, "\u0120MAX": 25882, "Officials": 25883, "\u0120unexpectedly": 25884, "\u0120impressions": 25885, "Front": 25886, ";;;;;;;;": 25887, "\u0120supremacy": 25888, "\u0120sang": 25889, "\u0120aggravated": 25890, "\u0120abruptly": 25891, "\u0120Sector": 25892, "\u0120excuses": 25893, "\u0120costing": 25894, "idepress": 25895, "Stack": 25896, "\u0120RNA": 25897, "obil": 25898, "\u0120ghosts": 25899, "ldon": 25900, "atibility": 25901, "Topics": 25902, "\u0120reimburse": 25903, "\u0120HM": 25904, "\u0120Deg": 25905, "\u0120thief": 25906, "yet": 25907, "ogenesis": 25908, "leaning": 25909, "\u0120Kol": 25910, "\u0120Basketball": 25911, "\u0120fi": 25912, "\u0120Seeing": 25913, "\u0120recycling": 25914, "\u0120[-": 25915, "Congress": 25916, "\u0120lectures": 25917, "Psy": 25918, "\u0120nep": 25919, "\u0120maid": 25920, "\u0120oriented": 25921, "AX": 25922, "\u0120respectful": 25923, "rene": 25924, "flush": 25925, "\u0120Unloaded": 25926, "request": 25927, "grid": 25928, "\u0120Alternatively": 25929, "\u0120Hugo": 25930, "\u0120decree": 25931, "\u0120Buddhism": 25932, "andum": 25933, "Android": 25934, "\u0120Congo": 25935, "\u0120Joyce": 25936, "\u0120acknowledging": 25937, "hesive": 25938, "\u0120Tomorrow": 25939, "\u0120Hiro": 25940, "thren": 25941, "\u0120Maced": 25942, "\u0120hoax": 25943, "\u0120Increased": 25944, "\u0120Pradesh": 25945, "Wild": 25946, "______": 25947, "161": 25948, "\u0120aunt": 25949, "\u0120distributing": 25950, "\u0120Tucker": 25951, "\u0120SSL": 25952, "\u0120Wolves": 25953, "Building": 25954, "oult": 25955, "\u0120Luo": 25956, "\u0120Yas": 25957, "\u0120Spir": 25958, "\u0120Shape": 25959, "\u0120Cambod": 25960, "\u0120IPv": 25961, "\u0120ml": 25962, "\u0120extrad": 25963, "390": 25964, "\u0120Penny": 25965, "dream": 25966, "\u0120stationed": 25967, "optional": 25968, "eworthy": 25969, ".": 26700, "\u0120Workshop": 26701, "\u0120Retail": 26702, "\u0120Avatar": 26703, "625": 26704, "Na": 26705, "\u0120VC": 26706, "\u0120Secure": 26707, "MY": 26708, "1988": 26709, "ossip": 26710, "\u0120prostate": 26711, "\u0120unden": 26712, "\u0120gamer": 26713, "\u0120Contents": 26714, "\u0120Warhammer": 26715, "\u0120Sentinel": 26716, "310": 26717, "\u0120segregation": 26718, "\u0120Flex": 26719, "\u0120MAY": 26720, "\u0120drills": 26721, "\u0120Drugs": 26722, "Islamic": 26723, "\u0120spur": 26724, "\u0120cafe": 26725, "\u0120imaginary": 26726, "\u0120guiding": 26727, "\u0120swings": 26728, "\u0120Theme": 26729, "oby": 26730, "\u0120nud": 26731, "\u0120begging": 26732, "\u0120strongh": 26733, "\u0120rejecting": 26734, "\u0120pedestrians": 26735, "\u0120Prospect": 26736, "Rare": 26737, "sle": 26738, "\u0120concessions": 26739, "\u0120Constitutional": 26740, "\u0120beams": 26741, "\u0120fibers": 26742, "poon": 26743, "\u0120instincts": 26744, "property": 26745, "\u0120BIG": 26746, "Sanders": 26747, "imates": 26748, "\u0120coating": 26749, "\u0120corpses": 26750, "\u0120TRUE": 26751, "checked": 26752, "\u0120166": 26753, "Ash": 26754, "\u0120JS": 26755, "\u0120Fiction": 26756, "\u0120communal": 26757, "\u0120energetic": 26758, "oooooooo": 26759, 
"\u0120nowadays": 26760, "ILD": 26761, "ibo": 26762, "\u0120SUV": 26763, "Ren": 26764, "\u0120dwelling": 26765, "Silver": 26766, "\u0120tally": 26767, "\u0120Moving": 26768, "\u0120coward": 26769, "\u0120generals": 26770, "\u0120horns": 26771, "\u0120circulated": 26772, "\u0120robbed": 26773, "\u0120Unlimited": 26774, "\u0120harassed": 26775, "\u0120inhibit": 26776, "\u0120composer": 26777, "\u0120Spotify": 26778, "\u0120spreads": 26779, "364": 26780, "\u0120suicidal": 26781, "\u0120noises": 26782, "\u0120Stur": 26783, "\u0120saga": 26784, "\u0120Kag": 26785, "iso": 26786, "\u0120theoretically": 26787, "Money": 26788, "\u0120similarity": 26789, "\u0120sliced": 26790, "utils": 26791, "inges": 26792, "\"-": 26793, "\u0120anth": 26794, "\u0120imped": 26795, "Module": 26796, "Throughout": 26797, "\u0120menus": 26798, "committee": 26799, "andi": 26800, "obj": 26801, "inav": 26802, "fired": 26803, "\u0120Abdullah": 26804, "\u0120undead": 26805, "\u0120fonts": 26806, "Hold": 26807, "ENG": 26808, "\u0120sustainability": 26809, "\u0120flick": 26810, "\u0120razor": 26811, "\u0120Fest": 26812, "\u0120Characters": 26813, "\u0120wording": 26814, "\u0120populist": 26815, "\u0120criticizing": 26816, "\u0120muse": 26817, "vine": 26818, "\u0120cardboard": 26819, "\u0120kindly": 26820, "\u0120fringe": 26821, "\u0120Theft": 26822, "icultural": 26823, "\u0120governors": 26824, "\u0120\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd": 26825, "\u0120163": 26826, "\u0120timeout": 26827, "\u0120Auth": 26828, "Children": 26829, "AU": 26830, "\u0120redemption": 26831, "\u0120Alger": 26832, "\u01201914": 26833, "\u0120waved": 26834, "\u0120astronauts": 26835, "ograms": 26836, "\u0120swamp": 26837, "\u0120Finnish": 26838, "\u0120candle": 26839, "\u0120tonnes": 26840, "utm": 26841, "\u0120ray": 26842, "\u0120spun": 26843, "\u0120fearful": 26844, "articles": 26845, "\u0120caus": 26846, "orically": 26847, "\u0120Requires": 26848, "\u0120Gol": 26849, "\u0120pope": 26850, "\u0120inaugural": 26851, "\u0120gle": 26852, "ADA": 26853, "\u0120ISIL": 26854, "\u0120Offensive": 26855, "\u0120watchdog": 26856, "\u0120balcon": 26857, "entity": 26858, "\u0120Hoo": 26859, "\u0120gallon": 26860, "ACC": 26861, "\u0120doubling": 26862, "\u0120implication": 26863, "\u0120Sight": 26864, "\u0120doctr": 26865, "-------": 26866, "\u0120\\\\": 26867, "\u0120malt": 26868, "Roll": 26869, "\u0120\u00e2\u012b\u00a5": 26870, "\u0120recap": 26871, "adding": 26872, "uces": 26873, "\u0120Bend": 26874, "figure": 26875, "\u0120turkey": 26876, "\u0120societal": 26877, "\u0120Tickets": 26878, "\u0120commercially": 26879, "\u0120spicy": 26880, "\u0120216": 26881, "\u0120Ramp": 26882, "\u0120superiority": 26883, "\u00c3\u00af": 26884, "\u0120Tracker": 26885, "Carl": 26886, "\u0120Coy": 26887, "\u0120Patriot": 26888, "\u0120consulted": 26889, "\u0120listings": 26890, "\u0120slew": 26891, "reenshot": 26892, "\u0120Gone": 26893, "\u0120[...]": 26894, "309": 26895, "\u0120hottest": 26896, "\u00d8\u00b1": 26897, "\u0120rocky": 26898, "\u0120Diaz": 26899, "\u0120massage": 26900, "\u0120paraly": 26901, "\u0120pony": 26902, "Az": 26903, "\u0120cartridge": 26904, "\u0120NZ": 26905, "\u0120snack": 26906, "\u0120Lamar": 26907, "plement": 26908, "\u0120Leslie": 26909, "\u0120mater": 26910, "\u0120snipp": 26911, "246": 26912, "\u0120jointly": 26913, "\u0120Brisbane": 26914, "\u0120iPod": 26915, "\u0120pumping": 26916, "\u0120goat": 26917, "\u0120Sharon": 26918, "ealing": 26919, "\u0120coron": 26920, "\u0120anomal": 26921, "rahim": 26922, 
"\u0120Connection": 26923, "\u0120sculpture": 26924, "\u0120scheduling": 26925, "\u0120Daddy": 26926, "athing": 26927, "\u0120eyebrows": 26928, "\u0120curved": 26929, "\u0120sentiments": 26930, "\u0120drafting": 26931, "Drop": 26932, "([": 26933, "\u0120nominal": 26934, "\u0120Leadership": 26935, "\u0120Grow": 26936, "\u0120176": 26937, "\u0120constructive": 26938, "ivation": 26939, "\u0120corrupted": 26940, "gerald": 26941, "\u0120Cros": 26942, "\u0120Chester": 26943, "\u0120Lap": 26944, "\u00e3\u0123\u00aa": 26945, "OTH": 26946, "DATA": 26947, "\u0120almond": 26948, "probably": 26949, "Imp": 26950, "\u0120feast": 26951, "\u0120Warcraft": 26952, "Flor": 26953, "\u0120checkpoint": 26954, "\u0120transcription": 26955, "\u0120204": 26956, "\u0120tweaks": 26957, "\u0120relieve": 26958, "Science": 26959, "\u0120performer": 26960, "Zone": 26961, "\u0120turmoil": 26962, "igated": 26963, "hibit": 26964, "\u0120Cafe": 26965, "themed": 26966, "\u0120fluor": 26967, "bench": 26968, "\u0120decom": 26969, "\u0120Unt": 26970, "\u0120Barrett": 26971, "\u0120Facts": 26972, "\u0120tasting": 26973, "\u0120PTSD": 26974, "\u0120Seal": 26975, "\u0120Judaism": 26976, "\u0120Dynamic": 26977, "\u0120Cors": 26978, "Ve": 26979, "\u0120Ming": 26980, "\u0120Transform": 26981, "von": 26982, "\u0120Defenders": 26983, "\u0120Tactical": 26984, "\u0120Von": 26985, "\u0120Univers": 26986, "\u0120distorted": 26987, "\u0120Breath": 26988, "?'\"": 26989, "\u0120agon": 26990, "\u0120Deadly": 26991, "\u0120lan": 26992, "\u0120Cycle": 26993, "orned": 26994, "\u0120reliably": 26995, "\u0120glor": 26996, "\u0120Monkey": 26997, "\u00e3\u0125\u00a1": 26998, "\u0120adren": 26999, "\u0120microwave": 27000, "\u0120Alban": 27001, "ircraft": 27002, "digit": 27003, "smart": 27004, "\u0120Dread": 27005, "\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af\u00c2\u00af": 27006, "{{": 27007, "\u0120Rochester": 27008, "\u0120simplified": 27009, "\u0120inflicted": 27010, "\u0120takeover": 27011, "\u0120yourselves": 27012, "aditional": 27013, "\u0120muscular": 27014, "KS": 27015, "\u0120ingen": 27016, "Tax": 27017, "\u0120Feature": 27018, "277": 27019, "\u0120cruc": 27020, "\u0120crate": 27021, "\u0120unidentified": 27022, "\u0120acclaimed": 27023, "\u0120Manga": 27024, "\u0120Frances": 27025, "\u0120Nepal": 27026, "\u0120Gerald": 27027, "\u0120Kuwait": 27028, "\u0120slain": 27029, "\u0120Heb": 27030, "\u0120Goku": 27031, "\u00e3\u0123\u00ae\u00e6": 27032, "286": 27033, "Mrs": 27034, "\u0120Cody": 27035, "\u0120Sanctuary": 27036, "016": 27037, "\u0120dismant": 27038, "\u0120dataset": 27039, "\u0120Hond": 27040, "buck": 27041, "\u0120Patterson": 27042, "\u0120palette": 27043, "\u0120GD": 27044, "icol": 27045, "\u0120Lodge": 27046, "\u0120planetary": 27047, "akin": 27048, "\u0120Registered": 27049, "abwe": 27050, "\u0120Petersburg": 27051, "\u0120hailed": 27052, "\u0120Piece": 27053, "Sche": 27054, "\u0120DOJ": 27055, "\u0120enumer": 27056, "181": 27057, "\u0120Observer": 27058, "\u0120Bold": 27059, "founded": 27060, "commerce": 27061, "\u0120exploits": 27062, "\u0120Finding": 27063, "URN": 27064, "\u0120Sne": 27065, "\u0120Acid": 27066, "ayette": 27067, "\u0120Values": 27068, "\u0120drastic": 27069, "\u0120architectural": 27070, "\u0120\".": 27071, "\u00d7\u0137": 27072, "umped": 27073, "\u0120wrapping": 27074, "\u0120widow": 27075, "\u0120Slayer": 27076, "lace": 27077, "once": 27078, "Germany": 27079, "avoid": 27080, 
"\u0120temples": 27081, "PAR": 27082, "\u00c3\u00b4": 27083, "\u0120Lucifer": 27084, "\u0120Flickr": 27085, "lov": 27086, "forces": 27087, "\u0120scouting": 27088, "\u0120louder": 27089, "tesy": 27090, "\u0120beforehand": 27091, "\u00c4\u0135": 27092, "\u0120Neon": 27093, "\u0120Wol": 27094, "\u0120Typically": 27095, "\u0120Politico": 27096, "-+-+": 27097, "\u0120builder": 27098, "\u0120derive": 27099, "Kill": 27100, "\u0120poker": 27101, "\u0120ambiguous": 27102, "\u0120lifts": 27103, "\u0120cyt": 27104, "\u0120ribs": 27105, "oodle": 27106, "\u0120Sounds": 27107, "hair": 27108, "\u0120Syndrome": 27109, "tf": 27110, "\u0120proportional": 27111, "uid": 27112, "\u0120pertaining": 27113, "\u0120Kindle": 27114, "\u0120Negro": 27115, "\u0120reiterated": 27116, "\u0120Tonight": 27117, "oths": 27118, "\u0120Cornell": 27119, "\u0120owing": 27120, "\u0120208": 27121, "elfare": 27122, "ocating": 27123, "\u0120Birds": 27124, "Subscribe": 27125, "\u0120essays": 27126, "\u0120burdens": 27127, "\u0120illustrations": 27128, "arious": 27129, "ERAL": 27130, "\u0120Calcul": 27131, "\u0120xen": 27132, "\u0120LinkedIn": 27133, "\u0120Jung": 27134, "\u0120redesign": 27135, "Connor": 27136, "296": 27137, "\u0120reversal": 27138, "\u0120Adelaide": 27139, "\u0120LL": 27140, "\u0120sinking": 27141, "\u0120gum": 27142, "USH": 27143, "capt": 27144, "\u0120Grimm": 27145, "\u0120footsteps": 27146, "\u0120CBD": 27147, "ispers": 27148, "\u0120prose": 27149, "Wednesday": 27150, "\u0120Movies": 27151, "edin": 27152, "\u0120overturned": 27153, "\u0120contentious": 27154, "USB": 27155, "~~~~~~~~~~~~~~~~": 27156, "\u0120Copper": 27157, "\u0120pointless": 27158, "NV": 27159, "values": 27160, "olphin": 27161, "dain": 27162, "\u0120deposited": 27163, "\u0120GW": 27164, "\u0120preceded": 27165, "\u0120Cla": 27166, "\u0120Golem": 27167, "\u0120Nim": 27168, "\u0120\u00ce\u00b2": 27169, "\u0120Engineers": 27170, "middle": 27171, "\u0120flatt": 27172, "operative": 27173, "\u0120councils": 27174, "imbabwe": 27175, "elin": 27176, "\u0120stressful": 27177, "\u0120LD": 27178, "\u0120resh": 27179, "lake": 27180, "\u0120wheelchair": 27181, "\u0120Alternative": 27182, "\u0120optimize": 27183, "operation": 27184, "\u0120peek": 27185, "\u0120oneself": 27186, "igil": 27187, "\u0120transitions": 27188, "opathy": 27189, "blank": 27190, "\u0120169": 27191, "171": 27192, "________________________________________________________________": 27193, "\u0120laundering": 27194, "Enc": 27195, "\u0120DEC": 27196, "\u0120workouts": 27197, "\u0120spikes": 27198, "\u0120dinosaurs": 27199, "\u0120discriminatory": 27200, "Pool": 27201, "Rather": 27202, "385": 27203, "RNA": 27204, "testers": 27205, "eto": 27206, "\u0120Identity": 27207, "\u0120vein": 27208, "\u0120Burton": 27209, "\u0120arcade": 27210, "420": 27211, "Ultimately": 27212, "\u0120Sadly": 27213, "\u00c3\u00b0": 27214, "pill": 27215, "\u0120cubic": 27216, "\u0120Spectrum": 27217, "these": 27218, "states": 27219, "\u0120unofficial": 27220, "hawks": 27221, "\u0120EVERY": 27222, "\u0120rainbow": 27223, "\u0120incarceration": 27224, "anding": 27225, "\u0120syll": 27226, "\u0120Everton": 27227, "\u0120179": 27228, "\u0120Serbia": 27229, "\u0120189": 27230, "meter": 27231, "\u0120Mickey": 27232, "\u0120antiqu": 27233, "\u0120factual": 27234, "neck": 27235, "\u0120Nare": 27236, "norm": 27237, "must": 27238, "\u0120highways": 27239, "\u0120glam": 27240, "\u0120dividing": 27241, "\u0120Squadron": 27242, "\u0120Martha": 27243, "\u0120births": 27244, "Cover": 27245, "////////////////": 27246, "\u0120Wong": 
27247, "Phot": 27248, "\u0120ALS": 27249, "rio": 27250, "\u0120Nonetheless": 27251, "\u0120Lemon": 27252, "\u0120206": 27253, "\u0120EE": 27254, "\u0120derivative": 27255, "\u0120WWII": 27256, "vote": 27257, "\u0120therein": 27258, "\u0120separating": 27259, "446": 27260, "sync": 27261, "\u0120Streets": 27262, "\u0120ratt": 27263, "\u0120municipality": 27264, "\u0120Shortly": 27265, "\u0120monk": 27266, "),\"": 27267, "\u0120scrub": 27268, "\u0120operatives": 27269, "Neither": 27270, "Place": 27271, "\u0120Limit": 27272, "Female": 27273, "\u0120Actor": 27274, "Character": 27275, "\u0120constituted": 27276, "357": 27277, "\u0120protested": 27278, "\u0120Straw": 27279, "\u0120Height": 27280, "ilda": 27281, "\u0120Typh": 27282, "\u0120floods": 27283, "\u0120cosmetic": 27284, "WAY": 27285, "perture": 27286, "upon": 27287, "tons": 27288, "essing": 27289, "\u0120Pocket": 27290, "\u0120rooft": 27291, "\u0120Caucas": 27292, "\u0120antidepress": 27293, "\u0120incompatible": 27294, "ECD": 27295, "\u0120opera": 27296, "\u0120Contest": 27297, "\u0120generators": 27298, "lime": 27299, "Defense": 27300, "1987": 27301, "forum": 27302, "\u0120savage": 27303, "\u0120Hungarian": 27304, "nz": 27305, "\u0120metallic": 27306, "\u0120expelled": 27307, "\u0120residency": 27308, "\u0120dresses": 27309, "666": 27310, "\u0120Clement": 27311, "fires": 27312, "Category": 27313, "\u0120geek": 27314, "alis": 27315, "\u0120cemetery": 27316, "educated": 27317, "\u0120crawl": 27318, "\u0120Unable": 27319, "\u0120Tyson": 27320, "akis": 27321, "\u0120pardon": 27322, "\u0120Wra": 27323, "\u0120strengthened": 27324, "\u0120Fors": 27325, "335": 27326, "\u0120HC": 27327, "\u0120Mond": 27328, "\u0120visuals": 27329, "\u0120Beatles": 27330, "ettlement": 27331, "\u0120\u00ef": 27332, "gro": 27333, "\u0120bash": 27334, "\u0120poorest": 27335, "\u0120excel": 27336, "\u0120aspirations": 27337, "\u0120Municip": 27338, "ensible": 27339, "\u0120ceremonies": 27340, "\u0120intimidation": 27341, "\u0120CONTR": 27342, "beck": 27343, "\u0120Kap": 27344, "asu": 27345, "\u0120trademarks": 27346, "\u0120Sew": 27347, "\u0120Competition": 27348, "network": 27349, "\u0120Arri": 27350, "\u0120Tet": 27351, "Roaming": 27352, "WC": 27353, "Dat": 27354, "\u0120sob": 27355, "\u0120pairing": 27356, "\u0120overdose": 27357, "SAY": 27358, "aber": 27359, "\u0120revolt": 27360, "\u0120Fah": 27361, "acting": 27362, "eq": 27363, "estation": 27364, "Fight": 27365, "\u0120Marks": 27366, "273": 27367, "\u0120178": 27368, "Raw": 27369, "\u00e3\u0123\u012d": 27370, "349": 27371, "blocks": 27372, "\u0120verge": 27373, "estine": 27374, "\u0120Podesta": 27375, "\u0120invasive": 27376, "\u0120profoundly": 27377, "\u0120Ao": 27378, "each": 27379, "\u0120lest": 27380, "interpret": 27381, "\u0120shrinking": 27382, "\u0120errone": 27383, "\u0120chees": 27384, "lys": 27385, "\u0120Ivy": 27386, "\u0120Directory": 27387, "\u0120hinted": 27388, "VICE": 27389, "\u0120contacting": 27390, "\u0120Gent": 27391, "hei": 27392, "\u0120labeling": 27393, "\u0120mercury": 27394, "\u0120Lite": 27395, "\u0120expires": 27396, "\u0120destabil": 27397, "ritis": 27398, "cu": 27399, "\u0120feathers": 27400, "\u0120steer": 27401, "\u0120programmed": 27402, "\u0120Vader": 27403, "Going": 27404, "\u0120Elim": 27405, "\u0120yo": 27406, "\u0120Miche": 27407, "\u0120203": 27408, "\u0120sleeves": 27409, "\u0120bully": 27410, "\u0120Humans": 27411, "368": 27412, "\u0120compress": 27413, "\u0120Banner": 27414, "ARS": 27415, "\u0120awhile": 27416, "\u0120calib": 27417, "\u0120sponsorship": 27418, 
"\u0120Difficulty": 27419, "\u0120Papers": 27420, "\u0120identifier": 27421, "}.": 27422, "\u0120yog": 27423, "\u0120Shia": 27424, "\u0120cleanup": 27425, "\u0120vibe": 27426, "introdu": 27427, "imming": 27428, "Australia": 27429, "\u0120outlines": 27430, "\u0120Youtube": 27431, "train": 27432, "\u0120Makes": 27433, "\u0120deported": 27434, "\u0120centr": 27435, "\u0120Dug": 27436, "\u0120Boulder": 27437, "\u0120Buffy": 27438, "\u0120injunction": 27439, "\u0120Harley": 27440, "\u0120Groups": 27441, "\u0120Dumbledore": 27442, "\u0120Clara": 27443, "\u0120\"-": 27444, "\u0120sacrificed": 27445, "eph": 27446, "Shadow": 27447, "ibling": 27448, "\u0120freelance": 27449, "\u0120evidently": 27450, "phal": 27451, "\u0120retains": 27452, "Mir": 27453, "\u0120finite": 27454, "dar": 27455, "\u0120Cous": 27456, "\u0120repaired": 27457, "\u0120periodic": 27458, "\u0120championships": 27459, "\u0120asteroid": 27460, "blind": 27461, "\u0120expressly": 27462, "\u0120Astros": 27463, "\u0120scaled": 27464, "\u0120geographical": 27465, "\u0120Rapids": 27466, "Enjoy": 27467, "\u0120elastic": 27468, "\u0120Mohamed": 27469, "Market": 27470, "begin": 27471, "\u0120discovers": 27472, "\u0120telecommunications": 27473, "\u0120scanner": 27474, "\u0120enlarge": 27475, "\u0120sharks": 27476, "\u0120psychedel": 27477, "\u0120Rouge": 27478, "\u0120snapshot": 27479, "isine": 27480, "XP": 27481, "\u0120pesticides": 27482, "\u0120LSD": 27483, "\u0120Distribution": 27484, "really": 27485, "\u0120degradation": 27486, "\u0120disguise": 27487, "\u0120biom": 27488, "\u0120EXT": 27489, "\u0120equations": 27490, "\u0120hazards": 27491, "\u0120Compared": 27492, ")*": 27493, "\u0120virtues": 27494, "\u0120elders": 27495, "\u0120enhancing": 27496, "\u0120Across": 27497, "eros": 27498, "angling": 27499, "\u0120combust": 27500, "ucci": 27501, "\u0120concussion": 27502, "\u0120contraception": 27503, "\u0120Kang": 27504, "\u0120expresses": 27505, "\u0120aux": 27506, "\u0120Pione": 27507, "\u0120exhibits": 27508, "Debug": 27509, "OTAL": 27510, "\u0120Already": 27511, "\u0120Wheeler": 27512, "\u0120expands": 27513, "?:": 27514, "\u0120reconciliation": 27515, "\u0120pirates": 27516, "\u0120purse": 27517, "\u0120discourage": 27518, "\u0120spectacle": 27519, "Rank": 27520, "\u0120wraps": 27521, "\u0120Thought": 27522, "\u0120impending": 27523, "Opp": 27524, "\u0120Anglo": 27525, "\u0120EUR": 27526, "\u0120screwed": 27527, "retched": 27528, "\u0120encouragement": 27529, "models": 27530, "\u0120confuse": 27531, "mmm": 27532, "\u0120Vitamin": 27533, "\u00e2\u0138\u0133\u00e2\u0138\u0133": 27534, "Cru": 27535, "\u0120knights": 27536, "\u0120discard": 27537, "\u0120bishops": 27538, "\u0120Wear": 27539, "\u0120Garrett": 27540, "kan": 27541, "\u00e3\u0125\u0141": 27542, "\u0120masculine": 27543, "capital": 27544, "\u0120Aus": 27545, "\u0120fatally": 27546, "thanks": 27547, "\u0120AU": 27548, "\u0120Gut": 27549, "1200": 27550, "\u012000000000": 27551, "\u0120surrog": 27552, "\u0120BIOS": 27553, "raits": 27554, "\u0120Watts": 27555, "\u0120resurrection": 27556, "\u0120Electoral": 27557, "\u0120Tips": 27558, "4000": 27559, "\u0120nutrient": 27560, "\u0120depicting": 27561, "\u0120sprink": 27562, "\u0120muff": 27563, "\u0120LIM": 27564, "\u0120Sample": 27565, "psc": 27566, "ibi": 27567, "generated": 27568, "\u0120specimens": 27569, "\u0120dissatisf": 27570, "\u0120tailored": 27571, "\u0120holdings": 27572, "\u0120Monthly": 27573, "\u0120Eat": 27574, "poons": 27575, "\u0120nec": 27576, "\u0120Cage": 27577, "\u0120Lotus": 27578, "\u0120Lantern": 27579, 
"\u0120frontier": 27580, "\u0120pensions": 27581, "\u0120joked": 27582, "\u0120Hardy": 27583, "=-=-=-=-": 27584, "rade": 27585, "UID": 27586, "\u0120rails": 27587, "\u0120emit": 27588, "\u0120slate": 27589, "\u0120smug": 27590, "\u0120spit": 27591, "\u0120Calls": 27592, "\u0120Jacobs": 27593, "feat": 27594, "\u0120UE": 27595, "\u0120restruct": 27596, "\u0120regeneration": 27597, "\u0120energies": 27598, "\u0120Connor": 27599, "OHN": 27600, "\u0120Cheese": 27601, "\u0120ger": 27602, "\u0120resurrect": 27603, "management": 27604, "NW": 27605, "\u0120presently": 27606, "\u0120Bruins": 27607, "Member": 27608, "\u0120Mang": 27609, "idan": 27610, "\u0120boosting": 27611, "wyn": 27612, "+.": 27613, "requisite": 27614, "\u0120NYPD": 27615, "\u0120Megan": 27616, "\u0120Conditions": 27617, "\u0120pics": 27618, "nesium": 27619, "\u0120Rash": 27620, "\u0120174": 27621, "\u0120Ducks": 27622, "\u0120embro": 27623, "zu": 27624, "onian": 27625, "religious": 27626, "\u0120craz": 27627, "\u0120ACA": 27628, "\u0120Zucker": 27629, "EMA": 27630, "\u0120Pros": 27631, "Weapon": 27632, "\u0120Knox": 27633, "\u0120Arduino": 27634, "\u0120stove": 27635, "\u0120heavens": 27636, "\u0120Purchase": 27637, "\u0120herd": 27638, "\u0120fundraiser": 27639, "Digital": 27640, "5000": 27641, "\u0120proponents": 27642, "/\u00e2\u0122\u012d": 27643, "\u0120jelly": 27644, "\u0120Visa": 27645, "\u0120monks": 27646, "\u0120advancement": 27647, "\u0120Wer": 27648, "\u0120187": 27649, "eus": 27650, "ertility": 27651, "\u0120fetal": 27652, "\u01201936": 27653, "Lo": 27654, "\u0120outfits": 27655, "\u0120staircase": 27656, "bomb": 27657, "\u0120customized": 27658, "clair": 27659, "Tree": 27660, "\u0120mapped": 27661, "\u0120Considering": 27662, "\u0120Torres": 27663, "\u0120methyl": 27664, "\u0120approximate": 27665, "\u0120doom": 27666, "\u0120Hansen": 27667, "\u0120crossover": 27668, "\u0120standalone": 27669, "\u00e4\u00bc": 27670, "\u0120invites": 27671, "\u0120graveyard": 27672, "\u0120hp": 27673, "DonaldTrump": 27674, "\u0120escort": 27675, "Gar": 27676, "\u0120predecessors": 27677, "\u0120hay": 27678, "\u0120enzyme": 27679, "\u0120Straight": 27680, "visors": 27681, "Ing": 27682, "aneously": 27683, "\u0120Applied": 27684, "\u0120fec": 27685, "\u0120Durant": 27686, "\u0120outspoken": 27687, "orb": 27688, "\u0120zeal": 27689, "\u0120disgrace": 27690, "').": 27691, "\u0120Cheng": 27692, "289": 27693, "\u0120Rena": 27694, "\u0120Suicide": 27695, "294": 27696, "\u0120outraged": 27697, "\u0120Newman": 27698, "\u0120Nvidia": 27699, "\u0120Aber": 27700, "\u0120Bers": 27701, "\u0120recreation": 27702, "Window": 27703, "\u0120DP": 27704, "xe": 27705, "\u0120pedoph": 27706, "\u0120fallout": 27707, "amboo": 27708, "\u0120presentations": 27709, "\u0120Apps": 27710, "\u0120html": 27711, "345": 27712, "\u0120XXX": 27713, "\u0120rubbing": 27714, "\u0120Leather": 27715, "\u0120humidity": 27716, "seys": 27717, "established": 27718, "\u0120Units": 27719, "646": 27720, "\u0120respectable": 27721, "Auto": 27722, "\u0120thriving": 27723, "\u0120Innovation": 27724, "angs": 27725, "Extra": 27726, "regulation": 27727, "298": 27728, "pick": 27729, "Examples": 27730, "\u0120CJ": 27731, "Attack": 27732, "\u0120dracon": 27733, "LT": 27734, "\u0120sticker": 27735, "rers": 27736, "\u0120sunny": 27737, "Iss": 27738, "regulated": 27739, "dim": 27740, "\u0120Abstract": 27741, "\u0120husbands": 27742, "Office": 27743, "omination": 27744, "itars": 27745, "ANGE": 27746, "ascal": 27747, "\u0120Kris": 27748, "\u0120Infantry": 27749, "\u0120malf": 27750, "\u0120Athe": 
27751, "\u0120Rally": 27752, "balanced": 27753, "........................": 27754, "OUP": 27755, "\u0120molecule": 27756, "metics": 27757, "\u0120Split": 27758, "\u0120Instructions": 27759, "\u0120Nights": 27760, "cards": 27761, "\u0120tug": 27762, "\u0120cone": 27763, "\u00e5\u0143": 27764, "\u0120tx": 27765, "\u0120Discussion": 27766, "\u0120catastrophe": 27767, "ppe": 27768, "gio": 27769, "\u0120communism": 27770, "\u0120halted": 27771, "\u0120Guant": 27772, "clean": 27773, "\u0120Sched": 27774, "\u0120Kanye": 27775, "\u0120wander": 27776, "\u0120Seriously": 27777, "\u0120188": 27778, "ennial": 27779, "follow": 27780, "productive": 27781, "\u0120Flow": 27782, "\u0120Sail": 27783, "\u0120craw": 27784, "\u0120simulations": 27785, "oru": 27786, "angles": 27787, "\u0120Nolan": 27788, "\u0120menstru": 27789, "470": 27790, "\u0120207": 27791, "aja": 27792, "\u0120casually": 27793, "boarding": 27794, "\u0120222": 27795, "ovy": 27796, "\u0120Numbers": 27797, "umat": 27798, "OE": 27799, "287": 27800, "\u0120Clemson": 27801, "\u0120certs": 27802, "\u0120slid": 27803, "\u0120Tribe": 27804, "\u0120toast": 27805, "\u0120fortunes": 27806, "\u0120fals": 27807, "\u0120Committees": 27808, "\u0120gp": 27809, "\u0120fiery": 27810, "\u0120Nets": 27811, "\u0120Anime": 27812, "Package": 27813, "\u0120Compare": 27814, "laughter": 27815, "infect": 27816, "\u0120atrocities": 27817, "\u0120justices": 27818, "\u0120insults": 27819, "\u0120Vernon": 27820, "\u0120shaken": 27821, "\u0120persona": 27822, "estamp": 27823, "367": 27824, "brain": 27825, "\u0120experimenting": 27826, "Ken": 27827, "\u0120Electronics": 27828, "\u0120161": 27829, "domain": 27830, "\u0120graphical": 27831, "bishop": 27832, "\u0120whopping": 27833, "\u0120Evangel": 27834, "\u0120advertisers": 27835, "\u0120Spear": 27836, "\u0120bids": 27837, "\u0120destroys": 27838, "utz": 27839, "\u0120undersc": 27840, "\u0120ADD": 27841, "\u0120ants": 27842, "\u0120Cum": 27843, "ipples": 27844, "\u0120Fill": 27845, "\u0120glanced": 27846, "\u0120indicted": 27847, "\u0120Eff": 27848, "\u0120miscon": 27849, "\u0120Desktop": 27850, "\u0120abide": 27851, "\u00e3\u0125\u0122": 27852, "\u0120Io": 27853, "\u0120Coul": 27854, "\u0120capsule": 27855, "\u0120Chrys": 27856, "MON": 27857, "\u0120undes": 27858, "\u0120IRA": 27859, "\u0120citation": 27860, "\u0120dictate": 27861, "\u0120Networks": 27862, "\u0120Conflict": 27863, "\u0120Stuff": 27864, "xa": 27865, "isec": 27866, "\u0120Chemistry": 27867, "\u0120quarterly": 27868, "Williams": 27869, "anan": 27870, "Opt": 27871, "\u0120Alexandria": 27872, "outheastern": 27873, "\u0120Springfield": 27874, "\u0120Blacks": 27875, "\u0120geography": 27876, "242": 27877, "\u0120utmost": 27878, "\u0120Exxon": 27879, "abouts": 27880, "EVA": 27881, "\u0120Enable": 27882, "\u0120Barr": 27883, "\u0120disagreed": 27884, "\u0120Cyprus": 27885, "\u0120dementia": 27886, "\u0120labs": 27887, "\u0120ubiquitous": 27888, "\u0120LOVE": 27889, "\u0120consolidated": 27890, "sr": 27891, "\u0120creamy": 27892, "\u0120Timber": 27893, "Regardless": 27894, "\u0120Certificate": 27895, "\u0120\"...": 27896, "ogenous": 27897, "Captain": 27898, "\u0120insulting": 27899, "\u0120Soros": 27900, "\u0120Instr": 27901, "\u0120Bulgaria": 27902, "better": 27903, "\u0120sucking": 27904, "\u0120Davidson": 27905, "atz": 27906, "\u0120collateral": 27907, "gif": 27908, "\u0120plagued": 27909, "\u0120Cancel": 27910, "\u0120Gardner": 27911, "RB": 27912, "\u0120sixteen": 27913, "Remove": 27914, "uristic": 27915, "cook": 27916, "Rod": 27917, "\u0120comprising": 27918, 
"fle": 27919, ")\u00e2\u0122\u0136": 27920, "\u0120Viking": 27921, "growth": 27922, "agonal": 27923, "\u0120srf": 27924, "afety": 27925, "mot": 27926, "Nearly": 27927, "stown": 27928, "\u0120Factor": 27929, "\u0120automobile": 27930, "\u0120procedural": 27931, "mask": 27932, "ampires": 27933, "\u0120disappears": 27934, "jab": 27935, "315": 27936, "\u01201951": 27937, "needed": 27938, "\u0120daring": 27939, "leader": 27940, "\u0120podium": 27941, "\u0120unhealthy": 27942, "\u0120mund": 27943, "\u0120pyramid": 27944, "ocre": 27945, "\u0120kissed": 27946, "\u0120dreamed": 27947, "\u0120Fantastic": 27948, "\u0120Gly": 27949, "\u00e5\u012c": 27950, "\u0120greatness": 27951, "\u0120spices": 27952, "\u0120metropolitan": 27953, "\u0120compuls": 27954, "iets": 27955, "1016": 27956, "\u0120Sham": 27957, "\u0120Pyr": 27958, "flies": 27959, "\u0120Midnight": 27960, "\u0120swallowed": 27961, "\u0120genres": 27962, "\u0120Lucky": 27963, "\u0120Rewards": 27964, "\u0120dispatch": 27965, "\u0120IPA": 27966, "\u0120Apply": 27967, "\u0120aven": 27968, "alities": 27969, "312": 27970, "things": 27971, "\u0120().": 27972, "\u0120mates": 27973, "\u0120Sz": 27974, "\u0120COP": 27975, "olate": 27976, "OFF": 27977, "\u0120recharge": 27978, "caps": 27979, "\u0120Yorker": 27980, "icone": 27981, "\u0120galaxies": 27982, "ileaks": 27983, "Dave": 27984, "\u0120Puzz": 27985, "\u0120Celtic": 27986, "\u0120AFC": 27987, "276": 27988, "\u0120Sons": 27989, "\u0120affirmative": 27990, "Hor": 27991, "\u0120tutorials": 27992, "\u0120CITY": 27993, "\u0120Rosa": 27994, "\u0120Extension": 27995, "Series": 27996, "\u0120fats": 27997, "\u0120rab": 27998, "lis": 27999, "\u0120unic": 28000, "\u0120eve": 28001, "\u0120Spin": 28002, "\u0120adulthood": 28003, "typ": 28004, "\u0120sectarian": 28005, "\u0120checkout": 28006, "\u0120Cycl": 28007, "Single": 28008, "\u0120martyr": 28009, "\u0120chilling": 28010, "888": 28011, "oufl": 28012, "\u0120];": 28013, "\u0120congestion": 28014, "mk": 28015, "\u0120Whereas": 28016, "\u01201938": 28017, "urrencies": 28018, "erion": 28019, "\u0120boast": 28020, "\u0120Patients": 28021, "\u0120chap": 28022, "\u0120BD": 28023, "realDonaldTrump": 28024, "\u0120examines": 28025, "hov": 28026, "\u0120startling": 28027, "\u0120Babylon": 28028, "wid": 28029, "omew": 28030, "brance": 28031, "\u0120Odyssey": 28032, "wig": 28033, "\u0120torch": 28034, "\u0120Vox": 28035, "\u0120Moz": 28036, "\u0120Troll": 28037, "\u0120Ans": 28038, "Similarly": 28039, "\u0120Ful": 28040, "006": 28041, "Unless": 28042, "\u0120Alone": 28043, "stead": 28044, "\u0120Publisher": 28045, "rights": 28046, "tu": 28047, "\u0120Doesn": 28048, "\u0120professionally": 28049, "\u0120clo": 28050, "icz": 28051, "\u0120steals": 28052, "\u0120\u00e1": 28053, "1986": 28054, "\u0120sturdy": 28055, "\u0120Johann": 28056, "\u0120medals": 28057, "\u0120filings": 28058, "\u0120Fraser": 28059, "done": 28060, "\u0120multinational": 28061, "\u0120feder": 28062, "\u0120worthless": 28063, "\u0120pest": 28064, "Yesterday": 28065, "ankind": 28066, "\u0120gays": 28067, "\u0120borne": 28068, "\u0120POS": 28069, "Picture": 28070, "\u0120percentages": 28071, "251": 28072, "rame": 28073, "\u0120potions": 28074, "AMD": 28075, "\u0120Lebanese": 28076, "\u0120rang": 28077, "\u0120LSU": 28078, "ongs": 28079, "\u0120peninsula": 28080, "\u0120Clause": 28081, "ALK": 28082, "oha": 28083, "\u0120MacBook": 28084, "\u0120unanimous": 28085, "\u0120lenders": 28086, "\u0120hangs": 28087, "\u0120franchises": 28088, "orers": 28089, "\u0120Updates": 28090, "\u0120isolate": 28091, 
"andro": 28092, "Soon": 28093, "\u0120disruptive": 28094, "\u0120Surve": 28095, "\u0120stitches": 28096, "\u0120Scorp": 28097, "\u0120Dominion": 28098, "\u0120supplying": 28099, "Arg": 28100, "\u0120turret": 28101, "\u0120Luk": 28102, "\u0120brackets": 28103, "*)": 28104, "\u0120Revolutionary": 28105, "\u0120Honest": 28106, "\u0120noticing": 28107, "\u0120Shannon": 28108, "\u0120afforded": 28109, "\u0120tha": 28110, "\u0120Janet": 28111, "!--": 28112, "\u0120Narendra": 28113, "\u0120Plot": 28114, "Hol": 28115, "sever": 28116, "eenth": 28117, "\u0120obstruction": 28118, "\u01201024": 28119, "staff": 28120, "jas": 28121, "orget": 28122, "scenes": 28123, "laughs": 28124, "\u0120Fargo": 28125, "crime": 28126, "\u0120orchestr": 28127, "\u0120delet": 28128, "iliary": 28129, "rieved": 28130, "\u0120militar": 28131, "\u0120Greene": 28132, "\u00e2\u0139\u0131": 28133, "\u00e3\u0123\u00a6": 28134, "\u0120Guards": 28135, "\u0120unleashed": 28136, "\u0120Weber": 28137, "\u0120adjustable": 28138, "\u0120caliber": 28139, "\u0120motivations": 28140, "\u0120\u00c3\u0142": 28141, "mAh": 28142, "\u0120Lanka": 28143, "handle": 28144, "\u0120pent": 28145, "\u0120Rav": 28146, "\u0120Angular": 28147, "\u0120Kau": 28148, "umbing": 28149, "\u0120philanthrop": 28150, "\u0120dehyd": 28151, "\u0120toxicity": 28152, "eer": 28153, "\u0120YORK": 28154, "witz": 28155, "\u00e5\u00bc": 28156, "\u0120IE": 28157, "community": 28158, "\u0120AH": 28159, "\u0120retali": 28160, "\u0120massively": 28161, "\u0120Daniels": 28162, "\u0120DEL": 28163, "\u0120carcin": 28164, "Url": 28165, "\u0120routing": 28166, "\u0120NPCs": 28167, "\u0120RAF": 28168, "ryce": 28169, "\u0120waived": 28170, "\u0120Guatem": 28171, "Everybody": 28172, "\u0120covenant": 28173, "\u0120173": 28174, "\u0120relaxing": 28175, "\u0120quart": 28176, "almost": 28177, "\u0120guarded": 28178, "\u0120Soldiers": 28179, "\u0120PLAY": 28180, "\u0120outgoing": 28181, "LAND": 28182, "\u0120rewrite": 28183, "\u0120MOV": 28184, "\u0120Imper": 28185, "\u0120Solution": 28186, "\u0120phenomenal": 28187, "\u0120longevity": 28188, "\u0120impat": 28189, "\u0120Nissan": 28190, "irie": 28191, "\u0120odor": 28192, "\u0120Zar": 28193, "oks": 28194, "\u0120militias": 28195, "\u0120SPEC": 28196, "\u0120tolerated": 28197, "arser": 28198, "\u0120Bradford": 28199, "+,": 28200, "\u0120surreal": 28201, "sf": 28202, "Canadian": 28203, "\u0120resemblance": 28204, "\u0120carbohydrate": 28205, "VIEW": 28206, "\u0120accessory": 28207, "meal": 28208, "largest": 28209, "iegel": 28210, "Someone": 28211, "\u0120toughest": 28212, "oso": 28213, "\u0120funnel": 28214, "\u0120condemnation": 28215, "luent": 28216, "\u0120wired": 28217, "\u0120Sunset": 28218, "Jesus": 28219, "\u0120PST": 28220, "\u0120Pages": 28221, "\u0120Tycoon": 28222, "\u0120PF": 28223, "\u0120selections": 28224, "\u0120\u00e0\u00a4": 28225, "partisan": 28226, "\u0120highs": 28227, "\u0120Rune": 28228, "\u0120crafts": 28229, "lead": 28230, "\u0120Parents": 28231, "\u0120reclaim": 28232, "eker": 28233, "\u0120Allied": 28234, "aeper": 28235, "\u0120looming": 28236, "\u0120beneficiaries": 28237, "\u0120Hull": 28238, "Students": 28239, "Jewish": 28240, "dj": 28241, "\u0120pact": 28242, "template": 28243, "\u0120Officials": 28244, "\u0120Baylor": 28245, "\u0120hemp": 28246, "\u0120youths": 28247, "\u0120Levels": 28248, "\u0120Xiao": 28249, "\u0120Ches": 28250, "\u0120endeavor": 28251, "\u0120Removed": 28252, "\u0120hippocamp": 28253, "Hell": 28254, "\u00e3\u0124\u012c": 28255, "805": 28256, "\u0120dinosaur": 28257, "\u0120Wrath": 28258, 
"\u0120Indonesian": 28259, "\u0120calculator": 28260, "\u0120Dictionary": 28261, "\u0120420": 28262, "\u0120MAG": 28263, "(_": 28264, "!,": 28265, "tarians": 28266, "\u0120restricting": 28267, "racuse": 28268, "\u0120weekday": 28269, "OUNT": 28270, "\u0120shrugged": 28271, "leground": 28272, "\u0120bald": 28273, "\u0120Doctors": 28274, "\u0120touted": 28275, "\u0120Maxwell": 28276, "\u0120214": 28277, "\u0120diplomat": 28278, "\u0120repression": 28279, "\u0120constituency": 28280, "vice": 28281, "ranked": 28282, "\u0120Napoleon": 28283, "gang": 28284, "\u0120Forever": 28285, "tun": 28286, "\u0120bulb": 28287, "\u0120PDT": 28288, "\u0120Cisco": 28289, "VEN": 28290, "\u0120resumed": 28291, "Steven": 28292, "\u0120Manitoba": 28293, "\u0120fabulous": 28294, "\u0120Agents": 28295, "1984": 28296, "\u0120amusing": 28297, "\u0120Mysteries": 28298, "\u0120orthodox": 28299, "floor": 28300, "\u0120questionnaire": 28301, "\u0120penetrate": 28302, "\u0120filmmakers": 28303, "\u0120Unc": 28304, "\u0120stamped": 28305, "\u0120thirteen": 28306, "\u0120outfield": 28307, "\u0120forwarded": 28308, "\u0120appra": 28309, "\u0120aided": 28310, "try": 28311, "\u0120unfocused": 28312, "\u0120Liz": 28313, "\u0120Wendy": 28314, "\u0120Scene": 28315, "Charg": 28316, "\u0120rejects": 28317, "\u0120leftist": 28318, "\u0120Providence": 28319, "\u0120Brid": 28320, "regn": 28321, "\u0120prophecy": 28322, "\u0120LIVE": 28323, "499": 28324, "\u0120forge": 28325, "\u0120FML": 28326, "\u0120intrinsic": 28327, "\u0120Frog": 28328, "\u0120wont": 28329, "\u0120Holt": 28330, "\u0120famed": 28331, "CLUS": 28332, "aepernick": 28333, "\u0120Hate": 28334, "\u0120Cay": 28335, "\u0120registering": 28336, "ortality": 28337, "ropy": 28338, "ocalyptic": 28339, "aan": 28340, "nav": 28341, "\u0120fascist": 28342, "IFIED": 28343, "\u0120implicated": 28344, "\u0120Resort": 28345, "\u0120Chandler": 28346, "\u0120Brick": 28347, "Pin": 28348, "ysc": 28349, "Usage": 28350, "\u0120Helm": 28351, "usra": 28352, "\u00e2\u013a\u0127\u00e2\u013a\u0127": 28353, "\u0120Abbas": 28354, "\u0120unanimously": 28355, "\u0120keeper": 28356, "\u0120addicted": 28357, "???": 28358, "\u0120helmets": 28359, "\u0120antioxid": 28360, "apsed": 28361, "808": 28362, "giene": 28363, "\u0120waits": 28364, "\u0120minion": 28365, "raved": 28366, "\u0120Porsche": 28367, "\u0120dreaming": 28368, "\u0120171": 28369, "\u0120Cain": 28370, "\u0120unfor": 28371, "asso": 28372, "\u0120Configuration": 28373, "kun": 28374, "hardt": 28375, "\u0120nested": 28376, "\u0120LDS": 28377, "LES": 28378, "\u0120tying": 28379, "enos": 28380, "\u0120cue": 28381, "\u0120Marqu": 28382, "skirts": 28383, "\u0120clicked": 28384, "\u0120expiration": 28385, "\u0120Accordingly": 28386, "\u0120WC": 28387, "\u0120blessings": 28388, "\u0120addictive": 28389, "\u0120Narr": 28390, "yx": 28391, "\u0120Jaguars": 28392, "\u0120rents": 28393, "\u0120Siber": 28394, "\u0120tipped": 28395, "ousse": 28396, "\u0120Fitzgerald": 28397, "\u0120hierarch": 28398, "outine": 28399, "\u0120wavelength": 28400, ">.": 28401, "chid": 28402, "\u0120Processing": 28403, "/+": 28404, "ranking": 28405, "Easy": 28406, "\u0120Construct": 28407, "\u0120tet": 28408, "insured": 28409, "HUD": 28410, "\u0120quoting": 28411, "\u0120communicated": 28412, "inx": 28413, "\u0120inmate": 28414, "\u0120erected": 28415, "\u0120Absolutely": 28416, "\u0120Surely": 28417, "\u0120unim": 28418, "\u0120Throne": 28419, "heid": 28420, "\u0120claws": 28421, "\u0120superstar": 28422, "\u0120Lenn": 28423, "\u0120Whis": 28424, "Uk": 28425, "abol": 28426, 
"\u0120sket": 28427, "\u0120Niet": 28428, "\u0120perks": 28429, "\u0120affinity": 28430, "\u0120openings": 28431, "phasis": 28432, "\u0120discriminate": 28433, "Tip": 28434, "vc": 28435, "\u0120grinding": 28436, "\u0120Jenny": 28437, "\u0120asthma": 28438, "holes": 28439, "\u0120Homer": 28440, "\u0120registers": 28441, "\u0120Glad": 28442, "\u0120creations": 28443, "\u0120lithium": 28444, "\u0120applause": 28445, "until": 28446, "Justice": 28447, "\u0120Turks": 28448, "\u0120scandals": 28449, "\u0120bake": 28450, "tank": 28451, "Mech": 28452, "\u0120Means": 28453, "\u0120Maid": 28454, "Republicans": 28455, "isal": 28456, "windows": 28457, "\u0120Santos": 28458, "\u0120vegetation": 28459, "338": 28460, "tri": 28461, "\u0120flux": 28462, "insert": 28463, "\u0120clarified": 28464, "\u0120mortg": 28465, "\u0120Chim": 28466, "\u0120Tort": 28467, "\u0120disclaim": 28468, "metal": 28469, "\u0120Aside": 28470, "\u0120induction": 28471, "\u0120infl": 28472, "\u0120atheists": 28473, "amph": 28474, "\u0120ether": 28475, "\u0120Vital": 28476, "\u0120Built": 28477, "Mind": 28478, "\u0120weaponry": 28479, "SET": 28480, "\u0120186": 28481, "admin": 28482, "gam": 28483, "contract": 28484, "afa": 28485, "\u0120derivatives": 28486, "\u0120snacks": 28487, "\u0120churn": 28488, "Econom": 28489, "\u0120capped": 28490, "\u0120Understanding": 28491, "\u0120Hers": 28492, "\u0120Iz": 28493, "\u0120duct": 28494, "IENT": 28495, "aughty": 28496, "\u0120\u00e2\u013e\u0136": 28497, "\u0120NP": 28498, "\u0120sailing": 28499, "Initialized": 28500, "\u0120ted": 28501, "\u0120reactors": 28502, "\u0120Lomb": 28503, "\u0120choke": 28504, "\u0120Worm": 28505, "\u0120admiration": 28506, "\u0120swung": 28507, "ensibly": 28508, "\u0120rash": 28509, "\u0120Goals": 28510, "\u0120Important": 28511, "Shot": 28512, "\u0120Ras": 28513, "\u0120trainers": 28514, "\u0120Bun": 28515, "Working": 28516, "\u0120harmed": 28517, "\u0120Pandora": 28518, "\u0120LTE": 28519, "\u0120mushroom": 28520, "\u0120CHAR": 28521, "\u0120Fee": 28522, "\u0120Moy": 28523, "Born": 28524, "oliberal": 28525, "\u0120Martial": 28526, "\u0120gentlemen": 28527, "\u0120lingering": 28528, "Official": 28529, "\u0120graffiti": 28530, "\u0120Names": 28531, "Der": 28532, "\u0120quint": 28533, "istrate": 28534, "azeera": 28535, "\u0120NOTICE": 28536, "\u0120Florence": 28537, "\u0120payable": 28538, "\u0120depicts": 28539, "\u0120Species": 28540, "Heart": 28541, "\u00e2\u0136\u0122\u00e2\u0136\u0122\u00e2\u0136\u0122\u00e2\u0136\u0122\u00e2\u0136\u0122\u00e2\u0136\u0122\u00e2\u0136\u0122\u00e2\u0136\u0122": 28542, "\u0120enclosed": 28543, "Increases": 28544, "Daily": 28545, "\u0120Lis": 28546, "\u0120enactment": 28547, "\u0120Bacon": 28548, "\u0120Steele": 28549, "demand": 28550, "\u0120183": 28551, "\u0120mouths": 28552, "\u0120stranded": 28553, "\u0120enhancement": 28554, "011": 28555, "\u0120Whats": 28556, "\u0120healed": 28557, "eny": 28558, "\u0120Rab": 28559, "\u0120340": 28560, "\u0120Labyrinth": 28561, "roach": 28562, "\u0120Yosh": 28563, "\u0120Clippers": 28564, "\u0120concerts": 28565, "Internet": 28566, "355": 28567, "\u0120stickers": 28568, "\u0120termed": 28569, "\u0120Axe": 28570, "\u0120grandparents": 28571, "France": 28572, "\u0120Clim": 28573, "\u0120Uh": 28574, "ulic": 28575, "\u0120thrill": 28576, "centric": 28577, "\u0120Overview": 28578, "\u0120Conduct": 28579, "\u0120substantive": 28580, "\u0120182": 28581, "mur": 28582, "\u0120stray": 28583, "\u0120Coff": 28584, "\u0120repetitive": 28585, "\u0120Forgotten": 28586, "\u0120qualification": 28587, 
"ewitness": 28588, "\u0120Zimbabwe": 28589, "\u0120simulated": 28590, "\u0120JD": 28591, "253": 28592, "\u0120Ware": 28593, "\u0120unsc": 28594, "Times": 28595, "\u0120summons": 28596, "\u0120disconnected": 28597, "\u0120184": 28598, "cius": 28599, "\u0120Gujar": 28600, "odka": 28601, "\u0120erase": 28602, "\u0120Tobacco": 28603, "elected": 28604, "\u0120uncont": 28605, "\u0120Shepard": 28606, "\u0120Lamp": 28607, "\u0120alerted": 28608, "\u0120operative": 28609, "arna": 28610, "uint": 28611, "\u0120negligence": 28612, "acements": 28613, "\u0120supra": 28614, "\u0120prevail": 28615, "\u0120Shark": 28616, "\u0120belts": 28617, "\u00e3\u0123\u00ab": 28618, "\u0120tighter": 28619, "Engineers": 28620, "\u0120inactive": 28621, "\u0120exponent": 28622, "\u0120Willie": 28623, "aples": 28624, "\u0120heir": 28625, "\u0120Hits": 28626, "iann": 28627, "\u0120Says": 28628, "\u0120currents": 28629, "\u0120Bengal": 28630, "\u0120arist": 28631, "Buffer": 28632, "\u0120breeze": 28633, "\u0120Wesley": 28634, "Cola": 28635, "\u0120pronoun": 28636, "\u0120deed": 28637, "\u0120Kling": 28638, "\u0120oft": 28639, "\u0120inflict": 28640, "\u0120punishing": 28641, "\u0120nm": 28642, "iku": 28643, "ODUCT": 28644, "014": 28645, "\u0120subsidy": 28646, "\u0120DEA": 28647, "\u0120Herbert": 28648, "\u0120Jal": 28649, "Bank": 28650, "\u0120deferred": 28651, "\u0120shipment": 28652, "Bott": 28653, "\u0120alle": 28654, "bearing": 28655, "HTML": 28656, "Offline": 28657, "\u0120213": 28658, "\u0120scrolling": 28659, "\u0120scanned": 28660, "\u0120Libyan": 28661, "\u0120TOP": 28662, "chrom": 28663, "dt": 28664, "column": 28665, "PsyNetMessage": 28666, "Zero": 28667, "\u0120torso": 28668, "050": 28669, "\u00e2\u0137\u0132": 28670, "\u0120imperson": 28671, "\u0120Schwartz": 28672, "udic": 28673, "\u0120pissed": 28674, "\u0120Sapp": 28675, "257": 28676, "\u0120ISPs": 28677, "ogl": 28678, "\u0120supervised": 28679, "\u0120adolescent": 28680, "\u0120attained": 28681, "\u0120Delivery": 28682, "\u0120Bunny": 28683, "\u01201937": 28684, "\u0120miniature": 28685, "\u0120os": 28686, "\u0120370": 28687, "608": 28688, "\u0120Mourinho": 28689, "\u0120innate": 28690, "\u0120tempo": 28691, "\u0120NM": 28692, "\u0120Fallen": 28693, "009": 28694, "\u0120provocative": 28695, "Streamer": 28696, "\u0120Benedict": 28697, "\u0120Bolshe": 28698, "\u0120turtle": 28699, "\u0120PCB": 28700, "\u0120Equal": 28701, "Director": 28702, "\u0120Rend": 28703, "\u0120fluids": 28704, "Authorities": 28705, "\u0120cousins": 28706, "requency": 28707, "\u0120Neighbor": 28708, "sets": 28709, "shared": 28710, "Charles": 28711, "password": 28712, "\u0120gears": 28713, "\u0120211": 28714, "\u0120Hardware": 28715, "rika": 28716, "\u0120upstream": 28717, "Hom": 28718, "\u0120disproportionately": 28719, "ivities": 28720, "\u0120undefined": 28721, "\u0120electrons": 28722, "\u0120commemor": 28723, "Eventually": 28724, "\u0120><": 28725, "\u0120irresponsible": 28726, "218": 28727, "\u0120Released": 28728, "\u0120OVER": 28729, "\u0120IGN": 28730, "\u0120Bread": 28731, "stellar": 28732, "\u0120Sage": 28733, "tted": 28734, "damage": 28735, "edition": 28736, "\u0120Prec": 28737, "\u0120lime": 28738, "\u0120confinement": 28739, "\u0120calorie": 28740, "weapon": 28741, "\u0120differing": 28742, "\u0120Sina": 28743, "mys": 28744, "amd": 28745, "\u0120intricate": 28746, "kk": 28747, "\u0120PAT": 28748, "\u00c3\u00a3o": 28749, "stones": 28750, "links": 28751, "\u0120ranch": 28752, "Semitic": 28753, "\u0120differentiate": 28754, "\u0120Singer": 28755, "occupied": 28756, 
"\u0120fortress": 28757, "cmd": 28758, "\u0120interception": 28759, "\u0120Ankara": 28760, "\u0120rept": 28761, "\u0120Solitaire": 28762, "\u0120remake": 28763, "pred": 28764, "\u0120dared": 28765, "autions": 28766, "\u0120BACK": 28767, "Running": 28768, "\u0120debugging": 28769, "\u0120graphs": 28770, "399": 28771, "\u0120Nigel": 28772, "\u0120bun": 28773, "\u0120pillow": 28774, "\u0120progressed": 28775, "fashioned": 28776, "\u0120obedience": 28777, "ERN": 28778, "\u0120rehears": 28779, "Cell": 28780, "tl": 28781, "Sher": 28782, "\u0120herald": 28783, "\u0120Payment": 28784, "\u0120Cory": 28785, "\u0120Dept": 28786, "\u0120repent": 28787, "\u0120Weak": 28788, "uckland": 28789, "\u0120pleasing": 28790, "\u0120shortages": 28791, "\u0120jurors": 28792, "\u0120Kab": 28793, "qqa": 28794, "Anti": 28795, "\u0120wow": 28796, "\u0120RCMP": 28797, "\u0120tsun": 28798, "\u0120Sic": 28799, "\u0120comprises": 28800, "\u0120spies": 28801, "\u0120precinct": 28802, "nu": 28803, "\u0120urges": 28804, "\u0120timed": 28805, "\u0120stripes": 28806, "\u0120Boots": 28807, "\u0120yen": 28808, "Advanced": 28809, "\u0120discrete": 28810, "\u0120Archangel": 28811, "employment": 28812, "Diff": 28813, "\u0120monuments": 28814, "\u0120209": 28815, "worker": 28816, "\u0120196": 28817, "\u0120Ig": 28818, "utterstock": 28819, "TPS": 28820, "Jac": 28821, "\u0120homelessness": 28822, "\u0120commentator": 28823, "\u0120racially": 28824, "fing": 28825, "seed": 28826, "Ele": 28827, "ellation": 28828, "\u0120ethanol": 28829, "\u0120parish": 28830, "\u0120Dong": 28831, "\u0120Awakening": 28832, "\u0120deviation": 28833, "\u0120Bearing": 28834, "\u0120Tsuk": 28835, "\u0120recess": 28836, "\u0120lymph": 28837, "\u0120Cannabis": 28838, "\u00e5\u013e": 28839, "\u0120NEWS": 28840, "\u0120dra": 28841, "\u0120Stefan": 28842, "\u0120Wrong": 28843, "\u0120SAM": 28844, "\u0120loosely": 28845, "\u0120interpreter": 28846, "\u0120Plain": 28847, "Government": 28848, "\u0120bigotry": 28849, "\u0120grenades": 28850, "avez": 28851, "pictured": 28852, "\u0120mandated": 28853, "\u0120Monk": 28854, "\u0120Pedro": 28855, "\u0120lava": 28856, "274": 28857, "\u0120cynical": 28858, "\u0120Scrolls": 28859, "locks": 28860, "Mp": 28861, "\u0120congregation": 28862, "ornings": 28863, "phil": 28864, "\u0120Ibid": 28865, "\u0120ferv": 28866, "\u0120disappearing": 28867, "\u0120arrogant": 28868, "syn": 28869, "\u0120Maver": 28870, "\u0120Suit": 28871, "241": 28872, "\u0120abbre": 28873, "ackers": 28874, "Pa": 28875, "\u0120Yel": 28876, "Whenever": 28877, "\u0120235": 28878, "\u0120Vine": 28879, "\u0120Anat": 28880, "\u0120extinct": 28881, "LET": 28882, "\u0120executable": 28883, "VERS": 28884, "oxide": 28885, "DNA": 28886, "\u0120Prel": 28887, "\u0120resentment": 28888, "\u0120comprise": 28889, "\u0120Aviv": 28890, "\u0120interceptions": 28891, "\u0120prolific": 28892, "INA": 28893, "\u0120Erin": 28894, "thought": 28895, "219": 28896, "\u0120Psychiatry": 28897, "unky": 28898, "chemist": 28899, "Ho": 28900, "\u0120McCoy": 28901, "\u0120bricks": 28902, "Los": 28903, "rily": 28904, "\u0120USSR": 28905, "\u0120rud": 28906, "\u0120laud": 28907, "\u0120Wise": 28908, "\u0120Emerald": 28909, "\u0120revived": 28910, "\u0120damned": 28911, "\u0120Repair": 28912, "idem": 28913, "ctica": 28914, "\u0120patriarch": 28915, "\u0120Nurs": 28916, "meg": 28917, "\u0120cheapest": 28918, "reements": 28919, "empty": 28920, "\u0120Celebr": 28921, "\u0120deprivation": 28922, "chanted": 28923, "\u0120Thumbnails": 28924, "Energy": 28925, "\u0120Ethan": 28926, "\u0120Qing": 28927, 
"\u0120opposes": 28928, "WIND": 28929, "vik": 28930, "\u0120Mau": 28931, "\u0120SUB": 28932, "667": 28933, "GRE": 28934, "\u0120Volunte": 28935, "nton": 28936, "Cook": 28937, "\u00e5\u0132": 28938, "esque": 28939, "\u0120plummet": 28940, "\u0120suing": 28941, "\u0120pronounce": 28942, "\u0120resisting": 28943, "\u0120Fishing": 28944, "\u0120Trials": 28945, "\u0120yell": 28946, "\u0120310": 28947, "\u0120induct": 28948, "\u0120personalized": 28949, "often": 28950, "Reb": 28951, "EMBER": 28952, "\u0120viewpoint": 28953, "\u0120existential": 28954, "())": 28955, "remove": 28956, "MENTS": 28957, "lasses": 28958, "\u0120evapor": 28959, "\u0120aisle": 28960, "meta": 28961, "\u0120reflective": 28962, "\u0120entitlement": 28963, "\u0120devised": 28964, "music": 28965, "ascade": 28966, "\u0120winding": 28967, "offset": 28968, "\u0120accessibility": 28969, "kered": 28970, "Better": 28971, "\u0120Johnston": 28972, "thinking": 28973, "Snow": 28974, "\u0120Croatia": 28975, "\u0120Atomic": 28976, "271": 28977, "348": 28978, "\u0120textbook": 28979, "\u0120Sixth": 28980, "\u0120\u00d8\u00a7\u00d9\u0126": 28981, "\u0120slider": 28982, "\u0120Burger": 28983, "bol": 28984, "Sync": 28985, "\u0120grandchildren": 28986, "\u0120cerv": 28987, "+)": 28988, "\u0120eternity": 28989, "\u0120tweeting": 28990, "\u0120speculative": 28991, "\u0120pivotal": 28992, "\u0120WP": 28993, "\u0120TER": 28994, "ynamic": 28995, "\u0120upl": 28996, "\u0120Cats": 28997, "perhaps": 28998, "\u0120classmates": 28999, "\u0120blatant": 29000, "'-": 29001, "\u0120lakh": 29002, "antine": 29003, "\u0120Borg": 29004, "iom": 29005, "/(": 29006, "\u0120Athletic": 29007, "\u0120sar": 29008, "OTA": 29009, "\u0120Hoffman": 29010, "Nevertheless": 29011, "\u0120adorable": 29012, "\u0120spawned": 29013, "Associated": 29014, "\u0120Domestic": 29015, "\u0120implant": 29016, "\u0120Luxem": 29017, "\u0120Kens": 29018, "\u0120pumps": 29019, "\u0120SAT": 29020, "Attributes": 29021, "509": 29022, "avour": 29023, "\u0120centralized": 29024, "\u0120TN": 29025, "\u0120freshly": 29026, "\u0120Achieve": 29027, "\u0120outsiders": 29028, "herty": 29029, "\u0120Ree": 29030, "\u0120Towers": 29031, "\u0120Dart": 29032, "akable": 29033, "\u0120mp": 29034, "\u0120Heavenly": 29035, "\u0120ripe": 29036, "\u0120Caroline": 29037, "ryan": 29038, "\u0120classics": 29039, "\u0120retiring": 29040, "\u0120228": 29041, "\u0120ah": 29042, "\u0120dealings": 29043, "\u0120punching": 29044, "\u0120Chapman": 29045, "Options": 29046, "maxwell": 29047, "volume": 29048, "\u0120stal": 29049, "\u0120exported": 29050, "\u0120Quite": 29051, "\u0120numerical": 29052, "Burn": 29053, "Fact": 29054, "\u0120Keystone": 29055, "\u0120trending": 29056, "\u0120altering": 29057, "\u0120Africans": 29058, "478": 29059, "\u0120MN": 29060, "\u0120Knock": 29061, "\u0120temptation": 29062, "\u0120prestige": 29063, "Overview": 29064, "\u0120Traditional": 29065, "\u0120Bahrain": 29066, "Private": 29067, "\u0120HOU": 29068, "\u0120barr": 29069, "\u0120Tat": 29070, "Cube": 29071, "USD": 29072, "\u0120Grande": 29073, "\u0120Gat": 29074, "\u0120Flo": 29075, "\u0120resides": 29076, "\u0120indec": 29077, "volent": 29078, "\u0120perpetual": 29079, "ubes": 29080, "\u0120worldview": 29081, "\u0120Quantum": 29082, "\u0120filtered": 29083, "\u0120ensu": 29084, "orgetown": 29085, "ERSON": 29086, "\u0120Mild": 29087, "379": 29088, "OTT": 29089, "\u00c3\u00a5": 29090, "\u0120vitamins": 29091, "\u0120ribbon": 29092, "\u0120sincerely": 29093, "\u0120Hin": 29094, "\u0120eighteen": 29095, "\u0120contradictory": 29096, 
"\u0120glaring": 29097, "\u0120expectancy": 29098, "\u0120conspir": 29099, "\u0120monstrous": 29100, "\u0120380": 29101, "reci": 29102, "\u0120handic": 29103, "\u0120pumped": 29104, "\u0120indicative": 29105, "\u0120rapp": 29106, "\u0120avail": 29107, "\u0120LEGO": 29108, "\u0120Marijuana": 29109, "1985": 29110, "erton": 29111, "\u0120twentieth": 29112, "################################": 29113, "\u0120Swamp": 29114, "\u0120valuation": 29115, "\u0120affiliates": 29116, "adjusted": 29117, "\u0120Facility": 29118, "262": 29119, "\u0120enzymes": 29120, "itudinal": 29121, "\u0120imprint": 29122, "Site": 29123, "\u0120installer": 29124, "\u0120TRA": 29125, "mology": 29126, "linear": 29127, "\u0120Collective": 29128, "igating": 29129, "\u0120Token": 29130, "\u0120speculated": 29131, "KN": 29132, "\u0120Cly": 29133, "ority": 29134, "\u0120defer": 29135, "\u0120inspectors": 29136, "approved": 29137, "RM": 29138, "\u0120Suns": 29139, "\u0120informing": 29140, "\u0120Syracuse": 29141, "ibli": 29142, "765": 29143, "\u0120glove": 29144, "\u0120authorize": 29145, "\u00e2\u0122\u00a6\u00e2\u0122\u00a6\u00e2\u0122\u00a6\u00e2\u0122\u00a6\u00e2\u0122\u00a6\u00e2\u0122\u00a6\u00e2\u0122\u00a6\u00e2\u0122\u00a6": 29146, "\u0120Cruise": 29147, "\u0120contracting": 29148, "shell": 29149, "IFE": 29150, "\u0120Jewel": 29151, "pract": 29152, "\u0120Photoshop": 29153, "\u0120Knowing": 29154, "harm": 29155, "\u0120attractions": 29156, "adan": 29157, "etus": 29158, "018": 29159, "wagen": 29160, "Alt": 29161, "\u0120multiply": 29162, "\u0120equilibrium": 29163, ":{": 29164, "\u0120Fighters": 29165, "\u0120Edgar": 29166, "\u0120fourteen": 29167, "Govern": 29168, "\u0120misuse": 29169, "\u0120abusing": 29170, "\u0120ancestry": 29171, "ramer": 29172, "644": 29173, "\u0120worms": 29174, "\u0120thicker": 29175, "\u0120Combine": 29176, "\u0120peasants": 29177, "\u0120vind": 29178, "\u0120conquest": 29179, "\u0120mocked": 29180, "\u0120cinnamon": 29181, "\u0120Cald": 29182, "\u0120Gallup": 29183, "\u0120avoidance": 29184, "\u0120incarnation": 29185, "\u0120Strat": 29186, "\u0120tasted": 29187, "enta": 29188, "\u0120Neal": 29189, "pared": 29190, "\u0120terminology": 29191, "jection": 29192, "Scientists": 29193, "\u0120INS": 29194, "\u0120Dee": 29195, "\u0120directories": 29196, "Road": 29197, "\u0120Shap": 29198, "bright": 29199, "\u0120Directors": 29200, "\u0120Column": 29201, "\u0120bob": 29202, "\u0120preferably": 29203, "\u0120glitch": 29204, "furt": 29205, "\u0120eg": 29206, "idis": 29207, "CBC": 29208, "\u0120surrendered": 29209, "\u0120testament": 29210, "336": 29211, "uggest": 29212, "\u0120Nil": 29213, "another": 29214, "\u0120pathetic": 29215, "\u0120Donna": 29216, "\u0120218": 29217, "\u0120Avery": 29218, "\u0120whiskey": 29219, "\u0120fixture": 29220, "\u0120Conquest": 29221, "\u0120bets": 29222, "Occ": 29223, "\u0120Leicester": 29224, "].\"": 29225, "\u0120));": 29226, "\u0120flashes": 29227, "456": 29228, "\u0120masked": 29229, "gebra": 29230, "\u0120computed": 29231, "chel": 29232, "auder": 29233, "\u0120defeats": 29234, "\u0120Liberation": 29235, "\u0120Osama": 29236, "\u0120Vive": 29237, "Changes": 29238, "Channel": 29239, "\u0120tariffs": 29240, "\u0120mage": 29241, "\u0120Sax": 29242, "\u0120inadvertently": 29243, "\u0120CRE": 29244, "\u0120Reaper": 29245, "inky": 29246, "grading": 29247, "\u0120stereotyp": 29248, "\u0120curl": 29249, "\u0120FANT": 29250, "\u0120frameworks": 29251, "Mom": 29252, "\u0120Anch": 29253, "\u0120flavour": 29254, "carbon": 29255, "\u0120permitting": 29256, "letcher": 29257, 
"\u0120Mozilla": 29258, "\u0120Parking": 29259, "\u0120Champ": 29260, "Scroll": 29261, "\u0120murderer": 29262, "\u0120rested": 29263, "\u0120owes": 29264, "\u0120Poss": 29265, "ADD": 29266, "IFF": 29267, "resolution": 29268, "\u0120Mining": 29269, "\u0120comparative": 29270, "Dim": 29271, "\u0120neighbouring": 29272, "\u0120AST": 29273, "\u0120Toxic": 29274, "\u0120biases": 29275, "\u0120gunfire": 29276, "urous": 29277, "\u0120Moment": 29278, "1983": 29279, "\u0120pervasive": 29280, "ttp": 29281, "\u0120Normally": 29282, "rir": 29283, "Sarah": 29284, "\u0120Albany": 29285, "\u0120unsett": 29286, "\u0120SMS": 29287, "ipers": 29288, "layer": 29289, "\u0120Whites": 29290, "uple": 29291, "\u0120turbo": 29292, "\u0120Leeds": 29293, "\u0120thats": 29294, "\u0120Miner": 29295, "MER": 29296, "\u0120Reign": 29297, "\u0120perme": 29298, "\u0120Blitz": 29299, "\u01201934": 29300, "\u0120intimidating": 29301, "tube": 29302, "\u0120eccentric": 29303, "abolic": 29304, "boxes": 29305, "\u0120Associates": 29306, "votes": 29307, "\u0120simulate": 29308, "umbo": 29309, "astery": 29310, "\u0120shipments": 29311, "FFFF": 29312, "anth": 29313, "\u0120seasoned": 29314, "\u0120experimentation": 29315, "\u00e2\u0138\u0142": 29316, "laws": 29317, "Meet": 29318, "iddles": 29319, "antics": 29320, "Rating": 29321, "ISIS": 29322, "hift": 29323, "\u0120fronts": 29324, "buf": 29325, "017": 29326, "\u0120unatt": 29327, "\u0120Dil": 29328, "leases": 29329, "\u0120Gardens": 29330, "777": 29331, "touch": 29332, "vell": 29333, "458": 29334, "\u0120=====": 29335, "saving": 29336, "\u0120erosion": 29337, "\u0120Quin": 29338, "\u0120earns": 29339, "\u0120accomplishment": 29340, "\u0120Wei": 29341, "\u0120<[": 29342, "_____": 29343, "\u0120irrig": 29344, "\u0120Teddy": 29345, "\u0120conquered": 29346, "\u0120Armored": 29347, "\u0120asserts": 29348, "\u0120manipulating": 29349, "r\u00c3\u00a9": 29350, "\u0120transcripts": 29351, "Gallery": 29352, "\u0120plotting": 29353, "Neil": 29354, "\u0120betrayal": 29355, "loader": 29356, "\u0120Sul": 29357, "\u0120displacement": 29358, "\u0120royalty": 29359, "\u0120WI": 29360, "heit": 29361, "\u0120Devices": 29362, "allel": 29363, "\u0120municipalities": 29364, "\u0120canal": 29365, "Stars": 29366, "\u0120UAE": 29367, "\u0120\"\u00e2\u0122\u00a6": 29368, "\u0120CU": 29369, "above": 29370, "\u0120resonance": 29371, "\u0120guiActiveUn": 29372, "added": 29373, "\u0120Braves": 29374, "\u0120Ibn": 29375, "\u0120hereby": 29376, "\u0120BRE": 29377, "\u0120shareholder": 29378, "\u0120Hir": 29379, "\u0120Ji": 29380, "\u0120strangely": 29381, "\u0120admired": 29382, "\u0120plight": 29383, "\u0120bachelor": 29384, "\u0120Pole": 29385, "ciplinary": 29386, "Tony": 29387, "\u0120Armenian": 29388, "\u0120unman": 29389, "\u0120Zionist": 29390, "Stage": 29391, "iscover": 29392, "\u0120automotive": 29393, "\u0120sidelines": 29394, "\u0120slick": 29395, "\u0120Renaissance": 29396, "\u0120FUN": 29397, "Images": 29398, "\u0120Haj": 29399, "\u0120ping": 29400, "\u0120shortcut": 29401, "\u0120Blvd": 29402, "\u0120Looks": 29403, "\u0120bursts": 29404, "\u0120clamp": 29405, "\u0120mish": 29406, "\u0120sorting": 29407, "\u0120patriot": 29408, "\u0120correctness": 29409, "\u0120Scandinav": 29410, "\u0120Cavaliers": 29411, "python": 29412, "azar": 29413, "\u0120375": 29414, "\u0120Jaune": 29415, "409": 29416, "\u0120detrimental": 29417, "\u0120stabbing": 29418, "\u0120poisoned": 29419, "\u0120fountain": 29420, "ocent": 29421, "orst": 29422, "\u0120Mari": 29423, "\u0120rains": 29424, "\u0120Overs": 29425, 
"\u0120Institution": 29426, "udget": 29427, "AMY": 29428, "tale": 29429, "\u0120KR": 29430, "\u0120Prices": 29431, "\u0120headaches": 29432, "\u0120landsl": 29433, "\u0120Aura": 29434, "Bonus": 29435, "\u0120Zhao": 29436, "\u0120Hip": 29437, "\u0120hops": 29438, "\u0120Kurdistan": 29439, "\u0120exploiting": 29440, "ryn": 29441, "\u0120hypocrisy": 29442, "opening": 29443, "\u0120gunshot": 29444, "\u0120wed": 29445, "interstitial": 29446, "Interstitial": 29447, "\u0120amen": 29448, "Breaking": 29449, "\u0120marketed": 29450, "Wire": 29451, "\u0120Crowd": 29452, "Continue": 29453, "\u0120Known": 29454, "\u0120Effective": 29455, "orean": 29456, "izons": 29457, "Joseph": 29458, "\u0120escalation": 29459, "username": 29460, "\u0120curtain": 29461, "ATES": 29462, "\u0120PAR": 29463, "\u0120Miy": 29464, "\u0120counterfe": 29465, "lene": 29466, "\u0120contenders": 29467, "daily": 29468, "\u0120Asc": 29469, "\u0120Phillip": 29470, "mostly": 29471, "\u0120filename": 29472, "hene": 29473, "\u0120resembling": 29474, "\u0120staging": 29475, "\u0120Chloe": 29476, "\u0120wiring": 29477, "Hon": 29478, "\u0120Renew": 29479, "ottage": 29480, "\u0120Hybrid": 29481, "much": 29482, "\u0120strokes": 29483, "\u0120policymakers": 29484, "APTER": 29485, "\u0120Arkham": 29486, "plot": 29487, "\u0120assistants": 29488, "\u0120deport": 29489, "\u0120Sega": 29490, "\u0120influenza": 29491, "\u0120Cursed": 29492, "\u0120Kobe": 29493, "\u0120skinny": 29494, "Provider": 29495, "\u0120Rip": 29496, "\u0120incremental": 29497, "products": 29498, "BF": 29499, "\u0120dome": 29500, "\u0120Credits": 29501, "\u0120losers": 29502, "ints": 29503, "\u0120Betty": 29504, "\u0120Talent": 29505, "\u0120DAM": 29506, "Lv": 29507, "Ess": 29508, "\u0120dens": 29509, "temp": 29510, "Judge": 29511, "odic": 29512, "\u0120'(": 29513, "URES": 29514, "etsk": 29515, "VO": 29516, "\u0120retrieved": 29517, "\u0120architects": 29518, "\u00d9\u0129": 29519, "\u0120ethic": 29520, "\u0120Secondary": 29521, "stocks": 29522, "adia": 29523, "\u0120325": 29524, "\u0120Opinion": 29525, "\u0120simultaneous": 29526, "\u0120dizz": 29527, "ulp": 29528, "\u0120smuggling": 29529, "ippery": 29530, "Random": 29531, "facing": 29532, "\u0120Das": 29533, "\u0120stockp": 29534, "\u0120disclosures": 29535, "pointer": 29536, "\u0120coral": 29537, "\u0120Selection": 29538, "\u0120Pike": 29539, "ivalent": 29540, "\u0120ruthless": 29541, "\u0120Rim": 29542, "\u0120ensuing": 29543, "\u0120Experiment": 29544, "\u0120congressman": 29545, "\u0120believer": 29546, "\u0120unspecified": 29547, "\u0120Mord": 29548, "\u0120knowledgeable": 29549, "\u0120VERY": 29550, "TX": 29551, "\u0120straps": 29552, "\u0120turf": 29553, "apeshifter": 29554, "\u0120marital": 29555, "\u0120flock": 29556, "\u00e3\u0123\u0128": 29557, "263": 29558, "AMES": 29559, "\u0120Opposition": 29560, "\u0120treasures": 29561, "\u0120GOD": 29562, "\u0120modeled": 29563, "\u0120WORLD": 29564, "\u0120([": 29565, "\u0120Usage": 29566, "HF": 29567, "\u0120$(": 29568, "ussed": 29569, "\u0120pioneer": 29570, "Eight": 29571, "parse": 29572, "bread": 29573, "ritz": 29574, "\u0120Miranda": 29575, "\u0120Kant": 29576, "++)": 29577, "oren": 29578, "\u0120provoked": 29579, "\u0120breeds": 29580, "\u0120Includes": 29581, "\u0120Pastebin": 29582, "\u0120Flip": 29583, "Java": 29584, "\u0120brink": 29585, "\u0120rumored": 29586, "\u0120unseen": 29587, "\u0120garnered": 29588, "\u0120Defin": 29589, "alted": 29590, "\u0120tattoos": 29591, "\u0120hesitation": 29592, "isitions": 29593, "\u0120Weaver": 29594, "\u0120Reporting": 29595, 
"\u0120therapies": 29596, "\u0120consultants": 29597, "\u0120residual": 29598, "\u0120Mali": 29599, "\u0120Roma": 29600, "iago": 29601, "\u0120Residents": 29602, "ubi": 29603, "\u0120remedies": 29604, "\u0120adaptive": 29605, "\u0120Alive": 29606, "\u0120Barcl": 29607, "\u0120wallets": 29608, "crypt": 29609, "etermination": 29610, "\u0120Pelosi": 29611, "\u0120slipping": 29612, "otonin": 29613, "\u0120alliances": 29614, "patrick": 29615, "iris": 29616, "\u0120orth": 29617, "\u0120Perkins": 29618, "\u0120DeV": 29619, "\u0120Gets": 29620, "\u0120drying": 29621, "gee": 29622, "forest": 29623, "\u0120Forget": 29624, "orem": 29625, "339": 29626, "\u0120vaguely": 29627, "\u0120Dion": 29628, "\u0120Porn": 29629, "\u0120HOW": 29630, "\u0120pneum": 29631, "\u0120rubble": 29632, "\u0120Taste": 29633, "encia": 29634, "\u0120Gel": 29635, "\u0120dst": 29636, "\u0120245": 29637, "\u0120Morocco": 29638, "inflamm": 29639, "\u0120Twins": 29640, "\u0120bots": 29641, "daughter": 29642, "\u0120Balk": 29643, "\u0120brethren": 29644, "\u0120logos": 29645, "\u0120gobl": 29646, "fps": 29647, "\u0120subdivision": 29648, "\u0120pawn": 29649, "\u0120squeezed": 29650, "\u0120morale": 29651, "\u0120DW": 29652, "'\"": 29653, "\u0120knot": 29654, "ooky": 29655, "\u0120divisive": 29656, "\u0120boosted": 29657, "chy": 29658, "\u00e3\u0125\u0132": 29659, "ifact": 29660, "\u0120newcomers": 29661, "\u0120Wrestling": 29662, "\u0120scouts": 29663, "wolves": 29664, "Rat": 29665, "\u0120nineteenth": 29666, "\u0120Osborne": 29667, "Stats": 29668, "\u0120empowered": 29669, "\u0120psychopath": 29670, "\u0120OEM": 29671, "uggage": 29672, "\u0120PK": 29673, "\u0120Mohammad": 29674, "Pak": 29675, "\u0120anarchists": 29676, "\u0120Extract": 29677, "esthes": 29678, "\u0120Stockholm": 29679, "loo": 29680, "\u0120Graph": 29681, "\u0120deploying": 29682, "\u0120Stranger": 29683, "\u0120Mold": 29684, "\u0120staffer": 29685, "\u0120discounted": 29686, "uckle": 29687, "please": 29688, "\u0120Landing": 29689, "\u00c3\u0143a": 29690, "\u0120193": 29691, "\u0120ante": 29692, "\u0120repetition": 29693, "\u0120+/-": 29694, "\u0120parody": 29695, "\u0120lively": 29696, "AAA": 29697, "\u0120Horus": 29698, "\u0120pits": 29699, "inders": 29700, "LOC": 29701, "\u0120Venice": 29702, "406": 29703, "\u0120Discover": 29704, "\u00e2\u0128": 29705, "ellectual": 29706, "\u0120pens": 29707, "\u0120eyel": 29708, "iguous": 29709, "Impl": 29710, "\u0120joking": 29711, "\u0120inval": 29712, "\u0120Belfast": 29713, "\u0120creditors": 29714, "\u0120Skywalker": 29715, "ovsky": 29716, "\u0120ceasefire": 29717, "\u0120seals": 29718, "isoft": 29719, ")).": 29720, "\u0120Felix": 29721, "ITS": 29722, "\u0120tresp": 29723, "\u0120Blockchain": 29724, "eware": 29725, "\u0120Schwar": 29726, "enne": 29727, "mounted": 29728, "\u0120Beacon": 29729, "lesh": 29730, "\u0120immensely": 29731, "\u0120cheering": 29732, "Employ": 29733, "scene": 29734, "ishly": 29735, "atchewan": 29736, "\u0120Nicolas": 29737, "\u0120drained": 29738, "\u0120Exit": 29739, "\u0120Azerb": 29740, "jun": 29741, "\u0120floated": 29742, "uania": 29743, "Deep": 29744, "\u0120superv": 29745, "\u0120mystical": 29746, "\u0120Dollar": 29747, "\u0120Apostle": 29748, "\u0120REL": 29749, "\u0120Provided": 29750, "\u0120Bucks": 29751, "\u00e3\u0125\u00b4": 29752, "cutting": 29753, "\u0120enhancements": 29754, "\u0120Penguins": 29755, "\u0120Isaiah": 29756, "\u0120jerk": 29757, "\u0120Wyn": 29758, "\u0120stalled": 29759, "\u0120cryptocurrencies": 29760, "\u0120Roland": 29761, "single": 29762, "\u0120lumin": 29763, 
"\u0120Fellow": 29764, "\u0120Capacity": 29765, "\u0120Kazakh": 29766, "WN": 29767, "\u0120financed": 29768, "389": 29769, "\u0120tid": 29770, "\u0120collusion": 29771, "\u0120Myr": 29772, "\u00ee\u0122": 29773, "Senator": 29774, "\u0120pediatric": 29775, "\u0120neatly": 29776, "\u0120sandwiches": 29777, "\u0120Architecture": 29778, "\u0120tucked": 29779, "\u0120balcony": 29780, "\u0120earthquakes": 29781, "quire": 29782, "Future": 29783, "\u0120hefty": 29784, "\u00e9\u0139": 29785, "\u0120specializes": 29786, "\u0120stresses": 29787, "\u0120sender": 29788, "\u0120misunderstanding": 29789, "\u0120epile": 29790, "\u0120provoke": 29791, "\u0120Colors": 29792, "\u0120dismay": 29793, "uko": 29794, "[_": 29795, "586": 29796, "neutral": 29797, "\u0120donating": 29798, "\u0120Randall": 29799, "Multi": 29800, "\u0120conveniently": 29801, "\u0120Sung": 29802, "\u0120Coca": 29803, "\u0120tents": 29804, "\u0120Acceler": 29805, "\u0120partnered": 29806, "272": 29807, "irming": 29808, "\u0120BAS": 29809, "sometimes": 29810, "\u0120objected": 29811, "ubric": 29812, "posed": 29813, "LCS": 29814, "grass": 29815, "\u0120attributable": 29816, "VIS": 29817, "Israeli": 29818, "\u0120repeats": 29819, "\u0120RM": 29820, "vag": 29821, "uta": 29822, "inous": 29823, "\u0120inert": 29824, "\u0120Miguel": 29825, "\u00e6\u0143": 29826, "\u0120Hawaiian": 29827, "Board": 29828, "\u0120artific": 29829, "\u0120Azerbai": 29830, "asio": 29831, "\u0120Rent": 29832, "AIN": 29833, "\u0120appliances": 29834, "\u0120nationality": 29835, "\u0120asshole": 29836, "\u0120Neb": 29837, "\u0120notch": 29838, "hani": 29839, "\u0120Bride": 29840, "Availability": 29841, "\u0120intercepted": 29842, "\u0120continental": 29843, "\u0120swelling": 29844, "\u0120Perspect": 29845, "bies": 29846, ".<": 29847, "ithmetic": 29848, "\u0120Lara": 29849, "\u0120tempting": 29850, "addr": 29851, "\u0120overseeing": 29852, "clad": 29853, "\u0120DV": 29854, "\u0120Gingrich": 29855, "\u0120mun": 29856, "\u0120Appropri": 29857, "\u0120alterations": 29858, "\u0120Patreon": 29859, "\u0120havoc": 29860, "\u0120disciplines": 29861, "\u0120notoriously": 29862, "akuya": 29863, "ieri": 29864, "?).": 29865, "\u0120Went": 29866, "\u0120silicon": 29867, "\u0120tremb": 29868, "Container": 29869, "Known": 29870, "\u0120mortar": 29871, "este": 29872, "icka": 29873, "Arthur": 29874, "\u0120Previously": 29875, "\u0120Marty": 29876, "\u0120sparse": 29877, "gins": 29878, "\u0120inward": 29879, "\u0120Participant": 29880, "Copy": 29881, "\u0120Misc": 29882, "\u0120antibiotic": 29883, "\u0120Retro": 29884, "\u0120elusive": 29885, "\u0120assail": 29886, "\u0120Battalion": 29887, "\u0120Bought": 29888, "\u0120diminish": 29889, "\u0120Europa": 29890, "session": 29891, "\u0120Dangerous": 29892, "iesel": 29893, "\u0120disbelief": 29894, "\u0120blasts": 29895, "extreme": 29896, "\u0120Boyd": 29897, "\u0120Projects": 29898, "\u0120Guys": 29899, "\u0120undergone": 29900, "\u0120grill": 29901, "\u0120Dwight": 29902, "\u0120197": 29903, "USER": 29904, "\u0120filesystem": 29905, "\u0120clocks": 29906, "Taylor": 29907, "\u0120wrapper": 29908, "\u0120folding": 29909, "ousand": 29910, "\u0120Philippine": 29911, "ATIONAL": 29912, "\u0120Perth": 29913, "\u0120ashes": 29914, "\u0120accumulate": 29915, "\u0120Gateway": 29916, "Shop": 29917, "orkshire": 29918, "Han": 29919, "\u0120Barrel": 29920, "\u0120Leh": 29921, "\u0120XV": 29922, "\u0120whim": 29923, "\u0120repo": 29924, "\u0120CG": 29925, "\u0120Mam": 29926, "\u0120incorporating": 29927, "\u0120bailout": 29928, "\u0120linguistic": 29929, 
"\u0120disinteg": 29930, "CLE": 29931, "\u0120cinematic": 29932, "\u0120Fiber": 29933, "Syn": 29934, "ilion": 29935, "\u0120Compos": 29936, "chens": 29937, "\u0120neoc": 29938, "\u0120boiled": 29939, "FINE": 29940, "ono": 29941, "uncle": 29942, "iken": 29943, "\u0120BM": 29944, "\u00ce\u00b9": 29945, "\u0120receipts": 29946, "\u0120disposed": 29947, "\u0120Thirty": 29948, "\u0120Rough": 29949, "\u0120ABS": 29950, "\u0120notwithstanding": 29951, "ollen": 29952, "#$": 29953, "\u0120unreliable": 29954, "\u0120bloom": 29955, "\u0120mediocre": 29956, "\u0120tram": 29957, "\u0120Tasman": 29958, "\u0120shakes": 29959, "\u0120manifesto": 29960, "\u0120MW": 29961, "\u0120satisfactory": 29962, "\u0120shores": 29963, "\u0120computation": 29964, "\u0120assertions": 29965, "ormons": 29966, "arag": 29967, "abit": 29968, "Democrats": 29969, "\u0120Loot": 29970, "\u0120Volks": 29971, "haired": 29972, "\u0120gravitational": 29973, "Sing": 29974, "\u0120Miz": 29975, "\u0120throttle": 29976, "\u0120tyranny": 29977, "\u0120Views": 29978, "\u0120robber": 29979, "\u0120Minority": 29980, "\u0120shrine": 29981, "scope": 29982, "purpose": 29983, "\u0120nucleus": 29984, "ourcing": 29985, "\u0120USDA": 29986, "\u0120DHS": 29987, "wra": 29988, "\u0120Bowie": 29989, "Scale": 29990, "\u0120BEL": 29991, "xi": 29992, "Iter": 29993, "\u0120(),": 29994, "wright": 29995, "\u0120sailors": 29996, "oused": 29997, "NASA": 29998, "\u0120Proof": 29999, "\u0120Mineral": 30000, "token": 30001, "\u0120FD": 30002, "Rew": 30003, "\u0120ell": 30004, "630": 30005, "\u0120chancellor": 30006, "\u0120Gos": 30007, "\u0120amounted": 30008, "\u0120Recre": 30009, "omez": 30010, "\u0120Optim": 30011, "\u0120Olive": 30012, "\u0120tracker": 30013, "owler": 30014, "\u0120Unique": 30015, "Root": 30016, "\u0120maritime": 30017, "\u0120Quran": 30018, "\u0120Adapt": 30019, "\u0120ecosystems": 30020, "\u0120Repeat": 30021, "\u0120Soy": 30022, "\u0120IMP": 30023, "\u0120graduating": 30024, "andem": 30025, "Pur": 30026, "\u0120Reset": 30027, "\u0120Trick": 30028, "\u0120Philly": 30029, "\u0120Tue": 30030, "\u0120Malaysian": 30031, "\u0120climax": 30032, "\u0120bury": 30033, "\u0120conspic": 30034, "\u0120Southampton": 30035, "\u0120Flowers": 30036, "\u0120escorted": 30037, "\u0120Educational": 30038, "\u0120IRC": 30039, "\u0120brutally": 30040, "eating": 30041, "\u0120pillar": 30042, "\u0120Sang": 30043, "\u0120Jude": 30044, "arling": 30045, "\u0120Amnesty": 30046, "\u0120reminding": 30047, "\u0120Administrative": 30048, "hesda": 30049, "\u0120flashed": 30050, "\u0120PBS": 30051, "perate": 30052, "feature": 30053, "\u0120swipe": 30054, "\u0120graves": 30055, "oultry": 30056, "261": 30057, "breaks": 30058, "\u0120Guer": 30059, "\u0120shrimp": 30060, "\u0120Voting": 30061, "quist": 30062, "\u0120analytical": 30063, "\u0120tablespoons": 30064, "\u0120SOU": 30065, "\u0120researched": 30066, "\u0120disrupted": 30067, "\u0120jour": 30068, "\u0120replica": 30069, "\u0120cartoons": 30070, "bians": 30071, "})": 30072, "copy": 30073, "Got": 30074, "ouched": 30075, "PUT": 30076, "\u0120swarm": 30077, "notations": 30078, "said": 30079, "\u0120rebuilt": 30080, "\u0120collaborate": 30081, "\u0120raging": 30082, "\u0120nar": 30083, "\u0120demographics": 30084, "\u0120DDR": 30085, "\u0120distrust": 30086, "ossier": 30087, "\u0120Kro": 30088, "\u0120pumpkin": 30089, "\u0120regrets": 30090, "\u0120fatalities": 30091, "\u0120Lens": 30092, "\u0120Ole": 30093, "pd": 30094, "\u0120puppet": 30095, "\u0120Outlook": 30096, "\u0120Stam": 30097, "Ol": 30098, "Fair": 30099, "UU": 
30100, "\u0120rewritten": 30101, "\u00c4\u00b1": 30102, "\u0120fascinated": 30103, "\u0120vectors": 30104, "\u0120tribunal": 30105, "uay": 30106, "\u0120Mats": 30107, "\u0120Coins": 30108, "[[": 30109, "\u0120181": 30110, "\u0120renders": 30111, "\u0120Kaepernick": 30112, "\u0120espionage": 30113, "\u0120summ": 30114, "\u0120ditch": 30115, "Account": 30116, "\u0120spreadsheet": 30117, "\u0120mutant": 30118, "past": 30119, "407": 30120, "\u0120dye": 30121, "\u0120initiation": 30122, "\u01204000": 30123, "\u0120punishable": 30124, "\u0120thinner": 30125, "\u0120Khal": 30126, "\u0120intermedi": 30127, "Dun": 30128, "\u0120Gotham": 30129, "\u0120eagerly": 30130, "\u0120vaginal": 30131, "powers": 30132, "VW": 30133, "\u0120WATCHED": 30134, "\u0120predator": 30135, "amsung": 30136, "\u0120disparity": 30137, "\u0120[*": 30138, "\u0120amph": 30139, "\u0120outskirts": 30140, "\u0120Spirits": 30141, "\u0120skeletal": 30142, "\u00d0\u00bb": 30143, "\u0120Rear": 30144, "\u0120issuance": 30145, "\u0120Logic": 30146, "released": 30147, "ZZ": 30148, "\u0120Bound": 30149, "Entry": 30150, "\u0120exits": 30151, "isol": 30152, "\u0120Founder": 30153, "\u0120wre": 30154, "\u0120Greenland": 30155, "\u0120MMO": 30156, "taker": 30157, "INC": 30158, "\u00e3\u0123\u00be": 30159, "\u0120hourly": 30160, "henko": 30161, "\u0120fantasies": 30162, "\u0120disob": 30163, "\u0120demolition": 30164, "\u00e3\u0125\u012d": 30165, "\u0120enlisted": 30166, "ratulations": 30167, "\u0120misguided": 30168, "\u0120ensured": 30169, "\u0120discouraged": 30170, "mort": 30171, "\u0120flank": 30172, "\u0120cess": 30173, "\u0120reacts": 30174, "\u0120Sere": 30175, "sensitive": 30176, "\u0120Serpent": 30177, "assad": 30178, "\u0120247": 30179, "\u0120calmly": 30180, "busters": 30181, "\u0120bleed": 30182, "\u0120Stro": 30183, "\u0120amusement": 30184, "\u0120Antarctica": 30185, "\u0120scept": 30186, "\u0120Gaw": 30187, "aq": 30188, "asonic": 30189, "\u0120sprawling": 30190, "native": 30191, "aturated": 30192, "\u0120Battlefield": 30193, "IVERS": 30194, "EB": 30195, "\u0120Gems": 30196, "\u0120Northwestern": 30197, "\u0120Films": 30198, "\u0120Automatic": 30199, "\u0120apprehend": 30200, "\u00e3\u0123\u00a8": 30201, "\u0120guiName": 30202, "\u0120backend": 30203, "\u0120evidenced": 30204, "geant": 30205, "012": 30206, "\u0120Siege": 30207, "\u0120externalTo": 30208, "\u0120unfocusedRange": 30209, "\u0120guiActiveUnfocused": 30210, "\u0120guiIcon": 30211, "\u0120externalToEVA": 30212, "\u0120externalToEVAOnly": 30213, "Fri": 30214, "chard": 30215, "enaries": 30216, "\u0120chiefs": 30217, "\u0120cf": 30218, "\u0120HUD": 30219, "\u0120corrobor": 30220, "\u0120dB": 30221, "\u0120Taken": 30222, "\u0120Patricia": 30223, "rail": 30224, "\u0120Charm": 30225, "\u0120Libertarian": 30226, "rieve": 30227, "Personal": 30228, "\u0120OUR": 30229, "geries": 30230, "\u0120dumping": 30231, "\u0120neurological": 30232, "itimate": 30233, "\u0120Clintons": 30234, "rafted": 30235, "\u0120Molly": 30236, "\u0120terminals": 30237, "register": 30238, "\u0120flare": 30239, "\u0120encoded": 30240, "\u0120autopsy": 30241, "pel": 30242, "machine": 30243, "\u0120exemptions": 30244, "\u0120Royals": 30245, "distance": 30246, "\u0120drafts": 30247, "\u0120lame": 30248, "\u0120Cunning": 30249, "\u0120spouses": 30250, "\u0120Markets": 30251, "\u0120Carrier": 30252, "\u0120implying": 30253, "\u0120Yak": 30254, "sid": 30255, "\u0120loser": 30256, "\u0120vigilant": 30257, "\u0120impeachment": 30258, "\u0120augmented": 30259, "\u0120Employees": 30260, "\u0120unintended": 30261, 
"ternally": 30262, "\u0120Watt": 30263, "\u0120recognizable": 30264, "essim": 30265, "\u00e6\u013f": 30266, "\u0120coated": 30267, "rha": 30268, "\u0120lieutenant": 30269, "\u0120Legislation": 30270, "published": 30271, "444": 30272, "013": 30273, "\u0120ideally": 30274, "\u0120Password": 30275, "\u0120simplify": 30276, "\u0120Meta": 30277, "\u0120MRI": 30278, "\u0120pleading": 30279, "organized": 30280, "handler": 30281, "\u0120unravel": 30282, "correct": 30283, "\u0120icy": 30284, "\u0120paranoid": 30285, "\u0120passer": 30286, "\u0120inspections": 30287, "ofer": 30288, "\u0120Healthcare": 30289, "283": 30290, "\u0120Brut": 30291, "iola": 30292, "forge": 30293, "\u0120Medieval": 30294, "MSN": 30295, "ievers": 30296, "\u0120Programming": 30297, "\u00e5\u012b": 30298, "\u0120223": 30299, "mu": 30300, "\u0120CLE": 30301, "uga": 30302, "\u0120shoppers": 30303, "\u0120informative": 30304, "\u0120Plans": 30305, "\u0120supplementation": 30306, "\u0120Tests": 30307, "tyard": 30308, "ocytes": 30309, "\u0120Vega": 30310, "\u0120Gujarat": 30311, "ermanent": 30312, "Except": 30313, "\u0120LOT": 30314, "alla": 30315, "\u0120Cumm": 30316, "\u0120Osw": 30317, "\u0120venom": 30318, "\u0120Debt": 30319, "\u0120DOWN": 30320, "\u0120reunion": 30321, "\u0120muc": 30322, "\u0120Relief": 30323, "\u0120geop": 30324, "\u0120\u00f0\u0141\u013a": 30325, "alogue": 30326, "Anth": 30327, "echo": 30328, "\u0120corros": 30329, "\u0120replication": 30330, "\u0120Blazing": 30331, "\u0120Daughter": 30332, "\u0120inflic": 30333, "\u0120Lindsey": 30334, "\u00d9\u012a": 30335, "284": 30336, "Exit": 30337, "\u0120gloom": 30338, "TAIN": 30339, "\u0120undermining": 30340, "\u0120advising": 30341, "hidden": 30342, "\u0120overflow": 30343, "\u0120gor": 30344, "urdue": 30345, "\u0120echoes": 30346, "enhagen": 30347, "\u0120impuls": 30348, "drug": 30349, "cash": 30350, "\u0120async": 30351, "\u0120mirac": 30352, "atts": 30353, "punk": 30354, "\u0120pivot": 30355, "\u0120Legislative": 30356, "\u0120bloggers": 30357, "\u0120Claw": 30358, "sburg": 30359, "dyl": 30360, "\u0120Recommend": 30361, "\u0120verte": 30362, "\u0120prohibiting": 30363, "\u0120Panther": 30364, "Jonathan": 30365, "\u0120omin": 30366, "\u0120hateful": 30367, "281": 30368, "\u0120Orche": 30369, "\u0120Murdoch": 30370, "downs": 30371, "\u0120asymm": 30372, "GER": 30373, "Always": 30374, "\u0120informs": 30375, "\u0120WM": 30376, "\u0120Pony": 30377, "\u0120Appendix": 30378, "\u0120Arlington": 30379, "Jam": 30380, "\u0120medicinal": 30381, "\u0120Slam": 30382, "ITIES": 30383, "\u0120reaff": 30384, "\u0120Ri": 30385, "FG": 30386, "Spring": 30387, "bool": 30388, "\u0120thighs": 30389, "\u0120markings": 30390, "\u0120Raqqa": 30391, "\u0120Lak": 30392, "poll": 30393, "tsky": 30394, "\u0120Morty": 30395, "\u0120Definition": 30396, "\u0120debunk": 30397, "endered": 30398, "\u0120Leone": 30399, "avers": 30400, "\u0120mortgages": 30401, "Apparently": 30402, "Nic": 30403, "haus": 30404, "\u0120Thousands": 30405, "auld": 30406, "\u0120mash": 30407, "shoot": 30408, "\u0120diarr": 30409, "\u0120consciously": 30410, "Hero": 30411, "eas": 30412, "\u0120Naturally": 30413, "\u0120Destroyer": 30414, "\u0120dashboard": 30415, "services": 30416, "Rog": 30417, "\u0120millennials": 30418, "\u0120invade": 30419, "-(": 30420, "\u0120commissions": 30421, "\u0120Auckland": 30422, "\u0120broadcasts": 30423, "\u0120frontal": 30424, "\u0120crank": 30425, "\u0120Historic": 30426, "\u0120rumours": 30427, "CTV": 30428, "\u0120steril": 30429, "\u0120booster": 30430, "rocket": 30431, 
"\u00e3\u0124\u00bc": 30432, "utsche": 30433, "\u0120PI": 30434, "\u0120233": 30435, "\u0120Producer": 30436, "\u0120Analytics": 30437, "\u0120invaluable": 30438, "\u0120unintention": 30439, "\u0120CY": 30440, "\u0120scrutin": 30441, "\u0120gigg": 30442, "\u0120engulf": 30443, "\u0120proletariat": 30444, "\u0120hacks": 30445, "\u0120Hew": 30446, "arak": 30447, "\u0120Slime": 30448, "ielding": 30449, "agher": 30450, "\u0120Elliot": 30451, "\u0120telecom": 30452, "\u0120219": 30453, "ultan": 30454, "\u0120Arbor": 30455, "\u0120Scouts": 30456, "Ban": 30457, "\u0120lifespan": 30458, "\u0120blasp": 30459, "388": 30460, "\u0120judiciary": 30461, "\u0120Continental": 30462, "asking": 30463, "McC": 30464, "LED": 30465, "\u0120baggage": 30466, "\u0120Sorcerer": 30467, "\u0120remnants": 30468, "\u0120Griffith": 30469, "etsu": 30470, "\u0120Subaru": 30471, "\u0120Personality": 30472, "designed": 30473, "ushima": 30474, "agnar": 30475, "\u0120recoil": 30476, "\u0120passions": 30477, "\\\":": 30478, "\u0120tee": 30479, "\u0120abolition": 30480, "\u0120Creating": 30481, "jac": 30482, "\u0120194": 30483, "019": 30484, "\u0120pillars": 30485, "riched": 30486, "/\"": 30487, "tk": 30488, "\u0120livelihood": 30489, "\u0120roasted": 30490, "ahon": 30491, "\u0120Hutch": 30492, "assert": 30493, "\u0120dividend": 30494, "\u0120knit": 30495, "\u0120daunting": 30496, "\u0120disturbance": 30497, "\u0120shale": 30498, "\u0120cultivated": 30499, "\u0120refrigerator": 30500, "LB": 30501, "\u0120NET": 30502, "\u0120commercials": 30503, "\u0120thinkers": 30504, "455": 30505, "\u0120chop": 30506, "Broad": 30507, "\u0120suspicions": 30508, "\u0120tagged": 30509, "lifting": 30510, "\u0120stylish": 30511, "\u0120Shields": 30512, "Shortly": 30513, "\u0120tails": 30514, "Auth": 30515, "STE": 30516, "\u0120GAME": 30517, "\u0120seism": 30518, "\u0120Kis": 30519, "ologne": 30520, "\u0120cowork": 30521, "\u0120forcibly": 30522, "\u0120thyroid": 30523, "\u0120PB": 30524, "ANE": 30525, "married": 30526, "horse": 30527, "\u0120polymer": 30528, "\u0120Chal": 30529, "odor": 30530, "DEBUG": 30531, "\u0120Context": 30532, "\u0120bliss": 30533, "\u0120pinpoint": 30534, "\u0120Mathemat": 30535, "legram": 30536, "\u0120Weekend": 30537, "\u0120labelled": 30538, "\u0120bart": 30539, "itles": 30540, "\u0120estrogen": 30541, "\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136\u00e2\u0122\u0136": 30542, "\"'": 30543, "\u0120visibly": 30544, "\u0120outsider": 30545, "aida": 30546, "Area": 30547, "\u0120dissemin": 30548, "\u0120dishonest": 30549, "\u0120Closed": 30550, "\u0120Bulletin": 30551, "\u0120Ramsey": 30552, "sword": 30553, "\u0120XI": 30554, "ourced": 30555, "Same": 30556, "346": 30557, "\u0120Repe": 30558, "\u0120Kou": 30559, "cake": 30560, "emis": 30561, "Cache": 30562, "\u0120Meaning": 30563, "\u0120Enlight": 30564, "onomy": 30565, "\u0120manifestation": 30566, "sworth": 30567, "Jay": 30568, "\u0120chore": 30569, "\u00c3\u00b6r": 30570, "Dream": 30571, "\u0120sanctioned": 30572, "\u0120culturally": 30573, "\u0120Ara": 30574, "Nav": 30575, "\u0120theological": 30576, "\u0120strut": 30577, "\u0120VO": 30578, "\u0120Handbook": 30579, "\u0120constructing": 30580, "\u0120\u00c2\u00b6": 30581, "\u0120Benefits": 30582, "\u0120Psychological": 30583, "sac": 30584, "\u00e5\u00b8": 30585, "policy": 30586, "\u0120Matters": 30587, 
"\u0120Reported": 30588, "\u0120Byte": 30589, "\u0120vitro": 30590, "\u0120Maiden": 30591, "\u0120lam": 30592, "\u0120Jennings": 30593, "\u0120garment": 30594, "\u0120Rutgers": 30595, "\u0120Stafford": 30596, "\u0120Wellington": 30597, "\u0120intermitt": 30598, "\u0120npm": 30599, "\u0120ordeal": 30600, "\u0120plugged": 30601, "ooming": 30602, "inished": 30603, "framework": 30604, "\u0120timber": 30605, "\u0120cass": 30606, "\u0120850": 30607, "iless": 30608, "\u0120Redux": 30609, "768": 30610, "Stre": 30611, "\u0120surpassed": 30612, "whel": 30613, "\u0120parallels": 30614, "\u0120veil": 30615, "\u0120GI": 30616, "\u0120REST": 30617, "\u0120readiness": 30618, "sort": 30619, "\u0120modifying": 30620, "\u0120Slate": 30621, "ruff": 30622, "\u0120marble": 30623, "\u0120infrared": 30624, "\u0120auditor": 30625, "\u0120FANTASY": 30626, "\u0120Poverty": 30627, "\u0120SPD": 30628, "\u0120\"(": 30629, "Ky": 30630, "RAY": 30631, "\u0120executions": 30632, "\u0120Beverly": 30633, "\u0120Marxism": 30634, "\u0120Burst": 30635, "\u0120Kali": 30636, "estones": 30637, "Clearly": 30638, "Ell": 30639, "\u00e3\u0123\u00a7": 30640, "\u0120Proceedings": 30641, "Token": 30642, "IFIC": 30643, "\u00c3\u00b1a": 30644, "Central": 30645, "\u0120Haley": 30646, "\u0120Drama": 30647, "\u0120formations": 30648, "ORN": 30649, "Books": 30650, "\u0120dominating": 30651, "\u0120Flyers": 30652, "\u0120Companion": 30653, "\u0120disciplined": 30654, "\u0120Yugoslav": 30655, "\u0120Spells": 30656, "\u0120vengeance": 30657, "\u0120landlords": 30658, "Len": 30659, "\u0120Ogre": 30660, "anoia": 30661, "\u0120piercing": 30662, "\u0120congreg": 30663, "\u0120scorer": 30664, "obia": 30665, "\u0120nickel": 30666, "\u0120Learns": 30667, "\u0120rejo": 30668, "\u0120masterpiece": 30669, "Flash": 30670, "\u0120inhabited": 30671, "\u0120OpenGL": 30672, "\u0120Dud": 30673, "\u0120ICO": 30674, "\u0120arter": 30675, "\u0120plur": 30676, "\u0120mastery": 30677, "\u0120longstanding": 30678, "sted": 30679, "\u0120wines": 30680, "\u0120televised": 30681, "\u0120Shrine": 30682, "\u0120Bayern": 30683, "\u0120\u00e2\u0135\u013a": 30684, "\u0120enclosure": 30685, "john": 30686, "\u0120prophets": 30687, "\u0120Resurrection": 30688, "\u0120Orders": 30689, "\u0120uneven": 30690, "rals": 30691, "\u0120dwind": 30692, "\u0120Lah": 30693, "\u0120Sloven": 30694, "378": 30695, "\u0120insistence": 30696, "affle": 30697, "\u0120Clone": 30698, "\u0120hardship": 30699, "\u0120Congressman": 30700, "\u0120plead": 30701, "\u0120reviewers": 30702, "\u0120cured": 30703, "\u01201935": 30704, "asley": 30705, "fake": 30706, "\u0120Thinking": 30707, "ydia": 30708, "PART": 30709, "\u0120Dota": 30710, "oit": 30711, "\u0120whipped": 30712, "\u0120bouncing": 30713, "\u0120Hispanics": 30714, "comings": 30715, "\u0120cannabin": 30716, "\u0120Chambers": 30717, "\u0120Zack": 30718, "Optional": 30719, "\u0120coats": 30720, "\u0120prowess": 30721, "\u0120Norton": 30722, "\u0120plainly": 30723, "\u0120freight": 30724, "\u0120inhibition": 30725, "\u0120clam": 30726, "\u0120303": 30727, "kef": 30728, "aleigh": 30729, "Luke": 30730, "\u0120psycho": 30731, "atorium": 30732, "MED": 30733, "\u0120treaties": 30734, "\u0120indisc": 30735, "\u0120dc": 30736, "OPS": 30737, "\u0120resilient": 30738, "\u0120Interstate": 30739, "\u0120slack": 30740, "\u0120mundane": 30741, "\u0120establishes": 30742, "359": 30743, "\u0120strained": 30744, "\u0120nond": 30745, "Sus": 30746, "\u0120caste": 30747, "arate": 30748, "ieving": 30749, "\u0120unfairly": 30750, "\u0120parser": 30751, "onial": 30752, 
"ursive": 30753, "Via": 30754, "\u0120Otto": 30755, "\u0120Authorities": 30756, "stroke": 30757, "KR": 30758, "\u0120Mercy": 30759, "\u0120furnished": 30760, "\u0120outset": 30761, "\u0120metic": 30762, "1982": 30763, "olithic": 30764, "\u0120Tent": 30765, "ogical": 30766, "\u0120Aircraft": 30767, "\u0120hides": 30768, "\u0120Became": 30769, "\u0120educators": 30770, "reaching": 30771, "\u0120volatility": 30772, "\u0120toddler": 30773, "\u0120NASCAR": 30774, "\u0120Twelve": 30775, "\u0120Highlights": 30776, "\u0120grape": 30777, "\u0120splits": 30778, "\u0120peasant": 30779, "\u0120reneg": 30780, "\u0120MSI": 30781, "Temp": 30782, "stars": 30783, "\u0120trek": 30784, "\u0120Hyde": 30785, "binding": 30786, "\u0120realism": 30787, "\u0120oxide": 30788, "\u0120Hos": 30789, "\u0120mounts": 30790, "\u0120biting": 30791, "\u0120collapsing": 30792, "\u0120postal": 30793, "\u0120museums": 30794, "\u0120detached": 30795, "\u0120respecting": 30796, "\u0120monopol": 30797, "\u0120workflow": 30798, "\u0120Cake": 30799, "Template": 30800, "\u0120Organisation": 30801, "\u0120persistence": 30802, "369": 30803, "Coming": 30804, "Brad": 30805, "\u0120redundant": 30806, "\u0120GTA": 30807, "\u0120bending": 30808, "\u0120revoked": 30809, "\u0120offending": 30810, "\u0120framing": 30811, "\u0120printf": 30812, "Commun": 30813, "members": 30814, "Outside": 30815, "\u0120construed": 30816, "\u0120coded": 30817, "FORE": 30818, "\u0120chast": 30819, "Chat": 30820, "Indian": 30821, "\u0120Yard": 30822, "?!\"": 30823, "\u0120Ports": 30824, "\u0120Xavier": 30825, "\u0120RET": 30826, "'.\"": 30827, "\u0120Boat": 30828, "ivated": 30829, "icht": 30830, "umerable": 30831, "Ds": 30832, "\u0120Dunn": 30833, "\u0120coffin": 30834, "\u0120securely": 30835, "\u0120Raptors": 30836, "\u0120Bes": 30837, "Installation": 30838, "\u0120inception": 30839, "\u0120Healthy": 30840, "endants": 30841, "\u0120psychologists": 30842, "\u0120Sheikh": 30843, "cultural": 30844, "\u0120BlackBerry": 30845, "shift": 30846, "Fred": 30847, "oche": 30848, "\u0120cakes": 30849, "\u0120SEO": 30850, "\u0120Gian": 30851, "\u0120Asians": 30852, "ogging": 30853, "element": 30854, "\u0120pundits": 30855, "\u0120Vaugh": 30856, "\u0120Gavin": 30857, "\u0120hitter": 30858, "\u0120drowned": 30859, "\u0120chalk": 30860, "\u0120Zika": 30861, "\u0120measles": 30862, "802": 30863, "\u00e2\u0122\u00a6..": 30864, "\u0120AWS": 30865, "]\"": 30866, "\u0120distort": 30867, "\u0120Mast": 30868, "\u0120antibodies": 30869, "\u0120Mash": 30870, "Memory": 30871, "\u0120Uganda": 30872, "\u0120Prob": 30873, "\u0120vomiting": 30874, "\u0120Turns": 30875, "\u0120occupying": 30876, "\u0120evasion": 30877, "\u0120Therapy": 30878, "\u0120promo": 30879, "\u0120electr": 30880, "\u0120blueprint": 30881, "\u0120Dre": 30882, "priced": 30883, "\u0120Depot": 30884, "\u0120alleviate": 30885, "\u0120Somali": 30886, "marg": 30887, "nine": 30888, "\u0120nostalgia": 30889, "\u0120Shepherd": 30890, "\u0120cavalry": 30891, "\u0120torped": 30892, "\u0120Bloody": 30893, "xb": 30894, "\u0120sank": 30895, "\u0120goalt": 30896, "reportprint": 30897, "embedreportprint": 30898, "cloneembedreportprint": 30899, "\u0120Initially": 30900, "\u0120Fischer": 30901, "\u0120noteworthy": 30902, "cern": 30903, "\u0120inefficient": 30904, "rawdownload": 30905, "rawdownloadcloneembedreportprint": 30906, "cation": 30907, "\u0120Dynasty": 30908, "lag": 30909, "DES": 30910, "\u0120distinctly": 30911, "\u0120Estonia": 30912, "\u0120openness": 30913, "\u0120gossip": 30914, "ruck": 30915, "Width": 30916, 
"\u0120Ibrahim": 30917, "\u0120petroleum": 30918, "\u0120avatar": 30919, "\u0120Hed": 30920, "atha": 30921, "\u0120Hogwarts": 30922, "\u0120caves": 30923, "678": 30924, "\u0120safeguard": 30925, "\u0120Mog": 30926, "isson": 30927, "\u0120Durham": 30928, "slaught": 30929, "\u0120Graduate": 30930, "\u0120subconscious": 30931, "\u0120Excellent": 30932, "\u0120Dum": 30933, "-----": 30934, "\u0120piles": 30935, "\u0120WORK": 30936, "\u0120Garn": 30937, "\u0120Fol": 30938, "\u0120ATM": 30939, "\u0120avoids": 30940, "\u0120Tul": 30941, "\u0120bleak": 30942, "ELY": 30943, "ivist": 30944, "lightly": 30945, "Pers": 30946, "\u0120Dob": 30947, "\u0120LS": 30948, "\u0120insanity": 30949, "\u00ce\u00b5": 30950, "atalie": 30951, "Enlarge": 30952, "\u0120twists": 30953, "\u0120faulty": 30954, "\u0120piracy": 30955, "\u0120impover": 30956, "\u0120rugged": 30957, "\u0120Fashion": 30958, "\u0120sands": 30959, "'?": 30960, "swick": 30961, "\u0120natives": 30962, "\u0120hen": 30963, "\u0120Noise": 30964, "\u00e3\u0125\u0139": 30965, "\u0120greens": 30966, "\u0120freezer": 30967, "\u0120dynasty": 30968, "\u0120Fathers": 30969, "\u0120Newark": 30970, "\u0120archaeological": 30971, "\u0120ot": 30972, "obar": 30973, "\u0120blockade": 30974, "\u0120allerg": 30975, "LV": 30976, "\u0120debit": 30977, "\u0120RFC": 30978, "\u0120Milton": 30979, "\u0120Pressure": 30980, "\u0120willingly": 30981, "\u0120disproportionate": 30982, "\u0120oppressive": 30983, "\u0120diamonds": 30984, "\u0120belongings": 30985, "1970": 30986, "\u0120bells": 30987, "\u0120imperialism": 30988, "\u0120227": 30989, "\u0120exploding": 30990, "\u0120Eclipse": 30991, "\u01201919": 30992, "\u0120rant": 30993, "\u0120nominations": 30994, "347": 30995, "\u0120peacefully": 30996, "rica": 30997, "\u0120FUCK": 30998, "\u0120vibration": 30999, "malink": 31000, "\u0120ropes": 31001, "\u0120Ivanka": 31002, "\u0120Brewery": 31003, "\u0120Booker": 31004, "\u0120Owens": 31005, "goers": 31006, "Services": 31007, "\u0120Snape": 31008, "\u0120191": 31009, "395": 31010, "\u0120299": 31011, "justice": 31012, "\u0120bri": 31013, "\u0120discs": 31014, "\u0120prominently": 31015, "\u0120vulgar": 31016, "\u0120skipping": 31017, "lves": 31018, "\u0120tsunami": 31019, "374": 31020, "\u0120Urug": 31021, "\u0120Eid": 31022, "recated": 31023, "phen": 31024, "\u0120faults": 31025, "\u0120Started": 31026, "950": 31027, "\u0120pi": 31028, "\u0120detector": 31029, "\u0120bastard": 31030, "\u0120validated": 31031, "SpaceEngineers": 31032, "OURCE": 31033, "\u0120(~": 31034, "\u0120unsur": 31035, "\u0120affirmed": 31036, "\u0120fascism": 31037, "\u0120resolving": 31038, "\u0120Chavez": 31039, "\u0120Cyn": 31040, "\u0120detract": 31041, "Lost": 31042, "\u0120rigged": 31043, "\u0120homage": 31044, "\u0120Bruno": 31045, "555": 31046, "eca": 31047, "\u0120presses": 31048, "\u0120humour": 31049, "\u0120spacing": 31050, "\u0120'/": 31051, "olkien": 31052, "Coun": 31053, "OPER": 31054, "Tre": 31055, "Son": 31056, "\u0120Cambodia": 31057, "ierre": 31058, "mong": 31059, "ozy": 31060, "\u0120liquidity": 31061, "\u0120Soviets": 31062, "\u0120Fernando": 31063, "\u0120229": 31064, "\u0120slug": 31065, "\u0120Catalan": 31066, "electric": 31067, "\u0120scenery": 31068, "\u0120Hearth": 31069, "\u0120constrained": 31070, "\u0120goalie": 31071, "\u0120Guidelines": 31072, "\u0120Ammo": 31073, "\u0120Pearson": 31074, "\u0120taxed": 31075, "\u0120fetus": 31076, "Response": 31077, "\u0120Alexis": 31078, "thia": 31079, "Guy": 31080, "\u0120reconstruct": 31081, "\u0120extremes": 31082, "\u0120concluding": 
31083, "\u0120Peg": 31084, "ooks": 31085, "\u0120deductions": 31086, "Rose": 31087, "\u0120groundbreaking": 31088, "\u0120Targ": 31089, "\u00e3\u0125\u0123": 31090, "\u0120Reve": 31091, "resource": 31092, "\u0120moons": 31093, "\u0120electromagnetic": 31094, "\u0120amidst": 31095, "\u0120Viktor": 31096, "NESS": 31097, "BACK": 31098, "\u0120commute": 31099, "\u0120Anaheim": 31100, "\u0120fluctuations": 31101, "640": 31102, "\u0120noodles": 31103, "\u0120Copenhagen": 31104, "\u0120Tide": 31105, "\u0120Grizz": 31106, "\u0120SEE": 31107, "\u0120pipelines": 31108, "\u0120scars": 31109, "endo": 31110, "agus": 31111, "\u0120ETF": 31112, "/#": 31113, "\u0120Become": 31114, "448": 31115, "\u0120visc": 31116, "\u0120Recommended": 31117, "\u0120jumper": 31118, "\u0120cognition": 31119, "\u0120assassin": 31120, "\u0120witnessing": 31121, "\u0120Setup": 31122, "\u0120lac": 31123, "vim": 31124, "ISM": 31125, "pages": 31126, "SSL": 31127, "358": 31128, "\u0120adject": 31129, "industrial": 31130, "lore": 31131, "chery": 31132, "\u0120glitter": 31133, "\u0120calf": 31134, "Florida": 31135, "\u0120spoilers": 31136, "\u0120succeeds": 31137, "\u0120chanting": 31138, "\u0120slogans": 31139, "\u0120Tracy": 31140, "Visit": 31141, "rology": 31142, "\u0120mornings": 31143, "\u0120lineage": 31144, "\u0120sip": 31145, "\u0120intensely": 31146, "\u0120flourish": 31147, "\u0120Sleeping": 31148, "\u0120Fem": 31149, "orpor": 31150, "\u0120Klan": 31151, "\u0120Darth": 31152, "hack": 31153, "\u0120Nielsen": 31154, "\u0120tumors": 31155, "\u0120procurement": 31156, "\u0120Yorkshire": 31157, "\u0120raided": 31158, "KY": 31159, "Anna": 31160, "\u0120//[": 31161, "\u0120Disorder": 31162, "\u0120Mustang": 31163, "\u0120Wen": 31164, "\u0120Trying": 31165, "sq": 31166, "\u0120deliveries": 31167, "\u0120shutter": 31168, "\u0120cerebral": 31169, "\u0120bipolar": 31170, "\u0120CN": 31171, "lass": 31172, "jet": 31173, "\u0120debating": 31174, ">:": 31175, "\u0120eagle": 31176, "grades": 31177, "\u0120Dixon": 31178, "UGC": 31179, "MAS": 31180, "\u0120Draco": 31181, "\u0120Machines": 31182, "affer": 31183, "\u0120eman": 31184, "\u00c2\u00b2": 31185, "pron": 31186, "\u0120Gym": 31187, "\u0120comparatively": 31188, "\u0120Tribunal": 31189, "PRO": 31190, "\u0120lex": 31191, "\u0120fertile": 31192, "\u0120depressing": 31193, "\u0120superficial": 31194, "essential": 31195, "\u0120Hunters": 31196, "gp": 31197, "\u0120prominence": 31198, "Liber": 31199, "\u0120Ancest": 31200, "otechnology": 31201, "\u0120mocking": 31202, "\u0120Traff": 31203, "\u0138\u013c": 31204, "Medium": 31205, "Iraq": 31206, "\u0120psychiatrist": 31207, "Quantity": 31208, "\u0120Lect": 31209, "\u0120noisy": 31210, "520": 31211, "GY": 31212, "\u0120slapped": 31213, "\u0120MTV": 31214, "\u0120para": 31215, "pull": 31216, "Multiple": 31217, "asher": 31218, "\u0120nour": 31219, "\u0120Seg": 31220, "Spell": 31221, "vous": 31222, "ordial": 31223, "Senior": 31224, "\u0120Goldberg": 31225, "\u0120Plasma": 31226, "need": 31227, "\u0120messenger": 31228, "eret": 31229, "\u0120teamed": 31230, "\u0120literacy": 31231, "\u0120Leah": 31232, "\u0120Doyle": 31233, "\u0120emitted": 31234, "UX": 31235, "\u0120evade": 31236, "\u0120maze": 31237, "\u0120wrongly": 31238, "\u0120Lars": 31239, "\u0120stereotype": 31240, "\u0120pledges": 31241, "\u0120aroma": 31242, "\u0120MET": 31243, "\u0120acre": 31244, "\u0120OD": 31245, "\u0120ff": 31246, "\u0120breweries": 31247, "\u0120Hilton": 31248, "undle": 31249, "\u0120Kak": 31250, "\u0120Thankfully": 31251, "\u0120Canucks": 31252, "inctions": 
31253, "\u0120Appears": 31254, "\u0120coer": 31255, "\u0120undermined": 31256, "rovers": 31257, "Andre": 31258, "\u0120blaze": 31259, "umers": 31260, "\u0120famine": 31261, "amphetamine": 31262, "ulkan": 31263, "Amount": 31264, "\u0120desperation": 31265, "wikipedia": 31266, "development": 31267, "\u0120Corinth": 31268, "ussia": 31269, "Jackson": 31270, "LI": 31271, "Native": 31272, "Rs": 31273, "Ohio": 31274, "\u0120Kathleen": 31275, "Fortunately": 31276, "\u0120attendant": 31277, "\u0120Preferred": 31278, "\u0120Didn": 31279, "\u0120Vs": 31280, "Mis": 31281, "\u0120respondent": 31282, "\u0120boun": 31283, "stable": 31284, "\u0120paved": 31285, "\u0120unexpl": 31286, "\u0120Cheney": 31287, "LM": 31288, "\u0120Cull": 31289, "blown": 31290, "\u0120confronting": 31291, "ocese": 31292, "serving": 31293, "Wi": 31294, "\u0120Lithuania": 31295, "anni": 31296, "\u0120stalk": 31297, "hd": 31298, "\u0120vener": 31299, "APH": 31300, "ynchronous": 31301, "URR": 31302, "umably": 31303, "historic": 31304, "Half": 31305, "Hay": 31306, "\u0120resilience": 31307, "spection": 31308, "\u0120abandoning": 31309, "Obs": 31310, "\u0120Debbie": 31311, "\u0120gradient": 31312, "\u0120Plaint": 31313, "\u0120Canal": 31314, "ARCH": 31315, "\u0120expansive": 31316, "\u0120fung": 31317, "\u0120bounced": 31318, "Und": 31319, "\u0120precautions": 31320, "\u0120clarification": 31321, "\u0120dagger": 31322, "\u0120grips": 31323, "\u0120\u00c2\u00b5": 31324, "\u0120Rivera": 31325, "\u0120Undead": 31326, "isites": 31327, "\u0120FIRST": 31328, "\u00c3\u00b1o": 31329, "audi": 31330, "\u0120hostages": 31331, "\u0120compliant": 31332, "\u0120alumni": 31333, "Seven": 31334, "\u0120cybersecurity": 31335, "either": 31336, "Collect": 31337, "\u0120invariably": 31338, "\u0120Soci": 31339, "\u0120lawmaker": 31340, "\u0120ale": 31341, "\u0120Personally": 31342, "Nazi": 31343, "\u0120customization": 31344, "\u0120Proc": 31345, "\u0120Saskatchewan": 31346, "eaturing": 31347, "\u0120spared": 31348, "\u0120discontinued": 31349, "\u0120computational": 31350, "\u0120Motorola": 31351, "\u0120supremacist": 31352, "governmental": 31353, "\u0120paradise": 31354, "\u0120Downing": 31355, "\u0120Nikon": 31356, "\u0120catalyst": 31357, "berra": 31358, "Toronto": 31359, "875": 31360, "beta": 31361, "\u0120Macron": 31362, "\u0120unrealistic": 31363, "vector": 31364, "\u0120Vehicles": 31365, "itiveness": 31366, "\u0120RV": 31367, "\u0120Colbert": 31368, "sin": 31369, "oji": 31370, "entin": 31371, "\u0120Krish": 31372, "hello": 31373, "ffield": 31374, "oky": 31375, "\u0120Tate": 31376, "\u0120maple": 31377, "\u0120aids": 31378, "chemical": 31379, "334": 31380, "nuts": 31381, "\u0120Warp": 31382, "\u0120xx": 31383, "\u0120Robb": 31384, "umerous": 31385, "_-_": 31386, "ftime": 31387, "\u0120VW": 31388, "\u0120winger": 31389, "\u0120Dome": 31390, "tools": 31391, "\u0120PV": 31392, "\u0120Georgetown": 31393, "\u0120geared": 31394, "\u0120jihadists": 31395, "\u0120cp": 31396, "\u0120steroids": 31397, "Mother": 31398, "clerosis": 31399, "\u0120DRM": 31400, "nesia": 31401, "\u0120linger": 31402, "\u0120immersive": 31403, "\u0120COUN": 31404, "\u0120outweigh": 31405, "ensual": 31406, "Band": 31407, "\u0120transforms": 31408, "matched": 31409, "psons": 31410, "\u0120Judicial": 31411, "factor": 31412, "\u0120referral": 31413, "\u0120oddly": 31414, "\u0120Wenger": 31415, "Bring": 31416, "\u0120Bows": 31417, "602": 31418, "ICLE": 31419, "\u0120lions": 31420, "\u0120Academic": 31421, "\u0120Thorn": 31422, "\u0120Raider": 31423, "kefeller": 31424, "Storage": 31425, 
"Lower": 31426, "\u0120Ort": 31427, "\u0120Equality": 31428, "ALT": 31429, "\u0120SOC": 31430, "Types": 31431, "\u0120lyn": 31432, "\u0120Asset": 31433, "coat": 31434, "TPP": 31435, "CVE": 31436, "\u0120Pioneer": 31437, "application": 31438, "Modern": 31439, "\u0120HK": 31440, "Environment": 31441, "Alright": 31442, "Rain": 31443, "IPP": 31444, "\u0120Shiite": 31445, "\u0120mound": 31446, "\u0120Abilities": 31447, "condition": 31448, "Staff": 31449, "\u0120competence": 31450, "\u0120Moor": 31451, "\u0120Diablo": 31452, "\u0120withheld": 31453, "\u0120ostensibly": 31454, "\u0120Brom": 31455, "\u0120msg": 31456, "\u0120denomin": 31457, "\u0120References": 31458, "\u0120FP": 31459, "\u0120plunged": 31460, "\u0120pamph": 31461, "moving": 31462, "central": 31463, "\u0120downright": 31464, "\u0120fading": 31465, "Tal": 31466, "Typ": 31467, "\u0120Thy": 31468, "ukes": 31469, "ithe": 31470, "\u0120ove": 31471, "\u0120battled": 31472, "\u0120seafood": 31473, "\u0120figur": 31474, "\u0120RD": 31475, "crop": 31476, "\u0120squads": 31477, "{\\": 31478, "\u00e0\u00b9": 31479, "\u0120Eh": 31480, "\u0120interviewing": 31481, "\u0120Qin": 31482, "\u0120aspiring": 31483, "PLIC": 31484, "\u0120clauses": 31485, "\u0120Gast": 31486, "\u0120Nir": 31487, "\u0120luggage": 31488, "\u0120hose": 31489, "\u0120systemd": 31490, "\u0120descending": 31491, "\u0120Revised": 31492, "\u0120Rails": 31493, "align": 31494, "709": 31495, "337": 31496, "\u0120fug": 31497, "charging": 31498, "tags": 31499, "\u0120uter": 31500, "kish": 31501, "WARNING": 31502, "490": 31503, "profits": 31504, "\u0120voyage": 31505, "\u0120ace": 31506, "\u0120Vanguard": 31507, "\u0120Tanks": 31508, "\u0120Muk": 31509, "\u0120226": 31510, "Safe": 31511, "Armor": 31512, "\u0120volcanic": 31513, "\u0120womb": 31514, "\u0120MIL": 31515, "\u0120beginner": 31516, "\u0120Recogn": 31517, "\u0120AAP": 31518, "PLAY": 31519, ")!": 31520, "\u0120detecting": 31521, "cn": 31522, "\u0120breaches": 31523, "Basically": 31524, "\u0120Pag": 31525, "\u0120Municipal": 31526, "\u0120Indie": 31527, "\u0120Laf": 31528, "\u0120Disable": 31529, "\u0120Olson": 31530, "\u0120restrained": 31531, "\u0120rulings": 31532, "\u0120humane": 31533, "events": 31534, "\u0120Cinema": 31535, "displayText": 31536, "\u0120Hatch": 31537, "actionDate": 31538, "onnaissance": 31539, "\u0120assaulting": 31540, "\u0120Lug": 31541, "CHAT": 31542, "\u0120vigorous": 31543, "\u0120Perse": 31544, "\u0120intolerance": 31545, "\u0120Snapchat": 31546, "\u0120Sharks": 31547, "\u0120dummy": 31548, "\u0120Diagn": 31549, "\u0120Guitar": 31550, "imeters": 31551, "403": 31552, "REG": 31553, "Ax": 31554, "\u0120separates": 31555, "\u0120Mahm": 31556, "\u0120tv": 31557, "jah": 31558, "OOL": 31559, "Circ": 31560, "\u0120Windsor": 31561, "ussian": 31562, "\u0120intuition": 31563, "\u0120disdain": 31564, "\u0120Donovan": 31565, "\u0120221": 31566, "Emb": 31567, "\u0120condemning": 31568, "\u0120generosity": 31569, "zzy": 31570, "\u0120panties": 31571, "\u0120Prevent": 31572, "ActionCode": 31573, "ANA": 31574, "342": 31575, "externalActionCode": 31576, "\u0120specifying": 31577, "\u0120crystall": 31578, "Jere": 31579, "\u0120rupt": 31580, "\u0120Apprentice": 31581, "\u0120profiling": 31582, "\u00d0\u00ba": 31583, "Strike": 31584, "\u0120sideline": 31585, "\u0120obligated": 31586, "\u0120occult": 31587, "\u0120bureaucratic": 31588, "antically": 31589, "rupted": 31590, "negative": 31591, "\u0120Ethiopia": 31592, "\u0120Civic": 31593, "\u0120insiders": 31594, "eligible": 31595, "\u0120TVs": 31596, "\u0120BAR": 31597, 
"\u0120TI": 31598, "iologist": 31599, "\u0120AIR": 31600, "\u0120substituted": 31601, "Arab": 31602, "\u0120Saul": 31603, "\u0120Yog": 31604, "prem": 31605, "\u0120builders": 31606, "\u0120stationary": 31607, "\u0120doubtful": 31608, "\u0120vigorously": 31609, "\u0120thrilling": 31610, "Physical": 31611, "\u0120Carey": 31612, "\u0120Hydra": 31613, "geoning": 31614, "\u0120Sly": 31615, "yton": 31616, "\u0120borrowers": 31617, "\u0120Parkinson": 31618, "\u0120\u00eb": 31619, "\u0120Jamaica": 31620, "\u0120satir": 31621, "\u0120insurgents": 31622, "\u0120Firm": 31623, "\u0120isot": 31624, "\u0120Karn": 31625, "ourning": 31626, "akens": 31627, "docs": 31628, "little": 31629, "\u0120Monaco": 31630, "CLASS": 31631, "Turkey": 31632, "Ly": 31633, "\u0120Conan": 31634, "assic": 31635, "\u0120starred": 31636, "\u0120Pacers": 31637, "eties": 31638, "\u0120tipping": 31639, "Moon": 31640, "\u0120Rw": 31641, "same": 31642, "\u0120cavity": 31643, "\u0120goof": 31644, "\u0120Zo": 31645, "Shock": 31646, "ummer": 31647, "\u0120emphasizes": 31648, "\u0120regrett": 31649, "\u0120novelty": 31650, "\u0120envy": 31651, "\u0120Passive": 31652, "rw": 31653, "505": 31654, "\u0120indifferent": 31655, "\u0120Rica": 31656, "\u0120Himself": 31657, "\u0120Freddie": 31658, "\u0120adip": 31659, "\u00e4\u00b8\u0122": 31660, "\u0120breakout": 31661, "\u0120hurried": 31662, "\u0120Huang": 31663, "\u0120Disk": 31664, "\u0120roaming": 31665, "?????-?????-": 31666, "UV": 31667, "\u0120Ricky": 31668, "\u0120Sigma": 31669, "\u0120marginalized": 31670, "\u0120edits": 31671, "\u0120304": 31672, "memory": 31673, "\u0120specimen": 31674, "293": 31675, "\u00e3\u0123\u00af": 31676, "\u0120vertically": 31677, "\u0120audition": 31678, "\u0120Heck": 31679, "\u0120caster": 31680, "\u0120Holdings": 31681, "adal": 31682, "\u0120Cron": 31683, "\u0120Liam": 31684, "\u0120deflect": 31685, "Pick": 31686, "\u0120Debug": 31687, "REF": 31688, "\u0120versatility": 31689, "othes": 31690, "classified": 31691, "\u0120Mahar": 31692, "\u0120Hort": 31693, "Counter": 31694, "stasy": 31695, "noticed": 31696, "331": 31697, "\u0120Shim": 31698, "fuck": 31699, "\u0120Bie": 31700, "\u0120airing": 31701, "\u0120Protein": 31702, "\u0120Holding": 31703, "\u0120spectators": 31704, "iliated": 31705, "\u0120Thatcher": 31706, "nosis": 31707, "\u00e3\u0125\u00bc\u00e3\u0125\u00b3": 31708, "Tele": 31709, "Boston": 31710, "\u0120Templ": 31711, "stay": 31712, "\u0120declarations": 31713, "479": 31714, "Volume": 31715, "\u0120Designer": 31716, "\u0120Overwatch": 31717, "idae": 31718, "\u0120onwards": 31719, "\u0120nets": 31720, "\u0120Manila": 31721, "particularly": 31722, "\u0120politic": 31723, "oother": 31724, "\u0120portraits": 31725, "\u0120pavement": 31726, "cffff": 31727, "\u0120saints": 31728, "\u0120beginners": 31729, "ESPN": 31730, "\u0120shortcomings": 31731, "\u00e2\u0137\u0132\u00e2\u0137\u0132": 31732, "\u0120comet": 31733, "\u0120Organic": 31734, "quel": 31735, "\u0120hospitalized": 31736, "Break": 31737, "\u0120peel": 31738, "dylib": 31739, "aspx": 31740, "urances": 31741, "\u0120TIM": 31742, "Pg": 31743, "\u0120readable": 31744, "\u0120Malik": 31745, "\u0120muzzle": 31746, "\u0120benchmarks": 31747, "dal": 31748, "\u0120Vacc": 31749, "\u0120Hicks": 31750, "609": 31751, "\u0120Biblical": 31752, "heng": 31753, "\u0120overload": 31754, "\u0120Civilization": 31755, "\u0120immoral": 31756, "\u0120fries": 31757, "\u00e3\u0124\u0134": 31758, "\u0120reproduced": 31759, "\u0120formulation": 31760, "jug": 31761, "irez": 31762, "gear": 31763, "\u0120coached": 31764, 
"MpServer": 31765, "\u0120SJ": 31766, "\u0120Kw": 31767, "Init": 31768, "deal": 31769, "\u0120Oro": 31770, "\u0120Loki": 31771, "\u0120Songs": 31772, "\u0120232": 31773, "\u0120Louise": 31774, "asionally": 31775, "\u0120uncond": 31776, "ollywood": 31777, "\u0120progressives": 31778, "\u0120Enough": 31779, "\u0120Doe": 31780, "\u0120wreckage": 31781, "\u0120brushed": 31782, "\u0120BaseType": 31783, "\u0120zoning": 31784, "ishable": 31785, "hetically": 31786, "\u0120Caucus": 31787, "\u0120Hue": 31788, "\u0120karma": 31789, "\u0120Sporting": 31790, "\u0120trader": 31791, "\u0120seeming": 31792, "\u0120Capture": 31793, "430": 31794, "bish": 31795, "\u0120tunes": 31796, "\u0120indoors": 31797, "\u0120Sphere": 31798, "\u0120Dancing": 31799, "TERN": 31800, "\u0120nob": 31801, "\u0120GST": 31802, "maps": 31803, "\u0120peppers": 31804, "Fit": 31805, "\u0120oversees": 31806, "\u0120Rabbi": 31807, "\u0120Ruler": 31808, "vertising": 31809, "office": 31810, "xxx": 31811, "\u0120raft": 31812, "Changed": 31813, "\u0120textbooks": 31814, "Links": 31815, "\u0120Omn": 31816, "\u00e3\u0122\u0133": 31817, "\u0120inconvenience": 31818, "\u0120Donetsk": 31819, "=~": 31820, "\u0120implicitly": 31821, "\u0120boosts": 31822, "\u0120Bones": 31823, "\u0120Boom": 31824, "Courtesy": 31825, "\u0120sensational": 31826, "ANY": 31827, "\u0120greedy": 31828, "eden": 31829, "\u0120inexper": 31830, "\u0120Ler": 31831, "\u0120Vale": 31832, "\u0120tighten": 31833, "\u0120EAR": 31834, "\u0120Num": 31835, "\u0120ancestor": 31836, "Sent": 31837, "\u0120Horde": 31838, "urgical": 31839, "allah": 31840, "\u0120sap": 31841, "amba": 31842, "\u0120Spread": 31843, "twitch": 31844, "\u0120grandson": 31845, "\u0120fracture": 31846, "\u0120moderator": 31847, "\u0120Seventh": 31848, "\u0120Reverse": 31849, "\u0120estimation": 31850, "Choose": 31851, "\u0120parach": 31852, "\u0120barric": 31853, "\u00e3\u0122\u0132": 31854, "\u0120compass": 31855, "\u0120allergic": 31856, "\u00e2\u0122\u0137": 31857, "OTHER": 31858, "errilla": 31859, "\u0120wagon": 31860, "\u0120zinc": 31861, "\u0120rubbed": 31862, "\u0120Fuller": 31863, "\u0120Luxembourg": 31864, "\u0120Hoover": 31865, "\u0120liar": 31866, "\u0120Evening": 31867, "\u0120Cobb": 31868, "esteem": 31869, "\u0120selector": 31870, "\u0120Brawl": 31871, "isance": 31872, "\u0120Ek": 31873, "\u0120troop": 31874, "\u0120guts": 31875, "\u0120Appeal": 31876, "\u0120Tibetan": 31877, "\u0120routines": 31878, "\u0120Ment": 31879, "\u0120summarized": 31880, "steamapps": 31881, "\u0120tranqu": 31882, "\u01201929": 31883, "oran": 31884, "\u0120Authent": 31885, "\u0120gmaxwell": 31886, "\u0120apprehens": 31887, "\u0120poems": 31888, "\u0120sausage": 31889, "\u0120Webster": 31890, "urus": 31891, "\u0120themed": 31892, "\u0120lounge": 31893, "\u0120charger": 31894, "Spoiler": 31895, "\u0120spilled": 31896, "hog": 31897, "\u0120Sunder": 31898, "\u0120Ain": 31899, "\u0120Angry": 31900, "\u0120disqual": 31901, "\u0120Frequency": 31902, "\u0120Ethernet": 31903, "\u0120helper": 31904, "Percent": 31905, "\u0120horrifying": 31906, "\u0120ail": 31907, "\u0120Allan": 31908, "EEE": 31909, "\u0120Crossing": 31910, "449": 31911, "\u0120holog": 31912, "\u0120Puzzles": 31913, "\u0120Goes": 31914, "erenn": 31915, "604": 31916, "\u00e3\u0123\u0131": 31917, "\u0120Rafael": 31918, "\u0120atten": 31919, "\u0120Emanuel": 31920, "\u0120upro": 31921, "\u0120Susp": 31922, "Psych": 31923, "\u0120Trainer": 31924, "\u0120NES": 31925, "\u0120Hunts": 31926, "becue": 31927, "\u0120counselor": 31928, "Rule": 31929, "\u0120toxins": 31930, 
"\u0120banners": 31931, "rifice": 31932, "\u0120greeting": 31933, "\u0120frenzy": 31934, "\u0120allocate": 31935, "\u0120*)": 31936, "expr": 31937, "503": 31938, "\u0120Chick": 31939, "\u0120Torn": 31940, "\u0120consolidation": 31941, "\u0120Fletcher": 31942, "switch": 31943, "frac": 31944, "clips": 31945, "\u0120McKin": 31946, "\u0120Lunar": 31947, "Month": 31948, "ITCH": 31949, "\u0120scholarly": 31950, "raped": 31951, "398": 31952, "\u01201910": 31953, "\u0120egreg": 31954, "\u0120insecure": 31955, "\u0120victorious": 31956, "cffffcc": 31957, "\u0120singled": 31958, "\u0120elves": 31959, "\u0120Wond": 31960, "burst": 31961, "\u0120camoufl": 31962, "\u0120BLACK": 31963, "\u0120conditioned": 31964, "\u00e7\u012b": 31965, "answered": 31966, "\u0120compulsory": 31967, "ascist": 31968, "\u0120podcasts": 31969, "\u0120Frankfurt": 31970, "bnb": 31971, "\u0120neoliberal": 31972, "\u0120Keyboard": 31973, "\u0120Belle": 31974, "warm": 31975, "\u0120trusts": 31976, "\u0120insured": 31977, "\u0120Bucc": 31978, "usable": 31979, "607": 31980, "\u0120Plains": 31981, "\u01201890": 31982, "\u0120sabotage": 31983, "\u0120lodged": 31984, "felt": 31985, "\u0120ga": 31986, "\u0120Narc": 31987, "\u0120Salem": 31988, "\u0120seventy": 31989, "\u0120Blank": 31990, "pocket": 31991, "\u0120whisper": 31992, "\u0120mating": 31993, "omics": 31994, "\u0120Salman": 31995, "\u0120Kad": 31996, "\u0120angered": 31997, "\u0120collisions": 31998, "\u0120extraordinarily": 31999, "\u0120coercion": 32000, "Ghost": 32001, "birds": 32002, "\u00e8\u0122": 32003, "kok": 32004, "\u0120permissible": 32005, "avorable": 32006, "\u0120pointers": 32007, "\u0120dissip": 32008, "aci": 32009, "\u0120theatrical": 32010, "\u0120Cosmic": 32011, "\u0120forgetting": 32012, "\u0120finalized": 32013, "\u00e5\u00a4\u00a7": 32014, "yout": 32015, "library": 32016, "\u0120booming": 32017, "\u0120Believe": 32018, "\u0120Teacher": 32019, "\u0120Liv": 32020, "\u0120GOODMAN": 32021, "\u0120Dominican": 32022, "ORED": 32023, "\u0120Parties": 32024, "\u0120precipitation": 32025, "\u0120Slot": 32026, "Roy": 32027, "\u0120Combined": 32028, "\u0120integrating": 32029, "\u0120chrome": 32030, "\u0120intestinal": 32031, "\u0120Rebell": 32032, "\u0120matchups": 32033, "\u0120blockbuster": 32034, "\u0120Loren": 32035, "\u0120Levy": 32036, "\u0120preaching": 32037, "\u0120Sending": 32038, "\u0120Purpose": 32039, "rax": 32040, "fif": 32041, "\u0120authoritative": 32042, "\u0120PET": 32043, "astical": 32044, "\u0120dishon": 32045, "\u0120chatting": 32046, "\u0120\"$:/": 32047, "Connection": 32048, "\u0120recreate": 32049, "\u0120delinqu": 32050, "\u0120broth": 32051, "\u0120Dirty": 32052, "\u0120Admin": 32053, "zman": 32054, "\u0120scholarships": 32055, "\u0120253": 32056, "contact": 32057, "alsa": 32058, "767": 32059, "creen": 32060, "abbage": 32061, "\u01201915": 32062, "\u0120blended": 32063, "\u0120alarmed": 32064, "Language": 32065, "356": 32066, "\u0120blends": 32067, "\u0120Changed": 32068, "Wolf": 32069, "\u0120hepat": 32070, "Creating": 32071, "\u0120persecut": 32072, "\u0120sweetness": 32073, "arte": 32074, "\u0120forfeiture": 32075, "\u0120Roberto": 32076, "impro": 32077, "NFL": 32078, "\u0120Magnet": 32079, "Detailed": 32080, "\u0120insignificant": 32081, "\u0120POLIT": 32082, "\u0120BBQ": 32083, "\u0120CPS": 32084, "\u0120seaw": 32085, "aminer": 32086, "mL": 32087, "endif": 32088, "finals": 32089, "\u0120265": 32090, "uish": 32091, "\u0120})": 32092, "\u0120Problems": 32093, "\u0120emblem": 32094, "\u0120seriousness": 32095, "\u0120parsing": 32096, 
"\u0120substitution": 32097, "\u0120pressured": 32098, "\u0120recycled": 32099, "aleb": 32100, "Ruby": 32101, "\u0120proficiency": 32102, "Driver": 32103, "\u0120Wester": 32104, ":'": 32105, "AFTA": 32106, "\u0120mantle": 32107, "\u0120Clayton": 32108, "flag": 32109, "\u0120practitioner": 32110, "covered": 32111, "\u0120Struct": 32112, "addafi": 32113, "425": 32114, "\u0120Township": 32115, "\u0120Hydro": 32116, "Louis": 32117, "343": 32118, "\u0120condo": 32119, "\u0120Tao": 32120, "\u0120utilization": 32121, "\u0120nausea": 32122, "\u0120Dems": 32123, "ridges": 32124, "pause": 32125, "\u0120formulas": 32126, "\u0120challenger": 32127, "376": 32128, "\u0120defective": 32129, "\u0120Railway": 32130, "\u0120PubMed": 32131, "\u0120yogurt": 32132, "lbs": 32133, "\u0120Norfolk": 32134, "OPE": 32135, "\u0120Moody": 32136, "\u0120distributor": 32137, "\u0120scrolls": 32138, "\u0120extracts": 32139, "Stan": 32140, "\u0120viability": 32141, "\u0120exposes": 32142, "\u0120starvation": 32143, "\u0120Steps": 32144, "\u0120Dodd": 32145, "few": 32146, "STD": 32147, "332": 32148, "\u0120closures": 32149, "\u0120complementary": 32150, "\u0120Sasha": 32151, "umpy": 32152, "\u0120monet": 32153, "\u0120articulate": 32154, "\u0120Doct": 32155, "killer": 32156, "\u0120scrim": 32157, "\u0120264": 32158, "\u0120prostitutes": 32159, "\u0120severed": 32160, "\u0120attachments": 32161, "\u0120cooled": 32162, "Lev": 32163, "\u0120Falk": 32164, "fail": 32165, "\u0120policeman": 32166, "\u0120Dag": 32167, "\u0120prayed": 32168, "\u0120Kernel": 32169, "\u0120clut": 32170, "\u0120cath": 32171, "\u0120anomaly": 32172, "Storm": 32173, "emaker": 32174, "\u0120Breakfast": 32175, "uli": 32176, "oire": 32177, "JJ": 32178, "hz": 32179, "Operation": 32180, "\u0120Sick": 32181, "354": 32182, "\u0120Guatemala": 32183, "Rate": 32184, "\u0120exposures": 32185, "faces": 32186, "\u0120Archae": 32187, "raf": 32188, "\u0120Mia": 32189, "\u01202025": 32190, "\u0120opaque": 32191, "\u0120disguised": 32192, "\u0120Headquarters": 32193, "Sah": 32194, "\u0120pots": 32195, "978": 32196, "\u0120Malf": 32197, "\u0120frowned": 32198, "\u0120poisonous": 32199, "\u0120Convers": 32200, "eeks": 32201, "\u0120crab": 32202, ".\"\"": 32203, "\u0120treason": 32204, "\u0120ranc": 32205, "\u0120escalating": 32206, "\u0120warr": 32207, "\u0120mobs": 32208, "\u0120lamps": 32209, "\u0120Sunshine": 32210, "\u0120Brunswick": 32211, "Phones": 32212, "\u0120spelled": 32213, "\u0120Skip": 32214, "\u01202050": 32215, "\u01201911": 32216, "\u0120Pluto": 32217, "\u0120Amend": 32218, "\u0120meats": 32219, "387": 32220, "\u0120stomp": 32221, "\u0120Zhou": 32222, "\u0120Leviathan": 32223, "\u0120Hazard": 32224, "adv": 32225, "\u0120Orwell": 32226, "\u0120aloud": 32227, "\u0120bumper": 32228, "\u0120Anarch": 32229, "ubuntu": 32230, "\u0120Serious": 32231, "fitting": 32232, "\u0120Optional": 32233, "\u0120Cecil": 32234, "REAM": 32235, "\u0120serotonin": 32236, "\u0120cultivate": 32237, "agogue": 32238, "}\\": 32239, "\u0120mosques": 32240, "\u0120Sunny": 32241, "\u0120reactive": 32242, "revolution": 32243, "\u0120Lup": 32244, "\u0120Fedora": 32245, "\u0120defenseman": 32246, "\u0120VID": 32247, "istine": 32248, "\u0120drowning": 32249, "\u0120Broadcasting": 32250, "\u0120thriller": 32251, "\u0120Scy": 32252, "\u0120accelerating": 32253, "\u0120directs": 32254, "odied": 32255, "bike": 32256, "duration": 32257, "\u0120painfully": 32258, "Redd": 32259, "\u0120productions": 32260, "\u0120gag": 32261, "\u0120whist": 32262, "\u0120sock": 32263, "\u0120infinitely": 32264, 
"\u0120Concern": 32265, "\u0120Citadel": 32266, "\u0120lieu": 32267, "\u0120candles": 32268, "ogeneous": 32269, "arger": 32270, "\u0120heavenly": 32271, "inflammatory": 32272, "Performance": 32273, "Cs": 32274, "ructose": 32275, "azaki": 32276, "\u0120pessim": 32277, "\u0120inference": 32278, "\u0120powd": 32279, "\u0120Zoe": 32280, "\u0120paints": 32281, "\u0120dazz": 32282, "pta": 32283, "-----------": 32284, "\u0120inspir": 32285, "\u0120Experimental": 32286, "\u0120Knife": 32287, "regor": 32288, "bors": 32289, "\u0120showers": 32290, "romeda": 32291, "\u0120saint": 32292, "\u0120benign": 32293, "\u0120Jiang": 32294, "\u0120envisioned": 32295, "\u0120shroud": 32296, "IFT": 32297, "HO": 32298, "\u0120shuff": 32299, "\u0120ICC": 32300, "\u0120segreg": 32301, "\u0120revisit": 32302, "ighthouse": 32303, "Li": 32304, "\u0120substrate": 32305, "\u0120Seas": 32306, "\u0120Reward": 32307, "\u0120Hep": 32308, "\u0120Brass": 32309, "sbm": 32310, "\u0120eliminates": 32311, "\u0120stamina": 32312, "\u0120VAT": 32313, "\u0120Loan": 32314, "\u0120constraint": 32315, "\u0120appropriated": 32316, "\u0120pes": 32317, "\u0120ALE": 32318, "ranging": 32319, "\u0120404": 32320, "392": 32321, "\u0120intellectuals": 32322, "achu": 32323, "\u0120restructuring": 32324, "\u0120Levin": 32325, "\u0120runes": 32326, "\u0120delightful": 32327, "\u0120carbohydrates": 32328, "\u0120Models": 32329, "\u0120Expo": 32330, "\u0120transporting": 32331, "alloc": 32332, "\u0120ringing": 32333, "Samsung": 32334, "\u0120scarcely": 32335, "\u0120URLs": 32336, "\u0120MAS": 32337, "\u0120prototypes": 32338, "\u0120narrator": 32339, "\u0120CPUs": 32340, "cdn": 32341, "\u0120Barton": 32342, "\u0120decidedly": 32343, "\u0120Shu": 32344, "ixir": 32345, "ocious": 32346, "\u0120Myst": 32347, "Nintendo": 32348, "\u0120reuse": 32349, "\u0120forgiven": 32350, "Few": 32351, "inical": 32352, "nat": 32353, "\u0120seamless": 32354, "\u0120Eva": 32355, "\u0120EVE": 32356, "\u0120JO": 32357, "landers": 32358, "\u0120softer": 32359, "negie": 32360, "\u0120transient": 32361, "\u0120orbital": 32362, "\u0120fulfil": 32363, "\u0120Kom": 32364, "Hopefully": 32365, "\u0120dynamically": 32366, "\u0120Hunger": 32367, "\u00e5\u013d": 32368, "\u0120Armenia": 32369, "elman": 32370, "berto": 32371, "\u0120pige": 32372, "\u0120IDs": 32373, "limit": 32374, "\u0120veins": 32375, "\u0120soaring": 32376, "packs": 32377, "Golden": 32378, "\u0120Crab": 32379, "istor": 32380, "\u0120RPM": 32381, "\u0120$$": 32382, "gression": 32383, "\u0120jihadist": 32384, "\u0120gamble": 32385, "\u0120careg": 32386, "\u0120inflated": 32387, "Face": 32388, "\u0120Firearms": 32389, "\u0120Emmanuel": 32390, "\u00e2\u013f": 32391, "\u0120shocks": 32392, "grab": 32393, "\u0120splend": 32394, "\u0120HPV": 32395, "abortion": 32396, "Above": 32397, "Entity": 32398, "players": 32399, "\u0120commenced": 32400, "ulence": 32401, "\u0120fulfillment": 32402, "\u0120embodiments": 32403, "\u0120Welfare": 32404, "\u0120hail": 32405, "\u0120<@": 32406, "tten": 32407, "\u0120catcher": 32408, "\u0120Jazeera": 32409, "\u0120volcano": 32410, "\u0120stabilize": 32411, "\u0120Handler": 32412, "\u0120intensified": 32413, "\u0120Abrams": 32414, "\u0120humiliation": 32415, "paced": 32416, "605": 32417, "\u0120CentOS": 32418, "Specific": 32419, "\u0120heed": 32420, "\u0120CAM": 32421, "\u0120Galile": 32422, "Die": 32423, "\u0120abolished": 32424, "\u0120Thomson": 32425, "\u0120Teachers": 32426, "\u0120Wass": 32427, "jong": 32428, "\u0120ISBN": 32429, "\u0120Allies": 32430, "shake": 32431, "\u00e5\u00b7": 
32432, "vict": 32433, "Howard": 32434, "\u0120deem": 32435, "\u0120exceedingly": 32436, "\u0120Smartstocks": 32437, "ibe": 32438, "\u0120doorway": 32439, "\u0120competed": 32440, "igmat": 32441, "\u0120nationalists": 32442, "\u0120groom": 32443, "\u0120Keen": 32444, "\u0120disposable": 32445, "decl": 32446, "\u0120Tolkien": 32447, "\u0120Scheme": 32448, "\u0120biod": 32449, "\u0120avid": 32450, "\u0120Elon": 32451, "agar": 32452, "\u0120TSA": 32453, "Roman": 32454, "\u0120artificially": 32455, "\u0120advisors": 32456, "XL": 32457, "\u0120Inferno": 32458, "366": 32459, "\u0120tedious": 32460, "\u0120Photography": 32461, "\u0120Carrie": 32462, "\u0120trope": 32463, "\u0120Sandra": 32464, "\u0120decimal": 32465, "Queen": 32466, "\u0120Gundam": 32467, "\u0120OM": 32468, "otech": 32469, "NBA": 32470, "\u01201932": 32471, "\u0120entrenched": 32472, "\u0120Marion": 32473, "\u0120fraternity": 32474, "Labour": 32475, "Henry": 32476, "\u0120latitude": 32477, "Either": 32478, "\u0120enhances": 32479, "\u0120Potential": 32480, "\u0120shines": 32481, "idad": 32482, "\u0120breadth": 32483, "\u0120capacities": 32484, "\u0120\u00f0\u0141\u013b\u0124": 32485, "\u0120Bronx": 32486, "\u0120sexes": 32487, "\u0120differentiation": 32488, "\u0120heavyweight": 32489, "\u0120Taj": 32490, "dra": 32491, "\u0120migrate": 32492, "\u0120exhaustion": 32493, "\u0120RUN": 32494, "elsius": 32495, "\u0120Cuomo": 32496, "\u0120guitars": 32497, "\u0120clones": 32498, "\u0120Somew": 32499, "\u0120Pry": 32500, "-------------": 32501, "\u0120warranted": 32502, "cycles": 32503, "\u0120salvage": 32504, "\u0120disks": 32505, "RANT": 32506, "\u0120NGOs": 32507, "\u0120Martian": 32508, "\":[{\"": 32509, "\u0120addicts": 32510, "ojure": 32511, "illet": 32512, "\u0120amazingly": 32513, "artments": 32514, "pixel": 32515, "\u0120GPUs": 32516, "Layout": 32517, "\u00e8\u00a3": 32518, "\u0120Tamil": 32519, "\u0120Basil": 32520, "\u0120impartial": 32521, "\u0120Structure": 32522, "fork": 32523, "bryce": 32524, "\u0120ridge": 32525, "\u0120Hamburg": 32526, "rious": 32527, "\u0120blitz": 32528, "cigarettes": 32529, "\u0120canned": 32530, "402": 32531, "\u0120ironically": 32532, "\u0120compassionate": 32533, "\u0120Hawkins": 32534, ".#": 32535, "\u0120Cathedral": 32536, "\u0120rallied": 32537, "internal": 32538, "\u0120quota": 32539, "stakes": 32540, "TEXT": 32541, "mom": 32542, "\u0120completes": 32543, "\u0120238": 32544, "\u0120shrug": 32545, "\u00e3\u0125\u0133": 32546, "\u0120Ninth": 32547, "\u0120revise": 32548, "\u0120Provider": 32549, "\u0120treacher": 32550, "\u0120quasi": 32551, "\u0120PRES": 32552, "\u0120deposition": 32553, "\u0120confidentiality": 32554, "issors": 32555, "\u0120imbalance": 32556, "\u0120spanning": 32557, "\u0120angular": 32558, "\u0120Cul": 32559, "communication": 32560, "\u0120Nora": 32561, "\u0120Genius": 32562, "opter": 32563, "\u0120sacked": 32564, "Spot": 32565, "\u0120finely": 32566, "\u0120CHR": 32567, "282": 32568, "waves": 32569, "Palest": 32570, "\u0120Rohing": 32571, "NL": 32572, "\u00e8\u00bf": 32573, "\u0120shitty": 32574, "\u0120Scalia": 32575, "475": 32576, "Progress": 32577, "\u0120referencing": 32578, "\u0120classrooms": 32579, "abee": 32580, "\u0120sod": 32581, "hesion": 32582, "708": 32583, "\u0120Zuckerberg": 32584, "\u0120Finish": 32585, "\u0120Scotia": 32586, "\u0120Savior": 32587, "\u0120Installation": 32588, "antha": 32589, "(-": 32590, "\u0120302": 32591, "\u0120Punk": 32592, "\u0120crater": 32593, "youtu": 32594, "\u0120roast": 32595, "\u0120influencing": 32596, "\u0120dup": 32597, 
"\u0120JR": 32598, "\u0120Grav": 32599, "\u0120stature": 32600, "\u0120bathrooms": 32601, "Aside": 32602, "Wiki": 32603, "mean": 32604, "\u0120Zak": 32605, "\u0120Ones": 32606, "\u0120Nath": 32607, "\u0120hypert": 32608, "\u0120commencement": 32609, "Civil": 32610, "\u0120moderately": 32611, "\u0120distributors": 32612, "\u0120breastfeeding": 32613, "\u0120980": 32614, "\u0120Sik": 32615, "\u0120Cig": 32616, "\u0120AMER": 32617, "RIP": 32618, "\u0120Career": 32619, "usting": 32620, "\u0120messed": 32621, "\u0120eh": 32622, "\u0120Jensen": 32623, "/$": 32624, "\u0120blackmail": 32625, "\u0120conversions": 32626, "\u0120scientifically": 32627, "\u0120mantra": 32628, "paying": 32629, "\u0120ivory": 32630, "\u0120Courts": 32631, "OUGH": 32632, "auntlet": 32633, "Serial": 32634, "Brow": 32635, "\u0120Hundreds": 32636, "323": 32637, "\u0120pee": 32638, "\u0120linux": 32639, "\u0120submer": 32640, "\u0120Principal": 32641, "485": 32642, "\u0120DSL": 32643, "\u0120Cousins": 32644, "\u0120doctrines": 32645, "\u0120Athletics": 32646, "\u0120315": 32647, "\u0120Karma": 32648, "\u0120attent": 32649, "urger": 32650, "\u0120prescribe": 32651, "\u0120encaps": 32652, "\u0120Came": 32653, "\u0120secretive": 32654, "\u0120Crimes": 32655, "dn": 32656, "Clean": 32657, "\u0120Egyptians": 32658, "\u0120Carpenter": 32659, "\u0120ll": 32660, "Hum": 32661, "\u0120Milo": 32662, "\u0120capitalists": 32663, "\u0120briefed": 32664, "Twe": 32665, "\u0120Basin": 32666, "elvet": 32667, "Mos": 32668, "\u0120plunge": 32669, "\u0120Kaiser": 32670, "\u0120Fuj": 32671, "illin": 32672, "\u0120safeguards": 32673, "\u0120oste": 32674, "\u0120Opportunity": 32675, "\u0120Mafia": 32676, "\u0120Calling": 32677, "apa": 32678, "urban": 32679, "brush": 32680, "illard": 32681, "c\u00c3\u00a9": 32682, "intelligence": 32683, "\u0120Lob": 32684, "\u0120Druid": 32685, "\u0120smoother": 32686, "\u0120footing": 32687, "\u0120motorists": 32688, "arcity": 32689, "\u0120masculinity": 32690, "\u0120mism": 32691, "\u0120abdominal": 32692, "\u0120Tavern": 32693, "\u0120Roh": 32694, "\u0120escapes": 32695, "signed": 32696, "Anthony": 32697, "\u0120sacrificing": 32698, "\u0120intimacy": 32699, "\u0120anterior": 32700, "\u0120Kod": 32701, "\u0120motif": 32702, "\u0120graz": 32703, "\u0120visualization": 32704, "\u0120guitarist": 32705, "\u0120Trotsky": 32706, "magic": 32707, "Dar": 32708, "\u0120Mori": 32709, "\u0120wards": 32710, "\u0120toilets": 32711, "lest": 32712, "\u0120teleport": 32713, "\u0120Sundays": 32714, "\u0120Plat": 32715, "ETS": 32716, "\u0120eSports": 32717, "Patrick": 32718, "\u0120Katherine": 32719, "enko": 32720, "\u0120hassle": 32721, "\u0120Mick": 32722, "ggles": 32723, "\u0120hob": 32724, "aintain": 32725, "\u0120airborne": 32726, "\u0120spans": 32727, "\u0120chili": 32728, "\u0120aperture": 32729, "\u0120volunteered": 32730, "\u0120Incident": 32731, "\u0120Fres": 32732, "\u0120Veteran": 32733, "aughtered": 32734, "ingo": 32735, "\u0120uninsured": 32736, "CLOSE": 32737, "\u0120fuse": 32738, "\u0120erotic": 32739, "\u0120advertise": 32740, "raising": 32741, "Texture": 32742, "\u0120attends": 32743, "\u0120REAL": 32744, "uddled": 32745, "\u0120smoot": 32746, "\u0120305": 32747, "\u0120Willis": 32748, "\u0120blond": 32749, "Analysis": 32750, "\u0120VT": 32751, "onica": 32752, "\u0120stronghold": 32753, "RF": 32754, "NM": 32755, ".>>": 32756, "\u0120prosperous": 32757, "\u0120boasted": 32758, "292": 32759, "\u0120Manufacturing": 32760, "PRESS": 32761, "gren": 32762, "\u0120pharmacy": 32763, "\u0120Rockefeller": 32764, "kai": 32765, 
"\u0120thumbs": 32766, "\u0120Hut": 32767, "\u0120motherboard": 32768, "\u0120guardians": 32769, "\u0120Alter": 32770, "llular": 32771, "\u0120shack": 32772, "\u0120wisely": 32773, "\u0120backbone": 32774, "erva": 32775, "\u0120suicides": 32776, "\u0120McGregor": 32777, "ijah": 32778, "Emer": 32779, "\u0120Brav": 32780, "\u0120designate": 32781, "POST": 32782, "produced": 32783, "\u0120cleansing": 32784, "irlwind": 32785, "existent": 32786, "\u0120Humph": 32787, "\u0120Payne": 32788, "\u0120vested": 32789, "\u00c5\u00a1": 32790, "\u0120stringent": 32791, "iona": 32792, "\u0120unsub": 32793, "\u0120summed": 32794, "\u0120Hercules": 32795, "subject": 32796, "\u0120Ragnar": 32797, "\u0120Nos": 32798, "\u0120characterization": 32799, "\u0120savvy": 32800, "\u0120Dawson": 32801, "\u0120Casino": 32802, "\u0120fri": 32803, "\u0120Barrier": 32804, "\u0120misinformation": 32805, "\u0120insulation": 32806, "\u0120corridors": 32807, "\u0120airplanes": 32808, "\u0120Noct": 32809, "ahi": 32810, "\u01201916": 32811, "kb": 32812, "armac": 32813, "\u0120shun": 32814, "\u0120schema": 32815, "\u0120horrified": 32816, "\u0120239": 32817, "aunders": 32818, "NB": 32819, "iates": 32820, "erity": 32821, "\u0120Shard": 32822, "\u0120rarity": 32823, "\u0120grouped": 32824, "\u0120Ghana": 32825, "against": 32826, "\u0120Biological": 32827, "\u0120Aware": 32828, "owell": 32829, "\u00cf\u0126": 32830, "\u0120Beau": 32831, "shaw": 32832, "Hack": 32833, "\u0120Julius": 32834, "USS": 32835, "olson": 32836, "auna": 32837, "cru": 32838, "\u0120Maurice": 32839, "\u0120Ik": 32840, "\u0120sequencing": 32841, "\u0120radicals": 32842, "\u0120(?,": 32843, "virtual": 32844, "\u0120anyways": 32845, "\u0120reperc": 32846, "\u0120handlers": 32847, "\u0120hesitant": 32848, "\u00e9\u0125": 32849, "\u0120MF": 32850, "plementation": 32851, "associated": 32852, "\u0120campaigned": 32853, "\u0120Yue": 32854, "utations": 32855, "\u0120Yoga": 32856, "\u0120simmer": 32857, "\u0120rods": 32858, "\u0120melody": 32859, "\u0120convoy": 32860, "videos": 32861, "\u0120screened": 32862, "Neg": 32863, "ochemical": 32864, "\u0120())": 32865, "\u0120ultras": 32866, "\u0120antip": 32867, "\u0120Islanders": 32868, "704": 32869, "\u0120fetish": 32870, "\u0120ridiculously": 32871, "\u0120Kart": 32872, "\u0120mitochondrial": 32873, "\u0120interfering": 32874, "Builder": 32875, "\u0120overfl": 32876, "\u0120acne": 32877, "\u0120Mud": 32878, "\u0120Kerr": 32879, "flex": 32880, "\u0120Postal": 32881, "\u0120Baltic": 32882, "477": 32883, "\u0120Persons": 32884, "ourage": 32885, "HB": 32886, "\u0120Muse": 32887, "\u0120Immortal": 32888, "\u0120Driving": 32889, "\u0120petitions": 32890, "\u0120subscript": 32891, "\u0120sorce": 32892, "\u0120Processor": 32893, "uton": 32894, "Sony": 32895, "\u0120phon": 32896, "\u0120raced": 32897, "\u0120Anthrop": 32898, "\u0120daytime": 32899, "\u0120Exercise": 32900, "Adding": 32901, "\u0120engages": 32902, "\u0120Qualcomm": 32903, "\u0120miracles": 32904, "\u0120memes": 32905, "\u0120Drink": 32906, "\u0120Orioles": 32907, "\u0120hairs": 32908, "\u0120Polar": 32909, "athom": 32910, "\u0120slippery": 32911, "\u0120Remy": 32912, "\u0120caramel": 32913, "\u0120YEAR": 32914, "\u0120alk": 32915, "Ign": 32916, "aution": 32917, "\u0120Merlin": 32918, "\u0120Cran": 32919, "\u0120apologies": 32920, "\u0120410": 32921, "\u0120outing": 32922, "\u0120Memories": 32923, "appointed": 32924, "\u0120countered": 32925, "uld": 32926, "posing": 32927, "\u0120firewall": 32928, "\u0120Wast": 32929, "\u0120Wet": 32930, "worked": 32931, "seller": 
32932, "\u0120repealed": 32933, "ereo": 32934, "assuming": 32935, "BLIC": 32936, "mite": 32937, "\u0120CEOs": 32938, "\u0120Chapel": 32939, "elligent": 32940, "________________________": 32941, "Dog": 32942, "\u0120wart": 32943, "\u0120subscriber": 32944, "sports": 32945, "\u0120begged": 32946, "\u0120MV": 32947, "\u0120semif": 32948, "ethical": 32949, "\u0120preach": 32950, "\u0120revital": 32951, "\u0120punitive": 32952, "\u0120shortcuts": 32953, "\u0120instituted": 32954, "\u0120Warsaw": 32955, "\u0120abdomen": 32956, "\u0120KING": 32957, "\u0120superintendent": 32958, "\u0120fry": 32959, "\u0120Geo": 32960, "TOR": 32961, "\u0120contradictions": 32962, "aptic": 32963, "\u0120landscapes": 32964, "bugs": 32965, "\u0120clust": 32966, "\u0120volley": 32967, "cribed": 32968, "\u0120tandem": 32969, "\u0120robes": 32970, "WHAT": 32971, "\u0120promoter": 32972, "\u0120eloqu": 32973, "reviewed": 32974, "\u0120DK": 32975, "\u0120Plato": 32976, "\u0120fps": 32977, "Tank": 32978, "\u0120Derrick": 32979, "\u0120prioritize": 32980, "asper": 32981, "\u0120Honduras": 32982, "\u0120Completed": 32983, "nec": 32984, "\u0120mog": 32985, "nir": 32986, "\u0120Mayo": 32987, "DEF": 32988, "stall": 32989, "inness": 32990, "\u0120Volkswagen": 32991, "\u0120precaution": 32992, "\u0120Mell": 32993, "iak": 32994, "istries": 32995, "\u0120248": 32996, "\u0120overlapping": 32997, "Senate": 32998, "\u0120Enhance": 32999, "resy": 33000, "racial": 33001, "ORTS": 33002, "\u0120Mormons": 33003, "Strong": 33004, "\u0120Coch": 33005, "Mexico": 33006, "\u0120Maduro": 33007, "\u0120jars": 33008, "\u0120cane": 33009, "Wik": 33010, "olla": 33011, "ifference": 33012, "\u0120physicist": 33013, "\u0120Maggie": 33014, "\u0120285": 33015, "\u0120depiction": 33016, "\u0120McLaren": 33017, "Ju": 33018, "\u0120slows": 33019, "\u0120commissioners": 33020, "\u0120Willow": 33021, "\u0120Explos": 33022, "hovah": 33023, "\u0120technician": 33024, "\u0120homicides": 33025, "\u0120Flav": 33026, "\u0120Truman": 33027, "\u012010000": 33028, "uctor": 33029, "\u0120shader": 33030, "Newsletter": 33031, "457": 33032, "\u0120rever": 33033, "\u0120hardened": 33034, "\u0120whereabouts": 33035, "\u0120redevelop": 33036, "\u0120carbs": 33037, "\u0120travers": 33038, "\u0120squirrel": 33039, "\u0120follower": 33040, "\u0120sings": 33041, "508": 33042, "\u0120rabbits": 33043, "emonium": 33044, "\u0120documenting": 33045, "\u0120misunderstood": 33046, ")'": 33047, "Rick": 33048, "ggies": 33049, "\u0120premie": 33050, "\u0120skating": 33051, "\u0120passports": 33052, "\u0120fists": 33053, "ageddon": 33054, "Haw": 33055, "ACP": 33056, "080": 33057, "\u0120Thoughts": 33058, "\u0120Carlson": 33059, "\u0120priesthood": 33060, "hua": 33061, "\u0120dungeons": 33062, "\u0120Loans": 33063, "\u0120antis": 33064, "\u0120familiarity": 33065, "\u0120Sabb": 33066, "opal": 33067, "\u0120Ink": 33068, "strike": 33069, "\u0120cram": 33070, "\u0120legalized": 33071, "\u0120cuisine": 33072, "\u0120fibre": 33073, "Travel": 33074, "\u0120Monument": 33075, "ODY": 33076, "ethy": 33077, "\u0120interstate": 33078, "\u0120PUR": 33079, "emporary": 33080, "\u0120Arabian": 33081, "developed": 33082, "\u0120saddle": 33083, "\u0120github": 33084, "\u0120Offer": 33085, "\u0120ISP": 33086, "rolet": 33087, "\u0120SUPER": 33088, "\u0120Denis": 33089, "\u0120multiplier": 33090, "\u0120stirred": 33091, "Interestingly": 33092, "\u0120customary": 33093, "\u0120billed": 33094, "hex": 33095, "\u0120multiplied": 33096, "\u0120flipping": 33097, "\u0120Crosby": 33098, "\u0120fundamentals": 33099, 
"iae": 33100, "\u0120Played": 33101, "\u0120Atom": 33102, "amazon": 33103, "\u0120Flam": 33104, "eez": 33105, "activated": 33106, "\u0120tablespoon": 33107, "\u0120liberalism": 33108, "\u0120Palin": 33109, "\u0120Patel": 33110, "Num": 33111, "\u0120TAM": 33112, "\u0120surn": 33113, "\u0120Reloaded": 33114, "\u0120coined": 33115, "\"],": 33116, "\u0120Clash": 33117, "\u0120Agu": 33118, "\u0120pragmatic": 33119, "\u0120Activate": 33120, "\u0120802": 33121, "\u0120trailers": 33122, "\u0120silhou": 33123, "\u0120probes": 33124, "\u0120circus": 33125, "\u0120Bain": 33126, "\u0120Lindsay": 33127, "\u0120Abbey": 33128, "Delivery": 33129, "\u0120concession": 33130, "\u0120gastro": 33131, "\u0120Sprite": 33132, "\u00c4\u0141": 33133, "andel": 33134, "\u0120gimm": 33135, "\u0120autobi": 33136, "\u0120Turtle": 33137, "\u0120wonderfully": 33138, "\u0120Haram": 33139, "\u0120Worldwide": 33140, "\u0120Handle": 33141, "\u0120theorists": 33142, "\u0120sleek": 33143, "\u0120Zhu": 33144, "ographically": 33145, "EGA": 33146, "\u0120Owners": 33147, "aths": 33148, "\u0120Antarctic": 33149, "natal": 33150, "=\"\"": 33151, "flags": 33152, "````": 33153, "\u0120sul": 33154, "Kh": 33155, "\u0120potassium": 33156, "\u0120lineman": 33157, "\u0120cereal": 33158, "\u0120Seasons": 33159, "\u01202022": 33160, "\u0120mathematic": 33161, "\u0120astronomers": 33162, "professional": 33163, "\u0120fares": 33164, "cknowled": 33165, "\u0120chi": 33166, "\u0120youngsters": 33167, "\u0120mistakenly": 33168, "\u0120hemisphere": 33169, "\u0120Divinity": 33170, "rone": 33171, "\u0120\",": 33172, "rings": 33173, "\u0120attracts": 33174, "vana": 33175, "\u00e5\u00b9": 33176, "CAP": 33177, "\u0120playlist": 33178, "\u0120porch": 33179, "\u00e3\u0123\u00a3": 33180, "\u0120incorporates": 33181, "\u0120soak": 33182, "\u0120asserting": 33183, "\u0120Terrorism": 33184, "\u0120Pablo": 33185, "Ja": 33186, "cester": 33187, "\u0120fearing": 33188, "\u0120Prayer": 33189, "\u0120escalated": 33190, "GW": 33191, "\u0120robe": 33192, "\u0120Brighton": 33193, "acists": 33194, "\u0120Symphony": 33195, "\u0120Dwarf": 33196, "\u0120Parade": 33197, "\u0120Lego": 33198, "\u0120inexpl": 33199, "\u0120lords": 33200, "leaf": 33201, "RAG": 33202, "liber": 33203, "\u0120cigars": 33204, "\u0120Jehovah": 33205, "606": 33206, "WINDOWS": 33207, "\u0120Liberia": 33208, "ebus": 33209, "Heavy": 33210, "\u0120lubric": 33211, "\u0120RW": 33212, "anguages": 33213, "\u0120narrowed": 33214, "computer": 33215, "\u0120Ember": 33216, "\u0120murdering": 33217, "\u0120downstream": 33218, "\u0120Tuls": 33219, "\u0120Tables": 33220, "Topic": 33221, "\u0120Accuracy": 33222, "=/": 33223, "lost": 33224, "\u0120Rei": 33225, "\u0120progresses": 33226, "bear": 33227, "\u0120establishments": 33228, "Justin": 33229, "\u0120Peach": 33230, "\u0120Gomez": 33231, "\u00e5\u00bf": 33232, "\u0120Triangle": 33233, "Ident": 33234, "\u0120Hive": 33235, "Resources": 33236, "\u0120mixes": 33237, "\u0120Assuming": 33238, "Mu": 33239, "\u0120hypoc": 33240, "\u0120sane": 33241, "\u0120Wan": 33242, "idious": 33243, "Success": 33244, "\u0120io": 33245, "Angel": 33246, "\u0120dangerously": 33247, "\u0120Creature": 33248, "WORK": 33249, ":[": 33250, "\u0120Katrina": 33251, "Listener": 33252, "Miller": 33253, "\u0120Idlib": 33254, "hang": 33255, "\u0120circumvent": 33256, "href": 33257, "\u0120celestial": 33258, "\u0120Weeks": 33259, "\u0120Pug": 33260, "\u0120Dalton": 33261, "\u0120subpoena": 33262, "uku": 33263, "\u0120persisted": 33264, "pei": 33265, "olding": 33266, "\u0120Documents": 33267, 
"\u0120Hast": 33268, "\u0120CENT": 33269, "\u0120primer": 33270, "\u0120synonymous": 33271, "\u0120nib": 33272, "ombs": 33273, "\u0120notation": 33274, "\u0120Dish": 33275, "\u0120Atmosp": 33276, "\u0120forbid": 33277, "\u0120ANG": 33278, "pattern": 33279, "los": 33280, "\u0120projectiles": 33281, "brown": 33282, ".\",": 33283, "\u0120Venom": 33284, "\u0120fiercely": 33285, "ublished": 33286, "\u0120Uran": 33287, "\u0120Nicarag": 33288, "410": 33289, "\u0120CAL": 33290, "OTOS": 33291, "\u0120Miracle": 33292, "\u0120Enchant": 33293, "\u0120guarding": 33294, "append": 33295, "Attach": 33296, "\u0120leveled": 33297, "\u0120condoms": 33298, "ihilation": 33299, "649": 33300, "\u0120nightmares": 33301, "\u0120THEY": 33302, "\u0120START": 33303, "\u0120Kinn": 33304, "\u0120roommate": 33305, "\u0120hygiene": 33306, "opping": 33307, "Job": 33308, "\u0120lvl": 33309, "\u0120VER": 33310, "\u0120Keeping": 33311, "abetic": 33312, "\u0120formatting": 33313, "erala": 33314, "\u0120revisions": 33315, "\u0120resurg": 33316, "Tel": 33317, "\u0120Goodman": 33318, "353": 33319, "pod": 33320, "\u0120indisp": 33321, "\u0120Translation": 33322, "\u0120gown": 33323, "\u0120Mund": 33324, "\u0120cis": 33325, "\u0120bystand": 33326, "collect": 33327, "\u0120Punjab": 33328, "actively": 33329, "\u0120Gamb": 33330, "tell": 33331, "\u0120importing": 33332, "gencies": 33333, "\u0120locom": 33334, "\u0120Brill": 33335, "Holy": 33336, "\u0120Berger": 33337, "\u0120showdown": 33338, "\u0120responders": 33339, "ILY": 33340, "\u0120takedown": 33341, "leted": 33342, "\u0120mattered": 33343, "\u0120predictive": 33344, "\u0120overlay": 33345, "GPU": 33346, "\u0120Vick": 33347, "\u0120conveyed": 33348, "Tab": 33349, "peer": 33350, "Scan": 33351, "\u0120defensively": 33352, "vae": 33353, "\u0120approving": 33354, "\u0120tiers": 33355, "\u0120Via": 33356, "querade": 33357, "\u0120Saudis": 33358, "\u0120demolished": 33359, "\u0120Prophe": 33360, "\u0120mono": 33361, "\u0120hospitality": 33362, "HAM": 33363, "\u0120Ariel": 33364, "MOD": 33365, "\u0120Torah": 33366, "\u0120blah": 33367, "\u0120Belarus": 33368, "erential": 33369, "\u0120Tuc": 33370, "\u0120banker": 33371, "397": 33372, "\u0120mosquit": 33373, "\u0120Scientist": 33374, "\u0120Musical": 33375, "\u0120hust": 33376, "Shift": 33377, "\u0120torment": 33378, "\u0120standoff": 33379, "Educ": 33380, "\u0120Fog": 33381, "\u0120amplifier": 33382, "Shape": 33383, "Instance": 33384, "\u0120Critics": 33385, "\u0120daemon": 33386, "Houston": 33387, "\u0120mattress": 33388, "\u0120IDF": 33389, "\u0120obscene": 33390, "\u0120Amer": 33391, "hetti": 33392, "\u0120compiling": 33393, "352": 33394, "verett": 33395, "\u0120Reduction": 33396, "istration": 33397, "\u0120Blessed": 33398, "\u0120Bachelor": 33399, "316": 33400, "\u0120prank": 33401, "\u0120Vulcan": 33402, "dding": 33403, "\u0120mourning": 33404, "\u0120Quint": 33405, "\u0120Blaster": 33406, "testing": 33407, "\u0120sediment": 33408, ">>>": 33409, "\u0120Eternity": 33410, "\u0120WHERE": 33411, "\u0120Maze": 33412, "\u0120reacting": 33413, "\u0120Alv": 33414, "omsday": 33415, "\u0120CRA": 33416, "\u0120translator": 33417, "\u0120bogus": 33418, "atu": 33419, "Website": 33420, "olls": 33421, "\u0120baptism": 33422, "\u0120sibling": 33423, "\u0120Autumn": 33424, "vez": 33425, "\u00e3\u0123\u00ae\u00e9": 33426, "guards": 33427, "Georg": 33428, "assadors": 33429, "\u0120Freud": 33430, "\u0120continents": 33431, "\u0120Registry": 33432, "Bernie": 33433, "\u0138\u013c\u00e5\u00a3\u00ab": 33434, "\u0120tolerant": 33435, "\u0120UW": 33436, 
"\u0120horribly": 33437, "995": 33438, "\u0120MIDI": 33439, "\u0120impatient": 33440, "ocado": 33441, "eri": 33442, "\u0120Worst": 33443, "\u0120Norris": 33444, "\u0120Talking": 33445, "\u0120defends": 33446, "ensable": 33447, "\u01202021": 33448, "\u0120anatomy": 33449, "Lew": 33450, "\u0120drawer": 33451, "\u0120Canberra": 33452, "\u0120patriotic": 33453, "\u00e9\u00be\u012f\u00e5\u0138\u013c\u00e5\u00a3\u00ab": 33454, "\u0120Avg": 33455, "ARM": 33456, "\u0120undisclosed": 33457, "\u0120farewell": 33458, "459": 33459, "bable": 33460, "\u0120Allison": 33461, "OLOG": 33462, "\u0120conco": 33463, "tight": 33464, "\u0120ACPI": 33465, "\u0120Mines": 33466, "lich": 33467, "\u0120\u00e2\u0136\u013e": 33468, "represented": 33469, "200000": 33470, "\u0120enthusiast": 33471, "OTS": 33472, "bil": 33473, "\u0120Ingredients": 33474, "\u0120inventor": 33475, "\u0120MySQL": 33476, "\u00c2\u0142\u00c2\u0142\u00c2\u0142": 33477, "\u0120ABOUT": 33478, "within": 33479, "\u0120mk": 33480, "Bul": 33481, "\u0120Fake": 33482, "\u0120draconian": 33483, "Wa": 33484, "helm": 33485, "\u0120Terran": 33486, "erville": 33487, "\u0120commonplace": 33488, "SIZE": 33489, "\u0120\"<": 33490, "replace": 33491, "ographs": 33492, "\u0120SELECT": 33493, "incible": 33494, "\u0120Mostly": 33495, "\u0120Sheffield": 33496, "\u0120IDE": 33497, "uggle": 33498, "\u0120citations": 33499, "hurst": 33500, "\u0120Unix": 33501, "\u0120unleash": 33502, "\u0120Piper": 33503, "\u0120Nano": 33504, "\u0120succumb": 33505, "\u0120reluctance": 33506, "\u01202500": 33507, "\u0120Merchant": 33508, "\u0120wiret": 33509, "\u0120combos": 33510, "\u0120Birthday": 33511, "\u0120charcoal": 33512, "\u0120UPS": 33513, "\u0120Fairfax": 33514, "\u0120driveway": 33515, "\u0120Tek": 33516, "\u0120Pitch": 33517, "overe": 33518, "\u0120technicians": 33519, "\u0120Actual": 33520, "flation": 33521, "\u0120Fiscal": 33522, "\u0120Empty": 33523, "anamo": 33524, "\u0120magnesium": 33525, "\u0120slut": 33526, "\u0120growers": 33527, "Investigators": 33528, "():": 33529, "\u0120Satellite": 33530, "\u0120Keynes": 33531, "missive": 33532, "lane": 33533, "\u0120borough": 33534, "344": 33535, "\u0120TEAM": 33536, "\u0120Bethesda": 33537, "CV": 33538, "hower": 33539, "\u0120RAD": 33540, "\u0120chant": 33541, "\u0120Riy": 33542, "\u0120compositions": 33543, "\u0120mildly": 33544, "\u0120meddling": 33545, "\u0120agility": 33546, "aneers": 33547, "501": 33548, "\u0120synth": 33549, "linger": 33550, "291": 33551, "\u0120exclaimed": 33552, "Party": 33553, "\u0120contamin": 33554, "\u0120Manor": 33555, "\u0120Respond": 33556, "\u0120praising": 33557, "\u0120manners": 33558, "fleet": 33559, "Summer": 33560, "\u0120Lynd": 33561, "\u0120Definitely": 33562, "grim": 33563, "\u0120bowling": 33564, "stri": 33565, "\u00e7\u013d": 33566, "ynt": 33567, "\u0120mandates": 33568, "DIV": 33569, "\u0120reconcile": 33570, "views": 33571, "\u0120Damon": 33572, "vette": 33573, "Flo": 33574, "\u0120Greatest": 33575, "ilon": 33576, "icia": 33577, "\u0120portrayal": 33578, "\u0120cushion": 33579, "504": 33580, "1979": 33581, "ossal": 33582, "Applic": 33583, "scription": 33584, "\u0120mitigation": 33585, "ATS": 33586, "pac": 33587, "\u0120erased": 33588, "\u0120deficiencies": 33589, "\u0120Hollande": 33590, "\u0120Xu": 33591, "\u0120bred": 33592, "\u0120pregnancies": 33593, "femin": 33594, "\u0120emph": 33595, "\u0120planners": 33596, "\u0120outper": 33597, "uttering": 33598, "\u0120perpetrator": 33599, "\u0120motto": 33600, "\u0120Ellison": 33601, "\u0120NEVER": 33602, "\u0120admittedly": 33603, 
"ARI": 33604, "\u0120Azerbaijan": 33605, "\u0120millisec": 33606, "\u0120combustion": 33607, "\u0120Bottle": 33608, "\u0120Lund": 33609, "\u0120Ps": 33610, "\u0120Dress": 33611, "\u0120fabricated": 33612, "\u0120battered": 33613, "\u0120sidel": 33614, "\u0120Notting": 33615, "Foreign": 33616, "\u0120Jerome": 33617, "020": 33618, "\u0120Arbit": 33619, "\u0120knots": 33620, "\u0120RIGHT": 33621, "Moving": 33622, "\u00e3\u0123\u013b": 33623, "\u0120surgeries": 33624, "\u0120courthouse": 33625, "\u0120mastered": 33626, "\u0120hovering": 33627, "\u0120Bran": 33628, "\u0120Alison": 33629, "\u0120safest": 33630, "military": 33631, "\u0120bullied": 33632, "\u0120barrage": 33633, "Reader": 33634, "ESE": 33635, "\u0120Geographic": 33636, "Tools": 33637, "314": 33638, "\u0120Geek": 33639, "roth": 33640, "glers": 33641, "\u0120FIN": 33642, "\u00cf\u0123": 33643, "\u0120Aston": 33644, "altern": 33645, "488": 33646, "\u0120veterin": 33647, "Gamer": 33648, "\u0120intel": 33649, "renches": 33650, "Shield": 33651, "\u0120amnesty": 33652, "\u0120Bhar": 33653, "\u0120piled": 33654, "\u0120honorable": 33655, "\u0120Institutes": 33656, "\u0120soaked": 33657, "\u0120coma": 33658, "\u0120EFF": 33659, "341": 33660, "bytes": 33661, "\u0120Gmail": 33662, "lein": 33663, "\u0120Canadiens": 33664, "material": 33665, "Il": 33666, "\u0120instructors": 33667, "\u0120KY": 33668, "\u0120conceive": 33669, "ubb": 33670, "\u0120Possible": 33671, "\u0120easing": 33672, "\u0120Christina": 33673, "\u0120caric": 33674, "\u0120HDR": 33675, "ROM": 33676, "\u0120shovel": 33677, "delete": 33678, "\u0120puff": 33679, "\u0120Changing": 33680, "\u0120seamlessly": 33681, "Attribute": 33682, "\u0120acquisitions": 33683, "akery": 33684, "\u0120EF": 33685, "\u0120autistic": 33686, "\u0120Takes": 33687, "\u0120Powder": 33688, "\u0120Stir": 33689, "510": 33690, "\u0120Bubble": 33691, "settings": 33692, "\u0120Fowler": 33693, "\u0120mustard": 33694, "\u0120moreover": 33695, "\u0120copyrighted": 33696, "\u0120LEDs": 33697, "1500": 33698, "\u00e6\u012b": 33699, "\u0120HIS": 33700, "enf": 33701, "\u0120custod": 33702, "\u0120Huck": 33703, "Gi": 33704, "\u0120img": 33705, "Answer": 33706, "Ct": 33707, "jay": 33708, "\u0120Infrastructure": 33709, "\u0120federally": 33710, "Loc": 33711, "\u0120microbes": 33712, "\u0120overrun": 33713, "dds": 33714, "otent": 33715, "adiator": 33716, ">>>>>>>>": 33717, "\u0120tornado": 33718, "\u0120adjud": 33719, "\u0120intrigued": 33720, "\u0120si": 33721, "\u0120Revelation": 33722, "progress": 33723, "\u0120burglary": 33724, "\u0120Saiyan": 33725, "\u0120Kathy": 33726, "\u0120serpent": 33727, "\u0120Andreas": 33728, "\u0120compel": 33729, "essler": 33730, "\u0120Plastic": 33731, "\u0120Advent": 33732, "\u0120Positive": 33733, "\u0120Qt": 33734, "\u0120Hindus": 33735, "registered": 33736, "ularity": 33737, "\u0120righteousness": 33738, "\u0120demonic": 33739, "uitive": 33740, "\u0120BDS": 33741, "\u0120Gregg": 33742, "cia": 33743, "\u0120Crusade": 33744, "\u0120Sinai": 33745, "WARE": 33746, "+(": 33747, "\u0120mell": 33748, "\u0120derail": 33749, "yards": 33750, "Ast": 33751, "\u0120noticeably": 33752, "\u0120Ober": 33753, "Ram": 33754, "\u0120unnoticed": 33755, "\u0120seq": 33756, "avage": 33757, "Ts": 33758, "\u0120640": 33759, "\u0120concede": 33760, "\u0120])": 33761, "Fill": 33762, "\u0120captivity": 33763, "\u0120Improvement": 33764, "\u0120Crusader": 33765, "araoh": 33766, "MAP": 33767, "\u00e6\u0139": 33768, "\u0120stride": 33769, "always": 33770, "Fly": 33771, "Nit": 33772, "\u0120algae": 33773, 
"\u0120Cooking": 33774, "\u0120Doors": 33775, "Malley": 33776, "\u0120policemen": 33777, "\u00e3\u0123\u012f": 33778, "\u0120astronaut": 33779, "accessible": 33780, "495": 33781, "\u0120RAW": 33782, "cliffe": 33783, "udicrous": 33784, "\u0120depended": 33785, "alach": 33786, "\u0120ventures": 33787, "rake": 33788, "\u0120tits": 33789, "\u0120Hou": 33790, "\u0120condom": 33791, "ormonal": 33792, "\u0120indent": 33793, "\u0120uploading": 33794, "Footnote": 33795, "Important": 33796, "\u0120271": 33797, "\u0120mindful": 33798, "\u0120contends": 33799, "Cra": 33800, "\u0120calibr": 33801, "\u0120OECD": 33802, "plugin": 33803, "Fat": 33804, "\u0120ISS": 33805, "\u0120Dynamics": 33806, "ansen": 33807, "686": 33808, "'),": 33809, "\u0120sprite": 33810, "\u0120handheld": 33811, "\u0120Hipp": 33812, "=~=~": 33813, "Trust": 33814, "\u0120semantics": 33815, "\u0120Bundes": 33816, "\u0120Reno": 33817, "\u0120Literature": 33818, "sense": 33819, "Gary": 33820, "\u0120Aeg": 33821, "\u0120Trin": 33822, "EEK": 33823, "\u0120cleric": 33824, "\u0120SSH": 33825, "\u0120christ": 33826, "\u0120invading": 33827, "ibu": 33828, "\u0120enum": 33829, "aura": 33830, "\u0120allege": 33831, "\u0120Incredible": 33832, "BBC": 33833, "\u0120thru": 33834, "\u0120sailed": 33835, "\u0120emulate": 33836, "\u0120insecurity": 33837, "\u0120crou": 33838, "\u0120accommodations": 33839, "\u0120incompetent": 33840, "\u0120slips": 33841, "\u0120Earthqu": 33842, "sama": 33843, "ILLE": 33844, "\u0120iPhones": 33845, "asaki": 33846, "\u0120bye": 33847, "\u0120ard": 33848, "\u0120extras": 33849, "\u0120slaughtered": 33850, "\u0120crowdfunding": 33851, "resso": 33852, "\u0120filib": 33853, "\u0120ERROR": 33854, "\u0120TLS": 33855, "egg": 33856, "\u0120Ital": 33857, "\u0120enlist": 33858, "\u0120Catalonia": 33859, "\u0120Scots": 33860, "\u0120sergeant": 33861, "\u0120dissolve": 33862, "NH": 33863, "\u0120standings": 33864, "rique": 33865, "IQ": 33866, "\u0120beneficiary": 33867, "\u0120aquarium": 33868, "YouTube": 33869, "\u0120PowerShell": 33870, "\u0120brightest": 33871, "\u0120Warrant": 33872, "Sold": 33873, "Writing": 33874, "\u0120beginnings": 33875, "\u0120Reserved": 33876, "\u0120Latinos": 33877, "heading": 33878, "\u0120440": 33879, "\u0120rooftop": 33880, "ATING": 33881, "\u0120390": 33882, "VPN": 33883, "Gs": 33884, "kernel": 33885, "turned": 33886, "\u0120preferable": 33887, "\u0120turnovers": 33888, "\u0120Hels": 33889, "Sa": 33890, "\u0120Shinji": 33891, "veh": 33892, "\u0120MODULE": 33893, "Viol": 33894, "\u0120exiting": 33895, "\u0120jab": 33896, "\u0120Vanilla": 33897, "\u0120acron": 33898, "\u0120Gap": 33899, "bern": 33900, "Ak": 33901, "\u0120McGu": 33902, "\u0120endlessly": 33903, "\u0120Farage": 33904, "\u0120Noel": 33905, "Va": 33906, "MK": 33907, "\u0120brute": 33908, "\u0120Kru": 33909, "\u0120ESV": 33910, "\u0120Olivia": 33911, "\u00e2\u0122\u0142": 33912, "\u0120Kaf": 33913, "\u0120trusting": 33914, "\u0120hots": 33915, "324": 33916, "\u0120malaria": 33917, "\u0120json": 33918, "\u0120pounding": 33919, "ortment": 33920, "Country": 33921, "\u0120postponed": 33922, "\u0120unequiv": 33923, "?),": 33924, "\u0120Rooney": 33925, "udding": 33926, "\u0120Leap": 33927, "urrence": 33928, "shapeshifter": 33929, "\u0120HAS": 33930, "osate": 33931, "\u0120cavern": 33932, "\u0120conservatism": 33933, "\u0120BAD": 33934, "\u0120mileage": 33935, "\u0120arresting": 33936, "Vaults": 33937, "\u0120mixer": 33938, "Democratic": 33939, "\u0120Benson": 33940, "\u0120authored": 33941, "8000": 33942, "\u0120proactive": 33943, 
"\u0120Spiritual": 33944, "tre": 33945, "\u0120incarcerated": 33946, "\u0120Sort": 33947, "\u0120peaked": 33948, "\u0120wielding": 33949, "reciation": 33950, "\u00d7\u013b\u00d7": 33951, "Patch": 33952, "\u0120Emmy": 33953, "\u0120exqu": 33954, "tto": 33955, "\u0120Ratio": 33956, "\u0120Picks": 33957, "\u0120Gry": 33958, "phant": 33959, "\u0120fret": 33960, "\u0120ethn": 33961, "\u0120archived": 33962, "%-": 33963, "cases": 33964, "\u0120Blaze": 33965, "\u0120imb": 33966, "cv": 33967, "yss": 33968, "imony": 33969, "\u0120countdown": 33970, "\u0120awakening": 33971, "\u0120Tunisia": 33972, "\u0120Refer": 33973, "\u0120MJ": 33974, "\u0120unnatural": 33975, "\u0120Carnegie": 33976, "izen": 33977, "\u0120Nuggets": 33978, "hess": 33979, "\u0120evils": 33980, "647": 33981, "\u0120introductory": 33982, "loving": 33983, "\u0120McMahon": 33984, "\u0120ambiguity": 33985, "Label": 33986, "\u0120Almighty": 33987, "\u0120coloring": 33988, "\u0120Claus": 33989, "setting": 33990, "NULL": 33991, "\u0120Favorite": 33992, "\u0120SIG": 33993, ">(": 33994, "\u0120Shiva": 33995, "\u0120Mayer": 33996, "\u0120stormed": 33997, "\u0120Coverage": 33998, "weapons": 33999, "igham": 34000, "\u0120unanswered": 34001, "\u0120leve": 34002, "\u0120coy": 34003, "cas": 34004, "bags": 34005, "asured": 34006, "Seattle": 34007, "\u0120Santorum": 34008, "serious": 34009, "\u0120courageous": 34010, "\u0120Soup": 34011, "\u0120confiscated": 34012, "\u0120///": 34013, "\u0120unconventional": 34014, "\u0120moms": 34015, "\u0120Rohingya": 34016, "\u0120Orchestra": 34017, "\u0120Potion": 34018, "\u0120discredit": 34019, "\u0120FIL": 34020, "fixed": 34021, "\u0120Deer": 34022, "doi": 34023, "\u0120Dimension": 34024, "\u0120bureaucrats": 34025, "eteen": 34026, "\u0120actionGroup": 34027, "ohm": 34028, "\u0120bumps": 34029, "\u0120Utility": 34030, "\u0120submarines": 34031, "renheit": 34032, "research": 34033, "\u0120Shapiro": 34034, "\u0120sketches": 34035, "\u0120deceptive": 34036, "\u0120Vil": 34037, "esame": 34038, "\u0120Essentially": 34039, "\u0120rampage": 34040, "isky": 34041, "\u0120muttered": 34042, "thritis": 34043, "\u0120236": 34044, "fet": 34045, "bars": 34046, "\u0120pupil": 34047, "\u0120Thou": 34048, "oS": 34049, "song": 34050, "\u0120fractured": 34051, "\u0120revert": 34052, "picture": 34053, "\u0120criterion": 34054, "usher": 34055, "\u0120repercussions": 34056, "\u0120Vintage": 34057, "\u0120Superintendent": 34058, "Officers": 34059, "\u0120flagged": 34060, "\u0120blames": 34061, "\u0120inverse": 34062, "ographers": 34063, "\u0120makeshift": 34064, "\u0120devoid": 34065, "\u0120fossils": 34066, "\u0120Aristotle": 34067, "\u0120Funds": 34068, "\u0120depleted": 34069, "\u0120Flu": 34070, "\u0120Yuan": 34071, "\u0120woes": 34072, "\u0120lipid": 34073, "\u0120situ": 34074, "requisites": 34075, "\u0120furnish": 34076, "\u0120Samar": 34077, "\u0120shameful": 34078, "\u0120adversely": 34079, "\u0120adept": 34080, "\u0120remorse": 34081, "\u0120murderous": 34082, "uckles": 34083, "\u0120ESL": 34084, "\u0120314": 34085, "sent": 34086, "\u0120redef": 34087, "\u0120Cache": 34088, "\u0120Purs": 34089, "igans": 34090, "\u0120460": 34091, "\u0120prescriptions": 34092, "\u0120fres": 34093, "Fuck": 34094, "ocrates": 34095, "Twenty": 34096, "\u0120Weird": 34097, "\u0120Toggle": 34098, "\u0120Called": 34099, "itizens": 34100, "\u0120poultry": 34101, "\u0120harvesting": 34102, "\u00e3\u0124\u00a6\u00e3\u0124\u00b9": 34103, "Bottom": 34104, "\u0120cautioned": 34105, "tn": 34106, "396": 34107, "\u0120Nikki": 34108, "\u0120evaluations": 
34109, "\u0120harassing": 34110, "\u0120bindings": 34111, "\u0120Monetary": 34112, "\u0120hitters": 34113, "\u0120adversary": 34114, "unts": 34115, "\u0120setback": 34116, "\u0120encrypt": 34117, "\u0120Cait": 34118, "\u0120lows": 34119, "enges": 34120, "\u0120Norn": 34121, "\u0120bulbs": 34122, "\u0120bottled": 34123, "\u0120Voyager": 34124, "317": 34125, "\u0120spheres": 34126, "politics": 34127, "\u0120subtract": 34128, "\u0120sensations": 34129, "\u0120appalling": 34130, "\u0120316": 34131, "\u0120environmentally": 34132, "\u0120STEM": 34133, "\u0120publishes": 34134, "560": 34135, "\u0120diligence": 34136, "484": 34137, "\u0120advises": 34138, "\u0120petrol": 34139, "\u0120imagining": 34140, "\u0120patrols": 34141, "\u0120Integer": 34142, "\u0120Ashes": 34143, "actus": 34144, "\u0120Radiant": 34145, "\u0120LT": 34146, "itability": 34147, "htaking": 34148, "Setting": 34149, "\u0120nuanced": 34150, "\u0120Reef": 34151, "\u0120Developers": 34152, "Ni": 34153, "pieces": 34154, "990": 34155, "License": 34156, "\u0120lowers": 34157, "\u0120Ottoman": 34158, "327": 34159, "ooo": 34160, "\u0120quitting": 34161, "markets": 34162, "Behind": 34163, "\u0120basin": 34164, "\u0120docs": 34165, "anie": 34166, "flash": 34167, "ctl": 34168, "\u0120civilized": 34169, "\u0120Fukushima": 34170, "\"],\"": 34171, "\u0120KS": 34172, "\u0120Honestly": 34173, "arat": 34174, "\u0120constructs": 34175, "\u0120Lans": 34176, "\u0120Dire": 34177, "\u0120LIKE": 34178, "\u0120Trouble": 34179, "\u0120withholding": 34180, "\u0120Oblivion": 34181, "\u0120sanity": 34182, "anya": 34183, "Const": 34184, "\u0120grocer": 34185, "\u0120Celsius": 34186, "\u0120recounted": 34187, "\u0120Wife": 34188, "Border": 34189, "atered": 34190, "happy": 34191, "\u0120spoiler": 34192, "\u0120logically": 34193, "Hall": 34194, "\u0120succeeding": 34195, "\u0120polymorph": 34196, "\u0120axes": 34197, "\u0120Shotgun": 34198, "\u0120Slim": 34199, "\u0120Principles": 34200, "\u0120Leth": 34201, "arta": 34202, "\u0120scor": 34203, "Screenshot": 34204, "\u0120relaxation": 34205, "#$#$": 34206, "\u0120deterrent": 34207, "iddy": 34208, "\u0120powerless": 34209, "\u0120lesbians": 34210, "\u0120chords": 34211, "\u0120Edited": 34212, "selected": 34213, "\u0120separatists": 34214, "0002": 34215, "\u0120airspace": 34216, "\u0120turnaround": 34217, "\u0120cunning": 34218, "PATH": 34219, "Poly": 34220, "\u0120bombed": 34221, "\u0120tion": 34222, "xs": 34223, "\u0120withhold": 34224, "\u0120waged": 34225, "\u0120Liberties": 34226, "Flag": 34227, "\u0120comforting": 34228, "454": 34229, "\u0120Iris": 34230, "arers": 34231, "\u0120rag": 34232, "\u0120relocated": 34233, "\u0120Guarant": 34234, "\u0120strategically": 34235, "\u0120gamma": 34236, "uberty": 34237, "\u0120Lockheed": 34238, "gres": 34239, "\u0120grilled": 34240, "\u0120Lowe": 34241, "stats": 34242, "\u0120Rocks": 34243, "\u0120sensing": 34244, "\u0120renting": 34245, "\u0120Geological": 34246, "\u00d8\u00a7\u00d8": 34247, "otrop": 34248, "\u0120sew": 34249, "\u0120improperly": 34250, "486": 34251, "\u0120\u00e2\u0138\u0142": 34252, "\u0120starving": 34253, "\u0120Bj": 34254, "Discussion": 34255, "328": 34256, "\u0120Combo": 34257, "\u0120Fixes": 34258, "NAT": 34259, "\u0120striving": 34260, "thora": 34261, "\u0120harvested": 34262, "\u0120Ping": 34263, "\u0120playful": 34264, "\u0120avenues": 34265, "\u0120occupational": 34266, "\u0120wakes": 34267, "\u0120Courier": 34268, "\u0120drummer": 34269, "\u0120Browser": 34270, "\u0120Houth": 34271, "itu": 34272, "\u0120apparel": 34273, "paste": 34274, 
"\u0120hunted": 34275, "\u0120Secondly": 34276, "lain": 34277, "XY": 34278, "\u0120PIN": 34279, "icons": 34280, "\u0120cocktails": 34281, "\u0120sizable": 34282, "\u0120hurdles": 34283, "estinal": 34284, "\u0120Recreation": 34285, "\u0120eco": 34286, "648": 34287, "\u0120Died": 34288, "mint": 34289, "\u0120fingerprints": 34290, "\u0120dispose": 34291, "\u0120Bosnia": 34292, "tsy": 34293, "2200": 34294, "\u0120inspected": 34295, "\u0120Fou": 34296, "\u0120fuss": 34297, "\u0120ambush": 34298, "\u0120Rak": 34299, "\u0120manifested": 34300, "Prosecut": 34301, "\u0120suffice": 34302, "rences": 34303, "\u0120compensated": 34304, "\u0120Cyrus": 34305, "\u0120genus": 34306, "\u0120Wolverine": 34307, "\u0120Trends": 34308, "\u0120hikes": 34309, "\u0120Seen": 34310, "\u0120enrol": 34311, "Cold": 34312, "\u0120politely": 34313, "\u0120Slav": 34314, "\u0120Rupert": 34315, "\u0120eyewitness": 34316, "\u0120Alto": 34317, "\u0120uncomp": 34318, "\u0120posterior": 34319, "Must": 34320, "\u0120Herz": 34321, "\u0120progressively": 34322, "\u0120234": 34323, "\u0120indifference": 34324, "\u0120Cunningham": 34325, "\u0120academia": 34326, "\u0120sewer": 34327, "\u0120astounding": 34328, "\u0120AES": 34329, "rather": 34330, "\u0120eldest": 34331, "\u0120climbs": 34332, "\u0120Adds": 34333, "\u0120outcry": 34334, "\u0120contag": 34335, "\u0120Houses": 34336, "\u0120pept": 34337, "\u0120Melania": 34338, "interested": 34339, "\u0120UCH": 34340, "\u0120Roots": 34341, "\u0120Hubbard": 34342, "\u0120TBD": 34343, "\u0120Romanian": 34344, "filename": 34345, "Stone": 34346, "\u0120Impl": 34347, "\u0120chromosome": 34348, "Cle": 34349, "dx": 34350, "\u0120scrambled": 34351, "\u0120Pt": 34352, "\u0120242": 34353, "OPLE": 34354, "\u0120tremendously": 34355, "Street": 34356, "\u0120craving": 34357, "\u0120bundled": 34358, "\u0120RG": 34359, "pipe": 34360, "\u0120injuring": 34361, "\u0120arcane": 34362, "Particip": 34363, "\u0120Heroic": 34364, "sty": 34365, "\u0120topping": 34366, "\u0120Tempest": 34367, "rentices": 34368, "bh": 34369, "\u0120paranoia": 34370, "\u0120Unicode": 34371, "\u0120egregious": 34372, "\u0120\\'": 34373, "\u0120Oswald": 34374, "\u0120gravel": 34375, "\u0120Simpsons": 34376, "\u0120bland": 34377, "\u0120Guantanamo": 34378, "Writer": 34379, "liners": 34380, "\u0120Dice": 34381, "JC": 34382, "\u0120parity": 34383, "\u0120sided": 34384, "\u0120237": 34385, "\u0120Pyrrha": 34386, "atters": 34387, "dk": 34388, "Fine": 34389, "compan": 34390, "\u0120formulated": 34391, "\u0120Idol": 34392, "ilers": 34393, "hemoth": 34394, "\u0120Fav": 34395, "\u0120intrusion": 34396, "\u0120carrots": 34397, "\u0120Layer": 34398, "\u0120Hacker": 34399, "\u0120----------------": 34400, "\u0120moderation": 34401, "\u00e9\u0123": 34402, "ococ": 34403, "\u0120characterize": 34404, "\u0120Teresa": 34405, "\u0120socioeconomic": 34406, "\u0120perk": 34407, "\u0120Participation": 34408, "training": 34409, "\u0120Paulo": 34410, "phys": 34411, "\u0120trustworthy": 34412, "\u0120embodied": 34413, "\u0120Merch": 34414, "currency": 34415, "\u0120Priority": 34416, "\u0120teasing": 34417, "\u0120absorbing": 34418, "\u0120unfinished": 34419, "\u0120Comparison": 34420, "\u0120disple": 34421, "writers": 34422, "\u0120professions": 34423, "\u0120Penguin": 34424, "\u0120angrily": 34425, "\u0120LINK": 34426, "688": 34427, "\u0120Correspond": 34428, "\u0120prevailed": 34429, "\u0120cartel": 34430, "lp": 34431, "asms": 34432, "\u0120Redemption": 34433, "\u0120Islamists": 34434, "effects": 34435, "dose": 34436, "\u0120Latter": 34437, 
"\u0120Halifax": 34438, "\u0120vas": 34439, "\u0120Topics": 34440, "\u0120Named": 34441, "advertising": 34442, "zza": 34443, "ICES": 34444, "\u0120retarded": 34445, "achable": 34446, "\u0120Puppet": 34447, "\u0120ItemLevel": 34448, "\u0120retract": 34449, "\u0120identifiable": 34450, "Aaron": 34451, "\u0120Buster": 34452, "sol": 34453, "helle": 34454, "assemb": 34455, "Hope": 34456, "ranged": 34457, "Ba": 34458, "\u0120Purch": 34459, "\u00e9\u0122": 34460, "\u0120Siri": 34461, "\u0120arrivals": 34462, "\u01201912": 34463, "\u0120shortened": 34464, "\u0120312": 34465, "\u0120discrepancy": 34466, "\u0120Temperature": 34467, "\u0120Walton": 34468, "\u0120kinderg": 34469, "polit": 34470, "\u0120remix": 34471, "\u0120connectors": 34472, "\u00e3\u0125\u013a\u00e3\u0125\u00a9": 34473, "\u0120Kazakhstan": 34474, "dominated": 34475, "\u0120sugars": 34476, "imble": 34477, "\u0120Panic": 34478, "\u0120Demand": 34479, "\u0120Colony": 34480, "onen": 34481, "\u0120MER": 34482, "775": 34483, "uria": 34484, "azaar": 34485, "\u0120Degree": 34486, "Pri": 34487, "\u0120sunshine": 34488, "\u0120251": 34489, "\u0120psychedelic": 34490, "\u0120digitally": 34491, "\u0120Braun": 34492, "\u0120shimmer": 34493, "\u0120shave": 34494, "\u0120Telesc": 34495, "\u0120Astral": 34496, "\u0120Venezuelan": 34497, "\u0120OG": 34498, "\u0120crawling": 34499, "Integ": 34500, "\u0120Feather": 34501, "\u0120unfolding": 34502, "\u0120appropriation": 34503, "\u0120\u00e8\u00a3\u0131\u00e8": 34504, "\u0120Mobility": 34505, "\u0120Ney": 34506, "-.": 34507, "bilt": 34508, "LIN": 34509, "\u0120Tube": 34510, "\u0120Conversely": 34511, "\u0120keyboards": 34512, "\u0120Cao": 34513, "\u0120overth": 34514, "\u0120laure": 34515, ">>\\": 34516, "\u0120Viper": 34517, "acha": 34518, "Offset": 34519, "\u0120Raleigh": 34520, "\u0120Jae": 34521, "Jordan": 34522, "jp": 34523, "\u0120totalitarian": 34524, "Connector": 34525, "\u0120observes": 34526, "\u0120Spartan": 34527, "\u0120Immediately": 34528, "\u0120Scal": 34529, "Cool": 34530, "\u0120taps": 34531, "\u0120roar": 34532, "Past": 34533, "\u0120chars": 34534, "\u0120Bender": 34535, "\u0120Sheldon": 34536, "\u0120painter": 34537, "\u0120beacon": 34538, "\u0120Creatures": 34539, "\u0120downturn": 34540, "\u0120hinder": 34541, "\u0120Andromeda": 34542, "\u00c3\u013d": 34543, "ccoli": 34544, "\u0120Fitness": 34545, "etrical": 34546, "\u0120utilizes": 34547, "\u0120senate": 34548, "\u0120ensemble": 34549, "\u0120cheers": 34550, "TW": 34551, "\u0120affluent": 34552, "kil": 34553, "rylic": 34554, "ordering": 34555, "Computer": 34556, "\u0120gruesome": 34557, "ostics": 34558, "\u0120Ubisoft": 34559, "\u0120Kelley": 34560, "\u0120wrench": 34561, "\u0120bourgeoisie": 34562, "IBLE": 34563, "\u0120Preston": 34564, "worn": 34565, "arist": 34566, "reating": 34567, "\u0120stained": 34568, "arine": 34569, "\u0120slime": 34570, "ENN": 34571, "\u0120chests": 34572, "\u0120groundwater": 34573, "annot": 34574, "\u0120Tray": 34575, "\u0120Locke": 34576, "\u0120CTR": 34577, "\u0120dudes": 34578, "\u0120External": 34579, "\u0120Decoder": 34580, "\u0120paramed": 34581, "\u0120Medline": 34582, "809": 34583, "\u0120Dinner": 34584, "rupal": 34585, "gz": 34586, "\u0120Gum": 34587, "\u0120Demo": 34588, "jee": 34589, "\u0120dh": 34590, "berman": 34591, "archs": 34592, "\u0120enqu": 34593, "\u0120Epstein": 34594, "\u0120devastation": 34595, "\u0120friendships": 34596, "\u0120Ard": 34597, "\u0120231": 34598, "\u0120Rubin": 34599, "\u0120Distance": 34600, "\u0120spurred": 34601, "\u0120dossier": 34602, "\u0120overlooking": 
34603, "\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\": 34604, "Forest": 34605, "\u0120Comes": 34606, "\\\",": 34607, "\u0120Iranians": 34608, "\u0120fixtures": 34609, "Laughs": 34610, "\u0120curry": 34611, "\u0120Kingston": 34612, "\u0120squash": 34613, "\u0120catalogue": 34614, "\u0120abnormalities": 34615, "\u0120digestive": 34616, ".........": 34617, "\u0120subordinate": 34618, "ogly": 34619, "\u0120249": 34620, "Middle": 34621, "\u0120massac": 34622, "\u0120burgers": 34623, "\u0120downstairs": 34624, "\u01201931": 34625, "394": 34626, "\u0120VG": 34627, "\u0120lasers": 34628, "\u0120Sikh": 34629, "\u0120Alexa": 34630, "derived": 34631, "\u0120cyclist": 34632, "\u00e3\u0123\u00ae\u00e9\u0143\u0136": 34633, "oneliness": 34634, "!!!!!!!!": 34635, "\u0120buffs": 34636, "legate": 34637, "\u0120raping": 34638, "\u0120recommending": 34639, "rored": 34640, "\u0120multicultural": 34641, "unique": 34642, "\u0120businessmen": 34643, "\u0120uneasy": 34644, "\u0120MAP": 34645, "\u0120dispersed": 34646, "cipline": 34647, "Jess": 34648, "\u0120Kerala": 34649, "\u00e5\u00a7": 34650, "\u0120abstraction": 34651, "Surv": 34652, "Uh": 34653, "\u0120printers": 34654, "ija": 34655, "owder": 34656, "\u0120analogous": 34657, "\u0120ASP": 34658, "afer": 34659, "\u0120unfolded": 34660, "\u0120leveling": 34661, "\u0120breached": 34662, "\u0120Hearing": 34663, "\u0120nat": 34664, "\u0120translating": 34665, "critical": 34666, "\u0120antagonist": 34667, "\u0120Yesterday": 34668, "\u0120fuzzy": 34669, "wash": 34670, "mere": 34671, "\u0120bewild": 34672, "\u0120Mae": 34673, "Virgin": 34674, "phrase": 34675, "\u0120signaled": 34676, "\u0120HIGH": 34677, "\u0120protester": 34678, "\u0120garner": 34679, "unknown": 34680, "\u0120kay": 34681, "\u0120abducted": 34682, "\u0120stalking": 34683, "amn": 34684, "\u0120deserving": 34685, "\u0120Riv": 34686, "\u0120Jorge": 34687, "\u0120scratching": 34688, "\u0120Saving": 34689, "iping": 34690, "\u0120tease": 34691, "\u0120missionary": 34692, "\u0120Morrow": 34693, "TIME": 34694, "Present": 34695, "\u0120chemotherapy": 34696, "terness": 34697, "\u0120Homes": 34698, "\u0120Purdue": 34699, "\u0120staunch": 34700, "\u0120Whitney": 34701, "\u0120THERE": 34702, "\u00ce\u00bc": 34703, "iatus": 34704, "\u0120Ernest": 34705, "\u0120Deploy": 34706, "\u0120coveted": 34707, "FML": 34708, "\u0120Dialogue": 34709, "\u0120exited": 34710, "fruit": 34711, "\u0120nerd": 34712, "\":\"\",\"": 34713, "\u0120vivo": 34714, "ruly": 34715, "460": 34716, "\u0120Amen": 34717, "rehensible": 34718, "\u0120\u00e2\u013a": 34719, "DIR": 34720, "\u0120adherence": 34721, "\u0120chew": 34722, "\u0120Coke": 34723, "\u0120Sergei": 34724, "digital": 34725, "\u0120Neck": 34726, "gently": 34727, "enthal": 34728, "/)": 34729, "\u0120weary": 34730, "\u0120guise": 34731, "\u0120Concord": 34732, "\u0120Onion": 34733, "atcher": 34734, "\u0120binge": 34735, "\u0120Directive": 34736, "\u0120manned": 34737, "ansk": 34738, "\u0120illusions": 34739, "\u0120billionaires": 34740, "383": 34741, "olyn": 34742, "odynamic": 34743, "\u0120Wheat": 34744, "\u0120Alic": 34745, "\u0120coloured": 34746, "\u0120NAFTA": 34747, "abo": 34748, "\u0120macros": 34749, "independent": 34750, "sweet": 34751, "\u0120spac": 34752, "\u0120Kabul": 34753, "\u0120\u00c4": 34754, "eme": 34755, "\u0120dictated": 34756, "\u0120shouts": 34757, "={": 34758, "\u0120ripping": 34759, "\u0120Shay": 34760, "\u0120Cricket": 34761, "directed": 34762, "\u0120analysed": 34763, "\u0120WARRANT": 34764, "agons": 34765, "\u0120Blazers": 34766, "\u0120cheered": 34767, 
"\u0120arithmetic": 34768, "\u0120Tanz": 34769, "373": 34770, "\u0120Flags": 34771, "\u0120295": 34772, "\u0120witches": 34773, "\u0120Included": 34774, "\u0120Gained": 34775, "\u0120Blades": 34776, "Gam": 34777, "\u0120Samantha": 34778, "\u0120Atlantis": 34779, "\u0120Pratt": 34780, "\u0120spoiled": 34781, "\u0120IB": 34782, "\u0120Ramirez": 34783, "Probably": 34784, "rero": 34785, "\u0120Ng": 34786, "\u0120Warlock": 34787, "tp": 34788, "\u0120overhe": 34789, "\u0120administrations": 34790, "\u0120tint": 34791, "\u0120regiment": 34792, "\u0120pistols": 34793, "\u0120blankets": 34794, "\u0120epist": 34795, "\u0120bowls": 34796, "\u0120hydraulic": 34797, "\u0120dean": 34798, "\u0120jung": 34799, "\u0120ascend": 34800, "705": 34801, "\u0120Santiago": 34802, "\u00c3\u00ae": 34803, "\u0120unavoid": 34804, "\u0120Shaman": 34805, "reb": 34806, "\u0120stemming": 34807, "998": 34808, "\u0120MG": 34809, "sticks": 34810, "esthesia": 34811, "ERO": 34812, "\u0120morbid": 34813, "\u0120Grill": 34814, "\u0120Poe": 34815, "anyl": 34816, "\u0120deleting": 34817, "\u0120Surveillance": 34818, "\u0120directives": 34819, "\u0120iterations": 34820, "\u0120Rox": 34821, "\u0120Milky": 34822, "Father": 34823, "\u0120patented": 34824, "447": 34825, "\u0120precursor": 34826, "\u0120maiden": 34827, "\u0120Phen": 34828, "\u0120Vegan": 34829, "\u0120Patent": 34830, "Kelly": 34831, "Redditor": 34832, "\u0120nods": 34833, "\u0120ventilation": 34834, "\u0120Schwarz": 34835, "\u0120wizards": 34836, "\u0120ominous": 34837, "\u0120Heads": 34838, "\u0120BG": 34839, "\u0120lumber": 34840, "\u0120Spiel": 34841, "\u0120isEnabled": 34842, "\u0120ancestral": 34843, "\u0120Ships": 34844, "\u0120wrestler": 34845, "phi": 34846, "\u0120yuan": 34847, "\u0120Rebellion": 34848, "\u0120iceberg": 34849, "\u0120magically": 34850, "\u0120diversion": 34851, "arro": 34852, "ythm": 34853, "\u0120Riders": 34854, "\u0120Robbie": 34855, "\u0120Kara": 34856, "\u0120Maintenance": 34857, "\u0120Herb": 34858, "\u0120harms": 34859, "packed": 34860, "\u0120Feinstein": 34861, "\u0120marrying": 34862, "\u0120blending": 34863, "\u0120Rates": 34864, "\u01201880": 34865, "\u0120wrink": 34866, "\u0120Unch": 34867, "\u0120Torch": 34868, "described": 34869, "\u0120humanoid": 34870, "ilitating": 34871, "\u0120Conv": 34872, "\u0120Feld": 34873, "IGHTS": 34874, "\u0120whistleblower": 34875, "ortmund": 34876, "etsy": 34877, "arrett": 34878, "\u0120Mono": 34879, "\u0120Ike": 34880, "\u0120CNBC": 34881, "\u0120WAY": 34882, "\u0120MDMA": 34883, "\u0120Individuals": 34884, "\u0120supplemental": 34885, "\u0120powerhouse": 34886, "\u0120Stru": 34887, "Focus": 34888, "aphael": 34889, "\u0120Colleg": 34890, "atti": 34891, "ZA": 34892, "\u0120perenn": 34893, "\u0120Signature": 34894, "\u0120Rodney": 34895, "\u0120cubes": 34896, "iddled": 34897, "\u0120Dante": 34898, "\u0120INV": 34899, "ilingual": 34900, "\u0120Cth": 34901, "\u0120sofa": 34902, "\u0120intimidate": 34903, "\u0120Roe": 34904, "\u0120Diplom": 34905, "\u0120Countries": 34906, "ayson": 34907, "\u0120extradition": 34908, "\u0120disabling": 34909, "\u0120Cardiff": 34910, "\u0120memorandum": 34911, "\u0120Trace": 34912, "\u0120???": 34913, "sector": 34914, "\u0120Rouhani": 34915, "\u0120Yates": 34916, "\u0120Freeze": 34917, "\u0120bladder": 34918, "Motor": 34919, "\u0120Promise": 34920, "antasy": 34921, "\u0120foreseeable": 34922, "\u0120Cologne": 34923, "container": 34924, "\u0120Trees": 34925, "\u0120Gors": 34926, "\u0120Sinclair": 34927, "\u0120barring": 34928, "keye": 34929, "\u0120slashed": 34930, 
"\u0120Statistical": 34931, "\u00e9\u0129": 34932, "\u0120\u00e2\u0138\u00ba": 34933, "Allows": 34934, "\u0120humility": 34935, "\u0120drilled": 34936, "\u0120Furn": 34937, "443": 34938, "\u0120sewage": 34939, "\u0120homepage": 34940, "\u0120courtyard": 34941, "\u0120vile": 34942, "\u0120subsidiaries": 34943, "ajo": 34944, "directory": 34945, "\u0120ammon": 34946, "Vers": 34947, "charges": 34948, "\u0120}}": 34949, "\u0120Chains": 34950, "\u0120246": 34951, "nob": 34952, "\u0120percept": 34953, "\u0120grit": 34954, "\u0120fishermen": 34955, "\u0120Iraqis": 34956, "\u0120DISTR": 34957, "\u0120FULL": 34958, "\u0120Evaluation": 34959, "graph": 34960, "atial": 34961, "\u0120cooperating": 34962, "\u0120melan": 34963, "\u0120enlightened": 34964, "\u0120ali": 34965, "tailed": 34966, "\u0120salute": 34967, "\u0120weakest": 34968, "\u0120Bulldogs": 34969, "UA": 34970, "\u0120Alloy": 34971, "\u0120semen": 34972, "ocene": 34973, "\u0120Williamson": 34974, "spr": 34975, ",\u00e2\u0122\u0136": 34976, "\u0120GF": 34977, "ittens": 34978, "Beat": 34979, "\u0120Junk": 34980, "iphate": 34981, "\u0120Farmers": 34982, "\u0120Bitcoins": 34983, "igers": 34984, "dh": 34985, "\u0120Loyal": 34986, "payer": 34987, "\u0120entertained": 34988, "\u0120penned": 34989, "\u0120coupon": 34990, "Queue": 34991, "\u0120weakening": 34992, "carry": 34993, "\u0120underestimate": 34994, "\u0120shootout": 34995, "\u0120charismatic": 34996, "\u0120Procedure": 34997, "\u0120prudent": 34998, "inances": 34999, "\u0120riches": 35000, "\u0120cortical": 35001, "\u0120strides": 35002, "\u0120drib": 35003, "\u0120Oilers": 35004, "540": 35005, "\u0120Perform": 35006, "\u0120Bangkok": 35007, "\u0120euth": 35008, "SER": 35009, "\u0120simplistic": 35010, "tops": 35011, "campaign": 35012, "Quality": 35013, "\u0120impoverished": 35014, "\u0120Eisenhower": 35015, "\u0120augment": 35016, "\u0120Harden": 35017, "\u0120intervened": 35018, "\u0120listens": 35019, "\u0120Kok": 35020, "\u0120sage": 35021, "\u0120rubbish": 35022, "\u0120Ded": 35023, "\u0120mull": 35024, "pelling": 35025, "\u0120videot": 35026, "Production": 35027, "DJ": 35028, "miah": 35029, "\u0120adaptations": 35030, "\u0120medically": 35031, "\u0120boarded": 35032, "\u0120arrogance": 35033, "\u0120scrapped": 35034, "\u0120oppress": 35035, "FORMATION": 35036, "\u0120junction": 35037, "415": 35038, "EEEE": 35039, "Skill": 35040, "\u0120subdu": 35041, "\u0120Suggest": 35042, "\u0120Pett": 35043, "\u0120lett": 35044, "\u0120Manip": 35045, "\u0120Caf": 35046, "\u0120Cooperation": 35047, "Ther": 35048, "\u0120regained": 35049, "\u00b6\u00e6": 35050, "reflect": 35051, "\u0120thugs": 35052, "\u0120Shelby": 35053, "\u0120dictates": 35054, "\u0120Weiner": 35055, "\u0120Hale": 35056, "\u0120battleground": 35057, "schild": 35058, "\u0120condol": 35059, "hunt": 35060, "ositories": 35061, "\u0120accuses": 35062, "Filename": 35063, "\u0120shri": 35064, "\u0120motivate": 35065, "\u0120reflections": 35066, "Null": 35067, "\u0120Lobby": 35068, "\u00a5\u00b5": 35069, "\u0120SATA": 35070, "\u0120Backup": 35071, "\u00d1\u0125": 35072, "nin": 35073, "\u0120Correction": 35074, "\u0120juicy": 35075, "utra": 35076, "\u0120Pric": 35077, "\u0120restraining": 35078, "\u0120Airbnb": 35079, "\u0120Arrest": 35080, "\u0120appropriations": 35081, "\u0120slopes": 35082, "\u0120manslaughter": 35083, "\u0120workings": 35084, "\u0120Huss": 35085, "\u0120Frey": 35086, "Leave": 35087, "\u0120Harmony": 35088, "\u0120Feder": 35089, "\u0120430": 35090, "\u0120trench": 35091, "\u0120gladly": 35092, "\u0120bullpen": 35093, 
"\u0120Gau": 35094, "bones": 35095, "\u0120groove": 35096, "\u0120pretext": 35097, "\u00e3\u0127\u012d": 35098, "\u0120transmitter": 35099, "\u0120Component": 35100, "\u0120underage": 35101, "\u0120Empires": 35102, "Tile": 35103, "\u0120oy": 35104, "\u0120Marvin": 35105, "\u0120CAS": 35106, "\u0120bloss": 35107, "\u0120replicated": 35108, "\u0120Mariners": 35109, "Marcus": 35110, "\u0120Blocks": 35111, "\u0120liberated": 35112, "\u0120butterfly": 35113, "Feel": 35114, "\u0120fermentation": 35115, "\u0120youtube": 35116, "\u0120offend": 35117, "\u0120Term": 35118, "resist": 35119, "\u0120cessation": 35120, "\u0120insurgency": 35121, "\u0120bir": 35122, "\u0120Raise": 35123, "595": 35124, "\u0120hypotheses": 35125, "502": 35126, "\u0120plaque": 35127, "ocrat": 35128, "\u0120jackets": 35129, "\u0120HuffPost": 35130, "among": 35131, "\u0120confer": 35132, "487": 35133, "\u0120Lilly": 35134, "\u0120adapting": 35135, "\u0120Fay": 35136, "\u0120shoved": 35137, "vec": 35138, "\u0120refine": 35139, "\u0120gon": 35140, "\u0120gunmen": 35141, "zai": 35142, "\u0120Shuttle": 35143, "\u0120Izan": 35144, "\u01201913": 35145, "\u0120plethora": 35146, "\u00c2\u00b7\u00c2\u00b7": 35147, "\u0120510": 35148, "\u0120puberty": 35149, "\u0120241": 35150, "\u0120Wealth": 35151, "\u0120Alma": 35152, "\u0120MEM": 35153, "\u0120Adults": 35154, "Cas": 35155, "prison": 35156, "Race": 35157, "\u0120waterproof": 35158, "\u0120athleticism": 35159, "\u0120capitalize": 35160, "\u0120Juice": 35161, "\u0120illuminated": 35162, "\u0120Pascal": 35163, "\u0120irritation": 35164, "\u0120Witnesses": 35165, "adle": 35166, "\u0120Astro": 35167, "\u0120fax": 35168, "\u0120Elvis": 35169, "Primary": 35170, "\u0120Lich": 35171, "\u0120Elves": 35172, "\u0120residing": 35173, "\u0120stumble": 35174, "319": 35175, "\u0120PKK": 35176, "\u0120adversaries": 35177, "DOS": 35178, "\u0120Ritual": 35179, "\u0120smear": 35180, "\u0120arson": 35181, "idental": 35182, "\u0120scant": 35183, "\u0120monarchy": 35184, "\u0120halftime": 35185, "\u0120residue": 35186, "\u0120indign": 35187, "\u0120Shaun": 35188, "\u0120Elm": 35189, "auri": 35190, "Aff": 35191, "WATCH": 35192, "\u0120Lyon": 35193, "helps": 35194, "361": 35195, "\u0120lobbyist": 35196, "\u0120diminishing": 35197, "\u0120outbreaks": 35198, "\u0120goats": 35199, "favorite": 35200, "\u0120Nah": 35201, "sonian": 35202, "\u0120Booster": 35203, "\u0120sandbox": 35204, "\u0120Fare": 35205, "\u0120Malta": 35206, "\u0120attRot": 35207, "\u0120MOR": 35208, "lde": 35209, "\u0120navigating": 35210, "Touch": 35211, "\u0120untrue": 35212, "\u0120Disaster": 35213, "\u0120ludicrous": 35214, "Password": 35215, "\u0120JFK": 35216, "blogspot": 35217, "416": 35218, "\u0120UNDER": 35219, "ernal": 35220, "\u0120delaying": 35221, "TOP": 35222, "\u0120implants": 35223, "\u0120AVG": 35224, "\u0120Huge": 35225, "attr": 35226, "\u0120journalistic": 35227, "\u0120Peyton": 35228, "\u0120IA": 35229, "Rap": 35230, "goal": 35231, "\u0120Programme": 35232, "\u0120smashing": 35233, "wives": 35234, "println": 35235, "\u0120Plague": 35236, "inus": 35237, "EEP": 35238, "\u0120cruiser": 35239, "\u0120Parish": 35240, "uminium": 35241, "\u0120occupants": 35242, "\u0120Jihad": 35243, "mop": 35244, "\u0120pint": 35245, "\u0120hect": 35246, "\u0120Mecca": 35247, "director": 35248, "\u0120Funding": 35249, "\u0120Mixed": 35250, "\u0120stag": 35251, "Tier": 35252, "\u0120gust": 35253, "\u0120brightly": 35254, "orsi": 35255, "\u0120uphill": 35256, "RD": 35257, "\u0120lesions": 35258, "\u0120Bundy": 35259, "livious": 35260, 
"\u0120biologist": 35261, "\u0120Faculty": 35262, "\u0120Authorization": 35263, "\u0120244": 35264, "Allow": 35265, "\u00ef\u00b8": 35266, "\u0120Giul": 35267, "\u0120pertinent": 35268, "otaur": 35269, "esse": 35270, "\u0120Roof": 35271, "\u0120unmanned": 35272, "351": 35273, "\u0120Shak": 35274, "\u0120Orient": 35275, "\u0120endanger": 35276, "Dir": 35277, "\u0120replen": 35278, "edient": 35279, "\u0120tailor": 35280, "\u0120gadgets": 35281, "\u0120audible": 35282, "\u00e2\u013a\u0128": 35283, "Nice": 35284, "\u0120bombard": 35285, "\u0120Rape": 35286, "\u0120defiance": 35287, "\u0120TWO": 35288, "\u0120Filipino": 35289, "\u0120unaffected": 35290, "ervatives": 35291, "\u0120soared": 35292, "\u0120Bolton": 35293, "\u0120compromising": 35294, "\u0120Brewers": 35295, "RAL": 35296, "\u0120AHL": 35297, "icycle": 35298, "\u0120vampires": 35299, "\u0120dipped": 35300, "oyer": 35301, "\u0120XIII": 35302, "\u0120sideways": 35303, "\u0120Waste": 35304, "\u0120Diss": 35305, "\u0120\u00e2\u0136\u013e\u00e2\u0136\u0122\u00e2\u0136\u0122": 35306, "$.": 35307, "\u0120habitats": 35308, "\u0120Beef": 35309, "truth": 35310, "trained": 35311, "split": 35312, "Rus": 35313, "Andy": 35314, "\u0120Bram": 35315, "REP": 35316, "pid": 35317, "\u00e8\u00a3\u0127": 35318, "\u0120Mutant": 35319, "Anim": 35320, "\u0120Marina": 35321, "\u0120futile": 35322, "highest": 35323, "frequency": 35324, "\u0120epilepsy": 35325, "\u0120coping": 35326, "\u0120concise": 35327, "\u0120tracing": 35328, "\u0120SUN": 35329, "panel": 35330, "\u0120Sophie": 35331, "\u0120Crowley": 35332, "\u0120Adolf": 35333, "\u0120Shooter": 35334, "\u0120shaky": 35335, "\u0120IG": 35336, "\u0120Lies": 35337, "\u0120Barber": 35338, "pkg": 35339, "\u0120uptake": 35340, "\u0120predatory": 35341, "ULTS": 35342, "/**": 35343, "\u0120intoxicated": 35344, "\u0120Westbrook": 35345, "odder": 35346, "hement": 35347, "\u0120baseman": 35348, "APD": 35349, "storage": 35350, "\u0120Fifty": 35351, "editor": 35352, "GEN": 35353, "UTION": 35354, "irting": 35355, "\u0120sewing": 35356, "rift": 35357, "\u0120agony": 35358, "\u0120Sands": 35359, "\u0120254": 35360, "Cash": 35361, "\u0120lodge": 35362, "\u0120punt": 35363, "Natural": 35364, "\u0120Ideas": 35365, "\u0120erroneous": 35366, "\u0120Sensor": 35367, "\u0120Hannity": 35368, "\u01201921": 35369, "\u0120mould": 35370, "\u0120Gon": 35371, "kaya": 35372, "\u0120anonymously": 35373, "\u0120KEY": 35374, "\u0120simulator": 35375, "Winter": 35376, "\u0120streamed": 35377, "507": 35378, "?\",": 35379, "\u0120teased": 35380, "\u0120coefficient": 35381, "\u0120wartime": 35382, "\u0120THR": 35383, "''.": 35384, "\u0120Banking": 35385, "mpire": 35386, "\u0120fandom": 35387, "\u0120lia": 35388, "Ga": 35389, "\u0120downhill": 35390, "\u0120interpreting": 35391, "Individual": 35392, "Norm": 35393, "\u0120jealousy": 35394, "bitcoin": 35395, "\u0120pleasures": 35396, "\u0120Toys": 35397, "\u0120Chevrolet": 35398, "\u0120Advisor": 35399, "IZE": 35400, "\u0120receptions": 35401, "706": 35402, "Cro": 35403, "\u0120262": 35404, "\u0120citrus": 35405, "iru": 35406, "Reviewer": 35407, "jected": 35408, "UES": 35409, "anz": 35410, "1981": 35411, "\u0120Worker": 35412, "\u0120complied": 35413, "orescent": 35414, "continental": 35415, "Ton": 35416, "\u0120Prism": 35417, "\u0120Sheep": 35418, "\u0120288": 35419, "nox": 35420, "\u0120Vog": 35421, "Ord": 35422, "\u0120realms": 35423, "tek": 35424, "\u0120irrigation": 35425, "\u0120bicycles": 35426, "\u0120electronically": 35427, "poly": 35428, "tall": 35429, "());": 35430, "\u0120aesthetics": 
35431, "\u0120Integrated": 35432, "Explore": 35433, "\u0120dunk": 35434, "476": 35435, "pain": 35436, "\u0120Jacques": 35437, "\u0120Dmit": 35438, "Frames": 35439, "\u0120reunited": 35440, "\u0120humid": 35441, "Dro": 35442, "Political": 35443, "\u0120youthful": 35444, "\u0120entails": 35445, "\u0120mosquito": 35446, "363": 35447, "species": 35448, "\u0120coordinating": 35449, "\u0120Mayhem": 35450, "\u0120Magnus": 35451, "Mount": 35452, "Improved": 35453, "\u0120STATE": 35454, "ATTLE": 35455, "\u0120flowed": 35456, "\u0120tackled": 35457, "\u0120fashioned": 35458, "\u0120reorgan": 35459, "ivari": 35460, "finger": 35461, "\u0120reluctantly": 35462, "etting": 35463, "\u0120Vand": 35464, "young": 35465, "\u0120Garland": 35466, "\u0120presumption": 35467, "\u0120amenities": 35468, "\u0120Pleasant": 35469, "onential": 35470, "\u0120Oxy": 35471, "\u0120morals": 35472, "\u0120Yah": 35473, "Ready": 35474, "Simon": 35475, "Enh": 35476, "Demon": 35477, "\u0120clich": 35478, "Monitor": 35479, "\u0120DU": 35480, "\u0120welcomes": 35481, "\u0120standout": 35482, "\u0120dreadful": 35483, "\u0120bananas": 35484, "\u0120balloons": 35485, "hooting": 35486, "basic": 35487, "\u0120suffix": 35488, "\u0120duly": 35489, "cano": 35490, "Chain": 35491, "atos": 35492, "\u0120geopolitical": 35493, "\u0120(&": 35494, "\u0120Gemini": 35495, "\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124\u00c3\u0125\u00c3\u0124": 35496, "\u0120acquitted": 35497, "Luck": 35498, "protect": 35499, "1024": 35500, "\u0120scarcity": 35501, "\u0120mindfulness": 35502, "ecided": 35503, "DN": 35504, "prime": 35505, "\u0120Presidents": 35506, "\u0120VIDEO": 35507, "\u0120(\u00e2\u012a\u0134": 35508, "addock": 35509, "NOR": 35510, "\u0120Pru": 35511, "pun": 35512, "\u0120LOL": 35513, "))))": 35514, "\u0120Liqu": 35515, "\u0120SAS": 35516, "\u0120styling": 35517, "\u0120punishments": 35518, "\u0120numb": 35519, "\u0120ascertain": 35520, "\u0120Rockies": 35521, "flu": 35522, "Thumbnail": 35523, "\u0120perpetrated": 35524, "\u0120Semi": 35525, "\u0120disarm": 35526, "\u0120Older": 35527, "\u0120Exception": 35528, "\u0120exponentially": 35529, "\u0120Communities": 35530, "\u0120abolish": 35531, "\u0120Partner": 35532, "ptoms": 35533, "\u0120777": 35534, "\u0120Foley": 35535, "\u0120Cases": 35536, "\u0120grease": 35537, "\u0120Rebirth": 35538, "Ground": 35539, "\u0120;)": 35540, "\u0120Doctrine": 35541, "ikini": 35542, "Ye": 35543, "\u0120Blossom": 35544, "\u0120persists": 35545, "bill": 35546, "\u0120infusion": 35547, "\u0120buddies": 35548, "911": 35549, "\u0120Patient": 35550, "\u0120demos": 35551, "\u0120acquaintance": 35552, "\u0120Paw": 35553, "atari": 35554, "\u0120xml": 35555, "\u0120fascination": 35556, "\u0120Serve": 35557, "\u00cf\u0124": 35558, "branded": 35559, "\u0120az": 35560, "Returns": 35561, "\u0120overshadow": 35562, "\u0120roam": 35563, 
"\u0120speedy": 35564, "numbered": 35565, "helial": 35566, "\u0120disciple": 35567, "\u0120assurances": 35568, "given": 35569, "pecting": 35570, "\u0120Natalie": 35571, "\u00e7\u0136\u00b0": 35572, "\u0120mosquitoes": 35573, "rotein": 35574, "\u0120numeric": 35575, "\u0120independents": 35576, "\u0120transitional": 35577, "\u0120reactionary": 35578, "\u0120Mechdragon": 35579, "doctor": 35580, "\u0120shortest": 35581, "\u0120sequential": 35582, "\u0120Bac": 35583, "\u0120Accounts": 35584, "\u00e3\u0123\u012e": 35585, "achy": 35586, "ractive": 35587, "\u0120Regiment": 35588, "\u0120breathtaking": 35589, "fficiency": 35590, "\u0120Bates": 35591, "\u0120311": 35592, "\u0120wardrobe": 35593, "fts": 35594, "\u0120Berk": 35595, "Simply": 35596, "\u0120Riverside": 35597, "ivering": 35598, "idential": 35599, "lucent": 35600, "\u0120enriched": 35601, "\u0120Conver": 35602, "\u0120Giving": 35603, "\u00e3\u0125\u013b": 35604, "\u0120legalize": 35605, "\u0120FTC": 35606, "\u0120freaking": 35607, "Mix": 35608, "\u0120terrestrial": 35609, "esian": 35610, "cients": 35611, "Wing": 35612, "LOAD": 35613, "\u0120ledge": 35614, "\u0120Violent": 35615, "\u0120Metall": 35616, "\u0120308": 35617, "\u0120southeastern": 35618, "hetto": 35619, "Meat": 35620, "\u0120slowdown": 35621, "\u0120retreated": 35622, "Jeremy": 35623, "endas": 35624, "*****": 35625, "eric": 35626, "\u0120reins": 35627, "oppable": 35628, "\u0120Humanity": 35629, "earances": 35630, "rigan": 35631, "Camera": 35632, "\u0120waivers": 35633, "soc": 35634, "\u0120alteration": 35635, "transform": 35636, "\u0120Cemetery": 35637, "506": 35638, "\u0120indefinite": 35639, "\u0120stimulating": 35640, "yg": 35641, "603": 35642, "\u0120Sop": 35643, "\u0120descriptive": 35644, "Phase": 35645, "\u0120Edmund": 35646, "\u0120pneumonia": 35647, "ventus": 35648, "Amb": 35649, "\u0120laboratories": 35650, "\u0120Exclusive": 35651, "ugar": 35652, "Were": 35653, "\u0120malfunction": 35654, "\u0120homosexuals": 35655, "\u0120-------": 35656, "uni": 35657, "\u0120turbines": 35658, "\u0120Equity": 35659, "Du": 35660, "\u0120minded": 35661, "\u0120RH": 35662, "\u0120Blackhawks": 35663, "\u0120feats": 35664, "\u01201700": 35665, "repl": 35666, "362": 35667, "laden": 35668, "\u0120indispensable": 35669, "lyss": 35670, "tti": 35671, "\u0120reel": 35672, "\u0120diverted": 35673, "\u0120likeness": 35674, "\u0120subscriptions": 35675, "\u0120fingert": 35676, "\u0120filthy": 35677, "destruct": 35678, "draft": 35679, "\u0120Bernardino": 35680, "launch": 35681, "\u0120perplex": 35682, "\u0120SUM": 35683, "carb": 35684, "\u0120sweater": 35685, "\u0120Venture": 35686, "\u0120Jag": 35687, "\u0120Celeb": 35688, "\u0120Voters": 35689, "\u0120steadfast": 35690, "\u0120athletics": 35691, "\u0120Hanson": 35692, "\u0120Drac": 35693, "Tracker": 35694, "\u0120commend": 35695, "\u0120Presidency": 35696, "\u0120DID": 35697, "informed": 35698, "\u0120webpage": 35699, "Pretty": 35700, "\u0120forcefully": 35701, "\u00e3\u0125\u0125\u00e3\u0124\u00af": 35702, "\u0120relocation": 35703, "\u0120satire": 35704, "\u00e2\u012b": 35705, "\u0120Sunderland": 35706, "\u00e6\u0126": 35707, "Voice": 35708, "????????": 35709, "\u0120informant": 35710, "\u0120bowel": 35711, "\u0120Uniform": 35712, "\u0120...\"": 35713, "\u0120purge": 35714, "\u0120picnic": 35715, "\u0120Umb": 35716, "\u0120UPDATE": 35717, "\u0120Sapphire": 35718, "\u0120Stall": 35719, "learn": 35720, "\u0120objectively": 35721, "\u0120obliter": 35722, "\u0120loophole": 35723, "\u0120journeys": 35724, "\u0120omission": 35725, "Pros": 35726, 
"\u0120Sidney": 35727, "ploma": 35728, "\u0120sprayed": 35729, "\u0120guru": 35730, "\u0120traitor": 35731, "\u0120timet": 35732, "\u0120snapping": 35733, "\u0120Sevent": 35734, "urnal": 35735, "\u0120Ukip": 35736, "\u0120bowed": 35737, "poral": 35738, "liberal": 35739, "Ros": 35740, "Questions": 35741, "iOS": 35742, "\u0120summarize": 35743, "STAT": 35744, "\u01201850": 35745, "apest": 35746, "\u0120lender": 35747, "\u0120Variable": 35748, "bringing": 35749, "\u0120LORD": 35750, ",)": 35751, "\u0120collapses": 35752, "xiety": 35753, "\u0120Ned": 35754, "YD": 35755, "\u0120Scha": 35756, "\u0120antibody": 35757, "\u0120disband": 35758, "yre": 35759, "illusion": 35760, "\u0120rover": 35761, "shed": 35762, "\u0120Hirosh": 35763, "cci": 35764, "\u0120calam": 35765, "\u0120Morton": 35766, "Pinterest": 35767, "\u01201928": 35768, "\u0120Euras": 35769, "ordes": 35770, "\u0120fences": 35771, "\u0120Inventory": 35772, "\u0120Valencia": 35773, "\u0120Ud": 35774, "\u0120Tiff": 35775, "\u0120sque": 35776, "\u0120quotation": 35777, "\u0120troublesome": 35778, "erker": 35779, "QUEST": 35780, "\u0120Kingdoms": 35781, "south": 35782, "\u0120levy": 35783, "Prince": 35784, "\u0120Sting": 35785, "\u0120nicknamed": 35786, "\u0120appe": 35787, "\u0120photographic": 35788, "\u0120corpus": 35789, "reference": 35790, "\u0120Trog": 35791, "Unt": 35792, ")=(": 35793, "\u0120Latvia": 35794, "\u0120activating": 35795, "\u0120licensee": 35796, "\u0120disparities": 35797, "\u0120Newsletter": 35798, "\u00e3\u0125\u0125\u00e3\u0125\u012a": 35799, "\u0120freeing": 35800, "\u0120Jeep": 35801, "\u0120Perception": 35802, "insk": 35803, "\u0120silicone": 35804, "\u0120Hayden": 35805, "Lean": 35806, "\u0120Suzuki": 35807, "ibrarian": 35808, "668": 35809, "\u0120spor": 35810, "\u0120correlations": 35811, "aghetti": 35812, "\u0120tuber": 35813, "\u0120IPCC": 35814, "ilus": 35815, "\u0120Vu": 35816, "\u0120wealthiest": 35817, "\u0120Carbuncle": 35818, "anza": 35819, "\u0120fooled": 35820, "\u0120Zur": 35821, "\u0120daddy": 35822, "rano": 35823, "ilian": 35824, "\u0120knockout": 35825, "fman": 35826, "required": 35827, "\u0120Wikileaks": 35828, "\u0120Duffy": 35829, "ONT": 35830, "\u0120insol": 35831, "\u0120Objects": 35832, "\u0120bou": 35833, "\u0120Nordic": 35834, "\u0120Insert": 35835, "scan": 35836, "\u0120dancers": 35837, "\u0120idiots": 35838, "majority": 35839, "\u0120Neville": 35840, "\u0120FreeBSD": 35841, "\u0120tart": 35842, "panic": 35843, "690": 35844, "\u0120cocoa": 35845, "\u0120sampled": 35846, "\u0120lookup": 35847, "Indust": 35848, "\u0120injections": 35849, "genre": 35850, "\u0120au": 35851, "\u0120roadway": 35852, "\u0120genitals": 35853, "Kind": 35854, "\u0120Examiner": 35855, "\u0120Yaz": 35856, "Fresh": 35857, "\u0120paralysis": 35858, "\u0120Aluminum": 35859, "\u0120reap": 35860, "ok\u00c3\u00a9": 35861, "\u0120sloppy": 35862, "\u0120Tunnel": 35863, "posium": 35864, "nery": 35865, "enic": 35866, "\u0120herbal": 35867, "\u0120Outer": 35868, "\u0120Builder": 35869, "\u0120incur": 35870, "\u0120ideologies": 35871, "\u0120backups": 35872, "consuming": 35873, "\u0120Detect": 35874, "deck": 35875, "\u0120KNOW": 35876, "\u0120Gret": 35877, "\u0120MIC": 35878, "\u0120toughness": 35879, "\u0120Exhibit": 35880, "\u0120hive": 35881, "Les": 35882, "\u0120SCHOOL": 35883, "\u0120Atari": 35884, "alde": 35885, "\u0120Null": 35886, "andestine": 35887, "mouse": 35888, "\u0120brigade": 35889, "489": 35890, "\u0120revol": 35891, "\u0120Lawson": 35892, "\u0120Wah": 35893, "opoly": 35894, "ebted": 35895, "\u0120Saunders": 
35896, "\u0120313": 35897, "\u0120Winc": 35898, "\u0120taboo": 35899, "\u0120Helmet": 35900, "\u0120wedge": 35901, "chip": 35902, "\u0120Tina": 35903, "bg": 35904, "\u0120infuri": 35905, "rn": 35906, "\u0120anomalies": 35907, "\u0120Sync": 35908, "\u0120Exam": 35909, "\u0120Commit": 35910, "\u0120Diary": 35911, "\u0120ALSO": 35912, "\u0120Debor": 35913, "omedical": 35914, "\u0120comprehension": 35915, "655": 35916, "\u0120empowering": 35917, "\u0120ire": 35918, "\u0120juices": 35919, "\u0120ETH": 35920, "\u0120Boxing": 35921, "=\"/": 35922, "\u0120facilitated": 35923, "poke": 35924, "\u0120Parsons": 35925, "\u0120Moder": 35926, "travel": 35927, "\u0120civilizations": 35928, "\u0120libertarians": 35929, "\u0120rune": 35930, "\u0120Clarks": 35931, "athed": 35932, "\u0120campaigners": 35933, "\u0120Dispatch": 35934, "\u0120Fahrenheit": 35935, "\u0120Capcom": 35936, "----------": 35937, "\u0120lace": 35938, "\u0120draining": 35939, "\u0120liner": 35940, "\u0120Artificial": 35941, "\u00c3\u00a9n": 35942, "task": 35943, "]).": 35944, "\u0120GMO": 35945, "\u0120Operator": 35946, "ordinary": 35947, "\u0120Influence": 35948, "\u0120Ups": 35949, "\u0120potency": 35950, "ussen": 35951, "ospons": 35952, "\u0120Swim": 35953, "\u0120Deadline": 35954, "Unity": 35955, "\u0120culinary": 35956, "\u0120enlightenment": 35957, "\u0120wearer": 35958, "\u0120mined": 35959, "\u0120ply": 35960, "\u0120incest": 35961, "\u0120DVDs": 35962, "Walk": 35963, "BTC": 35964, "Trade": 35965, "\u0120deval": 35966, "iband": 35967, "\u0120Oversight": 35968, "Palestinian": 35969, "\u0120dart": 35970, "\u0120mul": 35971, "LR": 35972, "\u0120removable": 35973, "\u0120Realms": 35974, "\u00ec\u013f": 35975, "\u0120miscar": 35976, "\u0120Vulkan": 35977, "685": 35978, "\u00c3\u00a8re": 35979, "\u0120Sap": 35980, "\u0120merging": 35981, "\u0120Carly": 35982, "chester": 35983, "\u0120brisk": 35984, "\u0120luxurious": 35985, "\u0120Generator": 35986, "\u0120bitterness": 35987, "\u0120edible": 35988, "\u0120243": 35989, "TG": 35990, "\u0120rectangle": 35991, "WithNo": 35992, "below": 35993, "Jenn": 35994, "\u0120darkest": 35995, "\u0120hitch": 35996, "\u0120dosage": 35997, "\u0120scaven": 35998, "\u0120Keller": 35999, "\u0120Illustrated": 36000, "Certainly": 36001, "\u0120Mavericks": 36002, "Marginal": 36003, "\u0120diarrhea": 36004, "\u0120enormously": 36005, "\u0120999": 36006, "shr": 36007, "quart": 36008, "\u0120adamant": 36009, "\u0120Mew": 36010, "\u0120renovation": 36011, "\u0120cervical": 36012, "\u0120Percentage": 36013, "eners": 36014, "\u0120Kimber": 36015, "\u0120floats": 36016, "\u0120dex": 36017, "\u0120Witcher": 36018, "\u0120Swansea": 36019, "dm": 36020, "\u0120salty": 36021, "yellow": 36022, "\u0120cape": 36023, "\u0120Drain": 36024, "\u0120Paula": 36025, "\u0120Toledo": 36026, "lesi": 36027, "Magazine": 36028, "\u0120Wick": 36029, "\u0120Mn": 36030, "\u0120Ack": 36031, "\u0120Riding": 36032, "ASON": 36033, "\u0120homophobic": 36034, "ARP": 36035, "\u0120wandered": 36036, "CPU": 36037, "oodoo": 36038, "\u0120Pipe": 36039, "\u0120tightening": 36040, "\u0120Butt": 36041, "318": 36042, "\u0120deserted": 36043, "Session": 36044, "\u0120facilitating": 36045, "Jump": 36046, "\u0120emergencies": 36047, "OWER": 36048, "\u0120exhaustive": 36049, "\u0120AFTER": 36050, "\u0120heartbeat": 36051, "\u0120Label": 36052, "acky": 36053, "\u0120Certified": 36054, "iltration": 36055, "Ze": 36056, "\u0120Utt": 36057, "\u01201300": 36058, "\u0120presume": 36059, "\u0120Disp": 36060, "\u0120surged": 36061, "\u0120dolls": 36062, "Columb": 
36063, "\u0120chimpan": 36064, "\u0120Razor": 36065, "\u0120ticks": 36066, "\u0120councillor": 36067, "\u0120pilgrimage": 36068, "\u0120Rebels": 36069, "\u0120QC": 36070, "\u0120Auction": 36071, "xia": 36072, "ikk": 36073, "bred": 36074, "\u0120insertion": 36075, "\u0120coarse": 36076, "dB": 36077, "SEE": 36078, "\u0120Zap": 36079, "\u0120Foo": 36080, "\u0120contempor": 36081, "\u0120Quarterly": 36082, "otions": 36083, "\u0120Alchemist": 36084, "\u0120Trey": 36085, "\u0120Duo": 36086, "Sweet": 36087, "804": 36088, "\u0120Giov": 36089, "\u0120funn": 36090, "Nin": 36091, "hoff": 36092, "\u0120ramifications": 36093, "\u01201922": 36094, "\u0120Experts": 36095, "azes": 36096, "\u0120garments": 36097, "arial": 36098, "\u0120Nab": 36099, "\u0120257": 36100, "\u0120Ved": 36101, "\u0120humorous": 36102, "\u0120Pompe": 36103, "\u0120nylon": 36104, "\u0120lurking": 36105, "\u0120Sergey": 36106, "\u0120Mattis": 36107, "\u0120misogyny": 36108, "\u0120Components": 36109, "\u0120Watching": 36110, "\u0120Folk": 36111, "ractical": 36112, "Bush": 36113, "\u0120taped": 36114, "\u0120grouping": 36115, "\u0120beads": 36116, "\u01202048": 36117, "\u0120condu": 36118, "querque": 36119, "Reading": 36120, "\u0120grievances": 36121, "Ultra": 36122, "\u0120endpoint": 36123, "Hig": 36124, "\u0120Static": 36125, "\u0120Scarborough": 36126, "Lua": 36127, "\u0120Messi": 36128, "aqu": 36129, "\u0120PsyNet": 36130, "\u0120Rudd": 36131, "\u0120avenue": 36132, "vp": 36133, "Jer": 36134, "\u0120shady": 36135, "\u0120Resist": 36136, "\u0120Artemis": 36137, "\u0120careless": 36138, "\u0120brokers": 36139, "\u0120temperament": 36140, "\u0120520": 36141, "Tags": 36142, "\u0120Turning": 36143, "\u0120uttered": 36144, "\u0120pedd": 36145, "\u0120improvised": 36146, "\u0120:(": 36147, "\u0120tabl": 36148, "\u0120plains": 36149, "1600": 36150, "pressure": 36151, "\u0120Essence": 36152, "margin": 36153, "friends": 36154, "\u0120Restoration": 36155, "\u0120pollut": 36156, "\u0120Poker": 36157, "\u0120Augustine": 36158, "\u0120CIS": 36159, "\u0120SEAL": 36160, "orama": 36161, "\u0120thwart": 36162, "seek": 36163, "\u0120pagan": 36164, "\u00c2\u00ba": 36165, "cpu": 36166, "\u0120garn": 36167, "\u0120assortment": 36168, "\u0120ILCS": 36169, "tower": 36170, "Recommended": 36171, "\u0120unborn": 36172, "\u0120RandomRedditor": 36173, "\u0120RandomRedditorWithNo": 36174, "\u0120paralyzed": 36175, "\u0120eruption": 36176, "\u0120intersect": 36177, "\u0120Stoke": 36178, "\u0120Sco": 36179, "Bind": 36180, "\u00e5\u00be": 36181, "\u0120PNG": 36182, "\u0120Negative": 36183, "\u0120NOAA": 36184, "Leon": 36185, "\u0120alloy": 36186, "\u0120Lama": 36187, "\u0120Diversity": 36188, "575": 36189, "\u0120underestimated": 36190, "\u0120Scor": 36191, "\u0120mural": 36192, "\u0120busted": 36193, "soon": 36194, "lif": 36195, "\u0120nonex": 36196, "\u0120allergy": 36197, "\u0120Underworld": 36198, "\u0120Rays": 36199, "\u0120Blasio": 36200, "\u0120hrs": 36201, "\u0120Dir": 36202, "\u0120327": 36203, "byter": 36204, "\u0120replacements": 36205, "\u0120activates": 36206, "rived": 36207, "MH": 36208, "\u0120pans": 36209, "\u0120HI": 36210, "\u0120longitudinal": 36211, "\u0120nuisance": 36212, "aler": 36213, "\u0120swell": 36214, "\u0120Signed": 36215, "sci": 36216, "\u0120Isles": 36217, "\u0120AGA": 36218, "\u0120defiant": 36219, "\u0120sonic": 36220, "ocon": 36221, "KC": 36222, "\u0120Aim": 36223, "tie": 36224, "ahah": 36225, "\u0120mL": 36226, "DX": 36227, "\u0120bisc": 36228, "\u0120Billboard": 36229, "\u0120SYSTEM": 36230, "NEY": 36231, "gaard": 36232, 
"\u0120distressed": 36233, "formerly": 36234, "Alan": 36235, "\u0120chefs": 36236, "\u0120optics": 36237, "\u0120Comet": 36238, "\u0120AMC": 36239, "\u0120redesigned": 36240, "irmation": 36241, "\u0120sightings": 36242, "382": 36243, "311": 36244, "\u0120WB": 36245, "\u0120contraction": 36246, "\u0120TOTAL": 36247, "Dual": 36248, "\u0120startled": 36249, "\u0120understandably": 36250, "\u0120sunglasses": 36251, "ETHOD": 36252, "\u0120docker": 36253, "\u0120surfing": 36254, "\u0120HEL": 36255, "\u0120Slack": 36256, "tones": 36257, "\u0120shalt": 36258, "Visual": 36259, "498": 36260, "Department": 36261, "cussion": 36262, "\u0120unrestricted": 36263, "\u0120tad": 36264, "\u0120rename": 36265, "employed": 36266, "\u0120educating": 36267, "\u0120grinned": 36268, "bedroom": 36269, "\u0120Activities": 36270, "\u0120Velvet": 36271, "\u0120SWAT": 36272, "\u0120shuffle": 36273, "igor": 36274, "\u0120saturation": 36275, "Finding": 36276, "cream": 36277, "icter": 36278, "\u0120vodka": 36279, "tracking": 36280, "tec": 36281, "\u0120foreground": 36282, "iesta": 36283, "\u0120vehement": 36284, "\u0120ECB": 36285, "\u0120Tie": 36286, "Ey": 36287, "\u0120turtles": 36288, "\u0120Railroad": 36289, "\u0120Katz": 36290, "\u0120Frames": 36291, "\u0120menace": 36292, "\u0120Fellowship": 36293, "\u0120Essential": 36294, "uggish": 36295, "\u0120drip": 36296, "chwitz": 36297, "\u0120Kyoto": 36298, "sb": 36299, "\u0120Nina": 36300, "Parameter": 36301, "\u0120alarms": 36302, "\u0120Claud": 36303, "\u0120pioneering": 36304, "\u0120chiefly": 36305, "\u0120Scream": 36306, "Collection": 36307, "\u0120thankfully": 36308, "\u0120Ronaldo": 36309, "\u00e5\u0143\u0132": 36310, "strip": 36311, "\u0120Disneyland": 36312, "commercial": 36313, "Seeing": 36314, "Soul": 36315, "\u0120evacuate": 36316, "\u0120civ": 36317, "\u0120Ashe": 36318, "\u0120divides": 36319, "\u0120Dagger": 36320, "rehensive": 36321, "\u0120berries": 36322, "\u0120DF": 36323, "\u0120sushi": 36324, "\u0120plurality": 36325, "WI": 36326, "\u0120disadvantaged": 36327, "\u0120battalion": 36328, "obiles": 36329, "451": 36330, "\u0120cling": 36331, "\u0120undeniable": 36332, "\u0120Lounge": 36333, "\u0120haunt": 36334, "phe": 36335, "\u0120quantify": 36336, "\u0120differed": 36337, "\u0120[*]": 36338, "\u0120Viz": 36339, "cum": 36340, "slave": 36341, "\u0120videog": 36342, "\u0120quar": 36343, "\u0120bundles": 36344, "\u0120Alonso": 36345, "tackle": 36346, "\u0120neuronal": 36347, "\u0120landslide": 36348, "confirmed": 36349, "\u0120Depth": 36350, "\u0120renewables": 36351, "Bear": 36352, "\u0120Macedonia": 36353, "\u0120jerseys": 36354, "\u0120bunk": 36355, "\u0120Spawn": 36356, "\u0120Controls": 36357, "\u0120Buchanan": 36358, "\u0120robotics": 36359, "\u0120emphasizing": 36360, "\u0120Tutorial": 36361, "hyp": 36362, "iston": 36363, "\u0120monumental": 36364, "\u00e6\u00b0": 36365, "\u0120Carry": 36366, "\u0120tbsp": 36367, "enance": 36368, "Hill": 36369, "arthed": 36370, "\u0120rotten": 36371, "Dean": 36372, "\u0120twisting": 36373, "\u0120goodwill": 36374, "\u0120immersion": 36375, "Living": 36376, "\u0120brushes": 36377, "\u0120CGI": 36378, "\u0120Atk": 36379, "traditional": 36380, "\u0120phantom": 36381, "\u0120Stamina": 36382, "\u0120expansions": 36383, "\u0120Marin": 36384, "\u0120embarked": 36385, "\u0120Eg": 36386, "intestinal": 36387, "\u0120PEOPLE": 36388, "\u0120Booth": 36389, "\u0120Appalach": 36390, "\u0120relegated": 36391, "VT": 36392, "MIT": 36393, "\u0120muster": 36394, "\u0120withdrawing": 36395, "\u0120microscope": 36396, "\u0120Gathering": 
36397, "\u0120Crescent": 36398, "\u0120Argentine": 36399, "\u0120Decre": 36400, "\u0120Dominic": 36401, "\u0120buds": 36402, "antage": 36403, "\u0120Ion": 36404, "\u0120widened": 36405, "ONSORED": 36406, "\u0120Gloves": 36407, "iannopoulos": 36408, "razen": 36409, "feel": 36410, "\u0120repayment": 36411, "\u0120hindsight": 36412, "\u0120REALLY": 36413, "\u0120Pistol": 36414, "\u0120Brah": 36415, "\u0120watts": 36416, "\u0120survives": 36417, "\u0120flurry": 36418, "issy": 36419, "Alert": 36420, "\u0120Uruguay": 36421, "Phoenix": 36422, "Slow": 36423, "\u0120Grave": 36424, "\u0120Fir": 36425, "\u0120manageable": 36426, "\u0120tariff": 36427, "\u0120UDP": 36428, "\u0120Pistons": 36429, "\u0120Nigerian": 36430, "\u0120strikeouts": 36431, "\u0120cosmetics": 36432, "whelming": 36433, "fab": 36434, "cape": 36435, "proxy": 36436, "\u0120rethink": 36437, "\u0120overcoming": 36438, "simple": 36439, "\u0120woo": 36440, "\u0120distracting": 36441, "\u0120Stanton": 36442, "\u0120Tulsa": 36443, "\u0120Dock": 36444, "659": 36445, "\u0120discord": 36446, "\u0120Emacs": 36447, "\u0120Ves": 36448, "\u0120ROB": 36449, "\u0120reassuring": 36450, "\u0120consortium": 36451, "Muslims": 36452, "321": 36453, "\u0120prompts": 36454, "sei": 36455, "\u0120Hitch": 36456, "imposed": 36457, "\u0120Fool": 36458, "\u0120indiscrim": 36459, "wrong": 36460, "buquerque": 36461, "Davis": 36462, "!]": 36463, "\u0120timeless": 36464, "\u0120NEED": 36465, "\u0120pesticide": 36466, "\u0120rallying": 36467, "\u0120Calder": 36468, "\u0120\u00e5\u00a4": 36469, "\u0120xp": 36470, "\u0120Unle": 36471, "\u0120Export": 36472, "luaj": 36473, "Buff": 36474, ")[": 36937, "\u0120sqor": 36938, "Saudi": 36939, "\u0120istg": 36940, "\u0120indulge": 36941, "proc": 36942, "\u0120disgusted": 36943, "\u0120compounded": 36944, "\u0120nem": 36945, "\u0120schooling": 36946, "\u0120Cure": 36947, "processing": 36948, "Sol": 36949, "\u0120proverb": 36950, "itized": 36951, "\u0120Alvarez": 36952, "\u0120scarf": 36953, "\u0120rectangular": 36954, "reve": 36955, "\u0120hormonal": 36956, "\u0120Stress": 36957, "itizen": 36958, "\u0120425": 36959, "girls": 36960, "\u0120Noir": 36961, "\u0120Rapp": 36962, "\u0120marches": 36963, "church": 36964, "\u0120Uses": 36965, "\u0120405": 36966, "\u0120Berm": 36967, "\u0120ordinances": 36968, "\u0120Judgment": 36969, "Charges": 36970, "\u0120Zin": 36971, "\u0120dusty": 36972, "\u0120strawberries": 36973, "\u0120perce": 36974, "\u0120Thur": 36975, "\u0120Deborah": 36976, "netflix": 36977, "\u0120Lambert": 36978, "\u0120amused": 36979, "\u0120Guang": 36980, "YOU": 36981, "RGB": 36982, "\u0120CCTV": 36983, "\u0120fiat": 36984, "rang": 36985, "\u0120federation": 36986, "\u0120Mant": 36987, "\u0120Bust": 36988, "\u0120Mare": 36989, "respective": 36990, "\u0120Migration": 36991, "\u0120BIT": 36992, "590": 36993, "\u0120patriotism": 36994, "\u0120outlining": 36995, "region": 36996, "\u0120Jos\u00c3\u00a9": 36997, "\u0120blasting": 36998, "\u0120Ezra": 36999, "Bs": 37000, "\u0120undermines": 37001, "\u0120Smooth": 37002, "\u0120clashed": 37003, "radio": 37004, "\u0120transitioning": 37005, "\u0120Buccaneers": 37006, "\u0120Owl": 37007, "\u0120plugs": 37008, "\u0120hiatus": 37009, "\u0120Pinball": 37010, "\u0120mig": 37011, "\u0120Nutr": 37012, "\u0120Wolfe": 37013, "\u0120integers": 37014, "\u0120orbits": 37015, "\u0120Edwin": 37016, "\u0120DirectX": 37017, "bite": 37018, "\u0120blazing": 37019, "vr": 37020, "Edge": 37021, "\u0120PID": 37022, "exit": 37023, "\u0120Comed": 37024, "\u0120Pathfinder": 37025, "\u0120Guid": 37026, 
"\u0120Signs": 37027, "\u0120Zer": 37028, "\u0120Agenda": 37029, "\u0120reimbursement": 37030, "Mesh": 37031, "iPhone": 37032, "\u0120Marcos": 37033, "\u0120Sites": 37034, "hate": 37035, "enburg": 37036, "\u0120sockets": 37037, "pend": 37038, "Batman": 37039, "vir": 37040, "\u0120SHOW": 37041, "\u0120provisional": 37042, "conn": 37043, "\u0120Deaths": 37044, "ATIVE": 37045, "Profile": 37046, "sym": 37047, "JA": 37048, "\u0120ninja": 37049, "installed": 37050, "idates": 37051, "ebra": 37052, "\u0120Omaha": 37053, "\u0120seizing": 37054, "\u0120Beasts": 37055, "\u0120salts": 37056, "Mission": 37057, "Generally": 37058, "\u0120Trilogy": 37059, "heon": 37060, "legates": 37061, "\u0120dime": 37062, "\u0120faire": 37063, "parable": 37064, "Graph": 37065, "\u0120totaling": 37066, "\u0120diagrams": 37067, "\u0120Yanuk": 37068, "plet": 37069, "\u0120Meh": 37070, "\u0120mythical": 37071, "\u0120Stephens": 37072, "autical": 37073, "ochemistry": 37074, "\u0120kilograms": 37075, "\u0120elbows": 37076, "ancock": 37077, "\u0120BCE": 37078, "\u0120Prague": 37079, "\u0120improv": 37080, "\u0120Devin": 37081, "\u0120\"\\": 37082, "paralle": 37083, "\u0120supremacists": 37084, "\u0120Billion": 37085, "\u0120regimen": 37086, "innacle": 37087, "\u0120requisite": 37088, "angan": 37089, "\u0120Burlington": 37090, "ainment": 37091, "\u0120Objective": 37092, "omsky": 37093, "GV": 37094, "\u0120unilateral": 37095, "\u0120tc": 37096, "\u0120hires": 37097, "mental": 37098, "\u0120involuntary": 37099, "\u0120transpl": 37100, "\u0120ASCII": 37101, "\u00c2\u00a8": 37102, "Events": 37103, "\u0120doubted": 37104, "\u0120Kaplan": 37105, "\u0120Courage": 37106, "igon": 37107, "\u0120Managing": 37108, "\u0120Tart": 37109, "\u0120falsehood": 37110, "\u0120Violet": 37111, "\u0120airs": 37112, "\u0120fertilizer": 37113, "Britain": 37114, "\u0120aquatic": 37115, "ouf": 37116, "Words": 37117, "\u0120Hartford": 37118, "\u0120evenings": 37119, "\u0120Vengeance": 37120, "quite": 37121, "Gall": 37122, "\u0120Pret": 37123, "\u0120pdf": 37124, "\u0120LM": 37125, "\u0120Sochi": 37126, "\u0120Intercept": 37127, "920": 37128, "\u0120profitability": 37129, "\u0120Idle": 37130, "\u0120MacDonald": 37131, "\u0120Establishment": 37132, "umsy": 37133, "\u0120gatherings": 37134, "\u0120Naj": 37135, "Charlie": 37136, "\u0120ascent": 37137, "\u0120Protector": 37138, "\u0120algebra": 37139, "\u0120bios": 37140, "forums": 37141, "ELS": 37142, "Introduced": 37143, "\u0120335": 37144, "\u0120astronomy": 37145, "Contribut": 37146, "\u0120Polic": 37147, "Platform": 37148, "\u0120containment": 37149, "wrap": 37150, "\u0120coronary": 37151, "\u0120Jelly": 37152, "manager": 37153, "\u0120heartbreaking": 37154, "cair": 37155, "\u0120Chero": 37156, "cgi": 37157, "Medical": 37158, "\u0120Accountability": 37159, "!!\"": 37160, "ophile": 37161, "\u0120psychotic": 37162, "\u0120Restrict": 37163, "\u0120equitable": 37164, "issues": 37165, "\u01201905": 37166, "\u0120Nek": 37167, "cised": 37168, "\u0120Tracking": 37169, "\u0120ozone": 37170, "\u0120cooker": 37171, "rosis": 37172, "\u0120reopen": 37173, "\u0120infinity": 37174, "\u0120Pharmaceutical": 37175, "ensional": 37176, "Attempt": 37177, "\u0120Rory": 37178, "Marco": 37179, "\u0120awaits": 37180, "HOW": 37181, "treated": 37182, "\u0120bolst": 37183, "\u0120revered": 37184, "\u0120pods": 37185, "oppers": 37186, "0010": 37187, "\u0120amplitude": 37188, "rican": 37189, "SPONSORED": 37190, "\u0120trousers": 37191, "\u0120halves": 37192, "\u0120Kaine": 37193, "\u0120Cutler": 37194, "\u0120AUTH": 37195, 
"\u0120splendid": 37196, "\u0120preventive": 37197, "\u0120Dudley": 37198, "ifacts": 37199, "uminati": 37200, "\u0120Yin": 37201, "\u0120admon": 37202, "\u0120Vag": 37203, "\u0120inverted": 37204, "\u0120hastily": 37205, "\u0120Hague": 37206, "Lyn": 37207, "\u0120ledger": 37208, "\u0120astronomical": 37209, "getting": 37210, "\u0120circa": 37211, "\u0120Cic": 37212, "\u0120Tennis": 37213, "Limited": 37214, "\u0120dru": 37215, "\u0120BYU": 37216, "\u0120travellers": 37217, "\u0120pane": 37218, "\u0120Intro": 37219, "\u0120patiently": 37220, "\u0120aiding": 37221, "\u0120loos": 37222, "\u0120Tough": 37223, "\u0120293": 37224, "\u0120consumes": 37225, "SourceFile": 37226, "\u0120\"\"\"": 37227, "\u0120bonding": 37228, "\u0120tilted": 37229, "\u0120menstrual": 37230, "\u0120Celestial": 37231, "ULAR": 37232, "Plugin": 37233, "\u0120risking": 37234, "Naz": 37235, "\u0120Riyadh": 37236, "\u0120accredited": 37237, "\u0120skirm": 37238, "\u00e9\u013d": 37239, "\u0120examiner": 37240, "\u0120messing": 37241, "\u0120nearing": 37242, "\u0120Chern": 37243, "\u0120Beckham": 37244, "\u0120swapped": 37245, "\u0120goose": 37246, "Kay": 37247, "\u0120lofty": 37248, "\u0120Wallet": 37249, "\u0120['": 37250, "\u0120apocalypse": 37251, "\u0120bamboo": 37252, "\u0120SPACE": 37253, "\u0120Elena": 37254, "\u0120306": 37255, "acons": 37256, "\u0120tightened": 37257, "\u0120adolescence": 37258, "\u0120rainy": 37259, "\u0120vandalism": 37260, "\u0120Newtown": 37261, "\u0120conject": 37262, "cakes": 37263, "\u0120cheated": 37264, "\u0120moderators": 37265, "params": 37266, "EFF": 37267, "\u0120deceit": 37268, "\u0120STL": 37269, "\u0120Tanzania": 37270, "\u0120RI": 37271, "\u01201923": 37272, "\u0120Exile": 37273, "thel": 37274, "\u0120theolog": 37275, "\u0120quirky": 37276, "\u0120Irvine": 37277, "\u0120needy": 37278, "oris": 37279, "Um": 37280, "Ka": 37281, "\u0120mailbox": 37282, "322": 37283, "\u0120bos": 37284, "\u0120Petra": 37285, "KING": 37286, "\u0120enlarged": 37287, "Often": 37288, "\u0120badass": 37289, "\u0120343": 37290, "\u0120Places": 37291, "\u0120CAD": 37292, "\u0120pristine": 37293, "\u0120intervening": 37294, "direction": 37295, "\u0120laz": 37296, "\u0120DSM": 37297, "\u0120projecting": 37298, "\u0120Funk": 37299, "agog": 37300, "payment": 37301, "nov": 37302, "\u0120chatter": 37303, "ARB": 37304, "\u0120examinations": 37305, "\u0120Household": 37306, "\u0120Gus": 37307, "Ford": 37308, "414": 37309, "Boss": 37310, "\u0120mystic": 37311, "\u0120leaps": 37312, "\u0120Bav": 37313, "ulz": 37314, "budget": 37315, "Football": 37316, "\u0120subsidized": 37317, "\u0120firsthand": 37318, "\u0120coincide": 37319, "ocular": 37320, "Conn": 37321, "\u0120Collabor": 37322, "\u0120fools": 37323, "amura": 37324, "ahar": 37325, "rists": 37326, "\u0120swollen": 37327, "\u0120expended": 37328, "\u0120Pau": 37329, "sup": 37330, "\u0120spar": 37331, "\u0120keynote": 37332, "suff": 37333, "\u0120unequal": 37334, "\u0120progressing": 37335, "strings": 37336, "\u0120Gamergate": 37337, "Disney": 37338, "\u0120Eleven": 37339, "omnia": 37340, "\u0120scripted": 37341, "\u0120earners": 37342, "brother": 37343, "\u0120Enabled": 37344, "\u00e6\u00b3": 37345, "\u0120larvae": 37346, "\u0120LOC": 37347, "mess": 37348, "Wilson": 37349, "\u0120Template": 37350, "successfully": 37351, "\u0120paramount": 37352, "\u0120camouflage": 37353, "\u0120binds": 37354, "\u0120Quiet": 37355, "\u0120Shutterstock": 37356, "rush": 37357, "\u0120mascot": 37358, "fortune": 37359, "\u0120Colt": 37360, "\u0120Beyon": 37361, "habi": 37362, 
"\u0120hairc": 37363, "\u0120267": 37364, "\u0120Deus": 37365, "\u0120twitch": 37366, "\u0120concentrating": 37367, "\u0120nipples": 37368, "cible": 37369, "\u0120gir": 37370, "NZ": 37371, "Math": 37372, "nih": 37373, "Required": 37374, "\u0120ponder": 37375, "\u0120SAN": 37376, "\u0120weddings": 37377, "\u0120loneliness": 37378, "NES": 37379, "\u0120Mahjong": 37380, "695": 37381, "addle": 37382, "\u0120Garner": 37383, "\u0120COUR": 37384, "Bridge": 37385, "\u0120spree": 37386, "\u0120Caldwell": 37387, "\u0120bribery": 37388, "\u0120\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd\u00ef\u00bf\u00bd": 37389, "plugins": 37390, "\u0120racket": 37391, "\u0120champagne": 37392, "versible": 37393, "Vote": 37394, "\u0120modifiers": 37395, "Mayor": 37396, "680": 37397, "\u0120assemblies": 37398, "\u0120Sultan": 37399, "\u0120Ning": 37400, "\u0120Ladies": 37401, "\u0120sulfur": 37402, "\u0120orbs": 37403, "\u0120-----": 37404, "_______": 37405, "\u0120Journalism": 37406, "\u0120esports": 37407, "\u0120lush": 37408, "\u0120hue": 37409, "\u0120spectral": 37410, "Honest": 37411, "\u00e3\u0125\u0131": 37412, "\u0120bushes": 37413, "\u0120reinforcement": 37414, "\u0120reopened": 37415, "\u0120Wheels": 37416, "\u0120Morg": 37417, "rieving": 37418, "\u0120auxiliary": 37419, "\u0120jQuery": 37420, "\u0120BAT": 37421, "tesque": 37422, "\u0120vertex": 37423, "pure": 37424, "frey": 37425, "\u00e3\u0124\u00ba": 37426, "dos": 37427, "\u0120typh": 37428, "\u0120cull": 37429, "\u0120eq": 37430, "\u0120decon": 37431, "\u0120tossing": 37432, "\u0120disparate": 37433, "\u0120Brigham": 37434, "printf": 37435, "ledged": 37436, "\u0120sund": 37437, "\u0120cozy": 37438, "\u0120hepatitis": 37439, "performing": 37440, "\u0120aval": 37441, "\u0120GG": 37442, "future": 37443, "\u0120petertodd": 37444, "\u0120Kosovo": 37445, "\u0120magnets": 37446, "Already": 37447, "\u0120Edison": 37448, "\u0120Ceres": 37449, "\u0120RAID": 37450, "\u0120brilliance": 37451, "576": 37452, "\u0120derives": 37453, "\u0120hypertension": 37454, "\u0120\u00ce\u0136": 37455, "\u0120lambda": 37456, "\u0120flair": 37457, "\u0120missionaries": 37458, "\u0120rapes": 37459, "\u0120Starter": 37460, "\u0120Months": 37461, "\u0120defy": 37462, "\u0120seismic": 37463, "\u0120Raphael": 37464, "\u0120eurozone": 37465, "656": 37466, "zsche": 37467, "\u0120scratched": 37468, "\u0120bows": 37469, "\u0120Lennon": 37470, "\u0120Gaia": 37471, "\u0120dripping": 37472, "facts": 37473, "Ale": 37474, "\u0120frogs": 37475, "\u0120Breast": 37476, "ogeneity": 37477, "\u0120Prosecutor": 37478, "\u0120amplified": 37479, "\u0120Hodg": 37480, "\u0120Fn": 37481, "Thousands": 37482, "\u0120NIH": 37483, "\u0120Monitoring": 37484, "FTWARE": 37485, "\u0120Priebus": 37486, "\u0120Growing": 37487, "hunter": 37488, "\u0120diagnose": 37489, "\u0120Mald": 37490, "\u0120LR": 37491, "\u0120crowned": 37492, "\u0120bursting": 37493, "\u0120dissolution": 37494, "javascript": 37495, "\u0120usefulness": 37496, "\u0120Execution": 37497, ":(": 37498, "\u0120Ivory": 37499, "aah": 37500, "\u0120persecuted": 37501, "violence": 37502, "istas": 37503, "\u0120Crate": 37504, "\u0120impulses": 37505, "\u0120Spani": 37506, "edes": 37507, "Handle": 37508, "\u0120Zerg": 37509, "thinkable": 37510, "Lastly": 37511, "\u0120spontaneously": 37512, "\u0120inconvenient": 37513, "\u0120dismissing": 37514, "\u0120plotted": 37515, "\u0120eighty": 37516, "\u0120737": 37517, "rish": 37518, "\u0120Thornton": 37519, "atham": 37520, 
"\u0120sitcom": 37521, "Ven": 37522, "Recipe": 37523, "tel": 37524, "lund": 37525, "\u0120clears": 37526, "\u0120Sasuke": 37527, "\u0120258": 37528, "\u0120opting": 37529, "\u0120enraged": 37530, "esthetic": 37531, "\u0120Ae": 37532, "uchs": 37533, "Prep": 37534, "Flow": 37535, "\u0120runoff": 37536, "\u0120Eating": 37537, "\u0120Giles": 37538, "\u0120Acting": 37539, "resources": 37540, "ibaba": 37541, "\u0120rpm": 37542, "\u0120skewed": 37543, "\u0120Blanc": 37544, "\u0120Sakuya": 37545, "\u0120hotter": 37546, "\u01201924": 37547, "opian": 37548, "cko": 37549, "\u0120crumbling": 37550, "\u0120captains": 37551, "\u0120Appropriations": 37552, "leaders": 37553, "dropping": 37554, "anuts": 37555, "\u0120reversing": 37556, "\u0120Pose": 37557, "\u0120Sek": 37558, "Scot": 37559, "\u0120Idea": 37560, "cise": 37561, "\u0120Slovenia": 37562, "\u0120317": 37563, "Doctor": 37564, "\u0120crocod": 37565, "aldi": 37566, "Sea": 37567, "\u0120Farrell": 37568, "\u0120mercenaries": 37569, "\u0120RNC": 37570, "\u0120Guess": 37571, "\u0120pacing": 37572, "Machine": 37573, "StreamerBot": 37574, "\u0120Charity": 37575, "\u0120298": 37576, "\u0120cannons": 37577, "\u0120Toby": 37578, "TPPStreamerBot": 37579, "\u0120Passion": 37580, "cfg": 37581, "Thom": 37582, "\u0120badges": 37583, "\u0120Bernstein": 37584, ".\u00e2\u0122\u0135": 37585, "\u0120POP": 37586, "\u0120Conj": 37587, "\u0120initialization": 37588, "\u0120biodiversity": 37589, "Dub": 37590, "\u0120feudal": 37591, "\u0120disclaimer": 37592, "\u0120crow": 37593, "\u0120ignition": 37594, "arf": 37595, "SHA": 37596, "\u0120kHz": 37597, "hazard": 37598, "\u0120Artists": 37599, "oeuv": 37600, "679": 37601, "\u0120Rudy": 37602, "Nine": 37603, "\u0120Ramadan": 37604, "\u00e5\u00bd": 37605, "itto": 37606, "\u0120adrenaline": 37607, "Cert": 37608, "\u0120smelled": 37609, "\u0120impunity": 37610, "\u0120agendas": 37611, "\u0120Reborn": 37612, "\u0120Concent": 37613, "\u0120Seems": 37614, "\u0120omega": 37615, "\u0120Dustin": 37616, "\u0120backer": 37617, "\u0120Sauce": 37618, "\u0120Boyle": 37619, "WIN": 37620, "\u0120spins": 37621, "\u0120pauses": 37622, "upt": 37623, "\u0120shredded": 37624, "\u0120strapped": 37625, "\u0120Corruption": 37626, "\u0120scratches": 37627, "\u0120ni": 37628, "\u0120attire": 37629, "\u0120SAF": 37630, "FactoryReloaded": 37631, "\u0120IPS": 37632, "\u0120(%": 37633, "\u0120seminar": 37634, "focus": 37635, "civil": 37636, "\u01201860": 37637, "intosh": 37638, "\u0120continual": 37639, "\u0120abbrevi": 37640, "\u0120Sok": 37641, "ocobo": 37642, "XM": 37643, "\u0120frantic": 37644, "\u0120unavoidable": 37645, "\u0120artery": 37646, "\u0120annotations": 37647, "bath": 37648, "Climate": 37649, "\u0120dors": 37650, "\u0120Slide": 37651, "coord": 37652, "\u0120Reload": 37653, "\u0120LDL": 37654, "\u0120Lovecraft": 37655, "\u0120unimagin": 37656, "\u0120resembled": 37657, "\u0120barracks": 37658, "np": 37659, "\u0120surrogate": 37660, "\u0120categorized": 37661, "\u00e3\u0124\u00a9": 37662, "\u0120vaccinated": 37663, "\u0120drainage": 37664, "\u0120indist": 37665, "\u0120WhatsApp": 37666, "\u01201870": 37667, "olerance": 37668, "invoke": 37669, "amorph": 37670, "\u0120reconnect": 37671, "\u0120emanc": 37672, "\u0120blindness": 37673, "\u01201280": 37674, "internet": 37675, "collar": 37676, "\u0120altru": 37677, "\u0120abyss": 37678, "\u0120TRI": 37679, "657": 37680, "\u0120infused": 37681, "HEAD": 37682, "\u0120forestry": 37683, "\u0120Woody": 37684, "\u0120Ci": 37685, "wi": 37686, "sam": 37687, "784": 37688, "holiday": 37689, "\u0120mogul": 
37690, "\u0120Fees": 37691, "\u0120DEN": 37692, "Internal": 37693, "urbed": 37694, "fusc": 37695, "atom": 37696, "\u0120Illusion": 37697, "\u0120polled": 37698, "\u0120flap": 37699, "\u0120coax": 37700, "LGBT": 37701, "Analy": 37702, "\u0120Sections": 37703, "\u0120Californ": 37704, "emn": 37705, "\u0120hither": 37706, "\u0120NIGHT": 37707, "\u0120nailed": 37708, "\u0120Pipeline": 37709, "391": 37710, "oof": 37711, "\u0120Primal": 37712, "verend": 37713, "\u0120slashing": 37714, "\u0120retri": 37715, "aviour": 37716, "\u0120departing": 37717, "gil": 37718, "ISC": 37719, "\u0120midway": 37720, "\u0120ultrasound": 37721, "\u0120behaving": 37722, "\u0120Tara": 37723, "classes": 37724, "Virtual": 37725, "\u0120Colonial": 37726, "\u0120stripping": 37727, "\u0120orchestrated": 37728, "\u0120Graves": 37729, "452": 37730, "\u0120Ironically": 37731, "\u0120Writers": 37732, "\u0120lends": 37733, "\u0120Manz": 37734, "\u0120raven": 37735, "\u0120oxidative": 37736, "\u0120266": 37737, "ELF": 37738, "actually": 37739, "ascar": 37740, "Draft": 37741, "\u0120favourable": 37742, "\u0120humiliating": 37743, "\u0120fidelity": 37744, "\u0120Hof": 37745, "\u0120Xuan": 37746, "496": 37747, "\u0120layered": 37748, "atis": 37749, "790": 37750, "\u0120paycheck": 37751, "iton": 37752, "Kar": 37753, "\u0120VMware": 37754, "\u0120Farmer": 37755, "\u0120servic": 37756, "glomer": 37757, "\u0120slump": 37758, "\u0120Fabric": 37759, "\u0120DOC": 37760, "esting": 37761, "\u0120reassure": 37762, "\u0120phyl": 37763, "volt": 37764, "itory": 37765, "Rules": 37766, "\u0120oxidation": 37767, "\u0120prized": 37768, "\u0120mistress": 37769, "\u0120Django": 37770, "WARN": 37771, "\u00e5\u0133": 37772, "\u0120encode": 37773, "\u0120Feedback": 37774, "\u0120stupidity": 37775, "Ian": 37776, "\u0120Yugoslavia": 37777, "\u00d7\u00a8": 37778, "acl": 37779, "UTE": 37780, "1977": 37781, "\u0120qualifies": 37782, "\u0120pulses": 37783, "pretty": 37784, "\u0120froze": 37785, "\u0120ss": 37786, "Iterator": 37787, "\u0120urgently": 37788, "\u0120mailed": 37789, "\u0120Cham": 37790, "\u0120sustaining": 37791, "\u0120basil": 37792, "\u0120puppies": 37793, "ilant": 37794, "\u0120PLEASE": 37795, "lap": 37796, "aceous": 37797, "Fear": 37798, "\u0120Mastery": 37799, "automatic": 37800, "\u0120TAG": 37801, "\u0120antim": 37802, "agles": 37803, "473": 37804, "frames": 37805, "\u0120whispers": 37806, "\u0120Whoever": 37807, "\u0120bravery": 37808, "\u0120UKIP": 37809, "ractions": 37810, "\"\"\"": 37811, "\u0120tame": 37812, "\u0120parted": 37813, "everything": 37814, "CONT": 37815, "\u0120indebted": 37816, "\u0120addr": 37817, "rek": 37818, "IRED": 37819, "\u0120eminent": 37820, "clinton": 37821, "\u0120ousted": 37822, "\u0120reviewer": 37823, "\u0120meltdown": 37824, "\u0120rearr": 37825, "\u0120Yao": 37826, "thereal": 37827, "abyte": 37828, "\u0120stumbling": 37829, "\u0120batches": 37830, "\u0120259": 37831, "\u0120contraceptive": 37832, "\u0120prostitute": 37833, "ensis": 37834, "Decl": 37835, "\u0120Strikes": 37836, "Military": 37837, "\u0120Oath": 37838, "vacc": 37839, "ppings": 37840, "052": 37841, "\u0120partName": 37842, "amping": 37843, "Reports": 37844, "KI": 37845, "CHR": 37846, "\u0120subtly": 37847, "swers": 37848, "Blake": 37849, "usual": 37850, "\u0120contestants": 37851, "\u0120cartridges": 37852, "\u0120GREAT": 37853, "\u0120blush": 37854, "\u0120\u00e2\u0122\u00ba": 37855, "472": 37856, "\u0120reasoned": 37857, "\u00e3\u0125\u00a4": 37858, "paralleled": 37859, "\u0120dyn": 37860, "agate": 37861, "\u0120nightly": 37862, 
"\u00e5\u0128": 37863, "556": 37864, "\u0120semantic": 37865, "\u0120Advoc": 37866, "\u0120!!": 37867, "\u0120disagrees": 37868, "\u0120BW": 37869, "Veh": 37870, "\u0120harming": 37871, "\u0120embraces": 37872, "\u0120strives": 37873, "\u0120inland": 37874, "\u0120Kard": 37875, "\u0120heats": 37876, "\u0120Ginny": 37877, "utan": 37878, "ernaut": 37879, "ylene": 37880, "\u0120Elev": 37881, "JD": 37882, "\u0120hars": 37883, "\u0120Starr": 37884, "\u0120skysc": 37885, "\u0120collaborators": 37886, "Usually": 37887, "\u0120revolutions": 37888, "\u0120STATS": 37889, "\u0120dismantle": 37890, "\u0120confidently": 37891, "\u0120kinetic": 37892, "Ali": 37893, "\u0120percentile": 37894, "\u0120extracting": 37895, "illian": 37896, "estead": 37897, "\u0120physicists": 37898, "\u0120Marshal": 37899, "\u0120fellowship": 37900, "\u0120dashed": 37901, "\u0120UR": 37902, "\u0120Sioux": 37903, "\u0120Compact": 37904, "amide": 37905, "Python": 37906, "\u0120Leigh": 37907, "\u0120Pharmac": 37908, "istrates": 37909, "herical": 37910, "\u0120fue": 37911, "\u0120Emin": 37912, "\u0120({": 37913, "\u0120Neighborhood": 37914, "\u0120disrupting": 37915, "\u0120Dup": 37916, "\u0120gland": 37917, "\u0120Sev": 37918, "\u0120Marian": 37919, "argon": 37920, "\u0120Dund": 37921, "\u0120": 46904, "\u0120Philips": 46905, "\u0120Kafka": 46906, "\u0120upheaval": 46907, "\u0120sentimental": 46908, "\u0120sax": 46909, "\u0120Akira": 46910, "serial": 46911, "Matrix": 46912, "\u0120electing": 46913, "\u0120commenter": 46914, "\u0120Nebula": 46915, "plets": 46916, "\u0120Nadu": 46917, "\u0120Adren": 46918, "\u0120enshr": 46919, "\u0120RAND": 46920, "financial": 46921, "\u0120Clyde": 46922, "utherford": 46923, "\u0120signage": 46924, "\u0120deline": 46925, "\u0120phosphate": 46926, "roversial": 46927, "fascist": 46928, "\u0120Vall": 46929, "\u0120Bethlehem": 46930, "\u0120fors": 46931, "\u0120english": 46932, "Solid": 46933, "Nature": 46934, "\u0120va": 46935, "\u0120Guests": 46936, "\u0120tantal": 46937, "\u0120autoimmune": 46938, ";;;;;;;;;;;;": 46939, "\u0120Totally": 46940, "\u0120Ov": 46941, "\u0120defences": 46942, "\u0120Coconut": 46943, "\u0120tranquil": 46944, "\u0120ploy": 46945, "\u0120flavours": 46946, "\u0120Flask": 46947, "\u00e3\u0124\u00a8\u00e3\u0125\u00ab": 46948, "\u0120Weston": 46949, "\u0120Volvo": 46950, "870": 46951, "\u0120microphones": 46952, "verbal": 46953, "RPG": 46954, "\u0120iii": 46955, ";}": 46956, "028": 46957, "\u0120headlined": 46958, "\u0120primed": 46959, "\u0120hoard": 46960, "\u0120Shad": 46961, "\u0120ENTER": 46962, "\u0120triangular": 46963, "\u0120capit": 46964, "lik": 46965, "\u0120Ancients": 46966, "\u0120lash": 46967, "\u0120convol": 46968, "\u0120colonel": 46969, "enemy": 46970, "Gra": 46971, "\u0120pubs": 46972, "utters": 46973, "\u0120assigns": 46974, "\u0120Penet": 46975, "\u0120Monstrous": 46976, "\u0120Bowen": 46977, "ilver": 46978, "Haunted": 46979, "\u0120Ding": 46980, "started": 46981, "plin": 46982, "\u0120contaminants": 46983, "\u0120DOE": 46984, "ffen": 46985, "\u0120Technician": 46986, "Ry": 46987, "\u0120robbers": 46988, "\u0120hotline": 46989, "\u0120Guardiola": 46990, "\u0120Kaufman": 46991, "rower": 46992, "\u0120Dresden": 46993, "\u0120Alpine": 46994, "Elf": 46995, "\u0120fmt": 46996, "\u0120Sard": 46997, "urses": 46998, "gpu": 46999, "Unix": 47000, "\u0120unequivocally": 47001, "\u0120Citizenship": 47002, "quad": 47003, "mire": 47004, "\u0120Sweeney": 47005, "Battery": 47006, "615": 47007, "\u0120pancakes": 47008, "\u0120oats": 47009, "Maps": 47010, "\u0120Contrast": 
47011, "mbudsman": 47012, "\u0120EPS": 47013, "\u0120subcommittee": 47014, "\u0120sourcing": 47015, "\u0120sizing": 47016, "\u0120Buffer": 47017, "\u0120Mandatory": 47018, "\u0120moderates": 47019, "\u0120Patterns": 47020, "\u0120Chocobo": 47021, "\u0120Zan": 47022, "\u0120STATES": 47023, "\u0120Judging": 47024, "\u0120Inher": 47025, "*:": 47026, "\u0120bil": 47027, "\u0120Yen": 47028, "\u0120exhilar": 47029, "ollower": 47030, "zers": 47031, "\u0120snug": 47032, "maximum": 47033, "\u0120despicable": 47034, "\u0120PACK": 47035, "\u0120Annex": 47036, "\u0120sarcastic": 47037, "\u0120latex": 47038, "\u0120tamp": 47039, "\u0120Sao": 47040, "bah": 47041, "\u0120Reverend": 47042, "\u0120Chinatown": 47043, "\u0120AUT": 47044, "documented": 47045, "\u0120GABA": 47046, "\u0120Canaan": 47047, "\u0120\u00d9\u0127": 47048, "\u0120governs": 47049, "prev": 47050, "Esc": 47051, "\u0120Estimates": 47052, "OSP": 47053, "\u0120endeavour": 47054, "\u0120Closing": 47055, "ometime": 47056, "everyone": 47057, "\u0120worsen": 47058, "\u0120scanners": 47059, "\u0120deviations": 47060, "\u0120Robotics": 47061, "\u0120Compton": 47062, "\u0120sorcerer": 47063, "\u0120endogenous": 47064, "\u0120emulation": 47065, "\u0120Piercing": 47066, "\u0120Aph": 47067, "\u0120Socket": 47068, "\u0120bould": 47069, "\u0120OU": 47070, "\u0120Borderlands": 47071, "\u01201863": 47072, "Gordon": 47073, "\u0120WTO": 47074, "\u0120restricts": 47075, "\u0120mosaic": 47076, "\u0120melodies": 47077, "\u00e7\u0126": 47078, "Tar": 47079, "\u0120disson": 47080, "\u0120Provides": 47081, "\u0120......": 47082, "bek": 47083, "FIX": 47084, "\u0120broom": 47085, "anship": 47086, "Doctors": 47087, "\u0120nerds": 47088, "\u0120Regions": 47089, "naissance": 47090, "\u0120mete": 47091, "\u0120crept": 47092, "plings": 47093, "\u0120girlfriends": 47094, "knit": 47095, "igent": 47096, "owe": 47097, "\u0120ushered": 47098, "\u0120Baz": 47099, "Mobil": 47100, "434": 47101, "\u0120Presents": 47102, "origin": 47103, "\u0120insomnia": 47104, "\u0120Aux": 47105, "439": 47106, "\u0120Chili": 47107, "irsch": 47108, "GAME": 47109, "\u0120gestation": 47110, "algia": 47111, "romising": 47112, "$,": 47113, "crow": 47114, "\u0120Inspection": 47115, "atomic": 47116, "Relations": 47117, "JOHN": 47118, "roman": 47119, "\u0120Clockwork": 47120, "\u0120Bakr": 47121, "mone": 47122, "MET": 47123, "\u0120thirsty": 47124, "\u0120bc": 47125, "\u0120faculties": 47126, "Rum": 47127, "\u0120nuance": 47128, "\u0120Darius": 47129, "pleting": 47130, "fters": 47131, "etchup": 47132, "Registration": 47133, "\u0120KE": 47134, "Rah": 47135, "\u0120preferential": 47136, "\u0120Lash": 47137, "\u0120HH": 47138, "Valid": 47139, "\u0120NAV": 47140, "\u0120starve": 47141, "\u0120Gong": 47142, "zynski": 47143, "\u0120Actress": 47144, "\u0120wik": 47145, "\u0120unaccompanied": 47146, "lvl": 47147, "Bride": 47148, "ADS": 47149, "\u0120Commando": 47150, "\u0120Vaughn": 47151, "Wallet": 47152, "\u0120hopping": 47153, "\u0120Vie": 47154, "\u0120caveats": 47155, "\u0120alas": 47156, "ifled": 47157, "abuse": 47158, "661": 47159, "\u0120ibn": 47160, "\u0120gul": 47161, "\u0120robbing": 47162, "til": 47163, "ILA": 47164, "\u0120mitigating": 47165, "\u0120aptly": 47166, "\u0120tyrant": 47167, "\u0120midday": 47168, "\u0120Gilmore": 47169, "\u0120Decker": 47170, "\u0120\u00c2\u00a7\u00c2\u00a7": 47171, "partial": 47172, "Exactly": 47173, "\u0120phenotype": 47174, "\u0120[+]": 47175, "\u0120Plex": 47176, "\u0120Ips": 47177, "versions": 47178, "\u0120ebook": 47179, "\u0120chic": 47180, "gross": 47181, 
"\":\"\"},{\"": 47182, "\u0120Surprisingly": 47183, "Morgan": 47184, "\u0120residues": 47185, "\u0120Confederation": 47186, "infeld": 47187, "\u0120lyr": 47188, "moderate": 47189, "\u0120perpendicular": 47190, "VK": 47191, "\u0120synchronized": 47192, "\u0120refreshed": 47193, "\u0120adore": 47194, "\u0120Torment": 47195, "olina": 47196, "\u01202600": 47197, "ItemTracker": 47198, "\u0120pies": 47199, "\u0120FAT": 47200, "\u0120RHP": 47201, "048": 47202, "\u0120RESP": 47203, "\u0120BJ": 47204, "allows": 47205, "Pand": 47206, "\u0120unwelcome": 47207, "\u0120Voc": 47208, "\u0120Bastard": 47209, "\u0120OW": 47210, "\u0120LAR": 47211, "\u0120Healer": 47212, "Environmental": 47213, "\u0120Kenyan": 47214, "\u0120Trance": 47215, "\u0120Pats": 47216, "\u0120aliases": 47217, "\u0120Garfield": 47218, "\u0120campaigner": 47219, "\u0120advancements": 47220, "\u0120Okinawa": 47221, "\u0120Coh": 47222, "owsky": 47223, "\u0120starved": 47224, "\u0120sizeable": 47225, "\u0120:-)": 47226, "\u0120mRNA": 47227, "\u0120suspensions": 47228, "istar": 47229, "Scotland": 47230, "Prin": 47231, "------------------------------------------------": 47232, "\u0120502": 47233, "\u0120teaspoons": 47234, "\u01201050": 47235, "\u0120coercive": 47236, "\u0120Masonic": 47237, "edded": 47238, "\u0120Passenger": 47239, "\u0120latt": 47240, "\u0120braces": 47241, "\u0120Steal": 47242, "\u0120NYT": 47243, "\u0120Kats": 47244, "\u0120Celest": 47245, "aez": 47246, "Tu": 47247, "\u0120Coulter": 47248, "\u00f0\u0141\u013a": 47249, "Flickr": 47250, "\u0120Wilmington": 47251, "iths": 47252, "++;": 47253, "\u0120vending": 47254, "\u0120negro": 47255, "\u0120Phi": 47256, "\u0120Yellowstone": 47257, "Callback": 47258, "\u0120shampoo": 47259, "\u0120Shades": 47260, "wat": 47261, "\u0120superhuman": 47262, "\u0120ridiculed": 47263, "\u0120holiest": 47264, "ombo": 47265, "\u0120interns": 47266, "\u0120hone": 47267, "\u0120Paragu": 47268, "URI": 47269, "\u0120dangling": 47270, "\u00e3\u0124\u00bb": 47271, "sov": 47272, "ictional": 47273, "availability": 47274, "\u0120revocation": 47275, "\u0120dow": 47276, "inic": 47277, "\u0120THEIR": 47278, "\u0120iso": 47279, "\u0120outings": 47280, "\u0120Lethal": 47281, "\u0120)))": 47282, "\u0120inaccur": 47283, "\u0120outlandish": 47284, "\u0120anus": 47285, "letico": 47286, "idon": 47287, "lol": 47288, "\u0120unregulated": 47289, "\u0120succumbed": 47290, "\u0120cuff": 47291, "\u0120Wasteland": 47292, "letal": 47293, "\u0120substr": 47294, "\u0120coffers": 47295, "\u0120automakers": 47296, "ovi": 47297, "\u0120Xue": 47298, "\u0120Daytona": 47299, "\u0120jarring": 47300, "\u0120fumes": 47301, "\u0120disbanded": 47302, "zik": 47303, "itton": 47304, "\u0120strikingly": 47305, "\u0120spores": 47306, "Adapter": 47307, ".):": 47308, "\u0120Lyndon": 47309, "ivalry": 47310, "\u0120orally": 47311, "\u0120tumultuous": 47312, "\u0120displeasure": 47313, "\u0120cones": 47314, "orrect": 47315, "\u0120appease": 47316, "\u0120derby": 47317, "\u0120Tripoli": 47318, "\u0120Aless": 47319, "\u0120poked": 47320, "\u0120Guilty": 47321, "vP": 47322, "Enough": 47323, "\u0120originals": 47324, "699": 47325, "\u0120rabbi": 47326, "\u0120proverbial": 47327, "\u0120postpone": 47328, "elope": 47329, "\u0120Misty": 47330, "\u0120staffed": 47331, "\u0120Unemployment": 47332, "reditary": 47333, "\u0120diligent": 47334, "recomm": 47335, "measures": 47336, "asin": 47337, "825": 47338, "\u0120ponds": 47339, "\u0120mmol": 47340, "\u0120SAR": 47341, "\u0120CARE": 47342, "\u0120371": 47343, "\u0120clenched": 47344, "\u0120Corsair": 
47345, "\u0120caricature": 47346, "zn": 47347, "attach": 47348, "\u0120Schro": 47349, "speak": 47350, "painted": 47351, "\u0120Suc": 47352, "\u0120ENT": 47353, "\u0120cellul": 47354, "\u0120Paid": 47355, "diagn": 47356, "WHERE": 47357, "\u0120texted": 47358, "Barn": 47359, "\u0120retracted": 47360, "\u0120Referred": 47361, "Sav": 47362, "\u0120upkeep": 47363, "\u0120workplaces": 47364, "\u0120Tokens": 47365, "\u0120amplify": 47366, "clinical": 47367, "\u0120multic": 47368, "mberg": 47369, "\u0120convoluted": 47370, "Region": 47371, "565": 47372, "\u0120Topic": 47373, "\u0120snail": 47374, "\u0120saline": 47375, "\u0120insurrection": 47376, "\u0120Petr": 47377, "forts": 47378, "BAT": 47379, "\u0120Navajo": 47380, "\u0120rudimentary": 47381, "\u0120Laksh": 47382, "ONDON": 47383, "Measure": 47384, "\u0120transformer": 47385, "\u0120Goddard": 47386, "\u0120coincides": 47387, "irin": 47388, "Rex": 47389, "\u0120Bok": 47390, "quit": 47391, "\u0120shotguns": 47392, "\u0120proletarian": 47393, "\u0120scorp": 47394, "\u0120Ada": 47395, "514": 47396, "\u0120slander": 47397, "recorded": 47398, "\u0120embell": 47399, "risome": 47400, "\u0120apologizing": 47401, "\u0120Mulcair": 47402, "\u0120Gibraltar": 47403, "Cla": 47404, "\u0120allot": 47405, "\u0120Attention": 47406, "\u0120433": 47407, "leave": 47408, "\u0120whine": 47409, "\u0120Issa": 47410, "\u0120Faust": 47411, "\u0120Barron": 47412, "heny": 47413, "\u0120victimized": 47414, "Jews": 47415, "\u0120nurturing": 47416, "ettel": 47417, "Winged": 47418, "\u0120Subtle": 47419, "\u0120flavorful": 47420, "\u0120Reps": 47421, "enged": 47422, "callback": 47423, "\u0120directional": 47424, "\u0120clasp": 47425, "\u0120Directions": 47426, "planet": 47427, "iculture": 47428, "Helper": 47429, "icion": 47430, "acia": 47431, "\u0120\u00e7\u00a5\u0140": 47432, "\u0120surges": 47433, "\u0120canoe": 47434, "\u0120Premiership": 47435, "been": 47436, "\u0120defied": 47437, "\u0120Trooper": 47438, "\u0120tripod": 47439, "\u0120gasp": 47440, "\u0120Euph": 47441, "\u0120Ads": 47442, "vernight": 47443, "highly": 47444, "Role": 47445, "\u0120entangled": 47446, "\u0120Zeit": 47447, "618": 47448, "\u0120Rusty": 47449, "\u0120havens": 47450, "\u0120Vaughan": 47451, "HAEL": 47452, "\u0120SERVICE": 47453, "/,": 47454, "\u0120stricken": 47455, "\u0120delusions": 47456, "\u0120bis": 47457, "\u0120Haf": 47458, "\u0120gratification": 47459, "\u0120enticing": 47460, "UNCH": 47461, "Adams": 47462, "\u0120OLED": 47463, "\u0120Beetle": 47464, "\u01201899": 47465, "\u0120SOFTWARE": 47466, "ategor": 47467, "VL": 47468, "\u0120Totem": 47469, "\u0120Gators": 47470, "ATURES": 47471, "\u0120impedance": 47472, "Registered": 47473, "\u0120Cary": 47474, "\u0120Aerial": 47475, "onne": 47476, "enium": 47477, "\u0120dred": 47478, "\u0120Beg": 47479, "\u0120concurrently": 47480, "\u0120superpower": 47481, "\u0120Xan": 47482, "jew": 47483, "imester": 47484, "\u0120Dickinson": 47485, "\u00e2\u0136\u0123": 47486, "Fla": 47487, "\u0120pree": 47488, "\u0120Rollins": 47489, "\u00a9\u00b6\u00e6": 47490, "\u0120denomination": 47491, "\u0120Lana": 47492, "516": 47493, "\u0120inciting": 47494, "scribed": 47495, "juries": 47496, "\u0120Wonders": 47497, "approximately": 47498, "\u0120suspending": 47499, "\u0120mountainous": 47500, "\u0120Laugh": 47501, "oidal": 47502, "Ns": 47503, "Detect": 47504, ")=": 47505, "\u0120Luthor": 47506, "\u0120Schwarzenegger": 47507, "\u0120Muller": 47508, "\u0120Devi": 47509, "ecycle": 47510, "Jar": 47511, "613": 47512, "\u0120Longh": 47513, "Bah": 47514, "\u0120SPORTS": 
47515, "nw": 47516, "\u0120refinement": 47517, "\u0120waterways": 47518, "\u0120diner": 47519, "Blade": 47520, "683": 47521, "Fac": 47522, "\u0120initials": 47523, "\u0120rog": 47524, "\u0120paranormal": 47525, "BUT": 47526, "\u0120[(": 47527, "\u0120Swanson": 47528, "\u0120Mesh": 47529, "\u00e2\u0138\u00ac": 47530, "Improve": 47531, "\u0120Radiation": 47532, "\u0120Esther": 47533, "\u0120Esk": 47534, "\u0120Aly": 47535, "iky": 47536, "\u0120irrad": 47537, "\u0120Buckingham": 47538, "\u0120refill": 47539, "\u0120._": 47540, "Repe": 47541, "CONCLUS": 47542, "\u0120differentiated": 47543, "\u0120chirop": 47544, "\u0120Atkins": 47545, "Pattern": 47546, "\u0120excise": 47547, "\u0120cabal": 47548, "NSA": 47549, "\u0120STA": 47550, "\u0120SIL": 47551, "\u0120Paraly": 47552, "\u0120rye": 47553, "\u0120Howell": 47554, "\u0120Countdown": 47555, "nesses": 47556, "alysed": 47557, "\u0120resize": 47558, "\u00e3\u0124\u00bd": 47559, "\u0120budgetary": 47560, "\u0120Stras": 47561, "wang": 47562, "\u0120apiece": 47563, "\u0120precincts": 47564, "\u0120peach": 47565, "\u0120skyline": 47566, "\u0120353": 47567, "popular": 47568, "Appearances": 47569, "\u0120Mechanics": 47570, "\u0120DevOnline": 47571, "Sullivan": 47572, "Zen": 47573, "\u0120pu": 47574, "opolis": 47575, "544": 47576, "\u0120deform": 47577, "\u0120counteract": 47578, "\u0120Lange": 47579, "\u0120417": 47580, "Console": 47581, "774": 47582, "\u0120nodding": 47583, "\u0120populism": 47584, "\u0120hep": 47585, "\u0120counselling": 47586, "compliance": 47587, "UFF": 47588, "\u0120undeniably": 47589, "\u0120railing": 47590, "\u0120Horowitz": 47591, "\u0120Simone": 47592, "\u0120Bungie": 47593, "\u0120ak": 47594, "\u0120Talks": 47595, "xff": 47596, "flake": 47597, "Crash": 47598, "\u0120sweaty": 47599, "\u0120banquet": 47600, "\u0120OFFIC": 47601, "\u0120inventive": 47602, "\u0120astronomer": 47603, "\u0120Stamford": 47604, "\u0120Scare": 47605, "\u0120GREEN": 47606, "olicited": 47607, "\u0120rusher": 47608, "\u0120centrist": 47609, "ighting": 47610, "\u0120subclass": 47611, "\u0120disav": 47612, "\u0120defund": 47613, "\u0120Nanto": 47614, "ociate": 47615, "mast": 47616, "\u0120pacif": 47617, "\u0120mend": 47618, "eers": 47619, "immigration": 47620, "ESSION": 47621, "\u0120numbering": 47622, "\u0120laughable": 47623, "\u0120Ended": 47624, "viation": 47625, "emark": 47626, "Pitt": 47627, "\u0120meticulous": 47628, "\u0120LF": 47629, "\u0120congratulated": 47630, "\u0120Birch": 47631, "\u0120swayed": 47632, "\u0120semifinals": 47633, "\u0120humankind": 47634, "matter": 47635, "\u0120Equip": 47636, "opausal": 47637, "Said": 47638, "\u0120Layout": 47639, "\u0120voicing": 47640, "\u0120thug": 47641, "\u0120pornographic": 47642, "IPS": 47643, "\u0120moaning": 47644, "\u0120grievance": 47645, "\u0120confessions": 47646, "escal": 47647, "TEXTURE": 47648, "Authent": 47649, "osaurus": 47650, "Purchase": 47651, "\u0120relegation": 47652, "alter": 47653, "\u0120\u00c2\u0142\u00c2\u0142": 47654, "\u0120riddled": 47655, "\u0120ogre": 47656, "\u0120Lowell": 47657, "Occup": 47658, "Eat": 47659, "\u0120Hyder": 47660, "\u0120Adviser": 47661, "Commerce": 47662, "Hunt": 47663, "\u0120Orth": 47664, "\u0120Competitive": 47665, "\u0120CLA": 47666, "CDC": 47667, "\u0120salads": 47668, "Fle": 47669, "\u0120industrialized": 47670, "`,": 47671, "\u0120OWN": 47672, "\u0120beck": 47673, "\u0120Particularly": 47674, "oubt": 47675, "\u0120mM": 47676, "\u0120Hussain": 47677, "\u0120Chennai": 47678, "\u0120920": 47679, "\u0120appointing": 47680, "\u0120Cullen": 47681, 
",,,,,,,,": 47682, "\u0120pores": 47683, "verified": 47684, "\u0120biochemical": 47685, "emate": 47686, "\u0120cowardly": 47687, "\u0120Helsinki": 47688, "\u0120Ethiopian": 47689, "SOURCE": 47690, "ERC": 47691, "estro": 47692, "\u0120biotech": 47693, "\u0120Sour": 47694, "\u0120brewer": 47695, "Bloomberg": 47696, "\u0120intensify": 47697, "Glass": 47698, "anco": 47699, "\u0120FDR": 47700, "greSQL": 47701, "\u0120Fires": 47702, "\u00a9\u00b6\u00e6\u00a5\u00b5": 47703, "eco": 47704, "1001": 47705, "\u0120Homeless": 47706, "\u0120instantaneous": 47707, "\u0120Haste": 47708, "igel": 47709, "Diamond": 47710, "\u0120paving": 47711, "\u0120landfill": 47712, "\u0120dads": 47713, "houn": 47714, ":]": 47715, "\u0120incendiary": 47716, "\u0120Livingston": 47717, "\u0120Hilbert": 47718, "\u0120Checks": 47719, "styles": 47720, "inators": 47721, "\u0120Clive": 47722, "phrine": 47723, "\u0120chimpanzees": 47724, "\u0120pall": 47725, "\u0120JM": 47726, "\u0120Aadhaar": 47727, "\u00f0\u013f": 47728, "\u0120achievable": 47729, "disabled": 47730, "PET": 47731, "OOOOOOOO": 47732, "Mot": 47733, "\u0120intangible": 47734, "\u0120ballet": 47735, "\u0120Webs": 47736, "\u0120Estimated": 47737, "Effects": 47738, "\u0120bailed": 47739, "Joshua": 47740, "\u0120turbulence": 47741, "\u0120occupant": 47742, "\u0120Daylight": 47743, "\u0120361": 47744, "meet": 47745, "\u0120statically": 47746, "\u0120onlook": 47747, "\u0120ki": 47748, "illegal": 47749, "\u0120velvet": 47750, "\u0120dehydration": 47751, "\u0120acquies": 47752, "\u0120Rez": 47753, "akura": 47754, "\u0120Upton": 47755, "atro": 47756, "\u0120incomprehensible": 47757, "\u0120backdoor": 47758, "\u0120Rhino": 47759, "727": 47760, "\u0120maths": 47761, ")+": 47762, "\u0120heresy": 47763, "\u0120df": 47764, "\u0120Roche": 47765, "\u0120Lydia": 47766, "\u0120pancreat": 47767, "reply": 47768, "arrell": 47769, "\u0120solicitation": 47770, "\u0120circadian": 47771, "BIP": 47772, "\u0120foray": 47773, "\u0120cryptic": 47774, "izu": 47775, "imeo": 47776, "\u0120Tomato": 47777, "\u0120Homs": 47778, "examination": 47779, "\u0120quarry": 47780, "\u0120Valiant": 47781, "\u0120Jericho": 47782, "\u0120INCLUD": 47783, "\u01201840": 47784, "519": 47785, "\u0120resists": 47786, "\u0120snapshots": 47787, "\u0120Spur": 47788, "\u0120Antiqu": 47789, "Login": 47790, "\u0120bestselling": 47791, "\u0120antic": 47792, "\u0120Sutherland": 47793, "\u00e3\u0124\u00a2\u00e3\u0125\u00ab": 47794, "\u0120~/": 47795, "\u0120Parm": 47796, "\u00e8\u0125": 47797, "Pages": 47798, "intensity": 47799, "\u0120immobil": 47800, "\u01201865": 47801, "zzo": 47802, "\u0120nifty": 47803, "\u0120fentanyl": 47804, "\u0120Preservation": 47805, "ophen": 47806, "\u0120darts": 47807, "\u0120Dinosaur": 47808, "pointers": 47809, "\u0120Rite": 47810, "suggest": 47811, "awareness": 47812, "\u0120Sheridan": 47813, "\u0120stances": 47814, "\u0120sorcery": 47815, "\u0120perjury": 47816, "\u0120Nikola": 47817, "iever": 47818, "\u0120fiance": 47819, "\u0120Jordanian": 47820, "\u0120Balloon": 47821, "\u0120nab": 47822, "\u0120kb": 47823, "\u0120humanities": 47824, "\u0120Tanaka": 47825, "hillary": 47826, "\u0120consultancy": 47827, "\u0120Zub": 47828, "\u0120remission": 47829, "\u0120confid": 47830, "CHQ": 47831, "\u0120Fug": 47832, "\u0120improvis": 47833, "Yep": 47834, "/_": 47835, "\u0120unwillingness": 47836, "\u0120portfolios": 47837, "055": 47838, "\u0120Instructor": 47839, "aiman": 47840, "\u0120claimants": 47841, "Mbps": 47842, "\u0120Bye": 47843, "received": 47844, "Tweet": 47845, "\u0120indemn": 47846, "riz": 
47847, "amara": 47848, "Nat": 47849, "\u0120evaluates": 47850, "\u0120Lur": 47851, "epad": 47852, "FOX": 47853, "\u0120Thro": 47854, "\u0120rusty": 47855, "\u0120bedrock": 47856, "\u0120Oprah": 47857, "JB": 47858, "\u0120manipulative": 47859, "\u0120willful": 47860, "\u0120relapse": 47861, "\u0120extant": 47862, "Theme": 47863, "Sensor": 47864, "\u0120Stability": 47865, "govern": 47866, "\u0120poppy": 47867, "\u0120knack": 47868, "\u0120insulated": 47869, "\u0120Tile": 47870, "\u0120Extrem": 47871, "\u0120untold": 47872, "\u0120converge": 47873, "\u0120refuel": 47874, "igroup": 47875, "\u0120distortions": 47876, "\u0120ravaged": 47877, "\u0120mechanically": 47878, "\u0120Reilly": 47879, "\u0120Nose": 47880, "\u0120Incarnation": 47881, "\u0120Becky": 47882, "abbling": 47883, "\u0120taco": 47884, "\u0120rake": 47885, "\u0120melancholy": 47886, "\u0120illustrious": 47887, "\u0120Dartmouth": 47888, "Guide": 47889, "\u0120Razer": 47890, "\u0120Benz": 47891, "Ultimate": 47892, "\u0120Surprise": 47893, "\u0120pageant": 47894, "offer": 47895, "Whoever": 47896, "\u0120wiser": 47897, "\u0120chemist": 47898, "\u0120HELL": 47899, "\u0120Bulk": 47900, "\u0120plutonium": 47901, "\u0120COVER": 47902, "\u00d6\u00bc": 47903, "failed": 47904, "\u0120tirelessly": 47905, "\u0120infertility": 47906, "\u0120Trident": 47907, "\u0120Showtime": 47908, "\u0120Civ": 47909, "Vice": 47910, "requires": 47911, "ittance": 47912, "\u0120uncontrolled": 47913, "interesting": 47914, "561": 47915, "\u0120innovate": 47916, "ategic": 47917, "Lie": 47918, "\u0120Selling": 47919, "Ul": 47920, "\u0120savior": 47921, "\u0120Tosh": 47922, "\u0120swast": 47923, "PASS": 47924, "\u0120rink": 47925, "\u0120cardio": 47926, "\u0120Iro": 47927, "udi": 47928, "\u0120vantage": 47929, "\u0120vans": 47930, "\u0120Ni\u00c3\u00b1o": 47931, "+=": 47932, "\u0120propagate": 47933, "": 49029, "\u0120leukemia": 49030, "\u0120eluc": 49031, "\u0120announcer": 49032, "\u0120Lithuan": 49033, "\u0120Armageddon": 49034, "\u00e5\u0129": 49035, "Lenin": 49036, "\u0120Ruk": 49037, "\u0120pepp": 49038, "\u0120Romantic": 49039, "\u0120PIT": 49040, "\u0120Interstellar": 49041, "\u0120Atkinson": 49042, "Raid": 49043, "Js": 49044, "Goal": 49045, "Course": 49046, "\u0120vanishing": 49047, "esley": 49048, "\u0120Rounds": 49049, "Elsa": 49050, "593": 49051, "\u0120redundancy": 49052, "\u0120STAND": 49053, "\u0120prophetic": 49054, "\u0120habitable": 49055, "ryu": 49056, "\u0120faintly": 49057, "MODE": 49058, "\u0120flanked": 49059, "IRC": 49060, "Awesome": 49061, "\u0120spurious": 49062, "\u0120Zah": 49063, "\u0120MSG": 49064, "\u0120shading": 49065, "\u0120motivational": 49066, "\u0120Santana": 49067, "\u0120SPR": 49068, "\u0120excruciating": 49069, "omial": 49070, "\u0120Miko": 49071, "\u0120Leopard": 49072, "Abyss": 49073, "\u0120[|": 49074, "dirty": 49075, "\u0120baths": 49076, "\u0120demoral": 49077, "andre": 49078, "PB": 49079, "\u0120unification": 49080, "\u0120sacrament": 49081, "\u0120[&": 49082, "\u0120priceless": 49083, "\u0120gelatin": 49084, "\u0120emanating": 49085, "\u0120Allaah": 49086, "986": 49087, "\u0120outburst": 49088, "\u0120eras": 49089, "\u0120XVI": 49090, "\u0120SPI": 49091, "Ott": 49092, "\u0120Lazarus": 49093, "PLIED": 49094, "Flying": 49095, "blogs": 49096, "Wisconsin": 49097, "Raven": 49098, "\u0120rebate": 49099, "\u0120creeps": 49100, "\u0120Span": 49101, "\u0120Painter": 49102, "\u0120Kira": 49103, "\u0120Amos": 49104, "\u0120Corvette": 49105, "Consumer": 49106, "\u0120Recover": 49107, "cki": 49108, "\u0120pesky": 49109, 
"\u0120Invention": 49110, "Companies": 49111, "\u0120challengers": 49112, "ademic": 49113, "\u0120Ukrainians": 49114, "\u0120Neurolog": 49115, "\u0120Forsaken": 49116, "\u0120entrants": 49117, "\u0120embattled": 49118, "\u0120defunct": 49119, "\u0120Glacier": 49120, "\u0120poisons": 49121, "\u0120Horses": 49122, "makes": 49123, "\u0120Dirt": 49124, "\u0120423": 49125, "hhh": 49126, "\u0120Transformation": 49127, "QUIRE": 49128, "..................": 49129, "\u0120traveller": 49130, "\u0120Sexy": 49131, "\u0120Kern": 49132, "ipolar": 49133, "\u0120ransomware": 49134, "oooooooooooooooo": 49135, "Ec": 49136, "ruby": 49137, "Professional": 49138, "\u0120Outbreak": 49139, "argument": 49140, "Grey": 49141, "\u0120Fifa": 49142, "\u0120CHO": 49143, "\u0120FORM": 49144, "\u0120Amtrak": 49145, "-[": 49146, "\u0120cradle": 49147, "\u0120antioxidants": 49148, "\u00e3\u0123\u00ae\u00e5\u00ae": 49149, "736": 49150, "\u0120NASL": 49151, "\u0120Contributions": 49152, "Indiana": 49153, "\u0120STEP": 49154, "CSS": 49155, "\u0120salient": 49156, "\u0120allocations": 49157, "yrights": 49158, "\u0120mashed": 49159, "\u0120Cutter": 49160, "Sexual": 49161, "\u0120pounded": 49162, "\u0120fanbase": 49163, "\u0120casc": 49164, "\u0120Transparency": 49165, "\u0120analytic": 49166, "\u0120Summoner": 49167, "\u00d7\u0140": 49168, "\u0120ADC": 49169, "detail": 49170, "\u0120vanquished": 49171, "\u0120crabs": 49172, "arie": 49173, "Destroy": 49174, "\u0120Sack": 49175, "\u0120transistor": 49176, "Alabama": 49177, "\u0120Koen": 49178, "\u0120Fisheries": 49179, "cone": 49180, "\u0120annexed": 49181, "\u0120MGM": 49182, "esa": 49183, "\u0120faked": 49184, "\u0120Congratulations": 49185, "\u0120hindered": 49186, "\u0120correctional": 49187, "\u0120ITV": 49188, "leeve": 49189, "\u0120inappropriately": 49190, "licks": 49191, "\u0120trespass": 49192, "\u0120paws": 49193, "\u0120negotiator": 49194, "\u0120Christensen": 49195, "limits": 49196, "\u0120Dianne": 49197, "\u0120elegance": 49198, "\u0120Contracts": 49199, "anke": 49200, "Obj": 49201, "\u0120vigilance": 49202, "\u0120castles": 49203, "\u0120NAD": 49204, "\u0120Holo": 49205, "\u0120emphatically": 49206, "\u0120Titus": 49207, "\u0120Serving": 49208, "\u0120Richie": 49209, "\u0120Pigs": 49210, "568": 49211, "\u0120animosity": 49212, "\u0120Attributes": 49213, "\u0120Uriel": 49214, "MQ": 49215, "myra": 49216, "\u0120Applicant": 49217, "\u0120psychiatrists": 49218, "\u0120Vij": 49219, "\u0120Abby": 49220, "agree": 49221, "Push": 49222, "\u0120kWh": 49223, "hiba": 49224, "\u0120incite": 49225, "\u0120Weasley": 49226, "\u0120Taxi": 49227, "ministic": 49228, "hyper": 49229, "\u0120Farn": 49230, "\u0120601": 49231, "\u0120Nationwide": 49232, "Fake": 49233, "952": 49234, "\u0120maize": 49235, "\u0120interacted": 49236, "\u0120transitioned": 49237, "\u0120parasitic": 49238, "\u0120harmonic": 49239, "\u0120decaying": 49240, "\u0120baseless": 49241, "nsics": 49242, "\u0120transpired": 49243, "\u0120abundantly": 49244, "\u0120Forensic": 49245, "\u0120treadmill": 49246, "\u0120Jav": 49247, "aband": 49248, "\u0120sshd": 49249, "\u0120frontman": 49250, "\u0120Jakarta": 49251, "oller": 49252, "drops": 49253, "\u0120SERVICES": 49254, "romptu": 49255, "ophical": 49256, "hospital": 49257, "bledon": 49258, "645": 49259, "\u0120midrange": 49260, "\u0120EVENT": 49261, "culated": 49262, "rawled": 49263, "\u0120perched": 49264, "\u0120overboard": 49265, "\u0120Peel": 49266, "\u0120Pwr": 49267, "\u0120Carth": 49268, "\u0120COMPLE": 49269, "coe": 49270, "shall": 49271, "\u0120deterrence": 49272, 
"METHOD": 49273, "\u0120Absent": 49274, "MEN": 49275, "\u0120sill": 49276, "\u0120LEVEL": 49277, "York": 49278, "\u0120sinners": 49279, "\u0120OPEC": 49280, "\u0120Nur": 49281, "\u0120Designs": 49282, "selection": 49283, "\u0120unworthy": 49284, "CHA": 49285, "\u0120strengthens": 49286, "883": 49287, "edly": 49288, "\u0120slicing": 49289, "\u0120malnutrition": 49290, "\u0120filmmaking": 49291, "\u0120Polk": 49292, "urated": 49293, "\u0120421": 49294, "breakers": 49295, "!'\"": 49296, "\u0120wetlands": 49297, "\u0120Discrimination": 49298, "\u0120allowable": 49299, "\u0120steered": 49300, "\u0120Sicily": 49301, "SAM": 49302, "\u0120mustache": 49303, "\u0120mids": 49304, "\u0120clipped": 49305, "\u0120circulate": 49306, "\u0120brittle": 49307, "\u0120Buildings": 49308, "raised": 49309, "\u0120Roundup": 49310, "\u0120wealthier": 49311, "\u0120overwrite": 49312, "\u0120overpowered": 49313, "\u0120Gerrard": 49314, "sites": 49315, "PDATED": 49316, "\u0120acutely": 49317, "\u0120Gamble": 49318, "\u0120pim": 49319, "\u0120Kus": 49320, "Typically": 49321, "Deploy": 49322, "\u0120Moroccan": 49323, "potion": 49324, "combe": 49325, "\u0120vigilante": 49326, "\u0120363": 49327, "Stew": 49328, "\u0120Bagg": 49329, "\u0120resided": 49330, "\u0120Spo": 49331, "\u0120remnant": 49332, "\u0120emptiness": 49333, "brainer": 49334, "\u0120outpatient": 49335, "priority": 49336, "\u0120leptin": 49337, "\u0120Payton": 49338, "\u0120Gleaming": 49339, "\u0120Shed": 49340, "\u0120Polo": 49341, "\u0120Mormonism": 49342, "restricted": 49343, "arlane": 49344, "wx": 49345, "\u0120creatine": 49346, "\u0120Anon": 49347, "\u0120STUD": 49348, "\u0120JUL": 49349, "\u0120Tee": 49350, "528": 49351, "089": 49352, "\u0120hatched": 49353, "Dispatch": 49354, "\u0120Composite": 49355, "\u0120451": 49356, "puff": 49357, "\u0120XCOM": 49358, "\u0120Orn": 49359, "\u0120THANK": 49360, "ENDED": 49361, "\u0120Asheville": 49362, "\u0120\u00c3\u013e": 49363, "\u0120mango": 49364, "\u0120Slightly": 49365, "worldly": 49366, "\u0120Wander": 49367, "\u0120Expand": 49368, "\u0120Chr": 49369, "Mist": 49370, "\u0120orthodoxy": 49371, "\u0120UNESCO": 49372, "regate": 49373, "Elsewhere": 49374, "kie": 49375, "irled": 49376, "\u0120topple": 49377, "\u0120adoptive": 49378, "\u0120Legs": 49379, "dress": 49380, "\u0120Sagan": 49381, "bare": 49382, "\u0120Glou": 49383, "Crunch": 49384, "\u0120helpers": 49385, "\u0120chronically": 49386, "\u0120Huma": 49387, "10000": 49388, "\u0120accommodating": 49389, "\u00e4\u00ba\u0136": 49390, "\u0120wrinkles": 49391, "\u0120dodged": 49392, "fourth": 49393, "\u0120precon": 49394, "\u0120compressor": 49395, "\u0120Kare": 49396, "\u0120evict": 49397, "\u0120Warwick": 49398, "imar": 49399, "\u0120modernization": 49400, "\u0120bandwagon": 49401, "\u0120refuted": 49402, "\u0120netted": 49403, "\u0120Naples": 49404, "\u0120Genie": 49405, "perors": 49406, "\u0120fielded": 49407, "\u0120dere": 49408, "\u0120Parables": 49409, "lees": 49410, "\u0120trout": 49411, "aspers": 49412, "\u0120nihil": 49413, "\u0120happiest": 49414, "\u0120floppy": 49415, "\u0120Loft": 49416, "\u0120Heard": 49417, "\u0120unison": 49418, "\u0120lug": 49419, "\u0120Redmond": 49420, "classic": 49421, "Supporters": 49422, "SHIP": 49423, "GMT": 49424, "\u0120fuelled": 49425, "\u00e7\u0132": 49426, "\u0120dd": 49427, "\u0120Eminem": 49428, "\u01201897": 49429, "NYSE": 49430, "\u0120secretaries": 49431, "\u0120FIA": 49432, "\u0120Canaveral": 49433, "Favorite": 49434, "\u0120pomp": 49435, "\u0120detainee": 49436, "ership": 49437, "aimon": 49438, "iour": 
49439, "\u0120Apex": 49440, "\u0120plantations": 49441, "amia": 49442, "acion": 49443, "Rust": 49444, "\u0120towed": 49445, "\u0120Truly": 49446, "577": 49447, "\u0120sheltered": 49448, "rider": 49449, "Wo": 49450, "\u0120lair": 49451, "\u0120Intelligent": 49452, "improve": 49453, "matically": 49454, "\u0120etiquette": 49455, "adra": 49456, "allo": 49457, "\u0120Juno": 49458, "anything": 49459, "\u0120Struggle": 49460, "\u0120Predict": 49461, "\u0120Grimes": 49462, "\u0120AMERICA": 49463, "ctx": 49464, "\u0120Situation": 49465, "WOOD": 49466, "\u0120soluble": 49467, "meier": 49468, "\u0120intolerable": 49469, "angering": 49470, "\u0120uninterrupted": 49471, "\u0120tooltip": 49472, "\u0120interrogated": 49473, "\u0120gunned": 49474, "\u0120Sneak": 49475, "\u00e6\u0143\u00a6": 49476, "\u0120tether": 49477, "\u0120crumble": 49478, "Lens": 49479, "\u0120clustered": 49480, "\u0120Syl": 49481, "\u0120Hasan": 49482, "\u0120dystopian": 49483, "wana": 49484, "\u0120joystick": 49485, "\u0120Thib": 49486, "ammu": 49487, "Tomorrow": 49488, "546": 49489, "\u0120overcame": 49490, "\u0120minimized": 49491, "ceptor": 49492, "Runner": 49493, "ENGTH": 49494, "\u0120Brenda": 49495, "\u0120Achievements": 49496, "\u0120torches": 49497, "\u0120rapport": 49498, "\u0120Investigator": 49499, "\u0120Handling": 49500, "relation": 49501, "grey": 49502, "815": 49503, "\u0120kcal": 49504, "\u0120Commands": 49505, "dq": 49506, "\u0120curls": 49507, "\u0120bearer": 49508, "\u0120cynicism": 49509, "itri": 49510, "\u0120Useful": 49511, "Bee": 49512, "DCS": 49513, "\u0120abras": 49514, "Pract": 49515, "BILITIES": 49516, "712": 49517, "\u0120debugger": 49518, "\u0120debtor": 49519, "\u0120Lia": 49520, "\u0120Kers": 49521, "\u0120exacerbate": 49522, "\u0120Stacy": 49523, "\u0120Bland": 49524, "\u0120Scenes": 49525, "\u0120branching": 49526, "\u00e2\u0138\u012a\u00e2\u0138\u012a\u00e2\u0138\u012a\u00e2\u0138\u012a\u00e2\u0138\u012a\u00e2\u0138\u012a\u00e2\u0138\u012a\u00e2\u0138\u012a": 49527, "apeake": 49528, "\u0120salsa": 49529, "\u0120mishand": 49530, "\u0120Konami": 49531, "\u0120Nib": 49532, "\u0120anecdote": 49533, "\u0120agreeable": 49534, "\u00cf\u012b": 49535, "\u0120Nathaniel": 49536, "\u0120Heisman": 49537, "\u0120Beware": 49538, "\u01201886": 49539, "spective": 49540, "691": 49541, "522": 49542, "\u0120inhibits": 49543, "\u0120hashing": 49544, "\u01201889": 49545, "\u00e5\u00b0\u0128": 49546, "vich": 49547, "Pure": 49548, "\u0120solidly": 49549, "\u0120aspirin": 49550, "imaru": 49551, "\u0120streetcar": 49552, "\u0120UCS": 49553, "\u0120Judd": 49554, "\u0120flashbacks": 49555, "pins": 49556, "\u01201440": 49557, "\u0120UNHCR": 49558, "\u0120Symptoms": 49559, "TIT": 49560, "538": 49561, "Fra": 49562, "%);": 49563, "\u0120ooz": 49564, "\u0120curfew": 49565, "\u0120calmed": 49566, "\u0120participates": 49567, "TeX": 49568, "\u0120nonsensical": 49569, "\u0120fullback": 49570, "\u0120DeL": 49571, "monkey": 49572, "hari": 49573, "\u0120metabolites": 49574, "\u0120looted": 49575, "\u0120ALWAYS": 49576, "\u0120BCC": 49577, "Lt": 49578, "ochet": 49579, "Bone": 49580, "\u0120vetoed": 49581, "\u0120gcc": 49582, "\u0120CLICK": 49583, "\u01201888": 49584, "saf": 49585, "\u0120stiffness": 49586, "\u0120lowly": 49587, "\u0120Geh": 49588, "verson": 49589, "orset": 49590, "\u0120unforeseen": 49591, "\u0120anesthesia": 49592, "\u0120Optical": 49593, "\u0120reconstructed": 49594, "\u0120Tup": 49595, "shows": 49596, "NEWS": 49597, "\u0120Newspaper": 49598, "\u0120ASA": 49599, "tera": 49600, "Numbers": 49601, "\u0120inexplicable": 
49602, "\u00d7\u0133": 49603, "\u0120hardness": 49604, "untarily": 49605, "\u0120Acer": 49606, "gradient": 49607, "ARDIS": 49608, "\u0120woodland": 49609, "\u0120metaphors": 49610, "\u0120Wembley": 49611, "\u0120Pavel": 49612, "philis": 49613, "\u0120rewriting": 49614, "\u0120perceptual": 49615, "\u01201070": 49616, "worms": 49617, "\u0120Downs": 49618, "\u0120unsurprisingly": 49619, "\u0120tagging": 49620, "flame": 49621, "\u0120litres": 49622, "\u0120bounces": 49623, "\u0120Babe": 49624, "shut": 49625, "\u0120overdoses": 49626, "\u0120Sheila": 49627, "\u0120Chau": 49628, "\u0120Bless": 49629, "Capture": 49630, "\u0120Significant": 49631, "\u0120Scion": 49632, "\u0120389": 49633, "\u0120McH": 49634, "\u0120Titanium": 49635, "\u0120Meal": 49636, "ameda": 49637, "agents": 49638, "aggressive": 49639, "Billy": 49640, "763": 49641, "\u0120Saying": 49642, "DERR": 49643, "itone": 49644, "Collins": 49645, "Bound": 49646, "\u0120bolted": 49647, "\u0120DMCA": 49648, "953": 49649, "\u0120uniqueness": 49650, "\u0120epigen": 49651, "unci": 49652, "antam": 49653, "\u0120reckoning": 49654, "chairs": 49655, "OGR": 49656, "\u0120Senegal": 49657, "\u01201862": 49658, "relevant": 49659, "\u0120\u00c2\u00af": 49660, "\u0120pharmacies": 49661, "\u0120Geral": 49662, "vier": 49663, "Yan": 49664, "ORPG": 49665, "\u0120rabid": 49666, "bending": 49667, "\u0120UNITED": 49668, "\u0120465": 49669, "Assembly": 49670, "\u0120weep": 49671, "\u0120behest": 49672, "\u0120Mothers": 49673, "\u0120Jace": 49674, "hid": 49675, "\u0120whirlwind": 49676, "\u0120UNIVERS": 49677, "\u0120utopian": 49678, "\u0120kidnap": 49679, "Philipp": 49680, "Kin": 49681, "893": 49682, "\u0120livestream": 49683, "\u0120MISS": 49684, "\u0120subversive": 49685, "\u0120Techniques": 49686, "\u0120JUSTICE": 49687, "\u0120BASE": 49688, "\u0120387": 49689, "\u0120assailants": 49690, "\u0120Hardcore": 49691, "\u0120sprinkled": 49692, "\u0120Pse": 49693, "\u00e9\u013c": 49694, "printed": 49695, "\u0120Hau": 49696, "ORGE": 49697, "\u0120TOUR": 49698, "\u0120laced": 49699, "\u0120itch": 49700, "Giving": 49701, "\u0120ported": 49702, "781": 49703, "////////////////////////////////": 49704, "breeding": 49705, "\u0120logger": 49706, "\u0120HOL": 49707, "innie": 49708, "Firstly": 49709, "\u0120embryonic": 49710, "\u0120delegated": 49711, "pai": 49712, "OIL": 49713, "\u0120centrally": 49714, "\u0120Rx": 49715, "\u0120Scouting": 49716, "Dutch": 49717, "\u0120hereditary": 49718, "\u0120Cruiser": 49719, "sat": 49720, "529": 49721, "\u0120Marriott": 49722, "othermal": 49723, "\u0120prohibitions": 49724, "Earn": 49725, "\u0120Stab": 49726, "\u0120Colleges": 49727, "\u0120Belief": 49728, "stretched": 49729, "\u0120LH": 49730, "\u0120EntityItem": 49731, "CIA": 49732, "\u0120unrem": 49733, "\u0120laureate": 49734, "\u0120denominations": 49735, "summary": 49736, "hler": 49737, "Spect": 49738, "\u0120Klaus": 49739, "\u0120Beans": 49740, "\u0120insur": 49741, "\u0120PAX": 49742, "\u0120fielder": 49743, "\u0120Vet": 49744, "\u0120Sparrow": 49745, "zie": 49746, "\u0120SQ": 49747, "\u0120Mondays": 49748, "\u0120Offline": 49749, "\u0120Lerner": 49750, "\u0120Extensions": 49751, "Ireland": 49752, "\u0120patronage": 49753, "\u0120contrasted": 49754, "\u0120Mania": 49755, "hirt": 49756, "Moscow": 49757, "\u0120condemns": 49758, "\u0120Ange": 49759, "\u0120composing": 49760, "\u0120Pepe": 49761, "\u0120Paddock": 49762, "\u0120heterogeneity": 49763, "\u0120ideologically": 49764, "\u0120fishes": 49765, "\u0120cursing": 49766, "\u0120Rutherford": 49767, "\u0120Floating": 49768, 
"\u0120Amelia": 49769, "Tea": 49770, "Synopsis": 49771, "\u0120stunts": 49772, "\u0120bead": 49773, "\u0120stocking": 49774, "\u0120MILL": 49775, "obook": 49776, "massive": 49777, "\\<": 49778, "\u0120hump": 49779, "\u0120Preferences": 49780, "EngineDebug": 49781, "geist": 49782, "\u0120Nieto": 49783, "omever": 49784, "ishy": 49785, "evaluate": 49786, "colonial": 49787, "Alternative": 49788, "\u0120GoPro": 49789, "\u0120Vortex": 49790, "\u0120NETWORK": 49791, "ansky": 49792, "Secure": 49793, "\u0120Thrust": 49794, "Snake": 49795, "\u0120parcels": 49796, "\u0120samurai": 49797, "\u0120actresses": 49798, "Nap": 49799, "MF": 49800, "iferation": 49801, "Beer": 49802, "523": 49803, "\u0120Ily": 49804, "ointment": 49805, "Ping": 49806, "\u0120striped": 49807, "\u0120Mellon": 49808, "ossession": 49809, "\u0120neutron": 49810, "endium": 49811, "\u0120aph": 49812, "\u0120Flavoring": 49813, "\u0120383": 49814, "\u0120responsiveness": 49815, "\u0120Jindal": 49816, "\u0120Hitchcock": 49817, "Denver": 49818, "\u0120DRAGON": 49819, "smanship": 49820, "\u0120Dupl": 49821, "\u0120sly": 49822, "\u0120webcam": 49823, "\u0120Twain": 49824, "\u0120Darling": 49825, "iliate": 49826, "consumer": 49827, "DIT": 49828, "\u0120namesake": 49829, "\u0120unorthodox": 49830, "\u0120funer": 49831, "\u0120PLoS": 49832, "\u0120CONTROL": 49833, "ozyg": 49834, "oglobin": 49835, "FACE": 49836, "ERG": 49837, "\u0120Dia": 49838, "\u0120Fiesta": 49839, "cele": 49840, "034": 49841, "\u0120enclave": 49842, "\u00e2\u0138\u00ac\u00e2\u0138\u00ac": 49843, "onement": 49844, "alist": 49845, "Mand": 49846, "\u0120homegrown": 49847, "\u0120Fancy": 49848, "\u0120conceptions": 49849, "\u0120Contains": 49850, "ureen": 49851, "\u0120reiterate": 49852, "\u0120meager": 49853, "\u0120installments": 49854, "Spawn": 49855, "627": 49856, "\u0120photoc": 49857, "\u0120Cabrera": 49858, "\u0120Rosenthal": 49859, "\u0120Lansing": 49860, "isner": 49861, "\u0120invests": 49862, "\u0120UFOs": 49863, "EXP": 49864, "Hardware": 49865, "\u0120tragically": 49866, "\u0120concedes": 49867, "ieft": 49868, "cham": 49869, "borgh": 49870, "\u0120Schr": 49871, "\u0120Melanie": 49872, "\u0120Hoy": 49873, "\u0120visitation": 49874, "\u0120idiosyncr": 49875, "\u0120fractions": 49876, "\u0120foreskin": 49877, "obos": 49878, "\u0120poaching": 49879, "\u0120VIEW": 49880, "\u0120stimulates": 49881, "\u0120Gork": 49882, "canon": 49883, "MIC": 49884, "\u0120Nemesis": 49885, "\u0120Indra": 49886, "\u0120DMV": 49887, "\u0120529": 49888, "\u0120inspecting": 49889, "\u0120grandma": 49890, "\u0120Whedon": 49891, "\u0120Shant": 49892, "\u0120Purg": 49893, "ikan": 49894, "\u0120Teg": 49895, "\u0120CLR": 49896, "zac": 49897, "Victoria": 49898, "\u0120Verify": 49899, "ionics": 49900, "\u0120partying": 49901, "\u0120Mou": 49902, "colour": 49903, "\u0120testimonies": 49904, "lations": 49905, "\u0120pressuring": 49906, "hiro": 49907, "acers": 49908, "\u0120fid": 49909, "angler": 49910, "\u0120CSI": 49911, "\u0120hereafter": 49912, "\u0120dissidents": 49913, "reporting": 49914, "iphany": 49915, "chev": 49916, "\u0120solitude": 49917, "\u0120lobe": 49918, "\u0120indis": 49919, "\u0120credential": 49920, "recent": 49921, "adult": 49922, "\u0120Nirvana": 49923, "\u0120Franchise": 49924, "Layer": 49925, "Hyp": 49926, "\u0120Berkshire": 49927, "\u0120wills": 49928, "tif": 49929, "\u0120totem": 49930, "\u0120Judah": 49931, "repair": 49932, "Instant": 49933, "548": 49934, "\u0120embassies": 49935, "\u0120bottleneck": 49936, "\u0120bount": 49937, "\u0120typew": 49938, "\u0120Alvin": 49939, 
"jing": 49940, "imilar": 49941, "Rush": 49942, "\u0120brim": 49943, "\u0120HELP": 49944, "Aim": 49945, "]'": 49946, "\u0120passively": 49947, "\u0120bounded": 49948, "\u0120Rated": 49949, "\u0120criminality": 49950, "\u0120biomark": 49951, "\u0120dispatcher": 49952, "\u0120Towards": 49953, "\u0120+++": 49954, "righteous": 49955, "frog": 49956, "\u0120Panc": 49957, "Carter": 49958, "032": 49959, "\u00e6\u00a9\u0141": 49960, "\u0120ultraviolet": 49961, "\u0120Licensed": 49962, "\u0120Tata": 49963, "\u0120Blessing": 49964, "\u0120GAM": 49965, "\u0120chemically": 49966, "\u0120Seaf": 49967, "\u0120RELE": 49968, "\u0120Mercenary": 49969, "capitalist": 49970, "\u0120formulations": 49971, "\u0120annihilation": 49972, "\u0120Verb": 49973, "\u0120Argon": 49974, "\u0120unloaded": 49975, "\u0120morphed": 49976, "\u0120conquering": 49977, "backer": 49978, "IELD": 49979, "\u0120thefts": 49980, "\u0120frontrunner": 49981, "\u0120Royale": 49982, "\u0120Fundamental": 49983, "elight": 49984, "Chip": 49985, "necessary": 49986, "ayn": 49987, "\u0120Slip": 49988, "\u0120448": 49989, "cerned": 49990, "Pause": 49991, "\u0120shockingly": 49992, "\u0120ABV": 49993, "\u0120composure": 49994, "733": 49995, "\u0120Motorsport": 49996, "ahime": 49997, "Murray": 49998, "Mach": 49999, "\u0120grids": 50000, "\u0120debian": 50001, "\u0120furthermore": 50002, "\u0120dexterity": 50003, "\u0120Collections": 50004, "oslov": 50005, "ilage": 50006, "bj": 50007, "\u0120Monteneg": 50008, "\u0120strutConnector": 50009, "\u0120massacres": 50010, "\u0120briefs": 50011, "fetched": 50012, "uvian": 50013, "olition": 50014, "Failure": 50015, "emonic": 50016, "\u0120flared": 50017, "\u0120claimant": 50018, "\u0120cures": 50019, "\u0120giveaways": 50020, "\u0120Substance": 50021, "alions": 50022, "\u0120cringe": 50023, "\u0120Kul": 50024, "\u0120aristocracy": 50025, "\u0120Ulster": 50026, "olated": 50027, "housing": 50028, "\u0120MIS": 50029, "\u0120glared": 50030, "\u0120Wilhelm": 50031, "needs": 50032, "lambda": 50033, "builders": 50034, "\u0120VIS": 50035, "\u0120radiator": 50036, "\u0120Ghostbusters": 50037, "\u0120436": 50038, "actual": 50039, "\u0120herds": 50040, "\u00c3\u00a7a": 50041, "watching": 50042, "\u0120countering": 50043, "Charge": 50044, "\u0120charred": 50045, "\u0120warheads": 50046, "\u0120iodine": 50047, "\u0120Macy": 50048, "041": 50049, "\u0120departures": 50050, "\u0120Sins": 50051, "\u0120dyed": 50052, "\u0120Concepts": 50053, "gado": 50054, "713": 50055, "\u0120quotations": 50056, "\u0120gist": 50057, "\u0120Christy": 50058, "\u0120antigen": 50059, "\u0120Hemp": 50060, "\u0120Drawn": 50061, "\u0120Barg": 50062, "ezvous": 50063, "\u0120paternity": 50064, "\u0120ardu": 50065, "\u0120Anchorage": 50066, "\u0120Rik": 50067, "\u0120overloaded": 50068, "\u0120Username": 50069, "\u0120Tammy": 50070, "\u0120Nau": 50071, "\u0120Cellular": 50072, "\u0120waning": 50073, "\u0120rodent": 50074, "\u0120Worcester": 50075, "ilts": 50076, "\u0120Tad": 50077, "\u0120dwellings": 50078, "\u0120bullish": 50079, "431": 50080, "\u0120retaliate": 50081, "\u0120migraine": 50082, "\u0120Chevron": 50083, "CHECK": 50084, "\u0120donkey": 50085, "crim": 50086, "SPA": 50087, "\u0120Analog": 50088, "\u0120marquee": 50089, "\u0120Haas": 50090, "Bir": 50091, "\u0120GDDR": 50092, "\u0120Downloads": 50093, "\u0120willpower": 50094, "\u0120Forth": 50095, "\u0120Recorded": 50096, "\u0120impossibility": 50097, "\u0120Logged": 50098, "\u0120Franks": 50099, "\u0120Ratt": 50100, "initions": 50101, "\u0120cleaners": 50102, "\u0120sorely": 50103, 
"\u0120flickering": 50104, "\u0120Examination": 50105, "catching": 50106, "alloween": 50107, "Msg": 50108, "\u0120dunno": 50109, "Fa": 50110, "\u0120dysph": 50111, "crazy": 50112, ".''.": 50113, "\u0120mainline": 50114, "\u0120cs": 50115, "\u0120ptr": 50116, "\u0120Wally": 50117, "igun": 50118, "951": 50119, "\u0120Bigfoot": 50120, "fights": 50121, "\u0120retrieving": 50122, "Jr": 50123, "\u0120duplication": 50124, "\u0120Explan": 50125, "\u0120relational": 50126, "\u0120quaint": 50127, "\u0120biscuits": 50128, "\u0120ado": 50129, "\u0120shudder": 50130, "\u0120antidote": 50131, "blooded": 50132, "ksh": 50133, "\u0120sauces": 50134, "\u0120reinvest": 50135, "\u0120dispensary": 50136, "\u0120Diver": 50137, "\u01209000": 50138, "student": 50139, "\u0120insepar": 50140, "escap": 50141, "\u0120toddlers": 50142, "\u0120GPIO": 50143, "\u0120Assignment": 50144, "headers": 50145, "\u0120lackluster": 50146, "\u0120aback": 50147, "956": 50148, "\u0120toolbar": 50149, "745": 50150, "\u0120oust": 50151, "\u0120contemplation": 50152, "\u0120PRESIDENT": 50153, "\u0120458": 50154, "======": 50155, "\u0120guaranteeing": 50156, "\u0120Heist": 50157, "\u0120Cannes": 50158, "\u013b\u00bd": 50159, "\u0120collaborator": 50160, "\u0120Amp": 50161, "\u0120gou": 50162, "\u0120SHALL": 50163, "stories": 50164, "783": 50165, "\u0120mobilized": 50166, "\u0120brood": 50167, "\u0120LU": 50168, "\u0120\u00f0\u0141\u0133": 50169, "\u0120refin": 50170, "\u0120Anthropology": 50171, "vind": 50172, "illi": 50173, "\u0120warranties": 50174, "\u0120Babel": 50175, "\u0120swath": 50176, "\u0120caches": 50177, "\u0120antagonists": 50178, "artifacts": 50179, "\u0120hotly": 50180, "\u0120Starts": 50181, "\u0120G\u00c3\u00b6": 50182, "zag": 50183, "!!!!!": 50184, "\u0120scourge": 50185, "\u0120conspiring": 50186, "ruits": 50187, "reverse": 50188, "\u0120Sheen": 50189, "\u0120Jesuit": 50190, "\u0120Giovanni": 50191, "adies": 50192, "\u0120buttocks": 50193, "earcher": 50194, "acan": 50195, "\u0120volleyball": 50196, "\u0120shrouded": 50197, "\u0120scoreboard": 50198, "bats": 50199, "\u0120IPM": 50200, "\u0120asses": 50201, "\u0120deregulation": 50202, "\u0120Telegram": 50203, "\u0120Reboot": 50204, "\u01207000": 50205, "\u0120Canary": 50206, "\u0120kernels": 50207, "\u0120Fran\u00c3\u00a7ois": 50208, "\u0120Duff": 50209, "\u0120Pon": 50210, "\u0120Leica": 50211, "\u0120Garmin": 50212, "\u0120orphans": 50213, "\u0120Claudia": 50214, "\u0120calendars": 50215, "\u0120Leilan": 50216, "ento": 50217, "Rocket": 50218, "\u0120brunch": 50219, "\u0120Hawking": 50220, "ainers": 50221, "\u0120sensibilities": 50222, "\u0120kW": 50223, "\u0120Kand": 50224, "\u0120reclaimed": 50225, "\u0120interestingly": 50226, "\u00d7\u00a9": 50227, "romy": 50228, "JM": 50229, "\u0120Enhancement": 50230, "bush": 50231, "Skip": 50232, "\u0120rappers": 50233, "\u0120gazing": 50234, "pedia": 50235, "athlon": 50236, "Revolution": 50237, "\u0120snipers": 50238, "\u0120reverted": 50239, "\u0120conglomerate": 50240, "Terry": 50241, "794": 50242, "\u0120harsher": 50243, "\u0120desolate": 50244, "\u0120Hitman": 50245, "Commission": 50246, "\u0120(/": 50247, "\u00e2\u0122\u00a6.\"": 50248, "Compar": 50249, "\u0120amplification": 50250, "ominated": 50251, "\u0120regress": 50252, "\u0120Collider": 50253, "\u0120informants": 50254, "\u0120gazed": 50255, "<|endoftext|>": 50256} \ No newline at end of file diff --git a/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2_vocab_merge_download_info.json 
b/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2_vocab_merge_download_info.json new file mode 100644 index 0000000000000000000000000000000000000000..06ab6cdec3cb8d8f4c1c14b142d2b59d8701849a --- /dev/null +++ b/core/models/latent_diffusion/vae/optimus_modules/vocab/gpt2_vocab_merge_download_info.json @@ -0,0 +1,4 @@ +{ + "vocab_file": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-vocab.json", + "merges_file": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-merges.txt" +} \ No newline at end of file diff --git a/core/models/model_module_infer.py b/core/models/model_module_infer.py new file mode 100644 index 0000000000000000000000000000000000000000..c05a646c722805f89799536c3896f38e61309364 --- /dev/null +++ b/core/models/model_module_infer.py @@ -0,0 +1,172 @@ +import os +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.transforms as tvtrans + +from einops import rearrange + +import pytorch_lightning as pl + +from . import get_model +from ..cfg_helper import model_cfg_bank +from ..common.utils import regularize_image, regularize_video, remove_duplicate_word + +import warnings +warnings.filterwarnings("ignore") + + +class model_module(pl.LightningModule): + def __init__(self, model='codi', load_weights=True, data_dir='pretrained', pth=["CoDi_encoders.pth"], fp16=False): + super().__init__() + cfgm = model_cfg_bank()(model) + net = get_model()(cfgm) + if fp16: + net = net.half() + if load_weights: + for path in pth: + net.load_state_dict(torch.load(os.path.join(data_dir, path), map_location='cpu'), strict=False) + print('Loaded pretrained weights from {}'.format(pth)) + + self.net = net + + from core.models.ddim.ddim_vd import DDIMSampler_VD + self.sampler = DDIMSampler_VD(net) + + def decode(self, z, xtype): + device = z.device + net = self.net + z = z.to(device) + if xtype == 'image': + x = net.autokl_decode(z) + x = torch.clamp((x+1.0)/2.0, min=0.0, max=1.0) + # x = [tvtrans.ToPILImage()(xi) for xi in x] + return x + + elif xtype == 'video': + num_frames = z.shape[2] + z = rearrange(z, 'b c f h w -> (b f) c h w') + x = net.autokl_decode(z) + x = rearrange(x, '(b f) c h w -> b f c h w', f=num_frames) + + x = torch.clamp((x+1.0)/2.0, min=0.0, max=1.0) + video_list = [] + for video in x: + video_list.append([tvtrans.ToPILImage()(xi) for xi in video]) + return video_list + + elif xtype == 'text': + prompt_temperature = 1.0 + prompt_merge_same_adj_word = True + x = net.optimus_decode(z, temperature=prompt_temperature) + """ + if prompt_merge_same_adj_word: + xnew = [] + for xi in x: + xi_split = xi.split() + xinew = [] + for idxi, wi in enumerate(xi_split): + if idxi!=0 and wi==xi_split[idxi-1]: + continue + xinew.append(wi) + xnew.append(remove_duplicate_word(' '.join(xinew))) + x = xnew + """ + return x + + elif xtype == 'audio': + x = net.audioldm_decode(z) + x = net.mel_spectrogram_to_waveform(x) + return x + + def forward(self, xtype=[], condition=[], condition_types=[], n_samples=1, mix_weight={'video': 1, 'audio': 1, 'text': 1, 'image': 1}, image_size=256, ddim_steps=50, scale=7.5, num_frames=8): + device = self.device + net = self.net + sampler = self.sampler + ddim_eta = 0.0 + 
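+ # Classifier-free guidance setup: each condition below is encoded twice, once + # from the real input and once from a null input (zeros, or an empty string for + # text), and the two embeddings are concatenated along the batch axis. The + # sampler (DDIMSampler_VD, whose internals are not shown in this file) is + # assumed to split them again and mix the two predictions as + # eps_uncond + scale * (eps_cond - eps_uncond).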
+ conditioning = [] + assert len(set(condition_types)) == len(condition_types), "conditioning twice on the same modality is not supported yet." + assert len(condition) == len(condition_types) + + for i, condition_type in enumerate(condition_types): + if condition_type == 'image': + ctemp1 = regularize_image(condition[i]).squeeze().to(device) + ctemp1 = ctemp1[None].repeat(n_samples, 1, 1, 1) + cim = net.clip_encode_vision(ctemp1).to(device) + uim = None + if scale != 1.0: + dummy = torch.zeros_like(ctemp1).to(device) + uim = net.clip_encode_vision(dummy).to(device) + conditioning.append(torch.cat([uim, cim])) + + elif condition_type == 'video': + ctemp1 = regularize_video(condition[i]).to(device) + ctemp1 = ctemp1[None].repeat(n_samples, 1, 1, 1, 1) + cim = net.clip_encode_vision(ctemp1).to(device) + uim = None + if scale != 1.0: + dummy = torch.zeros_like(ctemp1).to(device) + uim = net.clip_encode_vision(dummy).to(device) + conditioning.append(torch.cat([uim, cim])) + + elif condition_type == 'audio': + ctemp = condition[i][None].repeat(n_samples, 1, 1) + cad = net.clap_encode_audio(ctemp) + uad = None + if scale != 1.0: + dummy = torch.zeros_like(ctemp) + uad = net.clap_encode_audio(dummy) + conditioning.append(torch.cat([uad, cad])) + + elif condition_type == 'text': + ctx = net.clip_encode_text(n_samples * [condition[i]]).to(device) + utx = None + if scale != 1.0: + utx = net.clip_encode_text(n_samples * [""]).to(device) + conditioning.append(torch.cat([utx, ctx])) + + shapes = [] + for xtype_i in xtype: + if xtype_i == 'image': + h, w = [image_size, image_size] + shape = [n_samples, 4, h//8, w//8] + elif xtype_i == 'video': + h, w = [image_size, image_size] + shape = [n_samples, 4, num_frames, h//8, w//8] + elif xtype_i == 'text': + n = 768 + shape = [n_samples, n] + elif xtype_i == 'audio': + h, w = [256, 16] + shape = [n_samples, 8, h, w] + else: + raise ValueError('unknown xtype {}'.format(xtype_i)) + shapes.append(shape) + + z, _ = sampler.sample( + steps=ddim_steps, + shape=shapes, + condition=conditioning, + unconditional_guidance_scale=scale, + xtype=xtype, + condition_types=condition_types, + eta=ddim_eta, + verbose=False, + mix_weight=mix_weight) + + out_all = [] + for i, xtype_i in enumerate(xtype): + z[i] = z[i].to(device) + x_i = self.decode(z[i], xtype_i) + out_all.append(x_i) + return out_all diff --git a/core/models/sd.py b/core/models/sd.py new file mode 100644 index 0000000000000000000000000000000000000000..d73577ebd2302b17b53a16b4e30b96cc54fff8be --- /dev/null +++ b/core/models/sd.py @@ -0,0 +1,319 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +import numpy.random as npr +import copy +from functools import partial +from contextlib import contextmanager +from tqdm import tqdm + +from .common.get_model import get_model, register +from .ema import LitEma + +version = '0' +symbol = 'sd' + + +def make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3): + if schedule == "linear": + betas = ( + torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2 + ) + + elif schedule == "cosine": + timesteps = ( + torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s + ) + alphas = timesteps / (1 + cosine_s) * np.pi / 2 + alphas = torch.cos(alphas).pow(2) + alphas = alphas / alphas[0] + betas = 1 - alphas[1:] / alphas[:-1] + betas = torch.clamp(betas, min=0, max=0.999) + + elif schedule == "sqrt_linear": + betas = torch.linspace(linear_start, linear_end, n_timestep, 
dtype=torch.float64) + elif schedule == "sqrt": + betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5 + else: + raise ValueError(f"schedule '{schedule}' unknown.") + return betas.numpy() + + +def extract_into_tensor(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +# Minimal stand-ins for the usual ldm helpers referenced further down in this +# file; they are not imported from anywhere else here. +def default(val, d): + if val is not None: + return val + return d() if callable(d) else d + + +def noise_like(shape, device, repeat=False): + if repeat: + noise = torch.randn((1, *shape[1:]), device=device) + return noise.repeat(shape[0], *((1,) * (len(shape) - 1))) + return torch.randn(shape, device=device) + + +def highlight_print(info): + print('') + print(''.join(['#']*(len(info)+4))) + print('# '+info+' #') + print(''.join(['#']*(len(info)+4))) + print('') + + +class DDPM(nn.Module): + def __init__(self, + unet_config, + timesteps=1000, + use_ema=True, + + beta_schedule="linear", + beta_linear_start=1e-4, + beta_linear_end=2e-2, + loss_type="l2", + + clip_denoised=True, + cosine_s=8e-3, + given_betas=None, + + l_simple_weight=1., + original_elbo_weight=0., + + v_posterior=0., # weight for choosing posterior variance as sigma = (1-v) * beta_tilde + v * beta + parameterization="eps", + use_positional_encodings=False, + learn_logvar=False, + logvar_init=0., ): + + super().__init__() + assert parameterization in ["eps", "x0"], \ + 'currently only supporting "eps" and "x0"' + self.parameterization = parameterization + highlight_print("Running in {} mode".format(self.parameterization)) + + self.cond_stage_model = None + self.clip_denoised = clip_denoised + self.use_positional_encodings = use_positional_encodings + + from collections import OrderedDict + self.model = nn.Sequential(OrderedDict([('diffusion_model', get_model()(unet_config))])) + + self.use_ema = use_ema + if self.use_ema: + self.model_ema = LitEma(self.model) + print(f"Keeping EMAs of {len(list(self.model_ema.buffers()))} buffers.") + + self.v_posterior = v_posterior + self.l_simple_weight = l_simple_weight + self.original_elbo_weight = original_elbo_weight + + self.register_schedule( + given_betas=given_betas, + beta_schedule=beta_schedule, + timesteps=timesteps, + linear_start=beta_linear_start, + linear_end=beta_linear_end, + cosine_s=cosine_s) + + self.loss_type = loss_type + self.learn_logvar = learn_logvar + self.logvar = torch.full( + fill_value=logvar_init, size=(self.num_timesteps,)) + if self.learn_logvar: + self.logvar = nn.Parameter(self.logvar, requires_grad=True) + + def register_schedule(self, + given_betas=None, + beta_schedule="linear", + timesteps=1000, + linear_start=1e-4, + linear_end=2e-2, + cosine_s=8e-3): + if given_betas is not None: + betas = given_betas + else: + betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end, + cosine_s=cosine_s) + alphas = 1. - betas + alphas_cumprod = np.cumprod(alphas, axis=0) + alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1]) + + timesteps, = betas.shape + self.num_timesteps = int(timesteps) + self.linear_start = linear_start + self.linear_end = linear_end + assert alphas_cumprod.shape[0] == self.num_timesteps, \ + 'alphas have to be defined for each timestep' + + to_torch = partial(torch.tensor, dtype=torch.float32) + + self.register_buffer('betas', to_torch(betas)) + self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod)) + self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev)) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod))) + self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod))) + self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. 
- alphas_cumprod))) + self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod))) + self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1))) + + # calculations for posterior q(x_{t-1} | x_t, x_0) + posterior_variance = (1 - self.v_posterior) * betas * (1. - alphas_cumprod_prev) / ( + 1. - alphas_cumprod) + self.v_posterior * betas + # above: equal to 1. / (1. / (1. - alpha_cumprod_tm1) + alpha_t / beta_t) + self.register_buffer('posterior_variance', to_torch(posterior_variance)) + # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain + self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20)))) + self.register_buffer('posterior_mean_coef1', to_torch( + betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod))) + self.register_buffer('posterior_mean_coef2', to_torch( + (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod))) + + if self.parameterization == "eps": + lvlb_weights = self.betas ** 2 / ( + 2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod)) + elif self.parameterization == "x0": + lvlb_weights = 0.5 * np.sqrt(torch.Tensor(alphas_cumprod)) / (2. * 1 - torch.Tensor(alphas_cumprod)) + else: + raise NotImplementedError("mu not supported") + # TODO how to choose this term + lvlb_weights[0] = lvlb_weights[1] + self.register_buffer('lvlb_weights', lvlb_weights, persistent=False) + assert not torch.isnan(self.lvlb_weights).any() + + @contextmanager + def ema_scope(self, context=None): + if self.use_ema: + self.model_ema.store(self.model.parameters()) + self.model_ema.copy_to(self.model) + if context is not None: + print(f"{context}: Switched to EMA weights") + try: + yield None + finally: + if self.use_ema: + self.model_ema.restore(self.model.parameters()) + if context is not None: + print(f"{context}: Restored training weights") + + def q_mean_variance(self, x_start, t): + """ + Get the distribution q(x_t | x_0). + :param x_start: the [N x C x ...] tensor of noiseless inputs. + :param t: the number of diffusion steps (minus 1). Here, 0 means one step. + :return: A tuple (mean, variance, log_variance), all of x_start's shape. 
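+ In closed form: q(x_t | x_0) = N(x_t; sqrt(alphas_cumprod[t]) * x_0, (1 - alphas_cumprod[t]) * I), which is exactly what the three extract_into_tensor lookups below compute.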
+ """ + mean = (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start) + variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape) + log_variance = extract_into_tensor(self.log_one_minus_alphas_cumprod, t, x_start.shape) + return mean, variance, log_variance + + def predict_start_from_noise(self, x_t, t, noise): + value1 = extract_into_tensor( + self.sqrt_recip_alphas_cumprod, t, x_t.shape) + value2 = extract_into_tensor( + self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) + return value1*x_t -value2*noise + + def q_posterior(self, x_start, x_t, t): + posterior_mean = ( + extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start + + extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t + ) + posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape) + posterior_log_variance_clipped = extract_into_tensor(self.posterior_log_variance_clipped, t, x_t.shape) + return posterior_mean, posterior_variance, posterior_log_variance_clipped + + def p_mean_variance(self, x, t, clip_denoised: bool): + model_out = self.model(x, t) + if self.parameterization == "eps": + x_recon = self.predict_start_from_noise(x, t=t, noise=model_out) + elif self.parameterization == "x0": + x_recon = model_out + if clip_denoised: + x_recon.clamp_(-1., 1.) + + model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t) + return model_mean, posterior_variance, posterior_log_variance + + @torch.no_grad() + def p_sample(self, x, t, clip_denoised=True, repeat_noise=False): + b, *_, device = *x.shape, x.device + model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, clip_denoised=clip_denoised) + noise = noise_like(x.shape, device, repeat_noise) + # no noise when t == 0 + nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1))) + return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise + + @torch.no_grad() + def p_sample_loop(self, shape, return_intermediates=False): + device = self.betas.device + b = shape[0] + img = torch.randn(shape, device=device) + intermediates = [img] + for i in tqdm(reversed(range(0, self.num_timesteps)), desc='Sampling t', total=self.num_timesteps): + img = self.p_sample(img, torch.full((b,), i, device=device, dtype=torch.long), + clip_denoised=self.clip_denoised) + if i % self.log_every_t == 0 or i == self.num_timesteps - 1: + intermediates.append(img) + if return_intermediates: + return img, intermediates + return img + + @torch.no_grad() + def sample(self, batch_size=16, return_intermediates=False): + image_size = self.image_size + channels = self.channels + return self.p_sample_loop((batch_size, channels, image_size, image_size), + return_intermediates=return_intermediates) + + def q_sample(self, x_start, t, noise=None): + noise = torch.randn_like(x_start) if noise is None else noise + return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + + extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise) + + def get_loss(self, pred, target, mean=True): + if self.loss_type == 'l1': + loss = (target - pred).abs() + if mean: + loss = loss.mean() + elif self.loss_type == 'l2': + if mean: + loss = torch.nn.functional.mse_loss(target, pred) + else: + loss = torch.nn.functional.mse_loss(target, pred, reduction='none') + else: + raise NotImplementedError("unknown loss type '{loss_type}'") + return loss + + def p_losses(self, x_start, t, noise=None): + noise = default(noise, lambda: 
torch.randn_like(x_start)) + x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise) + model_out = self.model(x_noisy, t) + + loss_dict = {} + if self.parameterization == "eps": + target = noise + elif self.parameterization == "x0": + target = x_start + else: + raise NotImplementedError(f"Parameterization {self.parameterization} not yet supported") + + loss = self.get_loss(model_out, target, mean=False).mean(dim=[1, 2, 3]) + + log_prefix = 'train' if self.training else 'val' + + loss_dict.update({f'{log_prefix}/loss_simple': loss.mean()}) + loss_simple = loss.mean() * self.l_simple_weight + + loss_vlb = (self.lvlb_weights[t] * loss).mean() + loss_dict.update({f'{log_prefix}/loss_vlb': loss_vlb}) + + loss = loss_simple + self.original_elbo_weight * loss_vlb + + loss_dict.update({f'{log_prefix}/loss': loss}) + + return loss, loss_dict + + def forward(self, x, *args, **kwargs): + # b, c, h, w, device, img_size, = *x.shape, x.device, self.image_size + # assert h == img_size and w == img_size, f'height and width of image must be {img_size}' + t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=x.device).long() + return self.p_losses(x, t, *args, **kwargs) + + def on_train_batch_end(self, *args, **kwargs): + if self.use_ema: + self.model_ema(self.model) diff --git a/core/sync.py b/core/sync.py new file mode 100644 index 0000000000000000000000000000000000000000..bc2ee238bb80ee87fbee17d643258162e0cf5c73 --- /dev/null +++ b/core/sync.py @@ -0,0 +1,198 @@ +from multiprocessing import shared_memory + +import random +import pickle +import time +import copy +import torch +import torch.distributed as dist +from .cfg_holder import cfg_unique_holder as cfguh + + +def singleton(class_): + instances = {} + + def getinstance(*args, **kwargs): + if class_ not in instances: + instances[class_] = class_(*args, **kwargs) + return instances[class_] + return getinstance + + +def is_ddp(): + return dist.is_available() and dist.is_initialized() + + +def get_rank(type='local'): + ddp = is_ddp() + global_rank = dist.get_rank() if ddp else 0 + local_world_size = torch.cuda.device_count() + if type == 'global': + return global_rank + elif type == 'local': + return global_rank % local_world_size + elif type == 'node': + return global_rank // local_world_size + elif type == 'all': + return global_rank, \ + global_rank % local_world_size, \ + global_rank // local_world_size + else: + assert False, 'Unknown type' + + +def get_world_size(type='local'): + ddp = is_ddp() + global_rank = dist.get_rank() if ddp else 0 + global_world_size = dist.get_world_size() if ddp else 1 + local_world_size = torch.cuda.device_count() + if type == 'global': + return global_world_size + elif type == 'local': + return local_world_size + elif type == 'node': + return global_world_size // local_world_size + elif type == 'all': + return global_world_size, local_world_size, \ + global_world_size // local_world_size + else: + assert False, 'Unknown type' + + +class barrier_lock(object): + # A node-local barrier built on a shared-memory byte array: slot k is set to 1 + # when local rank k arrives, and rank 0 clears every slot once all have arrived. + def __init__(self, n): + self.n = n + id = int(random.random()*10000) + int(time.time())*10000 + self.lock_shmname = 'barrier_lock_{}'.format(id) + lock_shm = shared_memory.SharedMemory( + name=self.lock_shmname, create=True, size=n) + for i in range(n): + lock_shm.buf[i] = 0 + lock_shm.close() + + def destroy(self): + try: + lock_shm = shared_memory.SharedMemory( + name=self.lock_shmname) + lock_shm.close() + lock_shm.unlink() + except FileNotFoundError: + return + + def wait(self, k): + lock_shm = shared_memory.SharedMemory( + name=self.lock_shmname) + assert 
lock_shm.buf[k] == 0, 'Two waits on the same id are not allowed.' + lock_shm.buf[k] = 1 + if k == 0: + # rank 0 spins until every slot is set, then clears them all to release the barrier + while sum([lock_shm.buf[i]==0 for i in range(self.n)]) != 0: + pass + for i in range(self.n): + lock_shm.buf[i] = 0 + return + else: + # the other ranks spin until rank 0 clears their slot + while lock_shm.buf[k] != 0: + pass + + +class nodewise_sync_global(object): + """ + The global part of nodewise_sync; it needs to be created in the master + process before spawn. + """ + def __init__(self): + self.local_world_size = get_world_size('local') + self.b_lock = barrier_lock(self.local_world_size) + id = int(random.random()*10000) + int(time.time())*10000 + self.id_shmname = 'nodewise_sync_id_shm_{}'.format(id) + + def destroy(self): + self.b_lock.destroy() + try: + shm = shared_memory.SharedMemory(name=self.id_shmname) + shm.close() + shm.unlink() + except FileNotFoundError: + return + + +@singleton +class nodewise_sync(object): + """ + A class that centralizes node-wise sync activities. + The backend is multiprocessing shared memory rather than torch.distributed, + as torch does not support this use case. + """ + def __init__(self): + pass + + def copy_global(self, reference): + self.local_world_size = reference.local_world_size + self.b_lock = reference.b_lock + self.id_shmname = reference.id_shmname + return self + + def local_init(self): + self.ddp = is_ddp() + self.global_rank, self.local_rank, self.node_rank = get_rank('all') + self.global_world_size, self.local_world_size, self.nodes = get_world_size('all') + if self.local_rank == 0: + temp = int(random.random()*10000) + int(time.time())*10000 + temp = pickle.dumps(temp) + shm = shared_memory.SharedMemory( + name=self.id_shmname, create=True, size=len(temp)) + shm.close() + return self + + def random_sync_id(self): + assert self.local_rank is not None, 'Not initialized!' + if self.local_rank == 0: + sync_id = int(random.random()*10000) + int(time.time())*10000 + data = pickle.dumps(sync_id) + shm = shared_memory.SharedMemory(name=self.id_shmname) + shm.buf[0:len(data)] = data[0:len(data)] + self.barrier() + shm.close() + else: + self.barrier() + shm = shared_memory.SharedMemory(name=self.id_shmname) + sync_id = pickle.loads(shm.buf) + shm.close() + return sync_id + + def barrier(self): + self.b_lock.wait(self.local_rank) + + def broadcast_r0(self, data=None): + assert self.local_rank is not None, 'Not initialized!' + id = self.random_sync_id() + shmname = 'broadcast_r0_{}'.format(id) + if self.local_rank == 0: + assert data is not None, 'Rank 0 needs to input data!' + data = pickle.dumps(data) + datan = len(data) + load_info_shm = shared_memory.SharedMemory( + name=shmname, create=True, size=datan) + load_info_shm.buf[0:datan] = data[0:datan] + self.barrier() + self.barrier() + load_info_shm.close() + load_info_shm.unlink() + return None + else: + assert data is None, 'Ranks other than 0 should pass None as data!' + self.barrier() + shm = shared_memory.SharedMemory(name=shmname) + data = pickle.loads(shm.buf) + shm.close() + self.barrier() + return data + + def destroy(self): + self.b_lock.destroy() + try: + shm = shared_memory.SharedMemory(name=self.id_shmname) + shm.close() + shm.unlink() + except FileNotFoundError: + return +
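+ + +# Usage sketch (hypothetical payload values; assumes the create-then-spawn +# worker pattern this module is written for): +# g = nodewise_sync_global() # master process, before spawning workers +# sync = nodewise_sync().copy_global(g).local_init() # inside every worker process +# if sync.local_rank == 0: +# sync.broadcast_r0({'seed': 1234}) # local rank 0 provides the payload +# else: +# payload = sync.broadcast_r0() # every other local rank receives it +# sync.barrier() # node-local barrier across local ranks +# g.destroy() # master process, after workers exit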